Improve release scripts #3040

Some release archives were missing critical wheels.
The solution is to:

* Ensure that previously missing wheels are effectively published and made
available from our PyPI repo when building a release.

* Improve the fetch_thirdparty script to fall back on fetching a
source distribution when no wheel is found. Fail loudly if neither a
wheel nor an sdist is found. Also accept special cases of packages with
no wheels and packages with no sources (see the invocation sketch below).

* Ensure that some source-only packages are always included as sources

* Add release smoke tests and run them for each wheel, sdist and app
  archive on supported OS and Python version combinations.

* Move Linux-only requirements to their own requirements file.

* Enable Linux-only package inspectors on Linux only.

* Run base tests on macOS 12.

Signed-off-by: Philippe Ombredanne <[email protected]>
pombredanne committed Aug 10, 2022
1 parent bfa6d96 commit 00740f8
Showing 8 changed files with 489 additions and 128 deletions.
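As context for the fetch_thirdparty changes in this commit, here is a minimal sketch of how the script might be invoked when assembling the thirdparty packages for a release. The package names given to the new options are placeholders, and the pre-existing flag names (--requirements, --dest, --wheels) are assumed from the script's parameters rather than copied from the release workflow:

    # Fetch wheels for every supported environment. For the placeholder
    # source-only package, fall back to its sdist instead of failing; for the
    # placeholder package with no distributions at all, skip the check entirely.
    python etc/scripts/fetch_thirdparty.py \
        --requirements requirements.txt \
        --dest thirdparty \
        --wheels \
        --sdist-only some-source-only-package \
        --no-dist some-undistributed-package

Any other requirement that ends up with neither a wheel nor an sdist is collected and reported in a single exception, so an incomplete release archive now fails the build loudly instead of being produced silently.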
383 changes: 330 additions & 53 deletions .github/workflows/scancode-release.yml

Large diffs are not rendered by default.

17 changes: 17 additions & 0 deletions azure-pipelines.yml
@@ -123,6 +123,15 @@ jobs:
test_suites:
all: venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py

- template: etc/ci/azure-posix.yml
parameters:
job_name: macos12_cpython
image_name: macos-12
python_versions: ['3.7', '3.8', '3.9', '3.10']
python_architecture: x64
test_suites:
all: venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py

- template: etc/ci/azure-win.yml
parameters:
job_name: win2019_cpython_1
@@ -218,6 +227,14 @@ jobs:
test_suites:
all: venv/bin/pip install --upgrade-strategy eager --force-reinstall --upgrade -e .[dev] && venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py

- template: etc/ci/azure-posix.yml
parameters:
job_name: macos12_cpython_latest_from_pip
image_name: macos-12
python_versions: ['3.7', '3.8', '3.9', '3.10']
test_suites:
all: venv/bin/pip install --upgrade-strategy eager --force-reinstall --upgrade -e .[dev] && venv/bin/pytest -n 2 -vvs tests/scancode/test_cli.py

- template: etc/ci/azure-win.yml
parameters:
job_name: win2019_cpython_latest_from_pip
10 changes: 8 additions & 2 deletions configure
@@ -141,9 +141,15 @@ CLI_ARGS=$1
# Defaults. Change these variables to customize this script
################################

EXTRAS="packages"

if [[ $OSTYPE == 'darwin'* ]]; then
EXTRAS=""
fi

# Requirement arguments passed to pip and used by default or with --dev.
REQUIREMENTS="--editable .[packages] --constraint requirements.txt"
DEV_REQUIREMENTS="--editable .[testing,packages] --constraint requirements.txt --constraint requirements-dev.txt"
REQUIREMENTS="--editable .[$EXTRAS] --constraint requirements.txt --constraint requirements-linux.txt"
DEV_REQUIREMENTS="--editable .[$EXTRAS,testing] --constraint requirements.txt --constraint requirements-linux.txt --constraint requirements-dev.txt"
DOCS_REQUIREMENTS="--editable .[docs] --constraint requirements.txt"

# where we create a virtualenv
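As a rough illustration of the configure change above, the default requirement arguments resolve approximately as follows on each platform (a sketch, not verbatim script output):

    # Linux and other non-macOS platforms (EXTRAS="packages"):
    #   --editable .[packages] --constraint requirements.txt --constraint requirements-linux.txt
    # macOS (OSTYPE darwin*, EXTRAS=""):
    #   --editable .[] --constraint requirements.txt --constraint requirements-linux.txt

Because requirements-linux.txt is passed only as a pip constraint file, keeping it on macOS is harmless: constraints pin versions when a package is installed but do not by themselves pull in the Linux-only inspectors.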
81 changes: 47 additions & 34 deletions etc/release/scancode_release_tests.py
@@ -9,27 +9,24 @@
# See https://aboutcode.org for more information about nexB OSS projects.
#

import hashlib
import os
import shutil
import subprocess
import sys

# TODO: also test a pip install with a find-links option to our new PyPI repo


def run_pypi_smoke_tests(pypi_archive):
def run_pypi_smoke_tests(pypi_archive, venv_prefix="venv/bin/"):
"""
Run basic install and "smoke" scancode tests for a PyPI archive.
"""
# archive is either a wheel or an sdist as in
# scancode_toolkit-21.1.21-py3-none-any.whl or scancode-toolkit-21.1.21.tar.gz
run_command(["pip", "install", pypi_archive + "[full]"])
run_command([venv_prefix + "pip", "install", pypi_archive + "[full]"])

with open("some.file", "w") as sf:
sf.write("license: gpl-2.0")

run_command(["scancode", "-clipeu", "--json-pp", "-", "some.file"])
run_command([venv_prefix + "scancode", "-clipeu", "--json-pp", "-", "some.file"])


def run_app_smoke_tests(app_archive):
@@ -42,13 +39,15 @@ def run_app_smoke_tests(app_archive):
# We split the name on "_" to extract the left-hand side, which is the name of
# the root directory inside the archive, e.g. "scancode-toolkit-21.1.21",
# where the archive gets extracted
extract_dir, _, _py_ver_ext = app_archive.partition("_")

_base, _, fn = app_archive.partition("/")
extract_dir, _, _py_ver_ext = fn.partition("_")
print("run_app_smoke_tests: cwd:", os.getcwd())
print("run_app_smoke_tests:", "extracting archive:", app_archive, "to:", extract_dir)
shutil.unpack_archive(app_archive)
print()
print("cwd:", os.getcwd())

extract_loc = os.path.normpath(os.path.abspath(os.path.expanduser(extract_dir)))
print("extract_loc:", extract_loc)
print("run_app_smoke_tests: extract_loc:", extract_loc)
for f in os.listdir(extract_loc):
print(" ", f)
print()
@@ -58,20 +57,38 @@ def run_app_smoke_tests(app_archive):
# minimal tests: update when new scans are available
args = [
os.path.join(extract_loc, "scancode"),
"-clipeu",
"--license",
"--license-text",
"--license-clarity-score",

"--copyright",
"--info",
"--email",
"--url",
"--generated",

"--package",
"--system-package",

"--summary",
"--tallies",
"--classify",
"--consolidate",

"--verbose",
"--json",
"test_scan.json",
"--csv",
"test_scan.csv",
"--html",
"test_scan.html",
"--spdx-tv",
"test_scan.spdx",
"--json-pp",
"-",
os.path.join(extract_loc, "apache-2.0.LICENSE"),

"--yaml", "test_scan.yml",
"--json", "test_scan.json",
"--json-lines", "test_scan.json-lines",
"--csv", "test_scan.csv",
"--html", "test_scan.html",
"--cyclonedx", "test_scan.cdx",
"--cyclonedx-xml", "test_scan.cdx.xml",
"--spdx-tv", "test_scan.spdx",

"--debian", "test_scan.debian.copyright",
"--json-pp", "-",
"apache-2.0.LICENSE"
]

print(f"Testing scancode release: {app_archive}")
@@ -100,18 +117,14 @@ def run_command(args):

if __name__ == "__main__":
args = sys.argv[1:]
action, archive, sha_arch, sha_py = args

with open(archive, "rb") as arch:
current_sha_arch = hashlib.sha256(arch.read()).hexdigest()
assert current_sha_arch == sha_arch

with open(__file__, "rb") as py:
current_sha_py = hashlib.sha256(py.read()).hexdigest()
assert current_sha_py == sha_py
action = args[0]
archive = args[1]

if action == "pypi":
run_pypi_smoke_tests(archive)
else:
# action =='app':
venv_prefix = args[2]
run_pypi_smoke_tests(archive, venv_prefix)
elif action == 'app':
run_app_smoke_tests(archive)
else:
raise Exception("Usage: scancode_release_tests.py <pypi or app> <archive-to-test>")

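A usage sketch of the updated test driver, matching the argument handling above; the archive names and the virtualenv prefix are illustrative:

    # Smoke-test a wheel or sdist: install it into the given virtualenv, then run a small scan
    python etc/release/scancode_release_tests.py pypi dist/scancode_toolkit-31.0.0-py3-none-any.whl venv/bin/

    # Smoke-test an application archive: extract it and run the bundled scancode with a broad set of options
    python etc/release/scancode_release_tests.py app dist/scancode-toolkit-31.0.0_py39-linux.tar.gz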
87 changes: 64 additions & 23 deletions etc/scripts/fetch_thirdparty.py
@@ -12,6 +12,7 @@
import itertools
import os
import sys
from collections import defaultdict

import click

@@ -110,6 +111,39 @@
is_flag=True,
help="Use on disk cached PyPI indexes list of packages and versions and do not refetch if present.",
)
@click.option(
"--sdist-only",
"sdist_only",
type=str,
metavar="SDIST",
default=tuple(),
show_default=False,
multiple=True,
help="Package name(s) that come only in sdist format (no wheels). "
"The command will not fail and exit if no wheel exists for these names",
)
@click.option(
"--wheel-only",
"wheel_only",
type=str,
metavar="WHEEL",
default=tuple(),
show_default=False,
multiple=True,
help="Package name(s) that come only in wheel format (no sdist). "
"The command will not fail and exit if no sdist exists for these names",
)
@click.option(
"--no-dist",
"no_dist",
type=str,
metavar="DIST",
default=tuple(),
show_default=False,
multiple=True,
help="Package name(s) that do not come either in wheel or sdist format. "
"The command will not fail and exit if no distribution exists for these names",
)
@click.help_option("-h", "--help")
def fetch_thirdparty(
requirements_files,
@@ -122,6 +156,9 @@ def fetch_thirdparty(
sdists,
index_urls,
use_cached_index,
sdist_only,
wheel_only,
no_dist,
):
"""
Download to --dest THIRDPARTY_DIR the PyPI wheels, source distributions,
@@ -204,58 +241,62 @@ def fetch_thirdparty(
)
repos.append(repo)

wheels_fetched = []
wheels_not_found = []

sdists_fetched = []
sdists_not_found = []
wheels_or_sdist_not_found = defaultdict(list)

for name, version in sorted(required_name_versions):
nv = name, version
print(f"Processing: {name} @ {version}")
if wheels:
for environment in environments:

if TRACE:
print(f" ==> Fetching wheel for envt: {environment}")
fwfns = utils_thirdparty.download_wheel(

fetched = utils_thirdparty.download_wheel(
name=name,
version=version,
environment=environment,
dest_dir=dest_dir,
repos=repos,
)
if fwfns:
wheels_fetched.extend(fwfns)
else:
wheels_not_found.append(f"{name}=={version} for: {environment}")
if not fetched:
wheels_or_sdist_not_found[f"{name}=={version}"].append(environment)
if TRACE:
print(f" NOT FOUND")

if sdists:
if (sdists or
(f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only)
):
if TRACE:
print(f" ==> Fetching sdist: {name}=={version}")

fetched = utils_thirdparty.download_sdist(
name=name,
version=version,
dest_dir=dest_dir,
repos=repos,
)
if fetched:
sdists_fetched.append(fetched)
else:
sdists_not_found.append(f"{name}=={version}")
if not fetched:
wheels_or_sdist_not_found[f"{name}=={version}"].append("sdist")
if TRACE:
print(f" NOT FOUND")

if wheels and wheels_not_found:
print(f"==> MISSING WHEELS")
for wh in wheels_not_found:
print(f" {wh}")
mia = []
for nv, dists in wheels_or_sdist_not_found.items():
name, _, version = nv.partition("==")
if name in no_dist:
continue
sdist_missing = sdists and "sdist" in dists and not name in wheel_only
if sdist_missing:
mia.append(f"SDist missing: {nv} {dists}")
wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only
if wheels_missing:
mia.append(f"Wheels missing: {nv} {dists}")

if sdists and sdists_not_found:
print(f"==> MISSING SDISTS")
for sd in sdists_not_found:
print(f" {sd}")
if mia:
for m in mia:
print(m)
raise Exception(mia)

print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES")
utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index)
4 changes: 4 additions & 0 deletions requirements-linux.txt
@@ -0,0 +1,4 @@
libfwsi-python==20220123
packagedcode-msitools==0.101.210706
regipy==3.0.2
rpm-inspector-rpm==4.16.1.3.210404
4 changes: 0 additions & 4 deletions requirements.txt
@@ -34,13 +34,11 @@ jaraco.functools==3.5.1
javaproperties==0.8.1
Jinja2==3.1.2
jsonstreams==0.6.0
libfwsi-python==20220123
license-expression==30.0.0
lxml==4.9.1
MarkupSafe==2.1.1
more-itertools==8.13.0
normality==2.3.3
packagedcode-msitools==0.101.210706
packageurl-python==0.10.0
packaging==21.3
parameter-expansion-patched==0.3.1
@@ -61,9 +59,7 @@ pyparsing==3.0.9
pytz==2022.1
PyYAML==6.0
rdflib==6.2.0
regipy==3.0.2
requests==2.28.1
rpm-inspector-rpm==4.16.1.3.210404
saneyaml==0.5.2
six==1.16.0
soupsieve==2.3.2.post1
