From dde001ef7161a313e4fe43bb522e8ccc02dffa9e Mon Sep 17 00:00:00 2001 From: Paolo Bonzini Date: Thu, 18 May 2023 17:52:25 +0200 Subject: [PATCH 1/7] remove remaining traces of meson submodule Signed-off-by: Paolo Bonzini --- .gitlab-ci.d/buildtest-template.yml | 4 ---- scripts/archive-source.sh | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.gitlab-ci.d/buildtest-template.yml b/.gitlab-ci.d/buildtest-template.yml index 7edb50b760..c9f2e737c0 100644 --- a/.gitlab-ci.d/buildtest-template.yml +++ b/.gitlab-ci.d/buildtest-template.yml @@ -5,10 +5,6 @@ before_script: - JOBS=$(expr $(nproc) + 1) script: - - if test -n "$LD_JOBS"; - then - scripts/git-submodule.sh update meson ; - fi - mkdir build - cd build - ../configure --enable-werror --disable-docs --enable-fdt=system diff --git a/scripts/archive-source.sh b/scripts/archive-source.sh index 23e042dacd..c035329154 100755 --- a/scripts/archive-source.sh +++ b/scripts/archive-source.sh @@ -26,7 +26,7 @@ sub_file="${sub_tdir}/submodule.tar" # independent of what the developer currently has initialized # in their checkout, because the build environment is completely # different to the host OS. -submodules="dtc meson ui/keycodemapdb" +submodules="dtc ui/keycodemapdb" submodules="$submodules tests/fp/berkeley-softfloat-3 tests/fp/berkeley-testfloat-3" sub_deinit="" From c673f3d0fe87f6e23a259c620fba7fc9e9d3621f Mon Sep 17 00:00:00 2001 From: Paolo Bonzini Date: Fri, 19 May 2023 10:06:19 +0200 Subject: [PATCH 2/7] mkvenv: replace distlib.database with importlib.metadata/pkg_resources importlib.metadata is just as good as distlib.database and a bit more battle-proven for "egg" based distributions, and in fact that is exactly why mkvenv.py is not using distlib.database to find entry points: it simply does not work for eggs. The only disadvantage of importlib.metadata is that it is not available by default before Python 3.8, so we need a fallback to pkg_resources (again, just like for the case of finding entry points). Do so to fix issues where incorrect egg metadata results in a JSONDecodeError. While at it, reuse the new _get_version function to diagnose an incorrect version of the package even if importlib.metadata is not available. Signed-off-by: Paolo Bonzini --- python/scripts/mkvenv.py | 113 ++++++++++++++++++++++----------------- python/setup.cfg | 6 --- 2 files changed, 65 insertions(+), 54 deletions(-) diff --git a/python/scripts/mkvenv.py b/python/scripts/mkvenv.py index 8c036c019a..6c78a2c112 100644 --- a/python/scripts/mkvenv.py +++ b/python/scripts/mkvenv.py @@ -76,7 +76,6 @@ from typing import ( Union, ) import venv -import warnings # Try to load distlib, with a fallback to pip's vendored version. @@ -84,7 +83,6 @@ import warnings # outside the venv or before a potential call to ensurepip in checkpip(). HAVE_DISTLIB = True try: - import distlib.database import distlib.scripts import distlib.version except ImportError: @@ -92,7 +90,6 @@ except ImportError: # Reach into pip's cookie jar. pylint and flake8 don't understand # that these imports will be used via distlib.xxx. 
from pip._vendor import distlib - import pip._vendor.distlib.database # noqa, pylint: disable=unused-import import pip._vendor.distlib.scripts # noqa, pylint: disable=unused-import import pip._vendor.distlib.version # noqa, pylint: disable=unused-import except ImportError: @@ -556,6 +553,57 @@ def pkgname_from_depspec(dep_spec: str) -> str: return match.group(0) +def _get_version_importlib(package: str) -> Optional[str]: + # pylint: disable=import-outside-toplevel + # pylint: disable=no-name-in-module + # pylint: disable=import-error + try: + # First preference: Python 3.8+ stdlib + from importlib.metadata import ( # type: ignore + PackageNotFoundError, + distribution, + ) + except ImportError as exc: + logger.debug("%s", str(exc)) + # Second preference: Commonly available PyPI backport + from importlib_metadata import ( # type: ignore + PackageNotFoundError, + distribution, + ) + + try: + return str(distribution(package).version) + except PackageNotFoundError: + return None + + +def _get_version_pkg_resources(package: str) -> Optional[str]: + # pylint: disable=import-outside-toplevel + # Bundled with setuptools; has a good chance of being available. + import pkg_resources + + try: + return str(pkg_resources.get_distribution(package).version) + except pkg_resources.DistributionNotFound: + return None + + +def _get_version(package: str) -> Optional[str]: + try: + return _get_version_importlib(package) + except ImportError as exc: + logger.debug("%s", str(exc)) + + try: + return _get_version_pkg_resources(package) + except ImportError as exc: + logger.debug("%s", str(exc)) + raise Ouch( + "Neither importlib.metadata nor pkg_resources found. " + "Use Python 3.8+, or install importlib-metadata or setuptools." + ) from exc + + def diagnose( dep_spec: str, online: bool, @@ -581,26 +629,7 @@ def diagnose( bad = False pkg_name = pkgname_from_depspec(dep_spec) - pkg_version = None - - has_importlib = False - try: - # Python 3.8+ stdlib - # pylint: disable=import-outside-toplevel - # pylint: disable=no-name-in-module - # pylint: disable=import-error - from importlib.metadata import ( # type: ignore - PackageNotFoundError, - version, - ) - - has_importlib = True - try: - pkg_version = version(pkg_name) - except PackageNotFoundError: - pass - except ModuleNotFoundError: - pass + pkg_version = _get_version(pkg_name) lines = [] @@ -609,14 +638,9 @@ def diagnose( f"Python package '{pkg_name}' version '{pkg_version}' was found," " but isn't suitable." ) - elif has_importlib: - lines.append( - f"Python package '{pkg_name}' was not found nor installed." - ) else: lines.append( - f"Python package '{pkg_name}' is either not found or" - " not a suitable version." + f"Python package '{pkg_name}' was not found nor installed." ) if wheels_dir: @@ -711,21 +735,18 @@ def _do_ensure( :param online: If True, fall back to PyPI. :param wheels_dir: If specified, search this path for packages. 
""" - with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", category=UserWarning, module="distlib" - ) - dist_path = distlib.database.DistributionPath(include_egg=True) - absent = [] - present = [] - for spec in dep_specs: - matcher = distlib.version.LegacyMatcher(spec) - dist = dist_path.get_distribution(matcher.name) - if dist is None or not matcher.match(dist.version): - absent.append(spec) - else: - logger.info("found %s", dist) - present.append(matcher.name) + absent = [] + present = [] + for spec in dep_specs: + matcher = distlib.version.LegacyMatcher(spec) + ver = _get_version(matcher.name) + if ver is None or not matcher.match( + distlib.version.LegacyVersion(ver) + ): + absent.append(spec) + else: + logger.info("found %s %s", matcher.name, ver) + present.append(matcher.name) if present: generate_console_scripts(present) @@ -843,10 +864,6 @@ def main() -> int: if os.environ.get("V"): logging.basicConfig(level=logging.INFO) - # These are incredibly noisy even for V=1 - logging.getLogger("distlib.metadata").addFilter(lambda record: False) - logging.getLogger("distlib.database").addFilter(lambda record: False) - parser = argparse.ArgumentParser( prog="mkvenv", description="QEMU pyvenv bootstrapping utility", diff --git a/python/setup.cfg b/python/setup.cfg index 5abb7d30ad..42f0b0be07 100644 --- a/python/setup.cfg +++ b/python/setup.cfg @@ -115,9 +115,6 @@ ignore_missing_imports = True [mypy-distlib] ignore_missing_imports = True -[mypy-distlib.database] -ignore_missing_imports = True - [mypy-distlib.scripts] ignore_missing_imports = True @@ -127,9 +124,6 @@ ignore_missing_imports = True [mypy-pip._vendor.distlib] ignore_missing_imports = True -[mypy-pip._vendor.distlib.database] -ignore_missing_imports = True - [mypy-pip._vendor.distlib.scripts] ignore_missing_imports = True From b0fcc6fc7fc11278c8ca344d8d9c6e5d05742f79 Mon Sep 17 00:00:00 2001 From: Paolo Bonzini Date: Fri, 19 May 2023 19:32:56 +0200 Subject: [PATCH 3/7] build: rebuild build.ninja using "meson setup --reconfigure" Do not use the rule in build.ninja, because the path to meson is hardcoded in build.ninja and this breaks if meson moves (for example if the distro meson suddenly becomes too old after an update). Reported-by: Peter Maydell Tested-by: Peter Maydell Signed-off-by: Paolo Bonzini --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 3c7d67142f..08fb6a3b05 100644 --- a/Makefile +++ b/Makefile @@ -115,15 +115,15 @@ Makefile.ninja: build.ninja $(NINJA) -t query build.ninja | sed -n '1,/^ input:/d; /^ outputs:/q; s/$$/ \\/p'; \ } > $@.tmp && mv $@.tmp $@ -include Makefile.ninja +endif +ifneq ($(MESON),) # A separate rule is needed for Makefile dependencies to avoid -n build.ninja: build.ninja.stamp $(build-files): build.ninja.stamp: meson.stamp $(build-files) - $(NINJA) $(if $V,-v,) build.ninja && touch $@ -endif + $(MESON) setup --reconfigure $(SRC_PATH) && touch $@ -ifneq ($(MESON),) Makefile.mtest: build.ninja scripts/mtest2make.py $(MESON) introspect --targets --tests --benchmarks | $(PYTHON) scripts/mtest2make.py > $@ -include Makefile.mtest From 973038db87154f954e8cd889d706089489a43d46 Mon Sep 17 00:00:00 2001 From: Paolo Bonzini Date: Fri, 19 May 2023 20:04:07 +0200 Subject: [PATCH 4/7] configure: fix backwards-compatibility for meson sphinx_build option Reintroduce the cmd_line.txt mangling to remove the sphinx_build option when rerunning meson. 
The mechanism was removed in commit 75cc28648574 ("configure: remove backwards-compatibility code", 2023-01-11) because the fixups were obsolete at the time; however, the Meson deprecation mechanism doesn't quite work when options are finally removed, so we need to bring it back. Reported-by: Peter Maydell Tested-by: Peter Maydell Signed-off-by: Paolo Bonzini --- configure | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/configure b/configure index bcab685cfd..f7cd376e52 100755 --- a/configure +++ b/configure @@ -1987,6 +1987,14 @@ if test "$skip_meson" = no; then if test "$?" -ne 0 ; then error_exit "meson setup failed" fi +else + if test -f meson-private/cmd_line.txt; then + # Adjust old command line options that were removed + # sed -i is not portable + perl -i -ne ' + /^sphinx_build/ && next; + print;' meson-private/cmd_line.txt + fi fi # Save the configure command line for later reuse. From d37c21b5fb34aaa01eeabf75a57cf141d76af42f Mon Sep 17 00:00:00 2001 From: Paolo Bonzini Date: Fri, 19 May 2023 10:18:07 +0200 Subject: [PATCH 5/7] mkvenv: pass first missing package to diagnose() MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit If sphinx is present but the theme is not, mkvenv will print an inaccurate diagnostic: ERROR: Could not find a version that satisfies the requirement sphinx-rtd-theme>=0.5.0 (from versions: none) ERROR: No matching distribution found for sphinx-rtd-theme>=0.5.0 'sphinx>=1.6.0' not found: • Python package 'sphinx' version '5.3.0' was found, but isn't suitable. • mkvenv was configured to operate offline and did not check PyPI. Instead, ignore the packages that were found to be present, and report an error based on the first absent package. Signed-off-by: Paolo Bonzini --- python/scripts/mkvenv.py | 37 +++++++++++++++++++++++++------------ 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/python/scripts/mkvenv.py b/python/scripts/mkvenv.py index 6c78a2c112..3a9aef46a5 100644 --- a/python/scripts/mkvenv.py +++ b/python/scripts/mkvenv.py @@ -722,7 +722,8 @@ def _do_ensure( dep_specs: Sequence[str], online: bool = False, wheels_dir: Optional[Union[str, Path]] = None, -) -> None: + prog: Optional[str] = None, +) -> Optional[Tuple[str, bool]]: """ Use pip to ensure we have the package specified by @dep_specs. @@ -752,10 +753,24 @@ def _do_ensure( generate_console_scripts(present) if absent: - # Some packages are missing or aren't a suitable version, - # install a suitable (possibly vendored) package. - print(f"mkvenv: installing {', '.join(absent)}", file=sys.stderr) - pip_install(args=absent, online=online, wheels_dir=wheels_dir) + if online or wheels_dir: + # Some packages are missing or aren't a suitable version, + # install a suitable (possibly vendored) package. + print(f"mkvenv: installing {', '.join(absent)}", file=sys.stderr) + try: + pip_install(args=absent, online=online, wheels_dir=wheels_dir) + return None + except subprocess.CalledProcessError: + pass + + return diagnose( + absent[0], + online, + wheels_dir, + prog if absent[0] == dep_specs[0] else None, + ) + + return None def ensure( @@ -785,14 +800,12 @@ def ensure( if not HAVE_DISTLIB: raise Ouch("a usable distlib could not be found, please install it") - try: - _do_ensure(dep_specs, online, wheels_dir) - except subprocess.CalledProcessError as exc: + result = _do_ensure(dep_specs, online, wheels_dir, prog) + if result: # Well, that's not good. 
- msg, bad = diagnose(dep_specs[0], online, wheels_dir, prog) - if bad: - raise Ouch(msg) from exc - raise SystemExit(f"\n{msg}\n\n") from exc + if result[1]: + raise Ouch(result[0]) + raise SystemExit(f"\n{result[0]}\n\n") def post_venv_setup() -> None: From 9fd9f3952f20046dd454f85dc46c9d13af50c700 Mon Sep 17 00:00:00 2001 From: Paolo Bonzini Date: Thu, 18 May 2023 08:51:52 +0200 Subject: [PATCH 6/7] gitlab: custom-runners: preserve more artifacts for debugging Since custom runners are not generally available, make it possible to debug the differences between a successful and a failing build by comparing the logs and the build.ninja rules. Acked-by: Richard Henderson Signed-off-by: Paolo Bonzini --- .gitlab-ci.d/custom-runners.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.d/custom-runners.yml b/.gitlab-ci.d/custom-runners.yml index 34a1e6f327..8e5b9500f4 100644 --- a/.gitlab-ci.d/custom-runners.yml +++ b/.gitlab-ci.d/custom-runners.yml @@ -20,8 +20,10 @@ variables: artifacts: name: "$CI_JOB_NAME-$CI_COMMIT_REF_SLUG" expire_in: 7 days + when: always paths: - - build/meson-logs/testlog.txt + - build/build.ninja + - build/meson-logs reports: junit: build/meson-logs/testlog.junit.xml From 4b424c757188f7a47630a4d8edcf4ad9f19255bc Mon Sep 17 00:00:00 2001 From: Paolo Bonzini Date: Thu, 18 May 2023 11:27:39 +0200 Subject: [PATCH 7/7] scripts: make sure scripts are invoked via $(PYTHON) Some scripts are invoked via the first "python3" binary in the PATH, because they are executable and their shebang line is "#! /usr/bin/env python3". To enforce usage of $(PYTHON), make them nonexecutable. Scripts invoked via meson need nothing else, and meson-buildoptions.py is already using $(PYTHON). For probe-gdb-support.py however the invocation in the configure script has to be adjusted. Reviewed-by: Richard Henderson Signed-off-by: Paolo Bonzini --- configure | 2 +- scripts/meson-buildoptions.py | 0 scripts/modinfo-collect.py | 0 scripts/modinfo-generate.py | 0 scripts/probe-gdb-support.py | 0 5 files changed, 1 insertion(+), 1 deletion(-) mode change 100755 => 100644 scripts/meson-buildoptions.py mode change 100755 => 100644 scripts/modinfo-collect.py mode change 100755 => 100644 scripts/modinfo-generate.py mode change 100755 => 100644 scripts/probe-gdb-support.py diff --git a/configure b/configure index f7cd376e52..1bdc7fd69b 100755 --- a/configure +++ b/configure @@ -1767,7 +1767,7 @@ if test -n "$gdb_bin"; then gdb_version=$($gdb_bin --version | head -n 1) if version_ge ${gdb_version##* } 9.1; then echo "HAVE_GDB_BIN=$gdb_bin" >> $config_host_mak - gdb_arches=$("$source_path/scripts/probe-gdb-support.py" $gdb_bin) + gdb_arches=$($python "$source_path/scripts/probe-gdb-support.py" $gdb_bin) else gdb_bin="" fi diff --git a/scripts/meson-buildoptions.py b/scripts/meson-buildoptions.py old mode 100755 new mode 100644 diff --git a/scripts/modinfo-collect.py b/scripts/modinfo-collect.py old mode 100755 new mode 100644 diff --git a/scripts/modinfo-generate.py b/scripts/modinfo-generate.py old mode 100755 new mode 100644 diff --git a/scripts/probe-gdb-support.py b/scripts/probe-gdb-support.py old mode 100755 new mode 100644
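For reference, below is a minimal standalone sketch of the version-lookup fallback
that patch 2 introduces in mkvenv.py: prefer importlib.metadata (Python 3.8+ stdlib),
and fall back to setuptools' pkg_resources on older interpreters. It is not part of
the series; the function name and the package names in the demo loop are illustrative
only, and it assumes nothing beyond the standard library plus setuptools for the
fallback path.

    #!/usr/bin/env python3
    """Sketch of mkvenv's _get_version() fallback (not part of the series)."""
    from typing import Optional


    def get_version(package: str) -> Optional[str]:
        """Return the installed version of 'package', or None if it is absent."""
        try:
            # First preference: Python 3.8+ stdlib. mkvenv additionally tries the
            # importlib-metadata PyPI backport, which is omitted in this sketch.
            from importlib.metadata import PackageNotFoundError, distribution
            try:
                return str(distribution(package).version)
            except PackageNotFoundError:
                return None
        except ImportError:
            pass

        # Fallback: pkg_resources ships with setuptools, so it has a good
        # chance of being available even on Python < 3.8.
        import pkg_resources
        try:
            return str(pkg_resources.get_distribution(package).version)
        except pkg_resources.DistributionNotFound:
            return None


    if __name__ == "__main__":
        # Illustrative package names; output depends on the local environment.
        for name in ("sphinx", "meson", "no-such-package"):
            print(name, "->", get_version(name))

Returning None for a missing distribution (rather than raising) is what lets
_do_ensure() in patch 2 treat "not installed" and "installed but wrong version"
uniformly when deciding which specs to hand to pip.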