diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md
index 77edb755d388..ce8d8b42e548 100644
--- a/docs/markdown/IDE-integration.md
+++ b/docs/markdown/IDE-integration.md
@@ -93,6 +93,7 @@ can provide code completion for all source files.
 ```json
 {
     "language": "language ID",
+    "machine": "build" / "host",
     "compiler": ["The", "compiler", "command"],
     "parameters": ["list", "of", "compiler", "parameters"],
     "sources": ["list", "of", "all", "source", "files", "for", "this", "language"],
@@ -100,6 +101,13 @@ can provide code completion for all source files.
 }
 ```
 
+*(New in 1.7.0)* The `machine` and `language` keys make it possible
+to look up further information about the compiler in the `compilers`
+introspection data. `machine` can be absent if `language` is
+`unknown`; in that case no information about the compiler is
+available, and Meson therefore cannot tell whether the output relates
+to the build or the host machine.
+
 It should be noted that the compiler parameters stored in the
 `parameters` differ from the actual parameters used to compile the
 file. This is because the parameters are optimized for the usage in an
diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md
index 898366095b05..2651a3fa44f6 100644
--- a/docs/markdown/Unit-tests.md
+++ b/docs/markdown/Unit-tests.md
@@ -83,13 +83,13 @@ possible.
 
 By default Meson uses as many concurrent processes as there are cores
 on the test machine. You can override this with the environment
-variable `MESON_TESTTHREADS` like this.
+variable `MESON_TESTTHREADS` or, *since 1.7.0*, `MESON_NUM_PROCESSES`:
 
 ```console
-$ MESON_TESTTHREADS=5 meson test
+$ MESON_NUM_PROCESSES=5 meson test
 ```
 
-Setting `MESON_TESTTHREADS` to 0 enables the default behavior (core
+Setting `MESON_NUM_PROCESSES` to 0 enables the default behavior (core
 count), whereas setting an invalid value results in setting the job
 count to 1.
diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md
index 4a57e8569137..ba6a3b8f8d63 100644
--- a/docs/markdown/howtox.md
+++ b/docs/markdown/howtox.md
@@ -239,6 +239,26 @@ And then pass it through the variable (remember to use absolute path):
 $ SCANBUILD=$(pwd)/my-scan-build.sh ninja -C builddir scan-build
 ```
 
+## Use clippy
+
+If your project includes Rust targets, you can invoke clippy like this:
+
+```console
+$ meson setup builddir
+$ ninja -C builddir clippy
+```
+
+Clippy will also obey the `werror` [builtin option](Builtin-options.md#core-options).
+
+By default Meson uses as many concurrent processes as there are cores
+on the build machine. You can override this with the environment
+variable `MESON_NUM_PROCESSES`.
+
+Meson will look for `clippy-driver` in the same directory as `rustc`,
+or try to invoke it using `rustup` if `rustc` points to a `rustup`
+binary. If `clippy-driver` is not detected properly, you can add it to
+a [machine file](Machine-files.md).
+
 ## Use profile guided optimization
 
 Using profile guided optimization with GCC is a two phase
diff --git a/docs/markdown/snippets/clippy.md b/docs/markdown/snippets/clippy.md
new file mode 100644
index 000000000000..47d02083a0ab
--- /dev/null
+++ b/docs/markdown/snippets/clippy.md
@@ -0,0 +1,8 @@
+## Meson can run "clippy" on Rust projects
+
+Meson now defines a `clippy` target if the project uses the Rust programming
+language. The target runs clippy on all Rust sources, using the `clippy-driver`
+program from the same Rust toolchain as the `rustc` compiler.
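As an aside for reviewers: the howtox section above describes the lookup order for `clippy-driver` (next to `rustc`, or via `rustup run`). The sketch below only illustrates that order outside of Meson; the helper name `find_clippy_driver` and the final `shutil.which` fallback are assumptions for the example, not part of this patch (the shipped logic is `get_rustup_run_and_args()` and `get_rust_tool()` further down in the diff).

```python
import os
import shutil
import typing as T

def find_clippy_driver(rustc_cmd: T.List[str]) -> T.Optional[T.List[str]]:
    """Illustrative only: mimic the documented clippy-driver lookup order."""
    exe = rustc_cmd[0]
    if os.path.basename(exe) == 'rustup' and 'run' in rustc_cmd:
        # Reuse the "rustup [OPTIONS] run TOOLCHAIN" prefix and swap in the
        # tool that rustup should execute instead of rustc.
        run_idx = rustc_cmd.index('run')
        return rustc_cmd[:run_idx + 2] + ['clippy-driver']
    # Otherwise expect clippy-driver to sit next to the rustc binary.
    candidate = os.path.join(os.path.dirname(exe), 'clippy-driver')
    if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
        return [candidate]
    # Fallback for this sketch only: whatever is on PATH, if anything.
    found = shutil.which('clippy-driver')
    return [found] if found else None

print(find_clippy_driver(['rustup', 'run', 'stable', 'rustc']))
print(find_clippy_driver(['/usr/bin/rustc']))
```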
+
+Using `clippy-driver` as the Rust compiler will now emit a warning, as it
+is not meant to be a general-purpose compiler front-end.
diff --git a/docs/markdown/snippets/introspect_machine.md b/docs/markdown/snippets/introspect_machine.md
new file mode 100644
index 000000000000..9b19bd6a20ca
--- /dev/null
+++ b/docs/markdown/snippets/introspect_machine.md
@@ -0,0 +1,5 @@
+## "machine" entry in target introspection data
+
+The JSON data returned by `meson introspect --targets` now has a `machine`
+entry in each `target_sources` block. The new entry can be one of `build`
+or `host` for compiler-built targets, or absent for `custom_target` targets.
diff --git a/docs/markdown/snippets/num-processes.md b/docs/markdown/snippets/num-processes.md
new file mode 100644
index 000000000000..5336377900ce
--- /dev/null
+++ b/docs/markdown/snippets/num-processes.md
@@ -0,0 +1,8 @@
+## Control the number of child processes with an environment variable
+
+Previously, `meson test` checked the `MESON_TESTTHREADS` variable to control
+the number of parallel jobs to run; this is useful, for example, when
+`meson test` is invoked through `ninja test`. With this version, a new variable
+`MESON_NUM_PROCESSES` is supported with a broader scope: in addition to
+`meson test`, it is also used by the `external_project` module and by
+Ninja targets that invoke `clang-tidy`, `clang-format` and `clippy`.
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 3e8d564e2fbf..6bc6286f24f3 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -283,7 +283,7 @@ def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
         kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in {'install', 'build_by_default', 'build_always'}}
         kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()}
         kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)}
-        for_machine = MachineChoice.HOST
+        for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
         objects: T.List[T.Any] = []
         empty_sources: T.List[T.Any] = [] # Passing the unresolved sources list causes errors
@@ -294,6 +294,7 @@ def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
 
         new_target = {
             'name': target.get_basename(),
+            'machine': target.for_machine.get_lower_case_name(),
             'id': target.get_id(),
             'type': target.get_typename(),
             'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)),
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index a4be50f664b1..970fb82f2b4e 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -41,13 +41,14 @@
     from ..linkers.linkers import StaticLinker
     from ..mesonlib import FileMode, FileOrString
 
-    from typing_extensions import TypedDict
+    from typing_extensions import TypedDict, NotRequired
 
     _ALL_SOURCES_TYPE = T.List[T.Union[File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
 
     class TargetIntrospectionData(TypedDict):
 
         language: str
+        machine: NotRequired[str]
         compiler: T.List[str]
         parameters: T.List[str]
         sources: T.List[str]
@@ -2039,6 +2040,12 @@ def compiler_to_generator_args(self, target: build.BuildTarget,
             commands += [input]
         return commands
 
+    def have_language(self, langname: str) -> bool:
+        for for_machine in MachineChoice:
+            if langname in self.environment.coredata.compilers[for_machine]:
+                return True
+
return False + def compiler_to_generator(self, target: build.BuildTarget, compiler: 'Compiler', sources: _ALL_SOURCES_TYPE, diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 05d5320798fd..23b6994cd8b2 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -463,6 +463,8 @@ class RustCrate: display_name: str root_module: str + crate_type: str + target_name: str edition: RUST_EDITIONS deps: T.List[RustDep] cfg: T.List[str] @@ -837,6 +839,7 @@ def create_target_source_introspection(self, target: build.Target, comp: compile # The new entry src_block = { 'language': lang, + 'machine': comp.for_machine.get_lower_case_name(), 'compiler': comp.get_exelist(), 'parameters': parameters, 'sources': [], @@ -1877,6 +1880,7 @@ def __generate_sources_structure(self, root: Path, structured_sources: build.Str return orderdeps, first_file def _add_rust_project_entry(self, name: str, main_rust_file: str, args: CompilerArgs, + crate_type: str, target_name: str, from_subproject: bool, proc_macro_dylib_path: T.Optional[str], deps: T.List[RustDep]) -> None: raw_edition: T.Optional[str] = mesonlib.first(reversed(args), lambda x: x.startswith('--edition')) @@ -1894,6 +1898,8 @@ def _add_rust_project_entry(self, name: str, main_rust_file: str, args: Compiler len(self.rust_crates), name, main_rust_file, + crate_type, + target_name, edition, deps, cfg, @@ -2133,7 +2139,7 @@ def _link_library(libname: str, static: bool, bundle: bool = False): self._add_rust_project_entry(target.name, os.path.abspath(os.path.join(self.environment.build_dir, main_rust_file)), - args, + args, cratetype, target_name, bool(target.subproject), proc_macro_dylib_path, project_deps) @@ -3639,6 +3645,20 @@ def generate_dist(self) -> None: elem.add_item('pool', 'console') self.add_build(elem) + def generate_clippy(self) -> None: + if 'clippy' in self.all_outputs or not self.have_language('rust'): + return + + cmd = self.environment.get_build_command() + \ + ['--internal', 'clippy', self.environment.build_dir] + elem = self.create_phony_target('clippy', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + for crate in self.rust_crates.values(): + if crate.crate_type in {'rlib', 'dylib', 'proc-macro'}: + elem.add_dep(crate.target_name) + self.add_build(elem) + def generate_scanbuild(self) -> None: if not environment.detect_scanbuild(): return @@ -3658,6 +3678,9 @@ def generate_clangtool(self, name: str, extra_arg: T.Optional[str] = None) -> No if extra_arg: target_name += f'-{extra_arg}' extra_args.append(f'--{extra_arg}') + colorout = self.environment.coredata.optstore.get_value('b_colorout') \ + if OptionKey('b_colorout') in self.environment.coredata.optstore else 'always' + extra_args.extend(['--color', colorout]) if not os.path.exists(os.path.join(self.environment.source_dir, '.clang-' + name)) and \ not os.path.exists(os.path.join(self.environment.source_dir, '_clang-' + name)): return @@ -3703,6 +3726,7 @@ def generate_utils(self) -> None: self.generate_scanbuild() self.generate_clangformat() self.generate_clangtidy() + self.generate_clippy() self.generate_tags('etags', 'TAGS') self.generate_tags('ctags', 'ctags') self.generate_tags('cscope', 'cscope') diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py index 7542fb6283a2..d88441dffcd7 100644 --- a/mesonbuild/compilers/detect.py +++ b/mesonbuild/compilers/detect.py @@ -1042,6 +1042,10 @@ def detect_rust_compiler(env: 'Environment', for_machine: 
MachineChoice) -> Rust version = search_version(out) cls = rust.ClippyRustCompiler + mlog.deprecation( + 'clippy-driver is not intended as a general purpose compiler. ' + 'You can use "ninja clippy" in order to run clippy on a ' + 'meson project.') if 'rustc' in out: # On Linux and mac rustc will invoke gcc (clang for mac diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py index 21a57b44fef1..976fa78714c6 100644 --- a/mesonbuild/compilers/mixins/gnu.py +++ b/mesonbuild/compilers/mixins/gnu.py @@ -8,7 +8,6 @@ import abc import functools import os -import multiprocessing import pathlib import re import subprocess @@ -617,8 +616,9 @@ def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T. if threads == 0: if self._has_lto_auto_support: return ['-flto=auto'] - # This matches clang's behavior of using the number of cpus - return [f'-flto={multiprocessing.cpu_count()}'] + # This matches clang's behavior of using the number of cpus, but + # obeying meson's MESON_NUM_PROCESSES convention. + return [f'-flto={mesonlib.determine_worker_count()}'] elif threads > 0: return [f'-flto={threads}'] return super().get_lto_compile_args(threads=threads) diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py index f09911db642c..3c041324273e 100644 --- a/mesonbuild/compilers/rust.py +++ b/mesonbuild/compilers/rust.py @@ -33,6 +33,35 @@ 's': ['-C', 'opt-level=s'], } +def get_rustup_run_and_args(exelist: T.List[str]) -> T.Optional[T.Tuple[T.List[str], T.List[str]]]: + """Given the command for a rustc executable, check if it is invoked via + "rustup run" and if so separate the "rustup [OPTIONS] run TOOLCHAIN" + part from the arguments to rustc. If the returned value is not None, + other tools (for example clippy-driver or rustdoc) can be run by placing + the name of the tool between the two elements of the tuple.""" + e = iter(exelist) + try: + if os.path.basename(next(e)) != 'rustup': + return None + # minimum three strings: "rustup run TOOLCHAIN" + n = 3 + opt = next(e) + + # options come first + while opt.startswith('-'): + n += 1 + opt = next(e) + + # then "run TOOLCHAIN" + if opt != 'run': + return None + + next(e) + next(e) + return exelist[:n], list(e) + except StopIteration: + return None + class RustCompiler(Compiler): # rustc doesn't invoke the compiler itself, it doesn't need a LINKER_PREFIX @@ -65,6 +94,7 @@ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoic super().__init__([], exelist, version, for_machine, info, is_cross=is_cross, full_version=full_version, linker=linker) + self.rustup_run_and_args: T.Optional[T.Tuple[T.List[str], T.List[str]]] = get_rustup_run_and_args(exelist) self.base_options.update({OptionKey(o) for o in ['b_colorout', 'b_ndebug']}) if 'link' in self.linker.id: self.base_options.add(OptionKey('b_vscrt')) @@ -245,6 +275,26 @@ def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]: action = "no" if disable else "yes" return ['-C', f'debug-assertions={action}', '-C', 'overflow-checks=no'] + def get_rust_tool(self, name: str, env: Environment) -> T.List[str]: + if self.rustup_run_and_args: + rustup_exelist, args = self.rustup_run_and_args + # do not use extend so that exelist is copied + exelist = rustup_exelist + [name] + else: + from ..programs import find_external_program + for prog in find_external_program(env, self.for_machine, name, name, + [name], allow_default_for_cross=False): + exelist = [prog.path] + args = self.exelist[1:] + break + else: + 
return [] + + tool = exelist[0] + if os.path.isfile(tool) and os.access(tool, os.X_OK): + return exelist + args + return [] + class ClippyRustCompiler(RustCompiler): diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 810a2b674b40..b9b09c557427 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -211,6 +211,7 @@ def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]: 'build_by_default': i['build_by_default'], 'target_sources': [{ 'language': 'unknown', + 'machine': i['machine'], 'compiler': [], 'parameters': [], 'sources': [str(x) for x in sources], diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 503cb14325dd..330f58e58db6 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -14,7 +14,6 @@ import datetime import enum import json -import multiprocessing import os import pickle import platform @@ -36,7 +35,8 @@ from .coredata import MesonVersionMismatchException, major_versions_differ from .coredata import version as coredata_version from .mesonlib import (MesonException, OrderedSet, RealPathAction, - get_wine_shortpath, join_args, split_args, setup_vsenv) + get_wine_shortpath, join_args, split_args, setup_vsenv, + determine_worker_count) from .options import OptionKey from .mintro import get_infodir, load_info_file from .programs import ExternalProgram @@ -97,27 +97,6 @@ def uniwidth(s: str) -> int: result += UNIWIDTH_MAPPING[w] return result -def determine_worker_count() -> int: - varname = 'MESON_TESTTHREADS' - num_workers = 0 - if varname in os.environ: - try: - num_workers = int(os.environ[varname]) - if num_workers < 0: - raise ValueError - except ValueError: - print(f'Invalid value in {varname}, using 1 thread.') - num_workers = 1 - - if num_workers == 0: - try: - # Fails in some weird environments such as Debian - # reproducible build. 
- num_workers = multiprocessing.cpu_count() - except Exception: - num_workers = 1 - return num_workers - # Note: when adding arguments, please also add them to the completion # scripts in $MESONSRC/data/shell-completions/ def add_arguments(parser: argparse.ArgumentParser) -> None: @@ -152,7 +131,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: help="Run benchmarks instead of tests.") parser.add_argument('--logbase', default='testlog', help="Base name for log file.") - parser.add_argument('-j', '--num-processes', default=determine_worker_count(), type=int, + parser.add_argument('-j', '--num-processes', default=determine_worker_count(['MESON_TESTTHREADS']), type=int, help='How many parallel processes to use.') parser.add_argument('-v', '--verbose', default=False, action='store_true', help='Do not redirect stdout and stderr') diff --git a/mesonbuild/scripts/clangformat.py b/mesonbuild/scripts/clangformat.py index 9ce050458986..b89df3c88e49 100644 --- a/mesonbuild/scripts/clangformat.py +++ b/mesonbuild/scripts/clangformat.py @@ -4,16 +4,15 @@ from __future__ import annotations import argparse -import subprocess from pathlib import Path -from .run_tool import run_tool +from .run_tool import run_clang_tool, run_with_buffered_output from ..environment import detect_clangformat from ..mesonlib import version_compare from ..programs import ExternalProgram import typing as T -def run_clang_format(fname: Path, exelist: T.List[str], check: bool, cformat_ver: T.Optional[str]) -> subprocess.CompletedProcess: +async def run_clang_format(fname: Path, exelist: T.List[str], check: bool, cformat_ver: T.Optional[str]) -> int: clangformat_10 = False if check and cformat_ver: if version_compare(cformat_ver, '>=10'): @@ -22,19 +21,21 @@ def run_clang_format(fname: Path, exelist: T.List[str], check: bool, cformat_ver else: original = fname.read_bytes() before = fname.stat().st_mtime - ret = subprocess.run(exelist + ['-style=file', '-i', str(fname)]) + ret = await run_with_buffered_output(exelist + ['-style=file', '-i', str(fname)]) after = fname.stat().st_mtime if before != after: print('File reformatted: ', fname) if check and not clangformat_10: # Restore the original if only checking. 
fname.write_bytes(original) - ret.returncode = 1 + return 1 return ret def run(args: T.List[str]) -> int: parser = argparse.ArgumentParser() parser.add_argument('--check', action='store_true') + # not yet used - check which version introduced the undocumented option --color + parser.add_argument('--color', default='always') parser.add_argument('sourcedir') parser.add_argument('builddir') options = parser.parse_args(args) @@ -52,4 +53,4 @@ def run(args: T.List[str]) -> int: else: cformat_ver = None - return run_tool('clang-format', srcdir, builddir, run_clang_format, exelist, options.check, cformat_ver) + return run_clang_tool('clang-format', srcdir, builddir, run_clang_format, exelist, options.check, cformat_ver) diff --git a/mesonbuild/scripts/clangtidy.py b/mesonbuild/scripts/clangtidy.py index a922f8514062..550faeef354e 100644 --- a/mesonbuild/scripts/clangtidy.py +++ b/mesonbuild/scripts/clangtidy.py @@ -11,21 +11,22 @@ import shutil import sys -from .run_tool import run_tool +from .run_tool import run_clang_tool, run_with_buffered_output from ..environment import detect_clangtidy, detect_clangapply import typing as T -def run_clang_tidy(fname: Path, tidyexe: list, builddir: Path, fixesdir: T.Optional[Path]) -> subprocess.CompletedProcess: +async def run_clang_tidy(fname: Path, tidyexe: list, builddir: Path, fixesdir: T.Optional[Path]) -> int: args = [] if fixesdir is not None: handle, name = tempfile.mkstemp(prefix=fname.name + '.', suffix='.yaml', dir=fixesdir) os.close(handle) args.extend(['-export-fixes', name]) - return subprocess.run(tidyexe + args + ['-quiet', '-p', str(builddir), str(fname)]) + return await run_with_buffered_output(tidyexe + args + ['-quiet', '-p', str(builddir), str(fname)]) def run(args: T.List[str]) -> int: parser = argparse.ArgumentParser() parser.add_argument('--fix', action='store_true') + parser.add_argument('--color', default='always') parser.add_argument('sourcedir') parser.add_argument('builddir') options = parser.parse_args(args) @@ -38,6 +39,9 @@ def run(args: T.List[str]) -> int: print(f'Could not execute clang-tidy "{" ".join(tidyexe)}"') return 1 + if options.color == 'always' or options.color == 'auto' and sys.stdout.isatty(): + tidyexe += ['--use-color'] + fixesdir: T.Optional[Path] = None if options.fix: applyexe = detect_clangapply() @@ -52,7 +56,7 @@ def run(args: T.List[str]) -> int: fixesdir.unlink() fixesdir.mkdir(parents=True) - tidyret = run_tool('clang-tidy', srcdir, builddir, run_clang_tidy, tidyexe, builddir, fixesdir) + tidyret = run_clang_tool('clang-tidy', srcdir, builddir, run_clang_tidy, tidyexe, builddir, fixesdir) if fixesdir is not None: print('Applying fix-its...') applyret = subprocess.run(applyexe + ['-format', '-style=file', '-ignore-insert-conflict', fixesdir]).returncode diff --git a/mesonbuild/scripts/clippy.py b/mesonbuild/scripts/clippy.py new file mode 100644 index 000000000000..726680f9aaac --- /dev/null +++ b/mesonbuild/scripts/clippy.py @@ -0,0 +1,66 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 The Meson development team + +from __future__ import annotations +from collections import defaultdict +import os +import tempfile +import typing as T + +from .run_tool import run_tool_on_targets, run_with_buffered_output +from .. 
import build, mlog +from ..compilers.rust import RustCompiler +from ..mesonlib import MachineChoice, PerMachine + +class ClippyDriver: + def __init__(self, build: build.Build, tempdir: str): + self.tools: PerMachine[T.List[str]] = PerMachine([], []) + self.warned: T.DefaultDict[str, bool] = defaultdict(lambda: False) + self.tempdir = tempdir + for machine in MachineChoice: + compilers = build.environment.coredata.compilers[machine] + if 'rust' in compilers: + compiler = compilers['rust'] + assert isinstance(compiler, RustCompiler) + self.tools[machine] = compiler.get_rust_tool('clippy-driver', build.environment) + + def warn_missing_clippy(self, machine: str) -> None: + if self.warned[machine]: + return + mlog.warning(f'clippy-driver not found for {machine} machine') + self.warned[machine] = True + + def __call__(self, target: T.Dict[str, T.Any]) -> T.Iterable[T.Coroutine[T.Any, T.Any, int]]: + for src_block in target['target_sources']: + if src_block['language'] == 'rust': + clippy = getattr(self.tools, src_block['machine']) + if not clippy: + self.warn_missing_clippy(src_block['machine']) + continue + + cmdlist = list(clippy) + prev = None + for arg in src_block['parameters']: + if prev: + prev = None + continue + elif arg in {'--emit', '--out-dir'}: + prev = arg + else: + cmdlist.append(arg) + + cmdlist.extend(src_block['sources']) + # the default for --emit is to go all the way to linking, + # and --emit dep-info= is not enough for clippy to do + # enough analysis, so use --emit metadata. + cmdlist.append('--emit') + cmdlist.append('metadata') + cmdlist.append('--out-dir') + cmdlist.append(self.tempdir) + yield run_with_buffered_output(cmdlist) + +def run(args: T.List[str]) -> int: + os.chdir(args[0]) + build_data = build.load(os.getcwd()) + with tempfile.TemporaryDirectory() as d: + return run_tool_on_targets(ClippyDriver(build_data, d)) diff --git a/mesonbuild/scripts/externalproject.py b/mesonbuild/scripts/externalproject.py index ce49fbcbf26e..4013b0acf233 100644 --- a/mesonbuild/scripts/externalproject.py +++ b/mesonbuild/scripts/externalproject.py @@ -5,12 +5,11 @@ import os import argparse -import multiprocessing import subprocess from pathlib import Path import typing as T -from ..mesonlib import Popen_safe, split_args +from ..mesonlib import Popen_safe, split_args, determine_worker_count class ExternalProject: def __init__(self, options: argparse.Namespace): @@ -48,7 +47,7 @@ def supports_jobs_flag(self) -> bool: def build(self) -> int: make_cmd = self.make.copy() if self.supports_jobs_flag(): - make_cmd.append(f'-j{multiprocessing.cpu_count()}') + make_cmd.append(f'-j{determine_worker_count()}') rc = self._run('build', make_cmd) if rc != 0: return rc diff --git a/mesonbuild/scripts/run_tool.py b/mesonbuild/scripts/run_tool.py index a84de15b12df..c5e9de62076e 100644 --- a/mesonbuild/scripts/run_tool.py +++ b/mesonbuild/scripts/run_tool.py @@ -3,17 +3,86 @@ from __future__ import annotations -import itertools +import asyncio.subprocess import fnmatch -import concurrent.futures +import itertools +import json +import signal +import sys from pathlib import Path +from .. 
import mlog from ..compilers import lang_suffixes -from ..mesonlib import quiet_git +from ..mesonlib import quiet_git, join_args, determine_worker_count +from ..mtest import complete_all import typing as T -if T.TYPE_CHECKING: - import subprocess +Info = T.TypeVar("Info") + +async def run_with_buffered_output(cmdlist: T.List[str]) -> int: + """Run the command in cmdlist, buffering the output so that it is + not mixed for multiple child processes. Kill the child on + cancellation.""" + quoted_cmdline = join_args(cmdlist) + p: T.Optional[asyncio.subprocess.Process] = None + try: + p = await asyncio.create_subprocess_exec(*cmdlist, + stdin=asyncio.subprocess.DEVNULL, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT) + stdo, _ = await p.communicate() + except FileNotFoundError as e: + print(mlog.blue('>>>'), quoted_cmdline, file=sys.stderr) + print(mlog.red('not found:'), e.filename, file=sys.stderr) + return 1 + except asyncio.CancelledError: + if p: + p.kill() + await p.wait() + return p.returncode or 1 + else: + return 0 + + if stdo: + print(mlog.blue('>>>'), quoted_cmdline, flush=True) + sys.stdout.buffer.write(stdo) + return p.returncode + +async def _run_workers(infos: T.Iterable[Info], + fn: T.Callable[[Info], T.Iterable[T.Coroutine[None, None, int]]]) -> int: + futures: T.List[asyncio.Future[int]] = [] + semaphore = asyncio.Semaphore(determine_worker_count()) + + async def run_one(worker_coro: T.Coroutine[None, None, int]) -> int: + try: + async with semaphore: + return await worker_coro + except asyncio.CancelledError as e: + worker_coro.throw(e) + return await worker_coro + + def sigterm_handler() -> None: + for f in futures: + f.cancel() + + if sys.platform != 'win32': + loop = asyncio.get_running_loop() + loop.add_signal_handler(signal.SIGINT, sigterm_handler) + loop.add_signal_handler(signal.SIGTERM, sigterm_handler) + + for i in infos: + futures.extend((asyncio.ensure_future(run_one(x)) for x in fn(i))) + if not futures: + return 0 + + try: + await complete_all(futures) + except BaseException: + for f in futures: + f.cancel() + raise + + return max(f.result() for f in futures if f.done() and not f.cancelled()) def parse_pattern_file(fname: Path) -> T.List[str]: patterns = [] @@ -27,7 +96,7 @@ def parse_pattern_file(fname: Path) -> T.List[str]: pass return patterns -def run_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., subprocess.CompletedProcess], *args: T.Any) -> int: +def all_clike_files(name: str, srcdir: Path, builddir: Path) -> T.Iterable[Path]: patterns = parse_pattern_file(srcdir / f'.{name}-include') globs: T.Union[T.List[T.List[Path]], T.List[T.Generator[Path, None, None]]] if patterns: @@ -44,29 +113,26 @@ def run_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., subpro suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp'])) suffixes.add('h') suffixes = {f'.{s}' for s in suffixes} - futures = [] - returncode = 0 - e = concurrent.futures.ThreadPoolExecutor() - try: - for f in itertools.chain(*globs): - strf = str(f) - if f.is_dir() or f.suffix not in suffixes or \ - any(fnmatch.fnmatch(strf, i) for i in ignore): - continue - futures.append(e.submit(fn, f, *args)) - concurrent.futures.wait( - futures, - return_when=concurrent.futures.FIRST_EXCEPTION - ) - finally: - # We try to prevent new subprocesses from being started by canceling - # the futures, but this is not water-tight: some may have started - # between the wait being interrupted or exited and the futures being - # canceled. 
(A fundamental fix would probably require the ability to - # terminate such subprocesses upon cancellation of the future.) - for x in futures: # Python >=3.9: e.shutdown(cancel_futures=True) - x.cancel() - e.shutdown() - if futures: - returncode = max(x.result().returncode for x in futures) - return returncode + for f in itertools.chain.from_iterable(globs): + strf = str(f) + if f.is_dir() or f.suffix not in suffixes or \ + any(fnmatch.fnmatch(strf, i) for i in ignore): + continue + yield f + +def run_clang_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., T.Coroutine[None, None, int]], *args: T.Any) -> int: + if sys.platform == 'win32': + asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) + + def wrapper(path: Path) -> T.Iterable[T.Coroutine[None, None, int]]: + yield fn(path, *args) + return asyncio.run(_run_workers(all_clike_files(name, srcdir, builddir), wrapper)) + +def run_tool_on_targets(fn: T.Callable[[T.Dict[str, T.Any]], + T.Iterable[T.Coroutine[T.Any, T.Any, int]]]) -> int: + if sys.platform == 'win32': + asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) + + with open('meson-info/intro-targets.json', encoding='utf-8') as fp: + targets = json.load(fp) + return asyncio.run(_run_workers(targets, fn)) diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py index 88d8e1f891c7..23355f96d681 100644 --- a/mesonbuild/utils/universal.py +++ b/mesonbuild/utils/universal.py @@ -13,6 +13,7 @@ import stat import time import abc +import multiprocessing import platform, subprocess, operator, os, shlex, shutil, re import collections from functools import lru_cache, wraps @@ -94,6 +95,7 @@ class _VerPickleLoadable(Protocol): 'default_sysconfdir', 'detect_subprojects', 'detect_vcs', + 'determine_worker_count', 'do_conf_file', 'do_conf_str', 'do_replacement', @@ -473,6 +475,10 @@ def classify_unity_sources(compilers: T.Iterable['Compiler'], sources: T.Sequenc return compsrclist +MACHINE_NAMES = ['build', 'host'] +MACHINE_PREFIXES = ['build.', ''] + + class MachineChoice(enum.IntEnum): """Enum class representing one of the two abstract machine names used in @@ -486,10 +492,10 @@ def __str__(self) -> str: return f'{self.get_lower_case_name()} machine' def get_lower_case_name(self) -> str: - return PerMachine('build', 'host')[self] + return MACHINE_NAMES[self.value] def get_prefix(self) -> str: - return PerMachine('build.', '')[self] + return MACHINE_PREFIXES[self.value] class PerMachine(T.Generic[_T]): @@ -498,10 +504,7 @@ def __init__(self, build: _T, host: _T) -> None: self.host = host def __getitem__(self, machine: MachineChoice) -> _T: - return { - MachineChoice.BUILD: self.build, - MachineChoice.HOST: self.host, - }[machine] + return [self.build, self.host][machine.value] def __setitem__(self, machine: MachineChoice, val: _T) -> None: setattr(self, machine.get_lower_case_name(), val) @@ -1085,6 +1088,29 @@ def default_sysconfdir() -> str: return 'etc' +def determine_worker_count(varnames: T.Optional[T.List[str]] = None) -> int: + num_workers = 0 + varnames = varnames or [] + varnames.append('MESON_NUM_PROCESSES') + for varname in varnames: + if varname in os.environ: + try: + num_workers = int(os.environ[varname]) + if num_workers < 0: + raise ValueError + except ValueError: + print(f'Invalid value in {varname}, using 1 thread.') + num_workers = 1 + + if num_workers == 0: + try: + # Fails in some weird environments such as Debian + # reproducible build. 
+ num_workers = multiprocessing.cpu_count() + except Exception: + num_workers = 1 + return num_workers + def has_path_sep(name: str, sep: str = '/\\') -> bool: 'Checks if any of the specified @sep path separators are in @name' for each in sep: diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index b6a87af1fc7f..6544bcce19ff 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -3333,7 +3333,7 @@ def assertKeyTypes(key_type_list, obj, strict: bool = True): ('win_subsystem', (str, None)), ] - targets_sources_typelist = [ + targets_sources_unknown_lang_typelist = [ ('language', str), ('compiler', list), ('parameters', list), @@ -3342,6 +3342,10 @@ def assertKeyTypes(key_type_list, obj, strict: bool = True): ('unity_sources', (list, None)), ] + targets_sources_typelist = targets_sources_unknown_lang_typelist + [ + ('machine', str), + ] + target_sources_linker_typelist = [ ('linker', list), ('parameters', list), @@ -3456,7 +3460,10 @@ def assertKeyTypes(key_type_list, obj, strict: bool = True): targets_to_find.pop(i['name'], None) for j in i['target_sources']: if 'compiler' in j: - assertKeyTypes(targets_sources_typelist, j) + if j['language'] == 'unknown': + assertKeyTypes(targets_sources_unknown_lang_typelist, j) + else: + assertKeyTypes(targets_sources_typelist, j) self.assertEqual(j['sources'], [os.path.normpath(f) for f in tgt[4]]) else: assertKeyTypes(target_sources_linker_typelist, j) @@ -3558,6 +3565,7 @@ def test_introspect_targets_from_source(self): sources += j.get('sources', []) i['target_sources'] = [{ 'language': 'unknown', + 'machine': 'host', 'compiler': [], 'parameters': [], 'sources': sources, @@ -4879,6 +4887,24 @@ def output_name(name, type_): @skip_if_not_language('rust') @unittest.skipIf(not shutil.which('clippy-driver'), 'Test requires clippy-driver') def test_rust_clippy(self) -> None: + if self.backend is not Backend.ninja: + raise unittest.SkipTest('Rust is only supported with ninja currently') + # When clippy is used, we should get an exception since a variable named + # "foo" is used, but is on our denylist + testdir = os.path.join(self.rust_test_dir, '1 basic') + self.init(testdir) + self.build('clippy') + + self.wipe() + self.init(testdir, extra_args=['--werror', '-Db_colorout=never']) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.build('clippy') + self.assertTrue('error: use of a blacklisted/placeholder name `foo`' in cm.exception.stdout or + 'error: use of a disallowed/placeholder name `foo`' in cm.exception.stdout) + + @skip_if_not_language('rust') + @unittest.skipIf(not shutil.which('clippy-driver'), 'Test requires clippy-driver') + def test_rust_clippy_as_rustc(self) -> None: if self.backend is not Backend.ninja: raise unittest.SkipTest('Rust is only supported with ninja currently') # When clippy is used, we should get an exception since a variable named
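To make the new `machine` entry concrete, here is what consuming it from the introspection data can look like. This is a usage sketch, not part of the patch; it assumes an already configured build directory named `builddir` and Meson 1.7.0 or newer.

```python
import json
import subprocess

# Ask Meson for the target list of an existing build directory and
# summarize which machine each source block is compiled for.
targets = json.loads(subprocess.run(
    ['meson', 'introspect', '--targets', 'builddir'],
    check=True, capture_output=True, text=True).stdout)

for target in targets:
    for block in target['target_sources']:
        # "machine" may be absent, e.g. for custom_target outputs.
        machine = block.get('machine', 'n/a')
        lang = block.get('language', 'unknown')
        print(f"{target['name']}: {lang} sources are compiled for the {machine} machine")
```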
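The new `determine_worker_count()` helper in `mesonbuild/utils/universal.py` is what ties `MESON_NUM_PROCESSES` to `meson test`, the `external_project` module, the GCC LTO job count and the clang/clippy tool targets. A quick check of its precedence rules, assuming this patch is applied and the script is run from a Meson source checkout so that `mesonbuild` is importable:

```python
import os

from mesonbuild.utils.universal import determine_worker_count

# MESON_NUM_PROCESSES is appended last to the list of variables that is
# checked, so it overrides tool-specific ones such as MESON_TESTTHREADS.
os.environ['MESON_TESTTHREADS'] = '2'
os.environ['MESON_NUM_PROCESSES'] = '8'
print(determine_worker_count(['MESON_TESTTHREADS']))   # 8

# Without MESON_NUM_PROCESSES the legacy variable still applies to tests.
del os.environ['MESON_NUM_PROCESSES']
print(determine_worker_count(['MESON_TESTTHREADS']))   # 2

# Invalid values degrade to a single job, with a warning printed.
os.environ['MESON_TESTTHREADS'] = 'many'
print(determine_worker_count(['MESON_TESTTHREADS']))   # 1
```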
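Finally, the rewritten `run_tool.py` replaces the thread pool with asyncio: each child's output is buffered and printed in one block, and an `asyncio.Semaphore` sized by `determine_worker_count()` caps concurrency. The following self-contained sketch shows that pattern under stated assumptions; the function names and the placeholder command list are illustrative, not the module's API.

```python
import asyncio
import os
import sys
import typing as T

# Respect MESON_NUM_PROCESSES if set, otherwise fall back to the CPU count.
LIMIT = int(os.environ.get('MESON_NUM_PROCESSES', '0') or 0) or os.cpu_count() or 1

async def run_buffered(cmd: T.List[str], sem: asyncio.Semaphore) -> int:
    # Hold the semaphore while the child runs, so at most LIMIT processes
    # are alive at once; merge stderr into stdout and print the whole
    # buffer at the end to avoid interleaving between children.
    async with sem:
        proc = await asyncio.create_subprocess_exec(
            *cmd,
            stdin=asyncio.subprocess.DEVNULL,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.STDOUT)
        out, _ = await proc.communicate()
    if out:
        print('>>>', ' '.join(cmd), flush=True)
        sys.stdout.buffer.write(out)
        sys.stdout.buffer.flush()
    return proc.returncode or 0

async def main(cmds: T.List[T.List[str]]) -> int:
    sem = asyncio.Semaphore(LIMIT)
    # Like the patched helper, report the worst child return code overall.
    return max(await asyncio.gather(*(run_buffered(c, sem) for c in cmds)), default=0)

if __name__ == '__main__':
    jobs = [['python3', '-c', f'print("job {i}")'] for i in range(8)]
    sys.exit(asyncio.run(main(jobs)))
```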