From 56befd853017f9e73681d71b80b6e4b754079df4 Mon Sep 17 00:00:00 2001 From: Li Haoyi Date: Sun, 15 Dec 2024 20:42:42 +0800 Subject: [PATCH] Provide Spanning Forest JSON debug info by default (#4136) fixes https://github.com/com-lihaoyi/mill/issues/4125 --- build.mill | 2 +- docs/modules/ROOT/nav.adoc | 5 +- .../ROOT/pages/comparisons/unique.adoc | 4 +- docs/modules/ROOT/pages/index.adoc | 4 +- .../large-builds.adoc => large/large.adoc} | 27 ++------- .../ROOT/pages/large/multi-file-builds.adoc | 16 +++++ .../ROOT/pages/large/selective-execution.adoc | 44 ++++++++++++++ .../ROOT/pages/migrating/migrating.adoc | 2 +- .../10-multi-file-builds/bar/package.mill | 0 .../bar/qux/mymodule/src/BarQux.scala | 0 .../10-multi-file-builds/bar/qux/package.mill | 0 .../multi}/10-multi-file-builds/build.mill | 0 .../10-multi-file-builds/foo/package.mill | 0 .../10-multi-file-builds/foo/src/Foo.scala | 0 .../multi}/11-helper-files/build.mill | 0 .../multi}/11-helper-files/foo/package.mill | 0 .../multi}/11-helper-files/foo/src/Foo.scala | 0 .../multi}/11-helper-files/foo/versions.mill | 0 .../multi}/11-helper-files/src/Main.scala | 0 .../multi}/11-helper-files/util.mill | 0 .../multi}/12-helper-files-sc/build.sc | 0 .../multi}/12-helper-files-sc/foo/package.sc | 0 .../12-helper-files-sc/foo/src/Foo.scala | 0 .../multi}/12-helper-files-sc/foo/versions.sc | 0 .../multi}/12-helper-files-sc/src/Main.scala | 0 .../multi}/12-helper-files-sc/util.sc | 0 .../build.mill.scala | 0 .../foo/package.mill.scala | 0 .../foo/src/Foo.scala | 0 .../foo/versions.mill.scala | 0 .../13-helper-files-mill-scala/src/Main.scala | 0 .../util.mill.scala | 0 .../bar/src/bar/Bar.java | 0 .../bar/test/src/bar/BarTests.java | 0 .../9-selective-execution/build.mill | 3 +- .../foo/src/foo/Foo.java | 0 .../foo/test/src/bar/FooTests.java | 0 .../qux/src/qux/Qux.java | 0 .../qux/test/src/qux/QuxTests.java | 0 example/package.mill | 7 ++- kotlinlib/package.mill | 3 + .../kotlinlib/worker/api/KotlinWorker.scala | 2 +- .../client/src/mill/main/client/OutFiles.java | 3 + main/codesig/src/Logger.scala | 31 ++++++---- main/codesig/src/ReachabilityAnalysis.scala | 27 +++++---- main/codesig/src/ResolvedCalls.scala | 17 +----- .../test/src/{Util.scala => TestUtil.scala} | 2 +- main/eval/src/mill/eval/EvaluatorCore.scala | 48 ++++++++++++++- main/eval/src/mill/eval/GroupEvaluator.scala | 47 +++++++++------ main/eval/src/mill/eval/JsonArrayLogger.scala | 1 + main/src/mill/main/SelectiveExecution.scala | 18 +----- .../src/mill/util}/SpanningForest.scala | 58 +++++++++++++------ .../src/mill/util/SpanningForestTests.scala | 46 +++++++++++++++ .../src/mill/runner/MillBuildRootModule.scala | 22 +++---- 54 files changed, 298 insertions(+), 141 deletions(-) rename docs/modules/ROOT/pages/{depth/large-builds.adoc => large/large.adoc} (59%) create mode 100644 docs/modules/ROOT/pages/large/multi-file-builds.adoc create mode 100644 docs/modules/ROOT/pages/large/selective-execution.adoc rename example/{depth/large => large/multi}/10-multi-file-builds/bar/package.mill (100%) rename example/{depth/large => large/multi}/10-multi-file-builds/bar/qux/mymodule/src/BarQux.scala (100%) rename example/{depth/large => large/multi}/10-multi-file-builds/bar/qux/package.mill (100%) rename example/{depth/large => large/multi}/10-multi-file-builds/build.mill (100%) rename example/{depth/large => large/multi}/10-multi-file-builds/foo/package.mill (100%) rename example/{depth/large => large/multi}/10-multi-file-builds/foo/src/Foo.scala (100%) rename example/{depth/large => 
large/multi}/11-helper-files/build.mill (100%) rename example/{depth/large => large/multi}/11-helper-files/foo/package.mill (100%) rename example/{depth/large => large/multi}/11-helper-files/foo/src/Foo.scala (100%) rename example/{depth/large => large/multi}/11-helper-files/foo/versions.mill (100%) rename example/{depth/large => large/multi}/11-helper-files/src/Main.scala (100%) rename example/{depth/large => large/multi}/11-helper-files/util.mill (100%) rename example/{depth/large => large/multi}/12-helper-files-sc/build.sc (100%) rename example/{depth/large => large/multi}/12-helper-files-sc/foo/package.sc (100%) rename example/{depth/large => large/multi}/12-helper-files-sc/foo/src/Foo.scala (100%) rename example/{depth/large => large/multi}/12-helper-files-sc/foo/versions.sc (100%) rename example/{depth/large => large/multi}/12-helper-files-sc/src/Main.scala (100%) rename example/{depth/large => large/multi}/12-helper-files-sc/util.sc (100%) rename example/{depth/large => large/multi}/13-helper-files-mill-scala/build.mill.scala (100%) rename example/{depth/large => large/multi}/13-helper-files-mill-scala/foo/package.mill.scala (100%) rename example/{depth/large => large/multi}/13-helper-files-mill-scala/foo/src/Foo.scala (100%) rename example/{depth/large => large/multi}/13-helper-files-mill-scala/foo/versions.mill.scala (100%) rename example/{depth/large => large/multi}/13-helper-files-mill-scala/src/Main.scala (100%) rename example/{depth/large => large/multi}/13-helper-files-mill-scala/util.mill.scala (100%) rename example/{depth/large => large/selective}/9-selective-execution/bar/src/bar/Bar.java (100%) rename example/{depth/large => large/selective}/9-selective-execution/bar/test/src/bar/BarTests.java (100%) rename example/{depth/large => large/selective}/9-selective-execution/build.mill (99%) rename example/{depth/large => large/selective}/9-selective-execution/foo/src/foo/Foo.java (100%) rename example/{depth/large => large/selective}/9-selective-execution/foo/test/src/bar/FooTests.java (100%) rename example/{depth/large => large/selective}/9-selective-execution/qux/src/qux/Qux.java (100%) rename example/{depth/large => large/selective}/9-selective-execution/qux/test/src/qux/QuxTests.java (100%) rename main/codesig/test/src/{Util.scala => TestUtil.scala} (93%) rename main/{codesig/src => util/src/mill/util}/SpanningForest.scala (57%) create mode 100644 main/util/test/src/mill/util/SpanningForestTests.scala diff --git a/build.mill b/build.mill index 916ac2d5c8f..0b891de175a 100644 --- a/build.mill +++ b/build.mill @@ -256,7 +256,7 @@ object Deps { } } -def millVersion: T[String] = Task { +def millVersion: T[String] = Task.Input { if (Task.env.contains("MILL_STABLE_VERSION")) VcsVersion.calcVcsState(Task.log).format() else "SNAPSHOT" } diff --git a/docs/modules/ROOT/nav.adoc b/docs/modules/ROOT/nav.adoc index 53bd6fa510c..8d848c59e04 100644 --- a/docs/modules/ROOT/nav.adoc +++ b/docs/modules/ROOT/nav.adoc @@ -95,8 +95,11 @@ // These are things that most Mill developers would not encounter day to day, // but people developing Mill plugins or working on particularly large or // sophisticated Mill builds will need to understand. 
+* xref:large/large.adoc[] +** xref:large/selective-execution.adoc[] +** xref:large/multi-file-builds.adoc[] + * Mill In Depth -** xref:depth/large-builds.adoc[] ** xref:depth/sandboxing.adoc[] ** xref:depth/evaluation-model.adoc[] ** xref:depth/design-principles.adoc[] diff --git a/docs/modules/ROOT/pages/comparisons/unique.adoc b/docs/modules/ROOT/pages/comparisons/unique.adoc index be471c12b8e..b174510ba67 100644 --- a/docs/modules/ROOT/pages/comparisons/unique.adoc +++ b/docs/modules/ROOT/pages/comparisons/unique.adoc @@ -85,8 +85,8 @@ xref:android/java.adoc[Android], and has demonstrated the ability to branch out more distant toolchains like xref:extending/example-typescript-support.adoc[Typescript] and xref:extending/example-python-support.adoc[Python]. -Mill also works well with xref:depth/large-builds.adoc[large builds]: its build logic can be -split into multiple folders, is incrementally compiled, +Mill also works well with xref:large/large.adoc[large builds]: its build logic can be +xref:large/multi-file-builds.adoc[split into multiple folders], is incrementally compiled, lazily initialized, and automatically cached and parallelized. That means that even large codebases can remain fast and responsive: Mill's own build easily manages over 400 modules, and the tool can likely handle thousands of modules without issue. diff --git a/docs/modules/ROOT/pages/index.adoc b/docs/modules/ROOT/pages/index.adoc index 2bec2abd99e..c34ad0f7da1 100644 --- a/docs/modules/ROOT/pages/index.adoc +++ b/docs/modules/ROOT/pages/index.adoc @@ -13,7 +13,7 @@ or xref:comparisons/gradle.adoc[2-4x faster than Gradle] helps keep builds clean and understandable * Mill is an easier alternative to https://bazel.build/[Bazel] -for xref:depth/large-builds.adoc[large multi-language monorepos] with hundreds of modules +for xref:large/large.adoc[large multi-language monorepos] with hundreds of modules To get started using Mill, see the language-specific introductory documentation linked below: @@ -37,7 +37,7 @@ Java platform's performance and usability: xref:depth/evaluation-model.adoc#_caching_at_each_layer_of_the_evaluation_model[caches] and xref:cli/flags.adoc#_jobs_j[parallelizes] build tasks to keep local development fast, and avoids the long configuration times seen in other tools like Gradle or SBT. -xref:depth/large-builds.adoc#_selective_execution[Selective execution] keeps +xref:large/selective-execution.adoc[Selective execution] keeps CI validation times short by only running the tests necessary to validate a code change. * *Maintainability*: Mill's config and xref:javalib/intro.adoc#_custom_build_logic[custom logic] diff --git a/docs/modules/ROOT/pages/depth/large-builds.adoc b/docs/modules/ROOT/pages/large/large.adoc similarity index 59% rename from docs/modules/ROOT/pages/depth/large-builds.adoc rename to docs/modules/ROOT/pages/large/large.adoc index bdfa62d5334..9c0c428adab 100644 --- a/docs/modules/ROOT/pages/depth/large-builds.adoc +++ b/docs/modules/ROOT/pages/large/large.adoc @@ -1,7 +1,4 @@ = Large Builds and Monorepos -:page-aliases: Structuring_Large_Builds.adoc - -include::partial$gtag-config.adoc[] This section walks through Mill features and techniques used for managing large builds. While Mill works great for small single-module projects, it is also able to work @@ -10,25 +7,13 @@ https://github.com/com-lihaoyi/mill[com-lihaoyi/mill] project has ~400 modules, other proprietary projects may have many more. Mill modules are cheap. 
Having more modules does not significantly impact performance -or resource usage, build files are incrementally re-compiled when modified, and modules are -lazily loaded and initialized only when needed. So you are encouraged to break up your project +or resource usage, build files are incrementally re-compiled when modified, and modules are +lazily loaded and initialized only when needed. So you are encouraged to break up your project into modules to manage the layering of your codebase or benefit from parallelism. -== Selective Execution - - -include::partial$example/depth/large/9-selective-execution.adoc[] - -== Multi-file Builds - -include::partial$example/depth/large/10-multi-file-builds.adoc[] - -== Helper Files - -include::partial$example/depth/large/11-helper-files.adoc[] - -== Legacy `.sc` extension - -include::partial$example/depth/large/12-helper-files-sc.adoc[] +Beyond its basic scalability and performance, Mill comes with many features +that help you manage the build system of a large project or codebase: +* xref:large/selective-execution.adoc[] +* xref:large/multi-file-builds.adoc[] \ No newline at end of file diff --git a/docs/modules/ROOT/pages/large/multi-file-builds.adoc b/docs/modules/ROOT/pages/large/multi-file-builds.adoc new file mode 100644 index 00000000000..b2f363c51d3 --- /dev/null +++ b/docs/modules/ROOT/pages/large/multi-file-builds.adoc @@ -0,0 +1,16 @@ += Multi-File Builds +:page-aliases: Structuring_Large_Builds.adoc + +include::partial$gtag-config.adoc[] + +include::partial$example/large/multi/10-multi-file-builds.adoc[] + +== Helper Files + +include::partial$example/large/multi/11-helper-files.adoc[] + +== Legacy `.sc` extension + +include::partial$example/large/multi/12-helper-files-sc.adoc[] + + diff --git a/docs/modules/ROOT/pages/large/selective-execution.adoc b/docs/modules/ROOT/pages/large/selective-execution.adoc new file mode 100644 index 00000000000..75d7c41965b --- /dev/null +++ b/docs/modules/ROOT/pages/large/selective-execution.adoc @@ -0,0 +1,44 @@ += Selective Execution + +include::partial$gtag-config.adoc[] + + +include::partial$example/large/selective/9-selective-execution.adoc[] + + +== Reproducibility and Determinism + +Selective execution relies on the inputs to your project being deterministic +and reproducible, except for the code changes between the two versions, so that +Mill can compare the state of the build inputs before and after and only run +tasks downstream of those that changed. This is usually the case, but there are +some subtleties to be aware of: + +- *Dynamic `Task.Input`s that capture Git metadata must be disabled*, e.g. those using + https://github.com/lefou/mill-vcs-version[mill-vcs-version]. The easiest way to do + this is to guard such dynamic inputs on an environment variable, such that + in most scenarios they return a constant `"SNAPSHOT"` string, and only when + necessary do you pass in the environment variable to compute a real version (e.g. + during publishing). + +```scala +def myProjectVersion: T[String] = Task.Input { + if (Task.env.contains("MY_PROJECT_STABLE_VERSION")) VcsVersion.calcVcsState(Task.log).format() + else "SNAPSHOT" +} +``` + +- *The filesystem layout and position of the before/after codebases must be exactly + the same*. This is not an issue when running `selective.prepare`/`selective.run` on + the same folder on one machine, but if the two calls are run on separate machines + you need to make sure the directory path is the same.
+ +- *You must use the same Operating System and Filesystem*, as differences there will + cause the filesystem signatures to change and thus spuriously trigger downstream tasks. + e.g. you cannot run `selective.prepare` on a Windows machine and `selective.run` on Linux. + +- *Filesystem permissions must be preserved before/after*. e.g. running `selective.run` + on different GitHub Actions machines sharing artifacts can cause issues as + `upload-artifact`/`download-artifact` https://github.com/actions/download-artifact#permission-loss[does not preserve filesystem permissions]. + If this is an issue, you can run `chmod -R 777 .` before each of `selective.{prepare,run}` + to ensure they have the exact same filesystem permissions. diff --git a/docs/modules/ROOT/pages/migrating/migrating.adoc b/docs/modules/ROOT/pages/migrating/migrating.adoc index 639ce84a3aa..aca914c3ee3 100644 --- a/docs/modules/ROOT/pages/migrating/migrating.adoc +++ b/docs/modules/ROOT/pages/migrating/migrating.adoc @@ -283,7 +283,7 @@ to see which ones may help: * xref:fundamentals/modules.adoc#_trait_modules[Trait Modules] to centralize common config -* xref:depth/large-builds.adoc#_multi_file_builds[Multi-File Builds] to let you co-locate +* xref:large/multi-file-builds.adoc[Multi-File Builds] to let you co-locate build logic and the code being built * xref:extending/writing-plugins.adoc[Writing and Publishing your own Mill Plugins] diff --git a/example/depth/large/10-multi-file-builds/bar/package.mill b/example/large/multi/10-multi-file-builds/bar/package.mill similarity index 100% rename from example/depth/large/10-multi-file-builds/bar/package.mill rename to example/large/multi/10-multi-file-builds/bar/package.mill diff --git a/example/depth/large/10-multi-file-builds/bar/qux/mymodule/src/BarQux.scala b/example/large/multi/10-multi-file-builds/bar/qux/mymodule/src/BarQux.scala similarity index 100% rename from example/depth/large/10-multi-file-builds/bar/qux/mymodule/src/BarQux.scala rename to example/large/multi/10-multi-file-builds/bar/qux/mymodule/src/BarQux.scala diff --git a/example/depth/large/10-multi-file-builds/bar/qux/package.mill b/example/large/multi/10-multi-file-builds/bar/qux/package.mill similarity index 100% rename from example/depth/large/10-multi-file-builds/bar/qux/package.mill rename to example/large/multi/10-multi-file-builds/bar/qux/package.mill diff --git a/example/depth/large/10-multi-file-builds/build.mill b/example/large/multi/10-multi-file-builds/build.mill similarity index 100% rename from example/depth/large/10-multi-file-builds/build.mill rename to example/large/multi/10-multi-file-builds/build.mill diff --git a/example/depth/large/10-multi-file-builds/foo/package.mill b/example/large/multi/10-multi-file-builds/foo/package.mill similarity index 100% rename from example/depth/large/10-multi-file-builds/foo/package.mill rename to example/large/multi/10-multi-file-builds/foo/package.mill diff --git a/example/depth/large/10-multi-file-builds/foo/src/Foo.scala b/example/large/multi/10-multi-file-builds/foo/src/Foo.scala similarity index 100% rename from example/depth/large/10-multi-file-builds/foo/src/Foo.scala rename to example/large/multi/10-multi-file-builds/foo/src/Foo.scala diff --git a/example/depth/large/11-helper-files/build.mill b/example/large/multi/11-helper-files/build.mill similarity index 100% rename from example/depth/large/11-helper-files/build.mill rename to example/large/multi/11-helper-files/build.mill diff --git a/example/depth/large/11-helper-files/foo/package.mill
b/example/large/multi/11-helper-files/foo/package.mill similarity index 100% rename from example/depth/large/11-helper-files/foo/package.mill rename to example/large/multi/11-helper-files/foo/package.mill diff --git a/example/depth/large/11-helper-files/foo/src/Foo.scala b/example/large/multi/11-helper-files/foo/src/Foo.scala similarity index 100% rename from example/depth/large/11-helper-files/foo/src/Foo.scala rename to example/large/multi/11-helper-files/foo/src/Foo.scala diff --git a/example/depth/large/11-helper-files/foo/versions.mill b/example/large/multi/11-helper-files/foo/versions.mill similarity index 100% rename from example/depth/large/11-helper-files/foo/versions.mill rename to example/large/multi/11-helper-files/foo/versions.mill diff --git a/example/depth/large/11-helper-files/src/Main.scala b/example/large/multi/11-helper-files/src/Main.scala similarity index 100% rename from example/depth/large/11-helper-files/src/Main.scala rename to example/large/multi/11-helper-files/src/Main.scala diff --git a/example/depth/large/11-helper-files/util.mill b/example/large/multi/11-helper-files/util.mill similarity index 100% rename from example/depth/large/11-helper-files/util.mill rename to example/large/multi/11-helper-files/util.mill diff --git a/example/depth/large/12-helper-files-sc/build.sc b/example/large/multi/12-helper-files-sc/build.sc similarity index 100% rename from example/depth/large/12-helper-files-sc/build.sc rename to example/large/multi/12-helper-files-sc/build.sc diff --git a/example/depth/large/12-helper-files-sc/foo/package.sc b/example/large/multi/12-helper-files-sc/foo/package.sc similarity index 100% rename from example/depth/large/12-helper-files-sc/foo/package.sc rename to example/large/multi/12-helper-files-sc/foo/package.sc diff --git a/example/depth/large/12-helper-files-sc/foo/src/Foo.scala b/example/large/multi/12-helper-files-sc/foo/src/Foo.scala similarity index 100% rename from example/depth/large/12-helper-files-sc/foo/src/Foo.scala rename to example/large/multi/12-helper-files-sc/foo/src/Foo.scala diff --git a/example/depth/large/12-helper-files-sc/foo/versions.sc b/example/large/multi/12-helper-files-sc/foo/versions.sc similarity index 100% rename from example/depth/large/12-helper-files-sc/foo/versions.sc rename to example/large/multi/12-helper-files-sc/foo/versions.sc diff --git a/example/depth/large/12-helper-files-sc/src/Main.scala b/example/large/multi/12-helper-files-sc/src/Main.scala similarity index 100% rename from example/depth/large/12-helper-files-sc/src/Main.scala rename to example/large/multi/12-helper-files-sc/src/Main.scala diff --git a/example/depth/large/12-helper-files-sc/util.sc b/example/large/multi/12-helper-files-sc/util.sc similarity index 100% rename from example/depth/large/12-helper-files-sc/util.sc rename to example/large/multi/12-helper-files-sc/util.sc diff --git a/example/depth/large/13-helper-files-mill-scala/build.mill.scala b/example/large/multi/13-helper-files-mill-scala/build.mill.scala similarity index 100% rename from example/depth/large/13-helper-files-mill-scala/build.mill.scala rename to example/large/multi/13-helper-files-mill-scala/build.mill.scala diff --git a/example/depth/large/13-helper-files-mill-scala/foo/package.mill.scala b/example/large/multi/13-helper-files-mill-scala/foo/package.mill.scala similarity index 100% rename from example/depth/large/13-helper-files-mill-scala/foo/package.mill.scala rename to example/large/multi/13-helper-files-mill-scala/foo/package.mill.scala diff --git 
a/example/depth/large/13-helper-files-mill-scala/foo/src/Foo.scala b/example/large/multi/13-helper-files-mill-scala/foo/src/Foo.scala similarity index 100% rename from example/depth/large/13-helper-files-mill-scala/foo/src/Foo.scala rename to example/large/multi/13-helper-files-mill-scala/foo/src/Foo.scala diff --git a/example/depth/large/13-helper-files-mill-scala/foo/versions.mill.scala b/example/large/multi/13-helper-files-mill-scala/foo/versions.mill.scala similarity index 100% rename from example/depth/large/13-helper-files-mill-scala/foo/versions.mill.scala rename to example/large/multi/13-helper-files-mill-scala/foo/versions.mill.scala diff --git a/example/depth/large/13-helper-files-mill-scala/src/Main.scala b/example/large/multi/13-helper-files-mill-scala/src/Main.scala similarity index 100% rename from example/depth/large/13-helper-files-mill-scala/src/Main.scala rename to example/large/multi/13-helper-files-mill-scala/src/Main.scala diff --git a/example/depth/large/13-helper-files-mill-scala/util.mill.scala b/example/large/multi/13-helper-files-mill-scala/util.mill.scala similarity index 100% rename from example/depth/large/13-helper-files-mill-scala/util.mill.scala rename to example/large/multi/13-helper-files-mill-scala/util.mill.scala diff --git a/example/depth/large/9-selective-execution/bar/src/bar/Bar.java b/example/large/selective/9-selective-execution/bar/src/bar/Bar.java similarity index 100% rename from example/depth/large/9-selective-execution/bar/src/bar/Bar.java rename to example/large/selective/9-selective-execution/bar/src/bar/Bar.java diff --git a/example/depth/large/9-selective-execution/bar/test/src/bar/BarTests.java b/example/large/selective/9-selective-execution/bar/test/src/bar/BarTests.java similarity index 100% rename from example/depth/large/9-selective-execution/bar/test/src/bar/BarTests.java rename to example/large/selective/9-selective-execution/bar/test/src/bar/BarTests.java diff --git a/example/depth/large/9-selective-execution/build.mill b/example/large/selective/9-selective-execution/build.mill similarity index 99% rename from example/depth/large/9-selective-execution/build.mill rename to example/large/selective/9-selective-execution/build.mill index ea5f136b14d..095ee7675a3 100644 --- a/example/depth/large/9-selective-execution/build.mill +++ b/example/large/selective/9-selective-execution/build.mill @@ -22,7 +22,7 @@ // ```bash // > git checkout main # start from the target branch of the PR // -// > ./mill selective.prepare __.test +// > ./mill selective.prepare // // > git checkout pull-request-branch # go to the pull request branch // @@ -105,3 +105,4 @@ Test run bar.BarTests finished: 0 failed, 0 ignored, 1 total, ... // tasks non-selectively, which is convenient if you want to conditionally disable selective // execution (e.g. 
perhaps you want to perform selective execution on pre-merge on pull // requests but not post-merge on the main branch) +// diff --git a/example/depth/large/9-selective-execution/foo/src/foo/Foo.java b/example/large/selective/9-selective-execution/foo/src/foo/Foo.java similarity index 100% rename from example/depth/large/9-selective-execution/foo/src/foo/Foo.java rename to example/large/selective/9-selective-execution/foo/src/foo/Foo.java diff --git a/example/depth/large/9-selective-execution/foo/test/src/bar/FooTests.java b/example/large/selective/9-selective-execution/foo/test/src/bar/FooTests.java similarity index 100% rename from example/depth/large/9-selective-execution/foo/test/src/bar/FooTests.java rename to example/large/selective/9-selective-execution/foo/test/src/bar/FooTests.java diff --git a/example/depth/large/9-selective-execution/qux/src/qux/Qux.java b/example/large/selective/9-selective-execution/qux/src/qux/Qux.java similarity index 100% rename from example/depth/large/9-selective-execution/qux/src/qux/Qux.java rename to example/large/selective/9-selective-execution/qux/src/qux/Qux.java diff --git a/example/depth/large/9-selective-execution/qux/test/src/qux/QuxTests.java b/example/large/selective/9-selective-execution/qux/test/src/qux/QuxTests.java similarity index 100% rename from example/depth/large/9-selective-execution/qux/test/src/qux/QuxTests.java rename to example/large/selective/9-selective-execution/qux/test/src/qux/QuxTests.java diff --git a/example/package.mill b/example/package.mill index bbbd900a963..79d9e84f908 100644 --- a/example/package.mill +++ b/example/package.mill @@ -89,11 +89,14 @@ object `package` extends RootModule with Module { object depth extends Module { - object large extends Cross[ExampleCrossModule](build.listIn(millSourcePath / "large")) - object sandbox extends Cross[ExampleCrossModule](build.listIn(millSourcePath / "sandbox")) object javahome extends Cross[ExampleCrossModule](build.listIn(millSourcePath / "javahome")) } + object large extends Module { + + object selective extends Cross[ExampleCrossModule](build.listIn(millSourcePath / "selective")) + object multi extends Cross[ExampleCrossModule](build.listIn(millSourcePath / "multi")) + } object extending extends Module { object imports extends Cross[ExampleCrossModule](build.listIn(millSourcePath / "imports")) diff --git a/kotlinlib/package.mill b/kotlinlib/package.mill index f0fce59c5e8..2913cb65eb2 100644 --- a/kotlinlib/package.mill +++ b/kotlinlib/package.mill @@ -16,6 +16,9 @@ object `package` extends RootModule with build.MillPublishScalaModule with Build def buildInfoObjectName = "Versions" def buildInfoMembers = Seq( BuildInfo.Value("kotlinVersion", build.Deps.kotlinVersion, "Version of Kotlin"), + BuildInfo.Value("kotlinVersion2", build.Deps.kotlinVersion, "Version of Kotlin"), + BuildInfo.Value("kotlinVersion3", build.Deps.kotlinVersion, "Version of Kotlin"), + BuildInfo.Value("kotlinVersion4", build.Deps.kotlinVersion, "Version of Kotlin"), BuildInfo.Value("koverVersion", build.Deps.RuntimeDeps.koverVersion, "Version of Kover."), BuildInfo.Value("ktfmtVersion", build.Deps.RuntimeDeps.ktfmt.version, "Version of Ktfmt."), BuildInfo.Value("ktlintVersion", build.Deps.RuntimeDeps.ktlint.version, "Version of ktlint."), diff --git a/kotlinlib/worker/src/mill/kotlinlib/worker/api/KotlinWorker.scala b/kotlinlib/worker/src/mill/kotlinlib/worker/api/KotlinWorker.scala index 112f8b19fc0..97f1f9ad4c3 100644 --- a/kotlinlib/worker/src/mill/kotlinlib/worker/api/KotlinWorker.scala +++ 
b/kotlinlib/worker/src/mill/kotlinlib/worker/api/KotlinWorker.scala @@ -10,7 +10,7 @@ import mill.api.{Ctx, Result} trait KotlinWorker { def compile(target: KotlinWorkerTarget, args: Seq[String])(implicit ctx: Ctx): Result[Unit] - + val x = 1 } sealed class KotlinWorkerTarget diff --git a/main/client/src/mill/main/client/OutFiles.java b/main/client/src/mill/main/client/OutFiles.java index 51be8714b36..ddc226c12e4 100644 --- a/main/client/src/mill/main/client/OutFiles.java +++ b/main/client/src/mill/main/client/OutFiles.java @@ -73,4 +73,7 @@ public class OutFiles { * root tasks changed so Mill can decide which tasks to execute. */ public static final String millSelectiveExecution = "mill-selective-execution.json"; + + public static final String millDependencyForest = "mill-dependency-forest.json"; + public static final String millInvalidationForest = "mill-invalidation-forest.json"; } diff --git a/main/codesig/src/Logger.scala b/main/codesig/src/Logger.scala index c0a8e2a1038..b30de439433 100644 --- a/main/codesig/src/Logger.scala +++ b/main/codesig/src/Logger.scala @@ -1,18 +1,29 @@ package mill.codesig -class Logger(logFolder: Option[os.Path]) { +class Logger(mandatoryLogFolder: os.Path, logFolder: Option[os.Path]) { logFolder.foreach(os.remove.all(_)) + os.remove.all(mandatoryLogFolder) private var count = 1 + def log0[T: upickle.default.Writer]( + p: os.Path, + res: sourcecode.Text[T], + prefix: String = "" + ): Unit = { + os.write( + p / s"$prefix${res.source}.json", + upickle.default.stream(res.value, indent = 4), + createFolders = true + ) + count += 1 + } def log[T: upickle.default.Writer](t: => sourcecode.Text[T], prefix: String = ""): Unit = { - lazy val res = t - logFolder.foreach { p => - os.write( - p / s"$count-$prefix${res.source}.json", - upickle.default.stream(res.value, indent = 4), - createFolders = true - ) - count += 1 - } + logFolder.foreach(log0(_, t, s"$count-$prefix")) + } + def mandatoryLog[T: upickle.default.Writer]( + t: => sourcecode.Text[T], + prefix: String = "" + ): Unit = { + log0(mandatoryLogFolder, t, prefix) } } diff --git a/main/codesig/src/ReachabilityAnalysis.scala b/main/codesig/src/ReachabilityAnalysis.scala index 116848bab9d..f58c2fcf6ea 100644 --- a/main/codesig/src/ReachabilityAnalysis.scala +++ b/main/codesig/src/ReachabilityAnalysis.scala @@ -1,7 +1,7 @@ package mill.codesig -import mill.util.Tarjans +import mill.util.{SpanningForest, Tarjans} import upickle.default.{Writer, writer} -import JvmModel._ +import JvmModel.* import scala.collection.immutable.SortedMap import ujson.Obj @@ -77,6 +77,7 @@ class CallGraphAnalysis( .collect { case (CallGraphAnalysis.LocalDef(d), v) => (d.toString, v) } .to(SortedMap) + logger.mandatoryLog(transitiveCallGraphHashes0) logger.log(transitiveCallGraphHashes) lazy val spanningInvalidationForest: Obj = prevTransitiveCallGraphHashesOpt() match { @@ -90,7 +91,7 @@ class CallGraphAnalysis( case None => ujson.Obj() } - logger.log(spanningInvalidationForest) + logger.mandatoryLog(spanningInvalidationForest) } object CallGraphAnalysis { @@ -121,20 +122,22 @@ object CallGraphAnalysis { .filter { nodeIndex => val currentValue = transitiveCallGraphHashes0Map(indexToNodes(nodeIndex)) val prevValue = prevTransitiveCallGraphHashes.get(indexToNodes(nodeIndex).toString) - !prevValue.contains(currentValue) } .toSet - def spanningTreeToJsonTree(node: SpanningForest.Node): ujson.Obj = { - ujson.Obj.from( - node.values.map { case (k, v) => - indexToNodes(k).toString -> spanningTreeToJsonTree(v) - } - ) - } + val reverseGraphMap 
= indexGraphEdges + .zipWithIndex + .flatMap { case (vs, k) => vs.map((_, k)) } + .groupMap(_._1)(_._2) + + val reverseGraphEdges = + indexGraphEdges.indices.map(reverseGraphMap.getOrElse(_, Array())).toArray - spanningTreeToJsonTree(SpanningForest.apply(indexGraphEdges, nodesWithChangedHashes)) + SpanningForest.spanningTreeToJsonTree( + SpanningForest.apply(reverseGraphEdges, nodesWithChangedHashes, false), + k => indexToNodes(k).toString + ) } def indexGraphEdges( diff --git a/main/codesig/src/ResolvedCalls.scala b/main/codesig/src/ResolvedCalls.scala index 6bb5b5698f7..5a32697a17b 100644 --- a/main/codesig/src/ResolvedCalls.scala +++ b/main/codesig/src/ResolvedCalls.scala @@ -1,6 +1,7 @@ package mill.codesig import JvmModel._ import JType.{Cls => JCls} +import mill.util.SpanningForest.breadthFirst import upickle.default.{ReadWriter, macroRW} case class ResolvedCalls( @@ -188,20 +189,4 @@ object ResolvedCalls { ) } - def breadthFirst[T](start: IterableOnce[T])(edges: T => IterableOnce[T]): Seq[T] = { - val seen = collection.mutable.Set.empty[T] - val seenList = collection.mutable.Buffer.empty[T] - val queued = collection.mutable.Queue.from(start) - - while (queued.nonEmpty) { - val current = queued.dequeue() - seen.add(current) - seenList.append(current) - - for (next <- edges(current).iterator) { - if (!seen.contains(next)) queued.enqueue(next) - } - } - seenList.toSeq - } } diff --git a/main/codesig/test/src/Util.scala b/main/codesig/test/src/TestUtil.scala similarity index 93% rename from main/codesig/test/src/Util.scala rename to main/codesig/test/src/TestUtil.scala index 23b19eafbd5..503f1dfb3b6 100644 --- a/main/codesig/test/src/Util.scala +++ b/main/codesig/test/src/TestUtil.scala @@ -19,7 +19,7 @@ object TestUtil { .map(os.Path(_)) ), (_, _) => false, - new Logger(Some(testLogFolder)), + new Logger(testLogFolder, Some(testLogFolder)), () => None ) } diff --git a/main/eval/src/mill/eval/EvaluatorCore.scala b/main/eval/src/mill/eval/EvaluatorCore.scala index 4bdc2f48fbd..62215659950 100644 --- a/main/eval/src/mill/eval/EvaluatorCore.scala +++ b/main/eval/src/mill/eval/EvaluatorCore.scala @@ -5,12 +5,13 @@ import mill.api.Strict.Agg import mill.api._ import mill.define._ import mill.eval.Evaluator.TaskResult - +import mill.main.client.OutFiles import mill.util._ import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} import scala.collection.mutable import scala.concurrent._ +import scala.jdk.CollectionConverters.EnumerationHasAsScala /** * Core logic of evaluating tasks, without any user-facing helper methods @@ -76,6 +77,17 @@ private[mill] trait EvaluatorCore extends GroupEvaluator { val terminals0 = sortedGroups.keys().toVector val failed = new AtomicBoolean(false) val count = new AtomicInteger(1) + val indexToTerminal = sortedGroups.keys().toArray + val terminalToIndex = indexToTerminal.zipWithIndex.toMap + val upstreamIndexEdges = + indexToTerminal.map(t => interGroupDeps.getOrElse(t, Nil).map(terminalToIndex).toArray) + os.write.over( + outPath / OutFiles.millDependencyForest, + SpanningForest.spanningTreeToJsonTree( + SpanningForest(upstreamIndexEdges, indexToTerminal.indices.toSet, true), + i => indexToTerminal(i).render + ).render(indent = 2) + ) val futures = mutable.Map.empty[Terminal, Future[Option[GroupEvaluator.Results]]] @@ -85,6 +97,9 @@ private[mill] trait EvaluatorCore extends GroupEvaluator { val (classToTransitiveClasses, allTransitiveClassMethods) = CodeSigUtils.precomputeMethodNamesPerClass(sortedGroups) + val uncached = new 
java.util.concurrent.ConcurrentHashMap[Terminal, Unit]() + val changedValueHash = new java.util.concurrent.ConcurrentHashMap[Terminal, Unit]() + def evaluateTerminals( terminals: Seq[Terminal], forkExecutionContext: mill.api.Ctx.Fork.Impl, @@ -115,7 +130,7 @@ private[mill] trait EvaluatorCore extends GroupEvaluator { val taskResults = group.map(t => (t, TaskResult[(Val, Int)](failure, () => failure))).toMap futures(terminal) = Future.successful( - Some(GroupEvaluator.Results(taskResults, group.toSeq, false, -1, -1)) + Some(GroupEvaluator.Results(taskResults, group.toSeq, false, -1, -1, false)) ) } else { futures(terminal) = Future.sequence(deps.map(futures)).map { upstreamValues => @@ -191,12 +206,15 @@ private[mill] trait EvaluatorCore extends GroupEvaluator { threadId = threadNumberer.getThreadId(Thread.currentThread()), cached = res.cached ) + if (!res.cached) uncached.put(terminal, ()) + if (res.valueHashChanged) changedValueHash.put(terminal, ()) profileLogger.log( ProfileLogger.Timing( terminal.render, (duration / 1000).toInt, res.cached, + res.valueHashChanged, deps.map(_.render), res.inputsHash, res.previousInputsHash @@ -254,6 +272,32 @@ private[mill] trait EvaluatorCore extends GroupEvaluator { val results: Map[Task[_], TaskResult[(Val, Int)]] = results0.toMap + val reverseInterGroupDeps = interGroupDeps + .iterator + .flatMap { case (k, vs) => vs.map(_ -> k) } + .toSeq + .groupMap(_._1)(_._2) + + val changedTerminalIndices = changedValueHash.keys().asScala.toSet + val downstreamIndexEdges = indexToTerminal + .map(t => + if (changedTerminalIndices(t)) + reverseInterGroupDeps.getOrElse(t, Nil).map(terminalToIndex).toArray + else Array.empty[Int] + ) + + os.write.over( + outPath / OutFiles.millInvalidationForest, + SpanningForest.spanningTreeToJsonTree( + SpanningForest( + downstreamIndexEdges, + uncached.keys().asScala.map(terminalToIndex).toSet, + true + ), + i => indexToTerminal(i).render + ).render(indent = 2) + ) + EvaluatorCore.Results( goals.indexed.map(results(_).map(_._1).result), // result of flatMap may contain non-distinct entries, diff --git a/main/eval/src/mill/eval/GroupEvaluator.scala b/main/eval/src/mill/eval/GroupEvaluator.scala index 7ef94d96e7b..ec8a064324a 100644 --- a/main/eval/src/mill/eval/GroupEvaluator.scala +++ b/main/eval/src/mill/eval/GroupEvaluator.scala @@ -103,17 +103,21 @@ private[mill] trait GroupEvaluator { executionContext, exclusive ) - GroupEvaluator.Results(newResults, newEvaluated.toSeq, null, inputsHash, -1) + GroupEvaluator.Results( + newResults, + newEvaluated.toSeq, + null, + inputsHash, + -1, + valueHashChanged = false + ) case labelled: Terminal.Labelled[_] => val out = if (!labelled.task.ctx.external) outPath else externalOutPath - val paths = EvaluatorPaths.resolveDestPaths( - out, - Terminal.destSegments(labelled) - ) + val paths = EvaluatorPaths.resolveDestPaths(out, Terminal.destSegments(labelled)) val cached = loadCachedJson(logger, inputsHash, labelled, paths) @@ -121,12 +125,13 @@ private[mill] trait GroupEvaluator { logger, inputsHash, labelled, - forceDiscard = - // worker metadata file removed by user, let's recompute the worker - cached.isEmpty + // worker metadata file removed by user, let's recompute the worker + forceDiscard = cached.isEmpty ) - upToDateWorker.map((_, inputsHash)) orElse cached.flatMap(_._2) match { + upToDateWorker.map((_, inputsHash)) orElse cached.flatMap { + case (inputHash, valOpt, valueHash) => valOpt.map((_, valueHash)) + } match { case Some((v, hashCode)) => val res = Result.Success((v, hashCode)) 
val newResults: Map[Task[_], TaskResult[(Val, Int)]] = @@ -137,7 +142,8 @@ private[mill] trait GroupEvaluator { Nil, cached = true, inputsHash, - -1 + -1, + valueHashChanged = false ) case _ => @@ -160,12 +166,15 @@ private[mill] trait GroupEvaluator { exclusive ) - newResults(labelled.task) match { + val valueHash = newResults(labelled.task) match { case TaskResult(Result.Failure(_, Some((v, _))), _) => - handleTaskResult(v, v.##, paths.meta, inputsHash, labelled) + val valueHash = if (terminal.task.asWorker.isEmpty) v.## else inputsHash + handleTaskResult(v, valueHash, paths.meta, inputsHash, labelled) case TaskResult(Result.Success((v, _)), _) => - handleTaskResult(v, v.##, paths.meta, inputsHash, labelled) + val valueHash = if (terminal.task.asWorker.isEmpty) v.## else inputsHash + handleTaskResult(v, valueHash, paths.meta, inputsHash, labelled) + valueHash case _ => // Wipe out any cached meta.json file that exists, so @@ -173,6 +182,7 @@ private[mill] trait GroupEvaluator { // assume it's associated with the possibly-borked state of the // destPath after an evaluation failure. os.remove.all(paths.meta) + 0 } GroupEvaluator.Results( @@ -180,7 +190,8 @@ private[mill] trait GroupEvaluator { newEvaluated.toSeq, cached = if (labelled.task.isInstanceOf[InputImpl[_]]) null else false, inputsHash, - cached.map(_._1).getOrElse(-1) + cached.map(_._1).getOrElse(-1), + !cached.map(_._3).contains(valueHash) ) } } @@ -383,7 +394,7 @@ private[mill] trait GroupEvaluator { inputsHash: Int, labelled: Terminal.Labelled[_], paths: EvaluatorPaths - ): Option[(Int, Option[(Val, Int)])] = { + ): Option[(Int, Option[Val], Int)] = { for { cached <- try Some(upickle.default.read[Evaluator.Cached](paths.meta.toIO)) @@ -405,7 +416,8 @@ private[mill] trait GroupEvaluator { None case NonFatal(_) => None } - } yield (Val(parsed), cached.valueHash) + } yield Val(parsed), + cached.valueHash ) } @@ -457,6 +469,7 @@ private[mill] object GroupEvaluator { newEvaluated: Seq[Task[_]], cached: java.lang.Boolean, inputsHash: Int, - previousInputsHash: Int + previousInputsHash: Int, + valueHashChanged: Boolean ) } diff --git a/main/eval/src/mill/eval/JsonArrayLogger.scala b/main/eval/src/mill/eval/JsonArrayLogger.scala index 87292aef7bd..209c82e4e9d 100644 --- a/main/eval/src/mill/eval/JsonArrayLogger.scala +++ b/main/eval/src/mill/eval/JsonArrayLogger.scala @@ -43,6 +43,7 @@ private object ProfileLogger { label: String, millis: Int, cached: java.lang.Boolean = null, + valueHashChanged: java.lang.Boolean = null, dependencies: Seq[String] = Nil, inputsHash: Int, previousInputsHash: Int = -1 diff --git a/main/src/mill/main/SelectiveExecution.scala b/main/src/mill/main/SelectiveExecution.scala index c692e29cd73..36d4783cb3a 100644 --- a/main/src/mill/main/SelectiveExecution.scala +++ b/main/src/mill/main/SelectiveExecution.scala @@ -4,6 +4,7 @@ import mill.api.Strict import mill.define.{InputImpl, NamedTask, Task} import mill.eval.{CodeSigUtils, Evaluator, Plan, Terminal} import mill.main.client.OutFiles +import mill.util.SpanningForest.breadthFirst import mill.resolve.{Resolve, SelectMode} private[mill] object SelectiveExecution { @@ -114,23 +115,6 @@ private[mill] object SelectiveExecution { breadthFirst(changedRootTasks)(downstreamEdgeMap.getOrElse(_, Nil)) } - def breadthFirst[T](start: IterableOnce[T])(edges: T => IterableOnce[T]): Seq[T] = { - val seen = collection.mutable.Set.empty[T] - val seenList = collection.mutable.Buffer.empty[T] - val queued = collection.mutable.Queue.from(start) - - while (queued.nonEmpty) { - 
val current = queued.dequeue() - seen.add(current) - seenList.append(current) - - for (next <- edges(current).iterator) { - if (!seen.contains(next)) queued.enqueue(next) - } - } - seenList.toSeq - } - def saveMetadata(evaluator: Evaluator, metadata: SelectiveExecution.Metadata): Unit = { os.write.over( evaluator.outPath / OutFiles.millSelectiveExecution, diff --git a/main/codesig/src/SpanningForest.scala b/main/util/src/mill/util/SpanningForest.scala similarity index 57% rename from main/codesig/src/SpanningForest.scala rename to main/util/src/mill/util/SpanningForest.scala index a642cae82db..2627d12a0c9 100644 --- a/main/codesig/src/SpanningForest.scala +++ b/main/util/src/mill/util/SpanningForest.scala @@ -1,5 +1,6 @@ -package mill.codesig -import collection.mutable +package mill.util + +import scala.collection.mutable /** * Algorithm to compute the minimal spanning forest of a directed acyclic graph @@ -11,15 +12,22 @@ import collection.mutable * Returns the forest as a [[Node]] structure with the top-level node containing * the roots of the forest */ -object SpanningForest { - +private[mill] object SpanningForest { + def spanningTreeToJsonTree(node: SpanningForest.Node, stringify: Int => String): ujson.Obj = { + ujson.Obj.from( + node.values.map { case (k, v) => stringify(k) -> spanningTreeToJsonTree(v, stringify) } + ) + } case class Node(values: mutable.Map[Int, Node] = mutable.Map()) - def apply(indexGraphEdges: Array[Array[Int]], importantVertices: Set[Int]): Node = { + def apply( + indexGraphEdges: Array[Array[Int]], + importantVertices: Set[Int], + limitToImportantVertices: Boolean + ): Node = { // Find all importantVertices which are "roots" with no incoming edges // from other importantVertices - val rootChangedNodeIndices = importantVertices.filter(i => - !indexGraphEdges(i).exists(importantVertices.contains(_)) - ) + val destinations = importantVertices.flatMap(indexGraphEdges(_)) + val rootChangedNodeIndices = importantVertices.filter(!destinations.contains(_)) // Prepare a mutable tree structure that we will return, pre-populated with // just the first level of nodes from the `rootChangedNodeIndices`, as well @@ -30,17 +38,11 @@ object SpanningForest { // Do a breadth first search from the `rootChangedNodeIndices` across the // reverse edges of the graph to build up the spanning forest - val downstreamGraphEdges = indexGraphEdges - .zipWithIndex - .flatMap { case (vs, k) => vs.map((_, k)) } - .groupMap(_._1)(_._2) - - ResolvedCalls.breadthFirst(rootChangedNodeIndices) { index => - val nextIndices = - downstreamGraphEdges.getOrElse( - index, - Array[Int]() - ) // needed to add explicit type for Scala 3.5.0-RC6 + breadthFirst(rootChangedNodeIndices) { index => + // needed to add explicit type for Scala 3.5.0-RC6 + val nextIndices = indexGraphEdges(index) + .filter(e => !limitToImportantVertices || importantVertices(e)) + // We build up the spanningForest during a normal breadth first search, // using the `nodeMapping` to quickly find a vertice's tree node so we // can add children to it. 
We need to duplicate the `seen.contains` logic @@ -54,4 +56,22 @@ object SpanningForest { } spanningForest } + + def breadthFirst[T](start: IterableOnce[T])(edges: T => IterableOnce[T]): Seq[T] = { + val seen = collection.mutable.Set.empty[T] + val seenList = collection.mutable.Buffer.empty[T] + val queued = collection.mutable.Queue.from(start) + + while (queued.nonEmpty) { + val current = queued.dequeue() + seen.add(current) + seenList.append(current) + + for (next <- edges(current).iterator) { + if (!seen.contains(next)) queued.enqueue(next) + } + } + seenList.toSeq + } + } diff --git a/main/util/test/src/mill/util/SpanningForestTests.scala b/main/util/test/src/mill/util/SpanningForestTests.scala new file mode 100644 index 00000000000..afab8147a49 --- /dev/null +++ b/main/util/test/src/mill/util/SpanningForestTests.scala @@ -0,0 +1,46 @@ +package mill.util + +import utest.{TestSuite, Tests, test} +import collection.mutable +import SpanningForest.Node +object SpanningForestTests extends TestSuite { + + val tests = Tests { + + test("test") { + val forest = SpanningForest.apply( + Array( + Array(1), + Array(2), + Array(3), + Array(), + Array() + ), + Set(0), + limitToImportantVertices = false + ) + + val expected = Node( + mutable.Map( + 0 -> Node( + mutable.Map( + 1 -> Node( + mutable.Map( + 2 -> Node( + mutable.Map( + 3 -> Node(mutable.Map()) + ) + ) + ) + ) + ) + ) + ) + ) + + assert(forest == expected) + } + + } + +} diff --git a/runner/src/mill/runner/MillBuildRootModule.scala b/runner/src/mill/runner/MillBuildRootModule.scala index 814228f4790..e36732bca6c 100644 --- a/runner/src/mill/runner/MillBuildRootModule.scala +++ b/runner/src/mill/runner/MillBuildRootModule.scala @@ -12,7 +12,6 @@ import mill.main.client.OutFiles._ import mill.main.client.CodeGenConstants.buildFileExtensions import mill.main.{BuildInfo, RootModule} -import scala.collection.immutable.SortedMap import scala.util.Try import mill.define.Target @@ -178,26 +177,19 @@ abstract class MillBuildRootModule()(implicit (isSimpleTarget && !isForwarderCallsite) || isCommand || isMillDiscover }, - logger = new mill.codesig.Logger(Option.when(debugEnabled)(T.dest / "current")), + logger = new mill.codesig.Logger( + T.dest / "current", + Option.when(debugEnabled)(T.dest / "current") + ), prevTransitiveCallGraphHashesOpt = () => - Option.when(os.exists(T.dest / "previous/result.json"))( + Option.when(os.exists(T.dest / "previous/transitiveCallGraphHashes0.json"))( upickle.default.read[Map[String, Int]]( - os.read.stream(T.dest / "previous/result.json") + os.read.stream(T.dest / "previous/transitiveCallGraphHashes0.json") ) ) ) - val result = codesig.transitiveCallGraphHashes - if (debugEnabled) { - os.write( - T.dest / "current/result.json", - upickle.default.stream( - SortedMap.from(codesig.transitiveCallGraphHashes0.map { case (k, v) => (k.toString, v) }), - indent = 4 - ) - ) - } - result + codesig.transitiveCallGraphHashes } override def sources: T[Seq[PathRef]] = Task {
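The new `mill.util.SpanningForest` helper introduced by this patch is what both `out/mill-dependency-forest.json` and `out/mill-invalidation-forest.json` are built from. Below is a minimal sketch (not part of the patch) showing how its two entry points fit together, mirroring the graph used in `SpanningForestTests`; it assumes the snippet is compiled inside the `mill.util` package so the `private[mill]` object is visible, and the `SpanningForestDemo` object name and `node-` labels are purely illustrative.

```scala
package mill.util

// Sketch: build a spanning forest from a tiny dependency graph and render it as JSON,
// the same way EvaluatorCore writes the out/mill-*-forest.json debug files.
object SpanningForestDemo {
  def main(args: Array[String]): Unit = {
    // Edges from each vertex to the vertices it points at: 0 -> 1 -> 2 -> 3, vertex 4 isolated
    // (the same shape as the SpanningForestTests case above).
    val edges: Array[Array[Int]] = Array(Array(1), Array(2), Array(3), Array(), Array())

    // Vertex 0 is the only "important" root; follow all edges rather than restricting the
    // traversal, matching the `limitToImportantVertices = false` test case.
    val forest = SpanningForest(edges, importantVertices = Set(0), limitToImportantVertices = false)

    // Convert vertex indices to readable labels and render the nested-object JSON.
    val json = SpanningForest.spanningTreeToJsonTree(forest, i => s"node-$i")
    println(json.render(indent = 2))
    // structure: {"node-0": {"node-1": {"node-2": {"node-3": {}}}}}
  }
}
```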