diff --git a/.bazelrc_shared b/.bazelrc_shared
index b12fd528e..9ee97bf7e 100644
--- a/.bazelrc_shared
+++ b/.bazelrc_shared
@@ -20,7 +20,6 @@
 common --modify_execution_info=CppArchive=+supports-path-mapping
 common --modify_execution_info=CppCompile=+supports-path-mapping
 common --modify_execution_info=CppModuleMap=+supports-path-mapping
-common --announce_rc
 common --color=yes
 common:rules --disk_cache=.bazel_cache
 common:tests --disk_cache=../.bazel_cache
@@ -51,3 +50,5 @@
 common --incompatible_strict_action_env
 #common --noincompatible_enable_deprecated_label_apis
 test --test_output=all
+
+try-import %workspace%/user.bazelrc
diff --git a/.gitignore b/.gitignore
index 747870f5c..de7f94214 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,7 @@
 hash2
 target/
 !bazel-*.rc
+/user.bazelrc
 
 # Ignore files generated by IDEs.
 /.classpath
diff --git a/MODULE.bazel b/MODULE.bazel
index 83339a7c7..8520baa42 100644
--- a/MODULE.bazel
+++ b/MODULE.bazel
@@ -41,6 +41,7 @@ annex.install(
         "com.thesamet.scalapb:scalapb-runtime_2.13:{}".format(scalapb_version),
         "net.sourceforge.argparse4j:argparse4j:0.8.1",
         "org.jacoco:org.jacoco.core:0.7.5.201505241946",
+        "org.playframework:play-json_3:3.0.4",
         "org.scala-lang:scala-compiler:{}".format(scala_2_13_version),
         "org.scala-lang:scala-library:{}".format(scala_2_13_version),
         "org.scala-lang:scala-reflect:{}".format(scala_2_13_version),
diff --git a/README.md b/README.md
index 201624be5..d561d54b1 100644
--- a/README.md
+++ b/README.md
@@ -9,10 +9,12 @@ vastly improve build times. However, to see these benefits, a project must first
 tiny packages and make use of fine-grained dependencies. This is not always a realistic short-term
 goal for large, monorepo Scala projects.
 
-`lucidsoftware/rules_scala` allows for the optional use of Zinc incremental compilation to provide a
-stepping stone for these projects as they migrate to Bazel. Although we've verified it to be correct
-and determinisitc, we recommend leaving this disabled, as fine-grained and isolated targets are
-more in-line with the [Bazel philosophy](https://bazel.build/basics/hermeticity).
+`lucidsoftware/rules_scala` used to allow for the optional use of Zinc incremental compilation to
+provide a stepping stone for these projects as they migrate to Bazel. Although we still reuse code
+from Zinc and the compiler bridge, this ruleset no longer supports incremental compilation.
+Mitigating nondeterminism issues required introducing an enormous amount of complexity to the
+compilation worker, and we eventually discovered that supporting incremental compilation added
+substantial overhead to compilation times.
 
 `lucidsoftware/rules_scala` is written with maintainability and accessibility in mind. It aims to
 facilitate the transition to Bazel, and to satisfy use cases throughout the Scala ecosystem.
@@ -40,7 +42,6 @@ straightforward.
 * Errors on indirect and unused dependencies
   * Buildozer suggestions for dependency errors
 * [Optional Worker strategy](docs/scala.md#workers)
-* [Optional Zinc-based stateful incremental compilation](docs/stateful.md#stateful-compilation)
 * [Scalafmt](docs/scalafmt.md#scalafmt) integration
 * Protobuf support with ScalaPB
   * [scala_proto_library](docs/stardoc/scala_proto.md#scala_proto_library)
@@ -49,7 +50,6 @@ straightforward.
 * [Customizable rules](docs/newdocs/phases.md#customizing-the-core-rules)
 * [Multiple Scala versions in one build](docs/newdocs/scala_versions.md#specifying-the-scala-version-to-use), including Scala 3 (Dotty).
 * [Optimal handling of macros and ijars](docs/newdocs/macros.md#macros-and-ijars)
-* [Pass flags to Zinc compiler](docs/newdocs/zinc_flags.md)
 * Modern implementation using Bazel's most idiomatic APIs
 
 ## Usage
diff --git a/annex_install.json b/annex_install.json
index c2132dc06..9895ecb2d 100644
--- a/annex_install.json
+++ b/annex_install.json
@@ -1,7 +1,7 @@
 {
     "__AUTOGENERATED_FILE_DO_NOT_MODIFY_THIS_FILE_MANUALLY": "THERE_IS_NO_DATA_ONLY_ZUUL",
-    "__INPUT_ARTIFACTS_HASH": 416553459,
-    "__RESOLVED_ARTIFACTS_HASH": -333989184,
+    "__INPUT_ARTIFACTS_HASH": -1426185539,
+    "__RESOLVED_ARTIFACTS_HASH": 696573469,
     "conflict_resolution": {
         "org.scala-sbt:io_2.13:1.10.1": "org.scala-sbt:io_2.13:1.10.4"
     },
@@ -237,6 +237,41 @@
         },
         "version": "0.10.1"
     },
+    "com.fasterxml.jackson.core:jackson-annotations": {
+        "shasums": {
+            "jar": "ea1b3a9ec60a353f4d7e78559c7140abb9bcf71b31456e9d57c8289f7cf397f1",
+            "sources": "e6f7f659bb4abd9be1fe2f259f95a2340744aff373eb34c134d6e2210bdc300d"
+        },
+        "version": "2.14.3"
+    },
+    "com.fasterxml.jackson.core:jackson-core": {
+        "shasums": {
+            "jar": "7ee2debad3c002e97b28b84d5f1b2044a38e780abb673948238f5bc656e2fe44",
+            "sources": "3ee18051ff11ca9e0fd203bb163f2322c09573c76b6c1b13e2bd1031fb6f962a"
+        },
+        "version": "2.14.3"
+    },
+    "com.fasterxml.jackson.core:jackson-databind": {
+        "shasums": {
+            "jar": "ef0694046dc86e8a601e69efef24cda88a85773aaf012f5b9b1167d6f2e7de45",
+            "sources": "f9f6c8dd8b543c02eefc5844d068d90ff070ab913115be5df704b99b1988264b"
+        },
+        "version": "2.14.3"
+    },
+    "com.fasterxml.jackson.datatype:jackson-datatype-jdk8": {
+        "shasums": {
+            "jar": "174dff10d8b8a762354401346317be3844f31f807a68eb72ce2b287ca273f4a4",
+            "sources": "762f23914234ea46addf3cf7c6625c50a03a045c62887b038f61e58d3dc72e15"
+        },
+        "version": "2.14.3"
+    },
+    "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+        "shasums": {
+            "jar": "0b1b6d0267f939fe1c2d3167d33176a0b9f8c9b38444787d281e846beddf9ad6",
+            "sources": "43ca94b09ba2f33407a8367d98600fa3027da371cb6c62cc2a274f00917e6450"
+        },
+        "version": "2.14.3"
+    },
     "com.github.pathikrit:better-files_2.12": {
         "shasums": {
             "jar": "79b49bc134f06a6a091a962ec10ce3f1810403bccec7d99bf9928b7eb02e85c4",
@@ -671,6 +706,20 @@
         },
         "version": "5.0.1"
     },
+    "org.playframework:play-functional_3": {
+        "shasums": {
+            "jar": "a0ff6d625cb7066a3e559148751e6d5822b76847008e544fceddac29a63ccf3e",
+            "sources": "130056318d90cadcbb1c8bd65c188963a9d20a302d71b168460d2c7deb54a972"
+        },
+        "version": "3.0.4"
+    },
+    "org.playframework:play-json_3": {
+        "shasums": {
+            "jar": "cccc992d104f5694798ee5391d29758ba59e374d1edc82a43cd9f3bdcc57764a",
+            "sources": "b8eec5e983e866af339aea1aca4d2104fcfa47e90daadfff99409f8509b804fb"
+        },
+        "version": "3.0.4"
+    },
     "org.reactivestreams:reactive-streams": {
         "shasums": {
             "jar": "ef867702a614b96eb6c64fb65a8f5e14bdfcabbc1ae056f78a1643f7b79ca0eb",
@@ -1279,6 +1328,19 @@
         "com.eed3si9n:sjson-new-core_2.13",
         "org.scala-lang:scala-library"
     ],
+    "com.fasterxml.jackson.core:jackson-databind": [
+        "com.fasterxml.jackson.core:jackson-annotations",
+        "com.fasterxml.jackson.core:jackson-core"
+    ],
+    "com.fasterxml.jackson.datatype:jackson-datatype-jdk8": [
+        "com.fasterxml.jackson.core:jackson-core",
+        "com.fasterxml.jackson.core:jackson-databind"
+    ],
+    "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": [
+        "com.fasterxml.jackson.core:jackson-annotations",
+        "com.fasterxml.jackson.core:jackson-core",
+        "com.fasterxml.jackson.core:jackson-databind"
+    ],
     "com.github.pathikrit:better-files_2.12": [
         "org.scala-lang:scala-library"
     ],
@@ -1448,6 +1510,18 @@
         "org.jline:jline-native",
         "org.jline:jline-terminal"
     ],
+    "org.playframework:play-functional_3": [
+        "org.scala-lang:scala3-library_3"
+    ],
+    "org.playframework:play-json_3": [
+        "com.fasterxml.jackson.core:jackson-annotations",
+        "com.fasterxml.jackson.core:jackson-core",
+        "com.fasterxml.jackson.core:jackson-databind",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jdk8",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+        "org.playframework:play-functional_3",
+        "org.scala-lang:scala3-library_3"
+    ],
     "org.scala-lang.modules:scala-collection-compat_2.13": [
         "org.scala-lang:scala-library"
     ],
@@ -1940,6 +2014,61 @@
     "com.eed3si9n:sjson-new-scalajson_2.13": [
         "sjsonnew.support.scalajson.unsafe"
     ],
+    "com.fasterxml.jackson.core:jackson-annotations": [
+        "com.fasterxml.jackson.annotation"
+    ],
+    "com.fasterxml.jackson.core:jackson-core": [
+        "com.fasterxml.jackson.core",
+        "com.fasterxml.jackson.core.async",
+        "com.fasterxml.jackson.core.base",
+        "com.fasterxml.jackson.core.exc",
+        "com.fasterxml.jackson.core.filter",
+        "com.fasterxml.jackson.core.format",
+        "com.fasterxml.jackson.core.io",
+        "com.fasterxml.jackson.core.io.doubleparser",
+        "com.fasterxml.jackson.core.io.schubfach",
+        "com.fasterxml.jackson.core.json",
+        "com.fasterxml.jackson.core.json.async",
+        "com.fasterxml.jackson.core.sym",
+        "com.fasterxml.jackson.core.type",
+        "com.fasterxml.jackson.core.util"
+    ],
+    "com.fasterxml.jackson.core:jackson-databind": [
+        "com.fasterxml.jackson.databind",
+        "com.fasterxml.jackson.databind.annotation",
+        "com.fasterxml.jackson.databind.cfg",
+        "com.fasterxml.jackson.databind.deser",
+        "com.fasterxml.jackson.databind.deser.impl",
+        "com.fasterxml.jackson.databind.deser.std",
+        "com.fasterxml.jackson.databind.exc",
+        "com.fasterxml.jackson.databind.ext",
+        "com.fasterxml.jackson.databind.introspect",
+        "com.fasterxml.jackson.databind.jdk14",
+        "com.fasterxml.jackson.databind.json",
+        "com.fasterxml.jackson.databind.jsonFormatVisitors",
+        "com.fasterxml.jackson.databind.jsonschema",
+        "com.fasterxml.jackson.databind.jsontype",
+        "com.fasterxml.jackson.databind.jsontype.impl",
+        "com.fasterxml.jackson.databind.module",
+        "com.fasterxml.jackson.databind.node",
+        "com.fasterxml.jackson.databind.ser",
+        "com.fasterxml.jackson.databind.ser.impl",
+        "com.fasterxml.jackson.databind.ser.std",
+        "com.fasterxml.jackson.databind.type",
+        "com.fasterxml.jackson.databind.util",
+        "com.fasterxml.jackson.databind.util.internal"
+    ],
+    "com.fasterxml.jackson.datatype:jackson-datatype-jdk8": [
+        "com.fasterxml.jackson.datatype.jdk8"
+    ],
+    "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": [
+        "com.fasterxml.jackson.datatype.jsr310",
+        "com.fasterxml.jackson.datatype.jsr310.deser",
+        "com.fasterxml.jackson.datatype.jsr310.deser.key",
+        "com.fasterxml.jackson.datatype.jsr310.ser",
+        "com.fasterxml.jackson.datatype.jsr310.ser.key",
+        "com.fasterxml.jackson.datatype.jsr310.util"
+    ],
     "com.github.pathikrit:better-files_2.12": [
         "better.files"
     ],
@@ -2522,6 +2651,15 @@
         "org.objectweb.asm.util",
         "org.objectweb.asm.xml"
     ],
+    "org.playframework:play-functional_3": [
+        "play.api.libs.functional",
+        "play.api.libs.functional.syntax"
+    ],
+    "org.playframework:play-json_3": [
+        "play.api.libs.json",
+        "play.api.libs.json.jackson",
+        "play.api.libs.json.util"
+    ],
     "org.reactivestreams:reactive-streams": [
         "org.reactivestreams"
     ],
@@ -3149,6 +3287,16 @@
         "com.eed3si9n:sjson-new-scalajson_2.12:jar:sources",
         "com.eed3si9n:sjson-new-scalajson_2.13",
         "com.eed3si9n:sjson-new-scalajson_2.13:jar:sources",
+        "com.fasterxml.jackson.core:jackson-annotations",
+        "com.fasterxml.jackson.core:jackson-annotations:jar:sources",
+        "com.fasterxml.jackson.core:jackson-core",
+        "com.fasterxml.jackson.core:jackson-core:jar:sources",
+        "com.fasterxml.jackson.core:jackson-databind",
+        "com.fasterxml.jackson.core:jackson-databind:jar:sources",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jdk8",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:sources",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:sources",
         "com.github.pathikrit:better-files_2.12",
         "com.github.pathikrit:better-files_2.12:jar:sources",
         "com.google.code.findbugs:jsr305",
@@ -3272,6 +3420,10 @@
         "org.jline:jline:jar:sources",
         "org.ow2.asm:asm-debug-all",
         "org.ow2.asm:asm-debug-all:jar:sources",
+        "org.playframework:play-functional_3",
+        "org.playframework:play-functional_3:jar:sources",
+        "org.playframework:play-json_3",
+        "org.playframework:play-json_3:jar:sources",
         "org.reactivestreams:reactive-streams",
         "org.reactivestreams:reactive-streams:jar:sources",
         "org.scala-lang.modules:scala-asm",
@@ -3462,6 +3614,16 @@
         "com.eed3si9n:sjson-new-scalajson_2.12:jar:sources",
         "com.eed3si9n:sjson-new-scalajson_2.13",
         "com.eed3si9n:sjson-new-scalajson_2.13:jar:sources",
+        "com.fasterxml.jackson.core:jackson-annotations",
+        "com.fasterxml.jackson.core:jackson-annotations:jar:sources",
+        "com.fasterxml.jackson.core:jackson-core",
+        "com.fasterxml.jackson.core:jackson-core:jar:sources",
+        "com.fasterxml.jackson.core:jackson-databind",
+        "com.fasterxml.jackson.core:jackson-databind:jar:sources",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jdk8",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:sources",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:sources",
         "com.github.pathikrit:better-files_2.12",
         "com.github.pathikrit:better-files_2.12:jar:sources",
         "com.google.code.findbugs:jsr305",
@@ -3585,6 +3747,10 @@
         "org.jline:jline:jar:sources",
         "org.ow2.asm:asm-debug-all",
         "org.ow2.asm:asm-debug-all:jar:sources",
+        "org.playframework:play-functional_3",
+        "org.playframework:play-functional_3:jar:sources",
+        "org.playframework:play-json_3",
+        "org.playframework:play-json_3:jar:sources",
         "org.reactivestreams:reactive-streams",
         "org.reactivestreams:reactive-streams:jar:sources",
         "org.scala-lang.modules:scala-asm",
@@ -3775,6 +3941,16 @@
         "com.eed3si9n:sjson-new-scalajson_2.12:jar:sources",
         "com.eed3si9n:sjson-new-scalajson_2.13",
         "com.eed3si9n:sjson-new-scalajson_2.13:jar:sources",
+        "com.fasterxml.jackson.core:jackson-annotations",
+        "com.fasterxml.jackson.core:jackson-annotations:jar:sources",
+        "com.fasterxml.jackson.core:jackson-core",
+        "com.fasterxml.jackson.core:jackson-core:jar:sources",
+        "com.fasterxml.jackson.core:jackson-databind",
+        "com.fasterxml.jackson.core:jackson-databind:jar:sources",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jdk8",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:sources",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+        "com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:sources",
         "com.github.pathikrit:better-files_2.12",
         "com.github.pathikrit:better-files_2.12:jar:sources",
         "com.google.code.findbugs:jsr305",
@@ -3898,6 +4074,10 @@
         "org.jline:jline:jar:sources",
         "org.ow2.asm:asm-debug-all",
         "org.ow2.asm:asm-debug-all:jar:sources",
+        "org.playframework:play-functional_3",
+        "org.playframework:play-functional_3:jar:sources",
+        "org.playframework:play-json_3",
+        "org.playframework:play-json_3:jar:sources",
         "org.reactivestreams:reactive-streams",
         "org.reactivestreams:reactive-streams:jar:sources",
         "org.scala-lang.modules:scala-asm",
@@ -4033,6 +4213,46 @@
             "ch.qos.logback.classic.servlet.LogbackServletContainerInitializer"
         ]
     },
+    "com.fasterxml.jackson.core:jackson-core": {
+        "com.fasterxml.jackson.core.JsonFactory": [
+            "com.fasterxml.jackson.core.JsonFactory"
+        ]
+    },
+    "com.fasterxml.jackson.core:jackson-core:jar:sources": {
+        "com.fasterxml.jackson.core.JsonFactory": [
+            "com.fasterxml.jackson.core.JsonFactory"
+        ]
+    },
+    "com.fasterxml.jackson.core:jackson-databind": {
+        "com.fasterxml.jackson.core.ObjectCodec": [
+            "com.fasterxml.jackson.databind.ObjectMapper"
+        ]
+    },
+    "com.fasterxml.jackson.core:jackson-databind:jar:sources": {
+        "com.fasterxml.jackson.core.ObjectCodec": [
+            "com.fasterxml.jackson.databind.ObjectMapper"
+        ]
+    },
+    "com.fasterxml.jackson.datatype:jackson-datatype-jdk8": {
+        "com.fasterxml.jackson.databind.Module": [
+            "com.fasterxml.jackson.datatype.jdk8.Jdk8Module"
+        ]
+    },
+    "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:sources": {
+        "com.fasterxml.jackson.databind.Module": [
+            "com.fasterxml.jackson.datatype.jdk8.Jdk8Module"
+        ]
+    },
+    "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+        "com.fasterxml.jackson.databind.Module": [
+            "com.fasterxml.jackson.datatype.jsr310.JavaTimeModule"
+        ]
+    },
+    "com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:sources": {
+        "com.fasterxml.jackson.databind.Module": [
+            "com.fasterxml.jackson.datatype.jsr310.JavaTimeModule"
+        ]
+    },
     "org.apache.logging.log4j:log4j-api": {
         "org.apache.logging.log4j.util.PropertySource": [
             "org.apache.logging.log4j.util.EnvironmentPropertySource",
diff --git a/docs/newdocs/zinc_flags.md b/docs/newdocs/zinc_flags.md
deleted file mode 100644
index 4f531f461..000000000
--- a/docs/newdocs/zinc_flags.md
+++ /dev/null
@@ -1,18 +0,0 @@
-## Pass flags to Zinc compiler
-
-### Persistence Directory Path
-Use `--persistence_dir` to specify the path to save Zinc state files.
-
-### Use Persistence Directory
-Use `--use_persistence` to enable or disable the persistence directory. The default is `true`.
-
-### Maximum Error Number
-Use `--max_errors` to specify the number of errors to be shown from Zinc. The default is `10`.
-
-## Example
-In `.bazelrc` file, add the following snippet
-```sh
-build --worker_extra_flag=ScalaCompile=--persistence_dir=bazel-zinc
-build --worker_extra_flag=ScalaCompile=--use_persistence=true
-build --worker_extra_flag=ScalaCompile=--max_errors=20
-```
diff --git a/docs/stardoc/scala.md b/docs/stardoc/scala.md
index 6952f5f98..e474cc0a5 100644
--- a/docs/stardoc/scala.md
+++ b/docs/stardoc/scala.md
@@ -221,7 +221,7 @@ Generates Scaladoc.
load("@rules_scala_annex//rules:scala.bzl", "make_scala_binary")
-make_scala_binary(extras)
+make_scala_binary(*extras)
@@ -241,7 +241,7 @@ make_scala_binary(extras)
load("@rules_scala_annex//rules:scala.bzl", "make_scala_library")
-make_scala_library(extras)
+make_scala_library(*extras)
@@ -261,7 +261,7 @@ make_scala_library(extras)
load("@rules_scala_annex//rules:scala.bzl", "make_scala_test")
-make_scala_test(extras)
+make_scala_test(*extras)
diff --git a/docs/stardoc/scala_proto.md b/docs/stardoc/scala_proto.md
index 0112a0053..428def4df 100644
--- a/docs/stardoc/scala_proto.md
+++ b/docs/stardoc/scala_proto.md
@@ -33,7 +33,7 @@ See example use in [/tests/proto/BUILD](/tests/proto/BUILD)
load("@rules_scala_annex//rules:scala_proto.bzl", "scala_proto_toolchain")
-scala_proto_toolchain(name, compiler, compiler_supports_workers)
+scala_proto_toolchain(name, compiler, compiler_supports_workers, protoc)
Specifies a toolchain of the `@rules_scala_annex//rules/scala_proto:compiler_toolchain_type` toolchain type.
@@ -73,7 +73,8 @@ toolchain(
| Name | Description | Type | Mandatory | Default |
| :------------- | :------------- | :------------- | :------------- | :------------- |
| name | A unique name for this target. | Name | required | |
-| compiler | The compiler to use to generate Scala form proto sources | Label | optional | `None` |
+| compiler | The compiler to use to generate Scala from proto sources | Label | required | |
| compiler_supports_workers | - | Boolean | optional | `False` |
+| protoc | The protoc binary to use | Label | optional | `"@protobuf//:protoc"` |
diff --git a/docs/stateful.md b/docs/stateful.md
deleted file mode 100644
index f5a023563..000000000
--- a/docs/stateful.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# Stateful compilation
-
-Beyond the normal per-target incremental compilation, [Zinc](https://github.com/sbt/zinc) can achieve even finer-grained
-compilation by reusing dependency information collected on previous runs.
-
-Stateful compilers like Zinc [operate outside](https://groups.google.com/forum/#!topic/bazel-discuss/3iUy5jxS3S0) the
-Bazel paradigm, and Bazel cannot enforce correctness. Technically, this caveat applies to all worker strategies:
-performance is improving by maintaining state, but improper state may be shared across actions. In Zinc's case, the risk
-is higher, because the sharing is (intentionally) aggressive.
-
-To enable Zinc's stateful compilation, add
-
-```
---worker_extra_flag=ScalaCompile=--persistence_dir=.bazel-zinc
-```
-
-Additionally, intermediate inputs to compilation can be cached, for a significant performance benefit in some cases, by
-```
---worker_extra_flag=ScalaCompile=--extracted_file_cache=.bazel-zinc-outputs
-```
diff --git a/rules/private/phases/phase_test_launcher.bzl b/rules/private/phases/phase_test_launcher.bzl
index 54b07210c..c6f287741 100644
--- a/rules/private/phases/phase_test_launcher.bzl
+++ b/rules/private/phases/phase_test_launcher.bzl
@@ -22,7 +22,7 @@ def phase_test_launcher(ctx, g):
# "When attaching a transition to an outgoing edge (regardless of whether the transition is a
# 1:1 or 1:2+ transition), `ctx.attr` is forced to be a list if it isn't already. The order of
# elements in this list is unspecified."
- files = ctx.attr._target_jdk[0][java_common.JavaRuntimeInfo].files.to_list() + [g.compile.zinc_info.analysis_store]
+ files = ctx.attr._target_jdk[0][java_common.JavaRuntimeInfo].files.to_list() + [g.compile.tests_file]
coverage_replacements = {}
coverage_runner_jars = depset(direct = [])
@@ -41,8 +41,7 @@ def phase_test_launcher(ctx, g):
all_jars = [test_jars, runner_jars]
args = ctx.actions.args()
- args.add("--analysis_store", g.compile.zinc_info.analysis_store.short_path)
- args.add_all("--frameworks", ctx.attr.frameworks)
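+    # The frameworks are now handed to the compile worker, so the launcher only
+    # needs the tests.json produced during compilation.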
+ args.add("--tests_file", g.compile.tests_file.short_path)
if ctx.attr.isolation == "classloader":
shared_deps = java_common.merge(_collect(JavaInfo, ctx.attr.shared_deps))
args.add("--isolation", "classloader")
diff --git a/rules/private/phases/phase_zinc_compile.bzl b/rules/private/phases/phase_zinc_compile.bzl
index aafaf1c46..52ac50809 100644
--- a/rules/private/phases/phase_zinc_compile.bzl
+++ b/rules/private/phases/phase_zinc_compile.bzl
@@ -4,9 +4,6 @@ load(
"@rules_scala_annex//rules:providers.bzl",
_ScalaConfiguration = "ScalaConfiguration",
_ZincCompilationInfo = "ZincCompilationInfo",
- _ZincConfiguration = "ZincConfiguration",
- _ZincDepInfo = "ZincDepInfo",
- _ZincInfo = "ZincInfo",
)
#
@@ -17,8 +14,6 @@ load(
def phase_zinc_compile(ctx, g):
toolchain = ctx.toolchains["//rules/scala:toolchain_type"]
- analysis_store = ctx.actions.declare_file("{}/analysis_store.gz".format(ctx.label.name))
- analysis_store_text = ctx.actions.declare_file("{}/analysis_store.text.gz".format(ctx.label.name))
mains_file = ctx.actions.declare_file("{}.jar.mains.txt".format(ctx.label.name))
used = ctx.actions.declare_file("{}/deps_used.txt".format(ctx.label.name))
tmp = ctx.actions.declare_directory("{}/tmp".format(ctx.label.name))
@@ -34,13 +29,9 @@ def phase_zinc_compile(ctx, g):
)
]
- zincs = [dep[_ZincInfo] for dep in ctx.attr.deps if _ZincInfo in dep]
common_scalacopts = toolchain.scala_configuration.global_scalacopts + ctx.attr.scalacopts
args = ctx.actions.args()
- if toolchain.zinc_configuration.incremental:
- args.add_all(depset(transitive = [zinc.deps for zinc in zincs]), map_each = _compile_analysis)
-
args.add("--compiler_bridge", toolchain.zinc_configuration.compiler_bridge)
args.add_all("--compiler_classpath", g.classpaths.compiler)
args.add_all("--classpath", g.classpaths.compile)
@@ -48,7 +39,6 @@ def phase_zinc_compile(ctx, g):
args.add_all(javacopts, format_each = "--java_compiler_option=%s")
args.add(ctx.label, format = "--label=%s")
args.add("--main_manifest", mains_file)
- args.add("--output_analysis_store", analysis_store)
args.add("--output_jar", g.classpaths.jar)
args.add("--output_used", used)
args.add_all("--plugins", g.classpaths.plugin)
@@ -58,29 +48,32 @@ def phase_zinc_compile(ctx, g):
g.semanticdb.arguments_modifier(args)
- args.add_all("--", g.classpaths.srcs)
- args.set_param_file_format("multiline")
- args.use_param_file("@%s", use_always = True)
-
- worker = toolchain.zinc_configuration.compile_worker
-
inputs = depset(
[toolchain.zinc_configuration.compiler_bridge] + ctx.files.data + ctx.files.srcs,
transitive = [
g.classpaths.plugin,
g.classpaths.compile,
g.classpaths.compiler,
- ] + ([zinc.deps_files for zinc in zincs] if toolchain.zinc_configuration.incremental else []),
+ ],
)
- outputs = [
- g.classpaths.jar,
- mains_file,
- analysis_store,
- analysis_store_text,
- used,
- tmp,
- ] + g.semanticdb.outputs
+ outputs = [g.classpaths.jar, mains_file, used, tmp] + g.semanticdb.outputs
+
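+    # Only test targets define a `frameworks` attribute; for those, declare a
+    # tests.json output so that test discovery happens once, at compile time.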
+ if hasattr(ctx.attr, "frameworks"):
+ tests_file = ctx.actions.declare_file("{}/tests.json".format(ctx.label.name))
+
+ args.add_all("--test_frameworks", ctx.attr.frameworks)
+ args.add("--tests_file", tests_file)
+
+ outputs.append(tests_file)
+ else:
+ tests_file = None
+
+ args.add_all("--", g.classpaths.srcs)
+ args.set_param_file_format("multiline")
+ args.use_param_file("@%s", use_always = True)
+
+ worker = toolchain.zinc_configuration.compile_worker
execution_requirements_tags = {
"supports-multiplex-workers": "1",
@@ -90,16 +83,6 @@ def phase_zinc_compile(ctx, g):
"supports-path-mapping": "1",
}
- # Disable several things if incremental compilation features are going to be used
- # because incremental compilation require stashing files outside the sandbox that
- # Bazel isn't aware of and is less deterministic than ideal.
- if toolchain.zinc_configuration.incremental:
- execution_requirements_tags["no-sandbox"] = "1"
- execution_requirements_tags["no-cache"] = "1"
- execution_requirements_tags["no-remote"] = "1"
- execution_requirements_tags["supports-multiplex-sandboxing"] = "0"
- execution_requirements_tags["supports-path-mapping"] = "0"
-
# todo: different execution path for nosrc jar?
ctx.actions.run(
arguments = [args],
@@ -116,31 +99,9 @@ def phase_zinc_compile(ctx, g):
for jar in g.javainfo.java_info.outputs.jars:
jars.append(jar.class_jar)
jars.append(jar.ijar)
- zinc_info = _ZincInfo(
- analysis_store = analysis_store,
- deps_files = depset([analysis_store], transitive = [zinc.deps_files for zinc in zincs]),
- label = ctx.label,
- deps = depset(
- [_ZincDepInfo(
- analysis_store = analysis_store,
- jars = tuple(jars),
- label = ctx.label,
- )],
- transitive = [zinc.deps for zinc in zincs],
- ),
- )
- g.out.providers.append(zinc_info)
return _ZincCompilationInfo(
mains_file = mains_file,
+ tests_file = tests_file,
used = used,
- # todo: see about cleaning up & generalizing fields below
- zinc_info = zinc_info,
)
-
-def _compile_analysis(analysis):
- return [
- "--analysis",
- "_{}".format(analysis.label),
- analysis.analysis_store.path,
- ] + [jar.path for jar in analysis.jars]
diff --git a/rules/providers.bzl b/rules/providers.bzl
index 96d38a494..80b8e355f 100644
--- a/rules/providers.bzl
+++ b/rules/providers.bzl
@@ -25,7 +25,6 @@ ZincConfiguration = provider(
"compiler_bridge": "compiled Zinc compiler bridge",
"compile_worker": "the worker label for compilation with Zinc",
"log_level": "log level for the Zinc compiler",
- "incremental": "whether incremental compilation will be available for this Zinc compiler",
},
)
@@ -52,16 +51,6 @@ ScalaRulePhase = provider(
},
)
-ZincInfo = provider(
- doc = "Zinc-specific outputs.",
- fields = {
- "analysis_store": "The analysis store file.",
- "deps": "The depset of library dependency outputs.",
- "deps_files": "The depset of all Zinc files.",
- "label": "The label for this output.",
- },
-)
-
# TODO: move these to another file?
# TODO: implement these with an aspect?
@@ -183,16 +172,7 @@ ZincCompilationInfo = provider(
doc = "Outputs from the Zinc compilation phase.",
fields = {
"mains_file": "File containing the main methods of this compilation.",
+ "tests_file": "File containing discovered tests for use by the test runner. Will be `None` if this isn't a test target.",
"used": "File containing the used deps for this compilation.",
- "zinc_info": "a ZincInfo provider for this compilation.",
- },
-)
-
-ZincDepInfo = provider(
- doc = "Information for a dep in a ZincInfo",
- fields = {
- "analysis_store": "Analysis store for this label",
- "jars": "Jars for this label",
- "label": "The label for this dep",
},
)
diff --git a/rules/register_toolchain.bzl b/rules/register_toolchain.bzl
index a7b87e78b..1307d2fcb 100644
--- a/rules/register_toolchain.bzl
+++ b/rules/register_toolchain.bzl
@@ -90,7 +90,6 @@ def _zinc_configuration_impl(ctx):
zinc_configuration = ZincConfiguration(
compile_worker = ctx.attr._compile_worker,
compiler_bridge = ctx.file.compiler_bridge,
- incremental = ctx.attr.incremental,
log_level = ctx.attr.log_level,
),
deps_configuration = DepsConfiguration(
@@ -135,7 +134,6 @@ off: Don't perform unused dependency checking.""",
),
"global_plugins": attr.label_list(providers = [JavaInfo]),
"global_scalacopts": attr.string_list(),
- "incremental": attr.bool(default = False),
"log_level": attr.string(
default = "warn",
values = ["error", "warn", "info", "debug", "none"],
diff --git a/rules/scala_proto.bzl b/rules/scala_proto.bzl
index c35e60830..6b13e9861 100644
--- a/rules/scala_proto.bzl
+++ b/rules/scala_proto.bzl
@@ -42,7 +42,7 @@ scala_proto_toolchain = rule(
"compiler": attr.label(
allow_files = True,
cfg = "exec",
- doc = "The compiler to use to generate Scala form proto sources",
+ doc = "The compiler to use to generate Scala from proto sources",
executable = True,
mandatory = True,
),
diff --git a/src/main/scala/higherkindness/rules_scala/common/args/ArgsUtil.scala b/src/main/scala/higherkindness/rules_scala/common/args/ArgsUtil.scala
index 917c19916..b33614dd4 100644
--- a/src/main/scala/higherkindness/rules_scala/common/args/ArgsUtil.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/args/ArgsUtil.scala
@@ -1,7 +1,6 @@
-package higherkindness.rules_scala
-package common.args
+package higherkindness.rules_scala.common.args
-import common.error.AnnexWorkerError
+import higherkindness.rules_scala.common.error.AnnexWorkerError
import java.io.{PrintStream, PrintWriter}
import java.nio.file.{Path, Paths}
import net.sourceforge.argparse4j.helper.HelpScreenException
diff --git a/src/main/scala/higherkindness/rules_scala/common/args/Implicits.scala b/src/main/scala/higherkindness/rules_scala/common/args/Implicits.scala
index 310a1c0b9..80b373bf7 100644
--- a/src/main/scala/higherkindness/rules_scala/common/args/Implicits.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/args/Implicits.scala
@@ -1,8 +1,7 @@
-package higherkindness.rules_scala
-package common.args
+package higherkindness.rules_scala.common.args
-import scala.reflect.Selectable.reflectiveSelectable
import net.sourceforge.argparse4j.inf.Argument
+import scala.reflect.Selectable.reflectiveSelectable
object implicits {
implicit final class SetArgumentDefault(val argument: Argument) extends AnyVal {
diff --git a/src/main/scala/higherkindness/rules_scala/common/classloaders/ClassLoaders.scala b/src/main/scala/higherkindness/rules_scala/common/classloaders/ClassLoaders.scala
index 7a84c66cd..c61111b60 100644
--- a/src/main/scala/higherkindness/rules_scala/common/classloaders/ClassLoaders.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/classloaders/ClassLoaders.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.classloaders
+package higherkindness.rules_scala.common.classloaders
import java.net.{URL, URLClassLoader}
diff --git a/src/main/scala/higherkindness/rules_scala/common/error/AnnexWorkRequestCancelledException.scala b/src/main/scala/higherkindness/rules_scala/common/error/AnnexWorkRequestCancelledException.scala
index 51fe047dc..20a01a5a3 100644
--- a/src/main/scala/higherkindness/rules_scala/common/error/AnnexWorkRequestCancelledException.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/error/AnnexWorkRequestCancelledException.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.error
+package higherkindness.rules_scala.common.error
class AnnexDuplicateActiveRequestException(
val message: String = "",
diff --git a/src/main/scala/higherkindness/rules_scala/common/error/AnnexWorkerError.scala b/src/main/scala/higherkindness/rules_scala/common/error/AnnexWorkerError.scala
index 6008f7e3e..9ebbf1907 100644
--- a/src/main/scala/higherkindness/rules_scala/common/error/AnnexWorkerError.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/error/AnnexWorkerError.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.error
+package higherkindness.rules_scala.common.error
import java.io.PrintStream
diff --git a/src/main/scala/higherkindness/rules_scala/common/interrupt/InterruptUtil.scala b/src/main/scala/higherkindness/rules_scala/common/interrupt/InterruptUtil.scala
index 11eb4150e..a282e6060 100644
--- a/src/main/scala/higherkindness/rules_scala/common/interrupt/InterruptUtil.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/interrupt/InterruptUtil.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.interrupt
+package higherkindness.rules_scala.common.interrupt
import java.util.concurrent.CancellationException
diff --git a/src/main/scala/higherkindness/rules_scala/common/sandbox/SandboxUtil.scala b/src/main/scala/higherkindness/rules_scala/common/sandbox/SandboxUtil.scala
index 77725e2ef..3e1ebe223 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sandbox/SandboxUtil.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sandbox/SandboxUtil.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.sandbox
+package higherkindness.rules_scala.common.sandbox
import java.io.File
import java.nio.file.Path
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/AnnexTestingLogger.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/AnnexTestingLogger.scala
index e77119c5c..a13942e4a 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/AnnexTestingLogger.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/AnnexTestingLogger.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.sbt_testing
+package higherkindness.rules_scala.common.sbt_testing
import sbt.testing.Logger
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/BUILD b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/BUILD
index f7c24f689..dc7bb23cf 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/BUILD
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/BUILD
@@ -24,11 +24,14 @@ scala_library(
scala_library(
name = "common",
srcs = _common_srcs,
- scala_toolchain_name = "annex_zinc_3",
+ scala_toolchain_name = "annex_bootstrap_3",
visibility = ["//visibility:public"],
deps = [
+ "@annex//:org_playframework_play_json_3",
"@annex//:org_scala_lang_modules_scala_xml_2_13",
+ "@annex//:org_scala_sbt_compiler_interface",
"@annex//:org_scala_sbt_test_interface",
+ "@annex//:org_scala_sbt_zinc_core_2_13",
],
)
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/JUnitXmlReporter.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/JUnitXmlReporter.scala
index adc5bdebc..523cc9d1d 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/JUnitXmlReporter.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/JUnitXmlReporter.scala
@@ -1,9 +1,8 @@
-package higherkindness.rules_scala
-package common.sbt_testing
+package higherkindness.rules_scala.common.sbt_testing
import java.io.{PrintWriter, StringWriter}
+import sbt.testing.Status.{Canceled, Error, Failure, Ignored, Pending, Skipped}
import sbt.testing.{Event, Status, TestSelector}
-import Status.{Canceled, Error, Failure, Ignored, Pending, Skipped}
import scala.collection.mutable.ListBuffer
import scala.xml.{Elem, Utility, XML}
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/PrefixedTestingLogger.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/PrefixedTestingLogger.scala
index 737749d4d..5e7b54085 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/PrefixedTestingLogger.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/PrefixedTestingLogger.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.sbt_testing
+package higherkindness.rules_scala.common.sbt_testing
import sbt.testing.Logger
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/SubprocessRunner.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/SubprocessRunner.scala
index fca8b8a24..7358349bc 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/SubprocessRunner.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/SubprocessRunner.scala
@@ -1,7 +1,6 @@
-package higherkindness.rules_scala
-package common.sbt_testing
+package higherkindness.rules_scala.common.sbt_testing
-import common.classloaders.ClassLoaders
+import higherkindness.rules_scala.common.classloaders.ClassLoaders
import java.io.ObjectInputStream
import java.nio.file.Paths
import scala.collection.mutable
@@ -13,7 +12,7 @@ object SubprocessTestRunner {
val request = input.readObject().asInstanceOf[TestRequest]
val classLoader = ClassLoaders.sbtTestClassLoader(request.classpath.map(path => Paths.get(path).toUri.toURL))
- val loader = new TestFrameworkLoader(classLoader, request.logger)
+ val loader = new TestFrameworkLoader(classLoader)
val framework = loader.load(request.framework).get
val passed = ClassLoaders.withContextClassLoader(classLoader) {
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/Test.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/Test.scala
index abea24b2d..64f49c968 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/Test.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/Test.scala
@@ -1,24 +1,51 @@
-package higherkindness.rules_scala
-package common.sbt_testing
+package higherkindness.rules_scala.common.sbt_testing
-import sbt.testing.{Event, Fingerprint, Framework, Logger, Runner, Status, Task, TaskDef, TestWildcardSelector}
+import java.nio.file.{Path, Paths}
+import play.api.libs.json.{Format, Json}
+import sbt.testing.{Event, Framework, Logger, Runner, Status, Task, TaskDef, TestWildcardSelector}
import scala.collection.mutable
import scala.util.control.NonFatal
-class TestDefinition(val name: String, val fingerprint: Fingerprint with Serializable) extends Serializable
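+// A single test class discovered at compile time; serialized into tests.json
+// via the play-json Format below.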
+case class TestDefinition(name: String, fingerprint: TestFingerprint)
+
+object TestDefinition {
+ implicit val format: Format[TestDefinition] = Json.format[TestDefinition]
+}
+
+class TestFrameworkLoader(loader: ClassLoader) {
+ private val loadedJarsByFramework = mutable.Map.empty[String, mutable.ArrayBuffer[Path]]
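+
+  // Best-effort lookup of the jar a class was loaded from, via its protection
+  // domain's code source; yields None when that location can't become a Path.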
+ private def getClassJar(`class`: Class[?]): Option[Path] = for {
+ codeSource <- Option(`class`.getProtectionDomain().getCodeSource())
+ codeSourceUri = codeSource.getLocation().toURI()
+ path <-
+ try {
+ Some(Paths.get(codeSourceUri))
+ } catch {
+ case _: IllegalArgumentException => None
+ }
+ } yield path
+
+ def getLoadedJarsByFramework(): Map[String, Array[Path]] =
+ loadedJarsByFramework.view.map { case (frameworkName, loadedJars) => frameworkName -> loadedJars.toArray }.toMap
-class TestFrameworkLoader(loader: ClassLoader, logger: Logger) {
def load(className: String) = {
- val framework =
+ val (framework, loadedJar) =
try {
- Some(Class.forName(className, true, loader).getDeclaredConstructor().newInstance())
+ val `class` = Class.forName(className, true, loader)
+ val loadedJar = getClassJar(`class`)
+
+ (Some(`class`.getDeclaredConstructor().newInstance()), loadedJar)
} catch {
- case _: ClassNotFoundException => None
+ case _: ClassNotFoundException => (None, None)
case NonFatal(e) => throw new Exception(s"Failed to load framework $className", e)
}
framework.map {
- case framework: Framework => framework
- case _ => throw new Exception(s"$className does not implement ${classOf[Framework].getName}")
+ case framework: Framework =>
+ loadedJar.foreach(loadedJarsByFramework.getOrElseUpdate(className, mutable.ArrayBuffer.empty) += _)
+
+ framework
+
+ case _ => throw new Exception(s"$className does not implement ${classOf[Framework].getName}")
}
}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestDiscovery.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestDiscovery.scala
similarity index 74%
rename from src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestDiscovery.scala
rename to src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestDiscovery.scala
index d1d2486bd..da0d8216e 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestDiscovery.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestDiscovery.scala
@@ -1,14 +1,9 @@
-package higherkindness.rules_scala
-package workers.zinc.test
+package higherkindness.rules_scala.common.sbt_testing
-import common.sbt_testing.TestAnnotatedFingerprint
-import common.sbt_testing.TestDefinition
-import common.sbt_testing.TestSubclassFingerprint
-
-import sbt.testing.{AnnotatedFingerprint, Fingerprint, Framework, SubclassFingerprint, SuiteSelector}
+import sbt.testing.{AnnotatedFingerprint, Framework, SubclassFingerprint}
import scala.collection.mutable
import xsbt.api.Discovery
-import xsbti.api.{AnalyzedClass, ClassLike, Definition}
+import xsbti.api.{ClassLike, Definition}
class TestDiscovery(framework: Framework) {
private val (annotatedPrints, subclassPrints) = {
@@ -22,9 +17,8 @@ class TestDiscovery(framework: Framework) {
(annotatedPrints.toSet, subclassPrints.toSet)
}
- private def definitions(classes: Set[AnalyzedClass]) = {
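+  // Discovery now starts from ClassLike values directly instead of unpacking
+  // the class and object APIs out of Zinc's AnalyzedClass records.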
+ private def definitions(classes: Set[ClassLike]) = {
classes.toSeq
- .flatMap(`class` => Seq(`class`.api.classApi, `class`.api.objectApi))
.flatMap(api => Seq(api, api.structure.declared, api.structure.inherited))
.collect { case cl: ClassLike if cl.topLevel => cl }
}
@@ -34,7 +28,7 @@ class TestDiscovery(framework: Framework) {
definitions,
)
- def apply(classes: Set[AnalyzedClass]) =
+ def apply(classes: Set[ClassLike]) =
for {
(definition, discovered) <- discover(definitions(classes))
fingerprint <- subclassPrints.collect {
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestRequest.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestRequest.scala
index 8e38a9294..ab2a42922 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestRequest.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestRequest.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.sbt_testing
+package higherkindness.rules_scala.common.sbt_testing
import sbt.testing.Logger
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestsFile.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestsFile.scala
new file mode 100644
index 000000000..2675dcfd1
--- /dev/null
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/TestsFile.scala
@@ -0,0 +1,24 @@
+package higherkindness.rules_scala.common.sbt_testing
+
+import play.api.libs.json.{Format, JsObject, Json, Reads, Writes}
+
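+/** Tests discovered at compile time, grouped by test-framework implementation class name. */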
+case class TestsFileData(testsByFramework: Map[String, Seq[TestDefinition]])
+
+object TestsFileData {
+
+ /**
+   * Unlike the one produced by [[Json.format]], this [[Format]] is deterministic, which we need in order to
+   * fulfill the reproducibility component of [[https://bazel.build/basics/hermeticity Bazel's contract]].
+ */
+ implicit val format: Format[TestsFileData] = Format[Map[String, Seq[TestDefinition]]](
+ Reads.mapReads,
+ Writes { testsByFramework =>
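+      // Sort both the tests within each framework and the frameworks themselves
+      // so the serialized JSON is byte-for-byte identical across runs.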
+ JsObject(
+ testsByFramework.view
+ .map { case (framework, tests) => framework -> Json.toJson(tests.sortBy(_.name)) }
+ .toList
+ .sortBy { case (framework, _) => framework },
+ )
+ },
+ ).bimap(TestsFileData(_), _.testsByFramework)
+}
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/Verbosity.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/Verbosity.scala
index b1f23a720..8cac0e328 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/Verbosity.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/Verbosity.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.sbt_testing
+package higherkindness.rules_scala.common.sbt_testing
sealed abstract class Verbosity(val level: String)
object Verbosity {
diff --git a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/fingerprints.scala b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/fingerprints.scala
index 1fda7fe85..b53feab7e 100644
--- a/src/main/scala/higherkindness/rules_scala/common/sbt-testing/fingerprints.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/sbt-testing/fingerprints.scala
@@ -1,24 +1,38 @@
-package higherkindness.rules_scala
-package common.sbt_testing
+package higherkindness.rules_scala.common.sbt_testing
-import sbt.testing.{AnnotatedFingerprint, SubclassFingerprint}
+import play.api.libs.json.{Format, Json, Reads, Writes}
+import sbt.testing.{AnnotatedFingerprint, Fingerprint, SubclassFingerprint}
-sealed trait TestFingerprint extends Serializable
+sealed trait TestFingerprint extends Fingerprint
-class TestAnnotatedFingerprint(val annotationName: String, val isModule: Boolean)
+object TestFingerprint {
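+  // When reading, try the annotated-fingerprint shape first and fall back to
+  // the subclass shape; when writing, dispatch on the concrete case class.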
+ implicit val format: Format[TestFingerprint] = Format(
+ Reads(jsValue => jsValue.validate[TestAnnotatedFingerprint].orElse(jsValue.validate[TestSubclassFingerprint])),
+ Writes {
+ case annotated: TestAnnotatedFingerprint => Json.toJson(annotated)
+ case subclass: TestSubclassFingerprint => Json.toJson(subclass)
+ },
+ )
+}
+
+case class TestAnnotatedFingerprint(annotationName: String, isModule: Boolean)
extends AnnotatedFingerprint
with TestFingerprint
object TestAnnotatedFingerprint {
+ implicit val format: Format[TestAnnotatedFingerprint] = Json.format[TestAnnotatedFingerprint]
+
def apply(fingerprint: AnnotatedFingerprint) =
new TestAnnotatedFingerprint(fingerprint.annotationName, fingerprint.isModule)
}
-class TestSubclassFingerprint(val isModule: Boolean, val requireNoArgConstructor: Boolean, val superclassName: String)
+case class TestSubclassFingerprint(isModule: Boolean, requireNoArgConstructor: Boolean, superclassName: String)
extends SubclassFingerprint
with TestFingerprint
object TestSubclassFingerprint {
+ implicit val format: Format[TestSubclassFingerprint] = Json.format[TestSubclassFingerprint]
+
def apply(fingerprint: SubclassFingerprint) =
new TestSubclassFingerprint(fingerprint.isModule, fingerprint.requireNoArgConstructor, fingerprint.superclassName)
}
diff --git a/src/main/scala/higherkindness/rules_scala/common/worker/CancellableTask.scala b/src/main/scala/higherkindness/rules_scala/common/worker/CancellableTask.scala
index d364b898d..8237f5955 100644
--- a/src/main/scala/higherkindness/rules_scala/common/worker/CancellableTask.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/worker/CancellableTask.scala
@@ -1,7 +1,6 @@
-package higherkindness.rules_scala
-package common.worker
+package higherkindness.rules_scala.common.worker
-import java.util.concurrent.{Callable, FutureTask}
+import java.util.concurrent.Callable
import scala.concurrent.{ExecutionContext, ExecutionException, Future, Promise}
import scala.util.Try
diff --git a/src/main/scala/higherkindness/rules_scala/common/worker/FutureTaskWaitOnCancel.scala b/src/main/scala/higherkindness/rules_scala/common/worker/FutureTaskWaitOnCancel.scala
index b3f039b6e..2b4e36d33 100644
--- a/src/main/scala/higherkindness/rules_scala/common/worker/FutureTaskWaitOnCancel.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/worker/FutureTaskWaitOnCancel.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.worker
+package higherkindness.rules_scala.common.worker
import java.util.concurrent.{Callable, CancellationException, FutureTask, TimeUnit}
import java.util.concurrent.locks.ReentrantLock
diff --git a/src/main/scala/higherkindness/rules_scala/common/worker/WorkTask.scala b/src/main/scala/higherkindness/rules_scala/common/worker/WorkTask.scala
index 675b00e3f..add4a0ee4 100644
--- a/src/main/scala/higherkindness/rules_scala/common/worker/WorkTask.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/worker/WorkTask.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package common.worker
+package higherkindness.rules_scala.common.worker
import java.io.PrintStream
import java.nio.file.Path
diff --git a/src/main/scala/higherkindness/rules_scala/common/worker/WorkerMain.scala b/src/main/scala/higherkindness/rules_scala/common/worker/WorkerMain.scala
index 04c02fb94..060a9004b 100644
--- a/src/main/scala/higherkindness/rules_scala/common/worker/WorkerMain.scala
+++ b/src/main/scala/higherkindness/rules_scala/common/worker/WorkerMain.scala
@@ -1,12 +1,11 @@
-package higherkindness.rules_scala
-package common.worker
+package higherkindness.rules_scala.common.worker
-import common.error.{AnnexDuplicateActiveRequestException, AnnexWorkerError}
+import higherkindness.rules_scala.common.error.{AnnexDuplicateActiveRequestException, AnnexWorkerError}
import com.google.devtools.build.lib.worker.WorkerProtocol
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream, PrintStream}
import java.nio.channels.ClosedByInterruptException
-import java.nio.file.{Path, Paths}
-import java.util.concurrent.{Callable, CancellationException, ConcurrentHashMap, ForkJoinPool, FutureTask}
+import java.nio.file.Path
+import java.util.concurrent.{CancellationException, ConcurrentHashMap, ForkJoinPool}
import scala.annotation.tailrec
import scala.concurrent.{ExecutionContext, ExecutionException, Future}
import scala.util.{Failure, Success, Using}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/bloop/compile/BloopRunner.scala b/src/main/scala/higherkindness/rules_scala/workers/bloop/compile/BloopRunner.scala
index fef01f52a..31fe9a722 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/bloop/compile/BloopRunner.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/bloop/compile/BloopRunner.scala
@@ -1,11 +1,7 @@
-package higherkindness.rules_scala
-package workers.bloop.compile
-
-import common.worker.{WorkTask, WorkerMain}
+package higherkindness.rules_scala.workers.bloop.compile
import bloop.Bloop
-import java.io.PrintStream
-import java.nio.file.Path
+import higherkindness.rules_scala.common.worker.{WorkTask, WorkerMain}
object BloopRunner extends WorkerMain[Unit] {
override def init(args: Option[Array[String]]): Unit = ()
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala b/src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala
deleted file mode 100644
index e22bb7999..000000000
--- a/src/main/scala/higherkindness/rules_scala/workers/common/AnalysisUtil.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package higherkindness.rules_scala
-package workers.common
-
-import java.io.File
-import java.nio.file.{Path, Paths}
-import sbt.internal.inc
-import sbt.internal.inc.consistent.ConsistentFileAnalysisStore
-import xsbti.compile.AnalysisStore
-import xsbti.compile.analysis.ReadWriteMappers
-
-object AnalysisUtil {
-
- def getAnalysisStore(
- analysisStoreFile: File,
- debug: Boolean,
- isIncremental: Boolean,
- workDir: Path,
- ): AnalysisStore = {
- val readWriteMappers = AnnexMapper.mappers(workDir, isIncremental)
- getAnalysisStore(analysisStoreFile, debug, readWriteMappers)
- }
-
- def getAnalysisStore(
- analysisStoreFile: File,
- debug: Boolean,
- readWriteMappers: ReadWriteMappers,
- ): AnalysisStore = {
- if (debug) {
- ConsistentFileAnalysisStore.text(
- analysisStoreFile,
- readWriteMappers,
- reproducible = true,
- )
- } else {
- ConsistentFileAnalysisStore.binary(
- analysisStoreFile,
- readWriteMappers,
- reproducible = true,
- )
- }
- }
-
- def getAnalysis(analysisStore: AnalysisStore): inc.Analysis = {
- analysisStore.get().get().getAnalysis.asInstanceOf[inc.Analysis]
- }
-}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/AnnexLogger.scala b/src/main/scala/higherkindness/rules_scala/workers/common/AnnexLogger.scala
index 2bd584d7e..06cc51ee6 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/AnnexLogger.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/AnnexLogger.scala
@@ -1,10 +1,9 @@
-package higherkindness.rules_scala
-package workers.common
+package higherkindness.rules_scala.workers.common
-import xsbti.Logger
import java.io.{PrintStream, PrintWriter, StringWriter}
-import java.nio.file.{Path, Paths}
+import java.nio.file.Path
import java.util.function.Supplier
+import xsbti.Logger
final class AnnexLogger(level: LogLevel, workDir: Path, out: PrintStream) extends Logger {
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/AnnexMapper.scala b/src/main/scala/higherkindness/rules_scala/workers/common/AnnexMapper.scala
index bac4432cb..e9a0b1e67 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/AnnexMapper.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/AnnexMapper.scala
@@ -1,33 +1,16 @@
-package higherkindness.rules_scala
-package workers.common
+package higherkindness.rules_scala.workers.common
import com.google.devtools.build.buildjar.jarhelper.JarHelper
-import java.io.{File, InputStream, OutputStream, OutputStreamWriter}
-import java.nio.charset.StandardCharsets
-import java.nio.file.{Files, NoSuchFileException, Path, Paths}
-import java.nio.file.attribute.FileTime
-import java.util
-import java.util.concurrent.ConcurrentHashMap
-import java.util.LinkedHashMap
-import java.util.zip.{GZIPInputStream, GZIPOutputStream}
-import java.util.Optional
-import sbt.internal.inc.binary.converters.{ProtobufReaders, ProtobufWriters}
-import sbt.internal.inc.Schema.Type.{Projection, Structure}
-import sbt.internal.inc.{APIs, Analysis, FarmHash, Hash, LastModified, PlainVirtualFile, PlainVirtualFileConverter, Relations, Schema, SourceInfos, Stamp as StampImpl, Stamper, Stamps}
-import sbt.internal.inc.Schema.{Access, AnalyzedClass, Annotation, AnnotationArgument, ClassDefinition, ClassDependencies, ClassLike, Companions, MethodParameter, NameHash, ParameterList, Path as SchemaPath, Qualifier, Type, TypeParameter, UsedName, UsedNames, Values}
-import sbt.internal.shaded.com.google.protobuf.GeneratedMessageV3
-import sbt.io.IO
-import scala.collection.immutable.TreeMap
-import xsbti.compile.analysis.{GenericMapper, ReadMapper, ReadWriteMappers, Stamp, WriteMapper}
-import xsbti.compile.{AnalysisContents, AnalysisStore, MiniSetup}
-import scala.jdk.CollectionConverters.*
+import java.nio.file.{Path, Paths}
+import sbt.internal.inc.{FarmHash, Hash, LastModified, PlainVirtualFile, PlainVirtualFileConverter}
import xsbti.VirtualFileRef
-import java.util.Objects
+import xsbti.compile.MiniSetup
+import xsbti.compile.analysis.{ReadMapper, ReadWriteMappers, Stamp, WriteMapper}
object AnnexMapper {
val rootPlaceholder = Paths.get("_ROOT_")
- def mappers(root: Path, isIncremental: Boolean) = {
- new ReadWriteMappers(new AnxReadMapper(root, isIncremental), new AnxWriteMapper(root))
+ def mappers(root: Path) = {
+ new ReadWriteMappers(new AnxReadMapper(root), new AnxWriteMapper(root))
}
/**
@@ -44,31 +27,6 @@ object AnnexMapper {
case _ => throw new Exception(s"Unexpected Stamp type encountered when writing. ${stamp.getClass} -- $stamp")
}
}
-
- final def getReadStamp(file: VirtualFileRef, stamp: Stamp, isIncremental: Boolean): Stamp = {
- if (isIncremental) {
- getIncrementalModeReadStamp(file, stamp)
- } else {
- stamp
- }
- }
-
- /**
- * When in incremental mode we do not want to rely on the timestamp from the AnalysisStore because we're assuming it
- * was set to a constant value when written to the AnalysisStore.
- *
- * Instead, for any LastModified stamps, we read the file's time stamp from disk.
- */
- final def getIncrementalModeReadStamp(file: VirtualFileRef, stamp: Stamp): Stamp = {
- stamp match {
- case farmHash: FarmHash => farmHash
- case hash: Hash => hash
- case lastModified: LastModified => {
- Stamper.forLastModifiedP(PlainVirtualFileConverter.converter.toPath(file))
- }
- case _ => throw new Exception(s"Unexpected Stamp type encountered when reading ${stamp.getClass} -- $stamp")
- }
- }
}
final class AnxWriteMapper(root: Path) extends WriteMapper {
@@ -110,7 +68,7 @@ final class AnxWriteMapper(root: Path) extends WriteMapper {
override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}
-final class AnxReadMapper(root: Path, isIncremental: Boolean) extends ReadMapper {
+final class AnxReadMapper(root: Path) extends ReadMapper {
private val rootAbs = root.toAbsolutePath().normalize()
private def mapFile(virtualFileRef: VirtualFileRef): Path = {
@@ -136,15 +94,9 @@ final class AnxReadMapper(root: Path, isIncremental: Boolean) extends ReadMapper
override def mapOutputDir(outputDir: Path): Path = mapFile(outputDir)
override def mapSourceDir(sourceDir: Path): Path = mapFile(sourceDir)
- override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp = {
- AnnexMapper.getReadStamp(file, sourceStamp, isIncremental)
- }
- override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp = {
- AnnexMapper.getReadStamp(file, binaryStamp, isIncremental)
- }
- override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp = {
- AnnexMapper.getReadStamp(file, productStamp, isIncremental)
- }
+ override def mapSourceStamp(file: VirtualFileRef, sourceStamp: Stamp): Stamp = sourceStamp
+ override def mapBinaryStamp(file: VirtualFileRef, binaryStamp: Stamp): Stamp = binaryStamp
+ override def mapProductStamp(file: VirtualFileRef, productStamp: Stamp): Stamp = productStamp
override def mapMiniSetup(miniSetup: MiniSetup): MiniSetup = miniSetup
}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/AnnexScalaInstance.scala b/src/main/scala/higherkindness/rules_scala/workers/common/AnnexScalaInstance.scala
index 193a3d15f..87b43bafc 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/AnnexScalaInstance.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/AnnexScalaInstance.scala
@@ -1,14 +1,12 @@
-package higherkindness.rules_scala
-package workers.common
+package higherkindness.rules_scala.workers.common
-import xsbti.compile.ScalaInstance
import java.io.{File, IOException}
import java.net.URLClassLoader
import java.nio.file.{AtomicMoveNotSupportedException, FileAlreadyExistsException, Files, Path, Paths, StandardCopyOption}
import java.util.Properties
import java.util.concurrent.ConcurrentHashMap
-import scala.collection.immutable.TreeMap
import scala.util.control.NonFatal
+import xsbti.compile.ScalaInstance
object AnnexScalaInstance {
// See the comment on getAnnexScalaInstance as to why this is necessary
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/Color.scala b/src/main/scala/higherkindness/rules_scala/workers/common/Color.scala
index 8b1631de4..a471ce7fc 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/Color.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/Color.scala
@@ -1,8 +1,6 @@
-package higherkindness.rules_scala
-package workers.common
+package higherkindness.rules_scala.workers.common
-import xsbti.Problem
-import Console.{GREEN as CG, RED as CR, RESET as CRESET, YELLOW as CY}
+import scala.Console.{GREEN as CG, RED as CR, RESET as CRESET, YELLOW as CY}
object Color {
def Info(message: String): String = colorString(message, CG, "Info")
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/CommonArguments.scala b/src/main/scala/higherkindness/rules_scala/workers/common/CommonArguments.scala
index 69d0fe798..a249b35b7 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/CommonArguments.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/CommonArguments.scala
@@ -1,20 +1,15 @@
-package higherkindness.rules_scala
-package workers.common
+package higherkindness.rules_scala.workers.common
-import common.args.ArgsUtil.PathArgumentType
-import common.args.implicits.*
-import common.sandbox.SandboxUtil
+import higherkindness.rules_scala.common.args.ArgsUtil.PathArgumentType
+import higherkindness.rules_scala.common.args.implicits.*
+import higherkindness.rules_scala.common.sandbox.SandboxUtil
+import java.nio.file.{Path, Paths}
+import java.util.Collections
import net.sourceforge.argparse4j.impl.Arguments as ArgumentsImpl
-import net.sourceforge.argparse4j.inf.{Argument, ArgumentParser, ArgumentType, Namespace}
-import java.util.{Collections, List as JList}
-import scala.annotation.nowarn
-import scala.collection.mutable.Buffer
+import net.sourceforge.argparse4j.inf.{ArgumentParser, Namespace}
import scala.jdk.CollectionConverters.*
-import java.io.File
-import java.nio.file.{Path, Paths}
class CommonArguments private (
- val analyses: List[Analysis],
val compilerBridge: Path,
val compilerClasspath: List[Path],
val compilerOptions: Array[String],
@@ -36,11 +31,12 @@ class CommonArguments private (
val label: String,
val logLevel: LogLevel,
val mainManifest: Path,
- val outputAnalysisStore: Path,
val outputJar: Path,
val outputUsed: Path,
val plugins: List[Path],
val sourceJars: List[Path],
+ val testFrameworks: List[String],
+ val testsFile: Option[Path],
val tmpDir: Path,
val sources: List[Path],
)
@@ -85,12 +81,6 @@ object CommonArguments {
* Adds argument parsers for CommonArguments to the given ArgumentParser and then returns the mutated ArgumentParser.
*/
def add(parser: ArgumentParser): ArgumentParser = {
- parser
- .addArgument("--analysis")
- .action(ArgumentsImpl.append)
- .help("Analysis, given as: _label analysis_store [jar ...]")
- .metavar("args")
- .nargs("*")
parser
.addArgument("--compiler_bridge")
.help("Compiler bridge")
@@ -126,6 +116,18 @@ object CommonArguments {
.metavar("debug")
.`type`(ArgumentsImpl.booleanType)
.setDefault_(false)
+ parser
+ .addArgument("--test_frameworks")
+ .help("Class names of sbt.testing.Framework implementations")
+ .metavar("class")
+ .nargs("*")
+ .setDefault_(Collections.emptyList)
+ parser
+ .addArgument("--tests_file")
+ .help("File to output discovered tests in, for use by the test runner.")
+ .metavar("file")
+ .required(false)
+ .`type`(PathArgumentType.apply())
parser
.addArgument("--java_compiler_option")
.help("Java compiler option")
@@ -146,12 +148,6 @@ object CommonArguments {
.metavar("file")
.required(true)
.`type`(PathArgumentType.apply())
- parser
- .addArgument("--output_analysis_store")
- .help("Output Analysis Store")
- .metavar("path")
- .required(true)
- .`type`(PathArgumentType.apply())
parser
.addArgument("--output_jar")
.help("Output jar")
@@ -196,22 +192,7 @@ object CommonArguments {
}
def apply(namespace: Namespace, workDir: Path): CommonArguments = {
- val analysisArgs = Option(namespace.getList[JList[String]]("analysis")).map(_.asScala).getOrElse(List.empty)
-
- val analyses: List[Analysis] = analysisArgs.view
- .map(_.asScala)
- .map { analysisArg =>
- // Analysis strings are of the format: _label analysis_store [jar ...]
- val label = analysisArg(0)
- val analysisStore = analysisArg(1)
- val jars = analysisArg.drop(2).toList
- // Drop the leading _ on the label, which was added to avoid triggering argparse's arg file detection
- Analysis(workDir, label.tail, analysisStore, jars)
- }
- .toList
-
new CommonArguments(
- analyses = analyses,
compilerBridge = SandboxUtil.getSandboxPath(workDir, namespace.get[Path]("compiler_bridge")),
compilerClasspath = SandboxUtil.getSandboxPaths(workDir, namespace.getList[Path]("compiler_classpath")),
compilerOptions = Option(namespace.getList[String]("compiler_option"))
@@ -229,11 +210,12 @@ object CommonArguments {
label = namespace.getString("label"),
logLevel = LogLevel(namespace.getString("log_level")),
mainManifest = SandboxUtil.getSandboxPath(workDir, namespace.get[Path]("main_manifest")),
- outputAnalysisStore = SandboxUtil.getSandboxPath(workDir, namespace.get[Path]("output_analysis_store")),
outputJar = SandboxUtil.getSandboxPath(workDir, namespace.get[Path]("output_jar")),
outputUsed = SandboxUtil.getSandboxPath(workDir, namespace.get[Path]("output_used")),
plugins = SandboxUtil.getSandboxPaths(workDir, namespace.getList[Path]("plugins")),
sourceJars = SandboxUtil.getSandboxPaths(workDir, namespace.getList[Path]("source_jars")),
+ testFrameworks = namespace.getList[String]("test_frameworks").asScala.toList,
+ testsFile = Option(namespace.get[Path]("tests_file")).map(SandboxUtil.getSandboxPath(workDir, _)),
tmpDir = SandboxUtil.getSandboxPath(workDir, namespace.get[Path]("tmp")),
sources = SandboxUtil.getSandboxPaths(workDir, namespace.getList[Path]("sources")),
)
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/FileUtil.scala b/src/main/scala/higherkindness/rules_scala/workers/common/FileUtil.scala
index 6c822343a..71a79f571 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/FileUtil.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/FileUtil.scala
@@ -1,12 +1,11 @@
-package higherkindness.rules_scala
-package workers.common
+package higherkindness.rules_scala.workers.common
-import scala.annotation.tailrec
import java.io.{File, IOException}
import java.nio.channels.FileChannel
-import java.nio.file.{FileAlreadyExistsException, FileVisitResult, Files, Path, Paths, SimpleFileVisitor, StandardCopyOption, StandardOpenOption}
import java.nio.file.attribute.BasicFileAttributes
+import java.nio.file.{FileAlreadyExistsException, FileVisitResult, Files, Path, Paths, SimpleFileVisitor, StandardCopyOption, StandardOpenOption}
import java.util.zip.{ZipEntry, ZipInputStream, ZipOutputStream}
+import scala.annotation.tailrec
class CopyFileVisitor(source: Path, target: Path) extends SimpleFileVisitor[Path] {
override def preVisitDirectory(directory: Path, attributes: BasicFileAttributes) = {
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/LogLevel.scala b/src/main/scala/higherkindness/rules_scala/workers/common/LogLevel.scala
index a7933e947..4517e41ee 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/LogLevel.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/LogLevel.scala
@@ -1,5 +1,4 @@
-package higherkindness.rules_scala
-package workers.common
+package higherkindness.rules_scala.workers.common
sealed abstract class LogLevel(val level: String)
object LogLevel {
diff --git a/src/main/scala/higherkindness/rules_scala/workers/common/LoggedReporter.scala b/src/main/scala/higherkindness/rules_scala/workers/common/LoggedReporter.scala
index 8f2b2e03f..3f682e664 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/common/LoggedReporter.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/common/LoggedReporter.scala
@@ -1,8 +1,7 @@
-package higherkindness.rules_scala
-package workers.common
+package higherkindness.rules_scala.workers.common
-import xsbti.{Logger, Problem}
import sbt.internal.inc.{LoggedReporter as SbtLoggedReporter, ProblemStringFormats}
+import xsbti.{Logger, Problem}
class LoggedReporter(logger: Logger, versionString: String) extends SbtLoggedReporter(0, logger) {
private val problemStringFormats = new ProblemStringFormats {}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/deps/DepsRunner.scala b/src/main/scala/higherkindness/rules_scala/workers/deps/DepsRunner.scala
index d02901cd5..8739ed2cf 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/deps/DepsRunner.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/deps/DepsRunner.scala
@@ -1,16 +1,14 @@
-package higherkindness.rules_scala
-package workers.deps
-
-import common.args.ArgsUtil
-import common.args.ArgsUtil.PathArgumentType
-import common.args.implicits.*
-import common.error.AnnexWorkerError
-import common.interrupt.InterruptUtil
-import common.sandbox.SandboxUtil
-import common.worker.{WorkTask, WorkerMain}
-import workers.common.AnnexMapper
-import workers.common.FileUtil
-import java.io.{File, PrintStream}
+package higherkindness.rules_scala.workers.deps
+
+import higherkindness.rules_scala.common.args.ArgsUtil
+import higherkindness.rules_scala.common.args.ArgsUtil.PathArgumentType
+import higherkindness.rules_scala.common.args.implicits.*
+import higherkindness.rules_scala.common.error.AnnexWorkerError
+import higherkindness.rules_scala.common.interrupt.InterruptUtil
+import higherkindness.rules_scala.common.sandbox.SandboxUtil
+import higherkindness.rules_scala.common.worker.{WorkTask, WorkerMain}
+import higherkindness.rules_scala.workers.common.AnnexMapper
+import higherkindness.rules_scala.workers.common.FileUtil
import java.nio.file.{FileAlreadyExistsException, Files, Path, Paths}
import java.util.Collections
import net.sourceforge.argparse4j.ArgumentParsers
@@ -136,7 +134,7 @@ object DepsRunner extends WorkerMain[Unit] {
potentialLabels.collect(groupLabelToJarPaths).flatten
}
- val readWriteMappers = AnnexMapper.mappers(task.workDir, isIncremental = false)
+ val readWriteMappers = AnnexMapper.mappers(task.workDir)
val readMapper = readWriteMappers.getReadMapper()
InterruptUtil.throwIfInterrupted(task.isCancelled)
diff --git a/src/main/scala/higherkindness/rules_scala/workers/jacoco/instrumenter/JacocoInstrumenter.scala b/src/main/scala/higherkindness/rules_scala/workers/jacoco/instrumenter/JacocoInstrumenter.scala
index afbc6fb41..52e6e533e 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/jacoco/instrumenter/JacocoInstrumenter.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/jacoco/instrumenter/JacocoInstrumenter.scala
@@ -1,16 +1,15 @@
-package higherkindness.rules_scala
-package workers.jacoco.instrumenter
+package higherkindness.rules_scala.workers.jacoco.instrumenter
-import common.args.ArgsUtil
-import common.error.AnnexWorkerError
-import common.interrupt.InterruptUtil
-import common.sandbox.SandboxUtil
-import common.worker.{WorkTask, WorkerMain}
-import java.io.{BufferedInputStream, BufferedOutputStream, PrintStream}
+import higherkindness.rules_scala.common.args.ArgsUtil
+import higherkindness.rules_scala.common.error.AnnexWorkerError
+import higherkindness.rules_scala.common.interrupt.InterruptUtil
+import higherkindness.rules_scala.common.sandbox.SandboxUtil
+import higherkindness.rules_scala.common.worker.{WorkTask, WorkerMain}
+import java.io.{BufferedInputStream, BufferedOutputStream}
import java.net.URI
-import java.nio.file.Files
import java.nio.file.FileSystems
import java.nio.file.FileVisitResult
+import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import java.nio.file.SimpleFileVisitor
@@ -19,8 +18,8 @@ import java.nio.file.attribute.BasicFileAttributes
import java.util.Collections
import java.util.List as JList
import net.sourceforge.argparse4j.ArgumentParsers
-import net.sourceforge.argparse4j.inf.{ArgumentParser, Namespace}
import net.sourceforge.argparse4j.impl.Arguments
+import net.sourceforge.argparse4j.inf.{ArgumentParser, Namespace}
import org.jacoco.core.instr.Instrumenter
import org.jacoco.core.runtime.OfflineInstrumentationAccessGenerator
import scala.jdk.CollectionConverters.*
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/AnnexAnalysis.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/AnnexAnalysis.scala
new file mode 100644
index 000000000..03c7cc757
--- /dev/null
+++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/AnnexAnalysis.scala
@@ -0,0 +1,92 @@
+package higherkindness.rules_scala.workers.zinc.compile
+
+import java.io.File
+import java.nio.file.Path
+import java.util
+import java.util.Optional
+import sbt.internal.inc.SourceInfos
+import scala.collection.mutable.ArrayBuffer
+import xsbti.*
+import xsbti.api.{ClassLike, DependencyContext}
+import xsbti.compile.analysis.ReadSourceInfos
+
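+/**
+ * A small stand-in for Zinc's full analysis now that this ruleset no longer performs incremental compilation. The
+ * callback returned by [[getCallback]] records only what later steps need: class APIs (for test discovery), main
+ * classes (for the main class manifest), and the jars the compiler actually read (for used-dependency tracking).
+ */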
+case class AnnexAnalysis(
+ apis: ArrayBuffer[ClassLike] = ArrayBuffer.empty,
+ mainClasses: ArrayBuffer[String] = ArrayBuffer.empty,
+ usedJars: ArrayBuffer[Path] = ArrayBuffer.empty,
+) {
+ def getCallback(converter: FileConverter): AnalysisCallback3 = new AnalysisCallback3 {
+ override def api(sourceFile: File, classApi: ClassLike): Unit = apis += classApi
+ override def api(sourceFile: VirtualFileRef, classApi: ClassLike): Unit = apis += classApi
+ override def apiPhaseCompleted(): Unit = {}
+ override def binaryDependency(
+ onBinaryEntry: File,
+ onBinaryClassName: String,
+ fromClassName: String,
+ fromSourceFile: File,
+ context: DependencyContext,
+ ): Unit = usedJars += onBinaryEntry.toPath
+
+ override def binaryDependency(
+ onBinaryEntry: Path,
+ onBinaryClassName: String,
+ fromClassName: String,
+ fromSourceFile: VirtualFileRef,
+ context: DependencyContext,
+ ): Unit = usedJars += onBinaryEntry
+
+ override def classDependency(onClassName: String, sourceClassName: String, context: DependencyContext): Unit = {}
+ override def classesInOutputJar(): java.util.Set[String] = java.util.Collections.emptySet()
+ override def dependencyPhaseCompleted(): Unit = {}
+ override def enabled(): Boolean = true
+ override def generatedLocalClass(source: File, classFile: File): Unit = {}
+ override def generatedLocalClass(source: VirtualFileRef, classFile: Path): Unit = {}
+ override def generatedNonLocalClass(
+ source: File,
+ classFile: File,
+ binaryClassName: String,
+ sourceClassName: String,
+ ): Unit = {}
+
+ override def generatedNonLocalClass(
+ source: VirtualFileRef,
+ classFile: Path,
+ binaryClassName: String,
+ sourceClassName: String,
+ ): Unit = {}
+
+ override def getPickleJarPair: Optional[T2[Path, Path]] = Optional.empty()
+
+ /**
+ * I don't really understand what this is supposed to do. It doesn't seem to be used in the Scala 3 compiler; the
+ * only place I could find it being used is the Scala 2 compiler bridge (the implementation of the compiler
+ * interface for Scala 2):
+ * [[https://github.com/sbt/zinc/blob/75d54b672adbbda1b528f5759235704de1ba333f/internal/compiler-bridge/src/main/scala/xsbt/CompilerBridge.scala#L196]]
+ *
+ * It seems that when there are compilation problems, the Scala 2 compiler bridge invokes this method, shoves the
+ * result into an `xsbt.InterfaceCompileFailed2`, and throws that exception. That doesn't seem very useful, since we
+ * could've just called it ourselves.
+ */
+ override def getSourceInfos: ReadSourceInfos = SourceInfos.empty
+ override def isPickleJava: Boolean = false
+ override def mainClass(sourceFile: File, className: String): Unit = mainClasses += className
+ override def mainClass(sourceFile: VirtualFileRef, className: String): Unit = mainClasses += className
+ override def problem(what: String, pos: Position, message: String, severity: Severity, reported: Boolean): Unit = {}
+ override def problem2(
+ what: String,
+ position: Position,
+ message: String,
+ severity: Severity,
+ reported: Boolean,
+ rendered: Optional[String],
+ diagnosticCode: Optional[DiagnosticCode],
+ diagnosticRelatedInformation: util.List[DiagnosticRelatedInformation],
+ actions: util.List[Action],
+ ): Unit = {}
+
+ override def startSource(source: File): Unit = {}
+ override def startSource(source: VirtualFile): Unit = {}
+ override def toVirtualFile(path: Path): VirtualFile = converter.toVirtualFile(path)
+ override def usedName(className: String, name: String, useScopes: java.util.EnumSet[UseScope]): Unit = {}
+ }
+}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/BUILD b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/BUILD
index b8b07f59d..46f16f8dd 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/BUILD
+++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/BUILD
@@ -8,10 +8,13 @@ scala_binary(
scala_toolchain_name = "annex_bootstrap_3",
visibility = ["//visibility:public"],
deps = [
+ "//src/main/scala/higherkindness/rules_scala/common/classloaders",
"//src/main/scala/higherkindness/rules_scala/common/error",
"//src/main/scala/higherkindness/rules_scala/common/interrupt",
+ "//src/main/scala/higherkindness/rules_scala/common/sbt-testing:common",
"//src/main/scala/higherkindness/rules_scala/common/worker",
"//src/main/scala/higherkindness/rules_scala/workers/common",
+ "//src/main/scala/sbt/internal/inc/classfile",
"//third_party/bazel/src/java_tools/buildjar/java/com/google/devtools/build/buildjar/jarhelper",
],
)
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/Deps.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/Deps.scala
deleted file mode 100644
index a019ec05d..000000000
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/Deps.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-package higherkindness.rules_scala
-package workers.zinc.compile
-
-import workers.common.FileUtil
-import java.math.BigInteger
-import java.nio.file.{Files, Path}
-import java.security.MessageDigest
-import sbt.internal.inc.{PlainVirtualFile, Relations}
-import xsbti.compile.PerClasspathEntryLookup
-
-sealed trait Dep {
- def file: Path
- def classpath: Path
-}
-
-case class LibraryDep(file: Path) extends Dep {
- def classpath: Path = file
-}
-
-case class DepAnalysisFiles(apis: Path, relations: Path)
-
-case class ExternalDep(file: Path, classpath: Path, analysisStore: Path) extends Dep
-
-object Dep {
-
- def sha256(file: Path): String = {
- val digest = MessageDigest.getInstance("SHA-256")
- new BigInteger(1, digest.digest(Files.readAllBytes(file))).toString(16)
- }
-
- def create(depsCache: Option[Path], classpath: Seq[Path], analyses: Map[Path, (Path, Path)]): Seq[Dep] = {
- val roots = scala.collection.mutable.Set[Path]()
- classpath.flatMap { original =>
- analyses.get(original).fold[Option[Dep]](Some(LibraryDep(original))) { analysis =>
- val root = analysis._1
- if (roots.add(root)) {
- depsCache match {
- case Some(cacheRoot) => {
- val cachedPath = cacheRoot.resolve(sha256(original))
- FileUtil.extractZipIdempotently(original, cachedPath)
- Some(ExternalDep(original, cachedPath, analysis._2))
- }
- case _ => {
- FileUtil.extractZip(original, root)
- Some(ExternalDep(original, root, analysis._2))
- }
- }
- } else {
- None
- }
- }
- }
- }
-
- def used(deps: Iterable[Dep], relations: Relations, lookup: PerClasspathEntryLookup): Dep => Boolean = {
- val externalDeps = relations.allExternalDeps
- val libraryDeps = relations.allLibraryDeps
-
- // formatting breaks this code
- // format: off
- ({
- case ExternalDep(file, _, _) => externalDeps.exists(lookup.definesClass(PlainVirtualFile(file)).apply)
- case LibraryDep(file) => libraryDeps(PlainVirtualFile(file.toAbsolutePath().normalize()))
- })
- // format: on
- }
-}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/FilteredInfos.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/FilteredInfos.scala
deleted file mode 100644
index 0ffa960e7..000000000
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/FilteredInfos.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package sbt.internal.inc
-
-import sbt.internal.inc.SourceInfos
-import xsbti.compile.analysis.SourceInfo
-import xsbti.{Problem, VirtualFileRef}
-
-/**
- * The consistent analysis format in Zinc does not send the Position in the Problems in the SourceInfos through the read
- * or write mapper, so the file paths are absolute rather than relativized, which means they are not machine independent
- * and reproducible.
- *
- * This filters out the paths from those Problems/Positions, so we can get a deterministic output.
- *
- * TODO: fix this problem in Zinc.
- */
-object FilteredInfos {
- def getFilteredInfos(infos: SourceInfos): SourceInfos = {
-
- val filteredInfos = infos.allInfos.map { case (virtualFileRef: VirtualFileRef, info: SourceInfo) =>
- val underlyingInfo = info.asInstanceOf[UnderlyingSourceInfo]
-
- val filteredInfo = SourceInfos.makeInfo(
- reported = Seq.empty[Problem],
- unreported = Seq.empty[Problem],
- mainClasses = underlyingInfo.mainClasses,
- )
- (virtualFileRef, filteredInfo)
- }
-
- SourceInfos.of(filteredInfos)
- }
-}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/FilteredRelations.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/FilteredRelations.scala
deleted file mode 100644
index d77258678..000000000
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/FilteredRelations.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package sbt.internal.inc
-
-/**
- * Zinc's libraryClassName is a map from a library jar to class names. Problem is it's not deterministic. You only get a
- * single class in the map and it's not always the same class. Until that bug in Zinc is fixed, we're setting the
- * libraryClassName to an empty libraryClassName because empty is better than non-deterministic.
- *
- * This class is in the sbt.internal.inc package to get access to the MRelationsNameHashing class, which is private to
- * that package. Super duper hacky, but I wasn't able to find a better way to change the libraryClassName for the
- * relation.
- *
- * TODO: fix this bug in Zinc
- */
-object FilteredRelations {
- def getFilteredRelations(relations: Relations): Relations = {
- val emptyRelations = Relations.empty
-
- relations
- .asInstanceOf[MRelationsNameHashing]
- .copy(
- libraryClassName = emptyRelations.libraryClassName,
- )
- }
-}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincPersistence.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincPersistence.scala
deleted file mode 100644
index d4a9984ac..000000000
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincPersistence.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package higherkindness.rules_scala
-package workers.zinc.compile
-
-import workers.common.FileUtil
-
-import java.nio.file.Files
-import java.nio.file.Path
-
-trait ZincPersistence {
- def load(): Unit
- def save(): Unit
-}
-
-class FilePersistence(cacheDir: Path, analysisStorePath: Path, jar: Path) extends ZincPersistence {
- private val cacheAnalysisStorePath: Path = cacheDir.resolve("analysis_store.gz")
- private val cacheJar: Path = cacheDir.resolve("classes.jar")
-
- /**
- * Existence indicates that files are incomplete.
- */
- private val tmpMarker: Path = cacheDir.resolve(".tmp")
-
- def load(): Unit = {
- if (Files.exists(cacheDir) && Files.notExists(tmpMarker)) {
- Files.copy(cacheAnalysisStorePath, analysisStorePath)
- Files.copy(cacheJar, jar)
- }
- }
- def save(): Unit = {
- if (Files.exists(cacheDir)) {
- FileUtil.delete(cacheDir)
- }
- Files.createDirectories(cacheDir)
- Files.createFile(tmpMarker)
- Files.copy(analysisStorePath, cacheAnalysisStorePath)
- Files.copy(jar, cacheJar)
- Files.delete(tmpMarker)
- }
-}
-
-object NullPersistence extends ZincPersistence {
- def load(): Unit = ()
- def save(): Unit = ()
-}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincRunner.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincRunner.scala
index 0481bbc8e..09ba6a031 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincRunner.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/compile/ZincRunner.scala
@@ -1,57 +1,30 @@
-package higherkindness.rules_scala
-package workers.zinc.compile
-
-import common.args.ArgsUtil
-import common.interrupt.InterruptUtil
-import common.error.AnnexWorkerError
-import common.worker.{WorkTask, WorkerMain}
-import workers.common.{AnalysisUtil, AnnexLogger, AnnexMapper, AnnexScalaInstance, CommonArguments, FileUtil, LoggedReporter}
+package higherkindness.rules_scala.workers.zinc.compile
+
import com.google.devtools.build.buildjar.jarhelper.JarCreator
-import java.io.{File, PrintStream, PrintWriter}
+import higherkindness.rules_scala.common.args.ArgsUtil
+import higherkindness.rules_scala.common.classloaders.ClassLoaders
+import higherkindness.rules_scala.common.error.AnnexWorkerError
+import higherkindness.rules_scala.common.interrupt.InterruptUtil
+import higherkindness.rules_scala.common.sbt_testing.{TestDiscovery, TestFrameworkLoader, TestsFileData}
+import higherkindness.rules_scala.common.worker.{WorkTask, WorkerMain}
+import higherkindness.rules_scala.workers.common.*
+import java.io.{File, PrintWriter}
import java.net.URLClassLoader
+import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path, Paths}
-import java.util
-import java.util.{List as JList, Optional}
+import java.util.Optional
+import javax.tools.{StandardLocation, ToolProvider}
import net.sourceforge.argparse4j.ArgumentParsers
-import net.sourceforge.argparse4j.helper.HelpScreenException
-import net.sourceforge.argparse4j.impl.Arguments as Arg
-import net.sourceforge.argparse4j.inf.{ArgumentParserException, Namespace}
+import play.api.libs.json.Json
+import sbt.internal.inc.classfile.analyzeJavaClasses
import sbt.internal.inc.classpath.ClassLoaderCache
-import sbt.internal.inc.caching.ClasspathCache
-import sbt.internal.inc.{Analysis, AnalyzingCompiler, CompileFailed, FilteredInfos, FilteredRelations, FilteredSetup, IncrementalCompilerImpl, Locate, PlainVirtualFile, PlainVirtualFileConverter, ZincUtil}
+import sbt.internal.inc.javac.DiagnosticsReporter
+import sbt.internal.inc.{CompileOutput, PlainVirtualFile, PlainVirtualFileConverter, ZincUtil}
+import sbt.internal.util.LoggerWriter
import scala.jdk.CollectionConverters.*
-import scala.util.Try
import scala.util.control.NonFatal
-import xsbti.{T2, VirtualFile, VirtualFileRef}
-import xsbti.compile.{AnalysisContents, AnalysisStore, Changes, ClasspathOptionsUtil, CompileAnalysis, CompileOptions, CompileProgress, CompilerCache, DefaultExternalHooks, DefinesClass, ExternalHooks, FileHash, IncOptions, Inputs, MiniSetup, PerClasspathEntryLookup, PreviousResult, Setup, TastyFiles}
-
-class ZincRunnerWorkerConfig private (
- val persistenceDir: Option[Path],
- val usePersistence: Boolean,
- val extractedFileCache: Option[Path],
-)
-
-object ZincRunnerWorkerConfig {
- def apply(namespace: Namespace): ZincRunnerWorkerConfig = {
- new ZincRunnerWorkerConfig(
- pathFrom("persistence_dir", namespace),
- Option(namespace.getBoolean("use_persistence")).map(Boolean.unbox).getOrElse(false),
- pathFrom("extracted_file_cache", namespace),
- )
- }
-
- private def pathFrom(arg: String, namespace: Namespace): Option[Path] = {
- Option(namespace.getString(arg)).map { pathString =>
- if (pathString.startsWith("~" + File.separator)) {
- Paths.get(pathString.replace("~", sys.props.getOrElse("user.home", "")))
- } else if (pathString.startsWith("~")) {
- throw new Exception("Unsupported home directory expansion")
- } else {
- Paths.get(pathString)
- }
- }
- }
-}
+import xsbti.compile.{DependencyChanges, ScalaInstance}
+import xsbti.{AnalysisCallback, AnalysisCallback3, CompileFailed, Logger, Reporter, VirtualFile, VirtualFileRef}
/**
* Caching
@@ -74,7 +47,7 @@ object ZincRunnerWorkerConfig {
* that Zinc caches. We do so to prevent non-determinism in Zinc's analysis store files. Check the comments in
* AnnexScalaInstance for more info.
*/
-object ZincRunner extends WorkerMain[ZincRunnerWorkerConfig] {
+object ZincRunner extends WorkerMain[Unit] {
// Using Thread.interrupt to interrupt concurrent Zinc/Scala compilations that use a shared ScalaInstance (and thus
// shared classloaders) can cause strange concurrency errors. To avoid those strange concurrency errors we only
@@ -86,44 +59,200 @@ object ZincRunner extends WorkerMain[ZincRunnerWorkerConfig] {
private val classloaderCache = new ClassLoaderCache(new URLClassLoader(Array()))
- private val compilerCache = CompilerCache.fresh
-
// prevents GC of the soft reference in classloaderCache
private var lastCompiler: AnyRef = null
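+  /**
+   * Runs the Scala compiler over all of the sources (Java sources included, since the Scala compiler parses them for
+   * cross-referencing), reporting what it reads and produces through the given analysis callback.
+   */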
+ private def compileScala(
+ task: WorkTask[Unit],
+ parsedArguments: CommonArguments,
+ scalaInstance: ScalaInstance,
+ normalizedSources: Iterable[Path],
+ classesOutputDirectory: Path,
+ analysisCallback: AnalysisCallback3,
+ reporter: Reporter,
+ logger: Logger,
+ ): Unit = try {
+ val sourceVirtualFiles = normalizedSources.view.map(PlainVirtualFile(_): VirtualFile).toArray
+ val classpath = parsedArguments.classpath.view.map(path => PlainVirtualFile(path): VirtualFile).toArray
+
+ // Check if we should include the -sourceroot flag in the compiler options.
+ // We only include it for Scala 3 and later versions, as it is not supported
+ // in Scala 2.x.
+ // We include it so that the TASTy files generated by the Scala compiler are
+ // deterministic across machines and across the directories Bazel uses for
+ // multiplexed sandbox execution.
+ val shouldIncludeSourceRoot = !scalaInstance.actualVersion.startsWith("0.") &&
+ scalaInstance.actualVersion.startsWith("3")
+
+ val scalacOptions =
+ parsedArguments.plugins.view.map(p => s"-Xplugin:$p").toArray ++
+ parsedArguments.compilerOptions ++
+ parsedArguments.compilerOptionsReferencingPaths.toArray ++
+ (
+ if (shouldIncludeSourceRoot) {
+ Array("-sourceroot", task.workDir.toAbsolutePath.toString)
+ } else {
+ Array.empty[String]
+ }
+ )
+
+ val scalaCompiler = ZincUtil
+ .scalaCompiler(scalaInstance, parsedArguments.compilerBridge)
+ .withClassLoaderCache(classloaderCache)
+
+ lastCompiler = scalaCompiler
+
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
+
+ scalaCompiler.compile(
+ /*
+ * We provide all the sources to the compiler, even Java sources. The Scala compiler isn't capable of compiling
+ * Java, but it can parse it, so Scala and Java sources can reference each other within the same compilation unit.
+ *
+ * For more information on how mixed compilation works, see:
+ * https://github.com/sbt/zinc/blob/8b114cafbbeef9bc54b70c74990b74cd9bb20de3/internal/compiler-interface/src/main/java/xsbti/compile/CompileOrder.java
+ */
+ sourceVirtualFiles,
+ classpath,
+ PlainVirtualFileConverter.converter,
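+ // With incremental compilation gone, every compile starts from scratch, so we report an empty set of changes.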
+ changes = new DependencyChanges {
+ override def isEmpty: Boolean = true
+ override def modifiedBinaries(): Array[File] = Array.empty
+ override def modifiedClasses(): Array[String] = Array.empty
+ override def modifiedLibraries(): Array[VirtualFileRef] = Array.empty
+ },
+ scalacOptions,
+ CompileOutput(classesOutputDirectory),
+ analysisCallback,
+ reporter,
+ progressOpt = Optional.empty(),
+ logger,
+ )
+ } catch {
+ // The thread running this may have been interrupted during compilation due to a cancel request.
+ // It's possible that the interruption contributed to the error. We should check whether we were
+ // interrupted, so we can respond with a cancellation rather than erroring and failing the build.
+ case _: CompileFailed =>
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
+ throw new AnnexWorkerError(-1)
+ case e: ClassFormatError =>
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
+ throw new AnnexWorkerError(1, "You may be missing a `macro = True` attribute.", e)
+ case e: StackOverflowError =>
+ // Downgrade to NonFatal error.
+ // The JVM is not guaranteed to free shared resources correctly when unwinding the stack to catch a StackOverflowError,
+ // but since we don't share resources between work threads, this should be mostly safe for us for now.
+ // If Bazel could better handle the worker shutting down suddenly, we could allow this to be caught by
+ // the UncaughtExceptionHandler in WorkerMain, and exit the entire process to be safe.
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
+ throw new AnnexWorkerError(1, "StackOverflowError", e)
+ case NonFatal(e) =>
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
+ throw e
+ }
private def labelToPath(label: String) = Paths.get(label.replaceAll("^/+", "").replaceAll(raw"[^\w/]", "_"))
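+  /**
+   * If there are any Java sources, compiles them with the system Java compiler (javac) against the freshly compiled
+   * Scala classes, then runs Zinc's analyzeJavaClasses over the results so Java dependencies also land in the
+   * analysis callback.
+   */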
+ private def maybeCompileJava(
+ task: WorkTask[Unit],
+ parsedArguments: CommonArguments,
+ normalizedSources: Iterable[Path],
+ classesOutputDirectory: Path,
+ analysisCallback: AnalysisCallback,
+ reporter: Reporter,
+ logger: Logger,
+ ): Unit = {
+ val javaSources = normalizedSources.view.map(_.toString).filter(_.endsWith(".java")).toList
+
+ if (javaSources.nonEmpty) {
+ val javaCompiler = ToolProvider.getSystemJavaCompiler
+ val writer = new LoggerWriter(logger)
+ val diagnosticListener = new DiagnosticsReporter(reporter)
+ val fileManager = javaCompiler.getStandardFileManager(diagnosticListener, null, null)
+
+ fileManager.setLocation(StandardLocation.CLASS_OUTPUT, List(classesOutputDirectory.toFile).asJava)
+
+ val options = "-classpath" +:
+ (classesOutputDirectory +: parsedArguments.classpath).map(_.toString).mkString(":") +:
+ parsedArguments.javaCompilerOptions.toList
+
+ val compilationTask = javaCompiler.getTask(
+ writer,
+ fileManager,
+ diagnosticListener,
+ options.asJava,
+ null,
+ fileManager.getJavaFileObjects(javaSources*),
+ )
+
+ val wasSuccessful =
+ try {
+ compilationTask.call()
+ } catch {
+ case NonFatal(exception) =>
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
+
+ throw exception
+ }
+
+ if (!wasSuccessful) {
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
- protected def init(args: Option[Array[String]]): ZincRunnerWorkerConfig = {
- val parser = ArgumentParsers.newFor("zinc-worker").addHelp(true).build
- parser.addArgument("--persistence_dir", /* deprecated */ "--persistenceDir").metavar("path")
- parser.addArgument("--use_persistence").`type`(Arg.booleanType)
- parser.addArgument("--extracted_file_cache").metavar("path")
- // deprecated
- parser.addArgument("--max_errors")
- val namespace = parser.parseArgsOrFail(args.getOrElse(Array.empty))
- ZincRunnerWorkerConfig(namespace)
+ throw new AnnexWorkerError(1)
+ }
+
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
+
+ analyzeJavaClasses(
+ normalizedSources.view.map(PlainVirtualFile(_)).toList,
+ parsedArguments.classpath,
+ classesOutputDirectory,
+ logger,
+ analysisCallback,
+ )
+ }
}
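+  /**
+   * If a tests file was requested, loads each configured test framework, discovers tests among the compiled class
+   * APIs, and writes the result to the tests file as JSON for the test runner. Returns the jars that test-discovering
+   * frameworks were loaded from, so they can be counted as used dependencies.
+   */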
+ private def maybeWriteTestsFile(parsedArguments: CommonArguments, analysis: AnnexAnalysis): Array[Path] =
+ parsedArguments.testsFile
+ .map { path =>
+ val classloader = ClassLoaders.sbtTestClassLoader(parsedArguments.classpath.map(_.toUri.toURL))
+ val frameworkLoader = new TestFrameworkLoader(classloader)
+ val testsFileData = TestsFileData(
+ parsedArguments.testFrameworks
+ .flatMap(frameworkName => frameworkLoader.load(frameworkName).map((frameworkName, _)))
+ .map { case (frameworkName, framework) =>
+ frameworkName -> new TestDiscovery(framework)(analysis.apis.toSet)
+ }
+ .toMap,
+ )
+
+ val loadedJarsByFramework = frameworkLoader.getLoadedJarsByFramework()
+ val loadedJars = testsFileData.testsByFramework.view
+ .filter { case (_, tests) => tests.nonEmpty }
+ .keys
+ .flatMap(loadedJarsByFramework.get)
+ .flatten
+ .toArray
+
+ Files.write(path, Json.stringify(Json.toJson(testsFileData)).getBytes(StandardCharsets.UTF_8))
+
+ loadedJars
+ }
+ .getOrElse(Array.empty)
+
+ protected def init(args: Option[Array[String]]): Unit = ()
+
private val parser = {
val parser = ArgumentParsers.newFor("zinc").addHelp(true).defaultFormatWidth(80).fromFilePrefix("@").build()
CommonArguments.add(parser)
}
- protected def work(task: WorkTask[ZincRunnerWorkerConfig]): Unit = {
+ protected def work(task: WorkTask[Unit]): Unit = {
val workRequest = CommonArguments(
ArgsUtil.parseArgsOrFailSafe(task.args, parser, task.output),
task.workDir,
)
InterruptUtil.throwIfInterrupted(task.isCancelled)
- // These two paths must only be used when persistence is enabled because they escape the sandbox.
- // Sandboxing is disabled if persistence is enabled.
- val (persistenceDir, extractedFileCache) = if (task.context.usePersistence) {
- (task.context.persistenceDir, task.context.extractedFileCache)
- } else {
- (None, None)
- }
-
val logger = new AnnexLogger(workRequest.logLevel, task.workDir, task.output)
val tmpDir = workRequest.tmpDir
@@ -143,254 +272,91 @@ object ZincRunner extends WorkerMain[ZincRunnerWorkerConfig] {
// extract upstream classes
val classesDir = tmpDir.resolve("classes")
val outputJar = workRequest.outputJar
-
- val deps = {
- val analyses: Map[Path, (Path, Path)] = {
- if (task.context.usePersistence) {
- workRequest.analyses.flatMap { analysis =>
- analysis.jars.map(jar =>
- jar -> (
- classesDir.resolve(labelToPath(analysis.label)),
- analysis.analysisStore,
- ),
- )
- }.toMap
- } else {
- Map.empty[Path, (Path, Path)]
- }
- }
- Dep.create(extractedFileCache, workRequest.classpath, analyses)
- }
- InterruptUtil.throwIfInterrupted(task.isCancelled)
-
- val debug = workRequest.debug
- val analysisStorePath = workRequest.outputAnalysisStore
- val readWriteMappers = AnnexMapper.mappers(task.workDir, task.context.usePersistence)
- val analysisStore: AnalysisStore = AnalysisUtil.getAnalysisStore(analysisStorePath.toFile, debug, readWriteMappers)
-
- val persistence = persistenceDir.fold[ZincPersistence](NullPersistence) { rootDir =>
- val path = workRequest.label.replaceAll("^/+", "").replaceAll(raw"[^\w/]", "_")
- new FilePersistence(rootDir.resolve(path), analysisStorePath, outputJar)
- }
-
+ val readWriteMappers = AnnexMapper.mappers(task.workDir)
val classesOutputDir = classesDir.resolve(labelToPath(workRequest.label))
- try {
- persistence.load()
- if (Files.exists(outputJar)) {
- try FileUtil.extractZip(outputJar, classesOutputDir)
- catch {
- case NonFatal(e) =>
- FileUtil.delete(classesOutputDir)
- throw e
- }
- }
- } catch {
- case NonFatal(e) =>
- logger.warn(() => s"Failed to load cached analysis: $e")
- Files.delete(analysisStorePath)
- }
Files.createDirectories(classesOutputDir)
- val previousResult = Try(analysisStore.get())
- .fold(
- { e =>
- logger.warn(() => s"Failed to load previous analysis: $e")
- Optional.empty[AnalysisContents]()
- },
- identity,
- )
- .map[PreviousResult](contents =>
- PreviousResult.of(Optional.of(contents.getAnalysis), Optional.of(contents.getMiniSetup)),
- )
- .orElseGet(() => PreviousResult.of(Optional.empty[CompileAnalysis](), Optional.empty[MiniSetup]()))
-
- // setup compiler
- val scalaInstance =
- AnnexScalaInstance.getAnnexScalaInstance(
- workRequest.compilerClasspath.view.map(_.toFile).toArray,
- task.workDir,
- isWorker,
- )
-
- // Check if we should include the -sourceroot flag in the compiler options
- // We only include it for Scala 3 and later versions, as it is not supported
- // in Scala 2.x versions.
- // We include this so that the TASTy file generated by the Scala compiler
- // will be deterministic across machines and directories Bazel uses for
- // multiplexed sandbox execution.
- val shouldIncludeSourceRoot = !scalaInstance.actualVersion.startsWith("0.") &&
- scalaInstance.actualVersion.startsWith("3")
+ val scalaInstance = AnnexScalaInstance
+ .getAnnexScalaInstance(workRequest.compilerClasspath.view.map(_.toFile).toArray, task.workDir, isWorker)
- val scalacOptions =
- workRequest.plugins.view.map(p => s"-Xplugin:$p").toArray ++
- workRequest.compilerOptions ++
- workRequest.compilerOptionsReferencingPaths.toArray ++
- (if (shouldIncludeSourceRoot)
- Array("-sourceroot", task.workDir.toAbsolutePath().toString)
- else
- Array.empty[String])
-
- val compileOptions =
- CompileOptions.create
- .withSources(sources.view.map(source => PlainVirtualFile(source.toAbsolutePath().normalize())).toArray)
- .withClasspath((classesOutputDir +: deps.view.map(_.classpath)).map(path => PlainVirtualFile(path)).toArray)
- .withClassesDirectory(classesOutputDir)
- .withJavacOptions(workRequest.javaCompilerOptions)
- .withScalacOptions(scalacOptions)
-
- val compilers = {
- val scalaCompiler = ZincUtil
- .scalaCompiler(scalaInstance, workRequest.compilerBridge)
- .withClassLoaderCache(classloaderCache)
- lastCompiler = scalaCompiler
- ZincUtil.compilers(
- scalaInstance,
- // This doesn't use -bootclasspath for Scala 2.13 and 3.x. It does use it for older versions.
- // The newer versions no longer need that option. See this commit for more info:
- // https://github.com/sbt/zinc/commit/8e4186a55dbe63df57e72cc37a1e8e92aa3b4bcd
- ClasspathOptionsUtil.noboot(scalaInstance.actualVersion),
- None,
- scalaCompiler,
- )
- }
+ val normalizedSources = sources.view.map(_.toAbsolutePath.normalize()).toArray
+ val reporter = new LoggedReporter(logger, scalaInstance.actualVersion)
- val lookup = {
- val depMap = deps.collect { case ExternalDep(_, depClasspath, depAnalysisStorePath) =>
- depClasspath -> depAnalysisStorePath
- }.toMap
- new AnxPerClasspathEntryLookup(file => {
- depMap
- .get(file)
- .map { analysisStorePath =>
- val analysis = AnalysisUtil.getAnalysis(
- AnalysisUtil.getAnalysisStore(
- analysisStorePath.toFile,
- debug,
- readWriteMappers,
- ),
- )
- Analysis.Empty.copy(
- apis = analysis.apis,
- relations = analysis.relations,
- )
- }
- })
- }
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
- val externalHooks = new DefaultExternalHooks(
- Optional.of(new DeterministicDirectoryHashExternalHooks()),
- Optional.empty(),
+ val analysis = AnnexAnalysis()
+ val analysisCallback = analysis.getCallback(PlainVirtualFileConverter.converter)
+
+ compileScala(
+ task,
+ workRequest,
+ scalaInstance,
+ normalizedSources,
+ classesOutputDir,
+ analysisCallback,
+ reporter,
+ logger,
)
- val setup = {
- val incOptions = IncOptions
- .create()
- .withAuxiliaryClassFiles(Array(TastyFiles.instance()))
- .withExternalHooks(externalHooks)
- val reporter = new LoggedReporter(logger, scalaInstance.actualVersion)
- val skip = false
- val file: Path = null
- Setup.create(
- lookup,
- skip,
- file,
- compilerCache,
- incOptions,
- reporter,
- Optional.empty[CompileProgress](),
- Array.empty[T2[String, String]],
- )
- }
-
- val inputs = Inputs.of(compilers, compileOptions, setup, previousResult)
-
InterruptUtil.throwIfInterrupted(task.isCancelled)
- // compile
- val incrementalCompiler = new IncrementalCompilerImpl()
- val compileResult =
- try {
- incrementalCompiler.compile(inputs, logger)
- } catch {
- // The thread running this may have been interrupted during compilation due to a cancel request.
- // It's possible that the interruption contribute to the error. We should check if we were
- // interrupted, so we can respond with a cancellation rather than erroring and failing the build.
- case _: CompileFailed =>
- InterruptUtil.throwIfInterrupted(task.isCancelled)
- throw new AnnexWorkerError(-1)
- case e: ClassFormatError =>
- InterruptUtil.throwIfInterrupted(task.isCancelled)
- throw new AnnexWorkerError(1, "You may be missing a `macro = True` attribute.", e)
- case e: StackOverflowError =>
- // Downgrade to NonFatal error.
- // The JVM is not guaranteed to free shared resources correctly when unwinding the stack to catch a StackOverflowError,
- // but since we don't share resources between work threads, this should be mostly safe for us for now.
- // If Bazel could better handle the worker shutting down suddenly, we could allow this to be caught by
- // the UncaughtExceptionHandler in WorkerMain, and exit the entire process to be safe.
- InterruptUtil.throwIfInterrupted(task.isCancelled)
- throw new AnnexWorkerError(1, "StackOverflowError", e)
- case NonFatal(e) =>
- InterruptUtil.throwIfInterrupted(task.isCancelled)
- throw e
- }
+ maybeCompileJava(
+ task,
+ workRequest,
+ normalizedSources,
+ classesOutputDir,
+ analysisCallback,
+ reporter,
+ logger,
+ )
InterruptUtil.throwIfInterrupted(task.isCancelled)
- // create analyses
- val pathString = analysisStorePath.toAbsolutePath().normalize().toString()
- // Filter out libraryClassNames from the analysis because it is non-deterministic.
- // Can stop doing this once the bug in Zinc is fixed. Check the comment on FilteredRelations
- // for more info.
- val resultAnalysis = {
- val originalResultAnalysis = compileResult.analysis.asInstanceOf[Analysis]
- originalResultAnalysis.copy(
- relations = FilteredRelations.getFilteredRelations(originalResultAnalysis.relations),
- infos = FilteredInfos.getFilteredInfos(originalResultAnalysis.infos),
- )
- }
-
- // Filter out non-deterministic --sourceroot paths from the setup
- val filteredSetup = FilteredSetup.getFilteredSetup(compileResult.setup)
+ /*
+ * JARs on the classpath that are used during test discovery, but not by the compiler, won't show up in
+ * `analysis.usedJars`, so we need to consider them in addition to those used by the compiler.
+ *
+ * Test discovery works by iterating through the list of test frameworks provided via `workRequest.testFrameworks`,
+ * attempting to classload each framework, and then using that framework to discover tests. If a given framework
+ * successfully loads and we find a test with it, we take note of the JAR it was loaded from and consider that JAR
+ * (and therefore the target to which it belongs) used.
+ */
+ val usedDepsDuringTestDiscovery = maybeWriteTestsFile(workRequest, analysis)
- val analysisStoreText = AnalysisUtil.getAnalysisStore(
- new File(pathString.substring(0, pathString.length() - 3) + ".text.gz"),
- true,
- readWriteMappers,
- )
-
- analysisStoreText.set(AnalysisContents.create(resultAnalysis, filteredSetup))
- analysisStore.set(AnalysisContents.create(resultAnalysis, filteredSetup))
+ InterruptUtil.throwIfInterrupted(task.isCancelled)
// create used deps
- val usedDeps =
- // Filter out the Scala standard library as that should just always be
- // implicitly available and not something we should be book keeping.
- deps.filter(Dep.used(deps, resultAnalysis.relations, lookup)).filterNot { dep =>
- val filteredDepFileName = FileUtil.getNameWithoutRulesJvmExternalStampPrefix(dep.file)
-
- scalaInstance.libraryJars.view
- .map(FileUtil.getNameWithoutRulesJvmExternalStampPrefix)
- .contains(filteredDepFileName)
+ val scalaStandardLibraryJars = scalaInstance.libraryJars.view
+ .map(file => Paths.get(FileUtil.getNameWithoutRulesJvmExternalStampPrefix(file)))
+ .toSet
+
+ val usedDeps = workRequest.classpath.view
+ .map(_.normalize().toAbsolutePath)
+ .toSet
+ .intersect((analysis.usedJars.view ++ usedDepsDuringTestDiscovery.view).toSet)
+ // Filter out the Scala standard library, as it should always be implicitly available and isn't something we
+ // should be bookkeeping.
+ .view
+ .filter { path =>
+ val filteredPath = Paths.get(FileUtil.getNameWithoutRulesJvmExternalStampPrefix(path))
+
+ !scalaStandardLibraryJars.contains(filteredPath)
}
+ .toList
val writeMapper = readWriteMappers.getWriteMapper()
Files.write(
workRequest.outputUsed,
// Send the used deps through the read write mapper, to strip the sandbox prefix and
// make sure they're deterministic across machines
- usedDeps
- .map { usedDep =>
- writeMapper.mapClasspathEntry(usedDep.file).toString
- }
+ usedDeps.view
+ .map(writeMapper.mapClasspathEntry(_).toString)
+ .toList
.sorted
.asJava,
)
// create jar
- val mains =
- resultAnalysis.infos.allInfos.values.toList
- .flatMap(_.getMainClasses.toList)
- .sorted
-
+ val mains = analysis.mainClasses.toArray
val pw = new PrintWriter(workRequest.mainManifest.toFile)
try {
mains.foreach(pw.println)
@@ -405,22 +371,13 @@ object ZincRunner extends WorkerMain[ZincRunnerWorkerConfig] {
jarCreator.setVerbose(false)
mains match {
- case main :: Nil =>
+ case Array(main) =>
jarCreator.setMainClass(main)
case _ =>
}
jarCreator.execute()
- // save persisted files
- if (task.context.usePersistence) {
- try {
- persistence.save()
- } catch {
- case NonFatal(e) => logger.warn(() => s"Failed to save cached analysis: $e")
- }
- }
-
// clear temporary files
FileUtil.delete(tmpDir)
Files.createDirectory(tmpDir)
@@ -428,64 +385,3 @@ object ZincRunner extends WorkerMain[ZincRunnerWorkerConfig] {
InterruptUtil.throwIfInterrupted(task.isCancelled)
}
}
-
-final class AnxPerClasspathEntryLookup(analyses: Path => Option[CompileAnalysis]) extends PerClasspathEntryLookup {
- override def analysis(classpathEntry: VirtualFile): Optional[CompileAnalysis] =
- analyses(PlainVirtualFileConverter.converter.toPath(classpathEntry))
- .fold(Optional.empty[CompileAnalysis])(Optional.of(_))
- override def definesClass(classpathEntry: VirtualFile): DefinesClass =
- Locate.definesClass(classpathEntry)
-}
-
-/**
- * We create this to deterministically set the hash code of directories otherwise they get set to the
- * System.identityHashCode() of an object created during compilation. That results in non-determinism.
- *
- * TODO: Get rid of this once the upstream fix is released:
- * https://github.com/sbt/zinc/commit/b4db1476d7fdb2c530a97c543ec9710c13ac58e3
- */
-final class DeterministicDirectoryHashExternalHooks extends ExternalHooks.Lookup {
- // My understanding is that setting all these to None is the same as the
- // default behavior for external hooks, which provides an Optional.empty for
- // the external hooks.
- // The documentation for the getXYZ methods includes:
- // "None if is unable to determine what was changed, changes otherwise"
- // So I figure None is a safe bet here.
- override def getChangedSources(previousAnalysis: CompileAnalysis): Optional[Changes[VirtualFileRef]] =
- Optional.empty()
- override def getChangedBinaries(previousAnalysis: CompileAnalysis): Optional[util.Set[VirtualFileRef]] =
- Optional.empty()
- override def getRemovedProducts(previousAnalysis: CompileAnalysis): Optional[util.Set[VirtualFileRef]] =
- Optional.empty()
-
- // True here should be a safe default value, based on what I understand.
- // Here's why:
- //
- // There's a guard against this function returning an true incorrectly, so
- // I believe incremental compilation should still function correctly.
- // https://github.com/sbt/zinc/blob/f55b5b5abfba2dfcec0082b6fa8d329286803d2d/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala#L186
- //
- // The only other place it's used is linked below. The default is an empty
- // Option, so forall will return true if an ExternalHooks.Lookup is not provided.
- // So this should be the same as default.
- // https://github.com/sbt/zinc/blob/f55b5b5abfba2dfcec0082b6fa8d329286803d2d/internal/zinc-core/src/main/scala/sbt/internal/inc/IncrementalCommon.scala#L429
- override def shouldDoIncrementalCompilation(
- changedClasses: util.Set[String],
- previousAnalysis: CompileAnalysis,
- ): Boolean = true
-
- // We set the hash code of the directories to 0. By default they get set
- // to the System.identityHashCode(), which is dependent on the current execution
- // of the JVM, so it is not deterministic.
- // If Zinc ever changes that behavior, we can get rid of this whole class.
- override def hashClasspath(classpath: Array[VirtualFile]): Optional[Array[FileHash]] = {
- val classpathArrayAsPaths = classpath.map(virtualFile => PlainVirtualFile.extractPath(virtualFile))
- val (directories, files) = classpathArrayAsPaths.partition(path => Files.isDirectory(path))
- val directoryFileHashes = directories.map { path =>
- FileHash.of(path, 0)
- }
- val fileHashes = ClasspathCache.hashClasspath(files.toSeq)
-
- Optional.of(directoryFileHashes ++ fileHashes)
- }
-}
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/doc/DocRunner.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/doc/DocRunner.scala
index d2bb4fe66..79b5bfaee 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/doc/DocRunner.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/doc/DocRunner.scala
@@ -1,24 +1,21 @@
-package higherkindness.rules_scala
-package workers.zinc.doc
-
-import common.args.ArgsUtil
-import common.args.ArgsUtil.PathArgumentType
-import common.args.implicits.*
-import common.interrupt.InterruptUtil
-import common.worker.{WorkTask, WorkerMain}
-import common.sandbox.SandboxUtil
-import workers.common.{AnnexLogger, AnnexScalaInstance, FileUtil, LogLevel, LoggedReporter}
-import java.io.{File, PrintStream}
+package higherkindness.rules_scala.workers.zinc.doc
+
+import higherkindness.rules_scala.common.args.ArgsUtil
+import higherkindness.rules_scala.common.args.ArgsUtil.PathArgumentType
+import higherkindness.rules_scala.common.args.implicits.*
+import higherkindness.rules_scala.common.interrupt.InterruptUtil
+import higherkindness.rules_scala.common.sandbox.SandboxUtil
+import higherkindness.rules_scala.common.worker.{WorkTask, WorkerMain}
+import higherkindness.rules_scala.workers.common.{AnnexLogger, AnnexScalaInstance, FileUtil, LogLevel, LoggedReporter}
import java.net.URLClassLoader
import java.nio.file.{Files, NoSuchFileException, Path}
-import java.util.{Collections, Optional, Properties}
+import java.util.Collections
import net.sourceforge.argparse4j.ArgumentParsers
import net.sourceforge.argparse4j.impl.Arguments
import net.sourceforge.argparse4j.inf.{ArgumentParser, Namespace}
import sbt.internal.inc.classpath.ClassLoaderCache
import sbt.internal.inc.{PlainVirtualFile, PlainVirtualFileConverter, ZincUtil}
import scala.jdk.CollectionConverters.*
-import xsbti.Logger
object DocRunner extends WorkerMain[Unit] {
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/repl/ReplRunner.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/repl/ReplRunner.scala
index 9a2c0e2b1..5de829dc6 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/repl/ReplRunner.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/repl/ReplRunner.scala
@@ -1,22 +1,18 @@
-package higherkindness.rules_scala
-package workers.zinc.repl
+package higherkindness.rules_scala.workers.zinc.repl
-import common.args.ArgsUtil.PathArgumentType
-import common.args.implicits.*
-import common.sandbox.SandboxUtil
-import workers.common.LogLevel
-import workers.common.AnnexLogger
-import workers.common.AnnexScalaInstance
-import workers.common.FileUtil
-import java.io.File
+import higherkindness.rules_scala.common.args.ArgsUtil.PathArgumentType
+import higherkindness.rules_scala.common.args.implicits.*
+import higherkindness.rules_scala.common.sandbox.SandboxUtil
+import higherkindness.rules_scala.workers.common.AnnexLogger
+import higherkindness.rules_scala.workers.common.AnnexScalaInstance
+import higherkindness.rules_scala.workers.common.LogLevel
import java.nio.file.{Files, Path, Paths}
import java.util.Collections
import net.sourceforge.argparse4j.ArgumentParsers
import net.sourceforge.argparse4j.impl.Arguments
-import net.sourceforge.argparse4j.inf.{ArgumentParser, Namespace}
+import net.sourceforge.argparse4j.inf.Namespace
import sbt.internal.inc.{PlainVirtualFile, PlainVirtualFileConverter, ZincUtil}
import scala.jdk.CollectionConverters.*
-import xsbti.Logger
object ReplRunner {
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/BUILD b/src/main/scala/higherkindness/rules_scala/workers/zinc/test/BUILD
index 2f289687e..6f172804e 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/BUILD
+++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/test/BUILD
@@ -11,12 +11,9 @@ scala_library(
"//src/main/scala/higherkindness/rules_scala/common/classloaders",
"//src/main/scala/higherkindness/rules_scala/common/sandbox",
"//src/main/scala/higherkindness/rules_scala/common/sbt-testing:common",
- "//src/main/scala/higherkindness/rules_scala/workers/common",
"@annex//:net_sourceforge_argparse4j_argparse4j",
- "@annex//:org_scala_sbt_compiler_interface",
+ "@annex//:org_playframework_play_json_3",
"@annex//:org_scala_sbt_test_interface",
- "@annex//:org_scala_sbt_zinc_apiinfo_2_13",
- "@annex//:org_scala_sbt_zinc_core_2_13",
],
)
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestFrameworkRunner.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestFrameworkRunner.scala
index 12a30653b..1e8eccc2e 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestFrameworkRunner.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestFrameworkRunner.scala
@@ -1,16 +1,13 @@
-package higherkindness.rules_scala
-package workers.zinc.test
+package higherkindness.rules_scala.workers.zinc.test
-import common.classloaders.ClassLoaders
-import common.sbt_testing.JUnitXmlReporter
-import common.sbt_testing.TestDefinition
-import common.sbt_testing.TestFrameworkLoader
-import common.sbt_testing.TestHelper
-import common.sbt_testing.TestReporter
-import common.sbt_testing.TestRequest
-import common.sbt_testing.TestTaskExecutor
-
-import java.io.PrintWriter
+import higherkindness.rules_scala.common.classloaders.ClassLoaders
+import higherkindness.rules_scala.common.sbt_testing.JUnitXmlReporter
+import higherkindness.rules_scala.common.sbt_testing.TestDefinition
+import higherkindness.rules_scala.common.sbt_testing.TestFrameworkLoader
+import higherkindness.rules_scala.common.sbt_testing.TestHelper
+import higherkindness.rules_scala.common.sbt_testing.TestReporter
+import higherkindness.rules_scala.common.sbt_testing.TestRequest
+import higherkindness.rules_scala.common.sbt_testing.TestTaskExecutor
import java.io.ObjectOutputStream
import java.nio.file.Path
import sbt.testing.{Event, Framework, Logger}
@@ -59,7 +56,7 @@ class ClassLoaderTestRunner(framework: Framework, classLoaderProvider: () => Cla
val failures = mutable.Set[String]()
tests.foreach { test =>
val classLoader = classLoaderProvider()
- val isolatedFramework = new TestFrameworkLoader(classLoader, logger).load(framework.getClass.getName).get
+ val isolatedFramework = new TestFrameworkLoader(classLoader).load(framework.getClass.getName).get
TestHelper.withRunner(isolatedFramework, scopeAndTestName, classLoader, arguments) { runner =>
ClassLoaders.withContextClassLoader(classLoader) {
val tasks = runner.tasks(Array(TestHelper.taskDef(test, scopeAndTestName)))
diff --git a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestRunner.scala b/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestRunner.scala
index 03848587d..d50e9027e 100644
--- a/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestRunner.scala
+++ b/src/main/scala/higherkindness/rules_scala/workers/zinc/test/TestRunner.scala
@@ -1,16 +1,11 @@
-package higherkindness.rules_scala
-package workers.zinc.test
-
-import common.args.ArgsUtil.PathArgumentType
-import common.classloaders.ClassLoaders
-import common.args.implicits.*
-import common.sandbox.SandboxUtil
-import common.sbt_testing.AnnexTestingLogger
-import common.sbt_testing.TestDefinition
-import common.sbt_testing.TestFrameworkLoader
-import common.sbt_testing.Verbosity
-import workers.common.AnalysisUtil
-import java.io.File
+package higherkindness.rules_scala.workers.zinc.test
+
+import higherkindness.rules_scala.common.args.ArgsUtil.PathArgumentType
+import higherkindness.rules_scala.common.args.implicits.*
+import higherkindness.rules_scala.common.classloaders.ClassLoaders
+import higherkindness.rules_scala.common.sandbox.SandboxUtil
+import higherkindness.rules_scala.common.sbt_testing.{AnnexTestingLogger, TestDefinition, TestFrameworkLoader, TestsFileData, Verbosity}
+import java.io.FileInputStream
import java.net.URLClassLoader
import java.nio.file.attribute.FileTime
import java.nio.file.{FileAlreadyExistsException, Files, Path, Paths}
@@ -18,10 +13,11 @@ import java.time.Instant
import java.util.Collections
import java.util.regex.Pattern
import net.sourceforge.argparse4j.ArgumentParsers
-import net.sourceforge.argparse4j.inf.{ArgumentParser, Namespace}
import net.sourceforge.argparse4j.impl.Arguments
+import net.sourceforge.argparse4j.inf.{ArgumentParser, Namespace}
+import play.api.libs.json.Json
import scala.jdk.CollectionConverters.*
-import scala.util.control.NonFatal
+import scala.util.Using
object TestRunner {
private sealed abstract class Isolation(val level: String)
@@ -87,36 +83,28 @@ object TestRunner {
}
private class TestRunnerRequest private (
- val analysisStore: Path,
val subprocessExecutable: Option[Path],
val isolation: Isolation,
- val frameworks: List[String],
val sharedClasspath: List[Path],
val testClasspath: List[Path],
+ val testsFile: Path,
)
private object TestRunnerRequest {
def apply(runPath: Path, namespace: Namespace): TestRunnerRequest = {
new TestRunnerRequest(
- analysisStore = SandboxUtil.getSandboxPath(runPath, namespace.get[Path]("analysis_store")),
subprocessExecutable =
Option(namespace.get[Path]("subprocess_exec")).map(SandboxUtil.getSandboxPath(runPath, _)),
isolation = Isolation(namespace.getString("isolation")),
- frameworks = namespace.getList[String]("frameworks").asScala.toList,
sharedClasspath = SandboxUtil.getSandboxPaths(runPath, namespace.getList[Path]("shared_classpath")),
testClasspath = SandboxUtil.getSandboxPaths(runPath, namespace.getList[Path]("classpath")),
+ testsFile = namespace.get[Path]("tests_file"),
)
}
}
private val testArgParser: ArgumentParser = {
val parser = ArgumentParsers.newFor("test").addHelp(true).build()
- parser
- .addArgument("--analysis_store")
- .help("Analysis Store file")
- .metavar("class")
- .`type`(PathArgumentType.apply())
- .required(true)
parser
.addArgument("--subprocess_exec")
.help("Executable for SubprocessTestRunner")
@@ -126,12 +114,6 @@ object TestRunner {
.choices(Isolation.values.keys.toSeq: _*)
.help("Test isolation")
.setDefault_(Isolation.None.level)
- parser
- .addArgument("--frameworks")
- .help("Class names of sbt.testing.Framework implementations")
- .metavar("class")
- .nargs("*")
- .setDefault_(Collections.emptyList)
parser
.addArgument("--shared_classpath")
.help("Classpath to share between tests")
@@ -139,6 +121,11 @@ object TestRunner {
.nargs("*")
.`type`(PathArgumentType.apply())
.setDefault_(Collections.emptyList)
+ parser
+ .addArgument("--tests_file")
+      .help("File containing discovered tests")
+      .metavar("file")
+      .`type`(PathArgumentType.apply())
+      .required(true)
parser
.addArgument("classpath")
.help("Testing classpath")
@@ -174,28 +161,10 @@ object TestRunner {
val classLoader = ClassLoaders.sbtTestClassLoader(testClasspath.map(_.toUri.toURL).toSeq)
val sharedClassLoader = ClassLoaders.sbtTestClassLoader(sharedUrls)
-
- val apis =
- try {
- AnalysisUtil
- .getAnalysis(
- AnalysisUtil.getAnalysisStore(
- testRunnerRequest.analysisStore.toFile,
- debug = false,
- isIncremental = false,
- // There's no sandboxing here because this isn't a worker, so just use an empty path
- // for the sandbox prefix.
- workDir = Paths.get(""),
- ),
- )
- .apis
- } catch {
- case NonFatal(e) =>
- throw new Exception(s"Failed to load APIs from analysis store: ${testRunnerRequest.analysisStore}", e)
- }
-
- val loader = new TestFrameworkLoader(classLoader, logger)
- val frameworks = testRunnerRequest.frameworks.flatMap(loader.load)
+ val loader = new TestFrameworkLoader(classLoader)
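+    // The tests file is JSON handed to the runner via --tests_file; TestsFileData
+    // maps each framework's class name to its discovered tests (see
+    // testsByFramework below).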
+ val testsFileData = Using(new FileInputStream(testRunnerRequest.testsFile.toString)) { stream =>
+ Json.fromJson[TestsFileData](Json.parse(stream)).get
+ }.get
val testFilter = sys.env.get("TESTBRIDGE_TEST_ONLY").map(_.split("#", 2))
val testClass = testFilter
@@ -205,46 +174,47 @@ object TestRunner {
val testScopeAndName = testFilter.flatMap(_.lift(1))
var count = 0
- val passed = frameworks.forall { framework =>
- val tests = new TestDiscovery(framework)(apis.internal.values.toSet).sortBy(_.name)
- val filter = for {
- index <- sys.env.get("TEST_SHARD_INDEX").map(_.toInt)
- total <- sys.env.get("TEST_TOTAL_SHARDS").map(_.toInt)
- } yield (test: TestDefinition, i: Int) => i % total == index
- val filteredTests = tests.filter { test =>
- testClass.forall(_.matcher(test.name).matches) && {
- count += 1
- filter.fold(true)(_(test, count))
+ val passed = testsFileData.testsByFramework.view
+ .flatMap { case (frameworkName, tests) => loader.load(frameworkName).map((frameworkName, _, tests)) }
+ .forall { case (frameworkName, framework, tests) =>
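+        // Bazel sets TEST_SHARD_INDEX and TEST_TOTAL_SHARDS when shard_count is
+        // configured; each shard claims tests round-robin by their running count.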
+ val filter = for {
+ index <- sys.env.get("TEST_SHARD_INDEX").map(_.toInt)
+ total <- sys.env.get("TEST_TOTAL_SHARDS").map(_.toInt)
+ } yield (test: TestDefinition, i: Int) => i % total == index
+ val filteredTests = tests.filter { test =>
+ testClass.forall(_.matcher(test.name).matches) && {
+ count += 1
+ filter.fold(true)(_(test, count))
+ }
}
- }
- filteredTests.isEmpty || {
- val runner = testRunnerRequest.isolation match {
- case Isolation.ClassLoader =>
- val urls = testClasspath.filterNot(sharedClasspath.toSet).map(_.toUri.toURL).toArray
- def classLoaderProvider() = new URLClassLoader(urls, sharedClassLoader)
- new ClassLoaderTestRunner(framework, classLoaderProvider _, logger)
- case Isolation.Process =>
- val executable = testRunnerRequest.subprocessExecutable.map(_.toString).getOrElse {
- throw new Exception("Subprocess executable missing for test ran in process isolation mode.")
- }
- new ProcessTestRunner(
- framework,
- testClasspath.toSeq,
- new ProcessCommand(executable, testRunnerArgs.subprocessArgs),
- logger,
- )
- case Isolation.None => new BasicTestRunner(framework, classLoader, logger)
- }
-
- try {
- runner.execute(filteredTests.toList, testScopeAndName.getOrElse(""), testRunnerArgs.frameworkArgs)
- } catch {
- case e: Throwable =>
- e.printStackTrace()
- false
+ filteredTests.isEmpty || {
+ val runner = testRunnerRequest.isolation match {
+ case Isolation.ClassLoader =>
+ val urls = testClasspath.filterNot(sharedClasspath.toSet).map(_.toUri.toURL).toArray
+ def classLoaderProvider() = new URLClassLoader(urls, sharedClassLoader)
+ new ClassLoaderTestRunner(framework, classLoaderProvider _, logger)
+ case Isolation.Process =>
+ val executable = testRunnerRequest.subprocessExecutable.map(_.toString).getOrElse {
+              throw new Exception("Subprocess executable missing for tests run in process isolation mode.")
+ }
+ new ProcessTestRunner(
+ framework,
+ testClasspath.toSeq,
+ new ProcessCommand(executable, testRunnerArgs.subprocessArgs),
+ logger,
+ )
+ case Isolation.None => new BasicTestRunner(framework, classLoader, logger)
+ }
+
+ try {
+ runner.execute(filteredTests.toList, testScopeAndName.getOrElse(""), testRunnerArgs.frameworkArgs)
+ } catch {
+ case e: Throwable =>
+ e.printStackTrace()
+ false
+ }
}
}
- }
sys.exit(if (passed) 0 else 1)
}
}
diff --git a/src/main/scala/sbt/internal/inc/classfile/BUILD.bazel b/src/main/scala/sbt/internal/inc/classfile/BUILD.bazel
new file mode 100644
index 000000000..56f22f9f9
--- /dev/null
+++ b/src/main/scala/sbt/internal/inc/classfile/BUILD.bazel
@@ -0,0 +1,15 @@
+load("//rules:scala.bzl", "scala_library")
+load("//rules:scalafmt.bzl", "scala_format_test")
+
+scala_library(
+ name = "classfile",
+ srcs = glob(["*.scala"]),
+ scala_toolchain_name = "annex_bootstrap_3",
+ visibility = ["//src/main/scala/higherkindness/rules_scala/workers/zinc:__subpackages__"],
+ deps = ["@annex//:org_scala_sbt_zinc_2_13"],
+)
+
+scala_format_test(
+ name = "format",
+ srcs = glob(["*.scala"]),
+)
diff --git a/src/main/scala/sbt/internal/inc/classfile/package.scala b/src/main/scala/sbt/internal/inc/classfile/package.scala
new file mode 100644
index 000000000..d195a4001
--- /dev/null
+++ b/src/main/scala/sbt/internal/inc/classfile/package.scala
@@ -0,0 +1,59 @@
+package sbt.internal.inc.classfile
+
+import java.io.File
+import java.nio.file.Path
+import sbt.internal.inc.classpath.ClasspathUtil
+import sbt.internal.inc.javac.DirectoryClassFinder
+import sbt.util.Logger
+import xsbti.compile.SingleOutput
+import xsbti.{AnalysisCallback, VirtualFile, VirtualFileRef}
+
+/**
+ * A public wrapper around `sbt.internal.inc.JavaAnalyze` that produces analysis information from class files generated
+ * by the Java compiler and is designed to be called outside of [[sbt.internal.inc.javac.AnalyzingJavaCompiler]]. It
+ * makes certain assumptions about the way in which Java code is compiled:
+ * - The output is a directory of class files, not a JAR file
+ *   - This method is called once per compilation unit, so all setup can be done within it; there is no benefit
+ *     to reusing components across calls
+ *
+ * Much of the setup in the implementation of this method is borrowed from
+ * [[sbt.internal.inc.javac.AnalyzingJavaCompiler]].
+ */
+def analyzeJavaClasses(
+ sources: Seq[VirtualFile],
+ classpath: Seq[Path],
+ outputDirectory: Path,
+ logger: Logger,
+ analysisCallback: AnalysisCallback,
+): Unit = {
+ val classFinder = new DirectoryClassFinder(outputDirectory)
+ val classes = classFinder.classes
+
+ try {
+ val output = new SingleOutput {
+ override def getOutputDirectory: File = outputDirectory.toFile
+ }
+
+ val classloader = ClasspathUtil.toLoader(outputDirectory +: classpath)
+
+ /**
+ * TODO: Make this more similar to the `readAPI` defined in [[sbt.internal.inc.javac.AnalyzingJavaCompiler]].
+ *
+     * This method is supposed to use [[sbt.internal.inc.ClassToAPI]] to analyze the list of provided classes and
+     * return a set of inheritance pairs (`subclass -> superclass`), but it currently does nothing because I can't
+     * get it to work with `ijar`. [[sbt.internal.inc.ClassToAPI]] attempts to load the methods of the classes
+     * provided to it, which results in an error like this:
+ *
+ * {{{
+ * java.lang.ClassFormatError: Absent Code attribute in method that is not native or abstract in class file ...
+ * }}}
+ *
+ * I'm not sure how this worked when we used Zinc instead of the compiler bridge directly.
+ */
+ def readAPI(source: VirtualFileRef, classes: Seq[Class[?]]): Set[(String, String)] = Set.empty
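+    // For reference, a sketch of what a working readAPI might look like, modeled
+    // on [[sbt.internal.inc.javac.AnalyzingJavaCompiler]] (an assumption, and the
+    // same ClassToAPI code path that currently fails on ijar output):
+    //   def readAPI(source: VirtualFileRef, classes: Seq[Class[?]]): Set[(String, String)] = {
+    //     val (apis, mainClasses, inherits) = ClassToAPI.process(classes)
+    //     apis.foreach(analysisCallback.api(source, _))
+    //     mainClasses.foreach(analysisCallback.mainClass(source, _))
+    //     inherits.map { case (from, to) => (from.getName, to.getName) }
+    //   }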
+
+ JavaAnalyze(classes.paths, sources, logger, output, finalJarOutput = None)(analysisCallback, classloader, readAPI)
+ } finally {
+ classes.close()
+ }
+}
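+
+// A minimal usage sketch (hypothetical caller, for illustration only): compile
+// the Java sources into `classesDir` first, then hand the class directory to
+// this wrapper so the analysis callback sees the compilation unit.
+//
+//   val sources: Seq[VirtualFile] = javaSourcePaths.map(PlainVirtualFile(_))
+//   analyzeJavaClasses(
+//     sources = sources,
+//     classpath = compileClasspath,
+//     outputDirectory = classesDir,
+//     logger = logger,
+//     analysisCallback = callback,
+//   )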
diff --git a/tests/compile/log_level/test b/tests/compile/log_level/test
index 1fc8397fe..0fbbba30f 100755
--- a/tests/compile/log_level/test
+++ b/tests/compile/log_level/test
@@ -6,4 +6,4 @@
# warning we want printed, printed. The alternative is to bazel clean, which
# takes much longer.
bazel shutdown
-bazel build :lib --nouse_action_cache |& grep "compiling 1 Scala source"
+bazel build :lib --nouse_action_cache |& grep "The Scala compiler is invoked with:"
diff --git a/tests/test-frameworks/mixed/BUILD b/tests/test-frameworks/mixed/BUILD
index 9df164264..17176dc20 100644
--- a/tests/test-frameworks/mixed/BUILD
+++ b/tests/test-frameworks/mixed/BUILD
@@ -6,11 +6,8 @@ scala_test(
scala_toolchain_name = "test_zinc_2_13",
shard_count = 2,
tags = ["manual"],
- runtime_deps = [
- "@annex_test//:com_novocode_junit_interface",
- "@annex_test//:org_hamcrest_hamcrest_core",
- ],
deps = [
+ "@annex_test//:com_novocode_junit_interface",
"@annex_test//:junit_junit",
"@annex_test//:org_scalacheck_scalacheck_2_13",
"@annex_test//:org_scalactic_scalactic_2_13",
@@ -29,11 +26,8 @@ scala_test(
scala_toolchain_name = "test_zinc_2_12",
shard_count = 2,
tags = ["manual"],
- runtime_deps = [
- "@annex_test//:com_novocode_junit_interface",
- "@annex_test//:org_hamcrest_hamcrest_core",
- ],
deps = [
+ "@annex_test//:com_novocode_junit_interface",
"@annex_test//:junit_junit",
"@annex_test_2_12//:org_scalacheck_scalacheck_2_12",
"@annex_test_2_12//:org_scalactic_scalactic_2_12",