diff --git a/3rdparty/maven.bzl b/3rdparty/maven.bzl
index 3636e32b7..51406527d 100644
--- a/3rdparty/maven.bzl
+++ b/3rdparty/maven.bzl
@@ -878,7 +878,10 @@ def list_dependencies():
},
"import_args": {
"default_visibility": ["//visibility:public"],
- "deps": ["@scala_annex_com_lihaoyi_fansi_2_12"],
+ "deps": [
+ "@scala_annex_com_lihaoyi_fansi_2_12",
+ "@scala_annex_com_lihaoyi_sourcecode_2_12",
+ ],
"jar_sha256": "2e18aa0884870537bf5c562255fc759d4ebe360882b5cb2141b30eda4034c71d",
"jar_urls": [
"http://central.maven.org/maven2/com/lihaoyi/pprint_2.12/0.5.3/pprint_2.12-0.5.3.jar",
@@ -895,6 +898,7 @@ def list_dependencies():
# duplicates in com.lihaoyi:sourcecode_2.12 promoted to 0.1.4
# - ch.epfl.scala:bloop-backend_2.12:1.0.0 wanted version 0.1.4
# - com.lihaoyi:fastparse_2.12:0.4.2 wanted version 0.1.3
+ # - com.lihaoyi:pprint_2.12:0.5.3 wanted version 0.1.4
{
"bind_args": {
"actual": "@scala_annex_com_lihaoyi_sourcecode_2_12",
@@ -2462,6 +2466,30 @@ def list_dependencies():
},
"lang": "java",
},
+ {
+ "bind_args": {
+ "actual": "@scala_annex_org_scalameta_semanticdb_scalac_2_12_7",
+ "name": "jar/scala_annex_org/scalameta/semanticdb_scalac_2_12_7",
+ },
+ "import_args": {
+ "default_visibility": ["//visibility:public"],
+ "deps": [
+ "@scala_annex_com_lihaoyi_pprint_2_12",
+ "@scala_annex_scala_2_12_scala_library//jar",
+ ],
+ "jar_sha256": "62be6eb517912026e8824f95533a1ed4ae7c886bab5d266ee39ca98dd416a4dc",
+ "jar_urls": [
+ "http://central.maven.org/maven2/org/scalameta/semanticdb-scalac_2.12.7/4.0.0/semanticdb-scalac_2.12.7-4.0.0.jar",
+ ],
+ "licenses": ["notice"],
+ "name": "scala_annex_org_scalameta_semanticdb_scalac_2_12_7",
+ "srcjar_sha256": "91970337ec5b6cc5ad0ae0162c452f1bb4a77bf1880644235dc8e62fa3dfd694",
+ "srcjar_urls": [
+ "http://central.maven.org/maven2/org/scalameta/semanticdb-scalac_2.12.7/4.0.0/semanticdb-scalac_2.12.7-4.0.0-sources.jar",
+ ],
+ },
+ "lang": "java",
+ },
{
"bind_args": {
"actual": "@scala_annex_org_scalatest_scalatest_2_12",
diff --git a/WORKSPACE b/WORKSPACE
index fc04fdd3a..d7f93b711 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -30,13 +30,13 @@ http_archive(
urls = ["https://github.com/google/protobuf/archive/0038ff49af882463c2af9049356eed7df45c3e8e.zip"],
)
-load("//rules/scala:workspace.bzl", "scala_repositories", "scala_register_toolchains")
+load("//rules/scala:workspace.bzl", "scala_register_toolchains", "scala_repositories")
scala_repositories()
scala_register_toolchains()
-load("//rules/scalafmt:workspace.bzl", "scalafmt_repositories", "scalafmt_default_config")
+load("//rules/scalafmt:workspace.bzl", "scalafmt_default_config", "scalafmt_repositories")
scalafmt_repositories()
@@ -44,8 +44,8 @@ scalafmt_default_config(".scalafmt.conf")
load(
"//rules/scala_proto:workspace.bzl",
- "scala_proto_repositories",
"scala_proto_register_toolchains",
+ "scala_proto_repositories",
)
scala_proto_repositories()
diff --git a/dependencies.yaml b/dependencies.yaml
index 1569a4993..7465bb4c0 100644
--- a/dependencies.yaml
+++ b/dependencies.yaml
@@ -19,6 +19,11 @@ dependencies:
modules: ["frontend"]
version: "1.0.0"
+ org.scalameta:
+ semanticdb-scalac_2.12.7:
+ lang: java
+ version: "4.0.0"
+
net.sourceforge.argparse4j:
argparse4j:
lang: java
diff --git a/docs/stardoc/providers.md b/docs/stardoc/providers.md
index cf1fe0fba..d8c69f776 100644
--- a/docs/stardoc/providers.md
+++ b/docs/stardoc/providers.md
@@ -220,6 +220,29 @@ Zinc configuration.
+
+## ScalaRulePhase
+
+Phases contributed by a Scala compiler plugin
+
+### Fields
+
+
+
+
+
+
+
+
+ phases |
+
+ the phases to add
+ |
+
+
+
+
+
## ZincInfo
diff --git a/rules/providers.bzl b/rules/providers.bzl
index 4a3e7c08a..2e586a6d4 100644
--- a/rules/providers.bzl
+++ b/rules/providers.bzl
@@ -60,6 +60,13 @@ ZincConfiguration = provider(
},
)
+ScalaRulePhase = provider(
+    doc = "Phases contributed by a Scala compiler plugin",
+ fields = {
+ "phases": "the phases to add",
+ },
+)
+
def _declare_zinc_configuration_implementation(ctx):
return [ZincConfiguration(
compiler_bridge = ctx.files.compiler_bridge,
diff --git a/rules/rules_scala/private/compat.bzl b/rules/rules_scala/private/compat.bzl
index f6303065b..dc889a79d 100644
--- a/rules/rules_scala/private/compat.bzl
+++ b/rules/rules_scala/private/compat.bzl
@@ -12,8 +12,8 @@ load(
)
load(
"//rules:scala.bzl",
- _scala_library = "scala_library",
_scala_binary = "scala_binary",
+ _scala_library = "scala_library",
_scala_test = "scala_test",
)
load(
diff --git a/rules/scala/BUILD b/rules/scala/BUILD
index 3ca0249a7..8ffec95d3 100644
--- a/rules/scala/BUILD
+++ b/rules/scala/BUILD
@@ -1,4 +1,4 @@
-load("//rules:scala.bzl", "scala_library", "scala_binary", "scala_deps_toolchain", "scala_runner_toolchain")
+load("//rules:scala.bzl", "scala_binary", "scala_deps_toolchain", "scala_library", "scala_runner_toolchain")
load("//rules:scalac.bzl", "scalac_binary", "scalac_library")
load("//rules:scalafmt.bzl", "scala_format_test")
diff --git a/rules/scala/deps/DepsRunner.scala b/rules/scala/deps/DepsRunner.scala
index ba5c9a282..d5c912e78 100644
--- a/rules/scala/deps/DepsRunner.scala
+++ b/rules/scala/deps/DepsRunner.scala
@@ -38,7 +38,7 @@ object DepsRunner extends SimpleMain {
parser
}
- protected[this] def work(args: Array[String]) = {
+ protected[this] def work(args: Array[String]): Unit = {
val namespace = argParser.parseArgs(args)
val label = namespace.getString("label").tail
@@ -75,11 +75,10 @@ object DepsRunner extends SimpleMain {
println(s"buildozer 'add deps $depLabel' $label")
}
- if (add.nonEmpty || remove.nonEmpty) {
- sys.exit(1)
+ if (add.isEmpty && remove.isEmpty) {
+ try Files.createFile(namespace.get[File]("success").toPath)
+ catch { case _: FileAlreadyExistsException => }
}
- try Files.createFile(namespace.get[File]("success").toPath)
- catch { case _: FileAlreadyExistsException => }
}
}
diff --git a/rules/scala/private/core.bzl b/rules/scala/private/core.bzl
index cf572a630..de73bb0b6 100644
--- a/rules/scala/private/core.bzl
+++ b/rules/scala/private/core.bzl
@@ -1,13 +1,61 @@
load(
- "@rules_scala_annex//rules:providers.bzl",
- _LabeledJars = "LabeledJars",
- _ScalaConfiguration = "ScalaConfiguration",
- _ScalaInfo = "ScalaInfo",
- _ZincConfiguration = "ZincConfiguration",
- _ZincInfo = "ZincInfo",
+ ":private/phases.bzl",
+ _phase_binary_deployjar = "phase_binary_deployjar",
+ _phase_binary_launcher = "phase_binary_launcher",
+ _phase_coda = "phase_coda",
+ _phase_compile = "phase_compile",
+ _phase_depscheck = "phase_depscheck",
+ _phase_ijinfo = "phase_ijinfo",
+ _phase_javainfo = "phase_javainfo",
+ _phase_library_defaultinfo = "phase_library_defaultinfo",
+ _phase_resources = "phase_resources",
+ _phase_singlejar = "phase_singlejar",
+ _phase_test_launcher = "phase_test_launcher",
+ _run_phases = "run_phases",
)
-load("//rules/common:private/utils.bzl", _collect = "collect", _write_launcher = "write_launcher")
-load(":private/import.bzl", _create_intellij_info = "create_intellij_info")
+
+scala_library_phases = [
+ ("javainfo", _phase_javainfo),
+ ("resources", _phase_resources),
+ ("compile", _phase_compile),
+ ("depscheck", _phase_depscheck),
+ ("singlejar", _phase_singlejar),
+ ("ijinfo", _phase_ijinfo),
+ ("library_defaultinfo", _phase_library_defaultinfo),
+ ("coda", _phase_coda),
+]
+
+scala_binary_phases = [
+ ("javainfo", _phase_javainfo),
+ ("resources", _phase_resources),
+ ("compile", _phase_compile),
+ ("depscheck", _phase_depscheck),
+ ("singlejar", _phase_singlejar),
+ ("ijinfo", _phase_ijinfo),
+ ("binary_deployjar", _phase_binary_deployjar),
+ ("binary_launcher", _phase_binary_launcher),
+ ("coda", _phase_coda),
+]
+
+scala_test_phases = [
+ ("javainfo", _phase_javainfo),
+ ("resources", _phase_resources),
+ ("compile", _phase_compile),
+ ("depscheck", _phase_depscheck),
+ ("singlejar", _phase_singlejar),
+ ("ijinfo", _phase_ijinfo),
+ ("test_launcher", _phase_test_launcher),
+ ("coda", _phase_coda),
+]
+
+def scala_library_implementation(ctx):
+ return _run_phases(ctx, scala_library_phases).coda
+
+def scala_binary_implementation(ctx):
+ return _run_phases(ctx, scala_binary_phases).coda
+
+def scala_test_implementation(ctx):
+ return _run_phases(ctx, scala_test_phases).coda
runner_common_attributes = {
"_java_toolchain": attr.label(
@@ -29,6 +77,8 @@ runner_common_attributes = {
),
}
+scala_library_private_attributes = runner_common_attributes
+
scala_binary_private_attributes = dict({
"_java": attr.label(
default = Label("@bazel_tools//tools/jdk:java"),
@@ -42,422 +92,3 @@ scala_binary_private_attributes = dict({
}, **runner_common_attributes)
scala_test_private_attributes = scala_binary_private_attributes
-
-_SINGLE_JAR_MNEMONIC = "SingleJar"
-
-def runner_common(ctx):
- runner = ctx.toolchains["@rules_scala_annex//rules/scala:runner_toolchain_type"]
-
- scala_configuration = ctx.attr.scala[_ScalaConfiguration]
- scala_configuration_runtime_deps = _collect(JavaInfo, scala_configuration.runtime_classpath)
-
- zinc_configuration = ctx.attr.scala[_ZincConfiguration]
-
- sdeps = java_common.merge(_collect(JavaInfo, scala_configuration.runtime_classpath + ctx.attr.deps))
- sruntime_deps = java_common.merge(_collect(JavaInfo, ctx.attr.runtime_deps))
- sexports = java_common.merge(_collect(JavaInfo, ctx.attr.exports))
- splugins = java_common.merge(_collect(JavaInfo, ctx.attr.plugins + scala_configuration.global_plugins))
-
- if len(ctx.attr.srcs) == 0:
- java_info = java_common.merge([sdeps, sexports])
- else:
- compile_jar = java_common.run_ijar(
- ctx.actions,
- jar = ctx.outputs.jar,
- target_label = ctx.label,
- java_toolchain = ctx.attr._java_toolchain,
- )
-
- source_jar = java_common.pack_sources(
- ctx.actions,
- output_jar = ctx.outputs.jar,
- sources = ctx.files.srcs,
- host_javabase = ctx.attr._host_javabase,
- java_toolchain = ctx.attr._java_toolchain,
- )
-
- java_info = JavaInfo(
- compile_jar = compile_jar,
- neverlink = getattr(ctx.attr, "neverlink", False),
- output_jar = ctx.outputs.jar,
- source_jar = source_jar,
- exports = [sexports],
- runtime_deps = [sruntime_deps] + scala_configuration_runtime_deps,
- deps = [sdeps],
- )
-
- apis = ctx.actions.declare_file("{}/apis.gz".format(ctx.label.name))
- infos = ctx.actions.declare_file("{}/infos.gz".format(ctx.label.name))
- mains_file = ctx.actions.declare_file("{}.jar.mains.txt".format(ctx.label.name))
- relations = ctx.actions.declare_file("{}/relations.gz".format(ctx.label.name))
- setup = ctx.actions.declare_file("{}/setup.gz".format(ctx.label.name))
- stamps = ctx.actions.declare_file("{}/stamps.gz".format(ctx.label.name))
- used = ctx.actions.declare_file("{}/deps_used.txt".format(ctx.label.name))
-
- macro_classpath = [
- dep[JavaInfo].transitive_runtime_jars
- for dep in ctx.attr.deps
- if _ScalaInfo in dep and dep[_ScalaInfo].macro
- ]
- compile_classpath = depset(order = "preorder", transitive = macro_classpath + [sdeps.transitive_compile_time_jars])
-
- zipper_inputs, _, zipper_manifests = ctx.resolve_command(tools = [ctx.attr._zipper])
-
- if ctx.files.resources:
- resource_jar = ctx.actions.declare_file("{}/resources.zip".format(ctx.label.name))
- args = ctx.actions.args()
- args.add("c", resource_jar)
- args.set_param_file_format("multiline")
- args.use_param_file("@%s")
- for file in ctx.files.resources:
- args.add("{}={}".format(_resource_path(file, ctx.attr.resource_strip_prefix), file.path))
- ctx.actions.run(
- arguments = [args],
- executable = ctx.executable._zipper,
- inputs = ctx.files.resources,
- input_manifests = zipper_manifests,
- outputs = [resource_jar],
- tools = zipper_inputs,
- )
- else:
- resource_jar = None
-
- class_jar = ctx.actions.declare_file("{}/classes.jar".format(ctx.label.name))
-
- srcs = [file for file in ctx.files.srcs if file.extension.lower() in ["java", "scala"]]
- src_jars = [file for file in ctx.files.srcs if file.extension.lower() in ["srcjar"]]
-
- tmp = ctx.actions.declare_directory("{}/tmp".format(ctx.label.name))
-
- javacopts = [ctx.expand_location(option, ctx.attr.data) for option in ctx.attr.javacopts + java_common.default_javac_opts(ctx, java_toolchain_attr = "_java_toolchain")]
-
- zincs = [dep[_ZincInfo] for dep in ctx.attr.deps if _ZincInfo in dep]
-
- args = ctx.actions.args()
- args.add_all(depset(transitive = [zinc.deps for zinc in zincs]), map_each = _analysis)
- args.add("--compiler_bridge", zinc_configuration.compiler_bridge)
- args.add_all("--compiler_classpath", scala_configuration.compiler_classpath)
- args.add_all("--classpath", compile_classpath)
- args.add_all(runner.scalacopts + ctx.attr.scalacopts, format_each = "--compiler_option=%s")
- args.add_all(javacopts, format_each = "--java_compiler_option=%s")
- args.add(ctx.label, format = "--label=%s")
- args.add("--main_manifest", mains_file)
- args.add("--output_apis", apis)
- args.add("--output_infos", infos)
- args.add("--output_jar", class_jar)
- args.add("--output_relations", relations)
- args.add("--output_setup", setup)
- args.add("--output_stamps", stamps)
- args.add("--output_used", used)
- args.add_all("--plugins", splugins.transitive_runtime_deps)
- args.add_all("--source_jars", src_jars)
- args.add("--tmp", tmp.path)
- args.add_all("--", srcs)
- args.set_param_file_format("multiline")
- args.use_param_file("@%s", use_always = True)
-
- runner_inputs, _, input_manifests = ctx.resolve_command(tools = [runner.runner])
- inputs = depset(
- [zinc_configuration.compiler_bridge] + scala_configuration.compiler_classpath + ctx.files.data + ctx.files.srcs + runner_inputs,
- transitive = [
- splugins.transitive_runtime_deps,
- compile_classpath,
- ] + [zinc.deps_files for zinc in zincs],
- )
-
- outputs = [class_jar, mains_file, apis, infos, relations, setup, stamps, used, tmp]
-
- # todo: different execution path for nosrc jar?
- ctx.actions.run(
- mnemonic = "ScalaCompile",
- inputs = inputs,
- outputs = outputs,
- executable = runner.runner.files_to_run.executable,
- input_manifests = input_manifests,
- execution_requirements = {"no-sandbox": "1", "supports-workers": "1"},
- arguments = [args],
- )
-
- files = [ctx.outputs.jar]
-
- deps_toolchain = ctx.toolchains["@rules_scala_annex//rules/scala:deps_toolchain_type"]
- deps_checks = {}
- labeled_jars = depset(transitive = [dep[_LabeledJars].values for dep in ctx.attr.deps])
- deps_inputs, _, deps_input_manifests = ctx.resolve_command(tools = [deps_toolchain.runner])
- for name in ("direct", "used"):
- deps_check = ctx.actions.declare_file("{}/deps_check_{}".format(ctx.label.name, name))
- deps_args = ctx.actions.args()
- deps_args.add(name, format = "--check_%s=true")
- deps_args.add_all("--direct", [dep.label for dep in ctx.attr.deps], format_each = "_%s")
- deps_args.add_all(labeled_jars, map_each = _labeled_group)
- deps_args.add("--label", ctx.label, format = "_%s")
- deps_args.add_all("--whitelist", [dep.label for dep in ctx.attr.deps_used_whitelist], format_each = "_%s")
- deps_args.add("--")
- deps_args.add(used)
- deps_args.add(deps_check)
- deps_args.set_param_file_format("multiline")
- deps_args.use_param_file("@%s", use_always = True)
- ctx.actions.run(
- mnemonic = "ScalaCheckDeps",
- inputs = [used] + deps_inputs,
- outputs = [deps_check],
- executable = deps_toolchain.runner.files_to_run.executable,
- input_manifests = deps_input_manifests,
- execution_requirements = {"supports-workers": "1"},
- arguments = [deps_args],
- )
- deps_checks[name] = deps_check
-
- inputs = [class_jar] + ctx.files.resource_jars
- args = ctx.actions.args()
- args.add("--exclude_build_data")
- args.add("--normalize")
- args.add("--sources", class_jar)
- if resource_jar:
- args.add("--sources", resource_jar)
- inputs.append(resource_jar)
- for file in [f for f in ctx.files.resource_jars if f.extension.lower() in ["jar"]]:
- args.add("--sources")
- args.add(file)
- args.add("--output", ctx.outputs.jar)
- args.add("--warn_duplicate_resources")
- args.set_param_file_format("multiline")
- args.use_param_file("@%s", use_always = True)
- if deps_toolchain.direct == "error":
- inputs.append(deps_checks["direct"])
- if deps_toolchain.used == "error":
- inputs.append(deps_checks["used"])
- ctx.actions.run(
- arguments = [args],
- executable = ctx.executable._singlejar,
- execution_requirements = {"supports-workers": "1"},
- mnemonic = _SINGLE_JAR_MNEMONIC,
- inputs = inputs,
- outputs = [ctx.outputs.jar],
- )
-
- jars = []
- for jar in java_info.outputs.jars:
- jars.append(jar.class_jar)
- jars.append(jar.ijar)
- zinc_info = _ZincInfo(
- apis = apis,
- deps_files = depset([apis, relations], transitive = [zinc.deps_files for zinc in zincs]),
- label = ctx.label,
- relations = relations,
- deps = depset(
- [struct(
- apis = apis,
- jars = jars,
- label = ctx.label,
- relations = relations,
- )],
- transitive = [zinc.deps for zinc in zincs],
- ),
- )
-
- deps_check = []
- if deps_toolchain.direct != "off":
- deps_check.append(deps_checks["direct"])
- if deps_toolchain.used != "off":
- deps_check.append(deps_checks["used"])
-
- return struct(
- deps_check = deps_check,
- files = depset(files),
- intellij_info = _create_intellij_info(ctx.label, ctx.attr.deps, java_info),
- java_info = java_info,
- mains_files = depset([mains_file]),
- scala_info = _ScalaInfo(macro = ctx.attr.macro, scala_configuration = scala_configuration),
- zinc_info = zinc_info,
- )
-
-scala_library_private_attributes = runner_common_attributes
-
-def scala_library_implementation(ctx):
- res = runner_common(ctx)
- return struct(
- java = res.intellij_info,
- providers = [
- res.java_info,
- res.scala_info,
- res.zinc_info,
- res.intellij_info,
- DefaultInfo(
- files = res.files,
- ),
- OutputGroupInfo(
- # analysis = depset([res.zinc_info.analysis, res.zinc_info.apis]),
- deps = depset(res.deps_check),
- ),
- ],
- )
-
-def _analysis(analysis):
- return (["--analysis", "_{}".format(analysis.label), analysis.apis.path, analysis.relations.path] + [jar.path for jar in analysis.jars])
-
-def _labeled_group(labeled_jars):
- return (["--group", "_{}".format(labeled_jars.label)] + [jar.path for jar in labeled_jars.jars.to_list()])
-
-def _resource_path(file, strip_prefix):
- if strip_prefix:
- if not file.short_path.startswith(strip_prefix):
- fail("{} does not have prefix {}".format(file.short_path, strip_prefix))
- return file.short_path[len(strip_prefix) + 1 - int(file.short_path.endswith("/")):]
- conventional = [
- "src/main/resources/",
- "src/test/resources/",
- ]
- for path in conventional:
- dir1, dir2, rest = file.short_path.partition(path)
- if rest:
- return rest
- return file.short_path
-
-def _build_deployable(ctx, jars_list):
- # This calls bazels singlejar utility.
- # For a full list of available command line options see:
- # https://github.com/bazelbuild/bazel/blob/master/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/SingleJar.java#L311
- args = ctx.actions.args()
- args.add("--normalize")
- args.add("--compression")
- args.add("--sources")
- args.add_all([j.path for j in jars_list])
- if getattr(ctx.attr, "main_class", ""):
- args.add_all(["--main_class", ctx.attr.main_class])
- args.add_all(["--output", ctx.outputs.deploy_jar.path])
-
- ctx.actions.run(
- inputs = jars_list,
- outputs = [ctx.outputs.deploy_jar],
- executable = ctx.executable._singlejar,
- execution_requirements = {"supports-workers": "1"},
- mnemonic = _SINGLE_JAR_MNEMONIC,
- progress_message = "scala deployable %s" % ctx.label,
- arguments = [args],
- )
-
-def scala_binary_implementation(ctx):
- res = runner_common(ctx)
-
- # this is all super sketchy...
- # for the time being
-
- java_info = res.java_info
- mains_file = res.mains_files.to_list()[0]
-
- transitive_rjars = res.java_info.transitive_runtime_jars
- rjars = depset([ctx.outputs.jar], transitive = [transitive_rjars])
- _build_deployable(ctx, rjars.to_list())
-
- files = _write_launcher(
- ctx,
- "{}/".format(ctx.label.name),
- ctx.outputs.bin,
- java_info.transitive_runtime_deps,
- jvm_flags = [ctx.expand_location(f, ctx.attr.data) for f in ctx.attr.jvm_flags],
- main_class = ctx.attr.main_class or "$(head -1 $JAVA_RUNFILES/{}/{})".format(ctx.workspace_name, mains_file.short_path),
- )
-
- return struct(
- java = res.intellij_info,
- providers = [
- res.java_info,
- res.scala_info,
- res.zinc_info,
- res.intellij_info,
- DefaultInfo(
- executable = ctx.outputs.bin,
- files = depset([ctx.outputs.bin], transitive = [res.files]),
- runfiles = ctx.runfiles(
- files = files + ctx.files.data + [mains_file],
- transitive_files = depset(
- direct = [ctx.executable._java],
- order = "default",
- transitive = [java_info.transitive_runtime_deps],
- ),
- collect_default = True,
- ),
- ),
- OutputGroupInfo(
- deps_check = depset(res.deps_check),
- ),
- ],
- )
-
-def scala_test_implementation(ctx):
- res = runner_common(ctx)
-
- files = ctx.files._java + [res.zinc_info.apis]
-
- test_jars = res.java_info.transitive_runtime_deps
- runner_jars = ctx.attr.runner[JavaInfo].transitive_runtime_deps
- all_jars = [test_jars, runner_jars]
-
- args = ctx.actions.args()
- args.add("--apis", res.zinc_info.apis.short_path)
- args.add_all("--frameworks", ctx.attr.frameworks)
- if ctx.attr.isolation == "classloader":
- shared_deps = java_common.merge(_collect(JavaInfo, ctx.attr.shared_deps))
- args.add("--isolation", "classloader")
- args.add_all("--shared_classpath", shared_deps.transitive_runtime_deps, map_each = _short_path)
- elif ctx.attr.isolation == "process":
- subprocess_executable = ctx.actions.declare_file("{}/subprocess".format(ctx.label.name))
- subprocess_runner_jars = ctx.attr.subprocess_runner[JavaInfo].transitive_runtime_deps
- all_jars.append(subprocess_runner_jars)
- files += _write_launcher(
- ctx,
- "{}/subprocess-".format(ctx.label.name),
- subprocess_executable,
- subprocess_runner_jars,
- "annex.SubprocessTestRunner",
- [ctx.expand_location(f, ctx.attr.data) for f in ctx.attr.jvm_flags],
- )
- files.append(subprocess_executable)
- args.add("--isolation", "process")
- args.add("--subprocess_exec", subprocess_executable.short_path)
- args.add_all("--", res.java_info.transitive_runtime_jars, map_each = _short_path)
- args.set_param_file_format("multiline")
- args_file = ctx.actions.declare_file("{}/test.params".format(ctx.label.name))
- ctx.actions.write(args_file, args)
- files.append(args_file)
-
- files += _write_launcher(
- ctx,
- "{}/".format(ctx.label.name),
- ctx.outputs.bin,
- runner_jars,
- "annex.TestRunner",
- [ctx.expand_location(f, ctx.attr.data) for f in ctx.attr.jvm_flags] + [
- "-Dbazel.runPath=$RUNPATH",
- "-DscalaAnnex.test.args=${{RUNPATH}}{}".format(args_file.short_path),
- ],
- )
-
- test_info = DefaultInfo(
- executable = ctx.outputs.bin,
- files = res.files,
- runfiles = ctx.runfiles(
- collect_data = True,
- collect_default = True,
- files = files,
- transitive_files = depset([], transitive = all_jars),
- ),
- )
- return struct(
- java = res.intellij_info,
- providers = [
- res.java_info,
- res.scala_info,
- res.zinc_info,
- res.intellij_info,
- test_info,
- OutputGroupInfo(
- # analysis = depset([res.zinc_info.analysis, res.zinc_info.apis]),
- deps_check = depset(res.deps_check),
- ),
- ],
- )
-
-def _short_path(file):
- return file.short_path
diff --git a/rules/scala/private/phases.bzl b/rules/scala/private/phases.bzl
new file mode 100644
index 000000000..161ad5db6
--- /dev/null
+++ b/rules/scala/private/phases.bzl
@@ -0,0 +1,552 @@
+load(
+ "@rules_scala_annex//rules:providers.bzl",
+ _LabeledJars = "LabeledJars",
+ _ScalaConfiguration = "ScalaConfiguration",
+ _ScalaInfo = "ScalaInfo",
+ _ScalaRulePhase = "ScalaRulePhase",
+ _ZincConfiguration = "ZincConfiguration",
+ _ZincInfo = "ZincInfo",
+)
+load("//rules/common:private/utils.bzl", _collect = "collect", _write_launcher = "write_launcher")
+load(":private/import.bzl", _create_intellij_info = "create_intellij_info")
+
+def run_phases(ctx, phases):
+ scala_configuration = ctx.attr.scala[_ScalaConfiguration]
+ sdeps = java_common.merge(_collect(JavaInfo, scala_configuration.runtime_classpath + ctx.attr.deps))
+ init = struct(
+ scala_configuration = scala_configuration,
+ scalacopts = ctx.attr.scalacopts[:],
+ # todo: probably can remove this from init
+ sdeps = sdeps,
+ )
+
+ phase_providers = [p[_ScalaRulePhase] for p in ctx.attr.plugins if _ScalaRulePhase in p]
+ if phase_providers != []:
+ phases = phases[:]
+
+ for pp in phase_providers:
+ for (relation, peer_name, name, function) in pp.phases:
+ for idx, (needle, _) in enumerate(phases):
+ if needle == peer_name:
+ if relation in ["-", "before"]:
+ phases.insert(idx, (name, function))
+ elif relation in ["+", "after"]:
+ phases.insert(idx + 1, (name, function))
+
+ gd = {
+ "init": init,
+ "out": struct(
+ output_groups = {},
+ providers = [],
+ ),
+ }
+ g = struct(**gd)
+ for (name, function) in phases:
+ p = function(ctx, g)
+ if p != None:
+ gd[name] = p
+ g = struct(**gd)
+
+ return g
+
+_SINGLE_JAR_MNEMONIC = "SingleJar"
+
+#
+# PHASE: resources
+#
+# Resource files are merged into a zip archive.
+#
+# The output is returned in the jar field so the singlejar
+# phase will merge it into the final jar.
+#
+
+def phase_resources(ctx, g):
+ zipper_inputs, _, zipper_manifests = ctx.resolve_command(tools = [ctx.attr._zipper])
+
+ if ctx.files.resources:
+ jar = ctx.actions.declare_file("{}/resources.zip".format(ctx.label.name))
+ args = ctx.actions.args()
+ args.add("c", jar)
+ args.set_param_file_format("multiline")
+ args.use_param_file("@%s")
+ for file in ctx.files.resources:
+ args.add("{}={}".format(_resources_make_path(file, ctx.attr.resource_strip_prefix), file.path))
+ ctx.actions.run(
+ arguments = [args],
+ executable = ctx.executable._zipper,
+ inputs = ctx.files.resources,
+ input_manifests = zipper_manifests,
+ outputs = [jar],
+ tools = zipper_inputs,
+ )
+ return struct(jar = jar)
+ else:
+ return struct()
+
+def _resources_make_path(file, strip_prefix):
+ if strip_prefix:
+ if not file.short_path.startswith(strip_prefix):
+ fail("{} does not have prefix {}".format(file.short_path, strip_prefix))
+ return file.short_path[len(strip_prefix) + 1 - int(file.short_path.endswith("/")):]
+ conventional = [
+ "src/main/resources/",
+ "src/test/resources/",
+ ]
+ for path in conventional:
+ dir1, dir2, rest = file.short_path.partition(path)
+ if rest:
+ return rest
+ return file.short_path
+
+#
+# PHASE: compile
+#
+# Compiles Scala sources ;)
+#
+
+def phase_compile(ctx, g):
+ runner = ctx.toolchains["@rules_scala_annex//rules/scala:runner_toolchain_type"]
+ class_jar = ctx.actions.declare_file("{}/classes.jar".format(ctx.label.name))
+
+ splugins = java_common.merge(_collect(JavaInfo, ctx.attr.plugins + g.init.scala_configuration.global_plugins))
+
+    # print(splugins.transitive_runtime_deps.to_list())  # FIXME(review): leftover debug print — remove before merge
+
+ zinc_configuration = ctx.attr.scala[_ZincConfiguration]
+
+ srcs = [file for file in ctx.files.srcs if file.extension.lower() in ["java", "scala"]]
+ src_jars = [file for file in ctx.files.srcs if file.extension.lower() in ["srcjar"]]
+
+ apis = ctx.actions.declare_file("{}/apis.gz".format(ctx.label.name))
+ infos = ctx.actions.declare_file("{}/infos.gz".format(ctx.label.name))
+ mains_file = ctx.actions.declare_file("{}.jar.mains.txt".format(ctx.label.name))
+ relations = ctx.actions.declare_file("{}/relations.gz".format(ctx.label.name))
+ setup = ctx.actions.declare_file("{}/setup.gz".format(ctx.label.name))
+ stamps = ctx.actions.declare_file("{}/stamps.gz".format(ctx.label.name))
+ used = ctx.actions.declare_file("{}/deps_used.txt".format(ctx.label.name))
+
+ macro_classpath = [
+ dep[JavaInfo].transitive_runtime_jars
+ for dep in ctx.attr.deps
+ if _ScalaInfo in dep and dep[_ScalaInfo].macro
+ ]
+ compile_classpath = depset(order = "preorder", transitive = macro_classpath + [g.init.sdeps.transitive_compile_time_jars])
+
+ tmp = ctx.actions.declare_directory("{}/tmp".format(ctx.label.name))
+
+ javacopts = [
+ ctx.expand_location(option, ctx.attr.data)
+ for option in ctx.attr.javacopts + java_common.default_javac_opts(ctx, java_toolchain_attr = "_java_toolchain")
+ ]
+
+ zincs = [dep[_ZincInfo] for dep in ctx.attr.deps if _ZincInfo in dep]
+
+ scalacopts = runner.scalacopts + g.init.scalacopts
+ args = ctx.actions.args()
+ args.add_all(depset(transitive = [zinc.deps for zinc in zincs]), map_each = _compile_analysis)
+ args.add("--compiler_bridge", zinc_configuration.compiler_bridge)
+ args.add_all("--compiler_classpath", g.init.scala_configuration.compiler_classpath)
+ args.add_all("--classpath", compile_classpath)
+ args.add_all(scalacopts, format_each = "--compiler_option=%s")
+ args.add_all(javacopts, format_each = "--java_compiler_option=%s")
+ args.add(ctx.label, format = "--label=%s")
+ args.add("--main_manifest", mains_file)
+ args.add("--output_apis", apis)
+ args.add("--output_infos", infos)
+ args.add("--output_jar", class_jar)
+ args.add("--output_relations", relations)
+ args.add("--output_setup", setup)
+ args.add("--output_stamps", stamps)
+ args.add("--output_used", used)
+ args.add_all("--plugins", splugins.transitive_runtime_deps)
+ args.add_all("--source_jars", src_jars)
+ args.add("--tmp", tmp.path)
+ args.add_all("--", srcs)
+ args.set_param_file_format("multiline")
+ args.use_param_file("@%s", use_always = True)
+
+ runner_inputs, _, input_manifests = ctx.resolve_command(tools = [runner.runner])
+ inputs = depset(
+ [zinc_configuration.compiler_bridge] + g.init.scala_configuration.compiler_classpath + ctx.files.data + ctx.files.srcs + runner_inputs,
+ transitive = [
+ splugins.transitive_runtime_deps,
+ compile_classpath,
+ ] + [zinc.deps_files for zinc in zincs],
+ )
+
+ outputs = [class_jar, mains_file, apis, infos, relations, setup, stamps, used, tmp]
+
+ # todo: different execution path for nosrc jar?
+ ctx.actions.run(
+ mnemonic = "ScalaCompile",
+ inputs = inputs,
+ outputs = outputs,
+ executable = runner.runner.files_to_run.executable,
+ input_manifests = input_manifests,
+ execution_requirements = {"no-sandbox": "1", "supports-workers": "1"},
+ arguments = [args],
+ )
+
+ jars = []
+ for jar in g.javainfo.java_info.outputs.jars:
+ jars.append(jar.class_jar)
+ jars.append(jar.ijar)
+ zinc_info = _ZincInfo(
+ apis = apis,
+ deps_files = depset([apis, relations], transitive = [zinc.deps_files for zinc in zincs]),
+ label = ctx.label,
+ relations = relations,
+ deps = depset(
+ [struct(
+ apis = apis,
+ jars = jars,
+ label = ctx.label,
+ relations = relations,
+ )],
+ transitive = [zinc.deps for zinc in zincs],
+ ),
+ )
+
+ g.out.providers.append(zinc_info)
+ return struct(
+ jar = class_jar,
+ mains_file = mains_file,
+ used = used,
+ # todo: see about cleaning up & generalizing fields below
+ zinc_info = zinc_info,
+ )
+
+def _compile_analysis(analysis):
+ return [
+ "--analysis",
+ "_{}".format(analysis.label),
+ analysis.apis.path,
+ analysis.relations.path,
+ ] + [jar.path for jar in analysis.jars]
+
+#
+# PHASE: depscheck
+# Dependencies are checked to see if they are used/unused.
+# Success files are written if dependency checking was "successful"
+# according to the configuration/options.
+
+def phase_depscheck(ctx, g):
+ deps_toolchain = ctx.toolchains["@rules_scala_annex//rules/scala:deps_toolchain_type"]
+ deps_checks = {}
+ labeled_jars = depset(transitive = [dep[_LabeledJars].values for dep in ctx.attr.deps])
+ deps_inputs, _, deps_input_manifests = ctx.resolve_command(tools = [deps_toolchain.runner])
+ for name in ("direct", "used"):
+ deps_check = ctx.actions.declare_file("{}/depscheck_{}.success".format(ctx.label.name, name))
+ deps_args = ctx.actions.args()
+ deps_args.add(name, format = "--check_%s=true")
+ deps_args.add_all("--direct", [dep.label for dep in ctx.attr.deps], format_each = "_%s")
+ deps_args.add_all(labeled_jars, map_each = _depscheck_labeled_group)
+ deps_args.add("--label", ctx.label, format = "_%s")
+ deps_args.add_all("--whitelist", [dep.label for dep in ctx.attr.deps_used_whitelist], format_each = "_%s")
+ deps_args.add("--")
+ deps_args.add(g.compile.used)
+ deps_args.add(deps_check)
+ deps_args.set_param_file_format("multiline")
+ deps_args.use_param_file("@%s", use_always = True)
+ ctx.actions.run(
+ mnemonic = "ScalaCheckDeps",
+ inputs = [g.compile.used] + deps_inputs,
+ outputs = [deps_check],
+ executable = deps_toolchain.runner.files_to_run.executable,
+ input_manifests = deps_input_manifests,
+ execution_requirements = {"supports-workers": "1"},
+ arguments = [deps_args],
+ )
+ deps_checks[name] = deps_check
+
+ outputs = []
+
+ if deps_toolchain.direct == "error":
+ outputs.append(deps_checks["direct"])
+ if deps_toolchain.used == "error":
+ outputs.append(deps_checks["used"])
+
+ g.out.output_groups["depscheck"] = depset(outputs)
+
+ return struct(
+ checks = deps_checks,
+ outputs = outputs,
+ toolchain = deps_toolchain,
+ )
+
+def _depscheck_labeled_group(labeled_jars):
+ return (["--group", "_{}".format(labeled_jars.label)] + [jar.path for jar in labeled_jars.jars.to_list()])
+
+#
+# PHASE: singlejar
+#
+# Creates a single jar output from any resource jars as well
+# as any jar entries from previous phases. The output is
+# written to ctx.outputs.jar.
+#
+# Additionally, this phase checks for missing outputs from previous
+# phases. This allows phases to error, cleanly, by declaring a file
+# in the outputs field but _without_ actually creating it.
+#
+
+def phase_singlejar(ctx, g):
+ inputs = [g.compile.jar] + ctx.files.resource_jars
+ args = ctx.actions.args()
+ args.add("--exclude_build_data")
+ args.add("--normalize")
+
+ for v in [getattr(g, k) for k in dir(g) if k not in ["to_json", "to_proto"]]:
+ if hasattr(v, "jar"):
+ jar = getattr(v, "jar")
+ args.add("--sources", jar)
+ inputs.append(jar)
+ if hasattr(v, "outputs"):
+ # Declare all phase outputs as inputs but _don't_ include them in the args
+ # for singlejar to process. This will cause the build to fail, cleanly, if
+ # any declared outputs are missing from previous phases.
+ inputs.extend(getattr(v, "outputs"))
+
+ for file in [f for f in ctx.files.resource_jars if f.extension.lower() in ["jar"]]:
+ args.add("--sources")
+ args.add(file)
+
+ args.add("--output", ctx.outputs.jar)
+ args.add("--warn_duplicate_resources")
+ args.set_param_file_format("multiline")
+ args.use_param_file("@%s", use_always = True)
+
+ ctx.actions.run(
+ arguments = [args],
+ executable = ctx.executable._singlejar,
+ execution_requirements = {"supports-workers": "1"},
+ mnemonic = _SINGLE_JAR_MNEMONIC,
+ inputs = inputs, # TODO: build up inputs as a depset
+ outputs = [ctx.outputs.jar],
+ )
+
+#
+# PHASE: javainfo
+#
+# Builds up the JavaInfo provider. And the ScalaInfo, while we're at it.
+# And DefaultInfo.
+#
+
+def phase_javainfo(ctx, g):
+ sruntime_deps = java_common.merge(_collect(JavaInfo, ctx.attr.runtime_deps))
+ sexports = java_common.merge(_collect(JavaInfo, ctx.attr.exports))
+ scala_configuration_runtime_deps = _collect(JavaInfo, g.init.scala_configuration.runtime_classpath)
+
+ if len(ctx.attr.srcs) == 0:
+ java_info = java_common.merge([g.init.sdeps, sexports])
+ else:
+ compile_jar = java_common.run_ijar(
+ ctx.actions,
+ jar = ctx.outputs.jar,
+ target_label = ctx.label,
+ java_toolchain = ctx.attr._java_toolchain,
+ )
+
+ source_jar = java_common.pack_sources(
+ ctx.actions,
+ output_jar = ctx.outputs.jar,
+ sources = ctx.files.srcs,
+ host_javabase = ctx.attr._host_javabase,
+ java_toolchain = ctx.attr._java_toolchain,
+ )
+
+ java_info = JavaInfo(
+ compile_jar = compile_jar,
+ neverlink = getattr(ctx.attr, "neverlink", False),
+ output_jar = ctx.outputs.jar,
+ source_jar = source_jar,
+ exports = [sexports],
+ runtime_deps = [sruntime_deps] + scala_configuration_runtime_deps,
+ deps = [g.init.sdeps],
+ )
+
+ scala_info = _ScalaInfo(
+ macro = ctx.attr.macro,
+ scala_configuration = g.init.scala_configuration,
+ )
+
+ output_group_info = OutputGroupInfo(
+ **g.out.output_groups
+ )
+
+ g.out.providers.extend([
+ output_group_info,
+ java_info,
+ scala_info,
+ ])
+
+ return struct(
+ java_info = java_info,
+ output_group_info = output_group_info,
+ scala_info = scala_info,
+ )
+
+#
+# PHASE: ijinfo
+#
+# Creates IntelliJ info
+#
+
+def phase_ijinfo(ctx, g):
+ intellij_info = _create_intellij_info(ctx.label, ctx.attr.deps, g.javainfo.java_info)
+ g.out.providers.append(intellij_info)
+ return struct(intellij_info = intellij_info)
+
+#
+# PHASE: library_defaultinfo
+#
+# Creates DefaultInfo for Scala libraries
+#
+
+def phase_library_defaultinfo(ctx, g):
+ g.out.providers.append(DefaultInfo(
+ files = depset([ctx.outputs.jar]),
+ ))
+
+#
+# PHASE: binary_deployjar
+#
+# Writes the optional deploy jar that includes all of the dependencies
+#
+
+def phase_binary_deployjar(ctx, g):
+ transitive_rjars = g.javainfo.java_info.transitive_runtime_jars
+ rjars = depset([ctx.outputs.jar], transitive = [transitive_rjars])
+ _binary_deployjar_build_deployable(ctx, rjars.to_list())
+
+def _binary_deployjar_build_deployable(ctx, jars_list):
+ # This calls bazels singlejar utility.
+ # For a full list of available command line options see:
+ # https://github.com/bazelbuild/bazel/blob/master/src/java_tools/singlejar/java/com/google/devtools/build/singlejar/SingleJar.java#L311
+ args = ctx.actions.args()
+ args.add("--normalize")
+ args.add("--compression")
+ args.add("--sources")
+ args.add_all([j.path for j in jars_list])
+ if getattr(ctx.attr, "main_class", ""):
+ args.add_all(["--main_class", ctx.attr.main_class])
+ args.add_all(["--output", ctx.outputs.deploy_jar.path])
+
+ ctx.actions.run(
+ inputs = jars_list,
+ outputs = [ctx.outputs.deploy_jar],
+ executable = ctx.executable._singlejar,
+ execution_requirements = {"supports-workers": "1"},
+ mnemonic = _SINGLE_JAR_MNEMONIC,
+ progress_message = "scala deployable %s" % ctx.label,
+ arguments = [args],
+ )
+
+#
+# PHASE: binary_launcher
+#
+# Writes a Scala binary launcher
+#
+
+def phase_binary_launcher(ctx, g):
+ mains_file = g.compile.mains_file
+ files = _write_launcher(
+ ctx,
+ "{}/".format(ctx.label.name),
+ ctx.outputs.bin,
+ g.javainfo.java_info.transitive_runtime_deps,
+ jvm_flags = [ctx.expand_location(f, ctx.attr.data) for f in ctx.attr.jvm_flags],
+ main_class = ctx.attr.main_class or "$(head -1 $JAVA_RUNFILES/{}/{})".format(ctx.workspace_name, mains_file.short_path),
+ )
+
+ g.out.providers.append(DefaultInfo(
+ executable = ctx.outputs.bin,
+ files = depset([ctx.outputs.bin, ctx.outputs.jar]),
+ runfiles = ctx.runfiles(
+ files = files + ctx.files.data + [mains_file],
+ transitive_files = depset(
+ direct = [ctx.executable._java],
+ order = "default",
+ transitive = [g.javainfo.java_info.transitive_runtime_deps],
+ ),
+ collect_default = True,
+ ),
+ ))
+
+#
+# PHASE: test_launcher
+#
+# Writes a Scala test launcher
+#
+
+def phase_test_launcher(ctx, g):
+ files = ctx.files._java + [g.compile.zinc_info.apis]
+
+ test_jars = g.javainfo.java_info.transitive_runtime_deps
+ runner_jars = ctx.attr.runner[JavaInfo].transitive_runtime_deps
+ all_jars = [test_jars, runner_jars]
+
+ args = ctx.actions.args()
+ args.add("--apis", g.compile.zinc_info.apis.short_path)
+ args.add_all("--frameworks", ctx.attr.frameworks)
+ if ctx.attr.isolation == "classloader":
+ shared_deps = java_common.merge(_collect(JavaInfo, ctx.attr.shared_deps))
+ args.add("--isolation", "classloader")
+ args.add_all("--shared_classpath", shared_deps.transitive_runtime_deps, map_each = _test_launcher_short_path)
+ elif ctx.attr.isolation == "process":
+ subprocess_executable = ctx.actions.declare_file("{}/subprocess".format(ctx.label.name))
+ subprocess_runner_jars = ctx.attr.subprocess_runner[JavaInfo].transitive_runtime_deps
+ all_jars.append(subprocess_runner_jars)
+ files += _write_launcher(
+ ctx,
+ "{}/subprocess-".format(ctx.label.name),
+ subprocess_executable,
+ subprocess_runner_jars,
+ "annex.SubprocessTestRunner",
+ [ctx.expand_location(f, ctx.attr.data) for f in ctx.attr.jvm_flags],
+ )
+ files.append(subprocess_executable)
+ args.add("--isolation", "process")
+ args.add("--subprocess_exec", subprocess_executable.short_path)
+ args.add_all("--", g.javainfo.java_info.transitive_runtime_jars, map_each = _test_launcher_short_path)
+ args.set_param_file_format("multiline")
+ args_file = ctx.actions.declare_file("{}/test.params".format(ctx.label.name))
+ ctx.actions.write(args_file, args)
+ files.append(args_file)
+
+ files += _write_launcher(
+ ctx,
+ "{}/".format(ctx.label.name),
+ ctx.outputs.bin,
+ runner_jars,
+ "annex.TestRunner",
+ [ctx.expand_location(f, ctx.attr.data) for f in ctx.attr.jvm_flags] + [
+ "-Dbazel.runPath=$RUNPATH",
+ "-DscalaAnnex.test.args=${{RUNPATH}}{}".format(args_file.short_path),
+ ],
+ )
+
+ g.out.providers.append(DefaultInfo(
+ executable = ctx.outputs.bin,
+ files = depset([ctx.outputs.jar]),
+ runfiles = ctx.runfiles(
+ collect_data = True,
+ collect_default = True,
+ files = files,
+ transitive_files = depset([], transitive = all_jars),
+ ),
+ ))
+
+def _test_launcher_short_path(file):
+ return file.short_path
+
+#
+# PHASE: coda
+#
+# Creates the final rule return structure
+#
+
+def phase_coda(ctx, g):
+ return struct(
+ java = g.ijinfo.intellij_info,
+ providers = g.out.providers,
+ )
diff --git a/rules/scalac.bzl b/rules/scalac.bzl
index 678f7791e..a75f11a32 100644
--- a/rules/scalac.bzl
+++ b/rules/scalac.bzl
@@ -9,10 +9,10 @@ load(
)
load(
"//rules/scalac:private.bzl",
- _scalac_library_implementation = "scalac_library_implementation",
- _scalac_library_private_attributes = "scalac_library_private_attributes",
_scalac_binary_implementation = "scalac_binary_implementation",
_scalac_binary_private_attributes = "scalac_binary_private_attributes",
+ _scalac_library_implementation = "scalac_library_implementation",
+ _scalac_library_private_attributes = "scalac_library_private_attributes",
)
scalac_library = rule(
diff --git a/rules/scalafmt.bzl b/rules/scalafmt.bzl
index 4ad48ed93..4a500931f 100644
--- a/rules/scalafmt.bzl
+++ b/rules/scalafmt.bzl
@@ -1,10 +1,8 @@
load("@bazel_skylib//lib:dicts.bzl", _dicts = "dicts")
load(
"//rules/scalafmt:private/test.bzl",
- _scala_format_test_implementation =
- "scala_format_test_implementation",
- _scala_format_private_attributes =
- "scala_format_private_attributes",
+ _scala_format_private_attributes = "scala_format_private_attributes",
+ _scala_format_test_implementation = "scala_format_test_implementation",
)
"""
diff --git a/rules/semanticdb.scala b/rules/semanticdb.scala
new file mode 100644
index 000000000..e69de29bb
diff --git a/rules/semanticdb/BUILD b/rules/semanticdb/BUILD
new file mode 100644
index 000000000..ca10aa8f7
--- /dev/null
+++ b/rules/semanticdb/BUILD
@@ -0,0 +1,16 @@
+load("@rules_scala_annex//rules:scala.bzl", "scala_library")
+load(
+ ":private.bzl",
+ _make_semanticdb_plugin = "make_semanticdb_plugin",
+)
+
+scala_library(
+ name = "lib",
+ scala = "//external:scala_annex_scala",
+)
+
+_make_semanticdb_plugin(
+ name = "2_12_7",
+ dep = "@scala_annex_org_scalameta_semanticdb_scalac_2_12_7",
+ visibility = ["//visibility:public"],
+)
diff --git a/rules/semanticdb/private.bzl b/rules/semanticdb/private.bzl
new file mode 100644
index 000000000..8915b620b
--- /dev/null
+++ b/rules/semanticdb/private.bzl
@@ -0,0 +1,49 @@
+load(
+ "@rules_scala_annex//rules:providers.bzl",
+ _ScalaRulePhase = "ScalaRulePhase",
+)
+
+SemanticDB = provider(
+ doc = "Scala SemanticDB output",
+ fields = {
+ "output": "the semanticdb file",
+ },
+)
+
+def _phase_semanticdb_before_compile(ctx, g):
+ print("semanticdb before compile phase")
+ g.init.scalacopts.extend([
+ "-Xplugin-require:semanticdb",
+ "-Yrangepos",
+ #"-P:semanticdb:targetroot:~/Desktop/foo",
+ ])
+
+def _phase_semanticdb_after_compile(ctx, g):
+ print("semanticdb after compile phase")
+
+ g.out.providers.append(SemanticDB(
+ output = None,
+ ))
+
+def _my_plugin_implementation(ctx):
+ # TODO: write something intelligent that allows us to pass along
+ # all providers from the underlying dep
+ return [
+ ctx.attr.dep[JavaInfo],
+ _ScalaRulePhase(
+ phases = [
+ ("-", "compile", "semanticdb", _phase_semanticdb_before_compile),
+ ("+", "compile", "semanticdb", _phase_semanticdb_after_compile),
+ ],
+ ),
+ ]
+
+make_semanticdb_plugin = rule(
+ attrs = {
+ "dep": attr.label(
+ mandatory = True,
+ providers = [JavaInfo],
+ ),
+ },
+ implementation = _my_plugin_implementation,
+)
diff --git a/setup-tools.sh b/setup-tools.sh
index e1c8ad698..de92f9d4e 100755
--- a/setup-tools.sh
+++ b/setup-tools.sh
@@ -11,7 +11,7 @@ rm -fr external-tools/buildtools
mkdir -p external-tools/buildtools
echo Downloading buildtools
-curl -L -sS https://github.com/bazelbuild/buildtools/archive/a8cd34f034f2ae1e206eec896cf12d38a0cb26fb.tar.gz | tar zxf - --strip 1 -C external-tools/buildtools
+curl -L -sS https://github.com/bazelbuild/buildtools/archive/e5e9711c13fc2d3b4060ed5421b2d71aba83702f.tar.gz | tar zxf - --strip 1 -C external-tools/buildtools
echo Building buildifier
(cd external-tools/buildtools; bazel run "${BAZEL_OPTS[@]}" --script_path=../buildifier.sh buildifier)
diff --git a/tests/WORKSPACE b/tests/WORKSPACE
index e3280f9b9..deca6ec29 100644
--- a/tests/WORKSPACE
+++ b/tests/WORKSPACE
@@ -38,7 +38,7 @@ scala_repository(
scala_repository(
"scala_2_12",
- ("org.scala-lang", "2.12.6"),
+ ("org.scala-lang", "2.12.7"),
"@compiler_bridge_2_12//:src",
)
diff --git a/tests/compat/ported_tests/BUILD b/tests/compat/ported_tests/BUILD
index 3dbdeba15..b25c688fe 100644
--- a/tests/compat/ported_tests/BUILD
+++ b/tests/compat/ported_tests/BUILD
@@ -1,8 +1,8 @@
load(
"@io_bazel_rules_scala//scala:scala.bzl",
+ "scala_binary",
"scala_library",
"scala_macro_library",
- "scala_binary",
"scala_test",
"scala_test_suite",
)
diff --git a/tests/plugins/outputs/BUILD b/tests/plugins/outputs/BUILD
new file mode 100644
index 000000000..04f4313b8
--- /dev/null
+++ b/tests/plugins/outputs/BUILD
@@ -0,0 +1,27 @@
+load("@rules_scala_annex//rules:scala.bzl", "configure_scala", "scala_library")
+load(":rules.bzl", "my_plugin")
+
+scala_library(
+ name = "my_plugin_lib",
+ srcs = ["plugin.scala"],
+ scala = "@scala_2_12",
+ tags = ["manual"],
+)
+
+my_plugin(
+ name = "my_plugin",
+ tags = ["manual"],
+ deps = [
+ ":my_plugin_lib",
+ ],
+)
+
+scala_library(
+ name = "usage",
+ srcs = ["usage.scala"],
+ plugins = [
+ ":my_plugin",
+ ],
+ scala = "@scala_2_12",
+ tags = ["manual"],
+)
diff --git a/tests/plugins/outputs/plugin.scala b/tests/plugins/outputs/plugin.scala
new file mode 100644
index 000000000..17e7e8e1c
--- /dev/null
+++ b/tests/plugins/outputs/plugin.scala
@@ -0,0 +1,3 @@
+package annex
+
+object Plugin
diff --git a/tests/plugins/outputs/rules.bzl b/tests/plugins/outputs/rules.bzl
new file mode 100644
index 000000000..2d0fb5eb2
--- /dev/null
+++ b/tests/plugins/outputs/rules.bzl
@@ -0,0 +1,41 @@
+load(
+ "@rules_scala_annex//rules:providers.bzl",
+ _ScalaRulePhase = "ScalaRulePhase",
+)
+
+def _foo_before_javainfo(ctx, g):
+ if hasattr(g, "javainfo"):
+ fail("javainfo shouldn't be in the globals, yet")
+
+def _foo_after_javainfo(ctx, g):
+ if not hasattr(g, "javainfo"):
+ fail("javainfo should be in the globals by now")
+
+def _foo_after_coda(ctx, g):
+ if not hasattr(g, "compile"):
+ fail("expected to run after compilation")
+
+ print("plugin phase success")
+
+def _my_plugin_implementation(ctx):
+ sdeps = java_common.merge([dep[JavaInfo] for dep in ctx.attr.deps])
+ return [
+ sdeps,
+ _ScalaRulePhase(
+ phases = [
+ ("-", "javainfo", "foo_before_javainfo", _foo_before_javainfo),
+ ("+", "javainfo", "foo_after_javainfo", _foo_after_javainfo),
+ ("+", "coda", "foo_after_coda", _foo_after_coda),
+ ],
+ ),
+ ]
+
+my_plugin = rule(
+ attrs = {
+ "deps": attr.label_list(
+ mandatory = True,
+ providers = [JavaInfo],
+ ),
+ },
+ implementation = _my_plugin_implementation,
+)
diff --git a/tests/plugins/outputs/test b/tests/plugins/outputs/test
new file mode 100755
index 000000000..0a74fc07e
--- /dev/null
+++ b/tests/plugins/outputs/test
@@ -0,0 +1,4 @@
+#!/bin/bash -e
+. "$(dirname "$0")"/../../common.sh
+
+bazel build :usage 2>&1 | grep "plugin phase success"
diff --git a/tests/plugins/outputs/usage.scala b/tests/plugins/outputs/usage.scala
new file mode 100644
index 000000000..e4051fca8
--- /dev/null
+++ b/tests/plugins/outputs/usage.scala
@@ -0,0 +1,3 @@
+package anx
+
+object Usage
diff --git a/tests/providers/BUILD b/tests/providers/BUILD
index c844dc302..d1f6f80a8 100644
--- a/tests/providers/BUILD
+++ b/tests/providers/BUILD
@@ -10,9 +10,9 @@ load(
)
load(
":build.bzl",
+ "consume_scala_and_zinc_configuration",
"consume_scala_configuration",
"consume_zinc_configuration",
- "consume_scala_and_zinc_configuration",
)
declare_scala_configuration(
diff --git a/tests/scalac/BUILD b/tests/scalac/BUILD
index 0cd94b043..464e76a7a 100644
--- a/tests/scalac/BUILD
+++ b/tests/scalac/BUILD
@@ -1,7 +1,7 @@
load(
"@rules_scala_annex//rules:scalac.bzl",
- "scalac_library",
"scalac_binary",
+ "scalac_library",
)
scalac_library(
diff --git a/tests/semanticdb/BUILD b/tests/semanticdb/BUILD
new file mode 100644
index 000000000..85955887e
--- /dev/null
+++ b/tests/semanticdb/BUILD
@@ -0,0 +1,12 @@
+load("@rules_scala_annex//rules:scala.bzl", "configure_scala", "scala_library")
+load(":rules.bzl", "my_plugin")
+
+scala_library(
+ name = "input",
+ srcs = ["input.scala"],
+ plugins = [
+ "@rules_scala_annex//rules/semanticdb:2_12_7",
+ ],
+ scala = "@scala_2_12",
+ tags = ["manual"],
+)
diff --git a/tests/semanticdb/input.scala b/tests/semanticdb/input.scala
new file mode 100644
index 000000000..71f537ae5
--- /dev/null
+++ b/tests/semanticdb/input.scala
@@ -0,0 +1,5 @@
+package anx
+
+object Foo
+
+object Bar
diff --git a/tests/semanticdb/plugin.scala b/tests/semanticdb/plugin.scala
new file mode 100644
index 000000000..17e7e8e1c
--- /dev/null
+++ b/tests/semanticdb/plugin.scala
@@ -0,0 +1,3 @@
+package annex
+
+object Plugin
diff --git a/tests/semanticdb/test b/tests/semanticdb/test
new file mode 100755
index 000000000..9104a6f3f
--- /dev/null
+++ b/tests/semanticdb/test
@@ -0,0 +1,4 @@
+#!/bin/bash -e
+. "$(dirname "$0")"/../common.sh
+
+bazel build :input
diff --git a/tests/semanticdb/usage.scala b/tests/semanticdb/usage.scala
new file mode 100644
index 000000000..e4051fca8
--- /dev/null
+++ b/tests/semanticdb/usage.scala
@@ -0,0 +1,3 @@
+package anx
+
+object Usage