diff --git a/.gitignore b/.gitignore index 48aeabab011c30893aff2546cc4134e8b20a59d3..ce559b7ca3ddcc4165c87d31d58151e99ddd7c05 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,6 @@ tulsigen-* *.iml .gradle **/local.properties -**/build +**/build/** .externalNativeBuild diff --git a/README.md b/README.md index e516d93039bd697e3445e0a75f216142250d4ef3..83056a144b12ff7da8c8700cc0629362943402ef 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ # Tink - +**`Ubuntu`** | **`macOS`** +----------------------------------------------------------------------------------------- | ----------- + |  ## Introduction diff --git a/WORKSPACE b/WORKSPACE index add718a570cd205c8cb9adb811951f69646b0a81..ba3824a8a929d1991c82074fbb23cf0335ea1935 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -276,13 +276,6 @@ maven_jar( #----------------------------------------------------------------------------- # objc #----------------------------------------------------------------------------- -# TODO(thaidn): remove this dependency by porting what needed to -# third_party/rules_protobuf. -git_repository( - name = "org_pubref_rules_protobuf", - commit = "61efe7c69a6bafffd9f1231f9d6ea97c2014aa64", - remote = "https://github.com/pubref/rules_protobuf.git", -) git_repository( name = "build_bazel_rules_apple", @@ -290,9 +283,6 @@ git_repository( remote = "https://github.com/bazelbuild/rules_apple.git", ) -load("@org_pubref_rules_protobuf//objc:rules.bzl", "objc_proto_repositories") -objc_proto_repositories() - load("@io_bazel_rules_go//go:def.bzl", "go_rules_dependencies", "go_register_toolchains") go_rules_dependencies() go_register_toolchains() diff --git a/kokoro/macos_external/presubmit.cfg b/kokoro/macos_external/presubmit.cfg index b4772bb088c348135861bcda21ce2db62e3d3481..cfc0720d1ec66ed0ef3906810b00fef733efaf32 100644 --- a/kokoro/macos_external/presubmit.cfg +++ b/kokoro/macos_external/presubmit.cfg @@ -2,5 +2,6 @@ build_file: "tink/kokoro/macos_external/presubmit.sh" -gfile_resources: "/x20/teams/blaze/bazel/kokoro/bazel-0.5.3-darwin-x86_64" -gfile_resources: "/x20/teams/ise-team/kokoro/jdk-8u131-macosx-x64/jdk-8u131-macosx-x64.tgz" +gfile_resources: "/x20/teams/ise-crypto/tink/kokoro/jdk-8u131-macosx-x64/jdk-8u131-macosx-x64.tgz" + +gfile_resources: "/x20/teams/ise-crypto/tink/kokoro/bazel-c6122b6ad35ebbed61036b0a2bcfea92b10adb8f-darwin-x86_64" diff --git a/kokoro/macos_external/presubmit.sh b/kokoro/macos_external/presubmit.sh index 0bcb65111eb3adfdea78620f0207cc66c5784f00..7a3d3e066aa77af6d1bee15a1926123193250730 100755 --- a/kokoro/macos_external/presubmit.sh +++ b/kokoro/macos_external/presubmit.sh @@ -25,7 +25,7 @@ set -x : "${IOS_SDK_VERSION:=10.2}" : "${XCODE_VERSION:=8.2.1}" -BAZEL_BIN="${KOKORO_GFILE_DIR}/bazel-0.5.3-darwin-x86_64" +BAZEL_BIN="${KOKORO_GFILE_DIR}/bazel-c6122b6ad35ebbed61036b0a2bcfea92b10adb8f-darwin-x86_64" DISABLE_SANDBOX="--strategy=GenRule=standalone --strategy=Turbine=standalone \ --strategy=CppCompile=standalone --strategy=ProtoCompile=standalone \ @@ -39,6 +39,7 @@ mkdir jdk; cd jdk; cp "${KOKORO_GFILE_DIR}/jdk-8u131-macosx-x64.tgz" ./ tar xf jdk-8u131-macosx-x64.tgz +export ANDROID_HOME="/Users/kbuilder/Library/Android/sdk" export JAVA_HOME="${PWD}/Home" export PATH="${JAVA_HOME}/bin:$PATH" chmod -R a+rx "${JAVA_HOME}" diff --git a/proto/BUILD b/proto/BUILD index c3d20f870c88cf08238d9730f957f7e4f7ef111e..97b8a8b1ec6fae42669347157ea7ae75a2ce8038 100644 --- a/proto/BUILD +++ b/proto/BUILD @@ -10,7 +10,7 @@ package( licenses(["notice"]) # Apache 2.0 
load("@io_bazel_rules_go//proto:go_proto_library.bzl", "go_proto_library") -load("@org_pubref_rules_protobuf//objc:rules.bzl", "objc_proto_compile") +load("//third_party/rules_protobuf/objc:rules.bzl", "objc_proto_compile") load("//proto:objc.bzl", "tink_objc_proto_library") # ----------------------------------------------- @@ -50,7 +50,6 @@ objc_proto_compile( protos = ["common.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, ) # ----------------------------------------------- @@ -96,7 +95,6 @@ objc_proto_compile( protos = ["tink.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, ) # ----------------------------------------------- @@ -177,7 +175,6 @@ objc_proto_compile( protos = ["ecdsa.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, deps = [":common_objc_pb"], ) @@ -218,7 +215,6 @@ objc_proto_compile( protos = ["ed25519.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, ) # ----------------------------------------------- @@ -262,7 +258,6 @@ objc_proto_compile( protos = ["hmac.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, deps = [":common_objc_pb"], ) @@ -303,7 +298,6 @@ objc_proto_compile( protos = ["aes_ctr.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, ) # ----------------------------------------------- @@ -351,7 +345,6 @@ objc_proto_compile( protos = ["aes_ctr_hmac_aead.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, deps = [ ":aes_ctr_objc_pb", ":hmac_objc_pb", @@ -395,7 +388,6 @@ objc_proto_compile( protos = ["aes_gcm.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, ) # ----------------------------------------------- @@ -441,7 +433,6 @@ objc_proto_compile( protos = ["aes_ctr_hmac_streaming.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, deps = [ ":common_objc_pb", ":hmac_objc_pb", @@ -487,7 +478,6 @@ objc_proto_compile( protos = ["aes_gcm_hkdf_streaming.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, deps = [ ":common_objc_pb", ], @@ -530,7 +520,6 @@ objc_proto_compile( protos = ["aes_eax.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, ) # ----------------------------------------------- @@ -570,7 +559,6 @@ objc_proto_compile( protos = ["chacha20_poly1305.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, ) # ----------------------------------------------- @@ -610,7 +598,6 @@ objc_proto_compile( protos = ["kms_aead.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, ) # ----------------------------------------------- @@ -654,7 +641,6 @@ objc_proto_compile( protos = ["kms_envelope.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, deps = [ ":tink_objc_pb", ], @@ -705,7 +691,6 @@ objc_proto_compile( protos = ["ecies_aead_hkdf.proto"], tags = ["manual"], visibility = ["//visibility:private"], - with_grpc = False, deps = [ ":common_objc_pb", ":tink_objc_pb", diff --git a/proto/objc.bzl b/proto/objc.bzl index 7bf9aee8a167718c10336e6cc84b2528463a820f..79e28aeb406a7d519e1e9f827a69376e8a86471b 100644 --- a/proto/objc.bzl +++ b/proto/objc.bzl @@ -1,7 +1,7 @@ # The actual rule which does the filtering. 
 def _do_filter_impl(ctx):
   return struct(
-      files = set([f for f in ctx.files.srcs if f.path.endswith(ctx.attr.suffix)]),
+      files = depset([f for f in ctx.files.srcs if f.path.endswith(ctx.attr.suffix)]),
   )
 
 _do_filter = rule(
diff --git a/third_party/rules_protobuf/BUILD b/third_party/rules_protobuf/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..0988550c0ce6f33539539fef50e5c843ceedec5c
--- /dev/null
+++ b/third_party/rules_protobuf/BUILD
@@ -0,0 +1,5 @@
+package(default_visibility = ["//visibility:public"])
+
+licenses(["notice"]) # Apache 2.0
+
+exports_files(["LICENSE"])
diff --git a/third_party/rules_protobuf/LICENSE b/third_party/rules_protobuf/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..29d600b0882dccf2612b41a1d51cd56c99efd9d0
--- /dev/null
+++ b/third_party/rules_protobuf/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2016 PubRef.org
+
+Licensed under the Apache License, Version 2.0 (the "License"); you
+may not use this file except in compliance with the License. You may
+obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+implied. See the License for the specific language governing
+permissions and limitations under the License.
diff --git a/third_party/rules_protobuf/README.md b/third_party/rules_protobuf/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1767faa593c4e3a40c76a5014774d660ca5d926c
--- /dev/null
+++ b/third_party/rules_protobuf/README.md
@@ -0,0 +1,10 @@
+# Bazel Skylark rules for building protobufs for ObjC
+
+This is a minimal fork of [Rules Protobuf](https://github.com/pubref/rules_protobuf)
+that supports building protobuf for ObjC.
+
+Tink temporarily needs to depend on these rules because objc_proto_library
+does not work properly. See https://github.com/bazelbuild/bazel/issues/1802.
+
+Once either the Bazel or the Protobuf team fixes objc_proto_library, these
+rules can be removed.
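For reference, a minimal sketch of how a BUILD file consumes the vendored fork after this change, mirroring the proto/BUILD hunks above: the `load` now points at `//third_party/rules_protobuf/objc:rules.bzl`, and the pubref-only `with_grpc` attribute is dropped because the vendored `proto_compile` rule does not define it. The target name `common_objc_pb` is inferred from the `deps` references elsewhere in this diff; any attributes outside the changed lines are assumptions.

```python
# Sketch of an objc_proto_compile target using the vendored rules
# (target name taken from deps references in this diff; other unchanged
# attributes are not shown in the hunks and are assumed here).
load("//third_party/rules_protobuf/objc:rules.bzl", "objc_proto_compile")

objc_proto_compile(
    name = "common_objc_pb",
    protos = ["common.proto"],
    tags = ["manual"],
    visibility = ["//visibility:private"],
)
```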
diff --git a/third_party/rules_protobuf/objc/BUILD b/third_party/rules_protobuf/objc/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..f56423e171de6efbe03a5c2b33269adbc8c699a8 --- /dev/null +++ b/third_party/rules_protobuf/objc/BUILD @@ -0,0 +1,14 @@ +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) # Apache 2.0 + +load("//third_party/rules_protobuf/protobuf:rules.bzl", "proto_language") + +proto_language( + name = "objc", + output_file_style = "pascal", + pb_file_extensions = [ + ".pbobjc.h", + ".pbobjc.m", + ], +) diff --git a/third_party/rules_protobuf/objc/rules.bzl b/third_party/rules_protobuf/objc/rules.bzl new file mode 100644 index 0000000000000000000000000000000000000000..ef10485bbbde6e9f619290f3f93da91b1227e1dd --- /dev/null +++ b/third_party/rules_protobuf/objc/rules.bzl @@ -0,0 +1,6 @@ +load("//third_party/rules_protobuf/protobuf:rules.bzl", "proto_compile") + +def objc_proto_compile( + langs = [str(Label("//third_party/rules_protobuf/objc"))], + **kwargs): + proto_compile(langs = langs, **kwargs) diff --git a/third_party/rules_protobuf/protobuf/BUILD b/third_party/rules_protobuf/protobuf/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..f631b6df06d13b4ecf09aed3d810f02b996f197e --- /dev/null +++ b/third_party/rules_protobuf/protobuf/BUILD @@ -0,0 +1,3 @@ +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) # Apache 2.0 diff --git a/third_party/rules_protobuf/protobuf/internal/proto_compile.bzl b/third_party/rules_protobuf/protobuf/internal/proto_compile.bzl new file mode 100644 index 0000000000000000000000000000000000000000..5e8114066e6cfe3e391927c53510d41f6d825f8f --- /dev/null +++ b/third_party/rules_protobuf/protobuf/internal/proto_compile.bzl @@ -0,0 +1,536 @@ +def _capitalize(s): + return s[0:1].upper() + s[1:] + + +def _pascal_case(s): + return "".join([_capitalize(part) for part in s.split("_")]) + + +def _emit_params_file_action(ctx, path, mnemonic, cmds): + """Helper function that writes a potentially long command list to a file. + Args: + ctx (struct): The ctx object. + path (string): the file path where the params file should be written. + mnemonic (string): the action mnemomic. + cmds (list<string>): the command list. + Returns: + (File): an executable file that runs the command set. + """ + filename = "%s.%sFile.params" % (path, mnemonic) + f = ctx.new_file(ctx.configuration.bin_dir, filename) + ctx.file_action(output = f, + content = "\n".join(["set -e"] + cmds), + executable = True) + return f + + +def _get_relative_dirname(base, file): + """Return a dirname in the form of path segments relative to base. + If the file.short_path is not within base, return empty list. + Example: if base="foo/bar/baz.txt" + and file.short_path="bar/baz.txt", + return ["bar"]. + Args: + base (string): the base dirname (ctx.label.package) + file (File): the file to calculate relative dirname. + Returns: + (list<string>): path + """ + path = file.dirname + if not path.startswith(base): + return [] + parts = path.split("/") + if parts[0] == "external": + # ignore off the first two items since we'll be cd'ing into + # this dir. 
+ return parts[2:] + base_parts = base.split("/") + return parts[len(base_parts):] + + +def _get_offset_path(root, path): + """Adjust path relative to offset""" + + if path.startswith("/"): + fail("path argument must not be absolute: %s" % path) + + if not root: + return path + + if root == ".": + return path + + # "external/foobar/file.proto" --> "file.proto" + if path.startswith(root): + start = len(root) + if not root.endswith('/'): + start += 1 + return path[start:] + + depth = root.count('/') + 1 + return "../" * depth + path + + +def _get_import_mappings_for(files, prefix, label): + """For a set of files that belong the the given context label, create a mapping to the given prefix.""" + + mappings = {} + for file in files: + src = file.short_path + # File in an external repo looks like: + # '../WORKSPACE/SHORT_PATH'. We want just the SHORT_PATH. + if src.startswith("../"): + parts = src.split("/") + src = "/".join(parts[2:]) + dst = [prefix, label.package] + name_parts = label.name.split(".") + # special case to elide last part if the name is + # 'go_default_library.pb' + if name_parts[0] != "go_default_library": + dst.append(name_parts[0]) + mappings[src] = "/".join(dst) + + return mappings + + +def _build_output_jar(run, builder): + """Build a jar file for protoc to dump java classes into.""" + ctx = run.ctx + execdir = run.data.execdir + name = run.lang.name + protojar = ctx.new_file("%s_%s.jar" % (run.data.label.name, name)) + builder["outputs"] += [protojar] + builder[name + "_jar"] = protojar + builder[name + "_outdir"] = _get_offset_path(execdir, protojar.path) + + +def _build_output_library(run, builder): + """Build a library.js file for protoc to dump java classes into.""" + ctx = run.ctx + execdir = run.data.execdir + name = run.lang.name + jslib = ctx.new_file(run.data.label.name + run.lang.pb_file_extensions[0]) + builder["jslib"] = [jslib] + builder["outputs"] += [jslib] + + parts = jslib.short_path.rpartition("/") + filename = "/".join([parts[0], run.data.label.name]) + library_path = _get_offset_path(run.data.execdir, filename) + builder[name + "_pb_options"] += ["library=" + library_path] + + +def _build_output_srcjar(run, builder): + ctx = run.ctx + name = run.lang.name + protojar = builder[name + "_jar"] + srcjar_name = "%s_%s.srcjar" % (run.data.label.name, name) + srcjar = ctx.new_file("%s_%s.srcjar" % (run.data.label.name, name)) + run.ctx.action( + mnemonic = "CpJarToSrcJar", + inputs = [protojar], + outputs = [srcjar], + arguments = [protojar.path, srcjar.path], + command = "cp $1 $2", + ) + + # Remove protojar from the list of provided outputs + builder["outputs"] = [e for e in builder["outputs"] if e != protojar] + builder["outputs"] += [srcjar] + + if run.data.verbose > 2: + print("Copied jar %s srcjar to %s" % (protojar.path, srcjar.path)) + +def _build_output_files(run, builder): + """Build a list of files we expect to be generated.""" + + ctx = run.ctx + protos = run.data.protos + if not protos: + fail("Empty proto input list.", "protos") + + exts = run.exts + + for file in protos: + base = file.basename[:-len(".proto")] + if run.lang.output_file_style == 'pascal': + base = _pascal_case(base) + if run.lang.output_file_style == 'capitalize': + base = _capitalize(base) + for ext in exts: + path = _get_relative_dirname(ctx.label.package, file) + path.append(base + ext) + pbfile = ctx.new_file("/".join(path)) + builder["outputs"] += [pbfile] + + +def _build_output_libdir(run, builder): + # This is currently csharp-specific, which needs to have the + # 
output_dir positively adjusted to the package directory. + ctx = run.ctx + execdir = run.data.execdir + name = run.lang.name + builder[name + "_outdir"] = _get_offset_path(execdir, run.data.descriptor_set.dirname) + _build_output_files(run, builder) + + +def _build_descriptor_set(data, builder): + """Build a list of files we expect to be generated.""" + builder["args"] += ["--descriptor_set_out=" + _get_offset_path(data.execdir, data.descriptor_set.path)] + + +def _build_plugin_invocation(name, plugin, execdir, builder): + """Add a '--plugin=NAME=PATH' argument if the language descriptor + requires one. + """ + tool = _get_offset_path(execdir, plugin.path) + builder["inputs"] += [plugin] + builder["args"] += ["--plugin=protoc-gen-%s=%s" % (name, tool)] + + +def _build_protobuf_invocation(run, builder): + """Build a --plugin option if required for basic protobuf generation. + Args: + run (struct): the compilation run object. + builder (dict): the compilation builder data. + Built-in language don't need this. + """ + lang = run.lang + if not lang.pb_plugin: + return + name = lang.pb_plugin_name or lang.name + _build_plugin_invocation(name, + lang.pb_plugin, + run.data.execdir, + builder) + + + +def _get_mappings(files, label, prefix): + """For a set of files that belong the the given context label, create a mapping to the given prefix.""" + mappings = {} + for file in files: + src = file.short_path + #print("mapping file short path: %s" % src) + # File in an external repo looks like: + # '../WORKSPACE/SHORT_PATH'. We want just the SHORT_PATH. + if src.startswith("../"): + parts = src.split("/") + src = "/".join(parts[2:]) + dst = [prefix] + if label.package: + dst.append(label.package) + name_parts = label.name.split(".") + # special case to elide last part if the name is + # 'go_default_library.pb' + if name_parts[0] != "go_default_library": + dst.append(name_parts[0]) + mappings[src] = "/".join(dst) + return mappings + + +def _build_base_namespace(run, builder): + pass + + +def _build_importmappings(run, builder): + """Override behavior to add plugin options before building the --go_out option""" + ctx = run.ctx + go_prefix = run.data.prefix or run.lang.prefix + opts = [] + + # Build the list of import mappings. Start with any configured on + # the rule by attributes. + mappings = run.lang.importmap + run.data.importmap + mappings += _get_mappings(run.data.protos, run.data.label, go_prefix) + + # Then add in the transitive set from dependent rules. + for unit in run.data.transitive_units: + mappings += unit.transitive_mappings + + if run.data.verbose > 1: + print("go_importmap: %s" % mappings) + + for k, v in mappings.items(): + opts += ["M%s=%s" % (k, v)] + + builder["transitive_mappings"] = mappings + + +def _build_plugin_out(name, outdir, options, builder): + """Build the --{lang}_out argument for a given plugin.""" + arg = outdir + if options: + arg = ",".join(options) + ":" + arg + builder["args"] += ["--%s_out=%s" % (name, arg)] + + +def _build_protobuf_out(run, builder): + """Build the --{lang}_out option""" + lang = run.lang + name = lang.pb_plugin_name or lang.name + outdir = builder.get(lang.name + "_outdir", run.outdir) + options = builder.get(lang.name + "_pb_options", []) + + _build_plugin_out(name, outdir, options, builder) + + +def _get_outdir(ctx, lang, execdir): + if ctx.attr.output_to_workspace: + outdir = "." 
+ else: + outdir = ctx.var["GENDIR"] + path = _get_offset_path(execdir, outdir) + if execdir != ".": + path += "/" + execdir + return path + + +def _get_external_root(ctx): + + # Compte set of "external workspace roots" that the proto + # sourcefiles belong to. + external_roots = [] + for file in ctx.files.protos: + path = file.path.split('/') + if path[0] == 'external': + external_roots += ["/".join(path[0:2])] + + # This set size must be 0 or 1. (all source files must exist in this + # workspace or the same external workspace). + roots = depset(external_roots) + n = len(roots) + if n: + if n > 1: + fail( + """ + You are attempting simultaneous compilation of protobuf source files that span multiple workspaces (%s). + Decompose your library rules into smaller units having filesets that belong to only a single workspace at a time. + Note that it is OK to *import* across multiple workspaces, but not compile them as file inputs to protoc. + """ % roots + ) + else: + return external_roots[0] + else: + return "." + + +def _compile(ctx, unit): + + execdir = unit.data.execdir + + protoc = _get_offset_path(execdir, unit.compiler.path) + imports = ["--proto_path=" + i for i in unit.imports] + srcs = [_get_offset_path(execdir, p.path) for p in unit.data.protos] + protoc_cmd = [protoc] + list(unit.args) + imports + srcs + manifest = [f.short_path for f in unit.outputs] + + transitive_units = depset() + for u in unit.data.transitive_units: + transitive_units = transitive_units | u.inputs + inputs = list(unit.inputs | transitive_units) + [unit.compiler] + outputs = list(unit.outputs) + + cmds = [" ".join(protoc_cmd)] + if execdir != ".": + cmds.insert(0, "cd %s" % execdir) + + if unit.data.output_to_workspace: + print( +""" +>************************************************************************** +* - Generating files into the workspace... This is potentially * +* dangerous (may overwrite existing files) and violates bazel's * +* sandbox policy. * +* - Disregard "ERROR: output 'foo.pb.*' was not created." messages. * +* - Build will halt following the "not all outputs were created" message. * +* - Output manifest is printed below. * +**************************************************************************< +%s +>*************************************************************************< +""" % "\n".join(manifest) + ) + + if unit.data.verbose: + print( +""" +************************************************************ +cd $(bazel info execution_root)%s && \ +%s +************************************************************ +%s +************************************************************ +""" % ( + "" if execdir == "." else "/" + execdir, + " \\ \n".join(protoc_cmd), + "\n".join(manifest)) + ) + + if unit.data.verbose > 2: + for i in range(len(protoc_cmd)): + print(" > cmd%s: %s" % (i, protoc_cmd[i])) + for i in range(len(inputs)): + print(" > input%s: %s" % (i, inputs[i])) + for i in range(len(outputs)): + print(" > output%s: %s" % (i, outputs[i])) + + ctx.action( + mnemonic = "ProtoCompile", + command = " && ".join(cmds), + inputs = inputs, + outputs = outputs, + ) + + +def _proto_compile_impl(ctx): + + if ctx.attr.verbose > 1: + print("proto_compile %s:%s" % (ctx.build_file_path, ctx.label.name)) + + # Calculate list of external roots and return the base directory + # we'll use for the protoc invocation. Usually this is '.', but if + # not, its 'external/WORKSPACE' + execdir = _get_external_root(ctx) + + # Propogate proto deps compilation units. 
+ transitive_units = [] + for dep in ctx.attr.deps: + for unit in dep.proto_compile_result.transitive_units: + transitive_units.append(unit) + + if ctx.attr.prefix: + prefix = ctx.attr.prefix.go_prefix + else: + prefix = "" + + # Immutable global state for this compiler run. + data = struct( + label = ctx.label, + workspace_name = ctx.workspace_name, + prefix = prefix, + execdir = execdir, + protos = ctx.files.protos, + descriptor_set = ctx.outputs.descriptor_set, + importmap = ctx.attr.importmap, + pb_options = ctx.attr.pb_options, + verbose = ctx.attr.verbose, + transitive_units = transitive_units, + output_to_workspace = ctx.attr.output_to_workspace, + ) + + #print("transitive_units: %s" % transitive_units) + + # Mutable global state to be populated by the classes. + builder = { + "args": [], # list of string + "imports": ctx.attr.imports + ["."], + "inputs": ctx.files.protos + ctx.files.inputs, + "outputs": [], + } + + # Build a list of structs that will be processed in this compiler + # run. + runs = [] + for l in ctx.attr.langs: + lang = l.proto_language + + exts = [] + if lang.supports_pb: + exts += lang.pb_file_extensions + + runs.append(struct( + ctx = ctx, + outdir = _get_outdir(ctx, lang, execdir), + lang = lang, + data = data, + exts = exts, + output_to_jar = lang.output_to_jar, + )) + + builder["inputs"] += lang.pb_inputs + builder["imports"] += lang.pb_imports + builder[lang.name + "_pb_options"] = lang.pb_options + data.pb_options + + _build_descriptor_set(data, builder) + + for run in runs: + if run.lang.output_to_jar: + _build_output_jar(run, builder) + elif run.lang.output_to_library: + _build_output_library(run, builder) + elif run.lang.output_to_libdir: + _build_output_libdir(run, builder) + else: + _build_output_files(run, builder) + if run.lang.prefix: # golang-specific + _build_importmappings(run, builder) + if run.lang.supports_pb: + _build_protobuf_invocation(run, builder) + _build_protobuf_out(run, builder) + + + # Build final immutable compilation unit for rule and transitive beyond + unit = struct( + compiler = ctx.executable.protoc, + data = data, + transitive_mappings = builder.get("transitive_mappings", {}), + args = depset(builder["args"] + ctx.attr.args), + imports = depset(builder["imports"]), + inputs = depset(builder["inputs"]), + outputs = depset(builder["outputs"] + [ctx.outputs.descriptor_set]), + ) + + # Run protoc + _compile(ctx, unit) + + for run in runs: + if run.lang.output_to_jar: + _build_output_srcjar(run, builder) + + files = depset(builder["outputs"]) + + return struct( + files = files, + proto_compile_result = struct( + unit = unit, + transitive_units = transitive_units + [unit], + ), + ) + +proto_compile = rule( + implementation = _proto_compile_impl, + attrs = { + "args": attr.string_list(), + "langs": attr.label_list( + providers = ["proto_language"], + allow_files = False, + mandatory = False, + ), + "protos": attr.label_list( + allow_files = FileType([".proto"]), + ), + "deps": attr.label_list( + providers = ["proto_compile_result"] + ), + "protoc": attr.label( + default = Label("@com_google_protobuf//:protoc"), + cfg = "host", + executable = True, + ), + "prefix": attr.label( + providers = ["go_prefix"], + ), + "root": attr.string(), + "imports": attr.string_list(), + "importmap": attr.string_dict(), + "inputs": attr.label_list( + allow_files = True, + ), + "pb_options": attr.string_list(), + "output_to_workspace": attr.bool(), + "verbose": attr.int(), + }, + outputs = { + "descriptor_set": "%{name}.descriptor_set", + }, + 
output_to_genfiles = True, # this needs to be set for cc-rules. +) diff --git a/third_party/rules_protobuf/protobuf/internal/proto_language.bzl b/third_party/rules_protobuf/protobuf/internal/proto_language.bzl new file mode 100644 index 0000000000000000000000000000000000000000..19b0cab1c52244ddeed5ef9f2c6e795db1891855 --- /dev/null +++ b/third_party/rules_protobuf/protobuf/internal/proto_language.bzl @@ -0,0 +1,91 @@ +def _proto_language_impl(ctx): + prefix = None + if hasattr(ctx.attr.prefix, "go_prefix"): + prefix = ctx.attr.prefix.go_prefix + return struct( + proto_language = struct( + name = ctx.label.name, + output_to_workspace = ctx.attr.output_to_workspace, + output_to_jar = ctx.attr.output_to_jar, + output_to_library = ctx.attr.output_to_library, + output_to_libdir = ctx.attr.output_to_libdir, + output_file_style = ctx.attr.output_file_style, + supports_pb = ctx.attr.supports_pb, + pb_file_extensions = ctx.attr.pb_file_extensions, + pb_options = ctx.attr.pb_options, + pb_imports = ctx.attr.pb_imports, + pb_inputs = ctx.files.pb_inputs, + pb_plugin_name = ctx.attr.pb_plugin_name, + pb_plugin = ctx.executable.pb_plugin, + pb_compile_deps = ctx.files.pb_compile_deps, + pb_runtime_deps = ctx.files.pb_runtime_deps, + prefix = prefix, + importmap = ctx.attr.importmap, + ), + ) + + +proto_language_attrs = { + "output_to_workspace": attr.bool(), + "output_to_jar": attr.bool(), + "output_to_library": attr.bool(), + "output_to_libdir": attr.bool(), + "output_file_style": attr.string(), + + "supports_pb": attr.bool(default = True), + "pb_file_extensions": attr.string_list(), + "pb_options": attr.string_list(), + "pb_inputs": attr.label_list(), + "pb_imports": attr.string_list(), + "pb_plugin_name": attr.string(), + "pb_plugin": attr.label( + executable = True, + cfg = "host", + ), + "pb_compile_deps": attr.label_list(), + "pb_runtime_deps": attr.label_list(), + "prefix": attr.label( + providers = ["go_prefix"], + ), + "importmap": attr.string_dict(), +} + + +proto_language = rule( + implementation = _proto_language_impl, + attrs = proto_language_attrs, +) + +def _proto_language_deps_impl(ctx): + files = [] + exts = ctx.attr.file_extensions + + for dep in ctx.attr.langs: + lang = dep.proto_language + if ctx.attr.compile_deps: + files += lang.pb_compile_deps + if ctx.attr.runtime_deps: + files += lang.pb_runtime_deps + + deps = [] + for file in files: + for ext in exts: + if file.path.endswith(ext): + deps.append(file) + + return struct( + files = set(deps), + ) + +proto_language_deps = rule( + implementation = _proto_language_deps_impl, + attrs = { + "langs": attr.label_list( + providers = ["proto_language"], + mandatory = True, + ), + "file_extensions": attr.string_list(mandatory = True), + "compile_deps": attr.bool(default = True), + "runtime_deps": attr.bool(default = False), + } +) diff --git a/third_party/rules_protobuf/protobuf/internal/proto_repositories.bzl b/third_party/rules_protobuf/protobuf/internal/proto_repositories.bzl new file mode 100644 index 0000000000000000000000000000000000000000..b5988681a25044a885ddb1caddf0ecec4be779a6 --- /dev/null +++ b/third_party/rules_protobuf/protobuf/internal/proto_repositories.bzl @@ -0,0 +1,16 @@ +load("//third_party/rules_protobuf/protobuf:internal/require.bzl", "require") + +def proto_repositories(excludes = [], + lang_deps = {}, + lang_requires = [], + protobuf_deps = {}, + protobuf_requires = [], + overrides = {}, + verbose = 0): + return require( + keys = protobuf_requires + lang_requires, + deps = protobuf_deps + lang_deps, + excludes = 
excludes, + overrides = overrides, + verbose = verbose, + ) diff --git a/third_party/rules_protobuf/protobuf/internal/require.bzl b/third_party/rules_protobuf/protobuf/internal/require.bzl new file mode 100644 index 0000000000000000000000000000000000000000..428e79661d034a40103f36e6858511ff99a3fc4b --- /dev/null +++ b/third_party/rules_protobuf/protobuf/internal/require.bzl @@ -0,0 +1,77 @@ +def _needs_install(name, dep, hkeys=["sha256", "sha1", "tag"], verbose=0): + + # Does it already exist? + existing_rule = native.existing_rule(name) + if not existing_rule: + return True + + # If it has already been defined and our dependency lists a + # hash, do these match? If a hash mismatch is encountered, has + # the user specifically granted permission to continue? + for hkey in hkeys: + expected = dep.get(hkey) + actual = existing_rule.get(hkey) + if expected: + if expected != actual: + msg = """ +An existing {0} rule '{1}' was already loaded with a {2} value of '{3}'. Refusing to overwrite this with the requested value ('{4}'). +Either remove the pre-existing rule from your WORKSPACE or exclude it from loading by rules_protobuf. +""".format(existing_rule["kind"], name, hkey, actual, expected) + + fail(msg) + else: + if verbose > 1: print("Skip reload %s: %s = %s" % (name, hkey, actual)) + return False + + # No kheys for this rule - in this case no reload; first one loaded wins. + if verbose > 1: print("Skipping reload of existing target %s" % name) + return False + + +def _install(deps, verbose): + """Install a list if dependencies for matching native rules. + Return: + list of deps that have no matching native rule. + """ + todo = [] + + for d in deps: + name = d.get("name") + rule = d.pop("rule", None) + if not rule: + fail("Missing attribute 'rule': %s" % name) + if hasattr(native, rule): + rule = getattr(native, rule) + if verbose: print("Loading %s)" % name) + rule(**d) + else: + d["rule"] = rule + todo.append(d) + + return todo + + +def require(keys, + deps = {}, + overrides = {}, + excludes = [], + verbose = 0): + + # + # Make a list of non-excluded required deps with merged data. + # + required = [] + + for key in keys: + dep = deps.get(key) + if not dep: + fail("Unknown workspace dependency: %s" % key) + d = dict(**dep) # copy the 'frozen' object. + if not key in excludes: + over = overrides.get(key) + data = d + over if over else d + if _needs_install(key, data, verbose=verbose): + data["name"] = key + required.append(data) + + return _install(required, verbose) diff --git a/third_party/rules_protobuf/protobuf/rules.bzl b/third_party/rules_protobuf/protobuf/rules.bzl new file mode 100644 index 0000000000000000000000000000000000000000..17325e3529cd8917025f15325723c1dbb6cc3bc5 --- /dev/null +++ b/third_party/rules_protobuf/protobuf/rules.bzl @@ -0,0 +1,3 @@ +load("//third_party/rules_protobuf/protobuf:internal/proto_compile.bzl", "proto_compile") +load("//third_party/rules_protobuf/protobuf:internal/proto_language.bzl", "proto_language", "proto_language_deps") +load("//third_party/rules_protobuf/protobuf:internal/proto_repositories.bzl", "proto_repositories")
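The `require()` helper above implements a load-once contract: each dependency entry names a native repository rule plus its attributes, `require()` injects the entry's key as the rule `name`, and an entry is only instantiated if no rule with that name already exists (hash keys such as `sha256`, `sha1`, or `tag` are checked against any pre-existing rule, and a mismatch fails the build). A hypothetical sketch follows; the workspace name, URL, and sha256 are placeholders and not part of this change.

```python
load("//third_party/rules_protobuf/protobuf:internal/require.bzl", "require")

# Placeholder dependency table; every value below is illustrative only.
EXAMPLE_DEPS = {
    "com_example_protolib": {
        "rule": "http_archive",  # resolved via getattr(native, "http_archive")
        "url": "https://example.com/protolib-1.0.tar.gz",
        "sha256": "0000000000000000000000000000000000000000000000000000000000000000",
    },
}

def example_repositories(excludes = [], overrides = {}):
    # require() copies each entry, sets its key as the rule name, and skips
    # entries whose name (with matching hashes) has already been loaded.
    require(
        keys = ["com_example_protolib"],
        deps = EXAMPLE_DEPS,
        excludes = excludes,
        overrides = overrides,
    )
```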