Move cc_proto_library from Bazel repository
Bazel 6 falls back to the native rules because of ProtoInfo differences.
Bazel 7 is slightly degraded: Kythe flags don't work, DebugContext is left out of CcInfo, and the temporary files generated by the C++ compiler are not exposed (both are only useful for debugging).

Tests will be submitted in separate PRs.

PiperOrigin-RevId: 674030212
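
For illustration, a BUILD file in a dependent package can now load the rule from this repository instead of using the native one. This is a minimal sketch with hypothetical target and file names, mirroring the example in the rule documentation further down; on Bazel 6 the wrapper transparently dispatches back to native.cc_proto_library, so the same file keeps working there.

load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:proto_library.bzl", "proto_library")

proto_library(
    name = "foo_proto",  # hypothetical example target
    srcs = ["foo.proto"],
)

cc_proto_library(
    name = "foo_cc_proto",
    deps = [":foo_proto"],
)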
protobuf-github-bot authored and copybara-github committed Sep 12, 2024
1 parent 782e8ad commit 5254448
Showing 6 changed files with 356 additions and 2 deletions.
2 changes: 1 addition & 1 deletion MODULE.bazel
@@ -85,7 +85,7 @@ bazel_dep(

bazel_dep(
name = "bazel_features",
version = "1.13.0",
version = "1.16.0",
repo_name = "proto_bazel_features",
)

11 changes: 10 additions & 1 deletion bazel/cc_proto_library.bzl
@@ -1,3 +1,12 @@
"""cc_proto_library rule"""

cc_proto_library = native.cc_proto_library
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/private:bazel_cc_proto_library.bzl", _cc_proto_library = "cc_proto_library") # buildifier: disable=bzl-visibility

def cc_proto_library(**kwattrs):
    # This condition causes Starlark rules to be used only on Bazel >=7.0.0
    if bazel_features.proto.starlark_proto_info:
        _cc_proto_library(**kwattrs)
    else:
        # On older Bazel versions keep using native rules, so that mismatch in ProtoInfo doesn't happen
        native.cc_proto_library(**kwattrs)  # buildifier: disable=native-cc-proto
197 changes: 197 additions & 0 deletions bazel/private/bazel_cc_proto_library.bzl
@@ -0,0 +1,197 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Bazel's implementation of cc_proto_library"""

load("@rules_cc//cc:find_cc_toolchain.bzl", "use_cc_toolchain")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:cc_proto_support.bzl", "cc_proto_compile_and_link")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")

_CC_PROTO_TOOLCHAIN = "@rules_cc//cc/proto:toolchain_type"

_ProtoCcFilesInfo = provider(fields = ["files"], doc = "Provide cc proto files.")
_ProtoCcHeaderInfo = provider(fields = ["headers"], doc = "Provide cc proto headers.")

def _get_output_files(actions, proto_info, suffixes):
    result = []
    for suffix in suffixes:
        result.extend(proto_common.declare_generated_files(
            actions = actions,
            proto_info = proto_info,
            extension = suffix,
        ))
    return result

# TODO: Make this code actually work.
def _get_strip_include_prefix(ctx, proto_info):
    proto_root = proto_info.proto_source_root
    if proto_root == "." or proto_root == ctx.label.workspace_root:
        return ""
    strip_include_prefix = ""
    if proto_root.startswith(ctx.bin_dir.path):
        proto_root = proto_root[len(ctx.bin_dir.path) + 1:]
    elif proto_root.startswith(ctx.genfiles_dir.path):
        proto_root = proto_root[len(ctx.genfiles_dir.path) + 1:]

    if proto_root.startswith(ctx.label.workspace_root):
        proto_root = proto_root[len(ctx.label.workspace_root):]

    strip_include_prefix = "//" + proto_root
    return strip_include_prefix

def _aspect_impl(target, ctx):
    proto_info = target[ProtoInfo]
    proto_configuration = ctx.fragments.proto

    sources = []
    headers = []
    textual_hdrs = []

    proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_cc_proto_toolchain", _CC_PROTO_TOOLCHAIN)
    should_generate_code = proto_common.experimental_should_generate_code(proto_info, proto_toolchain, "cc_proto_library", target.label)

    if should_generate_code:
        if len(proto_info.direct_sources) != 0:
            # Bazel 7 didn't expose cc_proto_library_source_suffixes used by Kythe
            # gradually falling back to .pb.cc
            if type(proto_configuration.cc_proto_library_source_suffixes) == "builtin_function_or_method":
                source_suffixes = [".pb.cc"]
                header_suffixes = [".pb.h"]
            else:
                source_suffixes = proto_configuration.cc_proto_library_source_suffixes
                header_suffixes = proto_configuration.cc_proto_library_header_suffixes
            sources = _get_output_files(ctx.actions, proto_info, source_suffixes)
            headers = _get_output_files(ctx.actions, proto_info, header_suffixes)
            header_provider = _ProtoCcHeaderInfo(headers = depset(headers))
        else:
            # If this proto_library doesn't have sources, it provides the combined headers of all its
            # direct dependencies. Thus, if a direct dependency does have sources, the generated files
            # are also provided by this library. If a direct dependency does not have sources, it will
            # do the same thing, so that effectively this library looks through all source-less
            # proto_libraries and provides all generated headers of the proto_libraries with sources
            # that it depends on.
            transitive_headers = []
            for dep in getattr(ctx.rule.attr, "deps", []):
                if _ProtoCcHeaderInfo in dep:
                    textual_hdrs.extend(dep[_ProtoCcHeaderInfo].headers.to_list())
                    transitive_headers.append(dep[_ProtoCcHeaderInfo].headers)
            header_provider = _ProtoCcHeaderInfo(headers = depset(transitive = transitive_headers))

    else:  # shouldn't generate code
        header_provider = _ProtoCcHeaderInfo(headers = depset())

    proto_common.compile(
        actions = ctx.actions,
        proto_info = proto_info,
        proto_lang_toolchain_info = proto_toolchain,
        generated_files = sources + headers,
        experimental_output_files = "multiple",
    )

    deps = []
    if proto_toolchain.runtime:
        deps = [proto_toolchain.runtime]
    deps.extend(getattr(ctx.rule.attr, "deps", []))

    cc_info, libraries, temps = cc_proto_compile_and_link(
        ctx = ctx,
        deps = deps,
        sources = sources,
        headers = headers,
        textual_hdrs = textual_hdrs,
        strip_include_prefix = _get_strip_include_prefix(ctx, proto_info),
    )

    return [
        cc_info,
        _ProtoCcFilesInfo(files = depset(sources + headers + libraries)),
        OutputGroupInfo(temp_files_INTERNAL_ = temps),
        header_provider,
    ]

cc_proto_aspect = aspect(
    implementation = _aspect_impl,
    attr_aspects = ["deps"],
    fragments = ["cpp", "proto"],
    required_providers = [ProtoInfo],
    provides = [CcInfo],
    attrs = toolchains.if_legacy_toolchain({"_aspect_cc_proto_toolchain": attr.label(
        default = configuration_field(fragment = "proto", name = "proto_toolchain_for_cc"),
    )}),
    toolchains = use_cc_toolchain() + toolchains.use_toolchain(_CC_PROTO_TOOLCHAIN),
)

def _cc_proto_library_impl(ctx):
    if len(ctx.attr.deps) != 1:
        fail(
            "'deps' attribute must contain exactly one label " +
            "(we didn't name it 'dep' for consistency). " +
            "The main use-case for multiple deps is to create a rule that contains several " +
            "other targets. This makes dependency bloat more likely. It also makes it harder " +
            "to remove unused deps.",
            attr = "deps",
        )
    dep = ctx.attr.deps[0]

    proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_cc_proto_toolchain", _CC_PROTO_TOOLCHAIN)
    proto_common.check_collocated(ctx.label, dep[ProtoInfo], proto_toolchain)

    return [DefaultInfo(files = dep[_ProtoCcFilesInfo].files), dep[CcInfo], dep[OutputGroupInfo]]

cc_proto_library = rule(
    implementation = _cc_proto_library_impl,
    doc = """
<p>
<code>cc_proto_library</code> generates C++ code from <code>.proto</code> files.
</p>

<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>

<p>
Example:
</p>

<pre>
<code class="lang-starlark">
cc_library(
    name = "lib",
    deps = [":foo_cc_proto"],
)

cc_proto_library(
    name = "foo_cc_proto",
    deps = [":foo_proto"],
)

proto_library(
    name = "foo_proto",
)
</code>
</pre>
""",
    attrs = {
        "deps": attr.label_list(
            aspects = [cc_proto_aspect],
            allow_rules = ["proto_library"],
            allow_files = False,
            doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate C++ code for.""",
        ),
    } | toolchains.if_legacy_toolchain({
        "_aspect_cc_proto_toolchain": attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_cc"),
        ),
    }),
    provides = [CcInfo],
    toolchains = toolchains.use_toolchain(_CC_PROTO_TOOLCHAIN),
)
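
A side note on the temporary compiler files mentioned in the commit message: the aspect above publishes them under the temp_files_INTERNAL_ output group, so (assuming Bazel's standard --output_groups flag and a hypothetical target label) they could be requested roughly like:

bazel build //pkg:foo_cc_proto --output_groups=temp_files_INTERNAL_

On Bazel 7 this group stays empty, because compilation_outputs.temps() is only collected when bazel_features.cc.protobuf_on_allowlist is true (see cc_proto_support.bzl below).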
141 changes: 141 additions & 0 deletions bazel/private/cc_proto_support.bzl
@@ -0,0 +1,141 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Supporting C++ compilation of generated code"""

load("@proto_bazel_features//:features.bzl", "bazel_features")
load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cc_toolchain")

def get_feature_configuration(ctx, has_sources, extra_requested_features = []):
"""Returns C++ feature configuration for compiling and linking generated C++ files.

Args:
ctx: (RuleCtx) rule context.
has_sources: (bool) Has the proto_library sources.
extra_requested_features: (list[str]) Additionally requested features.
Returns:
(FeatureConfiguration) C++ feature configuration
"""
cc_toolchain = find_cc_toolchain(ctx)
requested_features = ctx.features + extra_requested_features

# TODO: Remove LAYERING_CHECK once we have verified that there are direct
# dependencies for all generated #includes.
unsupported_features = ctx.disabled_features + ["parse_headers", "layering_check"]
if has_sources:
requested_features.append("header_modules")
else:
unsupported_features.append("header_modules")
return cc_common.configure_features(
ctx = ctx,
cc_toolchain = cc_toolchain,
requested_features = requested_features,
unsupported_features = unsupported_features,
)

def _get_libraries_from_linking_outputs(linking_outputs, feature_configuration):
    library_to_link = linking_outputs.library_to_link
    if not library_to_link:
        return []
    outputs = []
    if library_to_link.static_library:
        outputs.append(library_to_link.static_library)
    if library_to_link.pic_static_library:
        outputs.append(library_to_link.pic_static_library)

    # On Windows, dynamic libraries are not built by default, so don't add them to files_to_build.
    if not cc_common.is_enabled(feature_configuration = feature_configuration, feature_name = "targets_windows"):
        if library_to_link.resolved_symlink_dynamic_library:
            outputs.append(library_to_link.resolved_symlink_dynamic_library)
        elif library_to_link.dynamic_library:
            outputs.append(library_to_link.dynamic_library)
        if library_to_link.resolved_symlink_interface_library:
            outputs.append(library_to_link.resolved_symlink_interface_library)
        elif library_to_link.interface_library:
            outputs.append(library_to_link.interface_library)
    return outputs

def cc_proto_compile_and_link(ctx, deps, sources, headers, disallow_dynamic_library = None, feature_configuration = None, alwayslink = False, **kwargs):
"""Creates C++ compilation and linking actions for C++ proto sources.

Args:
ctx: rule context
deps: (list[CcInfo]) List of libraries to be added as dependencies to compilation and linking
actions.
sources:(list[File]) List of C++ sources files.
headers: list(File] List of C++ headers files.
disallow_dynamic_library: (bool) Are dynamic libraries disallowed.
feature_configuration: (FeatureConfiguration) feature configuration to use.
alwayslink: (bool) Should the library be always linked.
**kwargs: Additional arguments passed to the compilation. See cc_common.compile.

Returns:
(CcInfo, list[File], list[File])
- CcInfo provider with compilation context and linking context
- A list of linked libraries related to this proto
- A list of temporary files generated durind compilation
"""
cc_toolchain = find_cc_toolchain(ctx)
feature_configuration = feature_configuration or get_feature_configuration(ctx, bool(sources))
if disallow_dynamic_library == None:
# TODO: Configure output artifact with action_config
# once proto compile action is configurable from the crosstool.
disallow_dynamic_library = not cc_common.is_enabled(
feature_name = "supports_dynamic_linker",
feature_configuration = feature_configuration,
)

(compilation_context, compilation_outputs) = cc_common.compile(
actions = ctx.actions,
feature_configuration = feature_configuration,
cc_toolchain = cc_toolchain,
srcs = sources,
public_hdrs = headers,
compilation_contexts = [dep[CcInfo].compilation_context for dep in deps],
name = ctx.label.name,
# Don't instrument the generated C++ files even when --collect_code_coverage is set.
# If we actually start generating coverage instrumentation for .proto files based on coverage
# data from the generated C++ files, this will have to be removed. Currently, the work done
# to instrument those files and execute the instrumentation is all for nothing, and it can
# be quite a bit of extra computation even when that's not made worse by performance bugs,
# as in b/64963386.
# code_coverage_enabled = False (cc_common.compile disables code_coverage by default)
**kwargs
)

if sources:
linking_context, linking_outputs = cc_common.create_linking_context_from_compilation_outputs(
actions = ctx.actions,
feature_configuration = feature_configuration,
cc_toolchain = cc_toolchain,
compilation_outputs = compilation_outputs,
linking_contexts = [dep[CcInfo].linking_context for dep in deps],
name = ctx.label.name,
disallow_dynamic_library = disallow_dynamic_library,
alwayslink = alwayslink,
)
libraries = _get_libraries_from_linking_outputs(linking_outputs, feature_configuration)
else:
linking_context = cc_common.merge_linking_contexts(
linking_contexts = [dep[CcInfo].linking_context for dep in deps if CcInfo in dep],
)
libraries = []

debug_context = None
temps = []
if bazel_features.cc.protobuf_on_allowlist:
debug_context = cc_common.merge_debug_context(
[cc_common.create_debug_context(compilation_outputs)] +
[dep[CcInfo].debug_context() for dep in deps if CcInfo in dep],
)
temps = compilation_outputs.temps()

return CcInfo(
compilation_context = compilation_context,
linking_context = linking_context,
debug_context = debug_context,
), libraries, temps
6 changes: 6 additions & 0 deletions bazel/private/proto_bazel_features.bzl
@@ -8,6 +8,9 @@
"""Vendored version of bazel_features for protobuf, to keep a one-step setup"""

_PROTO_BAZEL_FEATURES = """bazel_features = struct(
    cc = struct(
        protobuf_on_allowlist = {protobuf_on_allowlist},
    ),
    proto = struct(
        starlark_proto_info = {starlark_proto_info},
    ),
@@ -29,6 +32,8 @@ def _proto_bazel_features_impl(rctx):
    starlark_proto_info = major_version_int >= 7
    PackageSpecificationInfo = major_version_int > 6 or (major_version_int == 6 and minor_version_int >= 4)

    protobuf_on_allowlist = major_version_int > 7

    rctx.file("BUILD.bazel", """
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
bzl_library(
@@ -41,6 +46,7 @@ exports_files(["features.bzl"])
rctx.file("features.bzl", _PROTO_BAZEL_FEATURES.format(
starlark_proto_info = repr(starlark_proto_info),
PackageSpecificationInfo = "PackageSpecificationInfo" if PackageSpecificationInfo else "None",
protobuf_on_allowlist = repr(protobuf_on_allowlist),
))

proto_bazel_features = repository_rule(
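
To make the new feature flag concrete, here is a rough sketch of the features.bzl this repository rule would generate on a hypothetical Bazel 8.0.0 (only the fields visible in this diff; the elided parts of the template are omitted):

bazel_features = struct(
    cc = struct(
        protobuf_on_allowlist = True,
    ),
    proto = struct(
        starlark_proto_info = True,
    ),
    # remaining fields elided
)

On Bazel 7.x, starlark_proto_info would still be True but protobuf_on_allowlist would be False, which is what drives the degraded behavior described in the commit message.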
1 change: 1 addition & 0 deletions java/core/BUILD.bazel
@@ -2,6 +2,7 @@ load("@bazel_skylib//rules:build_test.bzl", "build_test")
load("@rules_pkg//pkg:mappings.bzl", "pkg_files", "strip_prefix")
load("//:protobuf.bzl", "internal_gen_well_known_protos_java")
load("//:protobuf_version.bzl", "PROTOBUF_JAVA_VERSION")
load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:java_lite_proto_library.bzl", "java_lite_proto_library")
load("//bazel:java_proto_library.bzl", "java_proto_library")
load("//bazel:proto_library.bzl", "proto_library")
