Fix export-codegen with multiple output types from same input type (Cherry-pick of #18373) (#18377)

When multiple codegen backends are enabled for the same input type (e.g.
Go and Python for Protobuf), currently only one of the languages' generated
files is exported. This is because the input type (e.g.
`pants.backend.codegen.protobuf.target_types.ProtobufSourceField`) is used
as the key in an input -> output map, so the backend that is enabled last
overwrites any previously registered input -> output entry.
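
A minimal, self-contained sketch of the problem and the fix; the request classes below are hypothetical stand-ins, not real Pants types:

```python
# Hypothetical stand-ins for two codegen requests that share an input type.
class ProtobufSource: ...
class GoSource: ...
class PythonSource: ...

class GenGoFromProtobuf:
    input, output = ProtobufSource, GoSource

class GenPythonFromProtobuf:
    input, output = ProtobufSource, PythonSource

requests = [GenGoFromProtobuf, GenPythonFromProtobuf]

# Before: keying on the input type keeps only the last-registered backend.
as_dict = {req.input: req.output for req in requests}
assert as_dict == {ProtobufSource: PythonSource}  # the Go output was silently dropped

# After: a list of (input, output) pairs preserves every registered backend.
as_pairs = [(req.input, req.output) for req in requests]
assert as_pairs == [(ProtobufSource, GoSource), (ProtobufSource, PythonSource)]
```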

Fixes #15698.

Co-authored-by: Andreas Stenius <git@astekk.se>
jyggen and kaos authored Mar 31, 2023
1 parent c01c329 commit 7ad59d1
Showing 2 changed files with 20 additions and 7 deletions.
13 changes: 6 additions & 7 deletions src/python/pants/backend/codegen/export_codegen_goal.py
@@ -46,26 +46,25 @@ async def export_codegen(
     # We run all possible code generators. Running codegen requires specifying the expected
     # output_type, so we must inspect what is possible to generate.
     all_generate_request_types = union_membership.get(GenerateSourcesRequest)
-    inputs_to_outputs = {
-        req.input: req.output for req in all_generate_request_types if req.exportable
-    }
+    inputs_to_outputs = [
+        (req.input, req.output) for req in all_generate_request_types if req.exportable
+    ]
     codegen_sources_fields_with_output = []
     for tgt in targets:
         if not tgt.has_field(SourcesField):
             continue
         sources = tgt[SourcesField]
-        for input_type in inputs_to_outputs:
+        for input_type, output_type in inputs_to_outputs:
             if isinstance(sources, input_type):
-                output_type = inputs_to_outputs[input_type]
                 codegen_sources_fields_with_output.append((sources, output_type))
 
     if not codegen_sources_fields_with_output:
         codegen_targets = sorted(
             {
                 tgt_type.alias
                 for tgt_type in registered_target_types.types
-                for input_sources in inputs_to_outputs.keys()
-                if tgt_type.class_has_field(input_sources, union_membership=union_membership)
+                for input_source in {input_source for input_source, _ in inputs_to_outputs}
+                if tgt_type.class_has_field(input_source, union_membership=union_membership)
             }
         )
         logger.warning(
14 changes: 14 additions & 0 deletions src/python/pants/backend/codegen/export_codegen_goal_test.py
@@ -59,6 +59,11 @@ class GenNoExportRequest(GenerateSourcesRequest):
     exportable = False
 
 
+class Gen1DuplicatedRequest(GenerateSourcesRequest):
+    input = Gen1Sources
+    output = ResourceSourceField
+
+
 @rule
 async def gen1(_: Gen1Request) -> GeneratedSources:
     result = await Get(Snapshot, CreateDigest([FileContent("assets/README.md", b"Hello!")]))
@@ -76,6 +81,12 @@ async def gen_no_export(_: GenNoExportRequest) -> GeneratedSources:
     assert False, "Should not ever get here as `GenNoExportRequest.exportable==False`"
 
 
+@rule
+async def gen1_duplicated(_: Gen1DuplicatedRequest) -> GeneratedSources:
+    result = await Get(Snapshot, CreateDigest([FileContent("assets/DUPLICATED.md", b"Hello!")]))
+    return GeneratedSources(result)
+
+
 @pytest.fixture
 def rule_runner() -> RuleRunner:
     return RuleRunner(
@@ -84,9 +95,11 @@ def rule_runner() -> RuleRunner:
             gen1,
             gen2,
             gen_no_export,
+            gen1_duplicated,
             UnionRule(GenerateSourcesRequest, Gen1Request),
             UnionRule(GenerateSourcesRequest, Gen2Request),
             UnionRule(GenerateSourcesRequest, GenNoExportRequest),
+            UnionRule(GenerateSourcesRequest, Gen1DuplicatedRequest),
             *distdir.rules(),
         ],
         target_types=[Gen1Target, Gen2Target],
@@ -108,4 +121,5 @@ def test_export_codegen(rule_runner: RuleRunner) -> None:
     assert result.exit_code == 0
     parent_dir = Path(rule_runner.build_root, "dist", "codegen")
     assert (parent_dir / "assets" / "README.md").read_text() == "Hello!"
+    assert (parent_dir / "assets" / "DUPLICATED.md").read_text() == "Hello!"
     assert (parent_dir / "src" / "haskell" / "app.hs").read_text() == "10 * 4"
