From 74a27adb19782658dca31c2b6698752dd4d4b848 Mon Sep 17 00:00:00 2001 From: neilavery Date: Tue, 3 Oct 2023 13:51:09 +0100 Subject: [PATCH] #198 - support flags and default properties files --- README.md | 1 + cli/README.md | 74 ++++++++++++++---- cli/include/bin/wrapper.sh | 1 + cli/provision.properties | 14 ++++ cli/resources/provision.properties | 8 ++ .../main/java/io/specmesh/cli/Provision.java | 77 +++++++++++++++---- .../cli/StorageConsumptionFunctionalTest.java | 1 + .../kafka/ClientsFunctionalDemoTest.java | 1 + kafka/README.md | 47 ++++++++++- .../main/java/io/specmesh/kafka/Clients.java | 7 +- .../specmesh/kafka/provision/Provisioner.java | 6 +- .../kafka/admin/SimpleAdminClientTest.java | 1 + 12 files changed, 204 insertions(+), 34 deletions(-) create mode 100644 cli/provision.properties create mode 100644 cli/resources/provision.properties diff --git a/README.md b/README.md index 5eaf4a59..3d89e02a 100644 --- a/README.md +++ b/README.md @@ -135,3 +135,4 @@ channels: # Developer Notes 1. Install the intellij checkstyle plugin and load the config from config/checkstyle.xml +1. build using: ./gradlew spotlessApply build diff --git a/cli/README.md b/cli/README.md index 9fd98ef5..bca0557b 100644 --- a/cli/README.md +++ b/cli/README.md @@ -9,6 +9,27 @@ This page also contains a simple docker guide for local testing. This command will provision Kafka resources using AsyncApi spec (aka. App, or data product) and publish to the configured cluster and schema registry environment. It can be run manually, and also as part of a GitOps workflow, and/or build promotion of the spec into different environments where cluster and SR endpoints are configured as environment variables. +### Common config +`provision` will look for a `provision.properties` file in the docker /app/ folder (i.e. /app/provision.properties) +A sample config can contain default data config. 
+ + +File provision.properties (will be automatically loaded from docker `/app/provision.properties`) +```properties +spec=/app/simple_schema_demo-api.yaml +acl.enabled=true +sr.enabled=true +dry.run=true +bootstrap.server=broker1:9092 +username=admin +secret=nothing +schema.registry=http://schema-registry:8081 +schema.path=/app/1 +sr.api.key=admin +sr.api.secret=nothing + +``` + ### Usage > % docker run --rm --network kafka_network -v "$(pwd)/resources:/app" ghcr.io/specmesh/specmesh-build-cli provision -bs kafka:9092 -sr http://schema-registry:8081 -spec /app/simple_schema_demo-api.yaml -schemaPath /app @@ -18,33 +39,52 @@ This command will provision Kafka resources using AsyncApi spec (aka. App, or da Long form ``` - provision [-d] [-bs=] [-s=] - [-schemaPath=] [-spec=] + Usage: provision [-aclEnabled] [-clean] [-dry] [-srEnabled] [-bs=] + [-s=] [-schemaPath=] [-spec=] [-sr=] [-srKey=] [-srSecret=] [-u=] -Apply the provided specification to provision kafka resources and permissions -on the cluster + [-D=]... +Apply a specification.yaml to provision kafka resources on a cluster. +Use 'provision.properties' for common arguments + Explicit properties file location /app/provision.properties + + + -aclEnabled, --acl-enabled + True (default) will provision/publish/validate + ACls. False will ignore ACL related operations -bs, --bootstrap-server= Kafka bootstrap server url - -d, --dry-run Compares the cluster against the spec, outputting - proposed changes if compatible.If the spec - incompatible with the cluster (not sure how it - could be) then will fail with a descriptive - error message.A return value of 0=indicates no - changes needed; 1=changes needed; -1=not - compatible, blah blah - -s, --secret= secret credential for the cluster connection - -schemaPath, --schemaPath= + -clean, --clean-unspecified + Compares the cluster resources against the spec, + outputting proposed set of resources that are + unexpected (not specified). 
Use with '-dry-run' + for non-destructive checks. This operation will + not create resources, it will only remove + unspecified resources + -D, --property= + Specify Java runtime properties for Apache Kafka. + -dry, --dry-run Compares the cluster resources against the spec, + outputting proposed changes if compatible. If + the spec incompatible with the cluster then will + fail with a descriptive error message. A return + value of '0' = indicates no changes needed; '1' + = changes needed; '-1' not compatible + -s, --secret= secret credential for the cluster connection + -schemaPath, --schema-path= schemaPath where the set of referenced schemas will be loaded -spec, --spec= specmesh specification file - -sr, --srUrl= + -sr, --schema-registry= schemaRegistryUrl - -srKey, --srApiKey= + -srEnabled, --sr-enabled + True (default) will provision/publish/validate + schemas. False will ignore schema related + operations + -srKey, --sr-api-key= srApiKey for schema registry - -srSecret, --srApiSecret= + -srSecret, --sr-api-secret= srApiSecret for schema secret - -u, --username= username or api key for the cluster connection + -u, --username= username or api key for the cluster connection ``` diff --git a/cli/include/bin/wrapper.sh b/cli/include/bin/wrapper.sh index 805e20c6..eadc3681 100644 --- a/cli/include/bin/wrapper.sh +++ b/cli/include/bin/wrapper.sh @@ -5,6 +5,7 @@ function provision() { exec java \ -Xms64m -Xmx64m \ -Dlog4j.configurationFile=/log/log4j2.xml \ + -Dprovision.properties=/app/provision.properties \ -cp "/opt/specmesh/service/lib/*" \ io.specmesh.cli.Provision "$@" } diff --git a/cli/provision.properties b/cli/provision.properties new file mode 100644 index 00000000..b080c0f1 --- /dev/null +++ b/cli/provision.properties @@ -0,0 +1,14 @@ +spec=/app/simple_schema_demo-api.yaml +acl.enabled=true +sr.enabled=true +dry.run=true +bootstrap.server=broker1:9092 +username=admin +secret=nothing +schema.registry=http://schema-registry:8081 +schema.path=/app/1 
+sr.api.key=admin +sr.api.secret=nothing + + + diff --git a/cli/resources/provision.properties b/cli/resources/provision.properties new file mode 100644 index 00000000..fedecf3d --- /dev/null +++ b/cli/resources/provision.properties @@ -0,0 +1,8 @@ +acl.enabled=true +sr.enabled=true +dry.run=true +bootstrap.server=broker1:9092 +schema.registry=http://schema-registry:8081 +spec=/app/simple_schema_demo-api.yaml +schema.path=/app/1 +username=admin \ No newline at end of file diff --git a/cli/src/main/java/io/specmesh/cli/Provision.java b/cli/src/main/java/io/specmesh/cli/Provision.java index 1cd8e3f8..a90789e1 100644 --- a/cli/src/main/java/io/specmesh/cli/Provision.java +++ b/cli/src/main/java/io/specmesh/cli/Provision.java @@ -23,7 +23,11 @@ import io.specmesh.kafka.KafkaApiSpec; import io.specmesh.kafka.provision.Provisioner; import io.specmesh.kafka.provision.Status; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; import java.util.Map; +import java.util.Properties; import java.util.concurrent.Callable; import picocli.CommandLine; import picocli.CommandLine.Option; @@ -32,8 +36,9 @@ @Command( name = "provision", description = - "Apply the provided specification to provision kafka resources and permissions on" - + " the cluster") + "Apply a specification.yaml to provision kafka resources on a cluster.\n" + + "Use 'provision.properties' for common arguments\n" + + " Explicit properties file location /app/provision.properties\n\n") public class Provision implements Callable { private Status state; @@ -44,7 +49,36 @@ public class Provision implements Callable { * @param args args */ public static void main(final String[] args) { - System.exit(new CommandLine(new Provision()).execute(args)); + + final var properties = new Properties(); + final var propertyFilename = + System.getProperty("provision.properties", "provision.properties"); + try (FileInputStream fis = new FileInputStream(propertyFilename)) { + System.out.println( + 
"Loading `" + + propertyFilename + + "` from:" + + new File(propertyFilename).getAbsolutePath()); + properties.load(fis); + properties.entrySet().forEach(entry -> { + properties.put(entry.getKey().toString().replace(".", "-"), entry.getValue()); + }); + System.out.println( + "Loaded `properties` from cwd:" + new File(propertyFilename).getAbsolutePath()); + } catch (IOException e) { + System.out.println( + "Missing `" + + propertyFilename + + " ` FROM:" + + new File(propertyFilename).getAbsolutePath() + + "\nERROR:" + + e); + e.printStackTrace(); + } + + final var provider = new CommandLine.PropertiesDefaultProvider(properties); + System.exit( + new CommandLine(new Provision()).setDefaultValueProvider(provider).execute(args)); } @Option( @@ -52,6 +86,22 @@ public static void main(final String[] args) { description = "Kafka bootstrap server url") private String brokerUrl = ""; + @Option( + names = {"-srEnabled", "--sr-enabled"}, + fallbackValue = "true", + description = + "True (default) will provision/publish/validate schemas. False will ignore" + + " schema related operations") + private boolean srEnabled; + + @Option( + names = {"-aclEnabled", "--acl-enabled"}, + fallbackValue = "true", + description = + "True (default) will provision/publish/validate ACls. False will ignore ACL" + + " related operations") + private boolean aclEnabled; + @Option( names = {"-sr", "--schema-registry"}, description = "schemaRegistryUrl") @@ -89,16 +139,18 @@ public static void main(final String[] args) { @Option( names = {"-dry", "--dry-run"}, + fallbackValue = "false", description = "Compares the cluster resources against the spec, outputting proposed changes" - + " if compatible. If the spec incompatible with the cluster (not sure how" - + " it could be) then will fail with a descriptive error message. A return" - + " value of '0' = indicates no changes needed; '1' = changes needed; '-1'" - + " not compatible") + + " if compatible. 
If the spec incompatible with the cluster then will" + + " fail with a descriptive error message. A return value of '0' =" + + " indicates no changes needed; '1' = changes needed; '-1' not" + + " compatible") private boolean dryRun; @Option( names = {"-clean", "--clean-unspecified"}, + fallbackValue = "false", description = "Compares the cluster resources against the spec, outputting proposed set of" + " resources that are unexpected (not specified). Use with '-dry-run' for" @@ -107,12 +159,9 @@ public static void main(final String[] args) { private boolean cleanUnspecified; @Option( - names = "-D", + names = {"-D", "--property"}, mapFallbackValue = "", - description = - "Specify Java runtime system properties for Apache Kafka. Note: bulk properties" - + " can be set via '-Dconfig.properties=somefile.properties" - + " ") // allow -Dkey + description = "Specify Java runtime properties for Apache Kafka." + " ") // allow -Dkey void setProperty(final Map props) { props.forEach((k, v) -> System.setProperty(k, v)); } @@ -121,12 +170,14 @@ void setProperty(final Map props) { public Integer call() throws Exception { final var status = Provisioner.provision( + aclEnabled, dryRun, cleanUnspecified, specMeshSpec(), schemaPath, Clients.adminClient(brokerUrl, username, secret), - Clients.schemaRegistryClient(schemaRegistryUrl, srApiKey, srApiSecret)); + Clients.schemaRegistryClient( + srEnabled, schemaRegistryUrl, srApiKey, srApiSecret)); System.out.println(status.toString()); this.state = status; diff --git a/cli/src/test/java/io/specmesh/cli/StorageConsumptionFunctionalTest.java b/cli/src/test/java/io/specmesh/cli/StorageConsumptionFunctionalTest.java index 91153c2d..689bf46e 100644 --- a/cli/src/test/java/io/specmesh/cli/StorageConsumptionFunctionalTest.java +++ b/cli/src/test/java/io/specmesh/cli/StorageConsumptionFunctionalTest.java @@ -75,6 +75,7 @@ class StorageConsumptionFunctionalTest { void shouldGetStorageAndConsumptionMetrics() throws Exception { 
Provisioner.provision( + true, false, false, API_SPEC, diff --git a/kafka-test/src/test/java/io/specmesh/kafka/ClientsFunctionalDemoTest.java b/kafka-test/src/test/java/io/specmesh/kafka/ClientsFunctionalDemoTest.java index 45e90e19..d865e706 100644 --- a/kafka-test/src/test/java/io/specmesh/kafka/ClientsFunctionalDemoTest.java +++ b/kafka-test/src/test/java/io/specmesh/kafka/ClientsFunctionalDemoTest.java @@ -107,6 +107,7 @@ public static void provision() { final SchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(KAFKA_ENV.schemeRegistryServer(), 5); Provisioner.provision( + true, false, false, API_SPEC, diff --git a/kafka/README.md b/kafka/README.md index cf7e6013..debdf139 100644 --- a/kafka/README.md +++ b/kafka/README.md @@ -10,7 +10,7 @@ Extends SpecMesh with the concept of public, protected and public topics. Topic names in the spec come under the `channel` section. Their names either: - * start with `_public`, `_protected` or `_private` for topics _owned_ by the domain, or + * start with `_public`, `_protected` or `_private` for topics _owned_ by the domain. Sharing can also be specified using 'grant-access:' at the cost of explicit sharing * are the fully qualified topic name of some other (domain's) topic. For topic's owned by the domain, their full topic name is that used in the spec, prefixed with the spec's domain id, @@ -59,3 +59,48 @@ library creates repartition and changelog topics for stores automatically using As services are free to make additional private topics, provisioning does _not_ remove existing private topics not in the spec. This places the responsibility of cleaning up private topics on engineering teams. However, as these are private topics, it is easy to determine if such topics are or are not actively in use by the domain's services. + +### Looking inside ACLs - whats really going on + +Too many ACLs can affect cluster performance. Look at the set of ACLs below. 
The domain owner `simple.provision_demo` has access to everything (CREATE, READ, WRITE, DESCRIBE) below its designated topic prefix - which is also its domain name. Notice how there are READ and DESCRIBE ACLs for every `_public` topic. Protected (`_protected`) topics require a set of ACLs for each topic using the 'LITERAL' pattern type - for each 'grant' to another domain owner `User:some.other.domain.acme-A` there will be a set of ACLs created.
+
+
+The set of ACLs created from the `provisioner-functional-test-api.yaml`
+```text
+[(pattern=ResourcePattern(resourceType=GROUP, name=simple.provision_demo, patternType=PREFIXED)
+=(principal=User:simple.provision_demo, host=*, operation=READ, permissionType=ALLOW)),
+
+(pattern=ResourcePattern(resourceType=TOPIC, name=simple.provision_demo._private, patternType=PREFIXED)
+=(principal=User:simple.provision_demo, host=*, operation=CREATE, permissionType=ALLOW)),
+
+(pattern=ResourcePattern(resourceType=TRANSACTIONAL_ID, name=simple.provision_demo, patternType=PREFIXED)
+=(principal=User:simple.provision_demo, host=*, operation=WRITE, permissionType=ALLOW)),
+
+(pattern=ResourcePattern(resourceType=TRANSACTIONAL_ID, name=simple.provision_demo, patternType=PREFIXED)
+=(principal=User:simple.provision_demo, host=*, operation=DESCRIBE, permissionType=ALLOW)),
+
+(pattern=ResourcePattern(resourceType=TOPIC, name=simple.provision_demo, patternType=PREFIXED)
+=(principal=User:simple.provision_demo, host=*, operation=WRITE, permissionType=ALLOW)),
+
+(pattern=ResourcePattern(resourceType=TOPIC, name=simple.provision_demo, patternType=PREFIXED)
+=(principal=User:simple.provision_demo, host=*, operation=DESCRIBE, permissionType=ALLOW)),
+
+(pattern=ResourcePattern(resourceType=TOPIC, name=simple.provision_demo, patternType=PREFIXED)
+=(principal=User:simple.provision_demo, host=*, operation=READ, permissionType=ALLOW)),
+
+(pattern=ResourcePattern(resourceType=TOPIC, name=simple.provision_demo._public, patternType=PREFIXED) 
+=(principal=User:*, host=*, operation=READ, permissionType=ALLOW)), + +(pattern=ResourcePattern(resourceType=TOPIC, name=simple.provision_demo._public, patternType=PREFIXED) +=(principal=User:*, host=*, operation=DESCRIBE, permissionType=ALLOW)), + +(pattern=ResourcePattern(resourceType=TOPIC, name=simple.provision_demo._protected.user_info, patternType=LITERAL) +=(principal=User:some.other.domain.acme-A, host=*, operation=READ, permissionType=ALLOW)), + +(pattern=ResourcePattern(resourceType=TOPIC, name=simple.provision_demo._protected.user_info, patternType=LITERAL) +=(principal=User:some.other.domain.acme-A, host=*, operation=DESCRIBE, permissionType=ALLOW)) + +(pattern=ResourcePattern(resourceType=CLUSTER, name=kafka-cluster, patternType=PREFIXED) +=(principal=User:simple.provision_demo, host=*, operation=IDEMPOTENT_WRITE, permissionType=ALLOW)), +] +``` \ No newline at end of file diff --git a/kafka/src/main/java/io/specmesh/kafka/Clients.java b/kafka/src/main/java/io/specmesh/kafka/Clients.java index 18a6a27f..d5bf027a 100644 --- a/kafka/src/main/java/io/specmesh/kafka/Clients.java +++ b/kafka/src/main/java/io/specmesh/kafka/Clients.java @@ -143,8 +143,11 @@ private static String buildJaasConfig(final String userName, final String passwo } public static Optional schemaRegistryClient( - final String schemaRegistryUrl, final String srApiKey, final String srApiSecret) { - if (schemaRegistryUrl != null) { + final boolean srEnabled, + final String schemaRegistryUrl, + final String srApiKey, + final String srApiSecret) { + if (srEnabled && schemaRegistryUrl != null) { final Map properties = new HashMap<>(); if (srApiKey != null) { properties.put( diff --git a/kafka/src/main/java/io/specmesh/kafka/provision/Provisioner.java b/kafka/src/main/java/io/specmesh/kafka/provision/Provisioner.java index 933b8de8..2a8777b8 100644 --- a/kafka/src/main/java/io/specmesh/kafka/provision/Provisioner.java +++ b/kafka/src/main/java/io/specmesh/kafka/provision/Provisioner.java @@ 
-31,6 +31,7 @@ private Provisioner() {} /** * Provision Topics, ACLS and schemas * + * @param aclEnabled * @param dryRun test or execute * @param cleanUnspecified cleanup * @param apiSpec given spec @@ -41,6 +42,7 @@ private Provisioner() {} * @throws ProvisioningException when cant provision resources */ public static Status provision( + final boolean aclEnabled, final boolean dryRun, final boolean cleanUnspecified, final KafkaApiSpec apiSpec, @@ -64,7 +66,9 @@ public static Status provision( apiSpec, schemaResources, registryClient))); - status.acls(AclProvisioner.provision(dryRun, cleanUnspecified, apiSpec, adminClient)); + if (aclEnabled) { + status.acls(AclProvisioner.provision(dryRun, cleanUnspecified, apiSpec, adminClient)); + } return status.build(); } diff --git a/kafka/src/test/java/io/specmesh/kafka/admin/SimpleAdminClientTest.java b/kafka/src/test/java/io/specmesh/kafka/admin/SimpleAdminClientTest.java index f7951e38..96edd45c 100644 --- a/kafka/src/test/java/io/specmesh/kafka/admin/SimpleAdminClientTest.java +++ b/kafka/src/test/java/io/specmesh/kafka/admin/SimpleAdminClientTest.java @@ -78,6 +78,7 @@ void shouldRecordStats() throws ExecutionException, InterruptedException, Timeou try (Admin adminClient = KAFKA_ENV.adminClient()) { final var client = SmAdminClient.create(adminClient); Provisioner.provision( + true, false, false, API_SPEC,