diff --git a/inventory-orders-service/build.gradle.kts b/inventory-orders-service/build.gradle.kts index 7290db4..f58d1f8 100644 --- a/inventory-orders-service/build.gradle.kts +++ b/inventory-orders-service/build.gradle.kts @@ -28,7 +28,7 @@ repositories { group = "io.flamingock" version = "1.0-SNAPSHOT" -val flamingockVersion = "1.0.0-beta.5" +val flamingockVersion = "1.0.0-beta.6" logger.lifecycle("Building with flamingock version: $flamingockVersion") val mongodbVersion = "5.5.1" @@ -73,12 +73,16 @@ dependencies { // Others dependencies needed for this example // implementation("org.slf4j:slf4j-simple:2.0.6") // Commented out - Spring Boot provides logging + testImplementation("org.springframework.boot:spring-boot-starter-test") + testImplementation("io.flamingock:flamingock-springboot-test-support") testImplementation("org.junit.jupiter:junit-jupiter-api:5.9.2") testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.9.2") - testImplementation("org.testcontainers:testcontainers-mongodb:2.0.2") - testImplementation("org.testcontainers:testcontainers-kafka:2.0.2") - testImplementation("org.testcontainers:testcontainers-junit-jupiter:2.0.2") + testImplementation("org.springframework.boot:spring-boot-testcontainers") + testImplementation("org.testcontainers:testcontainers:1.21.4") + testImplementation("org.testcontainers:mongodb:1.21.4") + testImplementation("org.testcontainers:kafka:1.21.4") + testImplementation("org.testcontainers:junit-jupiter:1.21.4") } application { diff --git a/inventory-orders-service/gradlew b/inventory-orders-service/gradlew index aeb74cb..a640a45 100755 --- a/inventory-orders-service/gradlew +++ b/inventory-orders-service/gradlew @@ -1,245 +1,2 @@ #!/bin/sh - -# -# Copyright © 2015-2021 the original authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -############################################################################## -# -# Gradle start up script for POSIX generated by Gradle. -# -# Important for running: -# -# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is -# noncompliant, but you have some other compliant shell such as ksh or -# bash, then to run this script, type that shell name before the whole -# command line, like: -# -# ksh Gradle -# -# Busybox and similar reduced shells will NOT work, because this script -# requires all of these POSIX shell features: -# * functions; -# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», -# «${var#prefix}», «${var%suffix}», and «$( cmd )»; -# * compound commands having a testable exit status, especially «case»; -# * various built-in commands including «command», «set», and «ulimit». -# -# Important for patching: -# -# (2) This script targets any POSIX shell, so it avoids extensions provided -# by Bash, Ksh, etc; in particular arrays are avoided. 
-# -# The "traditional" practice of packing multiple parameters into a -# space-separated string is a well documented source of bugs and security -# problems, so this is (mostly) avoided, by progressively accumulating -# options in "$@", and eventually passing that to Java. -# -# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, -# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; -# see the in-line comments for details. -# -# There are tweaks for specific operating systems such as AIX, CygWin, -# Darwin, MinGW, and NonStop. -# -# (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt -# within the Gradle project. -# -# You can find Gradle at https://github.com/gradle/gradle/. -# -############################################################################## - -# Attempt to set APP_HOME - -# Resolve links: $0 may be a link -app_path=$0 - -# Need this for daisy-chained symlinks. -while - APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path - [ -h "$app_path" ] -do - ls=$( ls -ld "$app_path" ) - link=${ls#*' -> '} - case $link in #( - /*) app_path=$link ;; #( - *) app_path=$APP_HOME$link ;; - esac -done - -# This is normally unused -# shellcheck disable=SC2034 -APP_BASE_NAME=${0##*/} -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -# Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD=maximum - -warn () { - echo "$*" -} >&2 - -die () { - echo - echo "$*" - echo - exit 1 -} >&2 - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -nonstop=false -case "$( uname )" in #( - CYGWIN* ) cygwin=true ;; #( - Darwin* ) darwin=true ;; #( - MSYS* | MINGW* ) msys=true ;; #( - NONSTOP* ) nonstop=true ;; -esac - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - - -# Determine the Java command to use to start the JVM. -if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD=$JAVA_HOME/jre/sh/java - else - JAVACMD=$JAVA_HOME/bin/java - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." -fi - -# Increase the maximum file descriptors if we can. -if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then - case $MAX_FD in #( - max*) - # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC3045 - MAX_FD=$( ulimit -H -n ) || - warn "Could not query maximum file descriptor limit" - esac - case $MAX_FD in #( - '' | soft) :;; #( - *) - # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 
- # shellcheck disable=SC3045 - ulimit -n "$MAX_FD" || - warn "Could not set maximum file descriptor limit to $MAX_FD" - esac -fi - -# Collect all arguments for the java command, stacking in reverse order: -# * args from the command line -# * the main class name -# * -classpath -# * -D...appname settings -# * --module-path (only if needed) -# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. - -# For Cygwin or MSYS, switch paths to Windows format before running java -if "$cygwin" || "$msys" ; then - APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) - CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) - - JAVACMD=$( cygpath --unix "$JAVACMD" ) - - # Now convert the arguments - kludge to limit ourselves to /bin/sh - for arg do - if - case $arg in #( - -*) false ;; # don't mess with options #( - /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath - [ -e "$t" ] ;; #( - *) false ;; - esac - then - arg=$( cygpath --path --ignore --mixed "$arg" ) - fi - # Roll the args list around exactly as many times as the number of - # args, so each arg winds up back in the position where it started, but - # possibly modified. - # - # NB: a `for` loop captures its iteration list before it begins, so - # changing the positional parameters here affects neither the number of - # iterations, nor the values presented in `arg`. - shift # remove old arg - set -- "$@" "$arg" # push replacement arg - done -fi - - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' - -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. - -set -- \ - "-Dorg.gradle.appname=$APP_BASE_NAME" \ - -classpath "$CLASSPATH" \ - org.gradle.wrapper.GradleWrapperMain \ - "$@" - -# Stop when "xargs" is not available. -if ! command -v xargs >/dev/null 2>&1 -then - die "xargs is not available" -fi - -# Use "xargs" to parse quoted args. -# -# With -n1 it outputs one arg per line, with the quotes and backslashes removed. -# -# In Bash we could simply go: -# -# readarray ARGS < <( xargs -n1 <<<"$var" ) && -# set -- "${ARGS[@]}" "$@" -# -# but POSIX shell has neither arrays nor command substitution, so instead we -# post-process each arg (as a line of input to sed) to backslash-escape any -# character that might be a shell metacharacter, then use eval to reverse -# that process (while maintaining the separation between arguments), and wrap -# the whole thing up as a single "set" statement. -# -# This will of course break if any of these variables contains a newline or -# an unmatched quote. 
-#
-
-eval "set -- $(
-        printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
-        xargs -n1 |
-        sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
-        tr '\n' ' '
-    )" '"$@"'
-
-exec "$JAVACMD" "$@"
+exec "${JAVA_HOME:+$JAVA_HOME/bin/}java" -classpath "$(dirname "$0")/gradle/wrapper/gradle-wrapper.jar" org.gradle.wrapper.GradleWrapperMain "$@"
diff --git a/inventory-orders-service/src/main/java/io/flamingock/examples/inventory/FlamingockConfig.java b/inventory-orders-service/src/main/java/io/flamingock/examples/inventory/FlamingockConfig.java
index 9d1eac2..df80a4e 100644
--- a/inventory-orders-service/src/main/java/io/flamingock/examples/inventory/FlamingockConfig.java
+++ b/inventory-orders-service/src/main/java/io/flamingock/examples/inventory/FlamingockConfig.java
@@ -1,20 +1,46 @@
 package io.flamingock.examples.inventory;
 
 import com.mongodb.client.MongoClient;
+import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
 import io.flamingock.community.mongodb.sync.driver.MongoDBSyncAuditStore;
+import io.flamingock.examples.inventory.util.KafkaSchemaManager;
+import io.flamingock.examples.inventory.util.LaunchDarklyClient;
 import io.flamingock.examples.inventory.util.MongoDBUtil;
 import io.flamingock.internal.core.store.CommunityAuditStore;
 import io.flamingock.targetsystem.nontransactional.NonTransactionalTargetSystem;
 import io.flamingock.targetystem.mongodb.sync.MongoDBSyncTargetSystem;
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.clients.admin.AdminClientConfig;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
+import jakarta.annotation.PreDestroy;
+import java.time.Duration;
+import java.util.Collections;
+import java.util.Properties;
+
 @Configuration
 public class FlamingockConfig {
 
-    @Bean
+    @Value("${mongodb.uri:mongodb://localhost:27017/}")
+    private String mongodbUri;
+
+    @Value("${kafka.bootstrap-servers:localhost:9092}")
+    private String kafkaBootstrapServers;
+
+    @Value("${kafka.schema-registry-url:http://localhost:8081}")
+    private String schemaRegistryUrl;
+
+    @Value("${launchdarkly.api-url:http://localhost:8765/api/v2}")
+    private String launchDarklyApiUrl;
+
+    private AdminClient kafkaAdminClient;
+
+    @Bean(destroyMethod = "close")
     public MongoClient mongoClient() {
-        return MongoDBUtil.getMongoClient("mongodb://localhost:27017/");
+        return MongoDBUtil.getMongoClient(mongodbUri);
    }
 
     @Bean
@@ -24,12 +50,29 @@ public MongoDBSyncTargetSystem mongoDBSyncTargetSystem(MongoClient mongoClient) {
 
     @Bean
     public NonTransactionalTargetSystem kafkaTargetSystem() throws Exception {
-        return TargetSystems.kafkaTargetSystem();
+        SchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(
+                Collections.singletonList(schemaRegistryUrl),
+                100
+        );
+
+        Properties kafkaProps = new Properties();
+        kafkaProps.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
+        this.kafkaAdminClient = AdminClient.create(kafkaProps);
+
+        KafkaSchemaManager schemaManager = new KafkaSchemaManager(schemaRegistryClient, kafkaAdminClient);
+        schemaManager.createTopicIfNotExists("order-created", 3, (short) 1);
+        return new NonTransactionalTargetSystem(TargetSystems.KAFKA_TARGET_SYSTEM).addDependency(schemaManager);
     }
 
     @Bean
     public NonTransactionalTargetSystem toggleTargetSystem() {
-        return TargetSystems.toggleTargetSystem();
+        LaunchDarklyClient launchDarklyClient = new LaunchDarklyClient(
+                "demo-token",
+                "inventory-service",
+                "production",
+                launchDarklyApiUrl
+        );
+        return new NonTransactionalTargetSystem(TargetSystems.FEATURE_FLAG_TARGET_SYSTEM).addDependency(launchDarklyClient);
     }
 
@@ -38,4 +81,11 @@ public NonTransactionalTargetSystem toggleTargetSystem() {
     public CommunityAuditStore auditStore(MongoDBSyncTargetSystem mongoDBSyncTargetSystem) {
         return MongoDBSyncAuditStore.from(mongoDBSyncTargetSystem);
     }
+
+    @PreDestroy
+    public void cleanup() {
+        if (kafkaAdminClient != null) {
+            kafkaAdminClient.close(Duration.ofSeconds(2));
+        }
+    }
 }
diff --git a/inventory-orders-service/src/test/java/io/flamingock/examples/inventory/InventoryOrdersAppTest.java b/inventory-orders-service/src/test/java/io/flamingock/examples/inventory/InventoryOrdersAppTest.java
new file mode 100644
index 0000000..d7f88ec
--- /dev/null
+++ b/inventory-orders-service/src/test/java/io/flamingock/examples/inventory/InventoryOrdersAppTest.java
@@ -0,0 +1,97 @@
+package io.flamingock.examples.inventory;
+
+import io.flamingock.examples.inventory.changes._0001__mongodb_addDiscountCodeFieldToOrders;
+import io.flamingock.examples.inventory.changes._0002__kafka_updateOrderCreatedSchema;
+import io.flamingock.examples.inventory.changes._0003__toggle_addFeatureFlagDiscounts;
+import io.flamingock.examples.inventory.changes._0004__mongodb_backfillDiscountsForExistingOrders;
+import io.flamingock.examples.inventory.changes._0005__mongodb_addIndexOnDiscountCode;
+import io.flamingock.examples.inventory.changes._0006__toggle_cleanupFeatureFlagDiscounts;
+import io.flamingock.examples.inventory.changes._0007__kafka_leanupOldSchemaVersion;
+import io.flamingock.springboot.testsupport.FlamingockSpringBootTest;
+import io.flamingock.springboot.testsupport.FlamingockSpringBootTestSupport;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.test.annotation.DirtiesContext;
+import org.springframework.test.context.DynamicPropertyRegistry;
+import org.springframework.test.context.DynamicPropertySource;
+import org.testcontainers.containers.BindMode;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.containers.MongoDBContainer;
+import org.testcontainers.containers.Network;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+
+import static io.flamingock.support.domain.AuditEntryDefinition.APPLIED;
+
+@FlamingockSpringBootTest(classes = InventoryOrdersApp.class)
+@Testcontainers
+@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
+class InventoryOrdersAppTest {
+
+    static final Network network = Network.newNetwork();
+
+    @Autowired
+    public FlamingockSpringBootTestSupport flamingockTestSupport;
+
+    @Container
+    static final MongoDBContainer mongoDBContainer = new MongoDBContainer(DockerImageName.parse("mongo:6"))
+            .withNetwork(network)
+            .withNetworkAliases("mongodb");
+
+    @Container
+    static final KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0"))
+            .withNetwork(network)
+            .withNetworkAliases("kafka");
+
+    @Container
+    static final GenericContainer<?> schemaRegistryContainer = new GenericContainer<>(DockerImageName.parse("confluentinc/cp-schema-registry:7.5.0"))
+            .withNetwork(network)
+            .withNetworkAliases("schema-registry")
+            .withExposedPorts(8081)
+            .withEnv("SCHEMA_REGISTRY_HOST_NAME", "schema-registry")
+            .withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", "kafka:9092")
+            .withEnv("SCHEMA_REGISTRY_LISTENERS", "http://0.0.0.0:8081")
+            .dependsOn(kafkaContainer);
+
+    @Container
+    static final GenericContainer<?> launchDarklyContainer = new GenericContainer<>(DockerImageName.parse("node:18-alpine"))
+            .withNetwork(network)
+            .withNetworkAliases("launchdarkly")
+            .withExposedPorts(8765)
+            .withWorkingDirectory("/app")
+            .withFileSystemBind("./mock-launchdarkly-server.js", "/app/server.js", BindMode.READ_ONLY)
+            .withCommand("node", "server.js");
+
+    @DynamicPropertySource
+    static void configureProperties(DynamicPropertyRegistry registry) {
+        registry.add("mongodb.uri", mongoDBContainer::getConnectionString);
+        registry.add("kafka.bootstrap-servers", kafkaContainer::getBootstrapServers);
+        registry.add("kafka.schema-registry-url", () -> String.format("http://%s:%d",
+                schemaRegistryContainer.getHost(),
+                schemaRegistryContainer.getMappedPort(8081)));
+        registry.add("launchdarkly.api-url", () -> String.format("http://%s:%d/api/v2",
+                launchDarklyContainer.getHost(),
+                launchDarklyContainer.getMappedPort(8765)));
+    }
+
+
+
+    @Test
+    void allChangesAppliedSuccessfully() {
+        flamingockTestSupport
+                .givenBuilderFromContext()
+                .whenRun()
+                .thenExpectAuditFinalStateSequence(
+                        APPLIED(_0001__mongodb_addDiscountCodeFieldToOrders.class),
+                        APPLIED(_0002__kafka_updateOrderCreatedSchema.class),
+                        APPLIED(_0003__toggle_addFeatureFlagDiscounts.class),
+                        APPLIED(_0004__mongodb_backfillDiscountsForExistingOrders.class),
+                        APPLIED(_0005__mongodb_addIndexOnDiscountCode.class),
+                        APPLIED(_0006__toggle_cleanupFeatureFlagDiscounts.class),
+                        APPLIED(_0007__kafka_leanupOldSchemaVersion.class)
+                )
+                .verify();
+    }
+}
diff --git a/inventory-orders-service/src/test/java/io/flamingock/examples/inventory/SuccessExecutionTest.java b/inventory-orders-service/src/test/java/io/flamingock/examples/inventory/SuccessExecutionTest.java
deleted file mode 100644
index 71191e6..0000000
--- a/inventory-orders-service/src/test/java/io/flamingock/examples/inventory/SuccessExecutionTest.java
+++ /dev/null
@@ -1,268 +0,0 @@
-/*
- * Copyright 2023 Flamingock (https://www.flamingock.io)
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package io.flamingock.examples.inventory;
-
-import com.mongodb.ConnectionString;
-import com.mongodb.MongoClientSettings;
-import com.mongodb.client.MongoClient;
-import com.mongodb.client.MongoClients;
-import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.flamingock.api.annotations.EnableFlamingock;
-import io.flamingock.api.annotations.Stage;
-import io.flamingock.community.Flamingock;
-import io.flamingock.community.mongodb.sync.driver.MongoDBSyncAuditStore;
-import io.flamingock.examples.inventory.util.LaunchDarklyClient;
-import io.flamingock.examples.inventory.util.KafkaSchemaManager;
-import io.flamingock.internal.core.store.CommunityAuditStore;
-import io.flamingock.targetsystem.nontransactional.NonTransactionalTargetSystem;
-import io.flamingock.targetystem.mongodb.sync.MongoDBSyncTargetSystem;
-import org.apache.kafka.clients.admin.AdminClient;
-import org.apache.kafka.clients.admin.AdminClientConfig;
-import org.bson.Document;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.io.TempDir;
-import org.testcontainers.containers.BindMode;
-import org.testcontainers.containers.GenericContainer;
-import org.testcontainers.containers.KafkaContainer;
-import org.testcontainers.containers.MongoDBContainer;
-import org.testcontainers.containers.Network;
-import org.testcontainers.junit.jupiter.Container;
-import org.testcontainers.junit.jupiter.Testcontainers;
-import org.testcontainers.utility.DockerImageName;
-
-import java.nio.file.Path;
-import java.util.*;
-
-import static io.flamingock.examples.inventory.TargetSystems.DATABASE_NAME;
-import static io.flamingock.examples.inventory.TargetSystems.KAFKA_TARGET_SYSTEM;
-import static io.flamingock.examples.inventory.TargetSystems.MONGODB_TARGET_SYSTEM;
-import static io.flamingock.examples.inventory.TargetSystems.FEATURE_FLAG_TARGET_SYSTEM;
-import static org.junit.jupiter.api.Assertions.*;
-
-@Testcontainers
-public class SuccessExecutionTest {
-    private static final Network network = Network.newNetwork();
-
-    @Container
-    public static final MongoDBContainer mongoDBContainer = new MongoDBContainer(DockerImageName.parse("mongo:6"))
-            .withNetwork(network)
-            .withNetworkAliases("mongodb");
-
-    @Container
-    public static final KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.5.0"))
-            .withNetwork(network)
-            .withNetworkAliases("kafka");
-
-    @Container
-    public static final GenericContainer<?> schemaRegistryContainer = new GenericContainer<>(DockerImageName.parse("confluentinc/cp-schema-registry:7.5.0"))
-            .withNetwork(network)
-            .withNetworkAliases("schema-registry")
-            .withExposedPorts(8081)
-            .withEnv("SCHEMA_REGISTRY_HOST_NAME", "schema-registry")
-            .withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", "kafka:9092")
-            .withEnv("SCHEMA_REGISTRY_LISTENERS", "http://0.0.0.0:8081")
-            .dependsOn(kafkaContainer);
-
-    @Container
-    public static final GenericContainer<?> launchDarklyContainer = new GenericContainer<>(DockerImageName.parse("node:18-alpine"))
-            .withNetwork(network)
-            .withNetworkAliases("launchdarkly")
-            .withExposedPorts(8765)
-            .withWorkingDirectory("/app")
-            .withFileSystemBind("./mock-launchdarkly-server.js", "/app/server.js", BindMode.READ_ONLY)
-            .withCommand("node", "server.js");
-
-    @TempDir
-    static Path tempDir;
-
-    private static MongoClient mongoClient;
-    private static SchemaRegistryClient schemaRegistryClient;
-    private static AdminClient kafkaAdminClient;
-    private static LaunchDarklyClient launchDarklyClient;
-
-    @BeforeAll
-    static void beforeAll() throws Exception {
-        // Wait for containers to be ready
-        Thread.sleep(2000);
-
-        // Setup MongoDB client
-        mongoClient = MongoClients.create(MongoClientSettings
-                .builder()
-                .applyConnectionString(new ConnectionString(mongoDBContainer.getConnectionString()))
-                .build());
-
-        // Setup Schema Registry client
-        String schemaRegistryUrl = String.format("http://%s:%d",
-                schemaRegistryContainer.getHost(),
-                schemaRegistryContainer.getMappedPort(8081));
-        schemaRegistryClient = new CachedSchemaRegistryClient(
-                Collections.singletonList(schemaRegistryUrl),
-                100
-        );
-
-        // Setup Kafka Admin client
-        Properties kafkaProps = new Properties();
-        kafkaProps.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers());
-        kafkaAdminClient = AdminClient.create(kafkaProps);
-
-        // Setup LaunchDarkly client for testing with dev-server
-        String launchDarklyUrl = String.format("http://%s:%d/api/v2",
-                launchDarklyContainer.getHost(),
-                launchDarklyContainer.getMappedPort(8765));
-        launchDarklyClient = new LaunchDarklyClient("test-token", "inventory-service", "test", launchDarklyUrl);
-
-        // Create KafkaSchemaManager
-        KafkaSchemaManager schemaManager = new KafkaSchemaManager(schemaRegistryClient, kafkaAdminClient);
-
-        // Wait a bit more for schema registry to be fully ready
-        Thread.sleep(1000);
-
-        // Wait for LaunchDarkly dev-server to be ready
-        Thread.sleep(1000);
-
-        // Run Flamingock migrations
-        runFlamingockMigrations(mongoClient, schemaManager, launchDarklyClient);
-    }
-
-    @EnableFlamingock(
-            stages = {
-                    @Stage(name = "inventory", location = "io.flamingock.examples.inventory.changes")
-            }
-    )
-    static class TestConfig {}
-
-    private static void runFlamingockMigrations(MongoClient mongoClient, KafkaSchemaManager schemaManager, LaunchDarklyClient launchDarklyClient) {
-        MongoDBSyncTargetSystem mongoTarget = new MongoDBSyncTargetSystem(MONGODB_TARGET_SYSTEM, mongoClient, DATABASE_NAME);
-        CommunityAuditStore auditStore = MongoDBSyncAuditStore.from(mongoTarget);
-
-        NonTransactionalTargetSystem kafkaTarget = new NonTransactionalTargetSystem(KAFKA_TARGET_SYSTEM).addDependency(schemaManager);
-        NonTransactionalTargetSystem flagTarget = new NonTransactionalTargetSystem(FEATURE_FLAG_TARGET_SYSTEM).addDependency(launchDarklyClient);
-
-        Flamingock.builder()
-                .setAuditStore(auditStore)
-                .addTargetSystems(mongoTarget, kafkaTarget, flagTarget)
-                .build()
-                .run();
-    }
-
-    @Test
-    @DisplayName("SHOULD create orders collection with discount fields")
-    void testMongoDbChanges() {
-        // Verify orders were created with discount fields
-        List<Document> orders = mongoClient.getDatabase(DATABASE_NAME)
-                .getCollection("orders")
-                .find()
-                .into(new ArrayList<>());
-
-        assertEquals(2, orders.size());
-
-        // Check that all orders have discountCode field (backfilled)
-        for (Document order : orders) {
-            assertTrue(order.containsKey("discountCode"));
-            assertEquals("NONE", order.getString("discountCode"));
-            assertTrue(order.containsKey("discountApplied"));
-            assertEquals(false, order.getBoolean("discountApplied"));
-        }
-
-        // Verify specific orders
-        Optional<Document> order1 = orders.stream()
-                .filter(o -> "ORD-001".equals(o.getString("orderId")))
-                .findFirst();
-        assertTrue(order1.isPresent());
-        assertEquals("CUST-101", order1.get().getString("customerId"));
-
-        Optional<Document> order2 = orders.stream()
-                .filter(o -> "ORD-002".equals(o.getString("orderId")))
-                .findFirst();
-        assertTrue(order2.isPresent());
-        assertEquals("CUST-102", order2.get().getString("customerId"));
-    }
-
-    @Test
-    @DisplayName("SHOULD register Kafka schemas with discount field")
-    void testKafkaSchemaChanges() throws Exception {
-        // Verify that schemas were registered
-        Collection<String> subjects = schemaRegistryClient.getAllSubjects();
-        assertTrue(subjects.contains("order-created-value"));
-
-        // Verify we have at least 1 version (V2 with discountCode)
-        List<Integer> versions = schemaRegistryClient.getAllVersions("order-created-value");
-        assertTrue(versions.size() >= 1, "Should have at least 1 schema version");
-
-        // Get latest schema and verify it contains discountCode
-        String latestSchema = schemaRegistryClient.getLatestSchemaMetadata("order-created-value")
-                .getSchema();
-        assertTrue(latestSchema.contains("discountCode"));
-    }
-
-    @Test
-    @DisplayName("SHOULD interact with LaunchDarkly Management API")
-    void testLaunchDarklyIntegration() throws Exception {
-        // Verify that the LaunchDarkly client was initialized and used during migrations
-        assertNotNull(launchDarklyClient, "LaunchDarkly client should be initialized");
-
-        // In demo mode, the LaunchDarkly client will attempt HTTP calls to the Management API
-        // These will fail gracefully due to the dummy token, but will log the intended operations
-        // showing how real flag creation/deletion would work
-
-        // In a real test environment with valid LaunchDarkly credentials, you would:
-        // 1. Set up test environment flags
-        // 2. Verify flags were created/deleted via API calls
-        // 3. Check flag states through LaunchDarkly API
-
-        // This test demonstrates that Flamingock successfully coordinates changes
-        // across multiple systems including LaunchDarkly feature flag management via REST API
-    }
-
-    @Test
-    @DisplayName("SHOULD record all changes in Flamingock audit logs")
-    void testFlamingockAuditLogs() {
-        List<Document> auditLogs = mongoClient.getDatabase(DATABASE_NAME)
-                .getCollection("flamingockAuditLog")
-                .find()
-                .into(new ArrayList<>());
-
-        // Should have 2 entries per change (STARTED and APPLIED)
-        assertEquals(14, auditLogs.size()); // 7 changes × 2 entries
-
-        // Verify each change was executed
-        verifyChangeExecution(auditLogs, "add-discount-code-field-to-orders");
-        verifyChangeExecution(auditLogs, "update-order-created-schema");
-        verifyChangeExecution(auditLogs, "add-feature-flag-discounts");
-        verifyChangeExecution(auditLogs, "backfill-discounts-for-existing-orders");
-        verifyChangeExecution(auditLogs, "add-index-on-discount-code");
-        verifyChangeExecution(auditLogs, "cleanup-feature-flag-discounts");
-        verifyChangeExecution(auditLogs, "cleanup-old-schema-version");
-    }
-
-    private void verifyChangeExecution(List<Document> auditLogs, String changeId) {
-        // Check STARTED entry
-        boolean hasStarted = auditLogs.stream()
-                .anyMatch(log -> changeId.equals(log.getString("changeId"))
-                        && "STARTED".equals(log.getString("state")));
-        assertTrue(hasStarted, "Change " + changeId + " should have STARTED entry");
-
-        // Check APPLIED entry
-        boolean hasApplied = auditLogs.stream()
-                .anyMatch(log -> changeId.equals(log.getString("changeId"))
-                        && "APPLIED".equals(log.getString("state")));
-        assertTrue(hasApplied, "Change " + changeId + " should have APPLIED entry");
-    }
-}