Skip to content

Commit

Permalink
Add ability to run test methods in parallel
Browse files Browse the repository at this point in the history
  • Loading branch information
carlesarnal committed Sep 3, 2024
1 parent 7145e1e commit bdb2f1a
Show file tree
Hide file tree
Showing 11 changed files with 6 additions and 74 deletions.
2 changes: 1 addition & 1 deletion app/src/main/resources/application.properties
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ apicurio.import.workDir=${java.io.tmpdir}

## SQL Storage
apicurio.storage.sql.kind=h2
apicurio.datasource.url=jdbc:h2:mem:registry_db
apicurio.datasource.url=jdbc:h2:mem:${quarkus.uuid}
apicurio.datasource.username=sa
apicurio.datasource.password=sa
apicurio.datasource.jdbc.initial-size=20
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,11 @@
import com.microsoft.kiota.ApiException;
import io.apicurio.deployment.PortForwardManager;
import io.apicurio.registry.client.auth.VertXAuthFactory;
import io.apicurio.registry.model.GroupId;
import io.apicurio.registry.rest.client.RegistryClient;
import io.apicurio.registry.rest.client.models.ArtifactSearchResults;
import io.apicurio.registry.rest.client.models.CreateArtifact;
import io.apicurio.registry.rest.client.models.CreateArtifactResponse;
import io.apicurio.registry.rest.client.models.CreateVersion;
import io.apicurio.registry.rest.client.models.IfArtifactExists;
import io.apicurio.registry.rest.client.models.SearchedArtifact;
import io.apicurio.registry.rest.client.models.SearchedVersion;
import io.apicurio.registry.rest.client.models.VersionMetaData;
import io.apicurio.registry.utils.tests.SimpleDisplayName;
Expand All @@ -25,7 +22,6 @@
import io.restassured.parsing.Parser;
import io.restassured.response.Response;
import org.eclipse.microprofile.config.ConfigProvider;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayNameGeneration;
Expand Down Expand Up @@ -64,7 +60,6 @@

import static io.restassured.RestAssured.given;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
* Base class for all base classes for integration tests or for integration tests directly. This class must
Expand Down Expand Up @@ -107,33 +102,6 @@ void prepareRestAssured() {
RestAssured.urlEncodingEnabled = false;
}

@AfterEach
/**
 * Deletes every artifact in the registry after each test method so that tests
 * do not observe each other's data. The whole sweep is wrapped in retry()
 * because inter-artifact references can make deletion order-sensitive.
 *
 * @throws Exception if cleanup still fails after all retry attempts
 */
public void cleanArtifacts() throws Exception {
    logger.info("Removing all artifacts");
    // Retrying to delete artifacts can solve the problem with bad order caused by artifacts references
    // TODO: Solve problem with artifact references circle - maybe use of deleteAllUserData for cleaning
    // artifacts after IT
    retry(() -> {
        // Snapshot of everything currently stored; deleted artifact by artifact below.
        ArtifactSearchResults artifacts = registryClient.search().artifacts().get();
        for (SearchedArtifact artifact : artifacts.getArtifacts()) {
            try {
                registryClient.groups().byGroupId(normalizeGroupId(artifact.getGroupId())).artifacts()
                        .byArtifactId(artifact.getArtifactId()).delete();
                // NOTE(review): this second call deletes ALL artifacts in the default group on
                // every loop iteration, not just the current artifact — confirm this is intended.
                registryClient.groups().byGroupId(GroupId.DEFAULT.getRawGroupIdWithDefaultString())
                        .artifacts().delete();
            } catch (ApiException e) {
                // because of async storage artifact may be already deleted but listed anyway
                logger.info(e.getMessage());
            } catch (Exception e) {
                // Unexpected failure: log it and keep sweeping; the enclosing retry() re-checks.
                logger.error("", e);
            }
        }
        // Fail (and thereby trigger a retry) until every node reports an empty registry.
        ensureClusterSync(client -> {
            assertTrue(client.search().artifacts().get().getCount() == 0);
        });
    }, "CleanArtifacts", 5);
}

/**
 * Maps a {@code null} group id (as returned by search results for artifacts in the
 * default group) to the literal default-group name expected by the REST path.
 * NOTE(review): the hard-coded "default" literal was flagged TODO upstream — ideally
 * this would reuse the shared GroupId constant instead.
 *
 * @param groupId raw group id from a search result, possibly {@code null}
 * @return {@code groupId} unchanged when non-null, otherwise {@code "default"}
 */
private static String normalizeGroupId(String groupId) {
    if (groupId == null) {
        return "default";
    }
    return groupId;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,11 +44,6 @@ public class SimpleAuthIT extends ApicurioRegistryBaseIT {
createArtifact.getFirstVersion().getContent().setContent("{}");
}

@Override
public void cleanArtifacts() throws Exception {
    // Intentional no-op: disables the base-class @AfterEach cleanup so artifacts
    // created by this IT survive across its test methods.
}

@Override
protected RegistryClient createRegistryClient() {
var auth = buildOIDCWebClient(authServerUrlConfigured, JWKSMockServer.ADMIN_CLIENT_ID, "test1");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,11 +45,6 @@
@QuarkusIntegrationTest
public class RegistryConverterIT extends ApicurioRegistryBaseIT {

@Override
public void cleanArtifacts() throws Exception {
    // Intentional no-op: disables the base-class @AfterEach cleanup so artifacts
    // created by this IT survive across its test methods.
}

@Test
public void testConfiguration() throws Exception {
String groupId = "ns_" + TestUtils.generateGroupId().replace("-", "_");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
import io.apicurio.tests.utils.Constants;
import io.quarkus.test.junit.QuarkusIntegrationTest;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

Expand All @@ -14,11 +13,6 @@ public class KafkaSqlSnapshottingIT extends ApicurioRegistryBaseIT {

private static final String NEW_ARTIFACTS_SNAPSHOT_TEST_GROUP_ID = "SNAPSHOT_TEST_GROUP_ID";

@Override
@BeforeEach
public void cleanArtifacts() throws Exception {
    // Intentional no-op: snapshot tests depend on artifacts that already exist
    // in the registry, so the base-class cleanup is disabled.
    // NOTE(review): annotated @BeforeEach here while the base method is @AfterEach —
    // confirm registering it in a second lifecycle phase is intended.
}

@Test
public void testRecoverFromSnapshot() throws InterruptedException {
// We expect 1000 artifacts to be present in the snapshots group, created before the snapshot.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,17 +37,11 @@
@Tag(Constants.MIGRATION)
@Disabled
public class DoNotPreserveIdsImportIT extends ApicurioRegistryBaseIT {

// Logger for this test class. Was previously bound to DataMigrationIT.class
// (copy-paste defect), which mis-attributed all of this class's log output.
private static final Logger log = LoggerFactory.getLogger(DoNotPreserveIdsImportIT.class);
// Export payload fed to the import endpoint; populated by the test setup — TODO confirm producer.
public static InputStream doNotPreserveIdsImportDataToImport;
// Shared schema/message factory used when generating the artifacts to import.
public static JsonSchemaMsgFactory jsonSchema;
// Presumably maps artifact id -> content for pre-export artifacts, re-checked after import.
// NOTE(review): public static mutable state — safe only while this IT is not parallelized.
public static Map<String, String> doNotPreserveIdsImportArtifacts = new HashMap<>();

@Override
public void cleanArtifacts() throws Exception {
    // Intentional no-op: disables the base-class @AfterEach cleanup so the
    // imported/migrated artifacts remain available to all test methods.
}

@Test
public void testDoNotPreserveIdsImport() throws Exception {
var adapter = new VertXRequestAdapter(VertXAuthFactory.defaultVertx);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,11 +48,6 @@ public class AvroSerdeIT extends ApicurioRegistryBaseIT {
private final Class<AvroKafkaSerializer> serializer = AvroKafkaSerializer.class;
private final Class<AvroKafkaDeserializer> deserializer = AvroKafkaDeserializer.class;

@Override
public void cleanArtifacts() throws Exception {
    // Intentional no-op: disables the base-class @AfterEach cleanup so schemas
    // registered by the serde tests survive across test methods.
}

@BeforeAll
void setupEnvironment() {
kafkaCluster.startIfNeeded();
Expand Down Expand Up @@ -400,7 +395,7 @@ void testAvroConfluentForMultipleTopics() throws Exception {
String topicName1 = TestUtils.generateTopic();
String topicName2 = TestUtils.generateTopic();
String topicName3 = TestUtils.generateTopic();
String subjectName = "myrecordconfluent6";
String subjectName = "myrecordconfluent" + System.currentTimeMillis();
String schemaKey = "key1";

kafkaCluster.createTopic(topicName1, 1, 1);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,15 +28,9 @@
public class JsonSchemaSerdeIT extends ApicurioRegistryBaseIT {

private KafkaFacade kafkaCluster = KafkaFacade.getInstance();

private Class<JsonSchemaKafkaSerializer> serializer = JsonSchemaKafkaSerializer.class;
private Class<JsonSchemaKafkaDeserializer> deserializer = JsonSchemaKafkaDeserializer.class;

@Override
public void cleanArtifacts() throws Exception {
    // Intentional no-op: disables the base-class @AfterEach cleanup so schemas
    // registered by the serde tests survive across test methods.
}

@BeforeAll
void setupEnvironment() {
kafkaCluster.startIfNeeded();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
public class ProtobufSerdeIT extends ApicurioRegistryBaseIT {

private KafkaFacade kafkaCluster = KafkaFacade.getInstance();

private Class<ProtobufKafkaSerializer> serializer = ProtobufKafkaSerializer.class;
private Class<ProtobufKafkaDeserializer> deserializer = ProtobufKafkaDeserializer.class;

Expand All @@ -44,11 +43,6 @@ void teardownEnvironment() throws Exception {
kafkaCluster.stopIfPossible();
}

@Override
public void cleanArtifacts() throws Exception {
    // Intentional no-op: disables the base-class @AfterEach cleanup so schemas
    // registered by the serde tests survive across test methods.
}

@Test
@Tag(Constants.ACCEPTANCE)
void testTopicIdStrategyFindLatest() throws Exception {
Expand Down
2 changes: 1 addition & 1 deletion integration-tests/src/test/resources/infra/kafka/kafka.yml
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ spec:
spec:
containers:
- name: kafka-service
image: quay.io/strimzi/kafka:latest-kafka-3.5.0-amd64
image: quay.io/strimzi/kafka:latest-kafka-3.5.0-arm64
command:
- /bin/sh
- -c
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Enable JUnit 5 parallel test execution.
junit.jupiter.execution.parallel.enabled=true
# Default mode for test methods: run concurrently.
junit.jupiter.execution.parallel.mode.default=concurrent
# Top-level test classes execute sequentially (same thread) relative to each
# other; combined with the setting above this means "methods in parallel,
# classes not" — matching the commit's stated intent.
junit.jupiter.execution.parallel.mode.classes.default=same_thread

0 comments on commit bdb2f1a

Please sign in to comment.