From 10ecbb9f19dc503974d05b08fce2b23f06d67232 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 27 Feb 2025 11:34:30 -0500 Subject: [PATCH 001/113] first pass checks --- .gitlab-ci.yml | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index bca4b89eed3..d8080952c9a 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -143,6 +143,47 @@ test_published_artifacts: paths: - ./check_reports +.check_job: + extends: .gradle_build + needs: [ build ] + stage: tests + variables: + BUILD_CACHE_TYPE: lib + script: + - ./gradlew $GRADLE_TARGET -PskipTests -PrunBuildSrcTests -PskipSpotless $GRADLE_ARGS + after_script: + - .circleci/collect_reports.sh --destination ./check_reports --move + artifacts: + when: always + paths: + - ./check_reports + - '.gradle/daemon/*/*.out.log' + +check_base: + extends: .check_job + variables: + GRADLE_TARGET: ":baseCheck" + +check_inst: + extends: .check_job + variables: + GRADLE_TARGET: ":instrumentationCheck" + +check_smoke: + extends: .check_job + variables: + GRADLE_TARGET: ":smokeCheck" + +check_profiling: + extends: .check_job + variables: + GRADLE_TARGET: ":profilingCheck" + +check_debugger: + extends: .check_job + variables: + GRADLE_TARGET: ":debuggerCheck" + muzzle: extends: .gradle_build needs: [ build ] From 26a7adbd04374c8959ba0b252c85a7a40dda7fa0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 27 Feb 2025 13:17:15 -0500 Subject: [PATCH 002/113] add some debug --- .circleci/config.continue.yml.j2 | 5 ++++- .gitlab-ci.yml | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.circleci/config.continue.yml.j2 b/.circleci/config.continue.yml.j2 index a44d1f95e6d..1bffad9a553 100644 --- a/.circleci/config.continue.yml.j2 +++ b/.circleci/config.continue.yml.j2 @@ -410,6 +410,10 @@ jobs: - restore_build_cache: cacheType: << parameters.cacheType >> + - run: + name: Print some vars + command: echo "-PtaskPartitionCount=${CIRCLE_NODE_TOTAL} -PtaskPartition=${CIRCLE_NODE_INDEX}" + - run: name: Check Project command: >- @@ -1101,7 +1105,6 @@ build_test_jobs: &build_test_jobs parallelism: 4 gradleTarget: ":instrumentationCheck" cacheType: inst - triggeredBy: *instrumentation_modules - check: requires: diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d8080952c9a..c9a3deea589 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -150,7 +150,7 @@ test_published_artifacts: variables: BUILD_CACHE_TYPE: lib script: - - ./gradlew $GRADLE_TARGET -PskipTests -PrunBuildSrcTests -PskipSpotless $GRADLE_ARGS + - ./gradlew $GRADLE_TARGET -PskipTests -PrunBuildSrcTests -PskipSpotless -PtaskPartitionCount=$CI_NODE_TOTAL -PtaskPartition=$CI_NODE_INDEX $GRADLE_ARGS after_script: - .circleci/collect_reports.sh --destination ./check_reports --move artifacts: @@ -166,11 +166,13 @@ check_base: check_inst: extends: .check_job + parallel: 4 variables: GRADLE_TARGET: ":instrumentationCheck" check_smoke: extends: .check_job + parallel: 4 variables: GRADLE_TARGET: ":smokeCheck" From f8448c965d1f0fa18a829d35003c94f3a0633f06 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 28 Feb 2025 10:40:13 -0500 Subject: [PATCH 003/113] Revert "add some debug" This reverts commit 26a7adbd04374c8959ba0b252c85a7a40dda7fa0. 
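Note on the partitioning flags: the Gradle invocation added above passes GitLab's raw CI_NODE_TOTAL / CI_NODE_INDEX (one-based and, per the comment introduced in the later commits, not always set) straight to -PtaskPartitionCount / -PtaskPartition; the "Normalize node index and node total" commits that follow convert them into the zero-based values fed to the split files and to -PtaskPartition. A minimal standalone shell sketch of that normalization, assuming it may run where the CI_NODE_* variables are unset:

    # Sketch only; variable names follow the normalization added in the later commits.
    export NORMALIZED_NODE_TOTAL=${CI_NODE_TOTAL:-1}              # default to a single partition
    ONE_INDEXED_NODE_INDEX=${CI_NODE_INDEX:-1}                    # GitLab node indices start at 1
    export NORMALIZED_NODE_INDEX=$((ONE_INDEXED_NODE_INDEX - 1))  # downstream partitioning is zero-based
    echo "running partition ${NORMALIZED_NODE_INDEX} of ${NORMALIZED_NODE_TOTAL}"
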
--- .circleci/config.continue.yml.j2 | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.circleci/config.continue.yml.j2 b/.circleci/config.continue.yml.j2 index 1bffad9a553..a44d1f95e6d 100644 --- a/.circleci/config.continue.yml.j2 +++ b/.circleci/config.continue.yml.j2 @@ -410,10 +410,6 @@ jobs: - restore_build_cache: cacheType: << parameters.cacheType >> - - run: - name: Print some vars - command: echo "-PtaskPartitionCount=${CIRCLE_NODE_TOTAL} -PtaskPartition=${CIRCLE_NODE_INDEX}" - - run: name: Check Project command: >- @@ -1105,6 +1101,7 @@ build_test_jobs: &build_test_jobs parallelism: 4 gradleTarget: ":instrumentationCheck" cacheType: inst + triggeredBy: *instrumentation_modules - check: requires: From 0b8643a0f8f111dc50a66ca8ad872067757b9f77 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 28 Feb 2025 10:49:16 -0500 Subject: [PATCH 004/113] Normalize node index and node total --- .gitlab-ci.yml | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c9a3deea589..a33983dd30a 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -42,6 +42,13 @@ default: - export DATADOG_API_KEY_PROD=$(aws ssm get-parameter --region us-east-1 --name ci.dd-trace-java.DATADOG_API_KEY_PROD --with-decryption --query "Parameter.Value" --out text) - export DATADOG_API_KEY_DDSTAGING=$(aws ssm get-parameter --region us-east-1 --name ci.dd-trace-java.dd_api_key --with-decryption --query "Parameter.Value" --out text) +# CI_NODE_INDEX and CI_NODE_TOTAL are 1-indexed and not always set. These steps normalize the numbers for jobs +.normalize_node_index: &normalize_node_index + - echo "CI_NODE_TOTAL=$CI_NODE_TOTAL , CI_NODE_INDEX=$CI_NODE_INDEX" + - export NORMALIZED_NODE_TOTAL=${CI_NODE_TOTAL:-1} + - ONE_INDEXED_NODE_INDEX=${CI_NODE_INDEX:-1}; export NORMALIZED_NODE_INDEX=$((CI_NODE_INDEX - 1)) + - echo "NORMALIZED_NODE_TOTAL=$NORMALIZED_NODE_TOTAL , NORMALIZED_NODE_INDEX=$NORMALIZED_NODE_INDEX" + .gradle_build: &gradle_build image: ghcr.io/datadog/dd-trace-java-docker-build:${JAVA_BUILD_IMAGE_VERSION}-base stage: build @@ -68,6 +75,7 @@ default: before_script: - export GRADLE_USER_HOME=`pwd`/.gradle - export GRADLE_ARGS=" --build-cache --stacktrace --no-daemon --parallel --max-workers=$GRADLE_WORKERS" + - *normalize_node_index # for weird reasons, gradle will always "chmod 700" the .gradle folder # with Gitlab caching, .gradle is always owned by root and thus gradle's chmod invocation fails # This dance is a hack to have .gradle owned by the Gitlab runner user @@ -195,13 +203,10 @@ muzzle: BUILD_CACHE_TYPE: test script: - export SKIP_BUILDSCAN="true" - - echo CI_NODE_INDEX=$CI_NODE_INDEX - - echo CI_NODE_TOTAL=CI_NODE_TOTAL - ./gradlew writeMuzzleTasksToFile $GRADLE_ARGS - sort workspace/build/muzzleTasks > sortedMuzzleTasks - - split --number=l/$CI_NODE_TOTAL --suffix-length=1 --numeric-suffixes sortedMuzzleTasks muzzleSplit - - export NODE_ZERO_INDEX=$((CI_NODE_INDEX - 1)) - - ./gradlew `cat muzzleSplit${NODE_ZERO_INDEX} | xargs` $GRADLE_ARGS + - split --number=l/$NORMALIZED_NODE_TOTAL --suffix-length=1 --numeric-suffixes sortedMuzzleTasks muzzleSplit + - ./gradlew `cat muzzleSplit${NORMALIZED_NODE_INDEX} | xargs` $GRADLE_ARGS after_script: - .circleci/collect_reports.sh artifacts: From 65d41ac5f828a5723b54fc1304a857b60e5d2ea0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 28 Feb 2025 13:00:56 -0500 Subject: [PATCH 005/113] fix normalization --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a33983dd30a..41f0555ad28 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -46,7 +46,7 @@ default: .normalize_node_index: &normalize_node_index - echo "CI_NODE_TOTAL=$CI_NODE_TOTAL , CI_NODE_INDEX=$CI_NODE_INDEX" - export NORMALIZED_NODE_TOTAL=${CI_NODE_TOTAL:-1} - - ONE_INDEXED_NODE_INDEX=${CI_NODE_INDEX:-1}; export NORMALIZED_NODE_INDEX=$((CI_NODE_INDEX - 1)) + - ONE_INDEXED_NODE_INDEX=${CI_NODE_INDEX:-1}; export NORMALIZED_NODE_INDEX=$((ONE_INDEXED_NODE_INDEX - 1)) - echo "NORMALIZED_NODE_TOTAL=$NORMALIZED_NODE_TOTAL , NORMALIZED_NODE_INDEX=$NORMALIZED_NODE_INDEX" .gradle_build: &gradle_build From 661749deea7e6f899132ebbf71833cbd448d4a31 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 3 Mar 2025 10:12:10 -0500 Subject: [PATCH 006/113] actually use the normalized values --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 41f0555ad28..6148e218395 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -158,7 +158,7 @@ test_published_artifacts: variables: BUILD_CACHE_TYPE: lib script: - - ./gradlew $GRADLE_TARGET -PskipTests -PrunBuildSrcTests -PskipSpotless -PtaskPartitionCount=$CI_NODE_TOTAL -PtaskPartition=$CI_NODE_INDEX $GRADLE_ARGS + - ./gradlew $GRADLE_TARGET -PskipTests -PrunBuildSrcTests -PskipSpotless -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS after_script: - .circleci/collect_reports.sh --destination ./check_reports --move artifacts: From 70c494de33c7f1b45e38d44fd17c61b61c14e886 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 3 Mar 2025 11:31:25 -0500 Subject: [PATCH 007/113] first pass: migrate tests to Gitlab --- .gitlab-ci.yml | 165 +++++++++++++++++- .gitlab/check_test_agent_results.sh | 31 ++++ .../trace/agent/test/AgentTestRunner.groovy | 7 +- 3 files changed, 199 insertions(+), 4 deletions(-) create mode 100755 .gitlab/check_test_agent_results.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 08a667862de..de32c43161a 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -29,6 +29,43 @@ variables: description: "Enable profiling of tests" value: "false" +.test_matrix: &test_matrix + - testJvm: &test_jvms + - "8" + - "semeru11" + - "oracle8" + - "21" + - "ubuntu17" + - "zulu8" + - "semeru8" + - "ibm8" + - "zulu11" + - "11" + - "17" + - "semeru17" + +# Gitlab doesn't support "parallel" and "parallel:matrix" at the same time +# These blocks emulate "parallel" by including it in the matrix +.test_matrix_4: &test_matrix_4 + - testJvm: *test_jvms + CI_NODE_INDEX: ["1", "2", "3", "4"] + CI_NODE_TOTAL: 4 + +.test_matrix_6: &test_matrix_6 + - testJvm: *test_jvms + CI_NODE_INDEX: ["1", "2", "3", "4", "5", "6"] + CI_NODE_TOTAL: 6 + +.test_matrix_8: &test_matrix_8 + - testJvm: *test_jvms + CI_NODE_INDEX: ["1", "2", "3", "4", "5", "6", "7", "8"] + CI_NODE_TOTAL: 8 + +.test_matrix_12: &test_matrix_12 + - testJvm: *test_jvms + CI_NODE_INDEX: ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"] + CI_NODE_TOTAL: 12 + default: tags: [ "arch:amd64" ] @@ -44,10 +81,10 @@ default: # CI_NODE_INDEX and CI_NODE_TOTAL are 1-indexed and not always set. 
These steps normalize the numbers for jobs .normalize_node_index: &normalize_node_index - - echo "CI_NODE_TOTAL=$CI_NODE_TOTAL , CI_NODE_INDEX=$CI_NODE_INDEX" + - echo "CI_NODE_TOTAL=${CI_NODE_TOTAL}, CI_NODE_INDEX=$CI_NODE_INDEX" - export NORMALIZED_NODE_TOTAL=${CI_NODE_TOTAL:-1} - ONE_INDEXED_NODE_INDEX=${CI_NODE_INDEX:-1}; export NORMALIZED_NODE_INDEX=$((ONE_INDEXED_NODE_INDEX - 1)) - - echo "NORMALIZED_NODE_TOTAL=$NORMALIZED_NODE_TOTAL , NORMALIZED_NODE_INDEX=$NORMALIZED_NODE_INDEX" + - echo "NORMALIZED_NODE_TOTAL=${NORMALIZED_NODE_TOTAL}, NORMALIZED_NODE_INDEX=$NORMALIZED_NODE_INDEX" .gradle_build: &gradle_build image: ghcr.io/datadog/dd-trace-java-docker-build:${JAVA_BUILD_IMAGE_VERSION}-base @@ -252,11 +289,12 @@ muzzle-dep-report: .test_job: extends: .gradle_build image: ghcr.io/datadog/dd-trace-java-docker-build:$testJvm + tags: [ "docker-in-docker:amd64" ] # use docker-in-docker runner for testcontainers needs: [ build ] stage: tests variables: BUILD_CACHE_TYPE: lib - GRADLE_PARAMS: "" + GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" script: - > @@ -283,6 +321,25 @@ muzzle-dep-report: - ./results - '.gradle/daemon/*/*.out.log' +.test_job_with_test_agent: + extends: .test_job + variables: + CI_USE_TEST_AGENT: "true" + CI_AGENT_HOST: local-agent + services: + - name: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.11.0 + alias: local-agent + variables: + LOG_LEVEL: "DEBUG" + TRACE_LANGUAGE: "java" + DD_SUPPRESS_TRACE_PARSE_ERRORS: "true" + DD_POOL_TRACE_CHECK_FAILURES: "true" + DD_DISABLE_ERROR_RESPONSES: "true" + ENABLED_CHECKS: "trace_content_length,trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service,trace_dd_service" + script: + - !reference [.test_job, script] + - .gitlab/check_test_agent_results.sh + agent_integration_tests: extends: .test_job variables: @@ -297,6 +354,108 @@ agent_integration_tests: DD_BIND_HOST: "0.0.0.0" DD_API_KEY: "invalid_key_but_this_is_fine" +test_base: + extends: .test_job + variables: + GRADLE_TARGET: ":baseTest" + parallel: + matrix: *test_matrix_4 + script: + - if [ "$testJvm" == "8" ]; then export GRADLE_PARAMS="-PskipFlakyTests -PcheckCoverage"; fi + - !reference [.test_job, script] + +test_inst: + extends: .test_job_with_test_agent + variables: + GRADLE_TARGET: ":instrumentationTest" + parallel: + matrix: *test_matrix_12 + +test_inst_latest: + extends: .test_job_with_test_agent + variables: + GRADLE_TARGET: ":instrumentationLatestDepTest" + parallel: + matrix: + - testJvm: ["8", "17", "21" ] + # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time + # This emulates "parallel" by including it in the matrix + CI_NODE_INDEX: [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12" ] + CI_NODE_TOTAL: 12 + +test_flaky: + extends: .test_job_with_test_agent + variables: + GRADLE_PARAMS: "-PrunFlakyTests" + testJvm: "8" + CONTINUE_ON_FAILURE: "true" + parallel: + matrix: + - GRADLE_TARGET: [":baseTest", ":instrumentationTest", ":smokeTest", ":debuggerTest"] + # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time + # This emulates "parallel" by including it in the matrix + CI_NODE_INDEX: [ "1", "2", "3", "4" ] + CI_NODE_TOTAL: 4 + +test_flaky_inst: + extends: .test_job + variables: + GRADLE_TARGET: ":instrumentationTest" + GRADLE_PARAMS: "-PrunFlakyTests" + testJvm: "8" + CONTINUE_ON_FAILURE: "true" + parallel: + matrix: + # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time + # This emulates "parallel" by including it in the matrix + - 
CI_NODE_INDEX: [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12" ] + CI_NODE_TOTAL: 12 + +test_profiling: + extends: .test_job + variables: + GRADLE_TARGET: ":profilingTest" + parallel: + matrix: *test_matrix + +test_debugger: + extends: .test_job + variables: + GRADLE_TARGET: ":debuggerTest" + parallel: + matrix: *test_matrix + +test_smoke: + extends: .test_job + variables: + GRADLE_TARGET: "stageMainDist :smokeTest" + GRADLE_PARAMS: "-PskipFlakyTests" + parallel: + matrix: *test_matrix_4 + +test_ssi_smoke: + extends: .test_job + variables: + GRADLE_TARGET: "stageMainDist :smokeTest" + DD_INJECT_FORCE: "true" + DD_INJECTION_ENABLED: "tracer" + parallel: + matrix: *test_matrix_4 + +test_smoke_graalvm: + extends: .test_job + variables: + GRADLE_TARGET: "stageMainDist :dd-smoke-test:spring-boot-3.0-native:test" + parallel: + matrix: + - testJvm: ["graalvm17", "graalvm21"] + +test_smoke_semeru8_debugger: + extends: .test_job + variables: + GRADLE_TARGET: "stageMainDist dd-smoke-tests:debugger-integration-tests:test" + testJvm: "semeru8" + required: extends: .fan_in needs: diff --git a/.gitlab/check_test_agent_results.sh b/.gitlab/check_test_agent_results.sh new file mode 100755 index 00000000000..cfbc8f098be --- /dev/null +++ b/.gitlab/check_test_agent_results.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +set +e # Disable exiting from testagent response failure +SUMMARY_RESPONSE=$(curl -s -w "\n%{http_code}" -o summary_response.txt "http://${CI_AGENT_HOST}:8126/test/trace_check/summary") +set -e +SUMMARY_RESPONSE_CODE=$(echo "$SUMMARY_RESPONSE" | awk 'END {print $NF}') + +if [[ SUMMARY_RESPONSE_CODE -eq 200 ]]; then + echo "APM Test Agent is running. (HTTP 200)" +else + echo "APM Test Agent is not running and was not used for testing. No checks failed." + exit 0 +fi + +RESPONSE=$(curl -s -w "\n%{http_code}" -o response.txt "http://${CI_AGENT_HOST}:8126/test/trace_check/failures") +RESPONSE_CODE=$(echo "$RESPONSE" | awk 'END {print $NF}') + +if [[ $RESPONSE_CODE -eq 200 ]]; then + echo "All APM Test Agent Check Traces returned successful! (HTTP 200)" + echo "APM Test Agent Check Traces Summary Results:" + cat summary_response.txt | jq '.' +elif [[ $RESPONSE_CODE -eq 404 ]]; then + echo "Real APM Agent running in place of TestAgent, no checks to validate!" +else + echo "APM Test Agent Check Traces failed with response code: $RESPONSE_CODE" + echo "Failures:" + cat response.txt + echo "APM Test Agent Check Traces Summary Results:" + cat summary_response.txt | jq '.' 
+ exit 1 +fi diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/AgentTestRunner.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/AgentTestRunner.groovy index 95836bec3fa..2237c6de4d0 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/AgentTestRunner.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/AgentTestRunner.groovy @@ -366,8 +366,13 @@ abstract class AgentTestRunner extends DDSpecification implements AgentBuilder.L TEST_WRITER = new ListWriter() if (isTestAgentEnabled()) { + String agentHost = System.getenv("CI_AGENT_HOST") + if (agentHost == null) { + agentHost = DEFAULT_AGENT_HOST + } + // emit traces to the APM Test-Agent for Cross-Tracer Testing Trace Checks - HttpUrl agentUrl = HttpUrl.get("http://" + DEFAULT_AGENT_HOST + ":" + DEFAULT_TRACE_AGENT_PORT) + HttpUrl agentUrl = HttpUrl.get("http://" + agentHost + ":" + DEFAULT_TRACE_AGENT_PORT) OkHttpClient client = buildHttpClient(agentUrl, null, null, TimeUnit.SECONDS.toMillis(DEFAULT_AGENT_TIMEOUT)) DDAgentFeaturesDiscovery featureDiscovery = new DDAgentFeaturesDiscovery(client, Monitoring.DISABLED, agentUrl, Config.get().isTraceAgentV05Enabled(), Config.get().isTracerMetricsEnabled()) TEST_AGENT_API = new DDAgentApi(client, agentUrl, featureDiscovery, Monitoring.DISABLED, Config.get().isTracerMetricsEnabled()) From 2ae3cf3f9a57d05009006bf81e9b3c9c6c4df540 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 6 Mar 2025 10:59:51 -0500 Subject: [PATCH 008/113] less scenarios for now --- .gitlab-ci.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 26fdf51fe4f..128c9c459cb 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -32,17 +32,17 @@ variables: .test_matrix: &test_matrix - testJvm: &test_jvms - "8" - - "semeru11" - - "oracle8" - - "21" - - "ubuntu17" - - "zulu8" +# - "semeru11" +# - "oracle8" +# - "21" +# - "ubuntu17" +# - "zulu8" - "semeru8" - - "ibm8" - - "zulu11" +# - "ibm8" +# - "zulu11" - "11" - - "17" - - "semeru17" +# - "17" +# - "semeru17" # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time # These blocks emulate "parallel" by including it in the matrix From c3ebdc0425f6b1a4e740be51e9b48413e204c5cc Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 10 Mar 2025 17:21:22 -0400 Subject: [PATCH 009/113] bump memory and workers --- .gitlab-ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 128c9c459cb..6f6477a0747 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -293,6 +293,9 @@ muzzle-dep-report: needs: [ build ] stage: tests variables: + KUBERNETES_MEMORY_REQUEST: 16Gi + KUBERNETES_MEMORY_LIMIT: 16Gi + GRADLE_WORKERS: 4 BUILD_CACHE_TYPE: lib GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" From 2d78199461f1008a6c813b77c2580180867b1990 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 10 Mar 2025 19:22:59 -0400 Subject: [PATCH 010/113] actually use task partitions --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 6f6477a0747..f8628b7e87f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -307,7 +307,7 @@ muzzle-dep-report: fi - *prepare_test_env - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms2G -Xmx2G $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M 
-Ddatadog.forkedMinHeapSize=128M" - - ./gradlew $GRADLE_TARGET $GRADLE_PARAMS -PtestJvm=$testJvm $GRADLE_ARGS --continue || $CONTINUE_ON_FAILURE + - ./gradlew $GRADLE_TARGET $GRADLE_PARAMS -PtestJvm=$testJvm -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS --continue || $CONTINUE_ON_FAILURE after_script: - *restore_pretest_env - *set_datadog_api_keys From aba4f200552a8037d3c5ce99ce87a62614e669d2 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 11 Mar 2025 16:23:36 -0400 Subject: [PATCH 011/113] Add section start/end --- .gitlab-ci.yml | 16 +++++++++++++++- .gitlab/gitlab-utils.sh | 17 +++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) create mode 100755 .gitlab/gitlab-utils.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f8628b7e87f..af4227e9d9a 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -110,17 +110,20 @@ default: - workspace policy: $BUILD_CACHE_POLICY before_script: + - source .gitlab/gitlab-utils.sh - export GRADLE_USER_HOME=`pwd`/.gradle - export GRADLE_ARGS=" --build-cache --stacktrace --no-daemon --parallel --max-workers=$GRADLE_WORKERS" - *normalize_node_index # for weird reasons, gradle will always "chmod 700" the .gradle folder # with Gitlab caching, .gradle is always owned by root and thus gradle's chmod invocation fails # This dance is a hack to have .gradle owned by the Gitlab runner user + - gitlab_section_start "gradle-dance" "Fix .gradle directory permissions" - mkdir -p .gradle - cp -r .gradle .gradle-copy - rm -rf .gradle - mv .gradle-copy .gradle - ls -la + - gitlab_section_end "gradle-dance" build: extends: .gradle_build @@ -182,7 +185,10 @@ test_published_artifacts: - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx512M -Xms512M -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew check --info $GRADLE_ARGS after_script: + - source .gitlab/gitlab-utils.sh + - gitlab_section_start "collect-reports" "Collecting reports" - .circleci/collect_reports.sh + - gitlab_section_end "collect-reports" artifacts: when: always paths: @@ -197,7 +203,10 @@ test_published_artifacts: script: - ./gradlew $GRADLE_TARGET -PskipTests -PrunBuildSrcTests -PskipSpotless -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS after_script: + - source .gitlab/gitlab-utils.sh + - gitlab_section_start "collect-reports" "Collecting reports" - .circleci/collect_reports.sh --destination ./check_reports --move + - gitlab_section_end "collect-reports" artifacts: when: always paths: @@ -247,7 +256,10 @@ muzzle: - split --number=l/$NORMALIZED_NODE_TOTAL --suffix-length=1 --numeric-suffixes sortedMuzzleTasks muzzleSplit - ./gradlew `cat muzzleSplit${NORMALIZED_NODE_INDEX} | xargs` $GRADLE_ARGS after_script: + - source .gitlab/gitlab-utils.sh + - gitlab_section_start "collect-reports" "Collecting reports" - .circleci/collect_reports.sh + - gitlab_section_end "collect-reports" artifacts: when: always paths: @@ -311,11 +323,13 @@ muzzle-dep-report: after_script: - *restore_pretest_env - *set_datadog_api_keys + - source .gitlab/gitlab-utils.sh + - gitlab_section_start "collect-reports" "Collecting reports" - .circleci/collect_reports.sh - if [ "$PROFILE_TESTS" == "true" ]; then .circleci/collect_profiles.sh; fi - .circleci/collect_results.sh - .circleci/upload_ciapp.sh tests $testJvm -# TODO Get APM Test Agent Trace Check Results + - gitlab_section_end "collect-reports" artifacts: when: always paths: diff --git a/.gitlab/gitlab-utils.sh 
b/.gitlab/gitlab-utils.sh new file mode 100755 index 00000000000..6a668fe2250 --- /dev/null +++ b/.gitlab/gitlab-utils.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +# From https://docs.gitlab.com/ci/jobs/job_logs/#use-a-script-to-improve-display-of-collapsible-sections +# function for starting the section +function gitlab_section_start () { + local section_title="${1}" + local section_description="${2:-$section_title}" + + echo -e "section_start:`date +%s`:${section_title}[collapsed=true]\r\e[0K${section_description}" +} + +# Function for ending the section +function gitlab_section_end () { + local section_title="${1}" + + echo -e "section_end:`date +%s`:${section_title}\r\e[0K" +} From 87bffbf2c6096d059aab3fb480d2418c4c51aeee Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 11 Mar 2025 16:29:34 -0400 Subject: [PATCH 012/113] Allow individual tests to control gradle memory --- .gitlab-ci.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index af4227e9d9a..b00476ff983 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -37,10 +37,10 @@ variables: # - "21" # - "ubuntu17" # - "zulu8" - - "semeru8" +# - "semeru8" # - "ibm8" # - "zulu11" - - "11" +# - "11" # - "17" # - "semeru17" @@ -94,7 +94,7 @@ default: MAVEN_OPTS: "-Xms64M -Xmx512M" GRADLE_WORKERS: 2 KUBERNETES_CPU_REQUEST: 8 - KUBERNETES_MEMORY_REQUEST: 6Gi + KUBERNETES_MEMORY_REQUEST: 8Gi cache: - key: '$CI_SERVER_VERSION-v2' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months paths: @@ -182,7 +182,7 @@ test_published_artifacts: - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew publishToMavenLocal $GRADLE_ARGS - cd test-published-dependencies - - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx512M -Xms512M -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" + - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx1G -Xms1G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew check --info $GRADLE_ARGS after_script: - source .gitlab/gitlab-utils.sh @@ -308,6 +308,7 @@ muzzle-dep-report: KUBERNETES_MEMORY_REQUEST: 16Gi KUBERNETES_MEMORY_LIMIT: 16Gi GRADLE_WORKERS: 4 + GRADLE_MEM: 3G BUILD_CACHE_TYPE: lib GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" @@ -318,7 +319,7 @@ muzzle-dep-report: export PROFILER_COMMAND="-XX:StartFlightRecording=settings=profile,filename=/tmp/${CI_JOB_NAME_SLUG}.jfr,dumponexit=true"; fi - *prepare_test_env - - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms2G -Xmx2G $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M -Ddatadog.forkedMinHeapSize=128M" + - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms$GRADLE_MEM -Xmx$GRADLE_MEM $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M -Ddatadog.forkedMinHeapSize=128M" - ./gradlew $GRADLE_TARGET $GRADLE_PARAMS -PtestJvm=$testJvm -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS --continue || $CONTINUE_ON_FAILURE after_script: - *restore_pretest_env From 6b6d4e582200c0c00c75a65916f554ca203b68e3 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 11 Mar 2025 16:50:26 -0400 Subject: [PATCH 013/113] remove a little of the reportspam --- 
.circleci/collect_reports.sh | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.circleci/collect_reports.sh b/.circleci/collect_reports.sh index 9e085003c62..ce66a61358e 100755 --- a/.circleci/collect_reports.sh +++ b/.circleci/collect_reports.sh @@ -35,12 +35,12 @@ done mkdir -p $REPORTS_DIR >/dev/null 2>&1 -cp /tmp/hs_err_pid*.log $REPORTS_DIR || true -cp /tmp/java_pid*.hprof $REPORTS_DIR || true -cp /tmp/javacore.* $REPORTS_DIR || true -cp /tmp/*.trc $REPORTS_DIR || true -cp /tmp/*.dmp $REPORTS_DIR || true -cp /tmp/dd-profiler/*.jfr $REPORTS_DIR || true +cp /tmp/hs_err_pid*.log $REPORTS_DIR 2>/dev/null || true +cp /tmp/java_pid*.hprof $REPORTS_DIR 2>/dev/null || true +cp /tmp/javacore.* $REPORTS_DIR 2>/dev/null || true +cp /tmp/*.trc $REPORTS_DIR 2>/dev/null || true +cp /tmp/*.dmp $REPORTS_DIR 2>/dev/null || true +cp /tmp/dd-profiler/*.jfr $REPORTS_DIR 2>/dev/null || true function process_reports () { project_to_save=$1 @@ -59,9 +59,9 @@ function process_reports () { else echo "copying reports for $project_to_save" mkdir -p $report_path - cp -r workspace/$project_to_save/build/reports/* $report_path/ || true - cp workspace/$project_to_save/build/hs_err_pid*.log $report_path/ || true - cp workspace/$project_to_save/build/javacore*.txt $report_path/ || true + cp -r workspace/$project_to_save/build/reports/* $report_path/ 2>/dev/null || true + cp workspace/$project_to_save/build/hs_err_pid*.log $report_path/ 2>/dev/null || true + cp workspace/$project_to_save/build/javacore*.txt $report_path/ 2>/dev/null || true fi } @@ -73,4 +73,4 @@ for report_path in workspace/**/build/reports; do process_reports $report_path done -tar -cvzf reports.tar $REPORTS_DIR +tar -czf reports.tar $REPORTS_DIR From b2340e0f5ed4c0b3b77f053ebb7b93ab4927d000 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 12 Mar 2025 16:42:15 -0400 Subject: [PATCH 014/113] use concrete docker image tags --- .../test/groovy/EventBridgeClientTest.groovy | 2 +- .../src/test/groovy/S3ClientTest.groovy | 2 +- .../groovy/PayloadTaggingTest.groovy | 2 +- .../src/test/groovy/SfnClientTest.groovy | 2 +- .../src/test/groovy/SnsClientTest.groovy | 2 +- .../src/test/groovy/SnsClientTest.groovy | 2 +- .../RemoteJDBCInstrumentationTest.groovy | 2 +- ...kaClientCustomPropagationConfigTest.groovy | 5 +++-- .../test/groovy/KafkaClientTestBase.groovy | 19 ++++++++++--------- .../groovy/Lettuce5AsyncClientTest.groovy | 3 ++- .../groovy/Lettuce5ReactiveClientTest.groovy | 4 +++- .../test/groovy/Lettuce5SyncClientTest.groovy | 4 +++- .../SpringBootMongoIntegrationTest.groovy | 2 +- 13 files changed, 29 insertions(+), 22 deletions(-) diff --git a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/test/groovy/EventBridgeClientTest.groovy b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/test/groovy/EventBridgeClientTest.groovy index 60b2d63ecb6..4a4c445964e 100644 --- a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/test/groovy/EventBridgeClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/test/groovy/EventBridgeClientTest.groovy @@ -21,7 +21,7 @@ import java.time.Duration import java.util.concurrent.CompletableFuture class EventBridgeClientTest extends AgentTestRunner { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) .withEnv("SERVICES", "sns,sqs,events") .withReuse(true) 
diff --git a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy index fe9decb6f11..9fe676f0b52 100644 --- a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy @@ -16,7 +16,7 @@ import spock.lang.Shared import java.time.Duration class S3ClientTest extends AgentTestRunner { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) .withEnv("SERVICES", "s3") .withReuse(true) diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/payloadTaggingTest/groovy/PayloadTaggingTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/payloadTaggingTest/groovy/PayloadTaggingTest.groovy index 146d0085709..b321fb276ec 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/payloadTaggingTest/groovy/PayloadTaggingTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/payloadTaggingTest/groovy/PayloadTaggingTest.groovy @@ -26,7 +26,7 @@ abstract class AbstractPayloadTaggingTest extends AgentTestRunner { static final Object NA = {} static final int DEFAULT_PORT = 4566 - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(DEFAULT_PORT) .withEnv("SERVICES", "apigateway,events,s3,sns,sqs,kinesis") .withReuse(true) diff --git a/dd-java-agent/instrumentation/aws-java-sfn-2.0/src/test/groovy/SfnClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sfn-2.0/src/test/groovy/SfnClientTest.groovy index 3298ff48ee9..b90c4b4b131 100644 --- a/dd-java-agent/instrumentation/aws-java-sfn-2.0/src/test/groovy/SfnClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sfn-2.0/src/test/groovy/SfnClientTest.groovy @@ -25,7 +25,7 @@ abstract class SfnClientTest extends VersionedNamingTestBase { @Shared Object endPoint def setupSpec() { - localStack = new GenericContainer(DockerImageName.parse("localstack/localstack")) + localStack = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) .withEnv("SERVICES", "stepfunctions") .withReuse(true) diff --git a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy index d008f7626f8..1653ecfa586 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy @@ -27,7 +27,7 @@ import java.time.Duration abstract class SnsClientTest extends VersionedNamingTestBase { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) // Default LocalStack port .withEnv("SERVICES", "sns,sqs") // Enable SNS and SQS service .withReuse(true) diff --git a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy index c0066e3595e..3e40aa138dc 100644 --- 
a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy @@ -24,7 +24,7 @@ import java.time.Duration import static datadog.trace.agent.test.utils.TraceUtils.basicSpan abstract class SnsClientTest extends VersionedNamingTestBase { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) // Default LocalStack port .withEnv("SERVICES", "sns,sqs") // Enable SNS and SQS service .withReuse(true) diff --git a/dd-java-agent/instrumentation/jdbc/src/test/groovy/RemoteJDBCInstrumentationTest.groovy b/dd-java-agent/instrumentation/jdbc/src/test/groovy/RemoteJDBCInstrumentationTest.groovy index 581384efd86..92a7ceb50d7 100644 --- a/dd-java-agent/instrumentation/jdbc/src/test/groovy/RemoteJDBCInstrumentationTest.groovy +++ b/dd-java-agent/instrumentation/jdbc/src/test/groovy/RemoteJDBCInstrumentationTest.groovy @@ -183,7 +183,7 @@ abstract class RemoteJDBCInstrumentationTest extends VersionedNamingTestBase { } def setupSpec() { - postgres = new PostgreSQLContainer("postgres:11.1") + postgres = new PostgreSQLContainer("postgres:11.2") .withDatabaseName(dbName.get(POSTGRESQL)).withUsername(jdbcUserNames.get(POSTGRESQL)).withPassword(jdbcPasswords.get(POSTGRESQL)) postgres.start() PortUtils.waitForPortToOpen(postgres.getHost(), postgres.getMappedPort(PostgreSQLContainer.POSTGRESQL_PORT), 5, TimeUnit.SECONDS) diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientCustomPropagationConfigTest.groovy b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientCustomPropagationConfigTest.groovy index 5db495e5904..5b01035207e 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientCustomPropagationConfigTest.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientCustomPropagationConfigTest.groovy @@ -27,6 +27,7 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan import static datadog.trace.instrumentation.kafka_clients38.KafkaDecorator.KAFKA_PRODUCE class KafkaClientCustomPropagationConfigTest extends AgentTestRunner { + static final DOCKER_IMAGE_NAME = DockerImageName.parse("confluentinc/cp-kafka:7.8.0") static final SHARED_TOPIC = ["topic1", "topic2", "topic3", "topic4"] static final MESSAGE = "Testing without headers for certain topics" @@ -58,7 +59,7 @@ class KafkaClientCustomPropagationConfigTest extends AgentTestRunner { def "test kafka client header propagation with topic filters"() { setup: injectSysConfig(TraceInstrumentationConfig.KAFKA_CLIENT_PROPAGATION_DISABLED_TOPICS, value as String) - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS","topic1,topic2,topic3,topic4") + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS","topic1,topic2,topic3,topic4") kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) @@ -166,7 +167,7 @@ class KafkaClientCustomPropagationConfigTest extends AgentTestRunner { def "test consumer with topic filters"() { setup: injectSysConfig(TraceInstrumentationConfig.KAFKA_CLIENT_PROPAGATION_DISABLED_TOPICS, value as String) - 
KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS","topic1,topic2,topic3,topic4") + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS","topic1,topic2,topic3,topic4") kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) def producerFactory = new DefaultKafkaProducerFactory(senderProps) diff --git a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy index 0f952574a69..13ed8f4236f 100644 --- a/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy +++ b/dd-java-agent/instrumentation/kafka-clients-3.8/src/test/groovy/KafkaClientTestBase.groovy @@ -42,6 +42,7 @@ import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.isAsyncPropagationEnabled abstract class KafkaClientTestBase extends VersionedNamingTestBase { + static final DOCKER_IMAGE_NAME = DockerImageName.parse("confluentinc/cp-kafka:7.8.0") static final SHARED_TOPIC = "shared.topic" static final String MESSAGE = "Testing without headers for certain topics" @@ -155,7 +156,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { def "test kafka produce and consume"() { setup: // Create and start a Kafka container using Testcontainers - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) @@ -298,7 +299,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { def "test producing message too large"() { setup: // set a low max request size, so that we can crash it - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) @@ -320,7 +321,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { @Flaky def "test spring kafka template produce and consume"() { setup: - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) @@ -464,7 +465,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { @Flaky def "test pass through tombstone"() { setup: - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", 
SHARED_TOPIC) + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) @@ -536,7 +537,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { @Flaky def "test records(TopicPartition) kafka consume"() { setup: - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) kafkaContainer.start() // set up the Kafka consumer properties @@ -598,7 +599,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { def "test records(TopicPartition).subList kafka consume"() { setup: - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) @@ -660,7 +661,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { @Flaky def "test records(TopicPartition).forEach kafka consume"() { setup: - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) @@ -722,7 +723,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { @Flaky def "test iteration backwards over ConsumerRecords"() { setup: - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) @@ -837,7 +838,7 @@ abstract class KafkaClientTestBase extends VersionedNamingTestBase { @Flaky def "test kafka client header propagation manual config"() { setup: - KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:latest")).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) + KafkaContainer kafkaContainer = new KafkaContainer(DOCKER_IMAGE_NAME).withEmbeddedZookeeper().withEnv("KAFKA_CREATE_TOPICS", SHARED_TOPIC) kafkaContainer.start() def senderProps = KafkaTestUtils.producerProps(kafkaContainer.getBootstrapServers()) diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy index 09eac435c21..9471514cfed 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy +++ 
b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy @@ -15,6 +15,7 @@ import io.lettuce.core.api.sync.RedisCommands import io.lettuce.core.codec.StringCodec import io.lettuce.core.protocol.AsyncCommand import org.testcontainers.containers.wait.strategy.Wait +import org.testcontainers.utility.DockerImageName import spock.lang.Shared import spock.util.concurrent.AsyncConditions @@ -49,7 +50,7 @@ abstract class Lettuce5AsyncClientTest extends VersionedNamingTestBase { String embeddedDbUri @Shared - RedisContainer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME) + RedisContainer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) .waitingFor(Wait.forListeningPort()) @Shared diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy index fde1401b52a..f4e55eee299 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX @@ -37,7 +39,7 @@ abstract class Lettuce5ReactiveClientTest extends VersionedNamingTestBase { int port @Shared - RedisContainer redisServer = new RedisContainer(RedisContainer.DEFAULT_TAG) + RedisContainer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) .waitingFor(Wait.forListeningPort()) RedisClient redisClient diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy index ee7182481cf..e868500afae 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX import com.redis.testcontainers.RedisContainer @@ -35,7 +37,7 @@ abstract class Lettuce5SyncClientTest extends VersionedNamingTestBase { String embeddedDbUri @Shared - RedisContainer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME) + RedisContainer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) .waitingFor(Wait.forListeningPort()) @Shared diff --git a/dd-smoke-tests/springboot-mongo/src/test/groovy/datadog/smoketest/SpringBootMongoIntegrationTest.groovy b/dd-smoke-tests/springboot-mongo/src/test/groovy/datadog/smoketest/SpringBootMongoIntegrationTest.groovy index 72b33709b1d..f9d9c4d8930 100644 --- a/dd-smoke-tests/springboot-mongo/src/test/groovy/datadog/smoketest/SpringBootMongoIntegrationTest.groovy +++ b/dd-smoke-tests/springboot-mongo/src/test/groovy/datadog/smoketest/SpringBootMongoIntegrationTest.groovy @@ -23,7 +23,7 @@ class SpringBootMongoIntegrationTest extends AbstractServerSmokeTest { @Override void beforeProcessBuilders() { - mongoDbContainer = new MongoDBContainer(DockerImageName.parse("mongo:4.0.10")) + mongoDbContainer = new MongoDBContainer(DockerImageName.parse("mongo:4.4.29")) mongoDbContainer.start() mongoDbUri = 
mongoDbContainer.replicaSetUrl } From 42784ee0c9933977293b22e28ac63cd3c893d01f Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 14 Mar 2025 13:26:56 -0400 Subject: [PATCH 015/113] configure test containers to use mirror --- .gitlab-ci.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b00476ff983..4a08a4a21f3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -312,6 +312,9 @@ muzzle-dep-report: BUILD_CACHE_TYPE: lib GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" + TESTCONTAINERS_CHECKS_DISABLE: "true" + TESTCONTAINERS_RYUK_DISABLED: "true" + TESTCONTAINERS_HUB_IMAGE_NAME_PREFIX: "registry.ddbuild.io/images/mirror/" script: - > if [ "$PROFILE_TESTS" == "true" ] && [ "$testJvm" != "ibm8" ] && [ "$testJvm" != "oracle8" ]; From e781c3bce409be9c53cbceda5c17bc1d8a3a6de3 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 14 Mar 2025 14:33:58 -0400 Subject: [PATCH 016/113] fix limit --- .gitlab-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a8944540896..64e2349cc69 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -96,6 +96,7 @@ default: GRADLE_WORKERS: 2 KUBERNETES_CPU_REQUEST: 8 KUBERNETES_MEMORY_REQUEST: 8Gi + KUBERNETES_MEMORY_LIMIT: 8Gi cache: - key: '$CI_SERVER_VERSION-v2' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months paths: From ffb90db96f4784a46ea74f72e2c4d12d41fc0cbe Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 17 Mar 2025 11:30:17 -0400 Subject: [PATCH 017/113] fix missing container versions --- .../src/test/groovy/DynamoDbClientTest.groovy | 2 +- .../redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy | 4 +++- .../redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy | 4 +++- .../redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy | 4 +++- .../src/test/groovy/VertxRedisTestBase.groovy | 4 +++- 5 files changed, 13 insertions(+), 5 deletions(-) diff --git a/dd-java-agent/instrumentation/aws-java-dynamodb-2.0/src/test/groovy/DynamoDbClientTest.groovy b/dd-java-agent/instrumentation/aws-java-dynamodb-2.0/src/test/groovy/DynamoDbClientTest.groovy index 60ca0ad91f9..e42b2a4da5b 100644 --- a/dd-java-agent/instrumentation/aws-java-dynamodb-2.0/src/test/groovy/DynamoDbClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-dynamodb-2.0/src/test/groovy/DynamoDbClientTest.groovy @@ -29,7 +29,7 @@ import spock.lang.Shared import java.time.Duration class DynamoDbClientTest extends AgentTestRunner { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) .withEnv("SERVICES", "dynamodb") .withReuse(true) diff --git a/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy index abf5d5bb63f..db6dfdcde1e 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.api.config.TraceInstrumentationConfig.DB_CLIENT_HOST_SPLIT_BY_INSTANCE import com.redis.testcontainers.RedisContainer @@ -18,7 +20,7 @@ import spock.lang.Shared abstract class RedissonClientTest 
extends VersionedNamingTestBase { @Shared - RedisServer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME).waitingFor(Wait.forListeningPort()) + RedisServer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")).waitingFor(Wait.forListeningPort()) @Shared Config config = new Config() diff --git a/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy index 2345f6de2cd..d6c71465cd3 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.api.config.TraceInstrumentationConfig.DB_CLIENT_HOST_SPLIT_BY_INSTANCE import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan @@ -17,7 +19,7 @@ import spock.lang.Shared abstract class RedissonClientTest extends VersionedNamingTestBase { @Shared - RedisServer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME).waitingFor(Wait.forListeningPort()) + RedisServer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")).waitingFor(Wait.forListeningPort()) @Shared Config config = new Config() diff --git a/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy index e215da16ffb..4ed910249cd 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.api.config.TraceInstrumentationConfig.DB_CLIENT_HOST_SPLIT_BY_INSTANCE import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan @@ -17,7 +19,7 @@ import spock.lang.Shared abstract class RedissonClientTest extends VersionedNamingTestBase { @Shared - RedisServer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME).waitingFor(Wait.forListeningPort()) + RedisServer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")).waitingFor(Wait.forListeningPort()) @Shared Config config = new Config() diff --git a/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy b/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy index d4c8151e9ee..7feed90fd12 100644 --- a/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy +++ b/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.agent.test.utils.TraceUtils.basicSpan import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan @@ -33,7 +35,7 @@ abstract class VertxRedisTestBase extends VersionedNamingTestBase { @AutoCleanup(value = "stop") @Shared - def redisServer = new 
RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME.withTag(RedisContainer.DEFAULT_TAG)) + def redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) .waitingFor(Wait.forListeningPort()) @Shared From c18b491f599f84c16a89b097000fbf6b18ad8999 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 17 Mar 2025 11:30:38 -0400 Subject: [PATCH 018/113] Better gitlab naming with smarter split --- .gitlab-ci.yml | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 64e2349cc69..643b4ad6c74 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -49,23 +49,19 @@ variables: # These blocks emulate "parallel" by including it in the matrix .test_matrix_4: &test_matrix_4 - testJvm: *test_jvms - CI_NODE_INDEX: ["1", "2", "3", "4"] - CI_NODE_TOTAL: 4 + CI_SPLIT: ["1/4", "2/4", "3/4", "4/4"] .test_matrix_6: &test_matrix_6 - testJvm: *test_jvms - CI_NODE_INDEX: ["1", "2", "3", "4", "5", "6"] - CI_NODE_TOTAL: 6 + CI_SPLIT: ["1/6", "2/6", "3/6", "4/6", "5/6", "6/6"] .test_matrix_8: &test_matrix_8 - testJvm: *test_jvms - CI_NODE_INDEX: ["1", "2", "3", "4", "5", "6", "7", "8"] - CI_NODE_TOTAL: 8 + CI_SPLIT: ["1/8", "2/8", "3/8", "4/8", "5/8", "6/8", "7/8", "8/8"] .test_matrix_12: &test_matrix_12 - testJvm: *test_jvms - CI_NODE_INDEX: ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12"] - CI_NODE_TOTAL: 12 + CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] default: tags: [ "arch:amd64" ] @@ -82,6 +78,7 @@ default: # CI_NODE_INDEX and CI_NODE_TOTAL are 1-indexed and not always set. These steps normalize the numbers for jobs .normalize_node_index: &normalize_node_index + - if [ -n "$CI_SPLIT" ]; then CI_NODE_INDEX="${CI_SPLIT%%/*}"; CI_NODE_TOTAL="${CI_SPLIT##*/}"; fi - echo "CI_NODE_TOTAL=${CI_NODE_TOTAL}, CI_NODE_INDEX=$CI_NODE_INDEX" - export NORMALIZED_NODE_TOTAL=${CI_NODE_TOTAL:-1} - ONE_INDEXED_NODE_INDEX=${CI_NODE_INDEX:-1}; export NORMALIZED_NODE_INDEX=$((ONE_INDEXED_NODE_INDEX - 1)) @@ -401,8 +398,7 @@ test_inst_latest: - testJvm: ["8", "17", "21" ] # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time # This emulates "parallel" by including it in the matrix - CI_NODE_INDEX: [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12" ] - CI_NODE_TOTAL: 12 + CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] test_flaky: extends: .test_job_with_test_agent @@ -415,8 +411,7 @@ test_flaky: - GRADLE_TARGET: [":baseTest", ":instrumentationTest", ":smokeTest", ":debuggerTest"] # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time # This emulates "parallel" by including it in the matrix - CI_NODE_INDEX: [ "1", "2", "3", "4" ] - CI_NODE_TOTAL: 4 + CI_SPLIT: [ "1/4", "2/4", "3/4", "4/4" ] test_flaky_inst: extends: .test_job @@ -429,8 +424,7 @@ test_flaky_inst: matrix: # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time # This emulates "parallel" by including it in the matrix - - CI_NODE_INDEX: [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12" ] - CI_NODE_TOTAL: 12 + - CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] test_profiling: extends: .test_job From a09c6266733853b722390985060e9a36037e2d73 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 17 Mar 2025 11:51:25 -0400 Subject: [PATCH 019/113] update caches with test dep cache, save test results 
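Note on the CI_SPLIT entries introduced above: each matrix value packs the one-based node index and the node count into a single "index/total" string, which the before_script splits with shell parameter expansion before the usual normalization runs. A minimal sketch of that decomposition, using an assumed example value:

    CI_SPLIT="${CI_SPLIT:-3/12}"       # assumed example; the matrix entries range from "1/12" to "12/12"
    CI_NODE_INDEX="${CI_SPLIT%%/*}"    # text before the first "/" -> "3"
    CI_NODE_TOTAL="${CI_SPLIT##*/}"    # text after the last "/"  -> "12"
    echo "CI_NODE_INDEX=${CI_NODE_INDEX} CI_NODE_TOTAL=${CI_NODE_TOTAL}"
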
--- .gitlab-ci.yml | 43 ++++++++++++++++++++++++++++++++++++------- 1 file changed, 36 insertions(+), 7 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 643b4ad6c74..fe5d89db3e1 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -88,14 +88,15 @@ default: image: ghcr.io/datadog/dd-trace-java-docker-build:${JAVA_BUILD_IMAGE_VERSION}-base stage: build variables: - GRADLE_OPTS: "-Dorg.gradle.jvmargs='-Xmx2560M -Xms2560M -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" MAVEN_OPTS: "-Xms64M -Xmx512M" GRADLE_WORKERS: 2 + GRADLE_MEM: 2560M KUBERNETES_CPU_REQUEST: 8 KUBERNETES_MEMORY_REQUEST: 8Gi KUBERNETES_MEMORY_LIMIT: 8Gi + BUILD_CACHE_TYPE: lib #default cache: - - key: '$CI_SERVER_VERSION-v2' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months + - key: '$CI_SERVER_VERSION-$BUILD_CACHE_TYPE' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months paths: # Cached dependencies and wrappers for gradle - .gradle/wrapper @@ -111,6 +112,7 @@ default: before_script: - source .gitlab/gitlab-utils.sh - export GRADLE_USER_HOME=`pwd`/.gradle + - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx$GRADLE_MEM -Xms$GRADLE_MEM -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - export GRADLE_ARGS=" --build-cache --stacktrace --no-daemon --parallel --max-workers=$GRADLE_WORKERS -PmavenRepositoryProxy=$MAVEN_REPOSITORY_PROXY" - *normalize_node_index # for weird reasons, gradle will always "chmod 700" the .gradle folder @@ -147,6 +149,19 @@ build: reports: dotenv: build.env +build_tests: + extends: .gradle_build + variables: + BUILD_CACHE_POLICY: push + BUILD_CACHE_TYPE: test + DEPENDENCY_CACHE_POLICY: pull + GRADLE_MEM: 4G + GRADLE_WORKERS: 4 + KUBERNETES_MEMORY_REQUEST: 18Gi + KUBERNETES_MEMORY_LIMIT: 18Gi + script: + - ./gradlew clean :baseTest :instrumentationTest :instrumentationLatestDepTest :instrumentationTest :profilingTest -PskipTests $GRADLE_ARGS + build_and_populate_dep_cache: extends: build variables: @@ -158,6 +173,17 @@ build_and_populate_dep_cache: - when: manual allow_failure: true +build_and_populate_test_dep_cache: + extends: build_tests + variables: + BUILD_CACHE_POLICY: push + DEPENDENCY_CACHE_POLICY: push + rules: + - if: '$POPULATE_CACHE' + when: on_success + - when: manual + allow_failure: true + spotless: extends: .gradle_build stage: tests @@ -241,11 +267,11 @@ check_debugger: muzzle: extends: .gradle_build - needs: [ build ] + needs: [ build_tests ] stage: tests parallel: 8 variables: - BUILD_CACHE_TYPE: lib + BUILD_CACHE_TYPE: test script: - export SKIP_BUILDSCAN="true" - ./gradlew writeMuzzleTasksToFile $GRADLE_ARGS @@ -265,7 +291,7 @@ muzzle: muzzle-dep-report: extends: .gradle_build - needs: [ build ] + needs: [ build_tests ] stage: tests variables: BUILD_CACHE_TYPE: test @@ -299,14 +325,14 @@ muzzle-dep-report: extends: .gradle_build image: ghcr.io/datadog/dd-trace-java-docker-build:$testJvm tags: [ "docker-in-docker:amd64" ] # use docker-in-docker runner for testcontainers - needs: [ build ] + needs: [ build_tests ] stage: tests variables: KUBERNETES_MEMORY_REQUEST: 16Gi KUBERNETES_MEMORY_LIMIT: 16Gi GRADLE_WORKERS: 4 GRADLE_MEM: 3G - BUILD_CACHE_TYPE: lib + BUILD_CACHE_TYPE: test GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" TESTCONTAINERS_CHECKS_DISABLE: "true" @@ -319,6 +345,7 @@ muzzle-dep-report: export 
PROFILER_COMMAND="-XX:StartFlightRecording=settings=profile,filename=/tmp/${CI_JOB_NAME_SLUG}.jfr,dumponexit=true"; fi - *prepare_test_env + - exit 1 - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms$GRADLE_MEM -Xmx$GRADLE_MEM $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M -Ddatadog.forkedMinHeapSize=128M" - ./gradlew $GRADLE_TARGET $GRADLE_PARAMS -PtestJvm=$testJvm -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS --continue || $CONTINUE_ON_FAILURE after_script: @@ -338,6 +365,8 @@ muzzle-dep-report: - ./profiles.tar - ./results - '.gradle/daemon/*/*.out.log' + reports: + junit: results/*.xml .test_job_with_test_agent: extends: .test_job From 97d5bc6aea1883d9c185d68682735d299d378f87 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 19 Mar 2025 15:08:13 -0400 Subject: [PATCH 020/113] Cleanup repositories {} blocks --- build.gradle | 7 +------ buildSrc/build.gradle.kts | 6 ++---- .../build.gradle.kts | 6 ++---- dd-java-agent/agent-ci-visibility/build.gradle | 6 ++++++ dd-java-agent/benchmark-integration/build.gradle | 6 ++++++ .../play-perftest/build.gradle | 7 ++++++- dd-java-agent/instrumentation/build.gradle | 6 ++++++ .../instrumentation/datanucleus-4/build.gradle | 7 +++++++ dd-java-agent/testing/build.gradle | 7 ------- dd-smoke-tests/apm-tracing-disabled/build.gradle | 4 ---- .../armeria-grpc/application/settings.gradle | 5 +++++ .../datastreams/kafkaschemaregistry/build.gradle | 8 -------- dd-smoke-tests/grpc-1.5/build.gradle | 5 ----- dd-smoke-tests/kafka-3/application/settings.gradle | 5 +++++ dd-smoke-tests/play-2.4/build.gradle | 7 ++++++- dd-smoke-tests/play-2.5/build.gradle | 7 ++++++- dd-smoke-tests/play-2.6/build.gradle | 7 ++++++- dd-smoke-tests/play-2.7/build.gradle | 7 ++++++- dd-smoke-tests/play-2.8-otel/build.gradle | 7 ++++++- dd-smoke-tests/play-2.8-split-routes/build.gradle | 7 ++++++- dd-smoke-tests/play-2.8/build.gradle | 7 ++++++- .../quarkus-native/application/settings.gradle | 5 +++++ dd-smoke-tests/quarkus/application/settings.gradle | 5 +++++ .../spring-boot-2.5-webflux/build.gradle | 6 ------ .../application/settings.gradle | 5 +++++ .../application/settings.gradle | 5 +++++ .../application/settings.gradle | 5 +++++ .../application/settings.gradle | 5 +++++ .../application/settings.gradle | 5 +++++ dd-smoke-tests/springboot-freemarker/build.gradle | 4 ---- dd-smoke-tests/springboot-java-11/build.gradle | 4 ---- dd-smoke-tests/springboot-java-17/build.gradle | 4 ---- dd-smoke-tests/springboot-jetty-jsp/build.gradle | 4 ---- dd-smoke-tests/springboot-thymeleaf/build.gradle | 4 ---- dd-smoke-tests/springboot-tomcat-jsp/build.gradle | 4 ---- dd-smoke-tests/wildfly/spring-ear/settings.gradle | 5 +++++ dd-smoke-tests/wildfly/spring-ear/war/build.gradle | 6 ++++++ gradle/repositories.gradle | 14 +++++++++++++- remote-config/remote-config-core/build.gradle | 6 ------ settings.gradle | 13 +++++++++++++ test-published-dependencies/build.gradle | 7 +------ 41 files changed, 161 insertions(+), 89 deletions(-) diff --git a/build.gradle b/build.gradle index f655620bb00..8629065f394 100644 --- a/build.gradle +++ b/build.gradle @@ -32,6 +32,7 @@ description = 'dd-trace-java' def isCI = System.getenv("CI") != null +apply from: "$rootDir/gradle/repositories.gradle" apply from: "$rootDir/gradle/scm.gradle" spotless { // only resolve the spotless dependencies once in the build @@ -79,12 +80,6 @@ allprojects { } } -repositories { - 
mavenLocal() - mavenCentral() - gradlePluginPortal() -} - tasks.register("latestDepTest") nexusPublishing { diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts index 3b67df19176..e88a6807bcf 100644 --- a/buildSrc/build.gradle.kts +++ b/buildSrc/build.gradle.kts @@ -22,10 +22,8 @@ gradlePlugin { } } -repositories { - mavenLocal() - mavenCentral() - gradlePluginPortal() +apply { + from("$rootDir/../gradle/repositories.gradle") } dependencies { diff --git a/buildSrc/call-site-instrumentation-plugin/build.gradle.kts b/buildSrc/call-site-instrumentation-plugin/build.gradle.kts index 8e21a9af7ab..d955d11cd26 100644 --- a/buildSrc/call-site-instrumentation-plugin/build.gradle.kts +++ b/buildSrc/call-site-instrumentation-plugin/build.gradle.kts @@ -24,10 +24,8 @@ spotless { } } -repositories { - mavenLocal() - mavenCentral() - gradlePluginPortal() +apply { + from("$rootDir/../gradle/repositories.gradle") } dependencies { diff --git a/dd-java-agent/agent-ci-visibility/build.gradle b/dd-java-agent/agent-ci-visibility/build.gradle index 4cd0f623c03..39663f247ce 100644 --- a/dd-java-agent/agent-ci-visibility/build.gradle +++ b/dd-java-agent/agent-ci-visibility/build.gradle @@ -1,5 +1,11 @@ buildscript { repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() } diff --git a/dd-java-agent/benchmark-integration/build.gradle b/dd-java-agent/benchmark-integration/build.gradle index c367c4fa342..8e020260d5e 100644 --- a/dd-java-agent/benchmark-integration/build.gradle +++ b/dd-java-agent/benchmark-integration/build.gradle @@ -1,5 +1,11 @@ buildscript { repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-java-agent/benchmark-integration/play-perftest/build.gradle b/dd-java-agent/benchmark-integration/play-perftest/build.gradle index 52540ef3ef5..9fe22103921 100644 --- a/dd-java-agent/benchmark-integration/play-perftest/build.gradle +++ b/dd-java-agent/benchmark-integration/play-perftest/build.gradle @@ -26,8 +26,13 @@ dependencies { } repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() - gradlePluginPortal() maven { name "lightbend-maven-releases" url "https://repo.lightbend.com/lightbend/maven-release" diff --git a/dd-java-agent/instrumentation/build.gradle b/dd-java-agent/instrumentation/build.gradle index 777de23d76f..1852c7a6854 100644 --- a/dd-java-agent/instrumentation/build.gradle +++ b/dd-java-agent/instrumentation/build.gradle @@ -2,6 +2,12 @@ buildscript { repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() } diff --git a/dd-java-agent/instrumentation/datanucleus-4/build.gradle b/dd-java-agent/instrumentation/datanucleus-4/build.gradle index 5d999846521..330489573b4 100644 --- a/dd-java-agent/instrumentation/datanucleus-4/build.gradle +++ b/dd-java-agent/instrumentation/datanucleus-4/build.gradle @@ -2,7 +2,14 @@ import org.datanucleus.enhancer.DataNucleusEnhancer buildscript { repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url 
project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() + gradlePluginPortal() } dependencies { diff --git a/dd-java-agent/testing/build.gradle b/dd-java-agent/testing/build.gradle index 859a5d9ecd5..3403b5ca8b9 100644 --- a/dd-java-agent/testing/build.gradle +++ b/dd-java-agent/testing/build.gradle @@ -9,13 +9,6 @@ ext { apply from: "$rootDir/gradle/java.gradle" -repositories { - ivy { - artifactPattern 'https://sqreen-ci-java.s3.amazonaws.com/jars/[organisation]/[artifact]-[revision](-[classifier]).[ext]' - ivyPattern 'https://sqreen-ci-java.s3.amazonaws.com/jars/[organisation]/[module]-[revision].xml' - } -} - minimumBranchCoverage = 0.5 minimumInstructionCoverage = 0.5 excludedClassesCoverage += [ diff --git a/dd-smoke-tests/apm-tracing-disabled/build.gradle b/dd-smoke-tests/apm-tracing-disabled/build.gradle index d5f5774ef34..6035c46a8b9 100644 --- a/dd-smoke-tests/apm-tracing-disabled/build.gradle +++ b/dd-smoke-tests/apm-tracing-disabled/build.gradle @@ -12,10 +12,6 @@ java { sourceCompatibility = '1.8' } -repositories { - mavenCentral() -} - dependencies { implementation 'org.springframework.boot:spring-boot-starter-web' implementation group: 'io.opentracing', name: 'opentracing-api', version: '0.32.0' diff --git a/dd-smoke-tests/armeria-grpc/application/settings.gradle b/dd-smoke-tests/armeria-grpc/application/settings.gradle index 6ec6749fcad..387db3f06d9 100644 --- a/dd-smoke-tests/armeria-grpc/application/settings.gradle +++ b/dd-smoke-tests/armeria-grpc/application/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/datastreams/kafkaschemaregistry/build.gradle b/dd-smoke-tests/datastreams/kafkaschemaregistry/build.gradle index d09cb263e6f..ad707301e3b 100644 --- a/dd-smoke-tests/datastreams/kafkaschemaregistry/build.gradle +++ b/dd-smoke-tests/datastreams/kafkaschemaregistry/build.gradle @@ -4,14 +4,6 @@ plugins { id 'org.springframework.boot' version '2.6.3' } -repositories { - mavenCentral() - maven { - url 'https://packages.confluent.io/maven/' - } - maven { url 'https://repo.spring.io/libs-milestone' } -} - apply from: "$rootDir/gradle/java.gradle" description = 'Kafka Smoke Tests.' diff --git a/dd-smoke-tests/grpc-1.5/build.gradle b/dd-smoke-tests/grpc-1.5/build.gradle index 0b1c81d99a8..88fb250c622 100644 --- a/dd-smoke-tests/grpc-1.5/build.gradle +++ b/dd-smoke-tests/grpc-1.5/build.gradle @@ -10,11 +10,6 @@ plugins { apply from: "$rootDir/gradle/java.gradle" -repositories { - // Use Maven Central for resolving dependencies. 
- mavenCentral() -} - def grpcVersion = '1.58.0' def protobufVersion = '3.24.0' def protocVersion = protobufVersion diff --git a/dd-smoke-tests/kafka-3/application/settings.gradle b/dd-smoke-tests/kafka-3/application/settings.gradle index c115e2b5db1..18d3cc89555 100644 --- a/dd-smoke-tests/kafka-3/application/settings.gradle +++ b/dd-smoke-tests/kafka-3/application/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/play-2.4/build.gradle b/dd-smoke-tests/play-2.4/build.gradle index cbb2a0d97cf..ae28bc2e495 100644 --- a/dd-smoke-tests/play-2.4/build.gradle +++ b/dd-smoke-tests/play-2.4/build.gradle @@ -32,8 +32,13 @@ distributions { } repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() - gradlePluginPortal() maven { name "lightbend-maven-releases" url "https://repo.lightbend.com/lightbend/maven-release" diff --git a/dd-smoke-tests/play-2.5/build.gradle b/dd-smoke-tests/play-2.5/build.gradle index 55b187fb769..b52f8c1f042 100644 --- a/dd-smoke-tests/play-2.5/build.gradle +++ b/dd-smoke-tests/play-2.5/build.gradle @@ -32,8 +32,13 @@ distributions { } repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() - gradlePluginPortal() maven { name "lightbend-maven-releases" url "https://repo.lightbend.com/lightbend/maven-release" diff --git a/dd-smoke-tests/play-2.6/build.gradle b/dd-smoke-tests/play-2.6/build.gradle index d3d18b56f2a..cf4605af069 100644 --- a/dd-smoke-tests/play-2.6/build.gradle +++ b/dd-smoke-tests/play-2.6/build.gradle @@ -32,8 +32,13 @@ distributions { } repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() - gradlePluginPortal() maven { name "lightbend-maven-releases" url "https://repo.lightbend.com/lightbend/maven-release" diff --git a/dd-smoke-tests/play-2.7/build.gradle b/dd-smoke-tests/play-2.7/build.gradle index 357c7be9a60..64340d29733 100644 --- a/dd-smoke-tests/play-2.7/build.gradle +++ b/dd-smoke-tests/play-2.7/build.gradle @@ -32,8 +32,13 @@ distributions { } repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() - gradlePluginPortal() maven { name "lightbend-maven-releases" url "https://repo.lightbend.com/lightbend/maven-release" diff --git a/dd-smoke-tests/play-2.8-otel/build.gradle b/dd-smoke-tests/play-2.8-otel/build.gradle index 13ed0848d71..6160d167a7a 100644 --- a/dd-smoke-tests/play-2.8-otel/build.gradle +++ b/dd-smoke-tests/play-2.8-otel/build.gradle @@ -27,8 +27,13 @@ distributions { } repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() - gradlePluginPortal() maven { name "lightbend-maven-releases" url "https://repo.lightbend.com/lightbend/maven-release" diff --git a/dd-smoke-tests/play-2.8-split-routes/build.gradle b/dd-smoke-tests/play-2.8-split-routes/build.gradle index 0c831c70fbd..df4b4dae14e 
100644 --- a/dd-smoke-tests/play-2.8-split-routes/build.gradle +++ b/dd-smoke-tests/play-2.8-split-routes/build.gradle @@ -27,8 +27,13 @@ distributions { } repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() - gradlePluginPortal() maven { name "lightbend-maven-releases" url "https://repo.lightbend.com/lightbend/maven-release" diff --git a/dd-smoke-tests/play-2.8/build.gradle b/dd-smoke-tests/play-2.8/build.gradle index 75dcdae7913..09dd2a31002 100644 --- a/dd-smoke-tests/play-2.8/build.gradle +++ b/dd-smoke-tests/play-2.8/build.gradle @@ -27,8 +27,13 @@ distributions { } repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() - gradlePluginPortal() maven { name "lightbend-maven-releases" url "https://repo.lightbend.com/lightbend/maven-release" diff --git a/dd-smoke-tests/quarkus-native/application/settings.gradle b/dd-smoke-tests/quarkus-native/application/settings.gradle index 5b6c7b616d3..54d8314939a 100644 --- a/dd-smoke-tests/quarkus-native/application/settings.gradle +++ b/dd-smoke-tests/quarkus-native/application/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/quarkus/application/settings.gradle b/dd-smoke-tests/quarkus/application/settings.gradle index eb582e9be9e..a61d3c14925 100644 --- a/dd-smoke-tests/quarkus/application/settings.gradle +++ b/dd-smoke-tests/quarkus/application/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/spring-boot-2.5-webflux/build.gradle b/dd-smoke-tests/spring-boot-2.5-webflux/build.gradle index 9de2ebaa7f2..99420989357 100644 --- a/dd-smoke-tests/spring-boot-2.5-webflux/build.gradle +++ b/dd-smoke-tests/spring-boot-2.5-webflux/build.gradle @@ -17,12 +17,6 @@ shadowJar { configurations = [project.configurations.runtimeClasspath] } -// Only needed until the final release of 2.5.0 -repositories { - mavenCentral() - maven { url 'https://repo.spring.io/milestone' } -} - dependencies { implementation group: 'org.springframework.boot', name: 'spring-boot-starter-webflux', version: '2.5.0+' testImplementation project(':dd-smoke-tests') diff --git a/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle b/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle index e91a7f738ef..0cd51889884 100644 --- a/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle b/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle index 47224987ea1..ba75f4a860a 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle +++ 
b/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle b/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle index d0ed8fbc660..693f570cf88 100644 --- a/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle b/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle index eefefc8b056..7abd3b491e2 100644 --- a/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle b/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle index b67b84505e6..f0fab663f1d 100644 --- a/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/springboot-freemarker/build.gradle b/dd-smoke-tests/springboot-freemarker/build.gradle index 9cbd3352d80..e9e6d962c19 100644 --- a/dd-smoke-tests/springboot-freemarker/build.gradle +++ b/dd-smoke-tests/springboot-freemarker/build.gradle @@ -12,10 +12,6 @@ java { sourceCompatibility = '1.8' } -repositories { - mavenCentral() -} - dependencies { implementation group: 'org.springframework.boot', name: 'spring-boot-starter-web', version: '1.5.18.RELEASE' implementation group: 'org.freemarker', name: 'freemarker', version: '2.3.24-incubating' diff --git a/dd-smoke-tests/springboot-java-11/build.gradle b/dd-smoke-tests/springboot-java-11/build.gradle index 031e6615c5c..336ce124b67 100644 --- a/dd-smoke-tests/springboot-java-11/build.gradle +++ b/dd-smoke-tests/springboot-java-11/build.gradle @@ -12,10 +12,6 @@ ext { apply from: "$rootDir/gradle/java.gradle" description = 'SpringBoot Java 11 Smoke Tests.' -repositories { - mavenCentral() -} - dependencies { implementation group: 'org.springframework.boot', name: 'spring-boot-starter-web', version: '2.2.0.RELEASE' diff --git a/dd-smoke-tests/springboot-java-17/build.gradle b/dd-smoke-tests/springboot-java-17/build.gradle index c6d99c2de64..b138ccfb383 100644 --- a/dd-smoke-tests/springboot-java-17/build.gradle +++ b/dd-smoke-tests/springboot-java-17/build.gradle @@ -12,10 +12,6 @@ ext { apply from: "$rootDir/gradle/java.gradle" description = 'SpringBoot Java 17 Smoke Tests.' 
-repositories { - mavenCentral() -} - dependencies { implementation group: 'org.springframework.boot', name: 'spring-boot-starter-web', version: '2.2.0.RELEASE' diff --git a/dd-smoke-tests/springboot-jetty-jsp/build.gradle b/dd-smoke-tests/springboot-jetty-jsp/build.gradle index a953ed4adc1..aa99cd77534 100644 --- a/dd-smoke-tests/springboot-jetty-jsp/build.gradle +++ b/dd-smoke-tests/springboot-jetty-jsp/build.gradle @@ -13,10 +13,6 @@ java { sourceCompatibility = '1.8' } -repositories { - mavenCentral() -} - sourceSets { main { resources.srcDir("src/main/webapp") diff --git a/dd-smoke-tests/springboot-thymeleaf/build.gradle b/dd-smoke-tests/springboot-thymeleaf/build.gradle index 61977c12bf1..80cfcedc0c4 100644 --- a/dd-smoke-tests/springboot-thymeleaf/build.gradle +++ b/dd-smoke-tests/springboot-thymeleaf/build.gradle @@ -12,10 +12,6 @@ java { sourceCompatibility = '1.8' } -repositories { - mavenCentral() -} - dependencies { implementation 'org.springframework.boot:spring-boot-starter-web' implementation 'org.springframework.boot:spring-boot-starter-thymeleaf' diff --git a/dd-smoke-tests/springboot-tomcat-jsp/build.gradle b/dd-smoke-tests/springboot-tomcat-jsp/build.gradle index 31e6db4bebd..c80b878adfc 100644 --- a/dd-smoke-tests/springboot-tomcat-jsp/build.gradle +++ b/dd-smoke-tests/springboot-tomcat-jsp/build.gradle @@ -13,10 +13,6 @@ java { sourceCompatibility = '1.8' } -repositories { - mavenCentral() -} - sourceSets { main { resources.srcDir("src/main/webapp") diff --git a/dd-smoke-tests/wildfly/spring-ear/settings.gradle b/dd-smoke-tests/wildfly/spring-ear/settings.gradle index 734bb11b017..05402fe766b 100644 --- a/dd-smoke-tests/wildfly/spring-ear/settings.gradle +++ b/dd-smoke-tests/wildfly/spring-ear/settings.gradle @@ -1,6 +1,11 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } mavenCentral() gradlePluginPortal() } diff --git a/dd-smoke-tests/wildfly/spring-ear/war/build.gradle b/dd-smoke-tests/wildfly/spring-ear/war/build.gradle index 2ebbfebc77b..2c32e3b8d86 100644 --- a/dd-smoke-tests/wildfly/spring-ear/war/build.gradle +++ b/dd-smoke-tests/wildfly/spring-ear/war/build.gradle @@ -2,6 +2,12 @@ apply plugin: 'java' apply plugin: 'war' repositories { + mavenLocal() + if (project.rootProject.hasProperty("mavenRepositoryProxy")) { + maven { + url project.rootProject.property("mavenRepositoryProxy") + } + } mavenCentral() } diff --git a/gradle/repositories.gradle b/gradle/repositories.gradle index 0b0a3e7ecd8..b35471e4041 100644 --- a/gradle/repositories.gradle +++ b/gradle/repositories.gradle @@ -1,10 +1,10 @@ repositories { + mavenLocal() if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") } } - mavenLocal() mavenCentral() // add sonatype repository for snapshot dependencies maven { @@ -23,4 +23,16 @@ repositories { includeGroup 'io.sqreen' } } + maven { + url 'https://packages.confluent.io/maven/' + content { + includeGroupAndSubgroups "io.confluent" + } + } + maven { + url 'https://repo.spring.io/libs-milestone' + content { + includeGroupAndSubgroups "org.springframework" + } + } } diff --git a/remote-config/remote-config-core/build.gradle b/remote-config/remote-config-core/build.gradle index 35bc61dbd96..3836370a512 100644 --- a/remote-config/remote-config-core/build.gradle +++ b/remote-config/remote-config-core/build.gradle @@ -1,9 +1,3 @@ -buildscript { - repositories { - mavenCentral() 
- } -} - apply from: "$rootDir/gradle/java.gradle" ext { diff --git a/settings.gradle b/settings.gradle index 4df68ccfc00..d277a597e9f 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,3 +1,16 @@ +pluginManagement { + repositories { + mavenLocal() + if (settings.hasProperty("mavenRepositoryProxy")) { + maven { + url settings["mavenRepositoryProxy"] + } + } + mavenCentral() + gradlePluginPortal() + } +} + plugins { id 'com.gradle.develocity' version '3.18' } diff --git a/test-published-dependencies/build.gradle b/test-published-dependencies/build.gradle index 8e28c12b8d5..83811be6b8e 100644 --- a/test-published-dependencies/build.gradle +++ b/test-published-dependencies/build.gradle @@ -12,11 +12,6 @@ allprojects { group = 'com.datadoghq' version = versionFromFile - repositories { - mavenLocal() - mavenCentral() - gradlePluginPortal() - } - + apply from: "$sharedConfigDirectory/repositories.gradle" apply from: "$sharedConfigDirectory/spotless.gradle" } From 2b37e3bd93d375e6da7a3299d783b0acfc8e5db2 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 19 Mar 2025 15:24:43 -0400 Subject: [PATCH 021/113] update proxy url to use internal routing --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 267fe3347d3..c1a474e1ebd 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,7 +23,7 @@ variables: DEPENDENCY_CACHE_POLICY: pull BUILD_CACHE_POLICY: pull GRADLE_VERSION: "8.4" # must match gradle-wrapper.properties - MAVEN_REPOSITORY_PROXY: "https://artifactual.us1.prod.dog/repository/maven-central/" + MAVEN_REPOSITORY_PROXY: "https://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/global-maven-proxy/" JAVA_BUILD_IMAGE_VERSION: "v25.01" REPO_NOTIFICATION_CHANNEL: "#apm-java-escalations" PROFILE_TESTS: From 7092cd0a350843994e130144eea7e2cd2d2bddfc Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 19 Mar 2025 15:45:05 -0400 Subject: [PATCH 022/113] no "s" --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c1a474e1ebd..ca983dd28bc 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,7 +23,7 @@ variables: DEPENDENCY_CACHE_POLICY: pull BUILD_CACHE_POLICY: pull GRADLE_VERSION: "8.4" # must match gradle-wrapper.properties - MAVEN_REPOSITORY_PROXY: "https://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/global-maven-proxy/" + MAVEN_REPOSITORY_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/global-maven-proxy/" JAVA_BUILD_IMAGE_VERSION: "v25.01" REPO_NOTIFICATION_CHANNEL: "#apm-java-escalations" PROFILE_TESTS: From 391d0c3221b74d8960d3e1b6034144f05d7536d0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 19 Mar 2025 15:54:23 -0400 Subject: [PATCH 023/113] allow insecure everywhere --- dd-java-agent/agent-ci-visibility/build.gradle | 1 + dd-java-agent/benchmark-integration/build.gradle | 1 + dd-java-agent/benchmark-integration/play-perftest/build.gradle | 1 + dd-java-agent/instrumentation/build.gradle | 1 + dd-java-agent/instrumentation/datanucleus-4/build.gradle | 1 + dd-smoke-tests/armeria-grpc/application/settings.gradle | 1 + dd-smoke-tests/kafka-3/application/settings.gradle | 1 + dd-smoke-tests/play-2.4/build.gradle | 1 + dd-smoke-tests/play-2.5/build.gradle | 1 + dd-smoke-tests/play-2.6/build.gradle | 1 + dd-smoke-tests/play-2.7/build.gradle | 1 + dd-smoke-tests/play-2.8-otel/build.gradle | 1 + 
dd-smoke-tests/play-2.8-split-routes/build.gradle | 1 + dd-smoke-tests/play-2.8/build.gradle | 1 + dd-smoke-tests/quarkus-native/application/settings.gradle | 1 + dd-smoke-tests/quarkus/application/settings.gradle | 1 + .../spring-boot-2.7-webflux/application/settings.gradle | 1 + .../spring-boot-3.0-native/application/settings.gradle | 1 + .../spring-boot-3.0-webflux/application/settings.gradle | 1 + .../spring-boot-3.0-webmvc/application/settings.gradle | 1 + .../spring-boot-3.3-webmvc/application/settings.gradle | 1 + dd-smoke-tests/wildfly/spring-ear/settings.gradle | 1 + dd-smoke-tests/wildfly/spring-ear/war/build.gradle | 1 + gradle/repositories.gradle | 1 + settings.gradle | 1 + 25 files changed, 25 insertions(+) diff --git a/dd-java-agent/agent-ci-visibility/build.gradle b/dd-java-agent/agent-ci-visibility/build.gradle index 39663f247ce..84b6ac370f0 100644 --- a/dd-java-agent/agent-ci-visibility/build.gradle +++ b/dd-java-agent/agent-ci-visibility/build.gradle @@ -4,6 +4,7 @@ buildscript { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-java-agent/benchmark-integration/build.gradle b/dd-java-agent/benchmark-integration/build.gradle index 8e020260d5e..342d57a916b 100644 --- a/dd-java-agent/benchmark-integration/build.gradle +++ b/dd-java-agent/benchmark-integration/build.gradle @@ -4,6 +4,7 @@ buildscript { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-java-agent/benchmark-integration/play-perftest/build.gradle b/dd-java-agent/benchmark-integration/play-perftest/build.gradle index 9fe22103921..3a6e79dadeb 100644 --- a/dd-java-agent/benchmark-integration/play-perftest/build.gradle +++ b/dd-java-agent/benchmark-integration/play-perftest/build.gradle @@ -30,6 +30,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-java-agent/instrumentation/build.gradle b/dd-java-agent/instrumentation/build.gradle index 1852c7a6854..5e04ed9d098 100644 --- a/dd-java-agent/instrumentation/build.gradle +++ b/dd-java-agent/instrumentation/build.gradle @@ -6,6 +6,7 @@ buildscript { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-java-agent/instrumentation/datanucleus-4/build.gradle b/dd-java-agent/instrumentation/datanucleus-4/build.gradle index 330489573b4..f6d215fce93 100644 --- a/dd-java-agent/instrumentation/datanucleus-4/build.gradle +++ b/dd-java-agent/instrumentation/datanucleus-4/build.gradle @@ -6,6 +6,7 @@ buildscript { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/armeria-grpc/application/settings.gradle b/dd-smoke-tests/armeria-grpc/application/settings.gradle index 387db3f06d9..0bad037caf9 100644 --- a/dd-smoke-tests/armeria-grpc/application/settings.gradle +++ b/dd-smoke-tests/armeria-grpc/application/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + 
allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/kafka-3/application/settings.gradle b/dd-smoke-tests/kafka-3/application/settings.gradle index 18d3cc89555..b6478078496 100644 --- a/dd-smoke-tests/kafka-3/application/settings.gradle +++ b/dd-smoke-tests/kafka-3/application/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/play-2.4/build.gradle b/dd-smoke-tests/play-2.4/build.gradle index ae28bc2e495..9be50043bde 100644 --- a/dd-smoke-tests/play-2.4/build.gradle +++ b/dd-smoke-tests/play-2.4/build.gradle @@ -36,6 +36,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/play-2.5/build.gradle b/dd-smoke-tests/play-2.5/build.gradle index b52f8c1f042..6c992889cd0 100644 --- a/dd-smoke-tests/play-2.5/build.gradle +++ b/dd-smoke-tests/play-2.5/build.gradle @@ -36,6 +36,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/play-2.6/build.gradle b/dd-smoke-tests/play-2.6/build.gradle index cf4605af069..2c50adfcb96 100644 --- a/dd-smoke-tests/play-2.6/build.gradle +++ b/dd-smoke-tests/play-2.6/build.gradle @@ -36,6 +36,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/play-2.7/build.gradle b/dd-smoke-tests/play-2.7/build.gradle index 64340d29733..ac64c0f6f01 100644 --- a/dd-smoke-tests/play-2.7/build.gradle +++ b/dd-smoke-tests/play-2.7/build.gradle @@ -36,6 +36,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/play-2.8-otel/build.gradle b/dd-smoke-tests/play-2.8-otel/build.gradle index 6160d167a7a..44421b6e91f 100644 --- a/dd-smoke-tests/play-2.8-otel/build.gradle +++ b/dd-smoke-tests/play-2.8-otel/build.gradle @@ -31,6 +31,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/play-2.8-split-routes/build.gradle b/dd-smoke-tests/play-2.8-split-routes/build.gradle index df4b4dae14e..3c540f3f436 100644 --- a/dd-smoke-tests/play-2.8-split-routes/build.gradle +++ b/dd-smoke-tests/play-2.8-split-routes/build.gradle @@ -31,6 +31,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/play-2.8/build.gradle b/dd-smoke-tests/play-2.8/build.gradle index 09dd2a31002..acea44b532a 100644 --- a/dd-smoke-tests/play-2.8/build.gradle +++ b/dd-smoke-tests/play-2.8/build.gradle @@ -31,6 +31,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git 
a/dd-smoke-tests/quarkus-native/application/settings.gradle b/dd-smoke-tests/quarkus-native/application/settings.gradle index 54d8314939a..ff115e0cadf 100644 --- a/dd-smoke-tests/quarkus-native/application/settings.gradle +++ b/dd-smoke-tests/quarkus-native/application/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/quarkus/application/settings.gradle b/dd-smoke-tests/quarkus/application/settings.gradle index a61d3c14925..bc6492d81fd 100644 --- a/dd-smoke-tests/quarkus/application/settings.gradle +++ b/dd-smoke-tests/quarkus/application/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle b/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle index 0cd51889884..ded06a39f3e 100644 --- a/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle b/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle index ba75f4a860a..ba2988ac170 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle b/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle index 693f570cf88..1fe3ceca980 100644 --- a/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle b/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle index 7abd3b491e2..ed95d363660 100644 --- a/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle b/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle index f0fab663f1d..b66984ca2e6 100644 --- a/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/wildfly/spring-ear/settings.gradle 
b/dd-smoke-tests/wildfly/spring-ear/settings.gradle index 05402fe766b..754fdd42b8e 100644 --- a/dd-smoke-tests/wildfly/spring-ear/settings.gradle +++ b/dd-smoke-tests/wildfly/spring-ear/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() diff --git a/dd-smoke-tests/wildfly/spring-ear/war/build.gradle b/dd-smoke-tests/wildfly/spring-ear/war/build.gradle index 2c32e3b8d86..b5600006fa0 100644 --- a/dd-smoke-tests/wildfly/spring-ear/war/build.gradle +++ b/dd-smoke-tests/wildfly/spring-ear/war/build.gradle @@ -6,6 +6,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/gradle/repositories.gradle b/gradle/repositories.gradle index b35471e4041..e7a68c92e11 100644 --- a/gradle/repositories.gradle +++ b/gradle/repositories.gradle @@ -3,6 +3,7 @@ repositories { if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") + allowInsecureProtocol true } } mavenCentral() diff --git a/settings.gradle b/settings.gradle index d277a597e9f..86ba5e9789d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -4,6 +4,7 @@ pluginManagement { if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] + allowInsecureProtocol true } } mavenCentral() From e2257572e79ceff5eafbf883f7f1ec4e50e048ca Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 19 Mar 2025 18:56:34 -0400 Subject: [PATCH 024/113] include apache with confluence --- gradle/repositories.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/gradle/repositories.gradle b/gradle/repositories.gradle index e7a68c92e11..cdcbb792a2d 100644 --- a/gradle/repositories.gradle +++ b/gradle/repositories.gradle @@ -28,6 +28,7 @@ repositories { url 'https://packages.confluent.io/maven/' content { includeGroupAndSubgroups "io.confluent" + includeGroupAndSubgroups "org.apache.kafka" } } maven { From 7019854bb7be9a947e2bc03173dc4bec4ac67f0f Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 20 Mar 2025 12:00:14 -0400 Subject: [PATCH 025/113] remove intentional fail --- .gitlab-ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 4a17a6a9c5d..dba274ae161 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -345,7 +345,6 @@ muzzle-dep-report: export PROFILER_COMMAND="-XX:StartFlightRecording=settings=profile,filename=/tmp/${CI_JOB_NAME_SLUG}.jfr,dumponexit=true"; fi - *prepare_test_env - - exit 1 - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms$GRADLE_MEM -Xmx$GRADLE_MEM $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M -Ddatadog.forkedMinHeapSize=128M" - ./gradlew $GRADLE_TARGET $GRADLE_PARAMS -PtestJvm=$testJvm -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS --continue || $CONTINUE_ON_FAILURE after_script: From 08a85b0e812462a27e90fa862c5cbd6dc2548aa5 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 20 Mar 2025 19:35:34 -0400 Subject: [PATCH 026/113] use gradlePluginCache, mavenCentral after pluginportal --- .gitlab-ci.yml | 5 +++-- dd-java-agent/agent-ci-visibility/build.gradle | 7 +++++++ dd-java-agent/benchmark-integration/build.gradle | 8 +++++++- 
dd-java-agent/instrumentation/build.gradle | 7 +++++++ dd-java-agent/instrumentation/datanucleus-4/build.gradle | 1 - dd-smoke-tests/armeria-grpc/application/settings.gradle | 8 +++++++- dd-smoke-tests/kafka-3/application/settings.gradle | 8 +++++++- dd-smoke-tests/quarkus-native/application/settings.gradle | 8 +++++++- dd-smoke-tests/quarkus/application/settings.gradle | 8 +++++++- .../spring-boot-2.7-webflux/application/settings.gradle | 8 +++++++- .../spring-boot-3.0-native/application/settings.gradle | 8 +++++++- .../spring-boot-3.0-webflux/application/settings.gradle | 8 +++++++- .../spring-boot-3.0-webmvc/application/settings.gradle | 8 +++++++- .../spring-boot-3.3-webmvc/application/settings.gradle | 8 +++++++- dd-smoke-tests/wildfly/spring-ear/settings.gradle | 8 +++++++- settings.gradle | 8 +++++++- 16 files changed, 101 insertions(+), 15 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ca983dd28bc..b85d3205718 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,7 +23,8 @@ variables: DEPENDENCY_CACHE_POLICY: pull BUILD_CACHE_POLICY: pull GRADLE_VERSION: "8.4" # must match gradle-wrapper.properties - MAVEN_REPOSITORY_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/global-maven-proxy/" + MAVEN_REPOSITORY_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/maven-central/" + GRADLE_PLUGIN_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/gradle-plugin-portal-proxy/" JAVA_BUILD_IMAGE_VERSION: "v25.01" REPO_NOTIFICATION_CHANNEL: "#apm-java-escalations" PROFILE_TESTS: @@ -75,7 +76,7 @@ default: policy: $BUILD_CACHE_POLICY before_script: - export GRADLE_USER_HOME=`pwd`/.gradle - - export GRADLE_ARGS=" --build-cache --stacktrace --no-daemon --parallel --max-workers=$GRADLE_WORKERS -PmavenRepositoryProxy=$MAVEN_REPOSITORY_PROXY" + - export GRADLE_ARGS=" --build-cache --stacktrace --no-daemon --parallel --max-workers=$GRADLE_WORKERS -PmavenRepositoryProxy=$MAVEN_REPOSITORY_PROXY -PgradlePluginProxy=$GRADLE_PLUGIN_PROXY" - *normalize_node_index # for weird reasons, gradle will always "chmod 700" the .gradle folder # with Gitlab caching, .gradle is always owned by root and thus gradle's chmod invocation fails diff --git a/dd-java-agent/agent-ci-visibility/build.gradle b/dd-java-agent/agent-ci-visibility/build.gradle index 84b6ac370f0..44b0f25d875 100644 --- a/dd-java-agent/agent-ci-visibility/build.gradle +++ b/dd-java-agent/agent-ci-visibility/build.gradle @@ -1,12 +1,19 @@ buildscript { repositories { mavenLocal() + if (project.rootProject.hasProperty("gradlePluginProxy")) { + maven { + url project.rootProject.property("gradlePluginProxy") + allowInsecureProtocol true + } + } if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") allowInsecureProtocol true } } + gradlePluginPortal() mavenCentral() } diff --git a/dd-java-agent/benchmark-integration/build.gradle b/dd-java-agent/benchmark-integration/build.gradle index 342d57a916b..bd1262e17fe 100644 --- a/dd-java-agent/benchmark-integration/build.gradle +++ b/dd-java-agent/benchmark-integration/build.gradle @@ -1,14 +1,20 @@ buildscript { repositories { mavenLocal() + if (project.rootProject.hasProperty("gradlePluginProxy")) { + maven { + url project.rootProject.property("gradlePluginProxy") + allowInsecureProtocol true + } + } if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url 
project.rootProject.property("mavenRepositoryProxy") allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } dependencies { classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.1' diff --git a/dd-java-agent/instrumentation/build.gradle b/dd-java-agent/instrumentation/build.gradle index 5e04ed9d098..0cc4e692e3f 100644 --- a/dd-java-agent/instrumentation/build.gradle +++ b/dd-java-agent/instrumentation/build.gradle @@ -3,12 +3,19 @@ buildscript { repositories { mavenLocal() + if (project.rootProject.hasProperty("gradlePluginProxy")) { + maven { + url project.rootProject.property("gradlePluginProxy") + allowInsecureProtocol true + } + } if (project.rootProject.hasProperty("mavenRepositoryProxy")) { maven { url project.rootProject.property("mavenRepositoryProxy") allowInsecureProtocol true } } + gradlePluginPortal() mavenCentral() } diff --git a/dd-java-agent/instrumentation/datanucleus-4/build.gradle b/dd-java-agent/instrumentation/datanucleus-4/build.gradle index f6d215fce93..f550936c4b1 100644 --- a/dd-java-agent/instrumentation/datanucleus-4/build.gradle +++ b/dd-java-agent/instrumentation/datanucleus-4/build.gradle @@ -10,7 +10,6 @@ buildscript { } } mavenCentral() - gradlePluginPortal() } dependencies { diff --git a/dd-smoke-tests/armeria-grpc/application/settings.gradle b/dd-smoke-tests/armeria-grpc/application/settings.gradle index 0bad037caf9..61b0c5ba72e 100644 --- a/dd-smoke-tests/armeria-grpc/application/settings.gradle +++ b/dd-smoke-tests/armeria-grpc/application/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } } diff --git a/dd-smoke-tests/kafka-3/application/settings.gradle b/dd-smoke-tests/kafka-3/application/settings.gradle index b6478078496..72681ccb63f 100644 --- a/dd-smoke-tests/kafka-3/application/settings.gradle +++ b/dd-smoke-tests/kafka-3/application/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } } diff --git a/dd-smoke-tests/quarkus-native/application/settings.gradle b/dd-smoke-tests/quarkus-native/application/settings.gradle index ff115e0cadf..889da068c92 100644 --- a/dd-smoke-tests/quarkus-native/application/settings.gradle +++ b/dd-smoke-tests/quarkus-native/application/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } plugins { id 'io.quarkus' version "${quarkusPluginVersion}" diff --git a/dd-smoke-tests/quarkus/application/settings.gradle b/dd-smoke-tests/quarkus/application/settings.gradle index bc6492d81fd..73ecd2bfcc8 100644 --- a/dd-smoke-tests/quarkus/application/settings.gradle +++ 
b/dd-smoke-tests/quarkus/application/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } plugins { id 'io.quarkus' version "${quarkusPluginVersion}" diff --git a/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle b/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle index ded06a39f3e..d6db824f713 100644 --- a/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-2.7-webflux/application/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } } diff --git a/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle b/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle index ba2988ac170..5bdbdea2fd8 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/application/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } } diff --git a/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle b/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle index 1fe3ceca980..2ddbb5ba1c2 100644 --- a/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.0-webflux/application/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } } diff --git a/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle b/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle index ed95d363660..bce01bccc03 100644 --- a/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.0-webmvc/application/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } } diff --git a/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle b/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle index b66984ca2e6..81cf6e29659 100644 --- 
a/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle +++ b/dd-smoke-tests/spring-boot-3.3-webmvc/application/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } } diff --git a/dd-smoke-tests/wildfly/spring-ear/settings.gradle b/dd-smoke-tests/wildfly/spring-ear/settings.gradle index 754fdd42b8e..fb85720eee8 100644 --- a/dd-smoke-tests/wildfly/spring-ear/settings.gradle +++ b/dd-smoke-tests/wildfly/spring-ear/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } } diff --git a/settings.gradle b/settings.gradle index 86ba5e9789d..73d11f28c94 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,14 +1,20 @@ pluginManagement { repositories { mavenLocal() + if (settings.hasProperty("gradlePluginProxy")) { + maven { + url settings["gradlePluginProxy"] + allowInsecureProtocol true + } + } if (settings.hasProperty("mavenRepositoryProxy")) { maven { url settings["mavenRepositoryProxy"] allowInsecureProtocol true } } - mavenCentral() gradlePluginPortal() + mavenCentral() } } From 525031a7f512d07d0ca672de5cc6b32b7d2fb08f Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 20 Mar 2025 19:59:27 -0400 Subject: [PATCH 027/113] proxy mule as well --- .../mule-4/application/pom.xml | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/dd-java-agent/instrumentation/mule-4/application/pom.xml b/dd-java-agent/instrumentation/mule-4/application/pom.xml index 39f32ab235d..192b37fbf84 100644 --- a/dd-java-agent/instrumentation/mule-4/application/pom.xml +++ b/dd-java-agent/instrumentation/mule-4/application/pom.xml @@ -99,4 +99,26 @@ + + + maven-proxy-profile + + + env.MAVEN_REPOSITORY_PROXY + + + + + maven-proxy-repo + ${env.MAVEN_REPOSITORY_PROXY} + + + + + maven-plugin-proxy + ${env.MAVEN_REPOSITORY_PROXY} + + + + From 7847b01eae70c6eb5fae2050385236a956f75e8a Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 31 Mar 2025 16:42:07 -0400 Subject: [PATCH 028/113] change BUILD_CACHE_TYPE to CACHE_TYPE since its used for both --- .gitlab-ci.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d766349d118..2473cb1f9ab 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -95,16 +95,16 @@ default: KUBERNETES_CPU_REQUEST: 8 KUBERNETES_MEMORY_REQUEST: 8Gi KUBERNETES_MEMORY_LIMIT: 8Gi - BUILD_CACHE_TYPE: lib #default + CACHE_TYPE: lib #default cache: - - key: '$CI_SERVER_VERSION-$BUILD_CACHE_TYPE' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months + - key: '$CI_SERVER_VERSION-$CACHE_TYPE' # Dependencies cache. Reset the cache every time gitlab is upgraded. 
~Every couple months paths: # Cached dependencies and wrappers for gradle - .gradle/wrapper - .gradle/caches - .gradle/notifications policy: $DEPENDENCY_CACHE_POLICY - - key: $CI_PIPELINE_ID-$BUILD_CACHE_TYPE # Incremental build cache. Shared by all jobs in the pipeline of the same type + - key: $CI_PIPELINE_ID-$CACHE_TYPE # Incremental build cache. Shared by all jobs in the pipeline of the same type paths: - .gradle/caches/$GRADLE_VERSION - .gradle/$GRADLE_VERSION/executionHistory @@ -131,7 +131,7 @@ build: extends: .gradle_build variables: BUILD_CACHE_POLICY: push - BUILD_CACHE_TYPE: lib + CACHE_TYPE: lib DEPENDENCY_CACHE_POLICY: pull script: - if [ $CI_PIPELINE_SOURCE == "schedule" ] ; then ./gradlew resolveAndLockAll --write-locks; fi @@ -154,7 +154,7 @@ build_tests: extends: .gradle_build variables: BUILD_CACHE_POLICY: push - BUILD_CACHE_TYPE: test + CACHE_TYPE: test DEPENDENCY_CACHE_POLICY: pull GRADLE_MEM: 4G GRADLE_WORKERS: 4 @@ -199,7 +199,7 @@ test_published_artifacts: stage: tests needs: [ build ] variables: - BUILD_CACHE_TYPE: lib + CACHE_TYPE: lib script: - mvn_local_repo=$(./mvnw help:evaluate -Dexpression=settings.localRepository -q -DforceStdout) - rm -rf "${mvn_local_repo}/com/datadoghq" @@ -225,7 +225,7 @@ test_published_artifacts: needs: [ build ] stage: tests variables: - BUILD_CACHE_TYPE: lib + CACHE_TYPE: lib script: - ./gradlew $GRADLE_TARGET -PskipTests -PrunBuildSrcTests -PskipSpotless -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS after_script: @@ -272,7 +272,7 @@ muzzle: stage: tests parallel: 8 variables: - BUILD_CACHE_TYPE: test + CACHE_TYPE: test script: - export SKIP_BUILDSCAN="true" - ./gradlew writeMuzzleTasksToFile $GRADLE_ARGS @@ -295,7 +295,7 @@ muzzle-dep-report: needs: [ build_tests ] stage: tests variables: - BUILD_CACHE_TYPE: test + CACHE_TYPE: test script: - export SKIP_BUILDSCAN="true" - ./gradlew generateMuzzleReport muzzleInstrumentationReport $GRADLE_ARGS @@ -333,7 +333,7 @@ muzzle-dep-report: KUBERNETES_MEMORY_LIMIT: 16Gi GRADLE_WORKERS: 4 GRADLE_MEM: 3G - BUILD_CACHE_TYPE: test + CACHE_TYPE: test GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" TESTCONTAINERS_CHECKS_DISABLE: "true" @@ -598,7 +598,7 @@ deploy_to_sonatype: stage: publish needs: [ build ] variables: - BUILD_CACHE_TYPE: lib + CACHE_TYPE: lib rules: - if: '$POPULATE_CACHE' when: never From 5085ef90d176c5dc6daa475d76224a036c9167d8 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 1 Apr 2025 10:59:33 -0400 Subject: [PATCH 029/113] split cache like with circleci. 
Cache became too big --- .gitlab-ci.yml | 63 +++++++++++++++++++++++++++++++++++--------------- 1 file changed, 44 insertions(+), 19 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2473cb1f9ab..503a53c0032 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -154,36 +154,50 @@ build_tests: extends: .gradle_build variables: BUILD_CACHE_POLICY: push - CACHE_TYPE: test DEPENDENCY_CACHE_POLICY: pull GRADLE_MEM: 4G GRADLE_WORKERS: 4 KUBERNETES_MEMORY_REQUEST: 18Gi KUBERNETES_MEMORY_LIMIT: 18Gi + parallel: + matrix: + - GRADLE_TARGET: ":baseTest" + CACHE_TYPE: "base" + - GRADLE_TARGET: ":instrumentationTest" + CACHE_TYPE: "inst" + - GRADLE_TARGET: ":instrumentationLatestDepTest" + CACHE_TYPE: "latestdep" + - GRADLE_TARGET: ":smokeTest" + CACHE_TYPE: "smoke" + - GRADLE_TARGET: ":profilingTest" + CACHE_TYPE: "profiling" script: - - ./gradlew clean :baseTest :instrumentationTest :instrumentationLatestDepTest :instrumentationTest :profilingTest -PskipTests $GRADLE_ARGS - -build_and_populate_dep_cache: - extends: build - variables: - BUILD_CACHE_POLICY: push - DEPENDENCY_CACHE_POLICY: push - rules: - - if: '$POPULATE_CACHE' - when: on_success - - when: manual - allow_failure: true + - ./gradlew clean $GRADLE_TARGET -PskipTests $GRADLE_ARGS -build_and_populate_test_dep_cache: +populate_dep_cache: extends: build_tests variables: - BUILD_CACHE_POLICY: push + BUILD_CACHE_POLICY: pull DEPENDENCY_CACHE_POLICY: push rules: - if: '$POPULATE_CACHE' when: on_success - when: manual allow_failure: true + parallel: + matrix: + - GRADLE_TARGET: ":dd-java-agent:shadowJar :dd-trace-api:jar :dd-trace-ot:shadowJar" + CACHE_TYPE: "lib" + - GRADLE_TARGET: ":baseTest" + CACHE_TYPE: "base" + - GRADLE_TARGET: ":instrumentationTest" + CACHE_TYPE: "inst" + - GRADLE_TARGET: ":instrumentationLatestDepTest" + CACHE_TYPE: "latestdep" + - GRADLE_TARGET: ":smokeTest" + CACHE_TYPE: "smoke" + - GRADLE_TARGET: ":profilingTest" + CACHE_TYPE: "profiling" spotless: extends: .gradle_build @@ -272,7 +286,7 @@ muzzle: stage: tests parallel: 8 variables: - CACHE_TYPE: test + CACHE_TYPE: inst script: - export SKIP_BUILDSCAN="true" - ./gradlew writeMuzzleTasksToFile $GRADLE_ARGS @@ -295,7 +309,7 @@ muzzle-dep-report: needs: [ build_tests ] stage: tests variables: - CACHE_TYPE: test + CACHE_TYPE: inst script: - export SKIP_BUILDSCAN="true" - ./gradlew generateMuzzleReport muzzleInstrumentationReport $GRADLE_ARGS @@ -333,7 +347,6 @@ muzzle-dep-report: KUBERNETES_MEMORY_LIMIT: 16Gi GRADLE_WORKERS: 4 GRADLE_MEM: 3G - CACHE_TYPE: test GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" TESTCONTAINERS_CHECKS_DISABLE: "true" @@ -393,6 +406,7 @@ agent_integration_tests: testJvm: "8" CI_AGENT_HOST: local-agent GRADLE_TARGET: "traceAgentTest" + CACHE_TYPE: "base" services: - name: datadog/agent:7.34.0 alias: local-agent @@ -405,6 +419,7 @@ test_base: extends: .test_job variables: GRADLE_TARGET: ":baseTest" + CACHE_TYPE: "base" parallel: matrix: *test_matrix_4 script: @@ -415,6 +430,7 @@ test_inst: extends: .test_job_with_test_agent variables: GRADLE_TARGET: ":instrumentationTest" + CACHE_TYPE: "inst" parallel: matrix: *test_matrix_12 @@ -422,6 +438,7 @@ test_inst_latest: extends: .test_job_with_test_agent variables: GRADLE_TARGET: ":instrumentationLatestDepTest" + CACHE_TYPE: "latestDep" parallel: matrix: - testJvm: ["8", "17", "21" ] @@ -433,11 +450,12 @@ test_flaky: extends: .test_job_with_test_agent variables: GRADLE_PARAMS: "-PrunFlakyTests" + CACHE_TYPE: "base" testJvm: "8" CONTINUE_ON_FAILURE: "true" parallel: matrix: - 
- GRADLE_TARGET: [":baseTest", ":instrumentationTest", ":smokeTest", ":debuggerTest"] + - GRADLE_TARGET: [":baseTest", ":smokeTest", ":debuggerTest"] # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time # This emulates "parallel" by including it in the matrix CI_SPLIT: [ "1/4", "2/4", "3/4", "4/4" ] @@ -447,6 +465,7 @@ test_flaky_inst: variables: GRADLE_TARGET: ":instrumentationTest" GRADLE_PARAMS: "-PrunFlakyTests" + CACHE_TYPE: "inst" testJvm: "8" CONTINUE_ON_FAILURE: "true" parallel: @@ -459,6 +478,7 @@ test_profiling: extends: .test_job variables: GRADLE_TARGET: ":profilingTest" + CACHE_TYPE: "profiling" parallel: matrix: *test_matrix @@ -466,6 +486,7 @@ test_debugger: extends: .test_job variables: GRADLE_TARGET: ":debuggerTest" + CACHE_TYPE: "base" parallel: matrix: *test_matrix @@ -474,6 +495,7 @@ test_smoke: variables: GRADLE_TARGET: "stageMainDist :smokeTest" GRADLE_PARAMS: "-PskipFlakyTests" + CACHE_TYPE: "smoke" parallel: matrix: *test_matrix_4 @@ -481,6 +503,7 @@ test_ssi_smoke: extends: .test_job variables: GRADLE_TARGET: "stageMainDist :smokeTest" + CACHE_TYPE: "smoke" DD_INJECT_FORCE: "true" DD_INJECTION_ENABLED: "tracer" parallel: @@ -490,6 +513,7 @@ test_smoke_graalvm: extends: .test_job variables: GRADLE_TARGET: "stageMainDist :dd-smoke-test:spring-boot-3.0-native:test" + CACHE_TYPE: "smoke" parallel: matrix: - testJvm: ["graalvm17", "graalvm21"] @@ -498,6 +522,7 @@ test_smoke_semeru8_debugger: extends: .test_job variables: GRADLE_TARGET: "stageMainDist dd-smoke-tests:debugger-integration-tests:test" + CACHE_TYPE: "smoke" testJvm: "semeru8" required: From 0c400ca16670b5664e16c2f8533851e737f6d88c Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 1 Apr 2025 12:06:13 -0400 Subject: [PATCH 030/113] Some caches are too big to be saved --- .gitlab-ci.yml | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 503a53c0032..f96ee5dd136 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -104,6 +104,9 @@ default: - .gradle/caches - .gradle/notifications policy: $DEPENDENCY_CACHE_POLICY + fallback_keys: # Use fallback keys because all cache types are not populated. See note under: populate_dep_cache + - '$CI_SERVER_VERSION-base' + - '$CI_SERVER_VERSION-lib' - key: $CI_PIPELINE_ID-$CACHE_TYPE # Incremental build cache. Shared by all jobs in the pipeline of the same type paths: - .gradle/caches/$GRADLE_VERSION @@ -163,14 +166,15 @@ build_tests: matrix: - GRADLE_TARGET: ":baseTest" CACHE_TYPE: "base" + - GRADLE_TARGET: ":profilingTest" + CACHE_TYPE: "profiling" - GRADLE_TARGET: ":instrumentationTest" CACHE_TYPE: "inst" - GRADLE_TARGET: ":instrumentationLatestDepTest" CACHE_TYPE: "latestdep" - GRADLE_TARGET: ":smokeTest" CACHE_TYPE: "smoke" - - GRADLE_TARGET: ":profilingTest" - CACHE_TYPE: "profiling" + script: - ./gradlew clean $GRADLE_TARGET -PskipTests $GRADLE_ARGS @@ -190,14 +194,16 @@ populate_dep_cache: CACHE_TYPE: "lib" - GRADLE_TARGET: ":baseTest" CACHE_TYPE: "base" - - GRADLE_TARGET: ":instrumentationTest" - CACHE_TYPE: "inst" - - GRADLE_TARGET: ":instrumentationLatestDepTest" - CACHE_TYPE: "latestdep" - - GRADLE_TARGET: ":smokeTest" - CACHE_TYPE: "smoke" - GRADLE_TARGET: ":profilingTest" CACHE_TYPE: "profiling" +# Gitlab doesn't support s3 based caches >5GB. 
Fixed in Gitlab 17.5 +# See: https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26921#note_2132307223 +# - GRADLE_TARGET: ":instrumentationTest" +# CACHE_TYPE: "inst" +# - GRADLE_TARGET: ":instrumentationLatestDepTest" +# CACHE_TYPE: "latestdep" +# - GRADLE_TARGET: ":smokeTest" +# CACHE_TYPE: "smoke" spotless: extends: .gradle_build From 5a65b3a599804cff755bea2bb21ffe7e5345fc7e Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 1 Apr 2025 17:03:38 -0400 Subject: [PATCH 031/113] test fixes --- .../aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy | 2 +- .../src/test/groovy/CouchbaseClient31Test.groovy | 2 +- .../src/test/groovy/CouchbaseClient32Test.groovy | 2 +- .../src/test/groovy/CassandraClientTest.groovy | 2 +- .../src/test/groovy/CassandraClientTest.groovy | 2 +- .../src/test/groovy/CassandraClientTest.groovy | 2 +- .../google-pubsub/src/test/groovy/PubSubTest.groovy | 2 +- .../mongo/src/test/groovy/MongoBaseTest.groovy | 2 +- .../redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy | 4 ++-- .../redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy | 2 +- .../redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy | 2 +- .../src/test/groovy/VertxRedisTestBase.groovy | 2 +- 12 files changed, 13 insertions(+), 13 deletions(-) diff --git a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy index ee97267660a..83cd3b002b3 100644 --- a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy @@ -365,7 +365,7 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "POST" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } tag "peer.hostname", "localhost" tag "peer.port", { it instanceof Integer } tag "span.kind", "client" diff --git a/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/test/groovy/CouchbaseClient31Test.groovy b/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/test/groovy/CouchbaseClient31Test.groovy index 1c76f2a6cba..e37cda84896 100644 --- a/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/test/groovy/CouchbaseClient31Test.groovy +++ b/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/test/groovy/CouchbaseClient31Test.groovy @@ -358,7 +358,7 @@ abstract class CouchbaseClient31Test extends VersionedNamingTestBase { it.tag(DDTags.ERROR_TYPE, ex.class.name) it.tag(DDTags.ERROR_STACK, String) } - "$InstrumentationTags.COUCHBASE_SEED_NODES" { it =="localhost" || it == "127.0.0.1" } + "$InstrumentationTags.COUCHBASE_SEED_NODES" { it =="localhost" || it == "127.0.0.1" || it == couchbase.getHost() } if (isLatestDepTest && extraTags != null) { tag('db.system','couchbase') diff --git a/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/test/groovy/CouchbaseClient32Test.groovy b/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/test/groovy/CouchbaseClient32Test.groovy index 2a8cd522888..10150d91ef3 100644 --- a/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/test/groovy/CouchbaseClient32Test.groovy +++ b/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/test/groovy/CouchbaseClient32Test.groovy @@ -422,7 +422,7 @@ abstract class CouchbaseClient32Test extends VersionedNamingTestBase { 
"$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" 'couchbase' 'db.system' 'couchbase' - "$InstrumentationTags.COUCHBASE_SEED_NODES" { it =="localhost" || it == "127.0.0.1" } + "$InstrumentationTags.COUCHBASE_SEED_NODES" { it =="localhost" || it == "127.0.0.1" || it == couchbase.getHost() } if (isErrored) { it.tag(DDTags.ERROR_MSG, { exMessage.length() > 0 && ((String) it).startsWith(exMessage) }) it.tag(DDTags.ERROR_TYPE, ex.class.name) diff --git a/dd-java-agent/instrumentation/datastax-cassandra-3.8/src/test/groovy/CassandraClientTest.groovy b/dd-java-agent/instrumentation/datastax-cassandra-3.8/src/test/groovy/CassandraClientTest.groovy index f97bed3a7b2..d4e0ac21065 100644 --- a/dd-java-agent/instrumentation/datastax-cassandra-3.8/src/test/groovy/CassandraClientTest.groovy +++ b/dd-java-agent/instrumentation/datastax-cassandra-3.8/src/test/groovy/CassandraClientTest.groovy @@ -171,7 +171,7 @@ abstract class CassandraClientTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "java-cassandra" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" container.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" port "$Tags.DB_TYPE" "cassandra" diff --git a/dd-java-agent/instrumentation/datastax-cassandra-3/src/test/groovy/CassandraClientTest.groovy b/dd-java-agent/instrumentation/datastax-cassandra-3/src/test/groovy/CassandraClientTest.groovy index f97bed3a7b2..d4e0ac21065 100644 --- a/dd-java-agent/instrumentation/datastax-cassandra-3/src/test/groovy/CassandraClientTest.groovy +++ b/dd-java-agent/instrumentation/datastax-cassandra-3/src/test/groovy/CassandraClientTest.groovy @@ -171,7 +171,7 @@ abstract class CassandraClientTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "java-cassandra" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" container.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" port "$Tags.DB_TYPE" "cassandra" diff --git a/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy b/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy index 35eabe01d9e..8f982348710 100644 --- a/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy +++ b/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy @@ -247,7 +247,7 @@ abstract class CassandraClientTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "java-cassandra" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" container.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" port "$Tags.DB_TYPE" "cassandra" diff --git a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy index fe28d715bc1..cf24e0115bc 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy +++ b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy @@ -279,7 +279,7 @@ abstract class PubSubTest extends VersionedNamingTestBase { if ({ isDataStreamsEnabled() }) { "$DDTags.PATHWAY_HASH" { String } } - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" emulator.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" { Integer } peerServiceFrom(Tags.RPC_SERVICE) diff --git 
a/dd-java-agent/instrumentation/mongo/src/test/groovy/MongoBaseTest.groovy b/dd-java-agent/instrumentation/mongo/src/test/groovy/MongoBaseTest.groovy index 2421b63153d..9d6ae857165 100644 --- a/dd-java-agent/instrumentation/mongo/src/test/groovy/MongoBaseTest.groovy +++ b/dd-java-agent/instrumentation/mongo/src/test/groovy/MongoBaseTest.groovy @@ -81,7 +81,7 @@ abstract class MongoBaseTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "java-mongo" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" mongoDbContainer.getHost() "$Tags.PEER_PORT" port "$Tags.DB_TYPE" dbType "$Tags.DB_INSTANCE" instance diff --git a/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy index db6dfdcde1e..f112598c75e 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy @@ -130,7 +130,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) @@ -326,7 +326,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) diff --git a/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy index d6c71465cd3..18c3d3a1623 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy @@ -129,7 +129,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) diff --git a/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy index 4ed910249cd..fc8fbbf961c 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy @@ -122,7 +122,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort 
peerServiceFrom(Tags.PEER_HOSTNAME) diff --git a/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy b/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy index 7feed90fd12..97a1c25959f 100644 --- a/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy +++ b/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy @@ -138,7 +138,7 @@ abstract class VertxRedisTestBase extends VersionedNamingTestBase { "$Tags.DB_TYPE" "redis" // FIXME: in some cases the connection is not extracted. Better to skip this test than mark the whole test as flaky "$Tags.PEER_PORT" { it == null || it == port } - "$Tags.PEER_HOSTNAME" { it == null || it == "127.0.0.1" || it == "localhost" } + "$Tags.PEER_HOSTNAME" { it == null || it == "127.0.0.1" || it == "localhost" || it == redisServer.getHost() } if (tag(Tags.PEER_HOSTNAME) != null) { peerServiceFrom(Tags.PEER_HOSTNAME) defaultTags() From 7adf224798e76d9cccf516275d00b1fb3d0a4e9b Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 1 Apr 2025 18:20:20 -0400 Subject: [PATCH 032/113] more test fixes --- .../src/test/groovy/S3ClientTest.groovy | 14 +++++++------- .../src/test/groovy/CassandraClientTest.groovy | 4 ++-- .../src/test/groovy/MongoSyncClientTest.groovy | 6 +++--- .../src/test/groovy/MongoAsyncClientTest.groovy | 2 +- .../src/test/groovy/MongoCore37ClientTest.groovy | 6 +++--- .../src/test/groovy/Mongo4ClientTest.groovy | 6 +++--- .../src/test/groovy/MongoReactiveClientTest.groovy | 6 +++--- .../src/test/groovy/RedissonClientTest.groovy | 2 +- .../src/test/groovy/RedissonClientTest.groovy | 2 +- 9 files changed, 24 insertions(+), 24 deletions(-) diff --git a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy index 83cd3b002b3..5941f78b28d 100644 --- a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy @@ -276,8 +276,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "POST" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -303,8 +303,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -330,8 +330,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } + tag 
"peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -366,7 +366,7 @@ class S3ClientTest extends AgentTestRunner { tag "http.method", "POST" tag "http.status_code", 200 tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } diff --git a/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy b/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy index 8f982348710..ba3aadb8773 100644 --- a/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy +++ b/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy @@ -47,7 +47,7 @@ abstract class CassandraClientTest extends VersionedNamingTestBase { container = new CassandraContainer("cassandra:4").withStartupTimeout(Duration.ofSeconds(120)) container.start() port = container.getMappedPort(9042) - address = new InetSocketAddress("127.0.0.1", port) + address = new InetSocketAddress(container.getHost(), port) runUnderTrace("setup") { Session session = sessionBuilder().build() @@ -252,7 +252,7 @@ abstract class CassandraClientTest extends VersionedNamingTestBase { "$Tags.PEER_PORT" port "$Tags.DB_TYPE" "cassandra" "$Tags.DB_INSTANCE" keyspace - "$InstrumentationTags.CASSANDRA_CONTACT_POINTS" "127.0.0.1:${port}" + "$InstrumentationTags.CASSANDRA_CONTACT_POINTS" "${container.contactPoint.hostString}:${container.contactPoint.port}" if (throwable != null) { errorTags(throwable) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.10-sync-test/src/test/groovy/MongoSyncClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.10-sync-test/src/test/groovy/MongoSyncClientTest.groovy index 74bb354af88..c5ce4bd7806 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.10-sync-test/src/test/groovy/MongoSyncClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.10-sync-test/src/test/groovy/MongoSyncClientTest.groovy @@ -20,7 +20,7 @@ abstract class MongoSyncClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = MongoClients.create("mongodb://localhost:$port/?appname=some-description") + client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port/?appname=some-description") } def cleanup() throws Exception { @@ -52,7 +52,7 @@ abstract class MongoSyncClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName) @@ -205,7 +205,7 @@ abstract class MongoSyncClientTest extends MongoBaseTest { def "test client failure"() { setup: String collectionName = randomCollectionName() - def client = MongoClients.create("mongodb://localhost:$UNUSABLE_PORT/?serverselectiontimeoutms=10") + def client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$UNUSABLE_PORT/?serverselectiontimeoutms=10") when: MongoDatabase db = client.getDatabase(databaseName) diff --git 
a/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy index 032365e83b7..ba4af0de88e 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy @@ -56,7 +56,7 @@ abstract class MongoAsyncClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName, toCallback {}) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.7-core-test/src/test/groovy/MongoCore37ClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.7-core-test/src/test/groovy/MongoCore37ClientTest.groovy index 6d424fca2a4..7066defce6e 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.7-core-test/src/test/groovy/MongoCore37ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.7-core-test/src/test/groovy/MongoCore37ClientTest.groovy @@ -20,7 +20,7 @@ abstract class MongoCore37ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = MongoClients.create("mongodb://localhost:$port/?appname=some-instance") + client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port/?appname=some-instance") } def cleanup() throws Exception { @@ -52,7 +52,7 @@ abstract class MongoCore37ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName) @@ -205,7 +205,7 @@ abstract class MongoCore37ClientTest extends MongoBaseTest { def "test client failure"() { setup: String collectionName = randomCollectionName() - def client = MongoClients.create("mongodb://localhost:$UNUSABLE_PORT/?serverselectiontimeoutms=10") + def client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$UNUSABLE_PORT/?serverselectiontimeoutms=10") when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/Mongo4ClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/Mongo4ClientTest.groovy index fa4de78c9a9..1757b1b8235 100644 --- a/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/Mongo4ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/Mongo4ClientTest.groovy @@ -22,7 +22,7 @@ abstract class Mongo4ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = MongoClients.create("mongodb://localhost:$port/?appname=some-description") + client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port/?appname=some-description") } def cleanup() throws Exception { @@ -64,7 +64,7 @@ abstract class Mongo4ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = 
randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName) @@ -217,7 +217,7 @@ abstract class Mongo4ClientTest extends MongoBaseTest { def "test client failure"() { setup: String collectionName = randomCollectionName() - def client = MongoClients.create("mongodb://localhost:$UNUSABLE_PORT/?serverselectiontimeoutms=10") + def client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$UNUSABLE_PORT/?serverselectiontimeoutms=10") when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/MongoReactiveClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/MongoReactiveClientTest.groovy index d47f350c664..6211c2de1b3 100644 --- a/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/MongoReactiveClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/MongoReactiveClientTest.groovy @@ -27,7 +27,7 @@ abstract class MongoReactiveClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = MongoClients.create("mongodb://localhost:$port/?appname=some-description") + client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port/?appname=some-description") } def cleanup() throws Exception { @@ -117,7 +117,7 @@ abstract class MongoReactiveClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName).subscribe(toSubscriber {}) @@ -133,7 +133,7 @@ abstract class MongoReactiveClientTest extends MongoBaseTest { def "test create collection no description with parent"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: runUnderTrace("parent") { diff --git a/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy index 18c3d3a1623..80fb3469f9d 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy @@ -325,7 +325,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) diff --git a/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy index fc8fbbf961c..db253dce65a 100644 --- 
a/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy @@ -318,7 +318,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) From c708e92455c357b2766842b699d70ba05c0b56f6 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 1 Apr 2025 18:28:22 -0400 Subject: [PATCH 033/113] disable some tests for now --- .gitlab-ci.yml | 78 +++++++++++++++++++++++++------------------------- 1 file changed, 39 insertions(+), 39 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f96ee5dd136..c593c2d5aba 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -440,45 +440,45 @@ test_inst: parallel: matrix: *test_matrix_12 -test_inst_latest: - extends: .test_job_with_test_agent - variables: - GRADLE_TARGET: ":instrumentationLatestDepTest" - CACHE_TYPE: "latestDep" - parallel: - matrix: - - testJvm: ["8", "17", "21" ] - # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time - # This emulates "parallel" by including it in the matrix - CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] - -test_flaky: - extends: .test_job_with_test_agent - variables: - GRADLE_PARAMS: "-PrunFlakyTests" - CACHE_TYPE: "base" - testJvm: "8" - CONTINUE_ON_FAILURE: "true" - parallel: - matrix: - - GRADLE_TARGET: [":baseTest", ":smokeTest", ":debuggerTest"] - # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time - # This emulates "parallel" by including it in the matrix - CI_SPLIT: [ "1/4", "2/4", "3/4", "4/4" ] - -test_flaky_inst: - extends: .test_job - variables: - GRADLE_TARGET: ":instrumentationTest" - GRADLE_PARAMS: "-PrunFlakyTests" - CACHE_TYPE: "inst" - testJvm: "8" - CONTINUE_ON_FAILURE: "true" - parallel: - matrix: - # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time - # This emulates "parallel" by including it in the matrix - - CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] +#test_inst_latest: +# extends: .test_job_with_test_agent +# variables: +# GRADLE_TARGET: ":instrumentationLatestDepTest" +# CACHE_TYPE: "latestDep" +# parallel: +# matrix: +# - testJvm: ["8", "17", "21" ] +# # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time +# # This emulates "parallel" by including it in the matrix +# CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] +# +#test_flaky: +# extends: .test_job_with_test_agent +# variables: +# GRADLE_PARAMS: "-PrunFlakyTests" +# CACHE_TYPE: "base" +# testJvm: "8" +# CONTINUE_ON_FAILURE: "true" +# parallel: +# matrix: +# - GRADLE_TARGET: [":baseTest", ":smokeTest", ":debuggerTest"] +# # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time +# # This emulates "parallel" by including it in the matrix +# CI_SPLIT: [ "1/4", "2/4", "3/4", "4/4" ] +# +#test_flaky_inst: +# extends: .test_job +# variables: +# GRADLE_TARGET: ":instrumentationTest" +# GRADLE_PARAMS: "-PrunFlakyTests" +# CACHE_TYPE: "inst" +# testJvm: "8" +# CONTINUE_ON_FAILURE: "true" +# parallel: 
+# matrix: +# # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time +# # This emulates "parallel" by including it in the matrix +# - CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] test_profiling: extends: .test_job From 57b86e901d1a6e8f4124dd4e665477f83f6730ed Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 1 Apr 2025 20:04:02 -0400 Subject: [PATCH 034/113] more fixes --- .../src/test/groovy/S3ClientTest.groovy | 12 ++++++------ .../src/test/groovy/MongoCore31ClientTest.groovy | 6 +++--- .../src/test/groovy/MongoJava31ClientTest.groovy | 6 +++--- .../src/test/groovy/MongoJava34ClientTest.groovy | 6 +++--- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy index 5941f78b28d..b852b75c94d 100644 --- a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy @@ -92,8 +92,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -151,8 +151,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$sourceKey") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$sourceKey") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -185,8 +185,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$destKey") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$destKey") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } diff --git a/dd-java-agent/instrumentation/mongo/driver-3.1-core-test/src/test/groovy/MongoCore31ClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.1-core-test/src/test/groovy/MongoCore31ClientTest.groovy index df22d41839f..11b8aa8c12b 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.1-core-test/src/test/groovy/MongoCore31ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.1-core-test/src/test/groovy/MongoCore31ClientTest.groovy @@ -25,7 +25,7 @@ abstract class MongoCore31ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = new MongoClient(new ServerAddress("localhost", port), + client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), port), MongoClientOptions.builder() .description("some-description") .addCommandListener(new CommandListener() { @@ -70,7 +70,7 
@@ abstract class MongoCore31ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = new MongoClient("localhost", port).getDatabase(databaseName) + MongoDatabase db = new MongoClient(mongoDbContainer.getHost(), port).getDatabase(databaseName) when: db.createCollection(collectionName) @@ -218,7 +218,7 @@ abstract class MongoCore31ClientTest extends MongoBaseTest { setup: String collectionName = randomCollectionName() def options = MongoClientOptions.builder().serverSelectionTimeout(10).build() - def client = new MongoClient(new ServerAddress("localhost", UNUSABLE_PORT), [], options) + def client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), UNUSABLE_PORT), [], options) when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.1/src/test/groovy/MongoJava31ClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.1/src/test/groovy/MongoJava31ClientTest.groovy index 4a798cdc1c5..e7eec08b185 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.1/src/test/groovy/MongoJava31ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.1/src/test/groovy/MongoJava31ClientTest.groovy @@ -25,7 +25,7 @@ abstract class MongoJava31ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = new MongoClient(new ServerAddress("localhost", port), + client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), port), MongoClientOptions.builder() .description("some-description") .addCommandListener(new CommandListener() { @@ -72,7 +72,7 @@ abstract class MongoJava31ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = new MongoClient("localhost", port).getDatabase(databaseName) + MongoDatabase db = new MongoClient(mongoDbContainer.getHost(), port).getDatabase(databaseName) when: db.createCollection(collectionName) @@ -220,7 +220,7 @@ abstract class MongoJava31ClientTest extends MongoBaseTest { setup: String collectionName = randomCollectionName() def options = MongoClientOptions.builder().serverSelectionTimeout(10).build() - def client = new MongoClient(new ServerAddress("localhost", UNUSABLE_PORT), [], options) + def client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), UNUSABLE_PORT), [], options) when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.4/src/test/groovy/MongoJava34ClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.4/src/test/groovy/MongoJava34ClientTest.groovy index 424748dabb0..d9f1b0f7313 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.4/src/test/groovy/MongoJava34ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.4/src/test/groovy/MongoJava34ClientTest.groovy @@ -25,7 +25,7 @@ abstract class MongoJava34ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = new MongoClient(new ServerAddress("localhost", port), + client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), port), MongoClientOptions.builder() .description("some-description") .addCommandListener(new CommandListener() { @@ -70,7 +70,7 @@ abstract class MongoJava34ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = 
new MongoClient("localhost", port).getDatabase(databaseName) + MongoDatabase db = new MongoClient(mongoDbContainer.getHost(), port).getDatabase(databaseName) when: db.createCollection(collectionName) @@ -218,7 +218,7 @@ abstract class MongoJava34ClientTest extends MongoBaseTest { setup: String collectionName = randomCollectionName() def options = MongoClientOptions.builder().serverSelectionTimeout(10).build() - def client = new MongoClient(new ServerAddress("localhost", UNUSABLE_PORT), [], options) + def client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), UNUSABLE_PORT), [], options) when: MongoDatabase db = client.getDatabase(databaseName) From b3b2a14dded1ba6458c9b467a6b2b6b3b087d167 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 1 Apr 2025 21:23:41 -0400 Subject: [PATCH 035/113] Use maven proxy with openliberty smoketests --- .../application/pom.xml | 20 +++++++++++++++++++ .../application/pom.xml | 20 +++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/dd-smoke-tests/springboot-openliberty-20/application/pom.xml b/dd-smoke-tests/springboot-openliberty-20/application/pom.xml index d9e341675ae..85b6859c514 100644 --- a/dd-smoke-tests/springboot-openliberty-20/application/pom.xml +++ b/dd-smoke-tests/springboot-openliberty-20/application/pom.xml @@ -107,5 +107,25 @@ ${target.dir} + + maven-proxy-profile + + + env.MAVEN_REPOSITORY_PROXY + + + + + maven-proxy-repo + ${env.MAVEN_REPOSITORY_PROXY} + + + + + maven-plugin-proxy + ${env.MAVEN_REPOSITORY_PROXY} + + + diff --git a/dd-smoke-tests/springboot-openliberty-23/application/pom.xml b/dd-smoke-tests/springboot-openliberty-23/application/pom.xml index a6b66b8083a..5a8fb308d78 100644 --- a/dd-smoke-tests/springboot-openliberty-23/application/pom.xml +++ b/dd-smoke-tests/springboot-openliberty-23/application/pom.xml @@ -107,5 +107,25 @@ ${target.dir} + + maven-proxy-profile + + + env.MAVEN_REPOSITORY_PROXY + + + + + maven-proxy-repo + ${env.MAVEN_REPOSITORY_PROXY} + + + + + maven-plugin-proxy + ${env.MAVEN_REPOSITORY_PROXY} + + + From 1e01c543c17328364558ee412aca5f8c899e2947 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 1 Apr 2025 23:59:16 -0400 Subject: [PATCH 036/113] missed one more mongodb case --- .../src/test/groovy/MongoAsyncClientTest.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy index ba4af0de88e..c870e9608f9 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy @@ -27,7 +27,7 @@ abstract class MongoAsyncClientTest extends MongoBaseTest { .clusterSettings( ClusterSettings.builder() .description("some-description") - .applyConnectionString(new ConnectionString("mongodb://localhost:$port")) + .applyConnectionString(new ConnectionString("mongodb://${mongoDbContainer.getHost()}:$port")) .build()) .build()) } From de3fc5bacd78163a51ceefb6ad5cb268172f17c0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 2 Apr 2025 00:04:42 -0400 Subject: [PATCH 037/113] enable some more of the matrix --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c593c2d5aba..7e033425bd0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml 
@@ -42,8 +42,8 @@ variables: # - "semeru8" # - "ibm8" # - "zulu11" -# - "11" -# - "17" + - "11" + - "17" # - "semeru17" # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time From 38aa2938e367d0fcb01f3f42640b98569290a01e Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 2 Apr 2025 12:42:40 -0400 Subject: [PATCH 038/113] Fix lettuce tests --- .../groovy/Lettuce4AsyncClientTest.groovy | 81 +------------- .../test/groovy/Lettuce4ClientTestBase.groovy | 82 ++++++++++++++ .../test/groovy/Lettuce4SyncClientTest.groovy | 76 +------------ .../groovy/Lettuce5AsyncClientTest.groovy | 76 +------------ .../test/groovy/Lettuce5ClientTestBase.groovy | 78 ++++++++++++++ .../groovy/Lettuce5ReactiveClientTest.groovy | 59 +---------- .../test/groovy/Lettuce5SyncClientTest.groovy | 100 ++++++------------ 7 files changed, 203 insertions(+), 349 deletions(-) create mode 100644 dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy create mode 100644 dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy diff --git a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4AsyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4AsyncClientTest.groovy index e3b755f1abc..912db6eac67 100644 --- a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4AsyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4AsyncClientTest.groovy @@ -1,22 +1,15 @@ import static datadog.trace.instrumentation.lettuce4.InstrumentationPoints.AGENT_CRASHING_COMMAND_PREFIX -import com.lambdaworks.redis.ClientOptions import com.lambdaworks.redis.RedisClient import com.lambdaworks.redis.RedisConnectionException import com.lambdaworks.redis.RedisFuture import com.lambdaworks.redis.RedisURI import com.lambdaworks.redis.api.StatefulConnection -import com.lambdaworks.redis.api.async.RedisAsyncCommands -import com.lambdaworks.redis.api.sync.RedisCommands import com.lambdaworks.redis.codec.Utf8StringCodec import com.lambdaworks.redis.protocol.AsyncCommand -import datadog.trace.agent.test.naming.VersionedNamingTestBase -import datadog.trace.agent.test.utils.PortUtils import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.bootstrap.instrumentation.api.Tags -import redis.embedded.RedisServer -import spock.lang.Shared import spock.util.concurrent.AsyncConditions import java.util.concurrent.CancellationException @@ -26,79 +19,7 @@ import java.util.function.BiFunction import java.util.function.Consumer import java.util.function.Function -abstract class Lettuce4AsyncClientTest extends VersionedNamingTestBase { - public static final String HOST = "127.0.0.1" - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = new ClientOptions.Builder().autoReconnect(false).build() - - @Shared - int port - @Shared - int incorrectPort - @Shared - String dbAddr - @Shared - String dbAddrNonExistent - @Shared - String dbUriNonExistent - @Shared - String embeddedDbUri - - @Shared - RedisServer redisServer - - @Shared - Map testHashMap = [ - firstname: "John", - lastname : "Doe", - age : "53" - ] - - RedisClient redisClient - StatefulConnection connection - RedisAsyncCommands asyncCommands - RedisCommands syncCommands - - def setupSpec() { - port = PortUtils.randomOpenPort() - incorrectPort = PortUtils.randomOpenPort() - dbAddr = HOST + ":" + 
port + "/" + DB_INDEX - dbAddrNonExistent = HOST + ":" + incorrectPort + "/" + DB_INDEX - dbUriNonExistent = "redis://" + dbAddrNonExistent - embeddedDbUri = "redis://" + dbAddr - - redisServer = RedisServer.builder() - // bind to localhost to avoid firewall popup - .setting("bind " + HOST) - // set max memory to avoid problems in CI - .setting("maxmemory 128M") - .port(port).build() - } - - def setup() { - redisClient = RedisClient.create(embeddedDbUri) - - println "Using redis: $redisServer.args" - redisServer.start() - redisClient.setOptions(CLIENT_OPTIONS) - - connection = redisClient.connect() - asyncCommands = connection.async() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - - // 1 set + 1 connect trace - TEST_WRITER.waitForTraces(2) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - redisServer.stop() - } - +abstract class Lettuce4AsyncClientTest extends Lettuce4ClientTestBase { def "connect using get on ConnectionFuture"() { setup: RedisClient testConnectionClient = RedisClient.create(embeddedDbUri) diff --git a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy new file mode 100644 index 00000000000..1c824f160e2 --- /dev/null +++ b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy @@ -0,0 +1,82 @@ +import com.lambdaworks.redis.ClientOptions +import com.lambdaworks.redis.RedisClient +import com.lambdaworks.redis.api.StatefulConnection +import com.lambdaworks.redis.api.sync.RedisCommands +import datadog.trace.agent.test.naming.VersionedNamingTestBase +import datadog.trace.agent.test.utils.PortUtils +import redis.embedded.RedisServer +import spock.lang.Shared + +import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace + +abstract class Lettuce4ClientTestBase extends VersionedNamingTestBase { + public static final String HOST = "127.0.0.1" + public static final int DB_INDEX = 0 + // Disable autoreconnect so we do not get stray traces popping up on server shutdown + public static final ClientOptions CLIENT_OPTIONS = new ClientOptions.Builder().autoReconnect(false).build() + + @Shared + int port + @Shared + int incorrectPort + @Shared + String dbAddr + @Shared + String dbAddrNonExistent + @Shared + String dbUriNonExistent + @Shared + String embeddedDbUri + + @Shared + RedisServer redisServer + + @Shared + Map testHashMap = [ + firstname: "John", + lastname : "Doe", + age : "53" + ] + + RedisClient redisClient + StatefulConnection connection + RedisCommands syncCommands + + def setupSpec() { + port = PortUtils.randomOpenPort() + incorrectPort = PortUtils.randomOpenPort() + dbAddr = HOST + ":" + port + "/" + DB_INDEX + dbAddrNonExistent = HOST + ":" + incorrectPort + "/" + DB_INDEX + dbUriNonExistent = "redis://" + dbAddrNonExistent + embeddedDbUri = "redis://" + dbAddr + + redisServer = RedisServer.builder() + // bind to localhost to avoid firewall popup + .setting("bind " + HOST) + // set max memory to avoid problems in CI + .setting("maxmemory 128M") + .port(port).build() + } + + def setup() { + redisServer.start() + + redisClient = RedisClient.create(embeddedDbUri) + redisClient.setOptions(CLIENT_OPTIONS) + + runUnderTrace("setup") { + connection = redisClient.connect() + syncCommands = connection.sync() + + syncCommands.set("TESTKEY", "TESTVAL") + syncCommands.hmset("TESTHM", testHashMap) + } + TEST_WRITER.waitForTraces(1) + TEST_WRITER.clear() + } + + def 
cleanup() { + connection.close() + redisServer.stop() + } +} diff --git a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4SyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4SyncClientTest.groovy index ac455653b52..1e0bb003906 100644 --- a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4SyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4SyncClientTest.groovy @@ -1,87 +1,13 @@ import static datadog.trace.instrumentation.lettuce4.InstrumentationPoints.AGENT_CRASHING_COMMAND_PREFIX -import com.lambdaworks.redis.ClientOptions import com.lambdaworks.redis.RedisClient import com.lambdaworks.redis.RedisConnectionException import com.lambdaworks.redis.api.StatefulConnection -import com.lambdaworks.redis.api.sync.RedisCommands -import datadog.trace.agent.test.naming.VersionedNamingTestBase -import datadog.trace.agent.test.utils.PortUtils import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.bootstrap.instrumentation.api.Tags -import redis.embedded.RedisServer -import spock.lang.Shared - -abstract class Lettuce4SyncClientTest extends VersionedNamingTestBase { - public static final String HOST = "127.0.0.1" - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = new ClientOptions.Builder().autoReconnect(false).build() - - @Shared - int port - @Shared - int incorrectPort - @Shared - String dbAddr - @Shared - String dbAddrNonExistent - @Shared - String dbUriNonExistent - @Shared - String embeddedDbUri - - @Shared - RedisServer redisServer - - @Shared - Map testHashMap = [ - firstname: "John", - lastname : "Doe", - age : "53" - ] - - RedisClient redisClient - StatefulConnection connection - RedisCommands syncCommands - - def setupSpec() { - port = PortUtils.randomOpenPort() - incorrectPort = PortUtils.randomOpenPort() - dbAddr = HOST + ":" + port + "/" + DB_INDEX - dbAddrNonExistent = HOST + ":" + incorrectPort + "/" + DB_INDEX - dbUriNonExistent = "redis://" + dbAddrNonExistent - embeddedDbUri = "redis://" + dbAddr - - redisServer = RedisServer.builder() - // bind to localhost to avoid firewall popup - .setting("bind " + HOST) - // set max memory to avoid problems in CI - .setting("maxmemory 128M") - .port(port).build() - } - - def setup() { - redisClient = RedisClient.create(embeddedDbUri) - - redisServer.start() - connection = redisClient.connect() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - syncCommands.hmset("TESTHM", testHashMap) - - // 2 sets + 1 connect trace - TEST_WRITER.waitForTraces(3) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - redisServer.stop() - } +abstract class Lettuce4SyncClientTest extends Lettuce4ClientTestBase { def "connect"() { setup: RedisClient testConnectionClient = RedisClient.create(embeddedDbUri) diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy index 9471514cfed..758d6d20b23 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy @@ -29,76 +29,10 @@ import java.util.function.BiFunction import java.util.function.Consumer import java.util.function.Function +import static 
datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX -abstract class Lettuce5AsyncClientTest extends VersionedNamingTestBase { - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = ClientOptions.builder().autoReconnect(false).build() - - @Shared - int port - @Shared - int incorrectPort - @Shared - String dbAddr - @Shared - String dbAddrNonExistent - @Shared - String dbUriNonExistent - @Shared - String embeddedDbUri - - @Shared - RedisContainer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) - .waitingFor(Wait.forListeningPort()) - - @Shared - Map testHashMap = [ - firstname: "John", - lastname : "Doe", - age : "53" - ] - - RedisClient redisClient - StatefulConnection connection - RedisAsyncCommands asyncCommands - RedisCommands syncCommands - - def setupSpec() { - redisServer.start() - println "Using redis: $redisServer.redisURI" - port = redisServer.firstMappedPort - incorrectPort = PortUtils.randomOpenPort() - dbAddr = redisServer.getHost() + ":" + port + "/" + DB_INDEX - dbAddrNonExistent = redisServer.getHost() + ":" + incorrectPort + "/" + DB_INDEX - dbUriNonExistent = "redis://" + dbAddrNonExistent - embeddedDbUri = "redis://" + dbAddr - } - - def setup() { - redisClient = RedisClient.create(embeddedDbUri) - redisClient.setOptions(CLIENT_OPTIONS) - - connection = redisClient.connect() - asyncCommands = connection.async() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - - // 1 set + 1 connect trace - TEST_WRITER.waitForTraces(2) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - } - - def cleanupSpec() { - redisServer.stop() - } - +abstract class Lettuce5AsyncClientTest extends Lettuce5ClientTestBase { def "connect using get on ConnectionFuture"() { setup: RedisClient testConnectionClient = RedisClient.create(embeddedDbUri) @@ -583,7 +517,7 @@ abstract class Lettuce5AsyncClientTest extends VersionedNamingTestBase { } } -class Lettuce5SyncClientV0Test extends Lettuce5AsyncClientTest { +class Lettuce5AsyncClientV0Test extends Lettuce5AsyncClientTest { @Override int version() { @@ -601,7 +535,7 @@ class Lettuce5SyncClientV0Test extends Lettuce5AsyncClientTest { } } -class Lettuce5SyncClientV1ForkedTest extends Lettuce5AsyncClientTest { +class Lettuce5AsyncClientV1ForkedTest extends Lettuce5AsyncClientTest { @Override int version() { @@ -620,7 +554,7 @@ class Lettuce5SyncClientV1ForkedTest extends Lettuce5AsyncClientTest { } -class Lettuce5AsyncProfilingForkedTest extends Lettuce5AsyncClientTest { +class Lettuce5AsyncClientProfilingForkedTest extends Lettuce5AsyncClientTest { @Override protected void configurePreAgent() { diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy new file mode 100644 index 00000000000..d693b9dada9 --- /dev/null +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy @@ -0,0 +1,78 @@ +import com.redis.testcontainers.RedisContainer +import datadog.trace.agent.test.naming.VersionedNamingTestBase +import datadog.trace.agent.test.utils.PortUtils +import io.lettuce.core.ClientOptions +import io.lettuce.core.RedisClient +import io.lettuce.core.api.StatefulRedisConnection +import 
io.lettuce.core.api.async.RedisAsyncCommands +import io.lettuce.core.api.reactive.RedisReactiveCommands +import io.lettuce.core.api.sync.RedisCommands +import org.testcontainers.containers.wait.strategy.Wait +import org.testcontainers.utility.DockerImageName +import spock.util.concurrent.PollingConditions + +import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace + +abstract class Lettuce5ClientTestBase extends VersionedNamingTestBase { + public static final int DB_INDEX = 0 + // Disable autoreconnect so we do not get stray traces popping up on server shutdown + public static final ClientOptions CLIENT_OPTIONS = ClientOptions.builder().autoReconnect(false).build() + + public static final Map testHashMap = [ + firstname: "John", + lastname : "Doe", + age : "53" + ] + + int port + int incorrectPort + String dbAddr + String dbAddrNonExistent + String dbUriNonExistent + String embeddedDbUri + + RedisContainer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) + .waitingFor(Wait.forListeningPort()) + + RedisClient redisClient + StatefulRedisConnection connection + RedisReactiveCommands reactiveCommands + RedisAsyncCommands asyncCommands + RedisCommands syncCommands + + def setup() { + redisServer.start() + println "Using redis: $redisServer.redisURI" + + port = redisServer.firstMappedPort + incorrectPort = PortUtils.randomOpenPort() + dbAddr = redisServer.getHost() + ":" + port + "/" + DB_INDEX + dbAddrNonExistent = redisServer.getHost() + ":" + incorrectPort + "/" + DB_INDEX + dbUriNonExistent = "redis://" + dbAddrNonExistent + embeddedDbUri = "redis://" + dbAddr + + redisClient = RedisClient.create(embeddedDbUri) + redisClient.setOptions(CLIENT_OPTIONS) + + runUnderTrace("setup") { + new PollingConditions(delay: 3, timeout: 15).eventually { + (connection = redisClient.connect()) != null + } + reactiveCommands = connection.reactive() + asyncCommands = connection.async() + syncCommands = connection.sync() + + syncCommands.set("TESTKEY", "TESTVAL") + syncCommands.hmset("TESTHM", testHashMap) + } + + TEST_WRITER.waitForTraces(1) + TEST_WRITER.clear() + } + + def cleanup() { + connection.close() + redisClient.shutdown() + redisServer.stop() + } +} diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy index f4e55eee299..7ea2b849572 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy @@ -1,6 +1,7 @@ import org.testcontainers.utility.DockerImageName import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace +import static datadog.trace.agent.test.utils.TraceUtils.runnableUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX import com.redis.testcontainers.RedisContainer @@ -21,61 +22,7 @@ import spock.util.concurrent.PollingConditions import java.util.function.Consumer -abstract class Lettuce5ReactiveClientTest extends VersionedNamingTestBase { - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = ClientOptions.builder().autoReconnect(false).build() - - @Override - boolean useStrictTraceWrites() { - // TODO fix this by making sure that spans get closed properly - return false - } - - 
@Shared - String embeddedDbUri - - @Shared - int port - - @Shared - RedisContainer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) - .waitingFor(Wait.forListeningPort()) - - RedisClient redisClient - StatefulRedisConnection connection - RedisReactiveCommands reactiveCommands - RedisCommands syncCommands - - def setup() { - redisServer.start() - println "Using redis: $redisServer.redisURI" - port = redisServer.firstMappedPort - String dbAddr = redisServer.getHost() + ":" + port + "/" + DB_INDEX - embeddedDbUri = "redis://" + dbAddr - redisClient = RedisClient.create(embeddedDbUri) - - redisClient.setOptions(CLIENT_OPTIONS) - - new PollingConditions(delay: 3, timeout: 15).eventually { - (connection = redisClient.connect()) != null - } - reactiveCommands = connection.reactive() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - - // 1 set + 1 connect trace - TEST_WRITER.waitForTraces(2) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - redisClient.shutdown() - redisServer.stop() - } - +abstract class Lettuce5ReactiveClientTest extends Lettuce5ClientTestBase { def "set command with subscribe on a defined consumer"() { def conds = new AsyncConditions() @@ -200,7 +147,7 @@ abstract class Lettuce5ReactiveClientTest extends VersionedNamingTestBase { when: reactiveCommands.randomkey().subscribe { res -> conds.evaluate { - assert res == "TESTKEY" + assert res == "TESTKEY" || res == "TESTHM" } } diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy index e868500afae..1e1fb8d6eaf 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy @@ -1,5 +1,6 @@ import org.testcontainers.utility.DockerImageName +import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX import com.redis.testcontainers.RedisContainer @@ -18,71 +19,7 @@ import spock.lang.Shared import java.util.concurrent.CompletionException -abstract class Lettuce5SyncClientTest extends VersionedNamingTestBase { - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = ClientOptions.builder().autoReconnect(false).build() - - @Shared - int port - @Shared - int incorrectPort - @Shared - String dbAddr - @Shared - String dbAddrNonExistent - @Shared - String dbUriNonExistent - @Shared - String embeddedDbUri - - @Shared - RedisContainer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) - .waitingFor(Wait.forListeningPort()) - - @Shared - Map testHashMap = [ - firstname: "John", - lastname : "Doe", - age : "53" - ] - - RedisClient redisClient - StatefulConnection connection - RedisCommands syncCommands - - def setupSpec() { - redisServer.start() - println "Using redis: $redisServer.redisURI" - port = redisServer.firstMappedPort - incorrectPort = PortUtils.randomOpenPort() - dbAddr = redisServer.getHost() + ":" + port + "/" + DB_INDEX - dbAddrNonExistent = redisServer.getHost() + ":" + incorrectPort + "/" + DB_INDEX - dbUriNonExistent = "redis://" + dbAddrNonExistent - embeddedDbUri = "redis://" + dbAddr - } - - def setup() { - redisClient = 
RedisClient.create(embeddedDbUri) - connection = redisClient.connect() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - syncCommands.hmset("TESTHM", testHashMap) - - // 2 sets + 1 connect trace - TEST_WRITER.waitForTraces(3) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - } - - def cleanupSpec() { - redisServer.stop() - } - +abstract class Lettuce5SyncClientTest extends Lettuce5ClientTestBase { def "connect"() { setup: RedisClient testConnectionClient = RedisClient.create(embeddedDbUri) @@ -232,7 +169,10 @@ abstract class Lettuce5SyncClientTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "redis-client" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.PEER_HOSTNAME" redisServer.getHost() + "$Tags.PEER_PORT" port "$Tags.DB_TYPE" "redis" + "db.redis.dbIndex" 0 peerServiceFrom(Tags.PEER_HOSTNAME) defaultTags() } @@ -419,7 +359,8 @@ abstract class Lettuce5SyncClientTest extends VersionedNamingTestBase { } } } -class Lettuce5AsyncClientV0Test extends Lettuce5AsyncClientTest { + +class Lettuce5SyncClientV0Test extends Lettuce5SyncClientTest { @Override int version() { @@ -437,7 +378,7 @@ class Lettuce5AsyncClientV0Test extends Lettuce5AsyncClientTest { } } -class Lettuce5AsyncClientV1ForkedTest extends Lettuce5AsyncClientTest { +class Lettuce5SyncClientV1ForkedTest extends Lettuce5SyncClientTest { @Override int version() { @@ -454,3 +395,28 @@ class Lettuce5AsyncClientV1ForkedTest extends Lettuce5AsyncClientTest { return "redis.command" } } + +class Lettuce5SyncClientProfilingForkedTest extends Lettuce5SyncClientTest { + + @Override + protected void configurePreAgent() { + + super.configurePreAgent() + injectSysConfig('dd.profiling.enabled', 'true') + } + + @Override + int version() { + return 0 + } + + @Override + String service() { + return "redis" + } + + @Override + String operation() { + return "redis.query" + } +} From 9c8cd5a0fe00c83881f6093dbbf8183a4f664817 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 2 Apr 2025 12:47:06 -0400 Subject: [PATCH 039/113] Fix lettuce tests # Conflicts: # dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy # dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy # dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy --- .../groovy/Lettuce4AsyncClientTest.groovy | 81 +------------- .../test/groovy/Lettuce4ClientTestBase.groovy | 82 ++++++++++++++ .../test/groovy/Lettuce4SyncClientTest.groovy | 76 +------------ .../groovy/Lettuce5AsyncClientTest.groovy | 76 +------------ .../test/groovy/Lettuce5ClientTestBase.groovy | 78 ++++++++++++++ .../groovy/Lettuce5ReactiveClientTest.groovy | 59 +--------- .../test/groovy/Lettuce5SyncClientTest.groovy | 102 ++++++------------ 7 files changed, 205 insertions(+), 349 deletions(-) create mode 100644 dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy create mode 100644 dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy diff --git a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4AsyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4AsyncClientTest.groovy index e3b755f1abc..912db6eac67 100644 --- a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4AsyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4AsyncClientTest.groovy @@ -1,22 +1,15 @@ import static 
datadog.trace.instrumentation.lettuce4.InstrumentationPoints.AGENT_CRASHING_COMMAND_PREFIX -import com.lambdaworks.redis.ClientOptions import com.lambdaworks.redis.RedisClient import com.lambdaworks.redis.RedisConnectionException import com.lambdaworks.redis.RedisFuture import com.lambdaworks.redis.RedisURI import com.lambdaworks.redis.api.StatefulConnection -import com.lambdaworks.redis.api.async.RedisAsyncCommands -import com.lambdaworks.redis.api.sync.RedisCommands import com.lambdaworks.redis.codec.Utf8StringCodec import com.lambdaworks.redis.protocol.AsyncCommand -import datadog.trace.agent.test.naming.VersionedNamingTestBase -import datadog.trace.agent.test.utils.PortUtils import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.bootstrap.instrumentation.api.Tags -import redis.embedded.RedisServer -import spock.lang.Shared import spock.util.concurrent.AsyncConditions import java.util.concurrent.CancellationException @@ -26,79 +19,7 @@ import java.util.function.BiFunction import java.util.function.Consumer import java.util.function.Function -abstract class Lettuce4AsyncClientTest extends VersionedNamingTestBase { - public static final String HOST = "127.0.0.1" - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = new ClientOptions.Builder().autoReconnect(false).build() - - @Shared - int port - @Shared - int incorrectPort - @Shared - String dbAddr - @Shared - String dbAddrNonExistent - @Shared - String dbUriNonExistent - @Shared - String embeddedDbUri - - @Shared - RedisServer redisServer - - @Shared - Map testHashMap = [ - firstname: "John", - lastname : "Doe", - age : "53" - ] - - RedisClient redisClient - StatefulConnection connection - RedisAsyncCommands asyncCommands - RedisCommands syncCommands - - def setupSpec() { - port = PortUtils.randomOpenPort() - incorrectPort = PortUtils.randomOpenPort() - dbAddr = HOST + ":" + port + "/" + DB_INDEX - dbAddrNonExistent = HOST + ":" + incorrectPort + "/" + DB_INDEX - dbUriNonExistent = "redis://" + dbAddrNonExistent - embeddedDbUri = "redis://" + dbAddr - - redisServer = RedisServer.builder() - // bind to localhost to avoid firewall popup - .setting("bind " + HOST) - // set max memory to avoid problems in CI - .setting("maxmemory 128M") - .port(port).build() - } - - def setup() { - redisClient = RedisClient.create(embeddedDbUri) - - println "Using redis: $redisServer.args" - redisServer.start() - redisClient.setOptions(CLIENT_OPTIONS) - - connection = redisClient.connect() - asyncCommands = connection.async() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - - // 1 set + 1 connect trace - TEST_WRITER.waitForTraces(2) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - redisServer.stop() - } - +abstract class Lettuce4AsyncClientTest extends Lettuce4ClientTestBase { def "connect using get on ConnectionFuture"() { setup: RedisClient testConnectionClient = RedisClient.create(embeddedDbUri) diff --git a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy new file mode 100644 index 00000000000..1c824f160e2 --- /dev/null +++ b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy @@ -0,0 +1,82 @@ +import com.lambdaworks.redis.ClientOptions +import com.lambdaworks.redis.RedisClient +import 
com.lambdaworks.redis.api.StatefulConnection +import com.lambdaworks.redis.api.sync.RedisCommands +import datadog.trace.agent.test.naming.VersionedNamingTestBase +import datadog.trace.agent.test.utils.PortUtils +import redis.embedded.RedisServer +import spock.lang.Shared + +import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace + +abstract class Lettuce4ClientTestBase extends VersionedNamingTestBase { + public static final String HOST = "127.0.0.1" + public static final int DB_INDEX = 0 + // Disable autoreconnect so we do not get stray traces popping up on server shutdown + public static final ClientOptions CLIENT_OPTIONS = new ClientOptions.Builder().autoReconnect(false).build() + + @Shared + int port + @Shared + int incorrectPort + @Shared + String dbAddr + @Shared + String dbAddrNonExistent + @Shared + String dbUriNonExistent + @Shared + String embeddedDbUri + + @Shared + RedisServer redisServer + + @Shared + Map testHashMap = [ + firstname: "John", + lastname : "Doe", + age : "53" + ] + + RedisClient redisClient + StatefulConnection connection + RedisCommands syncCommands + + def setupSpec() { + port = PortUtils.randomOpenPort() + incorrectPort = PortUtils.randomOpenPort() + dbAddr = HOST + ":" + port + "/" + DB_INDEX + dbAddrNonExistent = HOST + ":" + incorrectPort + "/" + DB_INDEX + dbUriNonExistent = "redis://" + dbAddrNonExistent + embeddedDbUri = "redis://" + dbAddr + + redisServer = RedisServer.builder() + // bind to localhost to avoid firewall popup + .setting("bind " + HOST) + // set max memory to avoid problems in CI + .setting("maxmemory 128M") + .port(port).build() + } + + def setup() { + redisServer.start() + + redisClient = RedisClient.create(embeddedDbUri) + redisClient.setOptions(CLIENT_OPTIONS) + + runUnderTrace("setup") { + connection = redisClient.connect() + syncCommands = connection.sync() + + syncCommands.set("TESTKEY", "TESTVAL") + syncCommands.hmset("TESTHM", testHashMap) + } + TEST_WRITER.waitForTraces(1) + TEST_WRITER.clear() + } + + def cleanup() { + connection.close() + redisServer.stop() + } +} diff --git a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4SyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4SyncClientTest.groovy index ac455653b52..1e0bb003906 100644 --- a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4SyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4SyncClientTest.groovy @@ -1,87 +1,13 @@ import static datadog.trace.instrumentation.lettuce4.InstrumentationPoints.AGENT_CRASHING_COMMAND_PREFIX -import com.lambdaworks.redis.ClientOptions import com.lambdaworks.redis.RedisClient import com.lambdaworks.redis.RedisConnectionException import com.lambdaworks.redis.api.StatefulConnection -import com.lambdaworks.redis.api.sync.RedisCommands -import datadog.trace.agent.test.naming.VersionedNamingTestBase -import datadog.trace.agent.test.utils.PortUtils import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.bootstrap.instrumentation.api.Tags -import redis.embedded.RedisServer -import spock.lang.Shared - -abstract class Lettuce4SyncClientTest extends VersionedNamingTestBase { - public static final String HOST = "127.0.0.1" - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = new ClientOptions.Builder().autoReconnect(false).build() - - @Shared - int port - @Shared - int 
incorrectPort - @Shared - String dbAddr - @Shared - String dbAddrNonExistent - @Shared - String dbUriNonExistent - @Shared - String embeddedDbUri - - @Shared - RedisServer redisServer - - @Shared - Map testHashMap = [ - firstname: "John", - lastname : "Doe", - age : "53" - ] - - RedisClient redisClient - StatefulConnection connection - RedisCommands syncCommands - - def setupSpec() { - port = PortUtils.randomOpenPort() - incorrectPort = PortUtils.randomOpenPort() - dbAddr = HOST + ":" + port + "/" + DB_INDEX - dbAddrNonExistent = HOST + ":" + incorrectPort + "/" + DB_INDEX - dbUriNonExistent = "redis://" + dbAddrNonExistent - embeddedDbUri = "redis://" + dbAddr - - redisServer = RedisServer.builder() - // bind to localhost to avoid firewall popup - .setting("bind " + HOST) - // set max memory to avoid problems in CI - .setting("maxmemory 128M") - .port(port).build() - } - - def setup() { - redisClient = RedisClient.create(embeddedDbUri) - - redisServer.start() - connection = redisClient.connect() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - syncCommands.hmset("TESTHM", testHashMap) - - // 2 sets + 1 connect trace - TEST_WRITER.waitForTraces(3) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - redisServer.stop() - } +abstract class Lettuce4SyncClientTest extends Lettuce4ClientTestBase { def "connect"() { setup: RedisClient testConnectionClient = RedisClient.create(embeddedDbUri) diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy index 09eac435c21..8d79f68a9fe 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy @@ -28,76 +28,10 @@ import java.util.function.BiFunction import java.util.function.Consumer import java.util.function.Function +import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX -abstract class Lettuce5AsyncClientTest extends VersionedNamingTestBase { - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = ClientOptions.builder().autoReconnect(false).build() - - @Shared - int port - @Shared - int incorrectPort - @Shared - String dbAddr - @Shared - String dbAddrNonExistent - @Shared - String dbUriNonExistent - @Shared - String embeddedDbUri - - @Shared - RedisContainer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME) - .waitingFor(Wait.forListeningPort()) - - @Shared - Map testHashMap = [ - firstname: "John", - lastname : "Doe", - age : "53" - ] - - RedisClient redisClient - StatefulConnection connection - RedisAsyncCommands asyncCommands - RedisCommands syncCommands - - def setupSpec() { - redisServer.start() - println "Using redis: $redisServer.redisURI" - port = redisServer.firstMappedPort - incorrectPort = PortUtils.randomOpenPort() - dbAddr = redisServer.getHost() + ":" + port + "/" + DB_INDEX - dbAddrNonExistent = redisServer.getHost() + ":" + incorrectPort + "/" + DB_INDEX - dbUriNonExistent = "redis://" + dbAddrNonExistent - embeddedDbUri = "redis://" + dbAddr - } - - def setup() { - redisClient = RedisClient.create(embeddedDbUri) - redisClient.setOptions(CLIENT_OPTIONS) - - 
connection = redisClient.connect() - asyncCommands = connection.async() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - - // 1 set + 1 connect trace - TEST_WRITER.waitForTraces(2) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - } - - def cleanupSpec() { - redisServer.stop() - } - +abstract class Lettuce5AsyncClientTest extends Lettuce5ClientTestBase { def "connect using get on ConnectionFuture"() { setup: RedisClient testConnectionClient = RedisClient.create(embeddedDbUri) @@ -582,7 +516,7 @@ abstract class Lettuce5AsyncClientTest extends VersionedNamingTestBase { } } -class Lettuce5SyncClientV0Test extends Lettuce5AsyncClientTest { +class Lettuce5AsyncClientV0Test extends Lettuce5AsyncClientTest { @Override int version() { @@ -600,7 +534,7 @@ class Lettuce5SyncClientV0Test extends Lettuce5AsyncClientTest { } } -class Lettuce5SyncClientV1ForkedTest extends Lettuce5AsyncClientTest { +class Lettuce5AsyncClientV1ForkedTest extends Lettuce5AsyncClientTest { @Override int version() { @@ -619,7 +553,7 @@ class Lettuce5SyncClientV1ForkedTest extends Lettuce5AsyncClientTest { } -class Lettuce5AsyncProfilingForkedTest extends Lettuce5AsyncClientTest { +class Lettuce5AsyncClientProfilingForkedTest extends Lettuce5AsyncClientTest { @Override protected void configurePreAgent() { diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy new file mode 100644 index 00000000000..d693b9dada9 --- /dev/null +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy @@ -0,0 +1,78 @@ +import com.redis.testcontainers.RedisContainer +import datadog.trace.agent.test.naming.VersionedNamingTestBase +import datadog.trace.agent.test.utils.PortUtils +import io.lettuce.core.ClientOptions +import io.lettuce.core.RedisClient +import io.lettuce.core.api.StatefulRedisConnection +import io.lettuce.core.api.async.RedisAsyncCommands +import io.lettuce.core.api.reactive.RedisReactiveCommands +import io.lettuce.core.api.sync.RedisCommands +import org.testcontainers.containers.wait.strategy.Wait +import org.testcontainers.utility.DockerImageName +import spock.util.concurrent.PollingConditions + +import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace + +abstract class Lettuce5ClientTestBase extends VersionedNamingTestBase { + public static final int DB_INDEX = 0 + // Disable autoreconnect so we do not get stray traces popping up on server shutdown + public static final ClientOptions CLIENT_OPTIONS = ClientOptions.builder().autoReconnect(false).build() + + public static final Map testHashMap = [ + firstname: "John", + lastname : "Doe", + age : "53" + ] + + int port + int incorrectPort + String dbAddr + String dbAddrNonExistent + String dbUriNonExistent + String embeddedDbUri + + RedisContainer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) + .waitingFor(Wait.forListeningPort()) + + RedisClient redisClient + StatefulRedisConnection connection + RedisReactiveCommands reactiveCommands + RedisAsyncCommands asyncCommands + RedisCommands syncCommands + + def setup() { + redisServer.start() + println "Using redis: $redisServer.redisURI" + + port = redisServer.firstMappedPort + incorrectPort = PortUtils.randomOpenPort() + dbAddr = redisServer.getHost() + ":" + port + "/" + DB_INDEX + dbAddrNonExistent = redisServer.getHost() + ":" + incorrectPort + "/" + DB_INDEX + dbUriNonExistent = 
"redis://" + dbAddrNonExistent + embeddedDbUri = "redis://" + dbAddr + + redisClient = RedisClient.create(embeddedDbUri) + redisClient.setOptions(CLIENT_OPTIONS) + + runUnderTrace("setup") { + new PollingConditions(delay: 3, timeout: 15).eventually { + (connection = redisClient.connect()) != null + } + reactiveCommands = connection.reactive() + asyncCommands = connection.async() + syncCommands = connection.sync() + + syncCommands.set("TESTKEY", "TESTVAL") + syncCommands.hmset("TESTHM", testHashMap) + } + + TEST_WRITER.waitForTraces(1) + TEST_WRITER.clear() + } + + def cleanup() { + connection.close() + redisClient.shutdown() + redisServer.stop() + } +} diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy index fde1401b52a..964320b0cff 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy @@ -1,4 +1,5 @@ import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace +import static datadog.trace.agent.test.utils.TraceUtils.runnableUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX import com.redis.testcontainers.RedisContainer @@ -19,61 +20,7 @@ import spock.util.concurrent.PollingConditions import java.util.function.Consumer -abstract class Lettuce5ReactiveClientTest extends VersionedNamingTestBase { - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = ClientOptions.builder().autoReconnect(false).build() - - @Override - boolean useStrictTraceWrites() { - // TODO fix this by making sure that spans get closed properly - return false - } - - @Shared - String embeddedDbUri - - @Shared - int port - - @Shared - RedisContainer redisServer = new RedisContainer(RedisContainer.DEFAULT_TAG) - .waitingFor(Wait.forListeningPort()) - - RedisClient redisClient - StatefulRedisConnection connection - RedisReactiveCommands reactiveCommands - RedisCommands syncCommands - - def setup() { - redisServer.start() - println "Using redis: $redisServer.redisURI" - port = redisServer.firstMappedPort - String dbAddr = redisServer.getHost() + ":" + port + "/" + DB_INDEX - embeddedDbUri = "redis://" + dbAddr - redisClient = RedisClient.create(embeddedDbUri) - - redisClient.setOptions(CLIENT_OPTIONS) - - new PollingConditions(delay: 3, timeout: 15).eventually { - (connection = redisClient.connect()) != null - } - reactiveCommands = connection.reactive() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - - // 1 set + 1 connect trace - TEST_WRITER.waitForTraces(2) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - redisClient.shutdown() - redisServer.stop() - } - +abstract class Lettuce5ReactiveClientTest extends Lettuce5ClientTestBase { def "set command with subscribe on a defined consumer"() { def conds = new AsyncConditions() @@ -198,7 +145,7 @@ abstract class Lettuce5ReactiveClientTest extends VersionedNamingTestBase { when: reactiveCommands.randomkey().subscribe { res -> conds.evaluate { - assert res == "TESTKEY" + assert res == "TESTKEY" || res == "TESTHM" } } diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy 
b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy index ee7182481cf..1e1fb8d6eaf 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy @@ -1,3 +1,6 @@ +import org.testcontainers.utility.DockerImageName + +import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX import com.redis.testcontainers.RedisContainer @@ -16,71 +19,7 @@ import spock.lang.Shared import java.util.concurrent.CompletionException -abstract class Lettuce5SyncClientTest extends VersionedNamingTestBase { - public static final int DB_INDEX = 0 - // Disable autoreconnect so we do not get stray traces popping up on server shutdown - public static final ClientOptions CLIENT_OPTIONS = ClientOptions.builder().autoReconnect(false).build() - - @Shared - int port - @Shared - int incorrectPort - @Shared - String dbAddr - @Shared - String dbAddrNonExistent - @Shared - String dbUriNonExistent - @Shared - String embeddedDbUri - - @Shared - RedisContainer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME) - .waitingFor(Wait.forListeningPort()) - - @Shared - Map testHashMap = [ - firstname: "John", - lastname : "Doe", - age : "53" - ] - - RedisClient redisClient - StatefulConnection connection - RedisCommands syncCommands - - def setupSpec() { - redisServer.start() - println "Using redis: $redisServer.redisURI" - port = redisServer.firstMappedPort - incorrectPort = PortUtils.randomOpenPort() - dbAddr = redisServer.getHost() + ":" + port + "/" + DB_INDEX - dbAddrNonExistent = redisServer.getHost() + ":" + incorrectPort + "/" + DB_INDEX - dbUriNonExistent = "redis://" + dbAddrNonExistent - embeddedDbUri = "redis://" + dbAddr - } - - def setup() { - redisClient = RedisClient.create(embeddedDbUri) - connection = redisClient.connect() - syncCommands = connection.sync() - - syncCommands.set("TESTKEY", "TESTVAL") - syncCommands.hmset("TESTHM", testHashMap) - - // 2 sets + 1 connect trace - TEST_WRITER.waitForTraces(3) - TEST_WRITER.clear() - } - - def cleanup() { - connection.close() - } - - def cleanupSpec() { - redisServer.stop() - } - +abstract class Lettuce5SyncClientTest extends Lettuce5ClientTestBase { def "connect"() { setup: RedisClient testConnectionClient = RedisClient.create(embeddedDbUri) @@ -230,7 +169,10 @@ abstract class Lettuce5SyncClientTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "redis-client" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT + "$Tags.PEER_HOSTNAME" redisServer.getHost() + "$Tags.PEER_PORT" port "$Tags.DB_TYPE" "redis" + "db.redis.dbIndex" 0 peerServiceFrom(Tags.PEER_HOSTNAME) defaultTags() } @@ -417,7 +359,8 @@ abstract class Lettuce5SyncClientTest extends VersionedNamingTestBase { } } } -class Lettuce5AsyncClientV0Test extends Lettuce5AsyncClientTest { + +class Lettuce5SyncClientV0Test extends Lettuce5SyncClientTest { @Override int version() { @@ -435,7 +378,7 @@ class Lettuce5AsyncClientV0Test extends Lettuce5AsyncClientTest { } } -class Lettuce5AsyncClientV1ForkedTest extends Lettuce5AsyncClientTest { +class Lettuce5SyncClientV1ForkedTest extends Lettuce5SyncClientTest { @Override int version() { @@ -452,3 +395,28 @@ class Lettuce5AsyncClientV1ForkedTest extends Lettuce5AsyncClientTest { return "redis.command" } } + +class Lettuce5SyncClientProfilingForkedTest extends 
Lettuce5SyncClientTest { + + @Override + protected void configurePreAgent() { + + super.configurePreAgent() + injectSysConfig('dd.profiling.enabled', 'true') + } + + @Override + int version() { + return 0 + } + + @Override + String service() { + return "redis" + } + + @Override + String operation() { + return "redis.query" + } +} From d7592b82680eebbaf49562e1426345a1458160e5 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 2 Apr 2025 12:52:51 -0400 Subject: [PATCH 040/113] cleanup imports --- .../test/groovy/Lettuce4ClientTestBase.groovy | 4 ++-- .../test/groovy/Lettuce5AsyncClientTest.groovy | 9 --------- .../test/groovy/Lettuce5ClientTestBase.groovy | 2 +- .../groovy/Lettuce5ReactiveClientTest.groovy | 17 +++++------------ .../test/groovy/Lettuce5SyncClientTest.groovy | 10 ---------- 5 files changed, 8 insertions(+), 34 deletions(-) diff --git a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy index 1c824f160e2..ba5abdbcce5 100644 --- a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy +++ b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy @@ -51,9 +51,9 @@ abstract class Lettuce4ClientTestBase extends VersionedNamingTestBase { embeddedDbUri = "redis://" + dbAddr redisServer = RedisServer.builder() - // bind to localhost to avoid firewall popup + // bind to localhost to avoid firewall popup .setting("bind " + HOST) - // set max memory to avoid problems in CI + // set max memory to avoid problems in CI .setting("maxmemory 128M") .port(port).build() } diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy index 8d79f68a9fe..519373506ac 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5AsyncClientTest.groovy @@ -1,21 +1,13 @@ -import com.redis.testcontainers.RedisContainer -import datadog.trace.agent.test.naming.VersionedNamingTestBase -import datadog.trace.agent.test.utils.PortUtils import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.bootstrap.instrumentation.api.Tags -import io.lettuce.core.ClientOptions import io.lettuce.core.ConnectionFuture import io.lettuce.core.RedisClient import io.lettuce.core.RedisFuture import io.lettuce.core.RedisURI import io.lettuce.core.api.StatefulConnection -import io.lettuce.core.api.async.RedisAsyncCommands -import io.lettuce.core.api.sync.RedisCommands import io.lettuce.core.codec.StringCodec import io.lettuce.core.protocol.AsyncCommand -import org.testcontainers.containers.wait.strategy.Wait -import spock.lang.Shared import spock.util.concurrent.AsyncConditions import java.time.Duration @@ -28,7 +20,6 @@ import java.util.function.BiFunction import java.util.function.Consumer import java.util.function.Function -import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX abstract class Lettuce5AsyncClientTest extends Lettuce5ClientTestBase { diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy index 
d693b9dada9..36031ad5c9f 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy @@ -32,7 +32,7 @@ abstract class Lettuce5ClientTestBase extends VersionedNamingTestBase { String embeddedDbUri RedisContainer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) - .waitingFor(Wait.forListeningPort()) + .waitingFor(Wait.forListeningPort()) RedisClient redisClient StatefulRedisConnection connection diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy index 964320b0cff..8716f34b786 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ReactiveClientTest.groovy @@ -1,22 +1,11 @@ import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace -import static datadog.trace.agent.test.utils.TraceUtils.runnableUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX -import com.redis.testcontainers.RedisContainer -import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.bootstrap.instrumentation.api.Tags -import io.lettuce.core.ClientOptions -import io.lettuce.core.RedisClient -import io.lettuce.core.api.StatefulRedisConnection -import io.lettuce.core.api.reactive.RedisReactiveCommands -import io.lettuce.core.api.sync.RedisCommands -import org.testcontainers.containers.wait.strategy.Wait import reactor.core.scheduler.Schedulers -import spock.lang.Shared import spock.util.concurrent.AsyncConditions -import spock.util.concurrent.PollingConditions import java.util.function.Consumer @@ -69,7 +58,11 @@ abstract class Lettuce5ReactiveClientTest extends Lettuce5ClientTestBase { def conds = new AsyncConditions() when: - reactiveCommands.get("TESTKEY").subscribe { res -> conds.evaluate { assert res == "TESTVAL" } } + reactiveCommands.get("TESTKEY").subscribe { res -> + conds.evaluate { + assert res == "TESTVAL" + } + } then: conds.await() diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy index 1e1fb8d6eaf..e0d946558d7 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy @@ -1,21 +1,11 @@ -import org.testcontainers.utility.DockerImageName - -import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX -import com.redis.testcontainers.RedisContainer -import datadog.trace.agent.test.naming.VersionedNamingTestBase -import datadog.trace.agent.test.utils.PortUtils import datadog.trace.api.Config import datadog.trace.api.DDSpanTypes import datadog.trace.bootstrap.instrumentation.api.Tags -import io.lettuce.core.ClientOptions import io.lettuce.core.RedisClient import io.lettuce.core.RedisConnectionException import io.lettuce.core.api.StatefulConnection -import io.lettuce.core.api.sync.RedisCommands -import org.testcontainers.containers.wait.strategy.Wait -import 
spock.lang.Shared import java.util.concurrent.CompletionException From 7dad59875488c791d8d3249322247ac7b5cb143b Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 2 Apr 2025 12:55:32 -0400 Subject: [PATCH 041/113] create asyncCommands --- .../lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy index ba5abdbcce5..0f1c6a82575 100644 --- a/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy +++ b/dd-java-agent/instrumentation/lettuce-4/src/test/groovy/Lettuce4ClientTestBase.groovy @@ -1,6 +1,7 @@ import com.lambdaworks.redis.ClientOptions import com.lambdaworks.redis.RedisClient import com.lambdaworks.redis.api.StatefulConnection +import com.lambdaworks.redis.api.async.RedisAsyncCommands import com.lambdaworks.redis.api.sync.RedisCommands import datadog.trace.agent.test.naming.VersionedNamingTestBase import datadog.trace.agent.test.utils.PortUtils @@ -41,6 +42,7 @@ abstract class Lettuce4ClientTestBase extends VersionedNamingTestBase { RedisClient redisClient StatefulConnection connection RedisCommands syncCommands + RedisAsyncCommands asyncCommands def setupSpec() { port = PortUtils.randomOpenPort() @@ -67,6 +69,7 @@ abstract class Lettuce4ClientTestBase extends VersionedNamingTestBase { runUnderTrace("setup") { connection = redisClient.connect() syncCommands = connection.sync() + asyncCommands = connection.async() syncCommands.set("TESTKEY", "TESTVAL") syncCommands.hmset("TESTHM", testHashMap) From 305b97e86b693cc11f8261a96935cc6393bd71b6 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 2 Apr 2025 14:19:09 -0400 Subject: [PATCH 042/113] codenarc --- .../lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy index 36031ad5c9f..9943c014c8c 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5ClientTestBase.groovy @@ -9,6 +9,7 @@ import io.lettuce.core.api.reactive.RedisReactiveCommands import io.lettuce.core.api.sync.RedisCommands import org.testcontainers.containers.wait.strategy.Wait import org.testcontainers.utility.DockerImageName +import spock.lang.Shared import spock.util.concurrent.PollingConditions import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace @@ -18,7 +19,8 @@ abstract class Lettuce5ClientTestBase extends VersionedNamingTestBase { // Disable autoreconnect so we do not get stray traces popping up on server shutdown public static final ClientOptions CLIENT_OPTIONS = ClientOptions.builder().autoReconnect(false).build() - public static final Map testHashMap = [ + @Shared + Map testHashMap = [ firstname: "John", lastname : "Doe", age : "53" From 5a4913b934ac9054fafb266b65ef68219fc4452a Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 2 Apr 2025 15:02:04 -0400 Subject: [PATCH 043/113] Cleanup aggregator tests to try to reduce flakiness --- .../ConflatingMetricAggregatorTest.groovy | 37 +++++++++++++------ 1 file changed, 26 insertions(+), 11 deletions(-) diff --git 
a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy index 8fa819362d9..486254798d7 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy @@ -23,7 +23,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification { static final int HTTP_OK = 200 @Shared - long reportingInterval = 10 + long reportingInterval = 100 @Shared int queueSize = 256 @@ -106,9 +106,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { CountDownLatch latch = new CountDownLatch(1) aggregator.publish([new SimpleSpan("service", "operation", "resource", "type", false, true, false, 0, 100, HTTP_OK)]) aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: + latchTriggered 1 * writer.startBucket(1, _, _) 1 * writer.add(new MetricKey("resource", "service", "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> value.getHitCount() == 1 && value.getTopLevelCount() == 1 && value.getDuration() == 100 @@ -135,9 +136,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { new SimpleSpan("service", "operation", "resource", "type", measured, topLevel, false, 0, 100, HTTP_OK) ]) aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: + latchTriggered 1 * writer.startBucket(1, _, _) 1 * writer.add(new MetricKey("resource", "service", "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> value.getHitCount() == 1 && value.getTopLevelCount() == topLevelCount && value.getDuration() == 100 @@ -177,9 +179,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { aggregator.publish(trace) } aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "metrics should be conflated" + latchTriggered 1 * writer.finishBucket() >> { latch.countDown() } 1 * writer.startBucket(2, _, SECONDS.toNanos(reportingInterval)) 1 * writer.add(new MetricKey("resource", "service", "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> @@ -216,9 +219,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { ]) } aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "the first aggregate should be dropped but the rest reported" + latchTriggered 1 * writer.startBucket(10, _, SECONDS.toNanos(reportingInterval)) for (int i = 1; i < 11; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> @@ -252,9 +256,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { ]) } aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "all aggregates should be reported" + latchTriggered 1 * writer.startBucket(5, _, SECONDS.toNanos(reportingInterval)) for (int i = 0; i < 5; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> @@ -271,9 +276,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { ]) } aggregator.report() - latch.await(2, SECONDS) + latchTriggered = latch.await(2, SECONDS) then: "aggregate not updated in 
cycle is not reported" + latchTriggered 1 * writer.startBucket(4, _, SECONDS.toNanos(reportingInterval)) for (int i = 1; i < 5; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> @@ -307,16 +313,17 @@ class ConflatingMetricAggregatorTest extends DDSpecification { ]) } aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "all aggregates should be reported" + latchTriggered 1 * writer.startBucket(5, _, SECONDS.toNanos(reportingInterval)) for (int i = 0; i < 5; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> value.getHitCount() == 1 && value.getDuration() == duration } } - 1 * writer.finishBucket() + 1 * writer.finishBucket() >> { latch.countDown() } when: reportAndWaitUntilEmpty(aggregator) @@ -349,9 +356,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { new SimpleSpan("service" + i, "operation", "resource", "type", false, true, false, 0, duration, HTTP_OK) ]) } - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "all aggregates should be reported" + latchTriggered 1 * writer.startBucket(5, _, SECONDS.toNanos(1)) for (int i = 0; i < 5; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> @@ -421,9 +429,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { new SimpleSpan("service" + i, "operation", "resource", "type", false, true, false, 0, duration, HTTP_OK) ]) } - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "writer should be reset if reporting fails" + latchTriggered 1 * writer.startBucket(_, _, _) >> { throw new IllegalArgumentException("something went wrong") } @@ -449,6 +458,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { then: notThrown(TimeoutException) !flushed + + cleanup: + aggregator.close() } def "force flush should wait for aggregator to start"() { @@ -480,6 +492,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { then: notThrown(TimeoutException) flushed + + cleanup: + aggregator.close() } def reportAndWaitUntilEmpty(ConflatingMetricsAggregator aggregator) { From c5c158fef199bb5e96dec80dd68bfea336a925f0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 2 Apr 2025 19:03:35 -0400 Subject: [PATCH 044/113] debug maven logs for now --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 7e033425bd0..8d3e92fc837 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -89,7 +89,7 @@ default: image: ghcr.io/datadog/dd-trace-java-docker-build:${JAVA_BUILD_IMAGE_VERSION}-base stage: build variables: - MAVEN_OPTS: "-Xms64M -Xmx512M" + MAVEN_OPTS: "-Xms64M -Xmx512M -Dorg.slf4j.simpleLogger.defaultLogLevel=debug" GRADLE_WORKERS: 2 GRADLE_MEM: 2560M KUBERNETES_CPU_REQUEST: 8 From 4dee866754f79c0a44c7af030093e498f4560397 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 10:45:54 -0400 Subject: [PATCH 045/113] try non-ssh url --- .../src/test/resources/ci/git/shallow/git/config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/git/shallow/git/config b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/git/shallow/git/config index 67fc9ed5f57..f145b18c5c0 100644 --- 
a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/git/shallow/git/config +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/git/shallow/git/config @@ -6,7 +6,7 @@ ignorecase = true precomposeunicode = true [remote "origin"] - url = git@github.com:Netflix/zuul.git + url = https://github.com/Netflix/zuul.git fetch = +refs/heads/master:refs/remotes/origin/master [branch "master"] remote = origin From c23dcde23cad4f1364cf322060d7437d1c09ab11 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 10:53:04 -0400 Subject: [PATCH 046/113] Revert "debug maven logs for now" This reverts commit c5c158fef199bb5e96dec80dd68bfea336a925f0. --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8d3e92fc837..7e033425bd0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -89,7 +89,7 @@ default: image: ghcr.io/datadog/dd-trace-java-docker-build:${JAVA_BUILD_IMAGE_VERSION}-base stage: build variables: - MAVEN_OPTS: "-Xms64M -Xmx512M -Dorg.slf4j.simpleLogger.defaultLogLevel=debug" + MAVEN_OPTS: "-Xms64M -Xmx512M" GRADLE_WORKERS: 2 GRADLE_MEM: 2560M KUBERNETES_CPU_REQUEST: 8 From 6d6dc39bbac5ff9f1e7a5fe75615f680216bd66e Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 11:12:08 -0400 Subject: [PATCH 047/113] add some debug logging --- .../test/groovy/SpringBootNativeInstrumentationTest.groovy | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy index c5b9edeea24..8d3f214f9b7 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy +++ b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy @@ -39,7 +39,8 @@ class SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { '-Ddd.profiling.upload.period=1', '-Ddd.profiling.start-force-first=true', "-Ddd.profiling.debug.dump_path=${testJfrDir}", - "-Ddd.integration.spring-boot.enabled=true" + "-Ddd.integration.spring-boot.enabled=true", + "-Ddd.trace.debug=true" ]) ProcessBuilder processBuilder = new ProcessBuilder(command) processBuilder.directory(new File(buildDirectory)) @@ -115,4 +116,8 @@ class SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { }) return jfrCount.get() } + + def logLevel() { + return "debug" + } } From 93fdb13ae015b864372c15d4fc03ec145be58f78 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 11:15:30 -0400 Subject: [PATCH 048/113] codenarc --- .../lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy | 3 --- 1 file changed, 3 deletions(-) diff --git a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy index 0bfdd9e87d6..e0d946558d7 100644 --- a/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy +++ b/dd-java-agent/instrumentation/lettuce-5/src/test/groovy/Lettuce5SyncClientTest.groovy @@ -1,6 +1,3 @@ -import org.testcontainers.utility.DockerImageName - -import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.instrumentation.lettuce5.LettuceInstrumentationUtil.AGENT_CRASHING_COMMAND_PREFIX import datadog.trace.api.Config From aa6a29ded49f7b509857ecd9ad1328f196ff53a4 Mon Sep 
17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 11:43:06 -0400 Subject: [PATCH 049/113] try fixed value of parallelism --- .../log-injection/src/test/resources/SpockConfig.groovy | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy b/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy index cfa0ebc2632..f9abbe2c07b 100644 --- a/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy +++ b/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy @@ -1,5 +1,11 @@ runner { parallel { enabled true + + // Runtime.getRuntime().availableProcessors() is used to scale the parallelism by default + // but it returns weird values in Gitlab/kubernetes so fix the parallelism to a specific value + if (System.getenv("GITLAB_CI") != null) { + fixed(4) + } } } From 58e4a01be34cb010ecbb4215a22cba71ac5f8462 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 12:37:30 -0400 Subject: [PATCH 050/113] enable debug logs for smoketest building only --- .gitlab-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 7e033425bd0..e73f6a1f2c0 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -174,6 +174,7 @@ build_tests: CACHE_TYPE: "latestdep" - GRADLE_TARGET: ":smokeTest" CACHE_TYPE: "smoke" + MAVEN_OPTS: "-Xms64M -Xmx512M -Dorg.slf4j.simpleLogger.defaultLogLevel=debug" script: - ./gradlew clean $GRADLE_TARGET -PskipTests $GRADLE_ARGS From 28cf04258020ce60e17744bffeae23e8ee689053 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 16:40:39 -0400 Subject: [PATCH 051/113] limit number of reserved threads --- .gitlab-ci.yml | 2 +- .../trace/agent/test/server/http/TestHttpServer.groovy | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index e73f6a1f2c0..40cafd2f448 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -174,7 +174,7 @@ build_tests: CACHE_TYPE: "latestdep" - GRADLE_TARGET: ":smokeTest" CACHE_TYPE: "smoke" - MAVEN_OPTS: "-Xms64M -Xmx512M -Dorg.slf4j.simpleLogger.defaultLogLevel=debug" + MAVEN_OPTS: "-Xms64M -Xmx512M -Dorg.slf4j.simpleLogger.defaultLogLevel=debug" # Fixme: Build :smokeTest build fails unless mvn debug logging is on script: - ./gradlew clean $GRADLE_TARGET -PskipTests $GRADLE_ARGS diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy index dcb0d6a2292..85a8bcb8e04 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy @@ -19,6 +19,7 @@ import org.eclipse.jetty.server.SslConnectionFactory import org.eclipse.jetty.server.handler.AbstractHandler import org.eclipse.jetty.server.handler.HandlerList import org.eclipse.jetty.util.ssl.SslContextFactory +import org.eclipse.jetty.util.thread.QueuedThreadPool import javax.net.ssl.HostnameVerifier import javax.net.ssl.SSLContext @@ -125,6 +126,11 @@ class TestHttpServer implements AutoCloseable { customizer.call(internalServer) + // Jetty uses a heuristic to set reserved threads that breaks in CI + // It depends on Runtime.getRuntime().availableProcessors() which is not always correct in kubernetes + // Set explicitly to a reasonable number + (internalServer.getThreadPool() as 
QueuedThreadPool).setReservedThreads(10) + internalServer.start() // set after starting, otherwise two callbacks get added. internalServer.stopAtShutdown = true From 99593da04aaae7e78fc32fb22913055cdb6773e0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 16:52:54 -0400 Subject: [PATCH 052/113] some debug for why the smoketest is not running --- dd-smoke-tests/spring-boot-3.0-native/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/dd-smoke-tests/spring-boot-3.0-native/build.gradle b/dd-smoke-tests/spring-boot-3.0-native/build.gradle index ed37cbb264c..ffeea5184c7 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/build.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/build.gradle @@ -76,6 +76,7 @@ if (version >= 17) { jvmArgs "-Ddd.profiling.enabled=true" } } else { + project.logger.lifecycle("Skipping test: testJvm: {}, matcher: {} {}", testJvm, matcher, matcher?.size()) tasks.withType(Test).configureEach { enabled = false } From c038257ffa01e61009722e23c160f72493e32856 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 17:05:48 -0400 Subject: [PATCH 053/113] more debug for gradle launcher failure --- .../trace/civisibility/utils/ShellCommandExecutor.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java index 402ef4ecccf..92103b75fd4 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java @@ -161,7 +161,11 @@ private T executeCommand( throw new TimeoutException( "Timeout while waiting for '" + String.join(" ", command) - + "'; " + + "'; in " + + executionFolder + + "\n StdOut: \n" + + IOUtils.readFully(inputStreamConsumer.read(), Charset.defaultCharset()) + + "\n StdErr: \n " + IOUtils.readFully(errorStreamConsumer.read(), Charset.defaultCharset())); } } catch (InterruptedException e) { From 6dd342a7b7d718b4d911030a38c1cc92ef9239a7 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 17:25:07 -0400 Subject: [PATCH 054/113] try fixing graal test --- .../groovy/SpringBootNativeInstrumentationTest.groovy | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy index 8d3f214f9b7..40cdb006f73 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy +++ b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy @@ -7,6 +7,7 @@ import spock.lang.Shared import spock.lang.TempDir import org.openjdk.jmc.flightrecorder.JfrLoaderToolkit +import spock.util.concurrent.PollingConditions import java.nio.file.FileVisitResult import java.nio.file.Files @@ -70,6 +71,7 @@ class SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { def "check native instrumentation"() { setup: String url = "http://localhost:${httpPort}/hello" + def conditions = new PollingConditions(initialDelay: 2, timeout: 6) when: def response = client.newCall(new Request.Builder().url(url).get().build()).execute() @@ -81,13 +83,9 @@ class 
SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { responseBodyStr.contains("Hello world") waitForTraceCount(1) - // sanity test for profiler generating JFR files - // the recording is collected after 1 second of execution - // make sure the app has been up and running for at least 1.5 seconds - while (System.nanoTime() - ts < 1_500_000_000L) { - LockSupport.parkNanos(1_000_000) + conditions.eventually { + assert countJfrs() > 0 } - countJfrs() > 0 } int countJfrs() { From 854ba803c47d6a9fe4f1f4e30355285f6c2ce0e6 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 19:20:42 -0400 Subject: [PATCH 055/113] try to force the logging --- dd-smoke-tests/spring-boot-3.0-native/build.gradle | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dd-smoke-tests/spring-boot-3.0-native/build.gradle b/dd-smoke-tests/spring-boot-3.0-native/build.gradle index ffeea5184c7..b9f2901dc86 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/build.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/build.gradle @@ -13,6 +13,8 @@ def java17Home = System.getenv('JAVA_17_HOME') // Check 'testJvm' gradle command parameter to be at least GraalVM 17 def matcher = testJvm?.toLowerCase(Locale.ROOT) =~ /graalvm([0-9]+)/ def version = matcher?.size() == 1 ? Integer.parseInt(matcher[0][1]) : -1 +project.logger.lifecycle("TESTLOG: testJvm: {}, matcher: {} {}", testJvm, matcher, matcher?.size()) +println("TESTLOG2") if (version >= 17) { // Retrieve GRAALVM_HOME from JVM environment variables def testJvmEnv = "JAVA_${testJvm}_HOME" @@ -76,7 +78,6 @@ if (version >= 17) { jvmArgs "-Ddd.profiling.enabled=true" } } else { - project.logger.lifecycle("Skipping test: testJvm: {}, matcher: {} {}", testJvm, matcher, matcher?.size()) tasks.withType(Test).configureEach { enabled = false } From 5e0c67acd8a1c6aa755078aa12d625bc8dcde026 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 19:30:22 -0400 Subject: [PATCH 056/113] codenarc --- .../src/test/groovy/SpringBootNativeInstrumentationTest.groovy | 2 -- 1 file changed, 2 deletions(-) diff --git a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy index 40cdb006f73..286f52b8e35 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy +++ b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy @@ -15,7 +15,6 @@ import java.nio.file.Path import java.nio.file.SimpleFileVisitor import java.nio.file.attribute.BasicFileAttributes import java.util.concurrent.atomic.AtomicInteger -import java.util.concurrent.locks.LockSupport class SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { @Shared @@ -77,7 +76,6 @@ class SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { def response = client.newCall(new Request.Builder().url(url).get().build()).execute() then: - def ts = System.nanoTime() def responseBodyStr = response.body().string() responseBodyStr != null responseBodyStr.contains("Hello world") From b7d542738dca647382f3bd5917eff10b168ba01e Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 3 Apr 2025 19:37:23 -0400 Subject: [PATCH 057/113] set available processors for jetty --- .gitlab-ci.yml | 1 + .../trace/agent/test/server/http/TestHttpServer.groovy | 6 ------ 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml 
index 40cafd2f448..75bc976a5e1 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -359,6 +359,7 @@ muzzle-dep-report: TESTCONTAINERS_CHECKS_DISABLE: "true" TESTCONTAINERS_RYUK_DISABLED: "true" TESTCONTAINERS_HUB_IMAGE_NAME_PREFIX: "registry.ddbuild.io/images/mirror/" + JETTY_AVAILABLE_PROCESSORS: 4 # Jetty incorrectly calculates processor count in containers script: - > if [ "$PROFILE_TESTS" == "true" ] && [ "$testJvm" != "ibm8" ] && [ "$testJvm" != "oracle8" ]; diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy index 85a8bcb8e04..539f2f303ac 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy @@ -125,12 +125,6 @@ class TestHttpServer implements AutoCloseable { internalServer.addConnector(https) customizer.call(internalServer) - - // Jetty uses a heuristic to set reserved threads that breaks in CI - // It depends on Runtime.getRuntime().availableProcessors() which is not always correct in kubernetes - // Set explicitly to a reasonable number - (internalServer.getThreadPool() as QueuedThreadPool).setReservedThreads(10) - internalServer.start() // set after starting, otherwise two callbacks get added. internalServer.stopAtShutdown = true From cc2af16f2d0e5b136b7348f36d7395fb9318a5b6 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 4 Apr 2025 10:09:32 -0400 Subject: [PATCH 058/113] codenarc --- .../datadog/trace/agent/test/server/http/TestHttpServer.groovy | 1 - 1 file changed, 1 deletion(-) diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy index 539f2f303ac..3b7f444dbd0 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy @@ -19,7 +19,6 @@ import org.eclipse.jetty.server.SslConnectionFactory import org.eclipse.jetty.server.handler.AbstractHandler import org.eclipse.jetty.server.handler.HandlerList import org.eclipse.jetty.util.ssl.SslContextFactory -import org.eclipse.jetty.util.thread.QueuedThreadPool import javax.net.ssl.HostnameVerifier import javax.net.ssl.SSLContext From 9d432482ca0fe58f10c7dcb7058bb01c8f11cda4 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 4 Apr 2025 10:35:26 -0400 Subject: [PATCH 059/113] bluntly bump max threads --- .../trace/agent/test/server/http/TestHttpServer.groovy | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy index 3b7f444dbd0..e6fa205751a 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy @@ -19,6 +19,7 @@ import org.eclipse.jetty.server.SslConnectionFactory import org.eclipse.jetty.server.handler.AbstractHandler import org.eclipse.jetty.server.handler.HandlerList import org.eclipse.jetty.util.ssl.SslContextFactory +import 
org.eclipse.jetty.util.thread.QueuedThreadPool import javax.net.ssl.HostnameVerifier import javax.net.ssl.SSLContext @@ -82,7 +83,10 @@ class TestHttpServer implements AutoCloseable { } private TestHttpServer() { - internalServer = new Server() + // In some versions, Jetty requires max threads > than some arbitrary value + // The arbitrary value can be high in CI + // There is no easy way to override the configuration in a version-neutral way + internalServer = new Server(new QueuedThreadPool(400)) TrustManager[] trustManagers = new TrustManager[1] trustManagers[0] = trustManager From cc3744cb202998d7882a93c55cf181892dab791d Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 4 Apr 2025 12:31:21 -0400 Subject: [PATCH 060/113] some more debug --- build.gradle | 1 + dd-smoke-tests/spring-boot-3.0-native/build.gradle | 2 +- gradle/java_no_deps.gradle | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 8629065f394..5d3cdd8622a 100644 --- a/build.gradle +++ b/build.gradle @@ -134,6 +134,7 @@ def writeMainVersionFileTask = tasks.register('writeMainVersionFile') { allprojects { tasks.withType(JavaForkOptions).configureEach { + project.logger.lifecycle("MAX HEAP: {}", (String)System.properties["datadog.forkedMaxHeapSize"]) maxHeapSize = System.properties["datadog.forkedMaxHeapSize"] minHeapSize = System.properties["datadog.forkedMinHeapSize"] jvmArgs "-XX:ErrorFile=/tmp/hs_err_pid%p.log" diff --git a/dd-smoke-tests/spring-boot-3.0-native/build.gradle b/dd-smoke-tests/spring-boot-3.0-native/build.gradle index b9f2901dc86..c257fbb9be7 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/build.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/build.gradle @@ -13,7 +13,7 @@ def java17Home = System.getenv('JAVA_17_HOME') // Check 'testJvm' gradle command parameter to be at least GraalVM 17 def matcher = testJvm?.toLowerCase(Locale.ROOT) =~ /graalvm([0-9]+)/ def version = matcher?.size() == 1 ? 
Integer.parseInt(matcher[0][1]) : -1 -project.logger.lifecycle("TESTLOG: testJvm: {}, matcher: {} {}", testJvm, matcher, matcher?.size()) +project.logger.lifecycle("TESTLOG: testJvm: {}, matcher: {} {} {}", testJvm, matcher, matcher?.size(), version) println("TESTLOG2") if (version >= 17) { // Retrieve GRAALVM_HOME from JVM environment variables diff --git a/gradle/java_no_deps.gradle b/gradle/java_no_deps.gradle index 95a87f0e8ed..c6ffe3974b9 100644 --- a/gradle/java_no_deps.gradle +++ b/gradle/java_no_deps.gradle @@ -237,6 +237,7 @@ project.afterEvaluate { tasks.withType(JavaExec).configureEach { if (!it.maxHeapSize) { + project.logger.lifecycle("SETTING MAX HEAP for main class {}", (String)it.mainClass) it.maxHeapSize('256M') } } From b3770bc3b45b1495648589a7f26f66375e73aac0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 4 Apr 2025 15:09:21 -0400 Subject: [PATCH 061/113] remove verbose logging --- build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/build.gradle b/build.gradle index 5d3cdd8622a..8629065f394 100644 --- a/build.gradle +++ b/build.gradle @@ -134,7 +134,6 @@ def writeMainVersionFileTask = tasks.register('writeMainVersionFile') { allprojects { tasks.withType(JavaForkOptions).configureEach { - project.logger.lifecycle("MAX HEAP: {}", (String)System.properties["datadog.forkedMaxHeapSize"]) maxHeapSize = System.properties["datadog.forkedMaxHeapSize"] minHeapSize = System.properties["datadog.forkedMinHeapSize"] jvmArgs "-XX:ErrorFile=/tmp/hs_err_pid%p.log" From 5630d8622a8798b13ea0341efc59cec466043abf Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 4 Apr 2025 15:09:35 -0400 Subject: [PATCH 062/113] try memcached test with new location --- .../trace/instrumentation/spymemcached/SpymemcachedTest.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dd-java-agent/instrumentation/spymemcached-2.10/src/test/groovy/datadog/trace/instrumentation/spymemcached/SpymemcachedTest.groovy b/dd-java-agent/instrumentation/spymemcached-2.10/src/test/groovy/datadog/trace/instrumentation/spymemcached/SpymemcachedTest.groovy index 7357a9749fb..e55405e7e47 100644 --- a/dd-java-agent/instrumentation/spymemcached-2.10/src/test/groovy/datadog/trace/instrumentation/spymemcached/SpymemcachedTest.groovy +++ b/dd-java-agent/instrumentation/spymemcached-2.10/src/test/groovy/datadog/trace/instrumentation/spymemcached/SpymemcachedTest.groovy @@ -56,7 +56,7 @@ abstract class SpymemcachedTest extends VersionedNamingTestBase { } def setupSpec() { - memcachedContainer = new GenericContainer('memcached:1.6.14-alpine') + memcachedContainer = new GenericContainer('library/memcached:1.6.14-alpine') .withExposedPorts(defaultMemcachedPort) .withStartupTimeout(Duration.ofSeconds(120)) memcachedContainer.start() From cdfb06065496b35abef29835514988321455a858 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 4 Apr 2025 15:31:57 -0400 Subject: [PATCH 063/113] opt out of split by parallel --- .gitlab-ci.yml | 2 ++ dd-smoke-tests/spring-boot-3.0-native/build.gradle | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 75bc976a5e1..033178369ba 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -79,6 +79,7 @@ default: # CI_NODE_INDEX and CI_NODE_TOTAL are 1-indexed and not always set. 
These steps normalize the numbers for jobs .normalize_node_index: &normalize_node_index + - if [ "$CI_NO_SPLIT" == "true" ] ; then CI_NODE_INDEX=1; CI_NODE_TOTAL=1; fi # A job uses parallel but doesn't intend to split by index - if [ -n "$CI_SPLIT" ]; then CI_NODE_INDEX="${CI_SPLIT%%/*}"; CI_NODE_TOTAL="${CI_SPLIT##*/}"; fi - echo "CI_NODE_TOTAL=${CI_NODE_TOTAL}, CI_NODE_INDEX=$CI_NODE_INDEX" - export NORMALIZED_NODE_TOTAL=${CI_NODE_TOTAL:-1} @@ -522,6 +523,7 @@ test_smoke_graalvm: variables: GRADLE_TARGET: "stageMainDist :dd-smoke-test:spring-boot-3.0-native:test" CACHE_TYPE: "smoke" + CI_NO_SPLIT: "true" parallel: matrix: - testJvm: ["graalvm17", "graalvm21"] diff --git a/dd-smoke-tests/spring-boot-3.0-native/build.gradle b/dd-smoke-tests/spring-boot-3.0-native/build.gradle index c257fbb9be7..ed37cbb264c 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/build.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/build.gradle @@ -13,8 +13,6 @@ def java17Home = System.getenv('JAVA_17_HOME') // Check 'testJvm' gradle command parameter to be at least GraalVM 17 def matcher = testJvm?.toLowerCase(Locale.ROOT) =~ /graalvm([0-9]+)/ def version = matcher?.size() == 1 ? Integer.parseInt(matcher[0][1]) : -1 -project.logger.lifecycle("TESTLOG: testJvm: {}, matcher: {} {} {}", testJvm, matcher, matcher?.size(), version) -println("TESTLOG2") if (version >= 17) { // Retrieve GRAALVM_HOME from JVM environment variables def testJvmEnv = "JAVA_${testJvm}_HOME"
From b836b3526825e1f3e69b2deb8830ed285058076f Mon Sep 17 00:00:00 2001 From: Santiago Mola Date: Mon, 7 Apr 2025 07:54:00 +0200 Subject: [PATCH 064/113] Try with 3 gradle workers --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 033178369ba..add193af52c 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -160,7 +160,7 @@ build_tests: BUILD_CACHE_POLICY: push DEPENDENCY_CACHE_POLICY: pull GRADLE_MEM: 4G - GRADLE_WORKERS: 4 + GRADLE_WORKERS: 3 KUBERNETES_MEMORY_REQUEST: 18Gi KUBERNETES_MEMORY_LIMIT: 18Gi parallel:
From 96ea71cf36ced5523d97b0091465bff759b994d0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 7 Apr 2025 12:47:19 -0400 Subject: [PATCH 065/113] try decreasing reporting interval --- .../trace/common/metrics/ConflatingMetricAggregatorTest.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy index 486254798d7..498a4b4a0af 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy @@ -23,7 +23,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification { static final int HTTP_OK = 200 @Shared - long reportingInterval = 100 + long reportingInterval = 1 @Shared int queueSize = 256
From 1594733bf1363f4c76c16f3db116fcf7ce3cb7f7 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 7 Apr 2025 15:57:21 -0400 Subject: [PATCH 066/113] Refactor CrashTrackingSmokeTest to use OutputThreads --- .../smoketest/CrashtrackingSmokeTest.java | 200 +++--------------- .../datadog/smoketest/ProcessManager.groovy | 127 +---------- .../java/datadog/smoketest/OutputThreads.java | 168 +++++++++++++++ 3 files changed, 200 insertions(+), 295 deletions(-) create mode 100644
dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 469f86f0620..6fbae1c07ff 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -1,17 +1,14 @@ package datadog.smoketest; -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assumptions.assumeFalse; import datadog.trace.api.Platform; -import java.io.BufferedReader; import java.io.File; -import java.io.InputStreamReader; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Arrays; import java.util.Comparator; import java.util.stream.Stream; @@ -19,6 +16,7 @@ import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; @@ -29,15 +27,19 @@ * that ships with OS X by default. */ public class CrashtrackingSmokeTest { + private static Path LOG_FILE_DIR; private MockWebServer tracingServer; @BeforeAll static void setupAll() { // Only Hotspot based implementation are supported assumeFalse(Platform.isJ9()); + + LOG_FILE_DIR = Paths.get(System.getProperty("datadog.smoketest.builddir"), "reports"); } private Path tempDir; + private static OutputThreads outputThreads = new OutputThreads(); @BeforeEach void setup() throws Exception { @@ -52,6 +54,9 @@ public MockResponse dispatch(final RecordedRequest request) throws InterruptedEx } }); // tracingServer.start(8126); + synchronized (outputThreads.testLogMessages) { + outputThreads.testLogMessages.clear(); + } } @AfterEach @@ -64,6 +69,11 @@ void teardown() throws Exception { Files.deleteIfExists(tempDir); } + @AfterAll + static void shutdown() { + outputThreads.close(); + } + private static String javaPath() { final String separator = FileSystems.getDefault().getSeparator(); return System.getProperty("java.home") + separator + "bin" + separator + "java"; @@ -108,51 +118,14 @@ void testCrashTracking() throws Exception { appShadowJar(), script.toString())); pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); - StringBuilder stdoutStr = new StringBuilder(); - StringBuilder stderrStr = new StringBuilder(); Process p = pb.start(); - Thread stdout = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getInputStream()))) { - br.lines() - .forEach( - l -> { - System.out.println(l); - stdoutStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - Thread stderr = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getErrorStream()))) { - br.lines() - .forEach( - l -> { - System.err.println(l); - stderrStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - stdout.setDaemon(true); - stderr.setDaemon(true); - stdout.start(); - stderr.start(); + outputThreads.captureOutput(p, 
LOG_FILE_DIR.resolve("testProcess.testCrashTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - assertThat(stdoutStr.toString(), containsString(" was uploaded successfully")); - assertThat( - stderrStr.toString(), - containsString( + outputThreads.processTestLogLines((line) -> line.contains(" was uploaded successfully")); + outputThreads.processTestLogLines((line) -> line.contains( "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); } @@ -183,52 +156,15 @@ void testCrashTrackingLegacy() throws Exception { appShadowJar(), script.toString())); pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); - StringBuilder stdoutStr = new StringBuilder(); - StringBuilder stderrStr = new StringBuilder(); Process p = pb.start(); - Thread stdout = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getInputStream()))) { - br.lines() - .forEach( - l -> { - System.out.println(l); - stdoutStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - Thread stderr = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getErrorStream()))) { - br.lines() - .forEach( - l -> { - System.err.println(l); - stderrStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - stdout.setDaemon(true); - stderr.setDaemon(true); - stdout.start(); - stderr.start(); + outputThreads.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testCrashTrackingLegacy.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - assertThat(stdoutStr.toString(), containsString(" was uploaded successfully")); - assertThat( - stderrStr.toString(), - containsString( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); + outputThreads.processTestLogLines((line) -> line.contains(" was uploaded successfully")); + outputThreads.processTestLogLines((line) -> line.contains( + "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); } /* @@ -255,51 +191,14 @@ void testOomeTracking() throws Exception { "-jar", appShadowJar(), script.toString())); - StringBuilder stdoutStr = new StringBuilder(); - StringBuilder stderrStr = new StringBuilder(); Process p = pb.start(); - Thread stdout = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getInputStream()))) { - br.lines() - .forEach( - l -> { - System.out.println(l); - stdoutStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - Thread stderr = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getErrorStream()))) { - br.lines() - .forEach( - l -> { - System.err.println(l); - stderrStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - stdout.setDaemon(true); - stderr.setDaemon(true); - stdout.start(); - stderr.start(); + outputThreads.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testOomeTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - assertThat( - stderrStr.toString(), - containsString("com.datadog.crashtracking.OOMENotifier - OOME event sent")); - assertThat(stdoutStr.toString(), containsString("OOME Event generated successfully")); + outputThreads.processTestLogLines((line) -> 
line.contains("com.datadog.crashtracking.OOMENotifier - OOME event sent")); + outputThreads.processTestLogLines((line) -> line.contains("OOME Event generated successfully")); } @Test @@ -326,58 +225,19 @@ void testCombineTracking() throws Exception { appShadowJar(), oomeScript.toString())); pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); - StringBuilder stdoutStr = new StringBuilder(); - StringBuilder stderrStr = new StringBuilder(); Process p = pb.start(); - Thread stdout = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getInputStream()))) { - br.lines() - .forEach( - l -> { - System.out.println(l); - stdoutStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - Thread stderr = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getErrorStream()))) { - br.lines() - .forEach( - l -> { - System.err.println(l); - stderrStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - stdout.setDaemon(true); - stderr.setDaemon(true); - stdout.start(); - stderr.start(); + outputThreads.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testCombineTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); // Crash uploader did get triggered - assertThat(stdoutStr.toString(), containsString(" was uploaded successfully")); - assertThat( - stderrStr.toString(), - containsString( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); + outputThreads.processTestLogLines((line) -> line.contains(" was uploaded successfully")); + outputThreads.processTestLogLines((line) -> line.contains( + "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); // OOME notifier did get triggered - assertThat( - stderrStr.toString(), - containsString("com.datadog.crashtracking.OOMENotifier - OOME event sent")); - assertThat(stdoutStr.toString(), containsString("OOME Event generated successfully")); + outputThreads.processTestLogLines((line) -> line.contains("com.datadog.crashtracking.OOMENotifier - OOME event sent")); + outputThreads.processTestLogLines((line) -> line.contains("OOME Event generated successfully")); } } diff --git a/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy b/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy index fdcc9db82be..422958892ff 100644 --- a/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy +++ b/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy @@ -7,13 +7,7 @@ import spock.lang.AutoCleanup import spock.lang.Shared import spock.lang.Specification -import java.nio.ByteBuffer import java.nio.CharBuffer -import java.nio.channels.Channels -import java.nio.channels.ReadableByteChannel -import java.nio.channels.WritableByteChannel -import java.nio.charset.CharsetDecoder -import java.nio.charset.StandardCharsets import java.nio.file.Files import java.nio.file.Paths import java.util.concurrent.TimeoutException @@ -78,93 +72,6 @@ abstract class ProcessManager extends Specification { @AutoCleanup OutputThreads outputThreads = new OutputThreads() - class OutputThreads implements Closeable { - final ThreadGroup tg = new ThreadGroup("smoke-output") - final List testLogMessages = new ArrayList<>() - - void close() { - tg.interrupt() - Thread[] threads = new Thread[tg.activeCount()] - tg.enumerate(threads) - 
threads*.join() - } - - @CompileStatic - class ProcessOutputRunnable implements Runnable { - final ReadableByteChannel rc - ByteBuffer buffer = ByteBuffer.allocate(MAX_LINE_SIZE) - final WritableByteChannel wc - CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder() - - ProcessOutputRunnable(InputStream is, File output) { - rc = Channels.newChannel(is) - wc = Channels.newChannel(new FileOutputStream(output)) - } - - @Override - void run() { - boolean online = true - while (online) { - // we may have data in the buffer we did not consume for line splitting purposes - int skip = buffer.position() - - try { - if (rc.read(buffer) == -1) { - online = false - } - } catch (IOException ioe) { - online = false - } - - buffer.flip() - // write to log file - wc.write(buffer.duplicate().position(skip) as ByteBuffer) - - // subBuff will always start at the beginning of the next (potential) line - ByteBuffer subBuff = buffer.duplicate() - int consumed = 0 - while (true) { - boolean hasRemaining = subBuff.hasRemaining() - if (hasRemaining) { - int c = subBuff.get() - if (c != '\n' && c != '\r') { - continue - } - // found line end - } else if (online && consumed > 0) { - break - // did not find line end, but we already consumed a line - // save the data for the next read iteration - } // else we did not consume any line, or there will be no further reads. - // Treat the buffer as single line despite lack of terminator - - consumed += subBuff.position() - String line = decoder.decode(subBuff.duplicate().flip() as ByteBuffer).toString().trim() - if (line != '') { - synchronized (testLogMessages) { - testLogMessages << line - testLogMessages.notifyAll() - } - } - - if (hasRemaining) { - subBuff = subBuff.slice() - } else { - break - } - } - - buffer.position(consumed) - buffer.compact() - } - } - } - - void captureOutput(Process p, File outputFile) { - new Thread(tg, new ProcessOutputRunnable(p.inputStream, outputFile)).start() - } - } - def setupSpec() { if (buildDirectory == null || shadowJarPath == null) { throw new AssertionError("Expected system properties not found. Smoke tests have to be run from Gradle. 
Please make sure that is the case.") @@ -333,37 +240,7 @@ abstract class ProcessManager extends Specification { * @param checker should return true if a match is found */ void processTestLogLines(Closure checker) { - int l = 0 - def tlm = outputThreads.testLogMessages - long waitStart - - while (true) { - String msg - synchronized (tlm) { - if (l >= tlm.size()) { - long waitTime - if (waitStart != 0) { - waitTime = 5000 - (System.currentTimeMillis() - waitStart) - if (waitTime < 0) { - throw new TimeoutException() - } - } else { - waitStart = System.currentTimeMillis() - waitTime = 5000 - } - tlm.wait(waitTime) - } - if (l >= tlm.size()) { - throw new TimeoutException() - } - // the array is only cleared at the end of the test, so index l exists - msg = tlm.get(l++) - } - - if (checker(msg)) { - break - } - } + outputThreads.processTestLogLines {return checker(it) } } protected void beforeProcessBuilders() {} @@ -383,7 +260,7 @@ abstract class ProcessManager extends Specification { return "01234567890abcdef123456789ABCDEF" } - static final int MAX_LINE_SIZE = 1024 * 1024 + protected static final int MAX_LINE_SIZE = 1024 * 1024 @CompileStatic @SuppressForbidden diff --git a/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java new file mode 100644 index 00000000000..d381271bff9 --- /dev/null +++ b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java @@ -0,0 +1,168 @@ +package datadog.smoketest; + +import java.io.Closeable; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.charset.CharacterCodingException; +import java.nio.charset.CharsetDecoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeoutException; +import java.util.function.Function; + +public class OutputThreads implements Closeable { + private static final long THREAD_JOIN_TIMEOUT_MILLIS = 10 * 1000; + final ThreadGroup tg = new ThreadGroup("smoke-output"); + final List testLogMessages = new ArrayList<>(); + + public void close() { + tg.interrupt(); + Thread[] threads = new Thread[tg.activeCount()]; + tg.enumerate(threads); + + for (Thread thread : threads) { + try { + thread.join(THREAD_JOIN_TIMEOUT_MILLIS); + } catch (InterruptedException e) { + // ignore + } + } + } + + class ProcessOutputRunnable implements Runnable { + final ReadableByteChannel rc; + ByteBuffer buffer = ByteBuffer.allocate(ProcessManager.MAX_LINE_SIZE); + final WritableByteChannel wc; + CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder(); + + ProcessOutputRunnable(InputStream is, File output) throws FileNotFoundException { + rc = Channels.newChannel(is); + wc = Channels.newChannel(new FileOutputStream(output)); + } + + @Override + public void run() { + boolean online = true; + while (online) { + // we may have data in the buffer we did not consume for line splitting purposes + int skip = buffer.position(); + + try { + if (rc.read(buffer) == -1) { + online = false; + } + } catch (IOException ioe) { + online = false; + } + + buffer.flip(); + // write to log file + try { + wc.write((ByteBuffer) buffer.duplicate().position(skip)); + } catch (IOException e) { + System.out.println("ERROR WRITING TO LOG FILE: " 
+ e.getMessage()); + e.printStackTrace(); + return; + } + + // subBuff will always start at the beginning of the next (potential) line + ByteBuffer subBuff = buffer.duplicate(); + int consumed = 0; + while (true) { + boolean hasRemaining = subBuff.hasRemaining(); + if (hasRemaining) { + int c = subBuff.get(); + if (c != '\n' && c != '\r') { + continue; + } + // found line end + } else if (online && consumed > 0) { + break; + // did not find line end, but we already consumed a line + // save the data for the next read iteration + } // else we did not consume any line, or there will be no further reads. + // Treat the buffer as single line despite lack of terminator + + consumed += subBuff.position(); + String line = null; + try { + line = decoder.decode((ByteBuffer) subBuff.duplicate().flip()).toString().trim(); + } catch (CharacterCodingException e) { + throw new RuntimeException(e); + } + + if (!line.isEmpty()) { + synchronized (testLogMessages) { + testLogMessages.add(line); + testLogMessages.notifyAll(); + } + } + + if (hasRemaining) { + subBuff = subBuff.slice(); + } else { + break; + } + } + + buffer.position(consumed); + buffer.compact(); + } + } + } + + public void captureOutput(Process p, File outputFile) throws FileNotFoundException { + new Thread(tg, new ProcessOutputRunnable(p.getInputStream(), outputFile)).start(); + } + + /** + * Tries to find a log line that matches the given predicate. After reading all the log lines + * already collected, it will wait up to 5 seconds for a new line matching the predicate. + * + * @param checker should return true if a match is found + */ + public boolean processTestLogLines(Function checker) throws TimeoutException { + int l = 0; + long waitStart = 0; + + while (true) { + String msg; + synchronized (testLogMessages) { + if (l >= testLogMessages.size()) { + long waitTime; + if (waitStart != 0) { + waitTime = 5000 - (System.currentTimeMillis() - waitStart); + if (waitTime < 0) { + throw new TimeoutException(); + } + } else { + waitStart = System.currentTimeMillis(); + waitTime = 5000; + } + try { + testLogMessages.wait(waitTime); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + if (l >= testLogMessages.size()) { + throw new TimeoutException(); + } + // the array is only cleared at the end of the test, so index l exists + msg = testLogMessages.get(l++); + } + + if (checker.apply(msg)) { + return true; + } + } + } +} From b8adb1ecec583b96cd2e624c97d5ce4684009621 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 7 Apr 2025 17:10:12 -0400 Subject: [PATCH 067/113] Simple forked memory test --- .../test/groovy/mule4/MuleMemoryForkedTest.groovy | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 dd-java-agent/instrumentation/mule-4/src/test/groovy/mule4/MuleMemoryForkedTest.groovy diff --git a/dd-java-agent/instrumentation/mule-4/src/test/groovy/mule4/MuleMemoryForkedTest.groovy b/dd-java-agent/instrumentation/mule-4/src/test/groovy/mule4/MuleMemoryForkedTest.groovy new file mode 100644 index 00000000000..235219adcb8 --- /dev/null +++ b/dd-java-agent/instrumentation/mule-4/src/test/groovy/mule4/MuleMemoryForkedTest.groovy @@ -0,0 +1,14 @@ +package mule4 + +import datadog.trace.test.util.DDSpecification + +class MuleMemoryForkedTest extends DDSpecification { + + def "Forked memory should be high"() { + when: + def max = Runtime.getRuntime().maxMemory() + + then: + max == 768 * 1024 * 1024 + } +} From 12dd18d1c530cd96938989af3d230ce28cd9f774 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: 
Mon, 7 Apr 2025 18:10:04 -0400 Subject: [PATCH 068/113] better ergonomics --- .../smoketest/CrashtrackingSmokeTest.java | 48 ++++++++++++------- .../datadog/smoketest/ProcessManager.groovy | 4 +- .../java/datadog/smoketest/OutputThreads.java | 4 +- 3 files changed, 35 insertions(+), 21 deletions(-) diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 6fbae1c07ff..cb264b2aa61 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -1,6 +1,7 @@ package datadog.smoketest; import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assumptions.assumeFalse; import datadog.trace.api.Platform; @@ -11,6 +12,7 @@ import java.nio.file.Paths; import java.util.Arrays; import java.util.Comparator; +import java.util.concurrent.TimeoutException; import java.util.stream.Stream; import okhttp3.mockwebserver.Dispatcher; import okhttp3.mockwebserver.MockResponse; @@ -120,13 +122,14 @@ void testCrashTracking() throws Exception { pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); Process p = pb.start(); - outputThreads.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testCrashTracking.log").toFile()); + outputThreads.captureOutput( + p, LOG_FILE_DIR.resolve("testProcess.testCrashTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - outputThreads.processTestLogLines((line) -> line.contains(" was uploaded successfully")); - outputThreads.processTestLogLines((line) -> line.contains( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); + assertOutputContains(" was uploaded successfully"); + assertOutputContains( + "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files"); } /* @@ -158,13 +161,14 @@ void testCrashTrackingLegacy() throws Exception { pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); Process p = pb.start(); - outputThreads.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testCrashTrackingLegacy.log").toFile()); + outputThreads.captureOutput( + p, LOG_FILE_DIR.resolve("testProcess.testCrashTrackingLegacy.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - outputThreads.processTestLogLines((line) -> line.contains(" was uploaded successfully")); - outputThreads.processTestLogLines((line) -> line.contains( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); + assertOutputContains(" was uploaded successfully"); + assertOutputContains( + "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files"); } /* @@ -193,12 +197,13 @@ void testOomeTracking() throws Exception { script.toString())); Process p = pb.start(); - outputThreads.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testOomeTracking.log").toFile()); + outputThreads.captureOutput( + p, LOG_FILE_DIR.resolve("testProcess.testOomeTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - outputThreads.processTestLogLines((line) -> line.contains("com.datadog.crashtracking.OOMENotifier - OOME event sent")); - outputThreads.processTestLogLines((line) -> line.contains("OOME 
Event generated successfully")); + assertOutputContains("com.datadog.crashtracking.OOMENotifier - OOME event sent"); + assertOutputContains("OOME Event generated successfully"); } @Test @@ -227,17 +232,26 @@ void testCombineTracking() throws Exception { pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); Process p = pb.start(); - outputThreads.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testCombineTracking.log").toFile()); + outputThreads.captureOutput( + p, LOG_FILE_DIR.resolve("testProcess.testCombineTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); // Crash uploader did get triggered - outputThreads.processTestLogLines((line) -> line.contains(" was uploaded successfully")); - outputThreads.processTestLogLines((line) -> line.contains( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); + assertOutputContains(" was uploaded successfully"); + assertOutputContains( + "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files"); // OOME notifier did get triggered - outputThreads.processTestLogLines((line) -> line.contains("com.datadog.crashtracking.OOMENotifier - OOME event sent")); - outputThreads.processTestLogLines((line) -> line.contains("OOME Event generated successfully")); + assertOutputContains("com.datadog.crashtracking.OOMENotifier - OOME event sent"); + assertOutputContains("OOME Event generated successfully"); + } + + private void assertOutputContains(String s) { + try { + outputThreads.processTestLogLines((line) -> line.contains(s)); + } catch (TimeoutException e) { + fail("String: '" + s + "' not found in output"); + } } } diff --git a/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy b/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy index 422958892ff..9d7fe392fd7 100644 --- a/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy +++ b/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy @@ -260,13 +260,11 @@ abstract class ProcessManager extends Specification { return "01234567890abcdef123456789ABCDEF" } - protected static final int MAX_LINE_SIZE = 1024 * 1024 - @CompileStatic @SuppressForbidden private static void eachLine(File file, Closure closure) { def reader = new InputStreamReader(new FileInputStream(file)) - CharBuffer buffer = CharBuffer.allocate(MAX_LINE_SIZE) + CharBuffer buffer = CharBuffer.allocate(OutputThreads.MAX_LINE_SIZE) while (reader.read(buffer) != -1) { buffer.flip() while (buffer.hasRemaining()) { diff --git a/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java index d381271bff9..5ad06329300 100644 --- a/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java +++ b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java @@ -20,6 +20,8 @@ public class OutputThreads implements Closeable { private static final long THREAD_JOIN_TIMEOUT_MILLIS = 10 * 1000; + public static final int MAX_LINE_SIZE = 1024 * 1024; + final ThreadGroup tg = new ThreadGroup("smoke-output"); final List testLogMessages = new ArrayList<>(); @@ -39,7 +41,7 @@ public void close() { class ProcessOutputRunnable implements Runnable { final ReadableByteChannel rc; - ByteBuffer buffer = ByteBuffer.allocate(ProcessManager.MAX_LINE_SIZE); + ByteBuffer buffer = ByteBuffer.allocate(MAX_LINE_SIZE); final WritableByteChannel wc; CharsetDecoder decoder = 
StandardCharsets.UTF_8.newDecoder(); From 1290b772273a724d306ba3af4b2c013dbb094021 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 7 Apr 2025 19:29:10 -0400 Subject: [PATCH 069/113] try increasing timeout --- .../test/java/datadog/smoketest/CrashtrackingSmokeTest.java | 5 ++++- .../src/main/java/datadog/smoketest/OutputThreads.java | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index cb264b2aa61..6b6a0d4f8ed 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -251,7 +251,10 @@ private void assertOutputContains(String s) { try { outputThreads.processTestLogLines((line) -> line.contains(s)); } catch (TimeoutException e) { - fail("String: '" + s + "' not found in output"); + // FIXME JUNit fail() is more correct but doesn't work. SEE: https://github.com/gradle/gradle/issues/27871 + // fixed in Gradle version 8.7 + // fail("String: '" + s + "' not found in output"); + throw new RuntimeException("String: '" + s + "' not found in output"); } } } diff --git a/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java index 5ad06329300..5201988c894 100644 --- a/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java +++ b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java @@ -141,13 +141,13 @@ public boolean processTestLogLines(Function checker) throws Tim if (l >= testLogMessages.size()) { long waitTime; if (waitStart != 0) { - waitTime = 5000 - (System.currentTimeMillis() - waitStart); + waitTime = 10000 - (System.currentTimeMillis() - waitStart); if (waitTime < 0) { throw new TimeoutException(); } } else { waitStart = System.currentTimeMillis(); - waitTime = 5000; + waitTime = 10000; } try { testLogMessages.wait(waitTime); From c9ba32ad42c4b49257a7d3f8364195a7be8d85f6 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 7 Apr 2025 23:17:14 -0400 Subject: [PATCH 070/113] extend agent test runner --- .../src/test/groovy/{mule4 => }/MuleMemoryForkedTest.groovy | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) rename dd-java-agent/instrumentation/mule-4/src/test/groovy/{mule4 => }/MuleMemoryForkedTest.groovy (55%) diff --git a/dd-java-agent/instrumentation/mule-4/src/test/groovy/mule4/MuleMemoryForkedTest.groovy b/dd-java-agent/instrumentation/mule-4/src/test/groovy/MuleMemoryForkedTest.groovy similarity index 55% rename from dd-java-agent/instrumentation/mule-4/src/test/groovy/mule4/MuleMemoryForkedTest.groovy rename to dd-java-agent/instrumentation/mule-4/src/test/groovy/MuleMemoryForkedTest.groovy index 235219adcb8..e7f4f54727e 100644 --- a/dd-java-agent/instrumentation/mule-4/src/test/groovy/mule4/MuleMemoryForkedTest.groovy +++ b/dd-java-agent/instrumentation/mule-4/src/test/groovy/MuleMemoryForkedTest.groovy @@ -1,8 +1,6 @@ -package mule4 +import datadog.trace.agent.test.AgentTestRunner -import datadog.trace.test.util.DDSpecification - -class MuleMemoryForkedTest extends DDSpecification { +class MuleMemoryForkedTest extends AgentTestRunner { def "Forked memory should be high"() { when: From 9973da835f54de67da4f90a58d4f21ffb7dcd573 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 8 Apr 2025 12:26:53 -0400 Subject: 
[PATCH 071/113] don't override forkedMinHeapSize and forkedMaxHeapSize --- gradle/configure_tests.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/gradle/configure_tests.gradle b/gradle/configure_tests.gradle index 60280fa3144..7f0065e9eb6 100644 --- a/gradle/configure_tests.gradle +++ b/gradle/configure_tests.gradle @@ -69,7 +69,6 @@ tasks.withType(Test).configureEach { if (name.startsWith("forkedTest") || name.endsWith("ForkedTest")) { setExcludes([]) setIncludes(["**/*ForkedTest*"]) - jvmArgs += ["-Xms256M", "-Xmx256M"] forkEvery 1 // Limit the number of concurrent forked tests usesService(forkedTestLimit) From 296bc5af30be208d411dc60e515d82e8648d1409 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 8 Apr 2025 12:52:08 -0400 Subject: [PATCH 072/113] print out requests --- .../smoketest/CrashtrackingSmokeTest.java | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 6b6a0d4f8ed..2041fec10dc 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -1,11 +1,11 @@ package datadog.smoketest; import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assumptions.assumeFalse; import datadog.trace.api.Platform; import java.io.File; +import java.io.IOException; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; @@ -18,6 +18,7 @@ import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; +import okio.BufferedSource; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -52,6 +53,18 @@ void setup() throws Exception { new Dispatcher() { @Override public MockResponse dispatch(final RecordedRequest request) throws InterruptedException { + System.out.println("URL ====== " + request.getPath()); + BufferedSource source = request.getBody(); + String line = null; + do { + try { + line = source.readUtf8Line(); + } catch (IOException e) { + System.out.println("Error reading line " + e.getMessage()); + break; + } + System.out.println(line); + } while (line != null); return new MockResponse().setResponseCode(200); } }); @@ -251,7 +264,8 @@ private void assertOutputContains(String s) { try { outputThreads.processTestLogLines((line) -> line.contains(s)); } catch (TimeoutException e) { - // FIXME JUNit fail() is more correct but doesn't work. SEE: https://github.com/gradle/gradle/issues/27871 + // FIXME JUNit fail() is more correct but doesn't work. 
SEE: + // https://github.com/gradle/gradle/issues/27871 // fixed in Gradle version 8.7 // fail("String: '" + s + "' not found in output"); throw new RuntimeException("String: '" + s + "' not found in output"); From 277c391093d17b4d93cb79528f8c7a3fd34332ce Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 8 Apr 2025 12:52:34 -0400 Subject: [PATCH 073/113] remove logging statement --- gradle/java_no_deps.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/gradle/java_no_deps.gradle b/gradle/java_no_deps.gradle index c6ffe3974b9..95a87f0e8ed 100644 --- a/gradle/java_no_deps.gradle +++ b/gradle/java_no_deps.gradle @@ -237,7 +237,6 @@ project.afterEvaluate { tasks.withType(JavaExec).configureEach { if (!it.maxHeapSize) { - project.logger.lifecycle("SETTING MAX HEAP for main class {}", (String)it.mainClass) it.maxHeapSize('256M') } } From fc3a9ae0b91c654574ac21fb42e224c80fc6b681 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 8 Apr 2025 16:36:31 -0400 Subject: [PATCH 074/113] Better crash tracking --- .../datadog/smoketest/CrashTelemetryData.java | 13 ++++ .../smoketest/CrashtrackingSmokeTest.java | 62 +++++++++++++------ .../smoketest/MinimalTelemetryData.java | 5 ++ 3 files changed, 60 insertions(+), 20 deletions(-) create mode 100644 dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashTelemetryData.java create mode 100644 dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/MinimalTelemetryData.java diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashTelemetryData.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashTelemetryData.java new file mode 100644 index 00000000000..21f6df81f01 --- /dev/null +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashTelemetryData.java @@ -0,0 +1,13 @@ +package datadog.smoketest; + +import java.util.List; + +public class CrashTelemetryData extends MinimalTelemetryData { + List payload; + + public static class LogMessage { + public String message; + public String level; + public String tags; + } +} diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 2041fec10dc..2cfa1e9af9c 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -1,24 +1,32 @@ package datadog.smoketest; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assumptions.assumeFalse; +import com.squareup.moshi.JsonAdapter; +import com.squareup.moshi.Moshi; import datadog.trace.api.Platform; import java.io.File; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import java.util.Comparator; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Stream; import okhttp3.mockwebserver.Dispatcher; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; 
import okhttp3.mockwebserver.RecordedRequest; -import okio.BufferedSource; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -32,6 +40,7 @@ public class CrashtrackingSmokeTest { private static Path LOG_FILE_DIR; private MockWebServer tracingServer; + private BlockingQueue crashEvents = new LinkedBlockingQueue<>(); @BeforeAll static void setupAll() { @@ -48,23 +57,33 @@ static void setupAll() { void setup() throws Exception { tempDir = Files.createTempDirectory("dd-smoketest-"); + crashEvents.clear(); + + Moshi moshi = new Moshi.Builder().build(); tracingServer = new MockWebServer(); tracingServer.setDispatcher( new Dispatcher() { @Override public MockResponse dispatch(final RecordedRequest request) throws InterruptedException { - System.out.println("URL ====== " + request.getPath()); - BufferedSource source = request.getBody(); - String line = null; - do { + String data = request.getBody().readString(StandardCharsets.UTF_8); + + if ("/telemetry/proxy/api/v2/apmtelemetry".equals(request.getPath())) { try { - line = source.readUtf8Line(); + JsonAdapter adapter = + moshi.adapter(MinimalTelemetryData.class); + MinimalTelemetryData minimal = adapter.fromJson(data); + if ("logs".equals(minimal.request_type)) { + JsonAdapter crashAdapter = + moshi.adapter(CrashTelemetryData.class); + crashEvents.add(crashAdapter.fromJson(data)); + } } catch (IOException e) { - System.out.println("Error reading line " + e.getMessage()); - break; + System.out.println("Unable to parse " + e); } - System.out.println(line); - } while (line != null); + } + System.out.println("URL ====== " + request.getPath()); + System.out.println(data); + return new MockResponse().setResponseCode(200); } }); @@ -139,10 +158,7 @@ void testCrashTracking() throws Exception { p, LOG_FILE_DIR.resolve("testProcess.testCrashTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - - assertOutputContains(" was uploaded successfully"); - assertOutputContains( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files"); + assertCrashData(); } /* @@ -179,9 +195,8 @@ void testCrashTrackingLegacy() throws Exception { assertNotEquals(0, p.waitFor(), "Application should have crashed"); - assertOutputContains(" was uploaded successfully"); - assertOutputContains( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files"); + assertNotEquals(0, p.waitFor(), "Application should have crashed"); + assertCrashData(); } /* @@ -212,6 +227,7 @@ void testOomeTracking() throws Exception { Process p = pb.start(); outputThreads.captureOutput( p, LOG_FILE_DIR.resolve("testProcess.testOomeTracking.log").toFile()); + pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(tracingServer.getPort())); assertNotEquals(0, p.waitFor(), "Application should have crashed"); @@ -243,6 +259,7 @@ void testCombineTracking() throws Exception { appShadowJar(), oomeScript.toString())); pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); + pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(tracingServer.getPort())); Process p = pb.start(); outputThreads.captureOutput( @@ -251,9 +268,7 @@ void testCombineTracking() throws Exception { assertNotEquals(0, p.waitFor(), "Application should have crashed"); // Crash uploader did get triggered - assertOutputContains(" was uploaded successfully"); - assertOutputContains( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the 
crash files"); + assertCrashData(); // OOME notifier did get triggered assertOutputContains("com.datadog.crashtracking.OOMENotifier - OOME event sent"); @@ -271,4 +286,11 @@ private void assertOutputContains(String s) { throw new RuntimeException("String: '" + s + "' not found in output"); } } + + private void assertCrashData() throws InterruptedException { + CrashTelemetryData crashData = crashEvents.poll(10, TimeUnit.SECONDS); + assertNotNull(crashData, "Crash data not uploaded"); + assertThat(crashData.payload.get(0).message, containsString("OutOfMemory")); + assertThat(crashData.payload.get(0).tags, containsString("severity:crash")); + } } diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/MinimalTelemetryData.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/MinimalTelemetryData.java new file mode 100644 index 00000000000..4940d794de9 --- /dev/null +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/MinimalTelemetryData.java @@ -0,0 +1,5 @@ +package datadog.smoketest; + +public class MinimalTelemetryData { + String request_type; +} From 3631181e639020b8f2935e067c6d731acf74765a Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 8 Apr 2025 17:30:08 -0400 Subject: [PATCH 075/113] remove intentionally failing test --- .../src/test/groovy/MuleMemoryForkedTest.groovy | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 dd-java-agent/instrumentation/mule-4/src/test/groovy/MuleMemoryForkedTest.groovy diff --git a/dd-java-agent/instrumentation/mule-4/src/test/groovy/MuleMemoryForkedTest.groovy b/dd-java-agent/instrumentation/mule-4/src/test/groovy/MuleMemoryForkedTest.groovy deleted file mode 100644 index e7f4f54727e..00000000000 --- a/dd-java-agent/instrumentation/mule-4/src/test/groovy/MuleMemoryForkedTest.groovy +++ /dev/null @@ -1,12 +0,0 @@ -import datadog.trace.agent.test.AgentTestRunner - -class MuleMemoryForkedTest extends AgentTestRunner { - - def "Forked memory should be high"() { - when: - def max = Runtime.getRuntime().maxMemory() - - then: - max == 768 * 1024 * 1024 - } -} From 0975adc7e7d67977a625532e6ffa94e626532ff3 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 9 Apr 2025 12:05:40 -0400 Subject: [PATCH 076/113] use a test UDP server to record oom events --- .../smoketest/CrashtrackingSmokeTest.java | 48 +++---- .../java/datadog/smoketest/TestUDPServer.java | 132 ++++++++++++++++++ 2 files changed, 156 insertions(+), 24 deletions(-) create mode 100644 dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 2cfa1e9af9c..2251d2b78df 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -1,6 +1,7 @@ package datadog.smoketest; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -21,7 +22,6 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.stream.Stream; 
import okhttp3.mockwebserver.Dispatcher; import okhttp3.mockwebserver.MockResponse; @@ -38,8 +38,10 @@ * that ships with OS X by default. */ public class CrashtrackingSmokeTest { + private static final long DATA_TIMEOUT_MS = 10 * 1000; private static Path LOG_FILE_DIR; private MockWebServer tracingServer; + private TestUDPServer udpServer; private BlockingQueue crashEvents = new LinkedBlockingQueue<>(); @BeforeAll @@ -87,7 +89,10 @@ public MockResponse dispatch(final RecordedRequest request) throws InterruptedEx return new MockResponse().setResponseCode(200); } }); - // tracingServer.start(8126); + + udpServer = new TestUDPServer(); + udpServer.start(); + synchronized (outputThreads.testLogMessages) { outputThreads.testLogMessages.clear(); } @@ -96,6 +101,7 @@ public MockResponse dispatch(final RecordedRequest request) throws InterruptedEx @AfterEach void teardown() throws Exception { tracingServer.shutdown(); + udpServer.close(); try (Stream fileStream = Files.walk(tempDir)) { fileStream.sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete); @@ -227,12 +233,10 @@ void testOomeTracking() throws Exception { Process p = pb.start(); outputThreads.captureOutput( p, LOG_FILE_DIR.resolve("testProcess.testOomeTracking.log").toFile()); - pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(tracingServer.getPort())); + pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(udpServer.getPort())); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - - assertOutputContains("com.datadog.crashtracking.OOMENotifier - OOME event sent"); - assertOutputContains("OOME Event generated successfully"); + assertOOMEvent(); } @Test @@ -259,7 +263,7 @@ void testCombineTracking() throws Exception { appShadowJar(), oomeScript.toString())); pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); - pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(tracingServer.getPort())); + pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(udpServer.getPort())); Process p = pb.start(); outputThreads.captureOutput( @@ -269,28 +273,24 @@ void testCombineTracking() throws Exception { // Crash uploader did get triggered assertCrashData(); - - // OOME notifier did get triggered - assertOutputContains("com.datadog.crashtracking.OOMENotifier - OOME event sent"); - assertOutputContains("OOME Event generated successfully"); - } - - private void assertOutputContains(String s) { - try { - outputThreads.processTestLogLines((line) -> line.contains(s)); - } catch (TimeoutException e) { - // FIXME JUNit fail() is more correct but doesn't work. 
SEE: - // https://github.com/gradle/gradle/issues/27871 - // fixed in Gradle version 8.7 - // fail("String: '" + s + "' not found in output"); - throw new RuntimeException("String: '" + s + "' not found in output"); - } + assertOOMEvent(); } private void assertCrashData() throws InterruptedException { - CrashTelemetryData crashData = crashEvents.poll(10, TimeUnit.SECONDS); + CrashTelemetryData crashData = crashEvents.poll(DATA_TIMEOUT_MS, TimeUnit.MILLISECONDS); assertNotNull(crashData, "Crash data not uploaded"); assertThat(crashData.payload.get(0).message, containsString("OutOfMemory")); assertThat(crashData.payload.get(0).tags, containsString("severity:crash")); } + + private void assertOOMEvent() throws InterruptedException { + byte[] data = udpServer.getMessages().poll(DATA_TIMEOUT_MS, TimeUnit.MILLISECONDS); + assertNotNull(data, "OOM Event not received"); + String event = new String(data); + + assertThat(event, startsWith("_e")); + assertThat(event, containsString(":OutOfMemoryError")); + assertThat(event, containsString("t:error")); + assertThat(event, containsString("s:java")); + } } diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java new file mode 100644 index 00000000000..6668c78a41d --- /dev/null +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java @@ -0,0 +1,132 @@ +package datadog.smoketest; + +import java.io.Closeable; +import java.io.IOException; +import java.net.DatagramPacket; +import java.net.DatagramSocket; +import java.net.InetAddress; +import java.net.SocketException; +import java.net.SocketTimeoutException; +import java.util.Arrays; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; + +/** Simple test UDP Server. 
Not for production use but good enough for tests */ +public class TestUDPServer implements Closeable { + public static final int DEFAULT_TIMEOUT_MS = 30 * 1000; + public static final int DEFAULT_PACKET_SIZE = 2000; + + private static final byte[] END_MESSAGE = "END____".getBytes(); + + private final BlockingQueue dataPackets = new LinkedBlockingQueue<>(); + private final int timeout; + private final int packetSize; + private final int port; + + private volatile boolean closed = false; + private volatile boolean closing = false; + private DatagramSocket socket; + private Thread readerThread; + + public TestUDPServer() { + this(DEFAULT_TIMEOUT_MS, DEFAULT_PACKET_SIZE, 0); + } + + public TestUDPServer(int timeout, int packetSize, int port) { + this.timeout = timeout; + this.packetSize = packetSize; + this.port = port; + } + + public synchronized void start() throws SocketException { + if (closed) { + throw new IllegalStateException("Server closed"); + } + if (socket != null) { + // already started + return; + } + + socket = new DatagramSocket(port); + socket.setSoTimeout(timeout); + readerThread = + new Thread( + () -> { + while (!closed && !closing) { + byte[] data = new byte[packetSize]; + try { + DatagramPacket packet = new DatagramPacket(data, packetSize); + socket.receive(packet); + + byte[] trimmedData = new byte[packet.getLength()]; + System.arraycopy( + packet.getData(), packet.getOffset(), trimmedData, 0, packet.getLength()); + + if (Arrays.equals(trimmedData, END_MESSAGE)) { + System.out.println("[TestUDPServer] Received message to close"); + break; + } + System.out.println( + "[TestUDPServer] Received message: " + new String(trimmedData)); + dataPackets.add(trimmedData); + } catch (SocketTimeoutException e) { + System.out.println("[TestUDPServer] Timeout waiting for message"); + // ignore no data sent + } catch (IOException e) { + System.out.println("[TestUDPServer] Error in receiving packet " + e.getMessage()); + e.printStackTrace(); + break; + } + } + closed = true; + }, + "Test UDP Server Receiver"); + + readerThread.setDaemon(true); + readerThread.start(); + } + + @Override + public synchronized void close() { + if (closed) { + // Already closed + return; + } + if (socket == null) { + throw new IllegalStateException("Socket not open"); + } + + closing = true; + + try (DatagramSocket clientSocket = new DatagramSocket()) { + clientSocket.send( + new DatagramPacket( + END_MESSAGE, END_MESSAGE.length, InetAddress.getByName("localhost"), getPort())); + } catch (IOException e) { + System.out.println( + "[TestUDPServer] Exception sending close message. Will rely on socket timeout"); + e.printStackTrace(); + } + + // Closed state is set by the reader thread. 
Wait for it to finish + try { + readerThread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + public int getPort() { + if (port != 0) { + return port; + } else if (socket != null) { + return socket.getLocalPort(); + } else { + throw new IllegalStateException("Socket not open and port not explicitly set"); + } + } + + public BlockingQueue getMessages() { + return dataPackets; + } +} From 561f192644ffbd0e8bcb145f80ee9883c69694f1 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 9 Apr 2025 12:19:13 -0400 Subject: [PATCH 077/113] Make crash tracking smoke test more resiliant --- .../datadog/smoketest/CrashTelemetryData.java | 13 + .../smoketest/CrashtrackingSmokeTest.java | 261 ++++++------------ .../smoketest/MinimalTelemetryData.java | 5 + .../java/datadog/smoketest/TestUDPServer.java | 132 +++++++++ .../datadog/smoketest/ProcessManager.groovy | 129 +-------- .../java/datadog/smoketest/OutputThreads.java | 170 ++++++++++++ 6 files changed, 408 insertions(+), 302 deletions(-) create mode 100644 dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashTelemetryData.java create mode 100644 dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/MinimalTelemetryData.java create mode 100644 dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java create mode 100644 dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashTelemetryData.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashTelemetryData.java new file mode 100644 index 00000000000..21f6df81f01 --- /dev/null +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashTelemetryData.java @@ -0,0 +1,13 @@ +package datadog.smoketest; + +import java.util.List; + +public class CrashTelemetryData extends MinimalTelemetryData { + List payload; + + public static class LogMessage { + public String message; + public String level; + public String tags; + } +} diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 469f86f0620..e6c0b8c01c9 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -1,24 +1,33 @@ package datadog.smoketest; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assumptions.assumeFalse; +import com.squareup.moshi.JsonAdapter; +import com.squareup.moshi.Moshi; import datadog.trace.api.Platform; -import java.io.BufferedReader; import java.io.File; -import java.io.InputStreamReader; +import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Arrays; import java.util.Comparator; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import okhttp3.mockwebserver.Dispatcher; import okhttp3.mockwebserver.MockResponse; import 
okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; @@ -29,34 +38,70 @@ * that ships with OS X by default. */ public class CrashtrackingSmokeTest { + private static final long DATA_TIMEOUT_MS = 10 * 1000; + private static Path LOG_FILE_DIR; private MockWebServer tracingServer; + private TestUDPServer udpServer; + private BlockingQueue crashEvents = new LinkedBlockingQueue<>(); @BeforeAll static void setupAll() { // Only Hotspot based implementation are supported assumeFalse(Platform.isJ9()); + + LOG_FILE_DIR = Paths.get(System.getProperty("datadog.smoketest.builddir"), "reports"); } private Path tempDir; + private static OutputThreads outputThreads = new OutputThreads(); @BeforeEach void setup() throws Exception { tempDir = Files.createTempDirectory("dd-smoketest-"); + crashEvents.clear(); + + Moshi moshi = new Moshi.Builder().build(); tracingServer = new MockWebServer(); tracingServer.setDispatcher( new Dispatcher() { @Override public MockResponse dispatch(final RecordedRequest request) throws InterruptedException { + String data = request.getBody().readString(StandardCharsets.UTF_8); + + if ("/telemetry/proxy/api/v2/apmtelemetry".equals(request.getPath())) { + try { + JsonAdapter adapter = + moshi.adapter(MinimalTelemetryData.class); + MinimalTelemetryData minimal = adapter.fromJson(data); + if ("logs".equals(minimal.request_type)) { + JsonAdapter crashAdapter = + moshi.adapter(CrashTelemetryData.class); + crashEvents.add(crashAdapter.fromJson(data)); + } + } catch (IOException e) { + System.out.println("Unable to parse " + e); + } + } + System.out.println("URL ====== " + request.getPath()); + System.out.println(data); + return new MockResponse().setResponseCode(200); } }); - // tracingServer.start(8126); + + udpServer = new TestUDPServer(); + udpServer.start(); + + synchronized (outputThreads.testLogMessages) { + outputThreads.testLogMessages.clear(); + } } @AfterEach void teardown() throws Exception { tracingServer.shutdown(); + udpServer.close(); try (Stream fileStream = Files.walk(tempDir)) { fileStream.sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete); @@ -64,6 +109,11 @@ void teardown() throws Exception { Files.deleteIfExists(tempDir); } + @AfterAll + static void shutdown() { + outputThreads.close(); + } + private static String javaPath() { final String separator = FileSystems.getDefault().getSeparator(); return System.getProperty("java.home") + separator + "bin" + separator + "java"; @@ -108,52 +158,13 @@ void testCrashTracking() throws Exception { appShadowJar(), script.toString())); pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); - StringBuilder stdoutStr = new StringBuilder(); - StringBuilder stderrStr = new StringBuilder(); Process p = pb.start(); - Thread stdout = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getInputStream()))) { - br.lines() - .forEach( - l -> { - System.out.println(l); - stdoutStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - Thread stderr = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getErrorStream()))) { - br.lines() - .forEach( - l -> { - System.err.println(l); - stderrStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new 
RuntimeException(e); - } - }); - stdout.setDaemon(true); - stderr.setDaemon(true); - stdout.start(); - stderr.start(); + outputThreads.captureOutput( + p, LOG_FILE_DIR.resolve("testProcess.testCrashTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - - assertThat(stdoutStr.toString(), containsString(" was uploaded successfully")); - assertThat( - stderrStr.toString(), - containsString( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); + assertCrashData(); } /* @@ -183,52 +194,14 @@ void testCrashTrackingLegacy() throws Exception { appShadowJar(), script.toString())); pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); - StringBuilder stdoutStr = new StringBuilder(); - StringBuilder stderrStr = new StringBuilder(); Process p = pb.start(); - Thread stdout = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getInputStream()))) { - br.lines() - .forEach( - l -> { - System.out.println(l); - stdoutStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - Thread stderr = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getErrorStream()))) { - br.lines() - .forEach( - l -> { - System.err.println(l); - stderrStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - stdout.setDaemon(true); - stderr.setDaemon(true); - stdout.start(); - stderr.start(); + outputThreads.captureOutput( + p, LOG_FILE_DIR.resolve("testProcess.testCrashTrackingLegacy.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - assertThat(stdoutStr.toString(), containsString(" was uploaded successfully")); - assertThat( - stderrStr.toString(), - containsString( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); + assertCrashData(); } /* @@ -255,51 +228,14 @@ void testOomeTracking() throws Exception { "-jar", appShadowJar(), script.toString())); - StringBuilder stdoutStr = new StringBuilder(); - StringBuilder stderrStr = new StringBuilder(); Process p = pb.start(); - Thread stdout = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getInputStream()))) { - br.lines() - .forEach( - l -> { - System.out.println(l); - stdoutStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - Thread stderr = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getErrorStream()))) { - br.lines() - .forEach( - l -> { - System.err.println(l); - stderrStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - stdout.setDaemon(true); - stderr.setDaemon(true); - stdout.start(); - stderr.start(); + outputThreads.captureOutput( + p, LOG_FILE_DIR.resolve("testProcess.testOomeTracking.log").toFile()); + pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(udpServer.getPort())); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - - assertThat( - stderrStr.toString(), - containsString("com.datadog.crashtracking.OOMENotifier - OOME event sent")); - assertThat(stdoutStr.toString(), containsString("OOME Event generated successfully")); + assertOOMEvent(); } @Test @@ -326,58 +262,33 @@ void testCombineTracking() throws Exception { appShadowJar(), oomeScript.toString())); 
pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); - StringBuilder stdoutStr = new StringBuilder(); - StringBuilder stderrStr = new StringBuilder(); + pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(udpServer.getPort())); Process p = pb.start(); - Thread stdout = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getInputStream()))) { - br.lines() - .forEach( - l -> { - System.out.println(l); - stdoutStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - Thread stderr = - new Thread( - () -> { - try (BufferedReader br = - new BufferedReader(new InputStreamReader(p.getErrorStream()))) { - br.lines() - .forEach( - l -> { - System.err.println(l); - stderrStr.append(l).append('\n'); - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - stdout.setDaemon(true); - stderr.setDaemon(true); - stdout.start(); - stderr.start(); + outputThreads.captureOutput( + p, LOG_FILE_DIR.resolve("testProcess.testCombineTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); - // Crash uploader did get triggered - assertThat(stdoutStr.toString(), containsString(" was uploaded successfully")); - assertThat( - stderrStr.toString(), - containsString( - "com.datadog.crashtracking.CrashUploader - Successfully uploaded the crash files")); - - // OOME notifier did get triggered - assertThat( - stderrStr.toString(), - containsString("com.datadog.crashtracking.OOMENotifier - OOME event sent")); - assertThat(stdoutStr.toString(), containsString("OOME Event generated successfully")); + assertCrashData(); + assertOOMEvent(); + } + + private void assertCrashData() throws InterruptedException { + CrashTelemetryData crashData = crashEvents.poll(DATA_TIMEOUT_MS, TimeUnit.MILLISECONDS); + assertNotNull(crashData, "Crash data not uploaded"); + assertThat(crashData.payload.get(0).message, containsString("OutOfMemory")); + assertThat(crashData.payload.get(0).tags, containsString("severity:crash")); + } + + private void assertOOMEvent() throws InterruptedException { + byte[] data = udpServer.getMessages().poll(DATA_TIMEOUT_MS, TimeUnit.MILLISECONDS); + assertNotNull(data, "OOM Event not received"); + String event = new String(data); + + assertThat(event, startsWith("_e")); + assertThat(event, containsString(":OutOfMemoryError")); + assertThat(event, containsString("t:error")); + assertThat(event, containsString("s:java")); } } diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/MinimalTelemetryData.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/MinimalTelemetryData.java new file mode 100644 index 00000000000..4940d794de9 --- /dev/null +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/MinimalTelemetryData.java @@ -0,0 +1,5 @@ +package datadog.smoketest; + +public class MinimalTelemetryData { + String request_type; +} diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java new file mode 100644 index 00000000000..6668c78a41d --- /dev/null +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java @@ -0,0 +1,132 @@ +package datadog.smoketest; + +import java.io.Closeable; +import java.io.IOException; +import java.net.DatagramPacket; +import java.net.DatagramSocket; +import java.net.InetAddress; +import java.net.SocketException; +import 
java.net.SocketTimeoutException; +import java.util.Arrays; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; + +/** Simple test UDP Server. Not for production use but good enough for tests */ +public class TestUDPServer implements Closeable { + public static final int DEFAULT_TIMEOUT_MS = 30 * 1000; + public static final int DEFAULT_PACKET_SIZE = 2000; + + private static final byte[] END_MESSAGE = "END____".getBytes(); + + private final BlockingQueue dataPackets = new LinkedBlockingQueue<>(); + private final int timeout; + private final int packetSize; + private final int port; + + private volatile boolean closed = false; + private volatile boolean closing = false; + private DatagramSocket socket; + private Thread readerThread; + + public TestUDPServer() { + this(DEFAULT_TIMEOUT_MS, DEFAULT_PACKET_SIZE, 0); + } + + public TestUDPServer(int timeout, int packetSize, int port) { + this.timeout = timeout; + this.packetSize = packetSize; + this.port = port; + } + + public synchronized void start() throws SocketException { + if (closed) { + throw new IllegalStateException("Server closed"); + } + if (socket != null) { + // already started + return; + } + + socket = new DatagramSocket(port); + socket.setSoTimeout(timeout); + readerThread = + new Thread( + () -> { + while (!closed && !closing) { + byte[] data = new byte[packetSize]; + try { + DatagramPacket packet = new DatagramPacket(data, packetSize); + socket.receive(packet); + + byte[] trimmedData = new byte[packet.getLength()]; + System.arraycopy( + packet.getData(), packet.getOffset(), trimmedData, 0, packet.getLength()); + + if (Arrays.equals(trimmedData, END_MESSAGE)) { + System.out.println("[TestUDPServer] Received message to close"); + break; + } + System.out.println( + "[TestUDPServer] Received message: " + new String(trimmedData)); + dataPackets.add(trimmedData); + } catch (SocketTimeoutException e) { + System.out.println("[TestUDPServer] Timeout waiting for message"); + // ignore no data sent + } catch (IOException e) { + System.out.println("[TestUDPServer] Error in receiving packet " + e.getMessage()); + e.printStackTrace(); + break; + } + } + closed = true; + }, + "Test UDP Server Receiver"); + + readerThread.setDaemon(true); + readerThread.start(); + } + + @Override + public synchronized void close() { + if (closed) { + // Already closed + return; + } + if (socket == null) { + throw new IllegalStateException("Socket not open"); + } + + closing = true; + + try (DatagramSocket clientSocket = new DatagramSocket()) { + clientSocket.send( + new DatagramPacket( + END_MESSAGE, END_MESSAGE.length, InetAddress.getByName("localhost"), getPort())); + } catch (IOException e) { + System.out.println( + "[TestUDPServer] Exception sending close message. Will rely on socket timeout"); + e.printStackTrace(); + } + + // Closed state is set by the reader thread. 
Wait for it to finish + try { + readerThread.join(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + public int getPort() { + if (port != 0) { + return port; + } else if (socket != null) { + return socket.getLocalPort(); + } else { + throw new IllegalStateException("Socket not open and port not explicitly set"); + } + } + + public BlockingQueue getMessages() { + return dataPackets; + } +} diff --git a/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy b/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy index fdcc9db82be..9d7fe392fd7 100644 --- a/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy +++ b/dd-smoke-tests/src/main/groovy/datadog/smoketest/ProcessManager.groovy @@ -7,13 +7,7 @@ import spock.lang.AutoCleanup import spock.lang.Shared import spock.lang.Specification -import java.nio.ByteBuffer import java.nio.CharBuffer -import java.nio.channels.Channels -import java.nio.channels.ReadableByteChannel -import java.nio.channels.WritableByteChannel -import java.nio.charset.CharsetDecoder -import java.nio.charset.StandardCharsets import java.nio.file.Files import java.nio.file.Paths import java.util.concurrent.TimeoutException @@ -78,93 +72,6 @@ abstract class ProcessManager extends Specification { @AutoCleanup OutputThreads outputThreads = new OutputThreads() - class OutputThreads implements Closeable { - final ThreadGroup tg = new ThreadGroup("smoke-output") - final List testLogMessages = new ArrayList<>() - - void close() { - tg.interrupt() - Thread[] threads = new Thread[tg.activeCount()] - tg.enumerate(threads) - threads*.join() - } - - @CompileStatic - class ProcessOutputRunnable implements Runnable { - final ReadableByteChannel rc - ByteBuffer buffer = ByteBuffer.allocate(MAX_LINE_SIZE) - final WritableByteChannel wc - CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder() - - ProcessOutputRunnable(InputStream is, File output) { - rc = Channels.newChannel(is) - wc = Channels.newChannel(new FileOutputStream(output)) - } - - @Override - void run() { - boolean online = true - while (online) { - // we may have data in the buffer we did not consume for line splitting purposes - int skip = buffer.position() - - try { - if (rc.read(buffer) == -1) { - online = false - } - } catch (IOException ioe) { - online = false - } - - buffer.flip() - // write to log file - wc.write(buffer.duplicate().position(skip) as ByteBuffer) - - // subBuff will always start at the beginning of the next (potential) line - ByteBuffer subBuff = buffer.duplicate() - int consumed = 0 - while (true) { - boolean hasRemaining = subBuff.hasRemaining() - if (hasRemaining) { - int c = subBuff.get() - if (c != '\n' && c != '\r') { - continue - } - // found line end - } else if (online && consumed > 0) { - break - // did not find line end, but we already consumed a line - // save the data for the next read iteration - } // else we did not consume any line, or there will be no further reads. 
- // Treat the buffer as single line despite lack of terminator - - consumed += subBuff.position() - String line = decoder.decode(subBuff.duplicate().flip() as ByteBuffer).toString().trim() - if (line != '') { - synchronized (testLogMessages) { - testLogMessages << line - testLogMessages.notifyAll() - } - } - - if (hasRemaining) { - subBuff = subBuff.slice() - } else { - break - } - } - - buffer.position(consumed) - buffer.compact() - } - } - } - - void captureOutput(Process p, File outputFile) { - new Thread(tg, new ProcessOutputRunnable(p.inputStream, outputFile)).start() - } - } - def setupSpec() { if (buildDirectory == null || shadowJarPath == null) { throw new AssertionError("Expected system properties not found. Smoke tests have to be run from Gradle. Please make sure that is the case.") @@ -333,37 +240,7 @@ abstract class ProcessManager extends Specification { * @param checker should return true if a match is found */ void processTestLogLines(Closure checker) { - int l = 0 - def tlm = outputThreads.testLogMessages - long waitStart - - while (true) { - String msg - synchronized (tlm) { - if (l >= tlm.size()) { - long waitTime - if (waitStart != 0) { - waitTime = 5000 - (System.currentTimeMillis() - waitStart) - if (waitTime < 0) { - throw new TimeoutException() - } - } else { - waitStart = System.currentTimeMillis() - waitTime = 5000 - } - tlm.wait(waitTime) - } - if (l >= tlm.size()) { - throw new TimeoutException() - } - // the array is only cleared at the end of the test, so index l exists - msg = tlm.get(l++) - } - - if (checker(msg)) { - break - } - } + outputThreads.processTestLogLines {return checker(it) } } protected void beforeProcessBuilders() {} @@ -383,13 +260,11 @@ abstract class ProcessManager extends Specification { return "01234567890abcdef123456789ABCDEF" } - static final int MAX_LINE_SIZE = 1024 * 1024 - @CompileStatic @SuppressForbidden private static void eachLine(File file, Closure closure) { def reader = new InputStreamReader(new FileInputStream(file)) - CharBuffer buffer = CharBuffer.allocate(MAX_LINE_SIZE) + CharBuffer buffer = CharBuffer.allocate(OutputThreads.MAX_LINE_SIZE) while (reader.read(buffer) != -1) { buffer.flip() while (buffer.hasRemaining()) { diff --git a/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java new file mode 100644 index 00000000000..5201988c894 --- /dev/null +++ b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java @@ -0,0 +1,170 @@ +package datadog.smoketest; + +import java.io.Closeable; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.charset.CharacterCodingException; +import java.nio.charset.CharsetDecoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeoutException; +import java.util.function.Function; + +public class OutputThreads implements Closeable { + private static final long THREAD_JOIN_TIMEOUT_MILLIS = 10 * 1000; + public static final int MAX_LINE_SIZE = 1024 * 1024; + + final ThreadGroup tg = new ThreadGroup("smoke-output"); + final List testLogMessages = new ArrayList<>(); + + public void close() { + tg.interrupt(); + Thread[] threads = new Thread[tg.activeCount()]; + 
tg.enumerate(threads); + + for (Thread thread : threads) { + try { + thread.join(THREAD_JOIN_TIMEOUT_MILLIS); + } catch (InterruptedException e) { + // ignore + } + } + } + + class ProcessOutputRunnable implements Runnable { + final ReadableByteChannel rc; + ByteBuffer buffer = ByteBuffer.allocate(MAX_LINE_SIZE); + final WritableByteChannel wc; + CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder(); + + ProcessOutputRunnable(InputStream is, File output) throws FileNotFoundException { + rc = Channels.newChannel(is); + wc = Channels.newChannel(new FileOutputStream(output)); + } + + @Override + public void run() { + boolean online = true; + while (online) { + // we may have data in the buffer we did not consume for line splitting purposes + int skip = buffer.position(); + + try { + if (rc.read(buffer) == -1) { + online = false; + } + } catch (IOException ioe) { + online = false; + } + + buffer.flip(); + // write to log file + try { + wc.write((ByteBuffer) buffer.duplicate().position(skip)); + } catch (IOException e) { + System.out.println("ERROR WRITING TO LOG FILE: " + e.getMessage()); + e.printStackTrace(); + return; + } + + // subBuff will always start at the beginning of the next (potential) line + ByteBuffer subBuff = buffer.duplicate(); + int consumed = 0; + while (true) { + boolean hasRemaining = subBuff.hasRemaining(); + if (hasRemaining) { + int c = subBuff.get(); + if (c != '\n' && c != '\r') { + continue; + } + // found line end + } else if (online && consumed > 0) { + break; + // did not find line end, but we already consumed a line + // save the data for the next read iteration + } // else we did not consume any line, or there will be no further reads. + // Treat the buffer as single line despite lack of terminator + + consumed += subBuff.position(); + String line = null; + try { + line = decoder.decode((ByteBuffer) subBuff.duplicate().flip()).toString().trim(); + } catch (CharacterCodingException e) { + throw new RuntimeException(e); + } + + if (!line.isEmpty()) { + synchronized (testLogMessages) { + testLogMessages.add(line); + testLogMessages.notifyAll(); + } + } + + if (hasRemaining) { + subBuff = subBuff.slice(); + } else { + break; + } + } + + buffer.position(consumed); + buffer.compact(); + } + } + } + + public void captureOutput(Process p, File outputFile) throws FileNotFoundException { + new Thread(tg, new ProcessOutputRunnable(p.getInputStream(), outputFile)).start(); + } + + /** + * Tries to find a log line that matches the given predicate. After reading all the log lines + * already collected, it will wait up to 5 seconds for a new line matching the predicate. 
+ * + * @param checker should return true if a match is found + */ + public boolean processTestLogLines(Function checker) throws TimeoutException { + int l = 0; + long waitStart = 0; + + while (true) { + String msg; + synchronized (testLogMessages) { + if (l >= testLogMessages.size()) { + long waitTime; + if (waitStart != 0) { + waitTime = 10000 - (System.currentTimeMillis() - waitStart); + if (waitTime < 0) { + throw new TimeoutException(); + } + } else { + waitStart = System.currentTimeMillis(); + waitTime = 10000; + } + try { + testLogMessages.wait(waitTime); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + if (l >= testLogMessages.size()) { + throw new TimeoutException(); + } + // the array is only cleared at the end of the test, so index l exists + msg = testLogMessages.get(l++); + } + + if (checker.apply(msg)) { + return true; + } + } + } +} From 551f88e69b54d4c2e2b5e2efb8723272b4a12bdf Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 9 Apr 2025 13:17:53 -0400 Subject: [PATCH 078/113] loop through messages until the correct event --- .../smoketest/CrashtrackingSmokeTest.java | 11 ++++++----- .../java/datadog/smoketest/TestUDPServer.java | 16 ++++++++-------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index e6c0b8c01c9..9e0f3cd9f5a 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -1,7 +1,6 @@ package datadog.smoketest; import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -282,11 +281,13 @@ private void assertCrashData() throws InterruptedException { } private void assertOOMEvent() throws InterruptedException { - byte[] data = udpServer.getMessages().poll(DATA_TIMEOUT_MS, TimeUnit.MILLISECONDS); - assertNotNull(data, "OOM Event not received"); - String event = new String(data); + String event; + do { + event = udpServer.getMessages().poll(DATA_TIMEOUT_MS, TimeUnit.MILLISECONDS); + } while (event != null && !event.startsWith("_e")); + + assertNotNull(event, "OOM Event not received"); - assertThat(event, startsWith("_e")); assertThat(event, containsString(":OutOfMemoryError")); assertThat(event, containsString("t:error")); assertThat(event, containsString("s:java")); diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java index 6668c78a41d..a748b907467 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/TestUDPServer.java @@ -18,7 +18,7 @@ public class TestUDPServer implements Closeable { private static final byte[] END_MESSAGE = "END____".getBytes(); - private final BlockingQueue dataPackets = new LinkedBlockingQueue<>(); + private final BlockingQueue dataPackets = new LinkedBlockingQueue<>(); private final int timeout; private final int packetSize; private final int port; @@ -63,17 +63,17 @@ public synchronized void start() throws 
SocketException { packet.getData(), packet.getOffset(), trimmedData, 0, packet.getLength()); if (Arrays.equals(trimmedData, END_MESSAGE)) { - System.out.println("[TestUDPServer] Received message to close"); + System.err.println("[TestUDPServer] Received message to close"); break; } - System.out.println( + System.err.println( "[TestUDPServer] Received message: " + new String(trimmedData)); - dataPackets.add(trimmedData); + dataPackets.add(new String(trimmedData)); } catch (SocketTimeoutException e) { - System.out.println("[TestUDPServer] Timeout waiting for message"); + System.err.println("[TestUDPServer] Timeout waiting for message"); // ignore no data sent } catch (IOException e) { - System.out.println("[TestUDPServer] Error in receiving packet " + e.getMessage()); + System.err.println("[TestUDPServer] Error in receiving packet " + e.getMessage()); e.printStackTrace(); break; } @@ -103,7 +103,7 @@ public synchronized void close() { new DatagramPacket( END_MESSAGE, END_MESSAGE.length, InetAddress.getByName("localhost"), getPort())); } catch (IOException e) { - System.out.println( + System.err.println( "[TestUDPServer] Exception sending close message. Will rely on socket timeout"); e.printStackTrace(); } @@ -126,7 +126,7 @@ public int getPort() { } } - public BlockingQueue getMessages() { + public BlockingQueue getMessages() { return dataPackets; } } From 3df5bbfe7a0cc467e4412a9da2829db047e5e776 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 9 Apr 2025 15:35:30 -0400 Subject: [PATCH 079/113] some debug --- .../test/java/datadog/smoketest/CrashtrackingSmokeTest.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 9e0f3cd9f5a..3fe8800f954 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -37,7 +37,7 @@ * that ships with OS X by default. 
*/ public class CrashtrackingSmokeTest { - private static final long DATA_TIMEOUT_MS = 10 * 1000; + private static final long DATA_TIMEOUT_MS = 25 * 1000; private static Path LOG_FILE_DIR; private MockWebServer tracingServer; private TestUDPServer udpServer; @@ -232,6 +232,7 @@ void testOomeTracking() throws Exception { outputThreads.captureOutput( p, LOG_FILE_DIR.resolve("testProcess.testOomeTracking.log").toFile()); pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(udpServer.getPort())); + System.out.println("Set port to: " + pb.environment().get("DD_DOGSTATSD_PORT")); assertNotEquals(0, p.waitFor(), "Application should have crashed"); assertOOMEvent(); @@ -263,6 +264,8 @@ void testCombineTracking() throws Exception { pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(udpServer.getPort())); + System.out.println("Set port to: " + pb.environment().get("DD_DOGSTATSD_PORT")); + Process p = pb.start(); outputThreads.captureOutput( p, LOG_FILE_DIR.resolve("testProcess.testCombineTracking.log").toFile()); From e4b8c9ea93d4d3bdbeee627b6d73775ee4aeb210 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 10 Apr 2025 09:38:35 -0400 Subject: [PATCH 080/113] configurable dogstatsd port --- .../communication/monitor/DDAgentStatsDClientManager.java | 3 +-- internal-api/src/main/java/datadog/trace/api/Config.java | 7 +++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/communication/src/main/java/datadog/communication/monitor/DDAgentStatsDClientManager.java b/communication/src/main/java/datadog/communication/monitor/DDAgentStatsDClientManager.java index 5dea07a019e..43fd170ac7d 100644 --- a/communication/src/main/java/datadog/communication/monitor/DDAgentStatsDClientManager.java +++ b/communication/src/main/java/datadog/communication/monitor/DDAgentStatsDClientManager.java @@ -1,6 +1,5 @@ package datadog.communication.monitor; -import static datadog.trace.api.ConfigDefaults.DEFAULT_DOGSTATSD_PORT; import static datadog.trace.bootstrap.instrumentation.api.WriterConstants.LOGGING_WRITER_TYPE; import datadog.trace.api.Config; @@ -22,7 +21,7 @@ public static StatsDClientManager statsDClientManager() { return INSTANCE; } - private static final AtomicInteger defaultStatsDPort = new AtomicInteger(DEFAULT_DOGSTATSD_PORT); + private static final AtomicInteger defaultStatsDPort = new AtomicInteger(Config.get().getDogsStatsDPort()); public static void setDefaultStatsDPort(final int newPort) { if (newPort > 0 && defaultStatsDPort.getAndSet(newPort) != newPort) { diff --git a/internal-api/src/main/java/datadog/trace/api/Config.java b/internal-api/src/main/java/datadog/trace/api/Config.java index 69cec835552..0a6f84057b4 100644 --- a/internal-api/src/main/java/datadog/trace/api/Config.java +++ b/internal-api/src/main/java/datadog/trace/api/Config.java @@ -527,6 +527,7 @@ public static String getHostName() { private final List traceAgentArgs; private final String dogStatsDPath; private final List dogStatsDArgs; + private final int dogStatsDPort; private String env; private String version; @@ -1084,6 +1085,8 @@ private Config(final ConfigProvider configProvider, final InstrumenterConfig ins configProvider.getInteger( DOGSTATSD_START_DELAY, DEFAULT_DOGSTATSD_START_DELAY, JMX_FETCH_START_DELAY); + dogStatsDPort = configProvider.getInteger(DOGSTATSD_PORT, DEFAULT_DOGSTATSD_PORT); + statsDClientQueueSize = configProvider.getInteger(STATSD_CLIENT_QUEUE_SIZE); statsDClientSocketBuffer = 
configProvider.getInteger(STATSD_CLIENT_SOCKET_BUFFER); statsDClientSocketTimeout = configProvider.getInteger(STATSD_CLIENT_SOCKET_TIMEOUT); @@ -3528,6 +3531,10 @@ public List getDogStatsDArgs() { return dogStatsDArgs; } + public int getDogsStatsDPort() { + return dogStatsDPort; + } + public String getConfigFileStatus() { return configFileStatus; } From 8794264d1158ddabf88fc76614afa106376dcb9b Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 10 Apr 2025 10:19:55 -0400 Subject: [PATCH 081/113] enable everything --- .gitlab-ci.yml | 96 +++++++++++++++++++++++++------------------------- 1 file changed, 48 insertions(+), 48 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index add193af52c..5f5eb7fa342 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -34,17 +34,17 @@ variables: .test_matrix: &test_matrix - testJvm: &test_jvms - "8" -# - "semeru11" -# - "oracle8" -# - "21" -# - "ubuntu17" -# - "zulu8" -# - "semeru8" -# - "ibm8" -# - "zulu11" + - "semeru11" + - "oracle8" + - "21" + - "ubuntu17" + - "zulu8" + - "semeru8" + - "ibm8" + - "zulu11" - "11" - "17" -# - "semeru17" + - "semeru17" # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time # These blocks emulate "parallel" by including it in the matrix @@ -443,45 +443,45 @@ test_inst: parallel: matrix: *test_matrix_12 -#test_inst_latest: -# extends: .test_job_with_test_agent -# variables: -# GRADLE_TARGET: ":instrumentationLatestDepTest" -# CACHE_TYPE: "latestDep" -# parallel: -# matrix: -# - testJvm: ["8", "17", "21" ] -# # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time -# # This emulates "parallel" by including it in the matrix -# CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] -# -#test_flaky: -# extends: .test_job_with_test_agent -# variables: -# GRADLE_PARAMS: "-PrunFlakyTests" -# CACHE_TYPE: "base" -# testJvm: "8" -# CONTINUE_ON_FAILURE: "true" -# parallel: -# matrix: -# - GRADLE_TARGET: [":baseTest", ":smokeTest", ":debuggerTest"] -# # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time -# # This emulates "parallel" by including it in the matrix -# CI_SPLIT: [ "1/4", "2/4", "3/4", "4/4" ] -# -#test_flaky_inst: -# extends: .test_job -# variables: -# GRADLE_TARGET: ":instrumentationTest" -# GRADLE_PARAMS: "-PrunFlakyTests" -# CACHE_TYPE: "inst" -# testJvm: "8" -# CONTINUE_ON_FAILURE: "true" -# parallel: -# matrix: -# # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time -# # This emulates "parallel" by including it in the matrix -# - CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] +test_inst_latest: + extends: .test_job_with_test_agent + variables: + GRADLE_TARGET: ":instrumentationLatestDepTest" + CACHE_TYPE: "latestDep" + parallel: + matrix: + - testJvm: ["8", "17", "21" ] + # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time + # This emulates "parallel" by including it in the matrix + CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] + +test_flaky: + extends: .test_job_with_test_agent + variables: + GRADLE_PARAMS: "-PrunFlakyTests" + CACHE_TYPE: "base" + testJvm: "8" + CONTINUE_ON_FAILURE: "true" + parallel: + matrix: + - GRADLE_TARGET: [":baseTest", ":smokeTest", ":debuggerTest"] + # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time + # This emulates "parallel" by including it in the matrix + 
CI_SPLIT: [ "1/4", "2/4", "3/4", "4/4" ] + +test_flaky_inst: + extends: .test_job + variables: + GRADLE_TARGET: ":instrumentationTest" + GRADLE_PARAMS: "-PrunFlakyTests" + CACHE_TYPE: "inst" + testJvm: "8" + CONTINUE_ON_FAILURE: "true" + parallel: + matrix: + # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time + # This emulates "parallel" by including it in the matrix + - CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] test_profiling: extends: .test_job From d6250d5207eea976c9d36d7a35ab07bbbb1c1c17 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 10 Apr 2025 18:17:07 -0400 Subject: [PATCH 082/113] use sleep instead of park nanos --- .../src/main/java/com/datadog/crashtracking/OOMENotifier.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java b/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java index c9728426a98..4898aee608c 100644 --- a/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java +++ b/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java @@ -13,7 +13,7 @@ public final class OOMENotifier { // This method is called via CLI so we don't need to be paranoid about the forbiddend APIs @SuppressForbidden - public static void sendOomeEvent(String taglist) { + public static void sendOomeEvent(String taglist) throws Exception { try (StatsDClient client = statsDClientManager().statsDClient(null, null, null, null, null, false)) { String[] tags = taglist.split(","); @@ -24,7 +24,7 @@ public static void sendOomeEvent(String taglist) { "Java process encountered out of memory error", tags); log.info("OOME event sent"); - LockSupport.parkNanos(2_000_000_000L); // wait 2s to allow statsd client flushing the event + Thread.sleep(2 * 1000); // wait 2s to allow statsd client flushing the event } } } From 0ecc8bb9697af1124b93e26b64083393c59e7e9d Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 10 Apr 2025 18:35:29 -0400 Subject: [PATCH 083/113] save body data ahead of time --- .../instrumentation/springweb6/boot/TestController.groovy | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy index 29f41d2c101..02ff69ef0fc 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy @@ -30,9 +30,10 @@ class TestController { @RequestMapping("/forwarded") @ResponseBody CompletableFuture forwarded(HttpServletRequest request) { + def body = request.getHeader("x-forwarded-for") CompletableFuture.supplyAsync { HttpServerTest.controller(FORWARDED) { - request.getHeader("x-forwarded-for") + body } } } From 38a35ade728887c96b57f5d7663e431517ce68ca Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 10 Apr 2025 18:40:33 -0400 Subject: [PATCH 084/113] retry once for system failures --- .gitlab-ci.yml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/.gitlab-ci.yml 
b/.gitlab-ci.yml index 5f5eb7fa342..f2bf027b45b 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -260,6 +260,15 @@ test_published_artifacts: paths: - ./check_reports - '.gradle/daemon/*/*.out.log' + retry: + max: 1 + when: + - unknown_failure + - stuck_or_timeout_failure + - runner_system_failure + - unmet_prerequisites + - scheduler_failure + - data_integrity_failure check_base: extends: .check_job @@ -389,6 +398,15 @@ muzzle-dep-report: - '.gradle/daemon/*/*.out.log' reports: junit: results/*.xml + retry: + max: 1 + when: + - unknown_failure + - stuck_or_timeout_failure + - runner_system_failure + - unmet_prerequisites + - scheduler_failure + - data_integrity_failure .test_job_with_test_agent: extends: .test_job From 2e0d7ba52e4f78ce953c7291db53bd812b669db4 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 10 Apr 2025 19:10:14 -0400 Subject: [PATCH 085/113] spotless --- .../src/main/java/com/datadog/crashtracking/OOMENotifier.java | 1 - 1 file changed, 1 deletion(-) diff --git a/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java b/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java index 4898aee608c..1c1b317074d 100644 --- a/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java +++ b/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java @@ -4,7 +4,6 @@ import datadog.trace.api.StatsDClient; import de.thetaphi.forbiddenapis.SuppressForbidden; -import java.util.concurrent.locks.LockSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; From ac863f0f44f7b34c8175a4005461d095bbfc0a74 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 11 Apr 2025 12:32:29 -0400 Subject: [PATCH 086/113] cleanup --- .../datadog/crashtracking/OOMENotifier.java | 5 ++- .../smoketest/CrashtrackingSmokeTest.java | 40 ++++++++----------- .../java/datadog/smoketest/OutputThreads.java | 6 +++ 3 files changed, 26 insertions(+), 25 deletions(-) diff --git a/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java b/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java index 1c1b317074d..c9728426a98 100644 --- a/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java +++ b/dd-java-agent/agent-crashtracking/src/main/java/com/datadog/crashtracking/OOMENotifier.java @@ -4,6 +4,7 @@ import datadog.trace.api.StatsDClient; import de.thetaphi.forbiddenapis.SuppressForbidden; +import java.util.concurrent.locks.LockSupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -12,7 +13,7 @@ public final class OOMENotifier { // This method is called via CLI so we don't need to be paranoid about the forbiddend APIs @SuppressForbidden - public static void sendOomeEvent(String taglist) throws Exception { + public static void sendOomeEvent(String taglist) { try (StatsDClient client = statsDClientManager().statsDClient(null, null, null, null, null, false)) { String[] tags = taglist.split(","); @@ -23,7 +24,7 @@ public static void sendOomeEvent(String taglist) throws Exception { "Java process encountered out of memory error", tags); log.info("OOME event sent"); - Thread.sleep(2 * 1000); // wait 2s to allow statsd client flushing the event + LockSupport.parkNanos(2_000_000_000L); // wait 2s to allow statsd client flushing the event } } } diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java 
b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 3fe8800f954..54d9290941c 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -37,22 +37,22 @@ * that ships with OS X by default. */ public class CrashtrackingSmokeTest { - private static final long DATA_TIMEOUT_MS = 25 * 1000; - private static Path LOG_FILE_DIR; + private static final long DATA_TIMEOUT_MS = 10 * 1000; + private static final OutputThreads OUTPUT = new OutputThreads(); + private static final Path LOG_FILE_DIR = + Paths.get(System.getProperty("datadog.smoketest.builddir"), "reports"); + private MockWebServer tracingServer; private TestUDPServer udpServer; - private BlockingQueue crashEvents = new LinkedBlockingQueue<>(); + private final BlockingQueue crashEvents = new LinkedBlockingQueue<>(); @BeforeAll static void setupAll() { // Only Hotspot based implementation are supported assumeFalse(Platform.isJ9()); - - LOG_FILE_DIR = Paths.get(System.getProperty("datadog.smoketest.builddir"), "reports"); } private Path tempDir; - private static OutputThreads outputThreads = new OutputThreads(); @BeforeEach void setup() throws Exception { @@ -65,7 +65,9 @@ void setup() throws Exception { tracingServer.setDispatcher( new Dispatcher() { @Override - public MockResponse dispatch(final RecordedRequest request) throws InterruptedException { + public MockResponse dispatch(final RecordedRequest request) { + System.out.println("URL ====== " + request.getPath()); + String data = request.getBody().readString(StandardCharsets.UTF_8); if ("/telemetry/proxy/api/v2/apmtelemetry".equals(request.getPath())) { @@ -82,7 +84,7 @@ public MockResponse dispatch(final RecordedRequest request) throws InterruptedEx System.out.println("Unable to parse " + e); } } - System.out.println("URL ====== " + request.getPath()); + System.out.println(data); return new MockResponse().setResponseCode(200); @@ -92,9 +94,7 @@ public MockResponse dispatch(final RecordedRequest request) throws InterruptedEx udpServer = new TestUDPServer(); udpServer.start(); - synchronized (outputThreads.testLogMessages) { - outputThreads.testLogMessages.clear(); - } + OUTPUT.clearMessages(); } @AfterEach @@ -110,7 +110,7 @@ void teardown() throws Exception { @AfterAll static void shutdown() { - outputThreads.close(); + OUTPUT.close(); } private static String javaPath() { @@ -159,8 +159,7 @@ void testCrashTracking() throws Exception { pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); Process p = pb.start(); - outputThreads.captureOutput( - p, LOG_FILE_DIR.resolve("testProcess.testCrashTracking.log").toFile()); + OUTPUT.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testCrashTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); assertCrashData(); @@ -195,7 +194,7 @@ void testCrashTrackingLegacy() throws Exception { pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); Process p = pb.start(); - outputThreads.captureOutput( + OUTPUT.captureOutput( p, LOG_FILE_DIR.resolve("testProcess.testCrashTrackingLegacy.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); @@ -227,12 +226,10 @@ void testOomeTracking() throws Exception { "-jar", appShadowJar(), script.toString())); + pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(udpServer.getPort())); Process p = 
pb.start(); - outputThreads.captureOutput( - p, LOG_FILE_DIR.resolve("testProcess.testOomeTracking.log").toFile()); - pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(udpServer.getPort())); - System.out.println("Set port to: " + pb.environment().get("DD_DOGSTATSD_PORT")); + OUTPUT.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testOomeTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); assertOOMEvent(); @@ -264,11 +261,8 @@ void testCombineTracking() throws Exception { pb.environment().put("DD_TRACE_AGENT_PORT", String.valueOf(tracingServer.getPort())); pb.environment().put("DD_DOGSTATSD_PORT", String.valueOf(udpServer.getPort())); - System.out.println("Set port to: " + pb.environment().get("DD_DOGSTATSD_PORT")); - Process p = pb.start(); - outputThreads.captureOutput( - p, LOG_FILE_DIR.resolve("testProcess.testCombineTracking.log").toFile()); + OUTPUT.captureOutput(p, LOG_FILE_DIR.resolve("testProcess.testCombineTracking.log").toFile()); assertNotEquals(0, p.waitFor(), "Application should have crashed"); diff --git a/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java index 5201988c894..7e369280cf7 100644 --- a/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java +++ b/dd-smoke-tests/src/main/java/datadog/smoketest/OutputThreads.java @@ -167,4 +167,10 @@ public boolean processTestLogLines(Function checker) throws Tim } } } + + public void clearMessages() { + synchronized (testLogMessages) { + testLogMessages.clear(); + } + } } From da9a29c3510c2bdec71317f49f7378f087aa2c55 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Mon, 14 Apr 2025 11:47:58 -0400 Subject: [PATCH 087/113] spotless/merge issues --- .../test/java/datadog/smoketest/CrashtrackingSmokeTest.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 2bd83005df5..54d9290941c 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -1,7 +1,6 @@ package datadog.smoketest; import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -51,12 +50,9 @@ public class CrashtrackingSmokeTest { static void setupAll() { // Only Hotspot based implementation are supported assumeFalse(Platform.isJ9()); - - LOG_FILE_DIR = Paths.get(System.getProperty("datadog.smoketest.builddir"), "reports"); } private Path tempDir; - private static OutputThreads outputThreads = new OutputThreads(); @BeforeEach void setup() throws Exception { From 0f26adf2e25a2b0cdc66b959b428145f2a59dd73 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 15 Apr 2025 12:15:47 -0400 Subject: [PATCH 088/113] Skip ExceptionHistogramTest on J9 --- .../jfr/exceptions/ExceptionHistogramTest.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java 
b/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java index be40f1ea288..79aba1d601b 100644 --- a/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java +++ b/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java @@ -5,10 +5,12 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeFalse; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedMap; import datadog.trace.api.Config; +import datadog.trace.api.Platform; import java.io.IOException; import java.time.Instant; import java.util.Comparator; @@ -19,6 +21,7 @@ import jdk.jfr.FlightRecorder; import jdk.jfr.Recording; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.openjdk.jmc.common.item.Aggregators; @@ -58,6 +61,11 @@ public boolean equals(final Object obj) { private Recording snapshot; private ExceptionHistogram instance; + @BeforeAll + public static void precheck() { + assumeFalse(Platform.isJ9()); + } + @BeforeEach public void setup() { recording = new Recording(); From edeebc25b8eb3533e43860f9fccf8d1cf832b0b6 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 15 Apr 2025 13:15:43 -0400 Subject: [PATCH 089/113] add a print statement around the body for debugging --- .../trace/instrumentation/springweb6/boot/TestController.groovy | 1 + 1 file changed, 1 insertion(+) diff --git a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy index 02ff69ef0fc..c383bc46a7c 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy @@ -31,6 +31,7 @@ class TestController { @ResponseBody CompletableFuture forwarded(HttpServletRequest request) { def body = request.getHeader("x-forwarded-for") + println "FORWARDED FOR: >" + body + "<|||" CompletableFuture.supplyAsync { HttpServerTest.controller(FORWARDED) { body From 078a3c7b86ad790e47208c810500bc13901184d3 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 15 Apr 2025 13:16:29 -0400 Subject: [PATCH 090/113] comment out all but 8, 17, and 21 for now --- .gitlab-ci.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f2bf027b45b..ec496574342 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -34,17 +34,17 @@ variables: .test_matrix: &test_matrix - testJvm: &test_jvms - "8" - - "semeru11" - - "oracle8" +# - "semeru11" +# - "oracle8" - "21" - - "ubuntu17" - - "zulu8" - - "semeru8" - - "ibm8" - - "zulu11" - - "11" +# - "ubuntu17" +# - "zulu8" +# - "semeru8" +# - "ibm8" +# - "zulu11" +# - "11" - "17" - - "semeru17" +# - "semeru17" # Gitlab doesn't support "parallel" and 
"parallel:matrix" at the same time # These blocks emulate "parallel" by including it in the matrix From a546529e7bfadcee767b06e78ba1ee56b9c6518f Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 15 Apr 2025 15:21:58 -0400 Subject: [PATCH 091/113] fix spring 6 forwarding headers --- .../instrumentation/springweb6/boot/SpringBootBasedTest.groovy | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/SpringBootBasedTest.groovy b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/SpringBootBasedTest.groovy index 7700fac82ef..4c97b185c40 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/SpringBootBasedTest.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/SpringBootBasedTest.groovy @@ -72,7 +72,8 @@ class SpringBootBasedTest extends HttpServerTest void start() { app.setDefaultProperties(["server.port": 0, "server.context-path": "/$servletContext", "spring.mvc.throw-exception-if-no-handler-found": false, - "spring.web.resources.add-mappings" : false]) + "spring.web.resources.add-mappings" : false, + "server.forward-headers-strategy": "NONE"]) context = app.run() port = (context as ServletWebServerApplicationContext).webServer.port try { From f16a6aff0cb14c4620d87797b63f030a19164a64 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 15 Apr 2025 17:15:02 -0400 Subject: [PATCH 092/113] revert logging statements --- .../instrumentation/springweb6/boot/TestController.groovy | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy index c383bc46a7c..29f41d2c101 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/TestController.groovy @@ -30,11 +30,9 @@ class TestController { @RequestMapping("/forwarded") @ResponseBody CompletableFuture forwarded(HttpServletRequest request) { - def body = request.getHeader("x-forwarded-for") - println "FORWARDED FOR: >" + body + "<|||" CompletableFuture.supplyAsync { HttpServerTest.controller(FORWARDED) { - body + request.getHeader("x-forwarded-for") } } } From a1e0bb149c8e951f873408d4cb1e2a0a47b68c90 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 15 Apr 2025 18:15:44 -0400 Subject: [PATCH 093/113] needs to be set in 3.1 for latestDepTest --- .../src/test/groovy/test/boot/SpringBootBasedTest.groovy | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootBasedTest.groovy b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootBasedTest.groovy index a80247071ee..5829a8288c1 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootBasedTest.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootBasedTest.groovy @@ -65,7 +65,9 @@ class 
SpringBootBasedTest extends HttpServerTest @Override HttpServer server() { - new SpringBootServer(application(), servletContext) + def app = application() + app.setDefaultProperties(["server.forward-headers-strategy": "NONE"]) + new SpringBootServer(app, servletContext) } @Override From 3f62a1826d2db903e4dd12fa5034719e30272d18 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 15 Apr 2025 20:29:06 -0400 Subject: [PATCH 094/113] faster OOM --- .../smoketest/crashtracking/CrashtrackingTestApplication.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dd-smoke-tests/crashtracking/src/main/java/datadog/smoketest/crashtracking/CrashtrackingTestApplication.java b/dd-smoke-tests/crashtracking/src/main/java/datadog/smoketest/crashtracking/CrashtrackingTestApplication.java index 65b3305345e..5e8be98ea6d 100644 --- a/dd-smoke-tests/crashtracking/src/main/java/datadog/smoketest/crashtracking/CrashtrackingTestApplication.java +++ b/dd-smoke-tests/crashtracking/src/main/java/datadog/smoketest/crashtracking/CrashtrackingTestApplication.java @@ -39,7 +39,7 @@ public static void main(String[] args) throws Exception { while (size < 1024) { buffer.add(new byte[size * 1024 * 1024]); System.out.println("Allocated " + size + "MB"); - if (size < 256) { + if (size < 512) { size *= 2; } } From 014057c8a4dc421dcc55c7833c760d908037bb69 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 16 Apr 2025 11:48:13 -0400 Subject: [PATCH 095/113] property was set in incorrect location --- .../src/test/groovy/test/boot/SpringBootBasedTest.groovy | 4 +--- .../src/test/groovy/test/boot/SpringBootServer.groovy | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootBasedTest.groovy b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootBasedTest.groovy index 5829a8288c1..a80247071ee 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootBasedTest.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootBasedTest.groovy @@ -65,9 +65,7 @@ class SpringBootBasedTest extends HttpServerTest @Override HttpServer server() { - def app = application() - app.setDefaultProperties(["server.forward-headers-strategy": "NONE"]) - new SpringBootServer(app, servletContext) + new SpringBootServer(application(), servletContext) } @Override diff --git a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootServer.groovy b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootServer.groovy index bc875bb70d5..2befeef9e7f 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootServer.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootServer.groovy @@ -23,7 +23,7 @@ class SpringBootServer implements WebsocketServer { @Override void start() { - app.setDefaultProperties(["server.port": 0, "server.context-path": "/$servletContext"]) + app.setDefaultProperties(["server.port": 0, "server.context-path": "/$servletContext", "server.forward-headers-strategy": "NONE"]) context = app.run() as EmbeddedWebApplicationContext port = context.embeddedServletContainer.port try { From 3c28009c33a17bce7b0693f15d66edcb2df8f3f5 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 16 Apr 2025 12:25:22 -0400 Subject: [PATCH 096/113] ssi_smoke, flaky, and esoteric jvms on master only --- 
.gitlab-ci.yml | 36 ++++++++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ec496574342..b3a18364b2e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -27,6 +27,7 @@ variables: GRADLE_PLUGIN_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/gradle-plugin-portal-proxy/" JAVA_BUILD_IMAGE_VERSION: "v25.01" REPO_NOTIFICATION_CHANNEL: "#apm-java-escalations" + DEFAULT_TEST_JVMS: /^(8|11|17|21)$/ PROFILE_TESTS: description: "Enable profiling of tests" value: "false" @@ -34,17 +35,17 @@ variables: .test_matrix: &test_matrix - testJvm: &test_jvms - "8" -# - "semeru11" -# - "oracle8" - - "21" -# - "ubuntu17" -# - "zulu8" -# - "semeru8" -# - "ibm8" -# - "zulu11" -# - "11" + - "11" - "17" -# - "semeru17" + - "21" + - "semeru11" + - "oracle8" + - "ubuntu17" + - "zulu8" + - "semeru8" + - "ibm8" + - "zulu11" + - "semeru17" # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time # These blocks emulate "parallel" by including it in the matrix @@ -64,6 +65,10 @@ variables: - testJvm: *test_jvms CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] +.master_only: &master_only + - if: $CI_COMMIT_BRANCH == "master" + when: on_success + default: tags: [ "arch:amd64" ] @@ -370,6 +375,11 @@ muzzle-dep-report: TESTCONTAINERS_RYUK_DISABLED: "true" TESTCONTAINERS_HUB_IMAGE_NAME_PREFIX: "registry.ddbuild.io/images/mirror/" JETTY_AVAILABLE_PROCESSORS: 4 # Jetty incorrectly calculates processor count in containers + rules: + - if: $testJvm =~ $DEFAULT_TEST_JVMS + when: on_success + - if: $CI_COMMIT_BRANCH == "master" + when: on_success script: - > if [ "$PROFILE_TESTS" == "true" ] && [ "$testJvm" != "ibm8" ] && [ "$testJvm" != "oracle8" ]; @@ -480,6 +490,7 @@ test_flaky: CACHE_TYPE: "base" testJvm: "8" CONTINUE_ON_FAILURE: "true" + rules: *master_only parallel: matrix: - GRADLE_TARGET: [":baseTest", ":smokeTest", ":debuggerTest"] @@ -495,6 +506,7 @@ test_flaky_inst: CACHE_TYPE: "inst" testJvm: "8" CONTINUE_ON_FAILURE: "true" + rules: *master_only parallel: matrix: # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time @@ -509,11 +521,14 @@ test_profiling: parallel: matrix: *test_matrix +# specific jvms list for debugger project because J9-based JVM have issues with local vars +# so need to test at least against one J9-based JVM test_debugger: extends: .test_job variables: GRADLE_TARGET: ":debuggerTest" CACHE_TYPE: "base" + DEFAULT_TEST_JVMS: /^(8|11|17|21|semeru8)$/ parallel: matrix: *test_matrix @@ -528,6 +543,7 @@ test_smoke: test_ssi_smoke: extends: .test_job + rules: *master_only variables: GRADLE_TARGET: "stageMainDist :smokeTest" CACHE_TYPE: "smoke" From 5c2c16188be6bd4f3d3cdf99705c35dfd664a72c Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 16 Apr 2025 13:53:41 -0400 Subject: [PATCH 097/113] use normal runner when TestContainers isn't needed --- .gitlab-ci.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b3a18364b2e..d9dcb3057dd 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -439,6 +439,7 @@ muzzle-dep-report: agent_integration_tests: extends: .test_job + tags: [ "arch:amd64" ] variables: testJvm: "8" CI_AGENT_HOST: local-agent @@ -534,6 +535,7 @@ test_debugger: test_smoke: extends: .test_job + tags: [ "arch:amd64" ] variables: GRADLE_TARGET: "stageMainDist :smokeTest" GRADLE_PARAMS: "-PskipFlakyTests" @@ -543,6 +545,7 @@ test_smoke: 
test_ssi_smoke: extends: .test_job + tags: [ "arch:amd64" ] rules: *master_only variables: GRADLE_TARGET: "stageMainDist :smokeTest" @@ -554,6 +557,7 @@ test_ssi_smoke: test_smoke_graalvm: extends: .test_job + tags: [ "arch:amd64" ] variables: GRADLE_TARGET: "stageMainDist :dd-smoke-test:spring-boot-3.0-native:test" CACHE_TYPE: "smoke" @@ -564,6 +568,7 @@ test_smoke_graalvm: test_smoke_semeru8_debugger: extends: .test_job + tags: [ "arch:amd64" ] variables: GRADLE_TARGET: "stageMainDist dd-smoke-tests:debugger-integration-tests:test" CACHE_TYPE: "smoke" From 67a45fef9133e22a266fca9be8908c183b543398 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 16 Apr 2025 14:30:41 -0400 Subject: [PATCH 098/113] skip forwarding header filter here too --- .../src/latestDepTest/groovy/test/boot/SpringBootServer.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/latestDepTest/groovy/test/boot/SpringBootServer.groovy b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/latestDepTest/groovy/test/boot/SpringBootServer.groovy index 32793911bbf..ed3e95d7abf 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/latestDepTest/groovy/test/boot/SpringBootServer.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/latestDepTest/groovy/test/boot/SpringBootServer.groovy @@ -22,7 +22,7 @@ class SpringBootServer implements WebsocketServer { @Override void start() { - app.setDefaultProperties(["server.port": 0, "server.servlet.context-path": "/$servletContext"]) + app.setDefaultProperties(["server.port": 0, "server.servlet.context-path": "/$servletContext", "server.forward-headers-strategy": "NONE"]) context = app.run() as ServletWebServerApplicationContext port = context.getWebServer().getPort() try { From 96cfd44b0850f1f77dda57a1fe2b163c1c1281c0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 16 Apr 2025 16:29:50 -0400 Subject: [PATCH 099/113] tweaks --- .gitlab-ci.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d9dcb3057dd..beb067d4b37 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -378,6 +378,8 @@ muzzle-dep-report: rules: - if: $testJvm =~ $DEFAULT_TEST_JVMS when: on_success + - if: $NON_DEFAULT_JVMS == "true" + when: on_success - if: $CI_COMMIT_BRANCH == "master" when: on_success script: @@ -535,7 +537,6 @@ test_debugger: test_smoke: extends: .test_job - tags: [ "arch:amd64" ] variables: GRADLE_TARGET: "stageMainDist :smokeTest" GRADLE_PARAMS: "-PskipFlakyTests" @@ -545,7 +546,6 @@ test_smoke: test_ssi_smoke: extends: .test_job - tags: [ "arch:amd64" ] rules: *master_only variables: GRADLE_TARGET: "stageMainDist :smokeTest" @@ -562,6 +562,7 @@ test_smoke_graalvm: GRADLE_TARGET: "stageMainDist :dd-smoke-test:spring-boot-3.0-native:test" CACHE_TYPE: "smoke" CI_NO_SPLIT: "true" + NON_DEFAULT_JVMS: "true" parallel: matrix: - testJvm: ["graalvm17", "graalvm21"] @@ -572,6 +573,7 @@ test_smoke_semeru8_debugger: variables: GRADLE_TARGET: "stageMainDist dd-smoke-tests:debugger-integration-tests:test" CACHE_TYPE: "smoke" + NON_DEFAULT_JVMS: "true" testJvm: "semeru8" required: From 69dd021ea892391adb3390821f62a1b303fd539f Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 17 Apr 2025 11:21:12 -0400 Subject: [PATCH 100/113] limit executor size and fix IPV4 assumptions --- .../trace/agent/test/base/HttpServerTest.groovy | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git 
a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy index 529a41510e7..e261aa58101 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy @@ -565,7 +565,10 @@ abstract class HttpServerTest extends WithHttpServer { def responses def request = request(SUCCESS, method, body).build() if (testParallelRequest()) { - def executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()) + // Limit pool size. Too many threads overwhelm the server and starve the host + // availableProcessors() in CI can be very high and incorrect depending on JDK version + def poolSize = Math.min(4, Runtime.getRuntime().availableProcessors()) + def executor = Executors.newFixedThreadPool(poolSize) def completionService = new ExecutorCompletionService(executor) (1..count).each { completionService.submit { @@ -1286,7 +1289,7 @@ abstract class HttpServerTest extends WithHttpServer { def traces = extraSpan ? 2 : 1 def extraTags = [(IG_RESPONSE_STATUS): String.valueOf(endpoint.status)] as Map if (hasPeerInformation()) { - extraTags.put(IG_PEER_ADDRESS, { it == "127.0.0.1" || it == "0.0.0.0" }) + extraTags.put(IG_PEER_ADDRESS, { it == "127.0.0.1" || it == "0.0.0.0" || it == "0:0:0:0:0:0:0:1" }) extraTags.put(IG_PEER_PORT, { Integer.parseInt(it as String) instanceof Integer }) } extraTags.put(IG_RESPONSE_HEADER_TAG, IG_RESPONSE_HEADER_VALUE) @@ -2208,8 +2211,13 @@ abstract class HttpServerTest extends WithHttpServer { if (hasPeerPort) { "$Tags.PEER_PORT" Integer } - "$Tags.PEER_HOST_IPV4" { it == "127.0.0.1" || (endpoint == FORWARDED && it == endpoint.body) } - "$Tags.HTTP_CLIENT_IP" { it == "127.0.0.1" || (endpoint == FORWARDED && it == endpoint.body) } + if(span.getTag(Tags.PEER_HOST_IPV6) != null) { + "$Tags.PEER_HOST_IPV6" { it == "0:0:0:0:0:0:0:1" || (endpoint == FORWARDED && it == endpoint.body) } + "$Tags.HTTP_CLIENT_IP" { it == "0:0:0:0:0:0:0:1" || (endpoint == FORWARDED && it == endpoint.body) } + } else { + "$Tags.PEER_HOST_IPV4" { it == "127.0.0.1" || (endpoint == FORWARDED && it == endpoint.body) } + "$Tags.HTTP_CLIENT_IP" { it == "127.0.0.1" || (endpoint == FORWARDED && it == endpoint.body) } + } } else { "$Tags.HTTP_CLIENT_IP" clientIp } From 0a21371f59994e0fa4c60c7fbf7455cf4223ec73 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 17 Apr 2025 13:26:34 -0400 Subject: [PATCH 101/113] try more workers with less memory --- .gitlab-ci.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index beb067d4b37..655e18a6b62 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -367,8 +367,9 @@ muzzle-dep-report: variables: KUBERNETES_MEMORY_REQUEST: 16Gi KUBERNETES_MEMORY_LIMIT: 16Gi - GRADLE_WORKERS: 4 - GRADLE_MEM: 3G + KUBERNETES_CPU_REQUEST: 10 + GRADLE_WORKERS: 6 + GRADLE_MEM: 2G GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" TESTCONTAINERS_CHECKS_DISABLE: "true" From 848a328c45babb24b9b9d8f0fc200901b8f3c8b0 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 17 Apr 2025 14:48:25 -0400 Subject: [PATCH 102/113] set max memory for all crashtracking tests --- .../test/java/datadog/smoketest/CrashtrackingSmokeTest.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 54d9290941c..e3d8b38659a 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -217,6 +217,8 @@ void testOomeTracking() throws Exception { Arrays.asList( javaPath(), "-javaagent:" + agentShadowJar(), + "-Xmx96m", + "-Xms96m", "-XX:OnOutOfMemoryError=" + onErrorValue, "-XX:ErrorFile=" + errorFile, "-XX:+CrashOnOutOfMemoryError", // Use OOME to trigger crash @@ -248,6 +250,8 @@ void testCombineTracking() throws Exception { Arrays.asList( javaPath(), "-javaagent:" + agentShadowJar(), + "-Xmx96m", + "-Xms96m", "-XX:OnOutOfMemoryError=" + onOomeValue, "-XX:OnError=" + onErrorValue, "-XX:ErrorFile=" + errorFile, From 196e96c761aff4cc04305b956e82c725a3cc82c8 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 17 Apr 2025 14:52:04 -0400 Subject: [PATCH 103/113] double retry --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 655e18a6b62..f44097066e6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -266,7 +266,7 @@ test_published_artifacts: - ./check_reports - '.gradle/daemon/*/*.out.log' retry: - max: 1 + max: 2 when: - unknown_failure - stuck_or_timeout_failure @@ -412,7 +412,7 @@ muzzle-dep-report: reports: junit: results/*.xml retry: - max: 1 + max: 2 when: - unknown_failure - stuck_or_timeout_failure From 4b14d00ab3d1bb06730506cfa74754ec2e13a491 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 17 Apr 2025 14:58:54 -0400 Subject: [PATCH 104/113] scale up memory, less jobs --- .gitlab-ci.yml | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f44097066e6..0d73f11b5cb 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -49,6 +49,10 @@ variables: # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time # These blocks emulate "parallel" by including it in the matrix +.test_matrix_2: &test_matrix_2 + - testJvm: *test_jvms + CI_SPLIT: ["1/2", "2/2"] + .test_matrix_4: &test_matrix_4 - testJvm: *test_jvms CI_SPLIT: ["1/4", "2/4", "3/4", "4/4"] @@ -365,11 +369,11 @@ muzzle-dep-report: needs: [ build_tests ] stage: tests variables: - KUBERNETES_MEMORY_REQUEST: 16Gi - KUBERNETES_MEMORY_LIMIT: 16Gi - KUBERNETES_CPU_REQUEST: 10 - GRADLE_WORKERS: 6 - GRADLE_MEM: 2G + KUBERNETES_MEMORY_REQUEST: 32Gi + KUBERNETES_MEMORY_LIMIT: 32Gi + KUBERNETES_CPU_REQUEST: 12 + GRADLE_WORKERS: 8 + GRADLE_MEM: 3G GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" TESTCONTAINERS_CHECKS_DISABLE: "true" @@ -473,7 +477,7 @@ test_inst: GRADLE_TARGET: ":instrumentationTest" CACHE_TYPE: "inst" parallel: - matrix: *test_matrix_12 + matrix: *test_matrix_6 test_inst_latest: extends: .test_job_with_test_agent @@ -485,7 +489,7 @@ test_inst_latest: - testJvm: ["8", "17", "21" ] # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time # This emulates "parallel" by including it in the matrix - CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] + CI_SPLIT: [ "1/6", "2/6", "3/6", "4/6", "5/6", "6/6"] test_flaky: extends: .test_job_with_test_agent @@ -515,7 +519,7 @@ test_flaky_inst: matrix: # Gitlab doesn't support "parallel" and 
"parallel:matrix" at the same time # This emulates "parallel" by including it in the matrix - - CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] + - CI_SPLIT: [ "1/6", "2/6", "3/6", "4/6", "5/6", "6/6"] test_profiling: extends: .test_job @@ -543,7 +547,7 @@ test_smoke: GRADLE_PARAMS: "-PskipFlakyTests" CACHE_TYPE: "smoke" parallel: - matrix: *test_matrix_4 + matrix: *test_matrix_2 test_ssi_smoke: extends: .test_job @@ -554,7 +558,7 @@ test_ssi_smoke: DD_INJECT_FORCE: "true" DD_INJECTION_ENABLED: "tracer" parallel: - matrix: *test_matrix_4 + matrix: *test_matrix_2 test_smoke_graalvm: extends: .test_job From f9d56d8b42fe37478c4eafcdd0f258dfd32d85c7 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Thu, 17 Apr 2025 19:23:36 -0400 Subject: [PATCH 105/113] revert memory bump, increase forked tests --- .gitlab-ci.yml | 14 +++++--------- gradle/configure_tests.gradle | 2 +- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 0d73f11b5cb..9b5ba9ff8c5 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -369,10 +369,10 @@ muzzle-dep-report: needs: [ build_tests ] stage: tests variables: - KUBERNETES_MEMORY_REQUEST: 32Gi - KUBERNETES_MEMORY_LIMIT: 32Gi - KUBERNETES_CPU_REQUEST: 12 - GRADLE_WORKERS: 8 + KUBERNETES_MEMORY_REQUEST: 16Gi + KUBERNETES_MEMORY_LIMIT: 16Gi + KUBERNETES_CPU_REQUEST: 10 + GRADLE_WORKERS: 4 GRADLE_MEM: 3G GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" @@ -515,11 +515,7 @@ test_flaky_inst: testJvm: "8" CONTINUE_ON_FAILURE: "true" rules: *master_only - parallel: - matrix: - # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time - # This emulates "parallel" by including it in the matrix - - CI_SPLIT: [ "1/6", "2/6", "3/6", "4/6", "5/6", "6/6"] + parallel: 6 test_profiling: extends: .test_job diff --git a/gradle/configure_tests.gradle b/gradle/configure_tests.gradle index 7f0065e9eb6..945a9e9665b 100644 --- a/gradle/configure_tests.gradle +++ b/gradle/configure_tests.gradle @@ -20,7 +20,7 @@ def isTestingInstrumentation(Project project) { } def forkedTestLimit = gradle.sharedServices.registerIfAbsent("forkedTestLimit", BuildService) { - maxParallelUsages = 2 + maxParallelUsages = 3 } // Force timeout after 9 minutes (CircleCI defaults will fail after 10 minutes without output) From b99ac8ddbf7c76f4e57186ddc1c8b8813a6bc472 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 18 Apr 2025 10:21:01 -0400 Subject: [PATCH 106/113] tweaks --- .gitlab-ci.yml | 6 +++--- .../trace/agent/test/server/http/TestHttpServer.groovy | 4 ++-- .../test/groovy/SpringBootNativeInstrumentationTest.groovy | 7 +------ 3 files changed, 6 insertions(+), 11 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 9b5ba9ff8c5..e003d67c76d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -184,7 +184,7 @@ build_tests: CACHE_TYPE: "latestdep" - GRADLE_TARGET: ":smokeTest" CACHE_TYPE: "smoke" - MAVEN_OPTS: "-Xms64M -Xmx512M -Dorg.slf4j.simpleLogger.defaultLogLevel=debug" # Fixme: Build :smokeTest build fails unless mvn debug logging is on + MAVEN_OPTS: "-Xms64M -Xmx512M -Dorg.slf4j.simpleLogger.defaultLogLevel=debug" # FIXME: Build :smokeTest build fails unless mvn debug logging is on script: - ./gradlew clean $GRADLE_TARGET -PskipTests $GRADLE_ARGS @@ -207,7 +207,7 @@ populate_dep_cache: CACHE_TYPE: "base" - GRADLE_TARGET: ":profilingTest" CACHE_TYPE: "profiling" -# Gitlab doesn't support s3 based caches >5GB. 
Fixed in Gitlab 17.5 +# FIXME: Gitlab doesn't support s3 based caches >5GB. Fixed in Gitlab 17.5 # See: https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26921#note_2132307223 # - GRADLE_TARGET: ":instrumentationTest" # CACHE_TYPE: "inst" @@ -525,7 +525,7 @@ test_profiling: parallel: matrix: *test_matrix -# specific jvms list for debugger project because J9-based JVM have issues with local vars +# specific jvms list for debugger project because J9-based JVMs have issues with local vars # so need to test at least against one J9-based JVM test_debugger: extends: .test_job diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy index e6fa205751a..ea140c777f1 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy @@ -83,8 +83,8 @@ class TestHttpServer implements AutoCloseable { } private TestHttpServer() { - // In some versions, Jetty requires max threads > than some arbitrary value - // The arbitrary value can be high in CI + // In some versions, Jetty requires max threads > than some arbitrary calculated value + // The calculated value can be high in CI // There is no easy way to override the configuration in a version-neutral way internalServer = new Server(new QueuedThreadPool(400)) diff --git a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy index 286f52b8e35..f5b0ca18e9d 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy +++ b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy @@ -39,8 +39,7 @@ class SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { '-Ddd.profiling.upload.period=1', '-Ddd.profiling.start-force-first=true', "-Ddd.profiling.debug.dump_path=${testJfrDir}", - "-Ddd.integration.spring-boot.enabled=true", - "-Ddd.trace.debug=true" + "-Ddd.integration.spring-boot.enabled=true" ]) ProcessBuilder processBuilder = new ProcessBuilder(command) processBuilder.directory(new File(buildDirectory)) @@ -112,8 +111,4 @@ class SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { }) return jfrCount.get() } - - def logLevel() { - return "debug" - } } From 19ac6b4797c05d084f1f0a38d321ad077dfaa74f Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 18 Apr 2025 10:28:20 -0400 Subject: [PATCH 107/113] add flag for nondefault jvms and flaky tests --- .gitlab-ci.yml | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index e003d67c76d..2e0ee9dbfa6 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -31,6 +31,12 @@ variables: PROFILE_TESTS: description: "Enable profiling of tests" value: "false" + NON_DEFAULT_JVMS: + description: "Enable tests on JVMs that are the default" + value: "false" + RUN_FLAKY_TESTS: + description: "Enable flaky tests" + value: "false" .test_matrix: &test_matrix - testJvm: &test_jvms @@ -498,7 +504,10 @@ test_flaky: CACHE_TYPE: "base" testJvm: "8" CONTINUE_ON_FAILURE: "true" - rules: *master_only + rules: + - *master_only + - if: $RUN_FLAKY_TESTS == "true" + when: on_success parallel: matrix: - GRADLE_TARGET: 
[":baseTest", ":smokeTest", ":debuggerTest"] @@ -514,7 +523,10 @@ test_flaky_inst: CACHE_TYPE: "inst" testJvm: "8" CONTINUE_ON_FAILURE: "true" - rules: *master_only + rules: + - *master_only + - if: $RUN_FLAKY_TESTS == "true" + when: on_success parallel: 6 test_profiling: From c3fd666da2c09894a7125cdd774e4d2fbe5a243f Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 6 May 2025 12:47:31 -0400 Subject: [PATCH 108/113] correct description --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 2e0ee9dbfa6..27deb86a22d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -32,7 +32,7 @@ variables: description: "Enable profiling of tests" value: "false" NON_DEFAULT_JVMS: - description: "Enable tests on JVMs that are the default" + description: "Enable tests on JVMs that are not the default" value: "false" RUN_FLAKY_TESTS: description: "Enable flaky tests" From 6e5f1a91bab11e2d15a3ec1e711fab62bdf233c1 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 6 May 2025 13:13:10 -0400 Subject: [PATCH 109/113] Externalize runtime processors override --- .gitlab-ci.yml | 1 + .../datadog/trace/agent/test/base/HttpServerTest.groovy | 3 +-- .../log-injection/src/test/resources/SpockConfig.groovy | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 27deb86a22d..edfe3525fce 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -112,6 +112,7 @@ default: KUBERNETES_MEMORY_REQUEST: 8Gi KUBERNETES_MEMORY_LIMIT: 8Gi CACHE_TYPE: lib #default + RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE: 4 # Runtime.getRuntime().availableProcessors() returns incorrect or very high values in Kubernetes cache: - key: '$CI_SERVER_VERSION-$CACHE_TYPE' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months paths: diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy index e261aa58101..0ace44fd948 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy @@ -566,8 +566,7 @@ abstract class HttpServerTest extends WithHttpServer { def request = request(SUCCESS, method, body).build() if (testParallelRequest()) { // Limit pool size. 
Too many threads overwhelm the server and starve the host - // availableProcessors() in CI can be very high and incorrect depending on JDK version - def poolSize = Math.min(4, Runtime.getRuntime().availableProcessors()) + def poolSize = System.getenv().getOrDefault("RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE", Runtime.getRuntime().availableProcessors()) def executor = Executors.newFixedThreadPool(poolSize) def completionService = new ExecutorCompletionService(executor) (1..count).each { diff --git a/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy b/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy index f9abbe2c07b..9e4c7f1f6e9 100644 --- a/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy +++ b/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy @@ -4,8 +4,8 @@ runner { // Runtime.getRuntime().availableProcessors() is used to scale the parallelism by default // but it returns weird values in Gitlab/kubernetes so fix the parallelism to a specific value - if (System.getenv("GITLAB_CI") != null) { - fixed(4) + if (System.getenv("RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE") != null) { + fixed(Integer.valueOf(System.getenv("RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE"))) } } } From 2b5d9ac5627f52cc0d7a611776d6378240e223e3 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 6 May 2025 13:15:48 -0400 Subject: [PATCH 110/113] ExceptionHistogramTest exclusions already handled by master --- .../jfr/exceptions/ExceptionHistogramTest.java | 6 ------ .../datadog/trace/agent/test/base/HttpServerTest.groovy | 3 ++- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java b/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java index d0062811eba..1b39118b3e1 100644 --- a/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java +++ b/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java @@ -21,7 +21,6 @@ import jdk.jfr.FlightRecorder; import jdk.jfr.Recording; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.openjdk.jmc.common.item.Aggregators; @@ -61,11 +60,6 @@ public boolean equals(final Object obj) { private Recording snapshot; private ExceptionHistogram instance; - @BeforeAll - public static void precheck() { - assumeFalse(Platform.isJ9()); - } - @BeforeEach public void setup() { assumeFalse(Platform.isJ9()); diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy index 55829985322..97c1ef36b83 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy @@ -575,7 +575,8 @@ abstract class HttpServerTest extends WithHttpServer { def request = request(SUCCESS, method, body).build() if (testParallelRequest()) { // Limit pool size. 
Too many threads overwhelm the server and starve the host - def poolSize = System.getenv().getOrDefault("RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE", Runtime.getRuntime().availableProcessors()) + def availableProcessorsOverride = System.getenv().get("RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE") + def poolSize = availableProcessorsOverride == null ? Runtime.getRuntime().availableProcessors() : Integer.valueOf(availableProcessorsOverride) def executor = Executors.newFixedThreadPool(poolSize) def completionService = new ExecutorCompletionService(executor) (1..count).each { From 0df23443b66e7f6c991ac4d03a1cfdba28b884fb Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 7 May 2025 12:39:49 -0400 Subject: [PATCH 111/113] increase spring native build memory --- dd-smoke-tests/spring-boot-3.0-native/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dd-smoke-tests/spring-boot-3.0-native/build.gradle b/dd-smoke-tests/spring-boot-3.0-native/build.gradle index ed37cbb264c..167af79baa2 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/build.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/build.gradle @@ -31,7 +31,7 @@ if (version >= 17) { tasks.register('springNativeBuild', Exec) { workingDir "$appDir" environment += [ - 'GRADLE_OPTS': "-Dorg.gradle.jvmargs='-Xmx512M'", + 'GRADLE_OPTS': "-Dorg.gradle.jvmargs='-Xmx1024M'", 'JAVA_HOME': javaHome, 'GRAALVM_HOME': testJvmHome, 'DD_TRACE_METHODS' : 'datadog.smoketest.springboot.controller.WebController[sayHello]', From 8d74d0e57e00919048f95db1b08a4f5ba7a07d92 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Wed, 7 May 2025 12:40:06 -0400 Subject: [PATCH 112/113] bump gradle build timeout --- .../groovy/datadog/smoketest/GradleLauncherSmokeTest.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dd-smoke-tests/gradle/src/test/groovy/datadog/smoketest/GradleLauncherSmokeTest.groovy b/dd-smoke-tests/gradle/src/test/groovy/datadog/smoketest/GradleLauncherSmokeTest.groovy index 094c46f5206..0fe155ab45e 100644 --- a/dd-smoke-tests/gradle/src/test/groovy/datadog/smoketest/GradleLauncherSmokeTest.groovy +++ b/dd-smoke-tests/gradle/src/test/groovy/datadog/smoketest/GradleLauncherSmokeTest.groovy @@ -9,7 +9,7 @@ import datadog.trace.civisibility.utils.ShellCommandExecutor */ class GradleLauncherSmokeTest extends AbstractGradleTest { - private static final int GRADLE_BUILD_TIMEOUT_MILLIS = 60_000 + private static final int GRADLE_BUILD_TIMEOUT_MILLIS = 90_000 private static final String AGENT_JAR = System.getProperty("datadog.smoketest.agent.shadowJar.path") From 99c67a320919869c68abb723ef0948cc46a66284 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Fri, 9 May 2025 16:09:38 -0400 Subject: [PATCH 113/113] bump graalvm compile memory --- dd-smoke-tests/spring-boot-3.0-native/application/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/dd-smoke-tests/spring-boot-3.0-native/application/build.gradle b/dd-smoke-tests/spring-boot-3.0-native/application/build.gradle index 992fa0eaa6f..26f88cf0a49 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/application/build.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/application/build.gradle @@ -40,6 +40,7 @@ if (hasProperty('agentPath')) { buildArgs.add("-J-Ddd.profiling.enabled=true") } buildArgs.add("--enable-monitoring=jmxserver") + jvmArgs.add("-Xmx3072M") } } }