diff --git a/.github/workflows/test-workflow.yml b/.github/workflows/test-workflow.yml
index c83a25fd5..22a79a69d 100644
--- a/.github/workflows/test-workflow.yml
+++ b/.github/workflows/test-workflow.yml
@@ -10,30 +10,50 @@ on:
 
 jobs:
   build:
+    env:
+      BUILD_ARGS: ${{ matrix.os_build_args }}
+      WORKING_DIR: ${{ matrix.working_directory }}.
     strategy:
       matrix:
         java: [11, 17]
+        os: [ ubuntu-latest, windows-latest, macos-latest ]
+        include:
+          - os: windows-latest
+            os_build_args: -x integTest -x jacocoTestReport
+            working_directory: X:\
+            os_java_options: -Xmx4096M
+          - os: macos-latest
+            os_build_args: -x integTest -x jacocoTestReport
     # Job name
-    name: Build Alerting with JDK ${{ matrix.java }}
+    name: Build Alerting with JDK ${{ matrix.java }} on ${{ matrix.os }}
     # This job runs on Linux
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os }}
     steps:
       # This step uses the checkout Github action: https://github.com/actions/checkout
       - name: Checkout Branch
        uses: actions/checkout@v2
+      # This is a hack, but this step creates a link to the X: mounted drive, which makes the path
+      # short enough to work on Windows
+      - name: Shorten Path
+        if: ${{ matrix.os == 'windows-latest' }}
+        run: subst 'X:' .
       # This step uses the setup-java Github action: https://github.com/actions/setup-java
       - name: Set Up JDK ${{ matrix.java }}
         uses: actions/setup-java@v1
         with:
           java-version: ${{ matrix.java }}
       - name: Build and run with Gradle
-        run: ./gradlew build
+        working-directory: ${{ env.WORKING_DIR }}
+        run: ./gradlew build ${{ env.BUILD_ARGS }}
+        env:
+          _JAVA_OPTIONS: ${{ matrix.os_java_options }}
       - name: Create Artifact Path
         run: |
           mkdir -p alerting-artifacts
           cp ./alerting/build/distributions/*.zip alerting-artifacts
       # This step uses the codecov-action Github action: https://github.com/codecov/codecov-action
       - name: Upload Coverage Report
+        if: ${{ matrix.os == 'ubuntu-latest' }}
         uses: codecov/codecov-action@v1
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -41,5 +61,5 @@ jobs:
       - name: Upload Artifacts
         uses: actions/upload-artifact@v1
         with:
-          name: alerting-plugin
+          name: alerting-plugin-${{ matrix.os }}
           path: alerting-artifacts
diff --git a/alerting/src/test/kotlin/org/opensearch/alerting/TriggerServiceTests.kt b/alerting/src/test/kotlin/org/opensearch/alerting/TriggerServiceTests.kt
index c99eb2b9b..f2f321112 100644
--- a/alerting/src/test/kotlin/org/opensearch/alerting/TriggerServiceTests.kt
+++ b/alerting/src/test/kotlin/org/opensearch/alerting/TriggerServiceTests.kt
@@ -37,19 +37,75 @@ class TriggerServiceTests : OpenSearchTestCase() {
         val trigger = randomBucketLevelTrigger(bucketSelector = bucketSelectorExtAggregationBuilder)
         val monitor = randomBucketLevelMonitor(triggers = listOf(trigger))
 
-        val inputResultsStr = "{\"_shards\":" +
-            "{\"total\":1,\"failed\":0,\"successful\":1,\"skipped\":0},\"hits\":{\"hits\":" +
-            "[{\"_index\":\"sample-http-responses\",\"_type\":\"http\",\"_source\":" +
-            "{\"status_code\":100,\"http_4xx\":0,\"http_3xx\":0,\"http_5xx\":0,\"http_2xx\":0," +
-            "\"timestamp\":100000,\"http_1xx\":1},\"_id\":1,\"_score\":1}],\"total\":{\"value\":4,\"relation\":\"eq\"}," +
-            "\"max_score\":1},\"took\":37,\"timed_out\":false,\"aggregations\":{\"status_code\":" +
-            "{\"doc_count_error_upper_bound\":0,\"sum_other_doc_count\":0,\"buckets\":[{\"doc_count\":2,\"key\":100}," +
-            "{\"doc_count\":1,\"key\":102},{\"doc_count\":1,\"key\":201}]},\"${trigger.id}\":{\"parent_bucket_path\":" +
-            "\"status_code\",\"bucket_indices\":[0,1,2]}}}"
-
-        val parser = XContentType.JSON.xContent().createParser(
-            NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, inputResultsStr
-        )
+        val inputResultsStr = "{\n" +
+            "  \"_shards\": {\n" +
+            "    \"total\": 1,\n" +
+            "    \"failed\": 0,\n" +
+            "    \"successful\": 1,\n" +
+            "    \"skipped\": 0\n" +
+            "  },\n" +
+            "  \"hits\": {\n" +
+            "    \"hits\": [\n" +
+            "      {\n" +
+            "        \"_index\": \"sample-http-responses\",\n" +
+            "        \"_type\": \"http\",\n" +
+            "        \"_source\": {\n" +
+            "          \"status_code\": 100,\n" +
+            "          \"http_4xx\": 0,\n" +
+            "          \"http_3xx\": 0,\n" +
+            "          \"http_5xx\": 0,\n" +
+            "          \"http_2xx\": 0,\n" +
+            "          \"timestamp\": 100000,\n" +
+            "          \"http_1xx\": 1\n" +
+            "        },\n" +
+            "        \"_id\": 1,\n" +
+            "        \"_score\": 1\n" +
+            "      }\n" +
+            "    ],\n" +
+            "    \"total\": {\n" +
+            "      \"value\": 4,\n" +
+            "      \"relation\": \"eq\"\n" +
+            "    },\n" +
+            "    \"max_score\": 1\n" +
+            "  },\n" +
+            "  \"took\": 37,\n" +
+            "  \"timed_out\": false,\n" +
+            "  \"aggregations\": {\n" +
+            "    \"status_code\": {\n" +
+            "      \"doc_count_error_upper_bound\": 0,\n" +
+            "      \"sum_other_doc_count\": 0,\n" +
+            "      \"buckets\": [\n" +
+            "        {\n" +
+            "          \"doc_count\": 2,\n" +
+            "          \"key\": 100\n" +
+            "        },\n" +
+            "        {\n" +
+            "          \"doc_count\": 1,\n" +
+            "          \"key\": 102\n" +
+            "        },\n" +
+            "        {\n" +
+            "          \"doc_count\": 1,\n" +
+            "          \"key\": 201\n" +
+            "        }\n" +
+            "      ]\n" +
+            "    },\n" +
+            "    \"${trigger.id}\": {\n" +
+            "      \"parent_bucket_path\": \"status_code\",\n" +
+            "      \"bucket_indices\": [\n" +
+            "        0,\n" +
+            "        1,\n" +
+            "        2\n" +
+            "      ]\n" +
+            "    }\n" +
+            "  }\n" +
+            "}"
+
+        val parser = XContentType.JSON.xContent()
+            .createParser(
+                NamedXContentRegistry.EMPTY,
+                DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+                inputResultsStr
+            )
 
         val inputResults = parser.map()
 
@@ -70,23 +126,127 @@ class TriggerServiceTests : OpenSearchTestCase() {
         val trigger = randomBucketLevelTrigger(bucketSelector = bucketSelectorExtAggregationBuilder)
         val monitor = randomBucketLevelMonitor(triggers = listOf(trigger))
 
-        val inputResultsStr = "{\"_shards\":{\"total\":1, \"failed\":0, \"successful\":1, \"skipped\":0}, \"hits\":{\"hits\":" +
-            "[{\"_index\":\"sample-http-responses\", \"_type\":\"http\", \"_source\":{\"status_code\":100, \"http_4xx\":0," +
-            " \"http_3xx\":0, \"http_5xx\":0, \"http_2xx\":0, \"timestamp\":100000, \"http_1xx\":1}, \"_id\":1, \"_score\":1.0}, " +
-            "{\"_index\":\"sample-http-responses\", \"_type\":\"http\", \"_source\":{\"status_code\":102, \"http_4xx\":0, " +
-            "\"http_3xx\":0, \"http_5xx\":0, \"http_2xx\":0, \"timestamp\":160000, \"http_1xx\":1}, \"_id\":2, \"_score\":1.0}, " +
-            "{\"_index\":\"sample-http-responses\", \"_type\":\"http\", \"_source\":{\"status_code\":100, \"http_4xx\":0, " +
-            "\"http_3xx\":0, \"http_5xx\":0, \"http_2xx\":0, \"timestamp\":220000, \"http_1xx\":1}, \"_id\":4, \"_score\":1.0}, " +
-            "{\"_index\":\"sample-http-responses\", \"_type\":\"http\", \"_source\":{\"status_code\":201, \"http_4xx\":0, " +
-            "\"http_3xx\":0, \"http_5xx\":0, \"http_2xx\":1, \"timestamp\":280000, \"http_1xx\":0}, \"_id\":5, \"_score\":1.0}]," +
-            " \"total\":{\"value\":4, \"relation\":\"eq\"}, \"max_score\":1.0}, \"took\":15, \"timed_out\":false, \"aggregations\":" +
-            "{\"${trigger.id}\":{\"parent_bucket_path\":\"status_code\", \"bucket_indices\":[0, 1, 2]}, \"status_code\":{\"buckets\":" +
-            "[{\"doc_count\":2, \"key\":{\"status_code\":100}}, {\"doc_count\":1, \"key\":{\"status_code\":102}}, {\"doc_count\":1," +
-            " \"key\":{\"status_code\":201}}], \"after_key\":{\"status_code\":201}}}}"
-
-        val parser = XContentType.JSON.xContent().createParser(
-            NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, inputResultsStr
-        )
+        val inputResultsStr = "{\n" +
+            "  \"_shards\": {\n" +
+            "    \"total\": 1,\n" +
+            "    \"failed\": 0,\n" +
+            "    \"successful\": 1,\n" +
+            "    \"skipped\": 0\n" +
+            "  },\n" +
+            "  \"hits\": {\n" +
+            "    \"hits\": [\n" +
+            "      {\n" +
+            "        \"_index\": \"sample-http-responses\",\n" +
+            "        \"_type\": \"http\",\n" +
+            "        \"_source\": {\n" +
+            "          \"status_code\": 100,\n" +
+            "          \"http_4xx\": 0,\n" +
+            "          \"http_3xx\": 0,\n" +
+            "          \"http_5xx\": 0,\n" +
+            "          \"http_2xx\": 0,\n" +
+            "          \"timestamp\": 100000,\n" +
+            "          \"http_1xx\": 1\n" +
+            "        },\n" +
+            "        \"_id\": 1,\n" +
+            "        \"_score\": 1\n" +
+            "      },\n" +
+            "      {\n" +
+            "        \"_index\": \"sample-http-responses\",\n" +
+            "        \"_type\": \"http\",\n" +
+            "        \"_source\": {\n" +
+            "          \"status_code\": 102,\n" +
+            "          \"http_4xx\": 0,\n" +
+            "          \"http_3xx\": 0,\n" +
+            "          \"http_5xx\": 0,\n" +
+            "          \"http_2xx\": 0,\n" +
+            "          \"timestamp\": 160000,\n" +
+            "          \"http_1xx\": 1\n" +
+            "        },\n" +
+            "        \"_id\": 2,\n" +
+            "        \"_score\": 1\n" +
+            "      },\n" +
+            "      {\n" +
+            "        \"_index\": \"sample-http-responses\",\n" +
+            "        \"_type\": \"http\",\n" +
+            "        \"_source\": {\n" +
+            "          \"status_code\": 100,\n" +
+            "          \"http_4xx\": 0,\n" +
+            "          \"http_3xx\": 0,\n" +
+            "          \"http_5xx\": 0,\n" +
+            "          \"http_2xx\": 0,\n" +
+            "          \"timestamp\": 220000,\n" +
+            "          \"http_1xx\": 1\n" +
+            "        },\n" +
+            "        \"_id\": 4,\n" +
+            "        \"_score\": 1\n" +
+            "      },\n" +
+            "      {\n" +
+            "        \"_index\": \"sample-http-responses\",\n" +
+            "        \"_type\": \"http\",\n" +
+            "        \"_source\": {\n" +
+            "          \"status_code\": 201,\n" +
+            "          \"http_4xx\": 0,\n" +
+            "          \"http_3xx\": 0,\n" +
+            "          \"http_5xx\": 0,\n" +
+            "          \"http_2xx\": 1,\n" +
+            "          \"timestamp\": 280000,\n" +
+            "          \"http_1xx\": 0\n" +
+            "        },\n" +
+            "        \"_id\": 5,\n" +
+            "        \"_score\": 1\n" +
+            "      }\n" +
+            "    ],\n" +
+            "    \"total\": {\n" +
+            "      \"value\": 4,\n" +
+            "      \"relation\": \"eq\"\n" +
+            "    },\n" +
+            "    \"max_score\": 1\n" +
+            "  },\n" +
+            "  \"took\": 15,\n" +
+            "  \"timed_out\": false,\n" +
+            "  \"aggregations\": {\n" +
+            "    \"${trigger.id}\": {\n" +
+            "      \"parent_bucket_path\": \"status_code\",\n" +
+            "      \"bucket_indices\": [\n" +
+            "        0,\n" +
+            "        1,\n" +
+            "        2\n" +
+            "      ]\n" +
+            "    },\n" +
+            "    \"status_code\": {\n" +
+            "      \"buckets\": [\n" +
+            "        {\n" +
+            "          \"doc_count\": 2,\n" +
+            "          \"key\": {\n" +
+            "            \"status_code\": 100\n" +
+            "          }\n" +
+            "        },\n" +
+            "        {\n" +
+            "          \"doc_count\": 1,\n" +
+            "          \"key\": {\n" +
+            "            \"status_code\": 102\n" +
+            "          }\n" +
+            "        },\n" +
+            "        {\n" +
+            "          \"doc_count\": 1,\n" +
+            "          \"key\": {\n" +
+            "            \"status_code\": 201\n" +
+            "          }\n" +
+            "        }\n" +
+            "      ],\n" +
+            "      \"after_key\": {\n" +
+            "        \"status_code\": 201\n" +
+            "      }\n" +
+            "    }\n" +
+            "  }\n" +
+            "}"
+
+        val parser = XContentType.JSON.xContent()
+            .createParser(
+                NamedXContentRegistry.EMPTY,
+                DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+                inputResultsStr
+            )
 
         val inputResults = parser.map()
 
diff --git a/scripts/build.sh b/scripts/build.sh
index 2e621271c..5a173adfb 100755
--- a/scripts/build.sh
+++ b/scripts/build.sh
@@ -66,7 +66,7 @@ fi
 
 mkdir -p $OUTPUT/plugins
 
-./gradlew assemble --no-daemon --refresh-dependencies -DskipTests=true -Dopensearch.version=$VERSION -Dbuild.version_qualifier=$QUALIFIER -Dbuild.snapshot=$SNAPSHOT -x ktlint
+./gradlew assemble --no-daemon --refresh-dependencies -DskipTests=true -Dopensearch.version=$VERSION -Dbuild.version_qualifier=$QUALIFIER -Dbuild.snapshot=$SNAPSHOT
 
 zipPath=$(find . -path \*build/distributions/*.zip)
 distributions="$(dirname "${zipPath}")"
@@ -74,6 +74,6 @@ distributions="$(dirname "${zipPath}")"
 echo "COPY ${distributions}/*.zip"
 cp ${distributions}/*.zip ./$OUTPUT/plugins
 
-./gradlew publishPluginZipPublicationToZipStagingRepository -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER -x ktlint
+./gradlew publishPluginZipPublicationToZipStagingRepository -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER
 mkdir -p $OUTPUT/maven/org/opensearch
 cp -r ./build/local-staging-repo/org/opensearch/. $OUTPUT/maven/org/opensearch