[DO NOT MERGE] Run all PostCommit and PreCommit Tests against Release Branch #3843

GitHub Actions / Test Results failed Feb 5, 2025 in 0s

2 fail, 19 skipped, 1 pass in 36m 30s

22 tests  +22    1 ✅ passed  +1    36m 30s ⏱️ +36m 30s
 1 suites  +1   19 💤 skipped +19
 1 files   +1    2 ❌ failed   +2

Results for commit bd45f07. Comparison against earlier commit 0555e62.

Annotations

Check warning on line 0 in apache_beam.examples.wordcount_it_test.WordCountIT


github-actions / Test Results

test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) failed

sdks/python/pytest-beam_python3.11_sdk.xml [took 3s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:11:12 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '491', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(ee0a15594bfa4c23): The workflow could not be created. Causes: (8525f2a8d802af5): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 299 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.examples.wordcount_it_test.WordCountIT testMethod=test_wordcount_it>

    @pytest.mark.it_postcommit
    @pytest.mark.it_validatescontainer
    def test_wordcount_it(self):
>     self._run_wordcount_it(wordcount.run)

apache_beam/examples/wordcount_it_test.py:50: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/wordcount_it_test.py:150: in _run_wordcount_it
    run_wordcount(
apache_beam/examples/wordcount.py:109: in run
    result = pipeline.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7c48338e4c50>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205181110950328-5807'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:11:12 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '491', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(ee0a15594bfa4c23): The workflow could not be created. Causes: (8525f2a8d802af5): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 299 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
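
The first failure is environmental rather than a regression in the release branch: the apache-beam-testing project was already running 299 Dataflow jobs, so the jobs-per-project quota rejected the submission with FAILED_PRECONDITION before the pipeline ever started. A rough pre-flight check before re-running is sketched below; it assumes google-api-python-client is installed and Application Default Credentials are configured, and count_active_jobs is a hypothetical helper, not part of the Beam test suite.

    # Minimal sketch: count ACTIVE Dataflow jobs so a rerun is not wasted on the
    # same jobs-per-project quota error. google-api-python-client and ambient
    # GCP credentials are assumed; this helper is illustrative, not Beam code.
    from googleapiclient.discovery import build


    def count_active_jobs(project='apache-beam-testing', location='us-central1'):
        dataflow = build('dataflow', 'v1b3')
        jobs = dataflow.projects().locations().jobs()
        request = jobs.list(projectId=project, location=location, filter='ACTIVE')
        total = 0
        while request is not None:
            response = request.execute()
            total += len(response.get('jobs', []))
            request = jobs.list_next(request, response)  # follows pageToken
        return total


    if __name__ == '__main__':
        print(count_active_jobs(), 'active jobs; the quota error fired at 299.')

Once the active-job count drops well below the quota, re-running this test should clear the first failure without any code change.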

Check warning on line 0 in apache_beam.examples.wordcount_it_test.WordCountIT


github-actions / Test Results

test_wordcount_it_with_prebuilt_sdk_container_cloud_build (apache_beam.examples.wordcount_it_test.WordCountIT) failed

sdks/python/pytest-beam_python3.11_sdk.xml [took 20m 51s]
Raw output
apache_beam.runners.dataflow.dataflow_runner.DataflowRuntimeException: Dataflow pipeline failed. State: FAILED, Error:
Workflow failed.
self = <apache_beam.examples.wordcount_it_test.WordCountIT testMethod=test_wordcount_it_with_prebuilt_sdk_container_cloud_build>

    @pytest.mark.it_validatescontainer
    def test_wordcount_it_with_prebuilt_sdk_container_cloud_build(self):
>     self._run_wordcount_it(
          wordcount.run,
          experiment='beam_fn_api',
          prebuild_sdk_container_engine='cloud_build')

apache_beam/examples/wordcount_it_test.py:102: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/wordcount_it_test.py:150: in _run_wordcount_it
    run_wordcount(
apache_beam/examples/wordcount.py:109: in run
    result = pipeline.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:66: in run_pipeline
    self.result.wait_until_finish(duration=wait_duration)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <DataflowPipelineResult <Job
 clientRequestId: '20250205181407638982-2424'
 createTime: '2025-02-05T18:14:08.855364Z'
...025-02-05T18:14:08.855364Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)> at 0x7c4833af06d0>
duration = None

    def wait_until_finish(self, duration=None):
      if not self.is_in_terminal_state():
        if not self.has_job:
          raise IOError('Failed to get the Dataflow job id.')
        consoleUrl = (
            "Console URL: https://console.cloud.google.com/"
            f"dataflow/jobs/<RegionId>/{self.job_id()}"
            "?project=<ProjectId>")
        thread = threading.Thread(
            target=DataflowRunner.poll_for_job_completion,
            args=(self._runner, self, duration))
    
        # Mark the thread as a daemon thread so a keyboard interrupt on the main
        # thread will terminate everything. This is also the reason we will not
        # use thread.join() to wait for the polling thread.
        thread.daemon = True
        thread.start()
        while thread.is_alive():
          time.sleep(5.0)
    
        # TODO: Merge the termination code in poll_for_job_completion and
        # is_in_terminal_state.
        terminated = self.is_in_terminal_state()
        assert duration or terminated, (
            'Job did not reach to a terminal state after waiting indefinitely. '
            '{}'.format(consoleUrl))
    
        if terminated and self.state != PipelineState.DONE:
          # TODO(BEAM-1290): Consider converting this to an error log based on
          # the resolution of the issue.
          _LOGGER.error(consoleUrl)
>         raise DataflowRuntimeException(
              'Dataflow pipeline failed. State: %s, Error:\n%s' %
              (self.state, getattr(self._runner, 'last_error_msg', None)),
E             apache_beam.runners.dataflow.dataflow_runner.DataflowRuntimeException: Dataflow pipeline failed. State: FAILED, Error:
E             Workflow failed.

apache_beam/runners/dataflow/dataflow_runner.py:807: DataflowRuntimeException
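
The second failure is a genuine pipeline failure: the job reached FAILED after roughly 20 minutes and wait_until_finish raised with only "Workflow failed.", so the underlying cause has to be read from the Dataflow console URL the runner logs just before raising. The sketch below shows one way to reproduce the same configuration outside pytest with a bounded wait; the project, bucket, and output paths are placeholders, while --experiments, --prebuild_sdk_container_engine, and wait_until_finish(duration=...) (milliseconds) are existing Beam options.

    # Minimal repro sketch, not the test harness. Placeholders: my-project,
    # gs://my-bucket/... Swap in real values before running.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions
    from apache_beam.runners.dataflow.dataflow_runner import DataflowRuntimeException

    options = PipelineOptions([
        '--runner=DataflowRunner',
        '--project=my-project',                # placeholder
        '--region=us-central1',
        '--temp_location=gs://my-bucket/tmp',  # placeholder
        '--experiments=beam_fn_api',
        '--prebuild_sdk_container_engine=cloud_build',
    ])

    p = beam.Pipeline(options=options)
    _ = (
        p
        | 'Read' >> beam.io.ReadFromText(
            'gs://dataflow-samples/shakespeare/kinglear.txt')
        | 'Split' >> beam.FlatMap(str.split)
        | 'Count' >> beam.combiners.Count.PerElement()
        | 'Format' >> beam.MapTuple(lambda word, n: '%s: %d' % (word, n))
        | 'Write' >> beam.io.WriteToText('gs://my-bucket/out'))  # placeholder

    result = p.run()
    try:
        # duration is in milliseconds; bound the poll instead of waiting forever.
        result.wait_until_finish(duration=30 * 60 * 1000)
    except DataflowRuntimeException as exc:
        # The runner logs the console URL right before raising; the exception
        # message carries last_error_msg ('Workflow failed.' in this run).
        print('pipeline failed:', exc)
    print('final state:', result.state)

Because the exception message only surfaces last_error_msg, the job and worker logs behind the console URL are the place to look for whichever step of the cloud_build container prebuild or the job itself actually failed.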