
[DO NOT MERGE] Run all PostCommit and PreCommit Tests against Release Branch #3843

GitHub Actions / Test Results failed Feb 5, 2025 in 0s.

47 fail, 55 skipped, 62 pass in 1h 25m 24s

  3 files  +  3    3 suites  +3   1h 25m 24s ⏱️ + 1h 25m 24s
164 tests +164   62 ✅ + 62  55 💤 +55  47 ❌ +47 
216 runs  +216  109 ✅ +109  60 💤 +60  47 ❌ +47 

Results for commit bd45f07. ± Comparison against earlier commit 0555e62.
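
All 47 failures below share a single root cause: the apache-beam-testing project hit the Dataflow jobs-per-project quota (300 concurrent jobs), so job creation is rejected with HTTP 400 / FAILED_PRECONDITION before any pipeline code runs. As a minimal sketch, assuming the apitools HttpError's content attribute holds the JSON body shown in these logs (the helper name is hypothetical, not part of Beam or this CI setup), one could classify the error so a re-run script can tell quota exhaustion apart from a genuine bad request:

import json

from apitools.base.py import exceptions


def is_jobs_per_project_quota_error(err):
    # Hypothetical helper, not a Beam API: True when an apitools HttpError
    # carries the Dataflow jobs-per-project quota message seen in the
    # annotations below, rather than some other bad request.
    if not isinstance(err, exceptions.HttpError):
        return False
    try:
        payload = json.loads(err.content or '')
    except (TypeError, ValueError):
        return False
    error = payload.get('error', {}) if isinstance(payload, dict) else {}
    return (error.get('status') == 'FAILED_PRECONDITION' and
            'jobs-per-project' in error.get('message', ''))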

Annotations

Check warning in apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT

1 out of 2 runs failed: test_bigquery_read_custom_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:32 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(837e696e5e8c02b4): The workflow could not be created. Causes: (b824c80242a1ece1): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT testMethod=test_bigquery_read_custom_1M_python>

    @pytest.mark.it_postcommit
    def test_bigquery_read_custom_1M_python(self):
>     self.run_bigquery_io_read_pipeline('1M', True)

apache_beam/io/gcp/bigquery_io_read_it_test.py:64: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/io/gcp/bigquery_io_read_it_test.py:59: in run_bigquery_io_read_pipeline
    bigquery_io_read_pipeline.run(
apache_beam/io/gcp/bigquery_io_read_pipeline.py:78: in run
    with TestPipeline(options=options) as p:
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d0398850>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175231188859-9903'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:32 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(837e696e5e8c02b4): The workflow could not be created. Causes: (b824c80242a1ece1): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
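
Note that apache_beam/utils/retry.py appears in this trace because job creation is wrapped in its with_exponential_backoff decorator, but the default filters only retry server-side (5xx) errors, so this 400 quota error is raised on the first attempt. A minimal sketch of waiting the quota out instead, reusing the hypothetical is_jobs_per_project_quota_error helper sketched above (submit_fn and the backoff parameters are assumptions, not Beam APIs):

import random
import time

from apitools.base.py import exceptions


def submit_with_quota_backoff(submit_fn, max_attempts=5):
    # Hypothetical wrapper: submit_fn is a zero-argument callable that
    # performs the job creation, e.g. a closure around
    # apiclient.submit_job_description(job).
    for attempt in range(1, max_attempts + 1):
        try:
            return submit_fn()
        except exceptions.HttpBadRequestError as err:
            if (not is_jobs_per_project_quota_error(err)
                    or attempt == max_attempts):
                raise  # a genuine bad request, or out of attempts
            # Back off with jitter while other Dataflow jobs finish.
            time.sleep(min(300, 10 * 2 ** attempt) + random.uniform(0, 5))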

Check warning in apache_beam.examples.wordcount_it_test.WordCountIT

1 out of 2 runs failed: test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:19 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(ddd528817423cd88): The workflow could not be created. Causes: (af0e86bfb8f7a389): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.examples.wordcount_it_test.WordCountIT testMethod=test_wordcount_it>

    @pytest.mark.it_postcommit
    @pytest.mark.it_validatescontainer
    def test_wordcount_it(self):
>     self._run_wordcount_it(wordcount.run)

apache_beam/examples/wordcount_it_test.py:50: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/wordcount_it_test.py:150: in _run_wordcount_it
    run_wordcount(
apache_beam/examples/wordcount.py:109: in run
    result = pipeline.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d2278c50>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175218748506-2698'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:19 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(ddd528817423cd88): The workflow could not be created. Causes: (af0e86bfb8f7a389): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning in apache_beam.io.gcp.bigquery_read_it_test.ReadTests

1 out of 3 runs failed: test_native_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:44:57 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(3b2c8be57f31f5f3): The workflow could not be created. Causes: (3e644eccfbb437c2): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadTests testMethod=test_native_source>

    @skip(['PortableRunner', 'FlinkRunner'])
    @pytest.mark.it_postcommit
    def test_native_source(self):
>     with beam.Pipeline(argv=self.args) as p:

apache_beam/io/gcp/bigquery_read_it_test.py:167: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7d7f9308ad90>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205174456656429-9154'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:44:57 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(3b2c8be57f31f5f3): The workflow could not be created. Causes: (3e644eccfbb437c2): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning in apache_beam.io.gcp.bigquery_read_it_test.ReadTests

1 out of 3 runs failed: test_table_schema_retrieve_specifying_only_table (apache_beam.io.gcp.bigquery_read_it_test.ReadTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:45:04 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(ba899491fe45eb35): The workflow could not be created. Causes: (d5cea2b75da06ea9): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadTests testMethod=test_table_schema_retrieve_specifying_only_table>

    @pytest.mark.it_postcommit
    def test_table_schema_retrieve_specifying_only_table(self):
      the_table = bigquery_tools.BigQueryWrapper().get_table(
          project_id="apache-beam-testing",
          dataset_id="beam_bigquery_io_test",
          table_id="table_schema_retrieve")
      table = the_table.schema
      utype = bigquery_schema_tools.\
          generate_user_type_from_bq_schema(table)
>     with beam.Pipeline(argv=self.args) as p:

apache_beam/io/gcp/bigquery_read_it_test.py:233: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7d7f93fa9410>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205174502974236-4537'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:45:04 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(ba899491fe45eb35): The workflow could not be created. Causes: (d5cea2b75da06ea9): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning in apache_beam.io.gcp.bigquery_read_it_test.ReadTests

1 out of 3 runs failed: test_iobase_source (apache_beam.io.gcp.bigquery_read_it_test.ReadTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 8s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:44:55 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(b547fb3e3ae0ef0c): The workflow could not be created. Causes: (6001822ad492c10d): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadTests testMethod=test_iobase_source>

    @pytest.mark.it_postcommit
    def test_iobase_source(self):
      query = StaticValueProvider(str, self.query)
>     with beam.Pipeline(argv=self.args) as p:

apache_beam/io/gcp/bigquery_read_it_test.py:176: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7d7f93603910>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205174453697696-9986'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:44:55 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(b547fb3e3ae0ef0c): The workflow could not be created. Causes: (6001822ad492c10d): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning in apache_beam.io.gcp.bigquery_read_it_test.ReadTests

1 out of 3 runs failed: test_table_schema_retrieve_with_direct_read (apache_beam.io.gcp.bigquery_read_it_test.ReadTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:45:06 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(b72a4abc0ea9d654): The workflow could not be created. Causes: (54fec71beda8e995): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadTests testMethod=test_table_schema_retrieve_with_direct_read>

    @pytest.mark.it_postcommit
    def test_table_schema_retrieve_with_direct_read(self):
      the_table = bigquery_tools.BigQueryWrapper().get_table(
          project_id="apache-beam-testing",
          dataset_id="beam_bigquery_io_test",
          table_id="table_schema_retrieve")
      table = the_table.schema
      utype = bigquery_schema_tools.\
          generate_user_type_from_bq_schema(table)
>     with beam.Pipeline(argv=self.args) as p:

apache_beam/io/gcp/bigquery_read_it_test.py:275: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7d7f9305c890>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205174505651981-9893'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:45:06 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(b72a4abc0ea9d654): The workflow could not be created. Causes: (54fec71beda8e995): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning in apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests

1 out of 3 runs failed: test_iobase_source_with_native_datetime (apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:03:45 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(fbd5f4a8fe506b48): The workflow could not be created. Causes: (2423c842d5ba1210): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests testMethod=test_iobase_source_with_native_datetime>

    @pytest.mark.it_postcommit
    def test_iobase_source_with_native_datetime(self):
      EXPECTED_TABLE_DATA = [
          {
              'number': 1,
              'string': '你好',
              'time': datetime.time(12, 44, 31),
              'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
              'rec': None,
          },
          {
              'number': 4,
              'string': 'привет',
              'time': datetime.time(12, 44, 31),
              'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
              'rec': {
                  'rec_datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
                  'rec_rec': {
                      'rec_rec_datetime': datetime.datetime(
                          2018, 12, 31, 12, 44, 31)
                  }
              },
          }
      ]
>     with beam.Pipeline(argv=self.args) as p:

apache_beam/io/gcp/bigquery_read_it_test.py:479: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7d7f93204890>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205180343711142-5813'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:03:45 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(fbd5f4a8fe506b48): The workflow could not be created. Causes: (2423c842d5ba1210): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning in apache_beam.io.gcp.bigquery_read_it_test.ReadTests

1 out of 3 runs failed: test_table_schema_retrieve (apache_beam.io.gcp.bigquery_read_it_test.ReadTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 3s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:45:00 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(b5b985a4c5600d64): The workflow could not be created. Causes: (d2345f4268c2903d): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadTests testMethod=test_table_schema_retrieve>

    @pytest.mark.it_postcommit
    def test_table_schema_retrieve(self):
      the_table = bigquery_tools.BigQueryWrapper().get_table(
          project_id="apache-beam-testing",
          dataset_id="beam_bigquery_io_test",
          table_id="table_schema_retrieve")
      table = the_table.schema
      utype = bigquery_schema_tools.\
          generate_user_type_from_bq_schema(table)
>     with beam.Pipeline(argv=self.args) as p:

apache_beam/io/gcp/bigquery_read_it_test.py:191: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7d7f93344c50>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205174459520289-4559'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:45:00 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(b5b985a4c5600d64): The workflow could not be created. Causes: (d2345f4268c2903d): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning in apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests

1 out of 3 runs failed: test_iobase_source_with_query (apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:03:47 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(580fe224e0705b66): The workflow could not be created. Causes: (7eb4a88867bd300b): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests testMethod=test_iobase_source_with_query>

    @pytest.mark.it_postcommit
    def test_iobase_source_with_query(self):
      EXPECTED_TABLE_DATA = [
          {
              'number': 1,
              'string': '你好',
              'time': datetime.time(12, 44, 31),
              'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
              'rec': None,
          },
          {
              'number': 4,
              'string': 'привет',
              'time': datetime.time(12, 44, 31),
              'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
              'rec': {
                  'rec_datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
                  'rec_rec': {
                      'rec_rec_datetime': datetime.datetime(
                          2018, 12, 31, 12, 44, 31)
                  }
              },
          }
      ]
      query = StaticValueProvider(str, self.query)
>     with beam.Pipeline(argv=self.args) as p:

apache_beam/io/gcp/bigquery_read_it_test.py:579: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7d7f93653910>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205180346295113-4538'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:03:47 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(580fe224e0705b66): The workflow could not be created. Causes: (7eb4a88867bd300b): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning in apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest

1 out of 2 runs failed: test_streaming_data_only (apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest)

sdks/python/pytest_postCommitIT-df-py311.xml [took 41s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:53:13 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(d1b7b3cc0a64fa03): The workflow could not be created. Causes: (e2a573435be7476f): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.pubsub_integration_test.PubSubIntegrationTest testMethod=test_streaming_data_only>

    @pytest.mark.it_postcommit
    def test_streaming_data_only(self):
>     self._test_streaming(with_attributes=False)

apache_beam/io/gcp/pubsub_integration_test.py:217: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/io/gcp/pubsub_integration_test.py:209: in _test_streaming
    pubsub_it_pipeline.run_pipeline(
apache_beam/io/gcp/pubsub_it_pipeline.py:93: in run_pipeline
    result = p.run()
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d22d8f90>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175311776793-4186'
 environment: <En...iles: []
 type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:53:13 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(d1b7b3cc0a64fa03): The workflow could not be created. Causes: (e2a573435be7476f): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests

github-actions / Test Results

1 out of 3 runs failed: test_iobase_source_with_row_restriction (apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:11:32 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(11f169f2c09bf1cd): The workflow could not be created. Causes: (2ee4f4f5645d7dd2): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests testMethod=test_iobase_source_with_row_restriction>

    @pytest.mark.it_postcommit
    def test_iobase_source_with_row_restriction(self):
      EXPECTED_TABLE_DATA = [{
          'number': 1,
          'string': '你好',
          'time': datetime.time(12, 44, 31),
          'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31),
          'rec': None
      }]
>     with beam.Pipeline(argv=self.args) as p:

apache_beam/io/gcp/bigquery_read_it_test.py:507: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7d7f92d2c890>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205181130863064-3018'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:11:32 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(11f169f2c09bf1cd): The workflow could not be created. Causes: (2ee4f4f5645d7dd2): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT

github-actions / Test Results

1 out of 2 runs failed: test_bigquery_read_1M_python (apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:29 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '490', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(efa3a1539c0fe6e): The workflow could not be created. Causes: (e292a9e8565ca69): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_io_read_it_test.BigqueryIOReadIT testMethod=test_bigquery_read_1M_python>

    @pytest.mark.it_postcommit
    def test_bigquery_read_1M_python(self):
>     self.run_bigquery_io_read_pipeline('1M')

apache_beam/io/gcp/bigquery_io_read_it_test.py:68: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/io/gcp/bigquery_io_read_it_test.py:59: in run_bigquery_io_read_pipeline
    bigquery_io_read_pipeline.run(
apache_beam/io/gcp/bigquery_io_read_pipeline.py:78: in run
    with TestPipeline(options=options) as p:
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d21d1910>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175228552655-3306'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:29 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '490', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(efa3a1539c0fe6e): The workflow could not be created. Causes: (e292a9e8565ca69): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT

github-actions / Test Results

1 out of 2 runs failed: test_big_query_new_types_avro (apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT)

sdks/python/pytest_postCommitIT-df-py311.xml [took 4s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:57:59 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(212fe8977b396322): The workflow could not be created. Causes: (c210b29523a730fd): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.big_query_query_to_table_it_test.BigQueryQueryToTableIT testMethod=test_big_query_new_types_avro>

    @pytest.mark.it_postcommit
    def test_big_query_new_types_avro(self):
      expected_checksum = test_utils.compute_hash(NEW_TYPES_OUTPUT_EXPECTED)
      verify_query = NEW_TYPES_OUTPUT_VERIFY_QUERY % self.output_table
      pipeline_verifiers = [
          PipelineStateMatcher(),
          BigqueryMatcher(
              project=self.project,
              query=verify_query,
              checksum=expected_checksum)
      ]
      self._setup_new_types_env()
      extra_opts = {
          'query': NEW_TYPES_QUERY % (self.dataset_id, NEW_TYPES_INPUT_TABLE),
          'output': self.output_table,
          'output_schema': NEW_TYPES_OUTPUT_SCHEMA,
          'use_standard_sql': False,
          'wait_until_finish_duration': WAIT_UNTIL_FINISH_DURATION_MS,
          'on_success_matcher': all_of(*pipeline_verifiers),
      }
      options = self.test_pipeline.get_full_options_as_args(**extra_opts)
>     big_query_query_to_table_pipeline.run_bq_pipeline(options)

apache_beam/io/gcp/big_query_query_to_table_it_test.py:251: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/io/gcp/big_query_query_to_table_pipeline.py:103: in run_bq_pipeline
    result = p.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7a350beca750>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175758246755-4304'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:57:59 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(212fe8977b396322): The workflow could not be created. Causes: (c210b29523a730fd): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests

github-actions / Test Results

1 out of 3 runs failed: test_iobase_source_with_column_selection_and_row_restriction (apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 3s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:57:14 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(b2eba20cd5d71c2c): The workflow could not be created. Causes: (e19eb7d8f969ba18): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_read_it_test.ReadUsingStorageApiTests testMethod=test_iobase_source_with_column_selection_and_row_restriction>

    @pytest.mark.it_postcommit
    def test_iobase_source_with_column_selection_and_row_restriction(self):
      EXPECTED_TABLE_DATA = [{'string': 'привет'}]
>     with beam.Pipeline(argv=self.args) as p:

apache_beam/io/gcp/bigquery_read_it_test.py:519: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7d7f92f5c890>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175712768267-3086'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:57:14 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(b2eba20cd5d71c2c): The workflow could not be created. Causes: (e19eb7d8f969ba18): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
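
The rejection above reports 301 running jobs where others in this run report 300, which suggests the count is sampled while jobs are concurrently starting and finishing around the limit. To inspect the live count directly, here is a hypothetical helper that shells out to the gcloud CLI (it assumes gcloud is installed and authenticated against the project, and that value(id) is the right format key for the job listing):

    import subprocess

    def count_active_dataflow_jobs(project, region):
        """Count active Dataflow jobs via `gcloud dataflow jobs list`."""
        out = subprocess.run(
            ['gcloud', 'dataflow', 'jobs', 'list',
             '--project', project,
             '--region', region,
             '--status', 'active',
             '--format', 'value(id)'],
            check=True, capture_output=True, text=True).stdout
        # One job id per line; the line count is the active-job count.
        return len(out.splitlines())

    print(count_active_dataflow_jobs('apache-beam-testing', 'us-central1'))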

Check warning on line 0 in apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests

github-actions / Test Results

1 out of 2 runs failed: test_big_query_write_insert_non_transient_api_call_error (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 3s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:57:07 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(e3f1e96c311fd0a5): The workflow could not be created. Causes: (170a69b4e9b8101a): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests testMethod=test_big_query_write_insert_non_transient_api_call_error>

    @pytest.mark.it_postcommit
    def test_big_query_write_insert_non_transient_api_call_error(self):
      """
      Test that non-transient GoogleAPICallError errors returned
      by beam.io.WriteToBigQuery are not retried and result in
      FAILED_ROWS containing both the failed rows and the reason
      for failure.
      """
      table_name = 'this_table_does_not_exist'
      table_id = '{}.{}'.format(self.dataset_id, table_name)
    
      input_data = [{
          'number': 1,
          'str': 'some_string',
      }]
    
      table_schema = {
          "fields": [{
              "name": "number", "type": "INTEGER", 'mode': 'NULLABLE'
          }, {
              "name": "str", "type": "STRING", 'mode': 'NULLABLE'
          }]
      }
    
      bq_result_errors = [({
          'number': 1,
          'str': 'some_string',
      }, "Not Found")]
    
      args = self.test_pipeline.get_full_options_as_args()
    
>     with beam.Pipeline(argv=args) as p:

apache_beam/io/gcp/bigquery_write_it_test.py:490: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7e0c91904790>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175705089712-1452'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:57:07 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(e3f1e96c311fd0a5): The workflow could not be created. Causes: (170a69b4e9b8101a): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
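
The docstring in the traceback above states the contract under test: non-transient GoogleAPICallError failures from beam.io.WriteToBigQuery are not retried and are instead emitted on the FAILED_ROWS dead-letter output along with the failure reason. A hedged sketch of consuming that output on the streaming-inserts path, assuming the write result supports lookup by the 'FailedRows' key; the table reference and schema below are illustrative only:

    import apache_beam as beam

    with beam.Pipeline() as p:
        result = (
            p
            | beam.Create([{'number': 1, 'str': 'some_string'}])
            | beam.io.WriteToBigQuery(
                'my_project:my_dataset.this_table_does_not_exist',  # hypothetical
                schema='number:INTEGER,str:STRING',
                method='STREAMING_INSERTS'))
        # Rows rejected with a non-transient error are routed to this
        # dead-letter output instead of failing the pipeline.
        _ = result['FailedRows'] | beam.Map(print)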

Check warning on line 0 in apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests

github-actions / Test Results

1 out of 2 runs failed: test_big_query_write_without_schema (apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests)

sdks/python/pytest_postCommitIT-df-py311.xml [took 5s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:03:14 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(9f86bc8ff4584f4f): The workflow could not be created. Causes: (3eccde6a600a9cfe): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_write_it_test.BigQueryWriteIntegrationTests testMethod=test_big_query_write_without_schema>

    @pytest.mark.it_postcommit
    def test_big_query_write_without_schema(self):
      table_name = 'python_no_schema_table'
      self.create_table(table_name)
      table_id = '{}.{}'.format(self.dataset_id, table_name)
    
      input_data = [{
          'int64': 1,
          'bytes': b'xyw',
          'date': '2011-01-01',
          'time': '23:59:59.999999'
      },
                    {
                        'int64': 2,
                        'bytes': b'abc',
                        'date': '2000-01-01',
                        'time': '00:00:00'
                    },
                    {
                        'int64': 3,
                        'bytes': b'\xe4\xbd\xa0\xe5\xa5\xbd',
                        'date': '3000-12-31',
                        'time': '23:59:59'
                    },
                    {
                        'int64': 4,
                        'bytes': b'\xab\xac\xad',
                        'date': '2000-01-01',
                        'time': '00:00:00'
                    }]
      # bigquery io expects bytes to be base64 encoded values
      for row in input_data:
        row['bytes'] = base64.b64encode(row['bytes'])
    
      pipeline_verifiers = [
          BigqueryFullResultMatcher(
              project=self.project,
              query="SELECT int64, bytes, date, time FROM %s" % table_id,
              data=[(
                  1,
                  b'xyw',
                  datetime.date(2011, 1, 1),
                  datetime.time(23, 59, 59, 999999),
              ),
                    (
                        2,
                        b'abc',
                        datetime.date(2000, 1, 1),
                        datetime.time(0, 0, 0),
                    ),
                    (
                        3,
                        b'\xe4\xbd\xa0\xe5\xa5\xbd',
                        datetime.date(3000, 12, 31),
                        datetime.time(23, 59, 59),
                    ),
                    (
                        4,
                        b'\xab\xac\xad',
                        datetime.date(2000, 1, 1),
                        datetime.time(0, 0, 0),
                    )])
      ]
    
      args = self.test_pipeline.get_full_options_as_args(
          on_success_matcher=hc.all_of(*pipeline_verifiers))
    
>     with beam.Pipeline(argv=args) as p:

apache_beam/io/gcp/bigquery_write_it_test.py:371: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7e0c913ab250>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205180311794975-5922'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:03:14 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(9f86bc8ff4584f4f): The workflow could not be created. Causes: (3eccde6a600a9cfe): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT

github-actions / Test Results

1 out of 2 runs failed: test_datastore_write_limit (apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT)

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:00:05 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(9b2f2da08c8a9e17): The workflow could not be created. Causes: (bed09ee40ce25e33): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.datastore.v1new.datastore_write_it_test.DatastoreWriteIT testMethod=test_datastore_write_limit>

    @pytest.mark.it_postcommit
    @unittest.skipIf(
        datastore_write_it_pipeline is None, 'GCP dependencies are not installed')
    def test_datastore_write_limit(self):
>     self.run_datastore_write(limit=self.LIMIT)

apache_beam/io/gcp/datastore/v1new/datastore_write_it_test.py:73: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/io/gcp/datastore/v1new/datastore_write_it_test.py:66: in run_datastore_write
    datastore_write_it_pipeline.run(
apache_beam/io/gcp/datastore/v1new/datastore_write_it_pipeline.py:120: in run
    p.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7c0be8ace010>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205180004445224-8439'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 18:00:05 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(9b2f2da08c8a9e17): The workflow could not be created. Causes: (bed09ee40ce25e33): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT

github-actions / Test Results

test_create_user_event (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) failed

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:04 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(4a3ee9f2780f0878): The workflow could not be created. Causes: (447faaf8b1b030c0): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT testMethod=test_create_user_event>

    def test_create_user_event(self):
      USER_EVENT = {"event_type": "page-visit", "user_info": {"visitor_id": "1"}}
    
>     with TestPipeline(is_integration_test=True) as p:

apache_beam/ml/gcp/recommendations_ai_test_it.py:90: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d2d4ed10>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175203092242-1815'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:04 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(4a3ee9f2780f0878): The workflow could not be created. Causes: (447faaf8b1b030c0): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
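
Every failure in this run reports the same root cause: the apache-beam-testing project is at its Dataflow jobs-per-project quota (300 running jobs), so each job submission is rejected with FAILED_PRECONDITION before any test logic executes. A minimal sketch, not part of the Beam test suite, of how a CI wrapper could recognize this specific 400 by parsing the apitools error body (the helper name is hypothetical):

    import json

    from apitools.base.py.exceptions import HttpBadRequestError  # the type raised above


    def is_jobs_quota_error(err: HttpBadRequestError) -> bool:
        """Return True if a 400 is the Dataflow jobs-per-project quota error."""
        try:
            body = json.loads(err.content)
        except (TypeError, ValueError):
            return False
        error = body.get('error', {})
        return (
            error.get('status') == 'FAILED_PRECONDITION' and
            'jobs-per-project quota' in error.get('message', ''))

With such a check, a harness can distinguish transient quota pressure from genuine submission bugs instead of failing the whole suite.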

Check warning on line 0 in apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT


@github-actions / Test Results

test_predict (apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT) failed

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:06 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(f41400623b3c4628): The workflow could not be created. Causes: (4fcbc9e8cc01dfa4): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.ml.gcp.recommendations_ai_test_it.RecommendationAIIT testMethod=test_predict>

    def test_predict(self):
      USER_EVENT = {"event_type": "page-visit", "user_info": {"visitor_id": "1"}}
    
>     with TestPipeline(is_integration_test=True) as p:

apache_beam/ml/gcp/recommendations_ai_test_it.py:102: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d2076d10>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175205446429-9271'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:06 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(f41400623b3c4628): The workflow could not be created. Causes: (4fcbc9e8cc01dfa4): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.ml.inference.sklearn_inference_it_test.SklearnInference


@github-actions / Test Results

test_sklearn_mnist_classification (apache_beam.ml.inference.sklearn_inference_it_test.SklearnInference) failed

sdks/python/pytest_postCommitIT-df-py311.xml [took 3s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:10 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(bf7e5c623fcfe86f): The workflow could not be created. Causes: (f8a8ad2cb30adf21): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.ml.inference.sklearn_inference_it_test.SklearnInference testMethod=test_sklearn_mnist_classification>

    @unittest.skipIf((3, 9, 0) <= sys.version_info < (3, 11, 0), "Beam#33796")
    def test_sklearn_mnist_classification(self):
      test_pipeline = TestPipeline(is_integration_test=True)
      input_file = 'gs://apache-beam-ml/testing/inputs/it_mnist_data.csv'
      output_file_dir = 'gs://temp-storage-for-end-to-end-tests'
      output_file = '/'.join([output_file_dir, str(uuid.uuid4()), 'result.txt'])
      model_path = 'gs://apache-beam-ml/models/mnist_model_svm.pickle'
      extra_opts = {
          'input': input_file,
          'output': output_file,
          'model_path': model_path,
      }
>     sklearn_mnist_classification.run(
          test_pipeline.get_full_options_as_args(**extra_opts),
          save_main_session=False)

apache_beam/ml/inference/sklearn_inference_it_test.py:71: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/inference/sklearn_mnist_classification.py:131: in run
    result = pipeline.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d23cb7d0>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175208914140-9625'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:10 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(bf7e5c623fcfe86f): The workflow could not be created. Causes: (f8a8ad2cb30adf21): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.ml.inference.sklearn_inference_it_test.SklearnInference


@github-actions / Test Results

test_sklearn_mnist_classification_large_model (apache_beam.ml.inference.sklearn_inference_it_test.SklearnInference) failed

sdks/python/pytest_postCommitIT-df-py311.xml [took 3s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:14 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(f7cdb80ece1ae35b): The workflow could not be created. Causes: (a5ab73b25ccebf8a): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.ml.inference.sklearn_inference_it_test.SklearnInference testMethod=test_sklearn_mnist_classification_large_model>

    @unittest.skipIf((3, 9, 0) <= sys.version_info < (3, 11, 0), "Beam#33796")
    def test_sklearn_mnist_classification_large_model(self):
      test_pipeline = TestPipeline(is_integration_test=True)
      input_file = 'gs://apache-beam-ml/testing/inputs/it_mnist_data.csv'
      output_file_dir = 'gs://temp-storage-for-end-to-end-tests'
      output_file = '/'.join([output_file_dir, str(uuid.uuid4()), 'result.txt'])
      model_path = 'gs://apache-beam-ml/models/mnist_model_svm.pickle'
      extra_opts = {
          'input': input_file,
          'output': output_file,
          'model_path': model_path,
          'large_model': True
      }
>     sklearn_mnist_classification.run(
          test_pipeline.get_full_options_as_args(**extra_opts),
          save_main_session=False)

apache_beam/ml/inference/sklearn_inference_it_test.py:105: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/inference/sklearn_mnist_classification.py:131: in run
    result = pipeline.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d22e2d90>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175212503479-6638'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:14 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(f7cdb80ece1ae35b): The workflow could not be created. Causes: (a5ab73b25ccebf8a): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
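
Because the quota is shared by every concurrent workflow in the project, a pre-flight headroom check is another option. A hedged sketch, assuming gcloud is installed and authenticated for the apache-beam-testing project; the function names are illustrative, and the 300-job limit comes from the error messages above, not from Beam itself:

    import subprocess
    import time


    def active_job_count(project: str, region: str) -> int:
        """Count currently active Dataflow jobs via the gcloud CLI."""
        out = subprocess.run(
            ['gcloud', 'dataflow', 'jobs', 'list',
             '--project', project, '--region', region,
             '--status', 'active', '--format', 'value(id)'],
            check=True, capture_output=True, text=True).stdout
        return len(out.splitlines())


    def wait_for_headroom(project='apache-beam-testing', region='us-central1',
                          limit=300, poll_secs=60):
        """Block until the project is below the jobs-per-project limit."""
        while active_job_count(project, region) >= limit:
            time.sleep(poll_secs)

Gating submissions this way trades latency for fewer rejected workflows when hundreds of integration tests launch at once.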

Check warning on line 0 in apache_beam.examples.wordcount_it_test.WordCountIT


@github-actions / Test Results

test_wordcount_impersonation_it (apache_beam.examples.wordcount_it_test.WordCountIT) failed

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:16 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(93107cf463b39367): The workflow could not be created. Causes: (7c5825c609ea3d6d): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.examples.wordcount_it_test.WordCountIT testMethod=test_wordcount_impersonation_it>

    @pytest.mark.it_postcommit
    @pytest.mark.sickbay_direct
    @pytest.mark.sickbay_spark
    @pytest.mark.sickbay_flink
    def test_wordcount_impersonation_it(self):
      """Tests impersonation on dataflow.
    
      For testing impersonation, we use three ingredients:
      - a principal to impersonate
      - a dataflow service account that only that principal is
        allowed to launch jobs as
      - a temp root that only the above two accounts have access to
    
      Jenkins and Dataflow workers both run as GCE default service account.
      So we remove that account from all the above.
      """
      # Credentials need to be reset or this test will fail and credentials
      # from a previous test will be used.
      with auth._Credentials._credentials_lock:
        auth._Credentials._credentials_init = False
      try:
        ACCOUNT_TO_IMPERSONATE = (
            'allows-impersonation@apache-'
            'beam-testing.iam.gserviceaccount.com')
        RUNNER_ACCOUNT = (
            'impersonation-dataflow-worker@'
            'apache-beam-testing.iam.gserviceaccount.com')
        TEMP_DIR = 'gs://impersonation-test-bucket/temp-it'
        STAGING_LOCATION = 'gs://impersonation-test-bucket/staging-it'
        extra_options = {
            'impersonate_service_account': ACCOUNT_TO_IMPERSONATE,
            'service_account_email': RUNNER_ACCOUNT,
            'temp_location': TEMP_DIR,
            'staging_location': STAGING_LOCATION
        }
>       self._run_wordcount_it(wordcount.run, **extra_options)

apache_beam/examples/wordcount_it_test.py:87: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/wordcount_it_test.py:150: in _run_wordcount_it
    run_wordcount(
apache_beam/examples/wordcount.py:109: in run
    result = pipeline.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d1dbfd10>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175215623785-8580'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:16 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(93107cf463b39367): The workflow could not be created. Causes: (7c5825c609ea3d6d): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 301 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT


@github-actions / Test Results

test_aggregation (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) failed

sdks/python/pytest_postCommitIT-df-py311.xml [took 2s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:22 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '491', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(d26c8d0e01dac56): The workflow could not be created. Causes: (264acab10cac8696): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT testMethod=test_aggregation>

    @pytest.mark.it_postcommit
    def test_aggregation(self):
>     taxiride.run_aggregation_pipeline(
          self.test_pipeline,
          'gs://apache-beam-samples/nyc_taxi/2018/*.csv',
          self.output_path)

apache_beam/examples/dataframe/taxiride_it_test.py:49: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/dataframe/taxiride.py:38: in run_aggregation_pipeline
    with pipeline as p:
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d2d55810>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175221587088-5722'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:22 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '491', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(d26c8d0e01dac56): The workflow could not be created. Causes: (264acab10cac8696): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT


@github-actions / Test Results

test_enrich (apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT) failed

sdks/python/pytest_postCommitIT-df-py311.xml [took 4s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:27 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(afcf25a639550ba0): The workflow could not be created. Causes: (be5de0e14aa21de7): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.examples.dataframe.taxiride_it_test.TaxirideIT testMethod=test_enrich>

    @pytest.mark.it_postcommit
    def test_enrich(self):
      # Standard workers OOM with the enrich pipeline
      self.test_pipeline.get_pipeline_options().view_as(
          WorkerOptions).machine_type = 'e2-highmem-2'
    
>     taxiride.run_enrich_pipeline(
          self.test_pipeline,
          'gs://apache-beam-samples/nyc_taxi/2018/*.csv',
          self.output_path)

apache_beam/examples/dataframe/taxiride_it_test.py:80: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/dataframe/taxiride.py:53: in run_enrich_pipeline
    with pipeline as p:
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:115: in run
    result = super().run(
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7ca1d2270c50>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175226029902-6397'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:27 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(afcf25a639550ba0): The workflow could not be created. Causes: (be5de0e14aa21de7): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError

Check warning on line 0 in apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT


@github-actions / Test Results

test_multiple_destinations_transform (apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT) failed

sdks/python/pytest_postCommitIT-df-py311.xml [took 4s]
Raw output
apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:35 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
  "error": {
    "code": 400,
    "message": "(9ee9c8e58565bd6d): The workflow could not be created. Causes: (40fe52f372a0b301): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
    "status": "FAILED_PRECONDITION"
  }
}
>
self = <apache_beam.io.gcp.bigquery_file_loads_test.BigQueryFileLoadsIT testMethod=test_multiple_destinations_transform>

    @pytest.mark.it_postcommit
    def test_multiple_destinations_transform(self):
      output_table_1 = '%s%s' % (self.output_table, 1)
      output_table_2 = '%s%s' % (self.output_table, 2)
      output_table_3 = '%s%s' % (self.output_table, 3)
      output_table_4 = '%s%s' % (self.output_table, 4)
      schema1 = bigquery.WriteToBigQuery.get_dict_table_schema(
          bigquery_tools.parse_table_schema_from_json(self.BIG_QUERY_SCHEMA))
      schema2 = bigquery.WriteToBigQuery.get_dict_table_schema(
          bigquery_tools.parse_table_schema_from_json(self.BIG_QUERY_SCHEMA_2))
    
      schema_kv_pairs = [(output_table_1, schema1), (output_table_2, schema2),
                         (output_table_3, schema1), (output_table_4, schema2)]
      pipeline_verifiers = [
          BigqueryFullResultMatcher(
              project=self.project,
              query="SELECT name, language FROM %s" % output_table_1,
              data=[(d['name'], d['language']) for d in _ELEMENTS
                    if 'language' in d]),
          BigqueryFullResultMatcher(
              project=self.project,
              query="SELECT name, foundation FROM %s" % output_table_2,
              data=[(d['name'], d['foundation']) for d in _ELEMENTS
                    if 'foundation' in d]),
          BigqueryFullResultMatcher(
              project=self.project,
              query="SELECT name, language FROM %s" % output_table_3,
              data=[(d['name'], d['language']) for d in _ELEMENTS
                    if 'language' in d]),
          BigqueryFullResultMatcher(
              project=self.project,
              query="SELECT name, foundation FROM %s" % output_table_4,
              data=[(d['name'], d['foundation']) for d in _ELEMENTS
                    if 'foundation' in d])
      ]
    
      args = self.test_pipeline.get_full_options_as_args()
    
>     with beam.Pipeline(argv=args) as p:

apache_beam/io/gcp/bigquery_file_loads_test.py:1015: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:644: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:594: in run
    self._options).run(False)
apache_beam/pipeline.py:618: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:53: in run_pipeline
    self.result = super().run_pipeline(pipeline, options)
apache_beam/runners/dataflow/dataflow_runner.py:502: in run_pipeline
    self.dataflow_client.create_job(self.job), self)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:759: in create_job
    return self.submit_job_description(job)
apache_beam/utils/retry.py:298: in wrapper
    return fun(*args, **kwargs)
apache_beam/runners/dataflow/internal/apiclient.py:865: in submit_job_description
    response = self._client.projects_locations_jobs.Create(request)
apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py:718: in Create
    return self._RunMethod(config, request, global_params=global_params)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:731: in _RunMethod
    return self.ProcessHttpResponse(method_config, http_response, request)
../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:737: in ProcessHttpResponse
    self.__ProcessHttpResponse(method_config, http_response, request))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <apache_beam.runners.dataflow.internal.clients.dataflow.dataflow_v1b3_client.DataflowV1b3.ProjectsLocationsJobsService object at 0x7c0be8c6e150>
method_config = <ApiMethodInfo
 relative_path: 'v1b3/projects/{projectId}/locations/{location}/jobs'
 method_id: 'dataflow.projects.lo...DataflowProjectsLocationsJobsCreateRequest'
 response_type_name: 'Job'
 request_field: 'job'
 supports_download: False>
http_response = Response(info={'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 0...', request_url='https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json')
request = <DataflowProjectsLocationsJobsCreateRequest
 job: <Job
 clientRequestId: '20250205175234187519-9503'
 environment: <En...empFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
 location: 'us-central1'
 projectId: 'apache-beam-testing'>

    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.CREATED,
                                             http_client.NO_CONTENT):
>           raise exceptions.HttpError.FromResponse(
                http_response, method_config=method_config, request=request)
E           apitools.base.py.exceptions.HttpBadRequestError: HttpError accessing <https://dataflow.googleapis.com/v1b3/projects/apache-beam-testing/locations/us-central1/jobs?alt=json>: response: <{'vary': 'Origin, X-Origin, Referer', 'content-type': 'application/json; charset=UTF-8', 'date': 'Wed, 05 Feb 2025 17:52:35 GMT', 'server': 'ESF', 'x-xss-protection': '0', 'x-frame-options': 'SAMEORIGIN', 'x-content-type-options': 'nosniff', 'transfer-encoding': 'chunked', 'status': '400', 'content-length': '492', '-content-encoding': 'gzip'}>, content <{
E             "error": {
E               "code": 400,
E               "message": "(9ee9c8e58565bd6d): The workflow could not be created. Causes: (40fe52f372a0b301): Dataflow quota error for jobs-per-project quota. Project apache-beam-testing is running 300 jobs. Please check the quota usage via GCP Console. If it exceeds the limit, please wait for a workflow to finish or contact Google Cloud Support to request an increase in quota. If it does not, contact Google Cloud Support.",
E               "status": "FAILED_PRECONDITION"
E             }
E           }
E           >

../../build/gradleenv/2050596099/lib/python3.11/site-packages/apitools/base/py/base_api.py:603: HttpBadRequestError
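
Putting the two sketches together, an illustrative wrapper (again hypothetical, reusing is_jobs_quota_error and the HttpBadRequestError import from the first sketch) could resubmit only when the quota error is the cause, following the "wait for a workflow to finish" guidance in the error message:

    import time


    def run_with_quota_backoff(submit, attempts=5, delay_secs=120):
        """submit: zero-arg callable that launches the Dataflow job.

        Retries only the jobs-per-project quota error; everything else
        propagates immediately so real failures stay visible.
        """
        for attempt in range(attempts):
            try:
                return submit()
            except HttpBadRequestError as err:
                if not is_jobs_quota_error(err) or attempt == attempts - 1:
                    raise
                time.sleep(delay_secs)  # give running workflows time to finish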