
Commit

Upgrade the Dataproc package to 3.0.0 and migrate from v1beta2 to v1 api (#18879)

(cherry picked from commit 4fae04a)
Łukasz Wyszomirski authored and potiuk committed Jan 22, 2022
1 parent 60e2b65 commit a5ee60e
Showing 6 changed files with 9 additions and 9 deletions.
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/hooks/dataproc.py
@@ -25,7 +25,7 @@

from google.api_core.exceptions import ServerError
from google.api_core.retry import Retry
- from google.cloud.dataproc_v1beta2 import (
+ from google.cloud.dataproc_v1 import (
Cluster,
ClusterControllerClient,
Job,
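The hook now builds its Dataproc clients from the `dataproc_v1` package instead of `dataproc_v1beta2`. For downstream code that constructs these messages or clients directly, the class names and the regional endpoint pattern carry over; a minimal sketch, with project, cluster name, and region as placeholder values:

```python
from google.cloud.dataproc_v1 import Cluster, ClusterControllerClient

# v1 message; the core Cluster fields match their v1beta2 counterparts.
cluster = Cluster(
    project_id="my-project",          # placeholder
    cluster_name="example-cluster",   # placeholder
    config={"worker_config": {"num_instances": 2}},
)

# Regional endpoint pattern used when the region is not "global";
# instantiating the client requires application-default credentials.
client = ClusterControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
```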
8 changes: 4 additions & 4 deletions airflow/providers/google/cloud/operators/dataproc.py
@@ -30,7 +30,7 @@

from google.api_core.exceptions import AlreadyExists, NotFound
from google.api_core.retry import Retry, exponential_sleep_generator
- from google.cloud.dataproc_v1beta2 import Cluster
+ from google.cloud.dataproc_v1 import Cluster
from google.protobuf.duration_pb2 import Duration
from google.protobuf.field_mask_pb2 import FieldMask

@@ -1909,7 +1909,7 @@ class DataprocSubmitJobOperator(BaseOperator):
:type location: str
:param job: Required. The job resource.
If a dict is provided, it must be of the same form as the protobuf message
- :class:`~google.cloud.dataproc_v1beta2.types.Job`
+ :class:`~google.cloud.dataproc_v1.types.Job`
:type job: Dict
:param request_id: Optional. A unique id used to identify the request. If the server receives two
``SubmitJobRequest`` requests with the same id, then the second request will be ignored and the first
@@ -2050,8 +2050,8 @@ class DataprocUpdateClusterOperator(BaseOperator):
:param cluster: Required. The changes to the cluster.
If a dict is provided, it must be of the same form as the protobuf message
- :class:`~google.cloud.dataproc_v1beta2.types.Cluster`
- :type cluster: Union[Dict, google.cloud.dataproc_v1beta2.types.Cluster]
+ :class:`~google.cloud.dataproc_v1.types.Cluster`
+ :type cluster: Union[Dict, google.cloud.dataproc_v1.types.Cluster]
:param update_mask: Required. Specifies the path, relative to ``Cluster``, of the field to update. For
example, to change the number of workers in a cluster to 5, the ``update_mask`` parameter would be
specified as ``config.worker_config.num_instances``, and the ``PATCH`` request body would specify the
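The operator docstrings now reference `google.cloud.dataproc_v1.types.Job` and `google.cloud.dataproc_v1.types.Cluster`; dict arguments keep the shape of those messages. A hedged usage sketch follows, with project, region, bucket, and cluster names as placeholders; the region parameter is documented as `location` at this point in the provider and is renamed to `region` in later releases:

```python
from airflow.providers.google.cloud.operators.dataproc import (
    DataprocSubmitJobOperator,
    DataprocUpdateClusterOperator,
)

# `job` dict in the shape of google.cloud.dataproc_v1.types.Job.
submit_pyspark = DataprocSubmitJobOperator(
    task_id="submit_pyspark",
    project_id="my-project",     # placeholder
    location="us-central1",      # placeholder; `region` in later provider releases
    job={
        "reference": {"project_id": "my-project"},
        "placement": {"cluster_name": "example-cluster"},
        "pyspark_job": {"main_python_file_uri": "gs://my-bucket/job.py"},
    },
)

# `cluster` dict in the shape of google.cloud.dataproc_v1.types.Cluster;
# the update_mask mirrors the docstring example of resizing the worker pool to 5.
scale_cluster = DataprocUpdateClusterOperator(
    task_id="scale_cluster",
    project_id="my-project",      # placeholder
    location="us-central1",       # placeholder; `region` in later provider releases
    cluster_name="example-cluster",
    cluster={"config": {"worker_config": {"num_instances": 5}}},
    update_mask={"paths": ["config.worker_config.num_instances"]},
    graceful_decommission_timeout={"seconds": 600},  # protobuf Duration as a dict
)
```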
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/sensors/dataproc.py
@@ -20,7 +20,7 @@
import warnings
from typing import Optional

- from google.cloud.dataproc_v1beta2.types import JobStatus
+ from google.cloud.dataproc_v1.types import JobStatus

from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.hooks.dataproc import DataprocHook
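Only the import path changes in the sensor; polling behavior is unchanged. A minimal usage sketch, with placeholder project and region, a templated job id, and the region parameter given as `location`, matching the interface documented around this release:

```python
from airflow.providers.google.cloud.sensors.dataproc import DataprocJobSensor

# Waits until the referenced Dataproc job reaches a terminal state.
wait_for_job = DataprocJobSensor(
    task_id="wait_for_job",
    project_id="my-project",     # placeholder
    location="us-central1",      # placeholder; `region` in later provider releases
    dataproc_job_id="{{ ti.xcom_pull(task_ids='submit_pyspark') }}",  # job id pushed by the submit task
    poke_interval=30,
)
```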
2 changes: 1 addition & 1 deletion setup.py
@@ -303,7 +303,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
'google-cloud-build>=3.0.0,<4.0.0',
'google-cloud-container>=0.1.1,<2.0.0',
'google-cloud-datacatalog>=3.0.0,<4.0.0',
- 'google-cloud-dataproc>=2.2.0,<2.6.0',
+ 'google-cloud-dataproc>=2.2.0,<4.0.0',
'google-cloud-dlp>=0.11.0,<2.0.0',
'google-cloud-kms>=2.0.0,<3.0.0',
'google-cloud-language>=1.1.1,<2.0.0',
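Relaxing the upper bound lets the provider resolve to google-cloud-dataproc 3.x, which drops the v1beta2 client the provider used previously. A small sketch for checking which version an environment actually installed, using only the standard library:

```python
from importlib.metadata import version

# Any release in the >=2.2.0,<4.0.0 range is acceptable after this change,
# including 3.x, which no longer provides the v1beta2 client.
print(version("google-cloud-dataproc"))
```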
2 changes: 1 addition & 1 deletion tests/providers/google/cloud/hooks/test_dataproc.py
@@ -20,7 +20,7 @@
from unittest import mock

import pytest
- from google.cloud.dataproc_v1beta2 import JobStatus
+ from google.cloud.dataproc_v1 import JobStatus

from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.hooks.dataproc import DataprocHook, DataProcJobBuilder
2 changes: 1 addition & 1 deletion tests/providers/google/cloud/sensors/test_dataproc.py
@@ -19,7 +19,7 @@
from unittest import mock

import pytest
- from google.cloud.dataproc_v1beta2.types import JobStatus
+ from google.cloud.dataproc_v1.types import JobStatus

from airflow import AirflowException
from airflow.providers.google.cloud.sensors.dataproc import DataprocJobSensor
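The test changes are the same import swap; the `JobStatus.State` enum is exposed at the same path in `dataproc_v1`, so assertions against terminal job states keep working. A minimal sketch:

```python
from google.cloud.dataproc_v1.types import JobStatus

# The state enum still lives under JobStatus.State after the v1 migration.
TERMINAL_STATES = {
    JobStatus.State.DONE,
    JobStatus.State.ERROR,
    JobStatus.State.CANCELLED,
}

assert JobStatus.State.DONE in TERMINAL_STATES
```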
