diff --git a/airflow/providers/google/ads/hooks/ads.py b/airflow/providers/google/ads/hooks/ads.py index 3c132d8db06fe..6eae4e52e90ff 100644 --- a/airflow/providers/google/ads/hooks/ads.py +++ b/airflow/providers/google/ads/hooks/ads.py @@ -152,7 +152,7 @@ def list_accessible_customers(self) -> list[str]: @cached_property def _get_service(self) -> GoogleAdsServiceClient: - """Connects and authenticates with the Google Ads API using a service account""" + """Connects and authenticates with the Google Ads API using a service account.""" client = self._get_client return client.get_service("GoogleAdsService", version=self.api_version) @@ -170,7 +170,7 @@ def _get_client(self) -> GoogleAdsClient: @cached_property def _get_customer_service(self) -> CustomerServiceClient: - """Connects and authenticates with the Google Ads API using a service account""" + """Connects and authenticates with the Google Ads API using a service account.""" with NamedTemporaryFile("w", suffix=".json") as secrets_temp: self._get_config() self._update_config_with_secret(secrets_temp) @@ -184,7 +184,7 @@ def _get_customer_service(self) -> CustomerServiceClient: def _get_config(self) -> None: """ Gets google ads connection from meta db and sets google_ads_config attribute with returned config - file + file. """ conn = self.get_connection(self.google_ads_conn_id) if "google_ads_client" not in conn.extra_dejson: @@ -196,7 +196,7 @@ def _update_config_with_secret(self, secrets_temp: IO[str]) -> None: """ Gets Google Cloud secret from connection and saves the contents to the temp file Updates google ads config with file path of the temp file containing the secret - Note, the secret must be passed as a file path for Google Ads API + Note, the secret must be passed as a file path for Google Ads API. """ extras = self.get_connection(self.gcp_conn_id).extra_dejson secret = get_field(extras, "keyfile_dict") @@ -211,7 +211,7 @@ def _search( self, client_ids: list[str], query: str, page_size: int = 10000, **kwargs ) -> list[GoogleAdsRow]: """ - Pulls data from the Google Ads API + Pulls data from the Google Ads API. :param client_ids: Google Ads client ID(s) to query the API for. :param query: Google Ads Query Language query. @@ -232,7 +232,7 @@ def _search( def _extract_rows(self, iterators: list[GRPCIterator]) -> list[GoogleAdsRow]: """ - Convert Google Page Iterator (GRPCIterator) objects to Google Ads Rows + Convert Google Page Iterator (GRPCIterator) objects to Google Ads Rows. :param iterators: List of Google Page Iterator (GRPCIterator) objects diff --git a/airflow/providers/google/ads/transfers/ads_to_gcs.py b/airflow/providers/google/ads/transfers/ads_to_gcs.py index 7e92de94e8e66..1d4e96f098e7f 100644 --- a/airflow/providers/google/ads/transfers/ads_to_gcs.py +++ b/airflow/providers/google/ads/transfers/ads_to_gcs.py @@ -33,7 +33,7 @@ class GoogleAdsToGcsOperator(BaseOperator): """ Fetches the daily results from the Google Ads API for 1-n clients Converts and saves the data as a temporary CSV file - Uploads the CSV to Google Cloud Storage + Uploads the CSV to Google Cloud Storage. .. 
seealso:: For more information on the Google Ads API, take a look at the API docs: diff --git a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py b/airflow/providers/google/cloud/_internal_client/secret_manager_client.py index c2269e0d9dcec..5366e7a9ea305 100644 --- a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py +++ b/airflow/providers/google/cloud/_internal_client/secret_manager_client.py @@ -33,7 +33,7 @@ class _SecretManagerClient(LoggingMixin): """ Retrieves Secrets object from Google Cloud Secrets Manager. This is a common class reused between SecretsManager and Secrets Hook that provides the shared authentication and verification mechanisms. - This class should not be used directly, use SecretsManager or SecretsHook instead + This class should not be used directly, use SecretsManager or SecretsHook instead. :param credentials: Credentials used to authenticate to GCP @@ -50,6 +50,7 @@ def __init__( def is_valid_secret_name(secret_name: str) -> bool: """ Returns true if the secret name is valid. + :param secret_name: name of the secret :return: """ @@ -57,7 +58,7 @@ def is_valid_secret_name(secret_name: str) -> bool: @cached_property def client(self) -> SecretManagerServiceClient: - """Create an authenticated KMS client""" + """Create an authenticated KMS client.""" _client = SecretManagerServiceClient(credentials=self.credentials, client_info=CLIENT_INFO) return _client diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py index 30668c5556274..d46389fa537e2 100644 --- a/airflow/providers/google/cloud/hooks/bigquery.py +++ b/airflow/providers/google/cloud/hooks/bigquery.py @@ -161,7 +161,7 @@ def get_client(self, project_id: str | None = None, location: str | None = None) ) def get_uri(self) -> str: - """Override DbApiHook get_uri method for get_sqlalchemy_engine()""" + """Override DbApiHook get_uri method for get_sqlalchemy_engine().""" return f"bigquery://{self.project_id}" def get_sqlalchemy_engine(self, engine_kwargs=None): @@ -250,8 +250,9 @@ def get_pandas_df( """ Returns a Pandas DataFrame for the results produced by a BigQuery query. The DbApiHook method must be overridden because Pandas - doesn't support PEP 249 connections, except for SQLite. See: + doesn't support PEP 249 connections, except for SQLite. + See: https://github.com/pandas-dev/pandas/blob/055d008615272a1ceca9720dc365a2abd316f353/pandas/io/sql.py#L415 https://github.com/pandas-dev/pandas/issues/6900 @@ -331,7 +332,7 @@ def create_empty_table( ) -> Table: """ Creates a new, empty table in the dataset. - To create a view, which is defined by a SQL query, parse a dictionary to 'view' kwarg + To create a view, which is defined by a SQL query, parse a dictionary to 'view' kwarg. :param project_id: The project to create the table into. :param dataset_id: The dataset to create the table into. @@ -429,8 +430,9 @@ def create_empty_dataset( exists_ok: bool = True, ) -> dict[str, Any]: """ - Create a new empty dataset: - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert + Create a new empty dataset. + + See: https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert. :param project_id: The name of the project where we want to create an empty a dataset. Don't need to provide, if projectId in dataset_reference. @@ -562,8 +564,9 @@ def create_external_table( ) -> Table: """ Creates a new external table in the dataset with the data from Google - Cloud Storage. 
See here: + Cloud Storage. + See here: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource This method is deprecated. @@ -851,7 +854,7 @@ def insert_all( ) -> None: """ Method to stream data into BigQuery one record at a time without needing - to run a load job + to run a load job. .. seealso:: For more information, see: @@ -1035,7 +1038,7 @@ def get_datasets_list( return_iterator: bool = False, ) -> list[DatasetListItem] | HTTPIterator: """ - Method returns full list of BigQuery datasets in the current project + Method returns full list of BigQuery datasets in the current project. For more information, see: https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list @@ -1280,6 +1283,7 @@ def list_rows( ) -> list[Row] | RowIterator: """ List the rows of the table. + See https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/list :param dataset_id: the dataset ID of the requested table. @@ -1329,6 +1333,7 @@ def list_rows( def get_schema(self, dataset_id: str, table_id: str, project_id: str | None = None) -> dict: """ Get the schema for a given dataset and table. + see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource :param dataset_id: the dataset ID of the requested table @@ -1355,6 +1360,7 @@ def update_table_schema( some fields in schemas are immutable and trying to change them will cause an exception. If a new field is included it will be inserted which requires all required fields to be set. + See https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableSchema :param include_policy_tags: If set to True policy tags will be included in @@ -1453,7 +1459,7 @@ def poll_job_complete( return job.done(retry=retry) def cancel_query(self) -> None: - """Cancel all started queries that have not yet completed""" + """Cancel all started queries that have not yet completed.""" warnings.warn( "This method is deprecated. Please use `BigQueryHook.cancel_job`.", AirflowProviderDeprecationWarning, @@ -1471,7 +1477,7 @@ def cancel_job( location: str | None = None, ) -> None: """ - Cancel a job and wait for cancellation to complete + Cancel a job and wait for cancellation to complete. :param job_id: id of the job. :param project_id: Google Cloud Project where the job is running @@ -1516,8 +1522,9 @@ def get_job( location: str | None = None, ) -> CopyJob | QueryJob | LoadJob | ExtractJob | UnknownJob: """ - Retrieves a BigQuery job. For more information see: - https://cloud.google.com/bigquery/docs/reference/v2/jobs + Retrieves a BigQuery job. + + See: https://cloud.google.com/bigquery/docs/reference/v2/jobs :param job_id: The ID of the job. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 @@ -1551,8 +1558,8 @@ def insert_job( ) -> BigQueryJob: """ Executes a BigQuery job. Waits for the job to complete and returns job id. - See here: + See here: https://cloud.google.com/bigquery/docs/reference/v2/jobs :param configuration: The configuration parameter maps directly to @@ -1605,9 +1612,9 @@ def insert_job( def run_with_configuration(self, configuration: dict) -> str: """ - Executes a BigQuery SQL query. See here: + Executes a BigQuery SQL query. - https://cloud.google.com/bigquery/docs/reference/v2/jobs + See here: https://cloud.google.com/bigquery/docs/reference/v2/jobs This method is deprecated. 
Please use `BigQueryHook.insert_job` @@ -1653,9 +1660,9 @@ def run_load( ) -> str: """ Executes a BigQuery load command to load data from Google Cloud Storage - to BigQuery. See here: + to BigQuery. - https://cloud.google.com/bigquery/docs/reference/v2/jobs + See here: https://cloud.google.com/bigquery/docs/reference/v2/jobs This method is deprecated. Please use `BigQueryHook.insert_job` method. @@ -1880,9 +1887,10 @@ def run_copy( ) -> str: """ Executes a BigQuery copy command to copy data from one BigQuery table - to another. See here: + to another. - https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy + + See here: https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy This method is deprecated. Please use `BigQueryHook.insert_job` method. @@ -1970,9 +1978,9 @@ def run_extract( ) -> str | BigQueryJob: """ Executes a BigQuery extract command to copy data from BigQuery to - Google Cloud Storage. See here: + Google Cloud Storage. - https://cloud.google.com/bigquery/docs/reference/v2/jobs + See here: https://cloud.google.com/bigquery/docs/reference/v2/jobs This method is deprecated. Please use `BigQueryHook.insert_job` method. @@ -2058,9 +2066,9 @@ def run_query( ) -> str: """ Executes a BigQuery SQL query. Optionally persists results in a BigQuery - table. See here: + table. - https://cloud.google.com/bigquery/docs/reference/v2/jobs + See here: https://cloud.google.com/bigquery/docs/reference/v2/jobs This method is deprecated. Please use `BigQueryHook.insert_job` method. @@ -2347,17 +2355,17 @@ def __init__(self, *args, **kwargs) -> None: self._kwargs = kwargs def close(self) -> None: - """The BigQueryConnection does not have anything to close""" + """The BigQueryConnection does not have anything to close.""" def commit(self) -> None: - """The BigQueryConnection does not support transactions""" + """The BigQueryConnection does not support transactions.""" def cursor(self) -> BigQueryCursor: - """Return a new :py:class:`Cursor` object using the connection""" + """Return a new :py:class:`Cursor` object using the connection.""" return BigQueryCursor(*self._args, **self._kwargs) def rollback(self) -> NoReturn: - """The BigQueryConnection does not have transactions""" + """The BigQueryConnection does not have transactions.""" raise NotImplementedError("BigQueryConnection does not have transactions") @@ -2395,7 +2403,7 @@ def __init__( def create_empty_table(self, *args, **kwargs): """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_table` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_table`. """ warnings.warn( "This method is deprecated. " @@ -2408,7 +2416,7 @@ def create_empty_table(self, *args, **kwargs): def create_empty_dataset(self, *args, **kwargs) -> dict[str, Any]: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_dataset` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_dataset`. """ warnings.warn( "This method is deprecated. " @@ -2421,7 +2429,7 @@ def create_empty_dataset(self, *args, **kwargs) -> dict[str, Any]: def get_dataset_tables(self, *args, **kwargs) -> list[dict[str, Any]]: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables`. 
""" warnings.warn( "This method is deprecated. " @@ -2434,7 +2442,7 @@ def get_dataset_tables(self, *args, **kwargs) -> list[dict[str, Any]]: def delete_dataset(self, *args, **kwargs) -> None: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.delete_dataset` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.delete_dataset`. """ warnings.warn( "This method is deprecated. " @@ -2447,7 +2455,7 @@ def delete_dataset(self, *args, **kwargs) -> None: def create_external_table(self, *args, **kwargs): """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_external_table` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_external_table`. """ warnings.warn( "This method is deprecated. " @@ -2460,7 +2468,7 @@ def create_external_table(self, *args, **kwargs): def patch_table(self, *args, **kwargs) -> None: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_table` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_table`. """ warnings.warn( "This method is deprecated. " @@ -2473,7 +2481,7 @@ def patch_table(self, *args, **kwargs) -> None: def insert_all(self, *args, **kwargs) -> None: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_all` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_all`. """ warnings.warn( "This method is deprecated. " @@ -2486,7 +2494,7 @@ def insert_all(self, *args, **kwargs) -> None: def update_dataset(self, *args, **kwargs) -> dict: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.update_dataset` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.update_dataset`. """ warnings.warn( "This method is deprecated. " @@ -2499,7 +2507,7 @@ def update_dataset(self, *args, **kwargs) -> dict: def patch_dataset(self, *args, **kwargs) -> dict: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_dataset` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_dataset`. """ warnings.warn( "This method is deprecated. " @@ -2512,7 +2520,7 @@ def patch_dataset(self, *args, **kwargs) -> dict: def get_dataset_tables_list(self, *args, **kwargs) -> list[dict[str, Any]]: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables_list` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables_list`. """ warnings.warn( "This method is deprecated. " @@ -2525,7 +2533,7 @@ def get_dataset_tables_list(self, *args, **kwargs) -> list[dict[str, Any]]: def get_datasets_list(self, *args, **kwargs) -> list | HTTPIterator: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_datasets_list` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_datasets_list`. """ warnings.warn( "This method is deprecated. " @@ -2538,7 +2546,7 @@ def get_datasets_list(self, *args, **kwargs) -> list | HTTPIterator: def get_dataset(self, *args, **kwargs) -> Dataset: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset`. 
""" warnings.warn( "This method is deprecated. " @@ -2551,7 +2559,7 @@ def get_dataset(self, *args, **kwargs) -> Dataset: def run_grant_dataset_view_access(self, *args, **kwargs) -> dict: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_grant_dataset_view_access` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_grant_dataset_view_access`. """ warnings.warn( "This method is deprecated. " @@ -2565,7 +2573,7 @@ def run_grant_dataset_view_access(self, *args, **kwargs) -> dict: def run_table_upsert(self, *args, **kwargs) -> dict: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_upsert` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_upsert`. """ warnings.warn( "This method is deprecated. " @@ -2578,7 +2586,7 @@ def run_table_upsert(self, *args, **kwargs) -> dict: def run_table_delete(self, *args, **kwargs) -> None: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_delete` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_delete`. """ warnings.warn( "This method is deprecated. " @@ -2591,7 +2599,7 @@ def run_table_delete(self, *args, **kwargs) -> None: def get_tabledata(self, *args, **kwargs) -> list[dict]: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_tabledata` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_tabledata`. """ warnings.warn( "This method is deprecated. " @@ -2604,7 +2612,7 @@ def get_tabledata(self, *args, **kwargs) -> list[dict]: def get_schema(self, *args, **kwargs) -> dict: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_schema` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_schema`. """ warnings.warn( "This method is deprecated. " @@ -2617,7 +2625,7 @@ def get_schema(self, *args, **kwargs) -> dict: def poll_job_complete(self, *args, **kwargs) -> bool: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.poll_job_complete` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.poll_job_complete`. """ warnings.warn( "This method is deprecated. " @@ -2630,7 +2638,7 @@ def poll_job_complete(self, *args, **kwargs) -> bool: def cancel_query(self, *args, **kwargs) -> None: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.cancel_query` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.cancel_query`. """ warnings.warn( "This method is deprecated. " @@ -2643,7 +2651,7 @@ def cancel_query(self, *args, **kwargs) -> None: def run_with_configuration(self, *args, **kwargs) -> str: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_with_configuration` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_with_configuration`. """ warnings.warn( "This method is deprecated. " @@ -2656,7 +2664,7 @@ def run_with_configuration(self, *args, **kwargs) -> str: def run_load(self, *args, **kwargs) -> str: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_load` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_load`. 
""" warnings.warn( "This method is deprecated. " @@ -2669,7 +2677,7 @@ def run_load(self, *args, **kwargs) -> str: def run_copy(self, *args, **kwargs) -> str: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_copy` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_copy`. """ warnings.warn( "This method is deprecated. " @@ -2682,7 +2690,7 @@ def run_copy(self, *args, **kwargs) -> str: def run_extract(self, *args, **kwargs) -> str | BigQueryJob: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_extract` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_extract`. """ warnings.warn( "This method is deprecated. " @@ -2695,7 +2703,7 @@ def run_extract(self, *args, **kwargs) -> str | BigQueryJob: def run_query(self, *args, **kwargs) -> str: """ This method is deprecated. - Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_query` + Please use `airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_query`. """ warnings.warn( "This method is deprecated. " @@ -2709,7 +2717,7 @@ def run_query(self, *args, **kwargs) -> str: class BigQueryCursor(BigQueryBaseCursor): """ A very basic BigQuery PEP 249 cursor implementation. The PyHive PEP 249 - implementation was used as a reference: + implementation was used as a reference. https://github.com/dropbox/PyHive/blob/master/pyhive/presto.py https://github.com/dropbox/PyHive/blob/master/pyhive/common.py @@ -2741,7 +2749,7 @@ def __init__( @property def description(self) -> list: - """Return the cursor description""" + """Return the cursor description.""" return self._description @description.setter @@ -2749,11 +2757,11 @@ def description(self, value): self._description = value def close(self) -> None: - """By default, do nothing""" + """By default, do nothing.""" @property def rowcount(self) -> int: - """By default, return -1 to indicate that this is not supported""" + """By default, return -1 to indicate that this is not supported.""" return -1 def execute(self, operation: str, parameters: dict | None = None) -> None: @@ -2785,14 +2793,14 @@ def executemany(self, operation: str, seq_of_parameters: list) -> None: self.execute(operation, parameters) def flush_results(self) -> None: - """Flush results related cursor attributes""" + """Flush results related cursor attributes.""" self.page_token = None self.job_id = None self.all_pages_loaded = False self.buffer = [] def fetchone(self) -> list | None: - """Fetch the next row of a query result set""" + """Fetch the next row of a query result set.""" return self.next() def next(self) -> list | None: @@ -2865,20 +2873,20 @@ def fetchall(self) -> list[list]: return result def get_arraysize(self) -> int: - """Specifies the number of rows to fetch at a time with .fetchmany()""" + """Specifies the number of rows to fetch at a time with .fetchmany().""" return self.buffersize or 1 def set_arraysize(self, arraysize: int) -> None: - """Specifies the number of rows to fetch at a time with .fetchmany()""" + """Specifies the number of rows to fetch at a time with .fetchmany().""" self.buffersize = arraysize arraysize = property(get_arraysize, set_arraysize) def setinputsizes(self, sizes: Any) -> None: - """Does nothing by default""" + """Does nothing by default.""" def setoutputsize(self, size: Any, column: Any = None) -> None: - """Does nothing by default""" + """Does nothing by default.""" def _get_query_result(self) -> dict: 
"""Get job query results like data, schema, job type...""" @@ -2897,7 +2905,7 @@ def _get_query_result(self) -> dict: def _bind_parameters(operation: str, parameters: dict) -> str: - """Helper method that binds parameters to a SQL query""" + """Helper method that binds parameters to a SQL query.""" # inspired by MySQL Python Connector (conversion.py) string_parameters = {} # type dict[str, str] for (name, value) in parameters.items(): @@ -2911,7 +2919,7 @@ def _bind_parameters(operation: str, parameters: dict) -> str: def _escape(s: str) -> str: - """Helper method that escapes parameters to a SQL query""" + """Helper method that escapes parameters to a SQL query.""" e = s e = e.replace("\\", "\\\\") e = e.replace("\n", "\\n") @@ -2997,7 +3005,7 @@ def _cleanse_time_partitioning( def _validate_value(key: Any, value: Any, expected_type: type | tuple[type]) -> None: - """Function to check expected type and raise error if type is not correct""" + """Function to check expected type and raise error if type is not correct.""" if not isinstance(value, expected_type): raise TypeError(f"{key} argument must have a type {expected_type} not {type(value)}") @@ -3048,7 +3056,7 @@ def _validate_src_fmt_configs( def _format_schema_for_description(schema: dict) -> list: """ Reformat the schema to match cursor description standard which is a tuple - of 7 elemenbts (name, type, display_size, internal_size, precision, scale, null_ok) + of 7 elemenbts (name, type, display_size, internal_size, precision, scale, null_ok). """ description = [] for field in schema["fields"]: @@ -3067,7 +3075,7 @@ def _format_schema_for_description(schema: dict) -> list: class BigQueryAsyncHook(GoogleBaseAsyncHook): - """Uses gcloud-aio library to retrieve Job details""" + """Uses gcloud-aio library to retrieve Job details.""" sync_hook_class = BigQueryHook @@ -3163,7 +3171,7 @@ def value_check( tolerance: float | None = None, ) -> None: """ - Match a single query resulting row and tolerance with pass_value + Match a single query resulting row and tolerance with pass_value. :return: If Match fail, we throw an AirflowException. """ @@ -3201,7 +3209,7 @@ def _get_numeric_matches( records: list[float], pass_value: Any, tolerance: float | None = None ) -> list[bool]: """ - A helper function to match numeric pass_value, tolerance with records value + A helper function to match numeric pass_value, tolerance with records value. :param records: List of value to match against :param pass_value: Expected value @@ -3217,7 +3225,7 @@ def _get_numeric_matches( @staticmethod def _convert_to_float_if_possible(s: Any) -> Any: """ - A small helper function to convert a string to a numeric value if appropriate + A small helper function to convert a string to a numeric value if appropriate. :param s: the string to be converted """ @@ -3235,7 +3243,7 @@ def interval_check( ratio_formula: str, ) -> None: """ - Checks that the values of metrics given as SQL expressions are within a certain tolerance + Checks that the values of metrics given as SQL expressions are within a certain tolerance. 
:param row1: first resulting row of a query execution job for first SQL query :param row2: first resulting row of a query execution job for second SQL query @@ -3315,7 +3323,7 @@ def interval_check( class BigQueryTableAsyncHook(GoogleBaseAsyncHook): - """Class to get async hook for Bigquery Table Async""" + """Class to get async hook for Bigquery Table Async.""" sync_hook_class = BigQueryHook diff --git a/airflow/providers/google/cloud/hooks/cloud_sql.py b/airflow/providers/google/cloud/hooks/cloud_sql.py index 69c35dc0f1333..1a325775f2285 100644 --- a/airflow/providers/google/cloud/hooks/cloud_sql.py +++ b/airflow/providers/google/cloud/hooks/cloud_sql.py @@ -845,7 +845,7 @@ def _generate_unique_path() -> str: can be close to 60 characters and there is a limitation in length of socket path to around 100 characters in total. We append project/location/instance to it later and postgres - appends its own prefix, so we chose a shorter "${tempdir()}[8 random characters]" + appends its own prefix, so we chose a shorter "${tempdir()}[8 random characters]". """ random.seed() while True: @@ -980,7 +980,7 @@ def cleanup_database_hook(self) -> None: self.log.info(output) def reserve_free_tcp_port(self) -> None: - """Reserve free TCP port to be used by Cloud SQL Proxy""" + """Reserve free TCP port to be used by Cloud SQL Proxy.""" self.reserved_tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.reserved_tcp_socket.bind(("127.0.0.1", 0)) self.sql_proxy_tcp_port = self.reserved_tcp_socket.getsockname()[1] diff --git a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py index 1e80adf0e1cdd..2823bee1f17cf 100644 --- a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py @@ -106,7 +106,7 @@ class GcpTransferOperationStatus: def gen_job_name(job_name: str) -> str: """ Adds unique suffix to job name. If suffix already exists, updates it. - Suffix — current timestamp + Suffix — current timestamp. :param job_name: :return: job_name with suffix diff --git a/airflow/providers/google/cloud/hooks/compute.py b/airflow/providers/google/cloud/hooks/compute.py index a6cde83425226..064551cc40dee 100644 --- a/airflow/providers/google/cloud/hooks/compute.py +++ b/airflow/providers/google/cloud/hooks/compute.py @@ -74,6 +74,7 @@ def __init__( def get_conn(self): """ Retrieves connection to Google Compute Engine. + :return: Google Compute Engine services object :rtype: dict """ diff --git a/airflow/providers/google/cloud/hooks/compute_ssh.py b/airflow/providers/google/cloud/hooks/compute_ssh.py index d602b92c65e7e..001d9f6627db2 100644 --- a/airflow/providers/google/cloud/hooks/compute_ssh.py +++ b/airflow/providers/google/cloud/hooks/compute_ssh.py @@ -39,7 +39,7 @@ class _GCloudAuthorizedSSHClient(paramiko.SSHClient): - """SSH Client that maintains the context for gcloud authorization during the connection""" + """SSH Client that maintains the context for gcloud authorization during the connection.""" def __init__(self, google_hook, *args, **kwargs): super().__init__(*args, **kwargs) @@ -67,7 +67,7 @@ def __exit__(self, type_, value, traceback): class ComputeEngineSSHHook(SSHHook): """ - Hook to connect to a remote instance in compute engine + Hook to connect to a remote instance in compute engine. 
:param instance_name: The name of the Compute Engine instance :param zone: The zone of the Compute Engine instance diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/airflow/providers/google/cloud/hooks/dataflow.py index a7fe4cf1316e2..e393c6dcbb919 100644 --- a/airflow/providers/google/cloud/hooks/dataflow.py +++ b/airflow/providers/google/cloud/hooks/dataflow.py @@ -60,7 +60,7 @@ def process_line_and_extract_dataflow_job_id_callback( """ Returns callback which triggers function passed as `on_new_job_id_callback` when Dataflow job_id is found. To be used for `process_line_callback` in - :py:class:`~airflow.providers.apache.beam.hooks.beam.BeamCommandRunner` + :py:class:`~airflow.providers.apache.beam.hooks.beam.BeamCommandRunner`. :param on_new_job_id_callback: Callback called when the job ID is known """ @@ -127,6 +127,7 @@ def inner_wrapper(self: DataflowHook, *args, **kwargs): class DataflowJobStatus: """ Helper class with Dataflow job statuses. + Reference: https://cloud.google.com/dataflow/docs/reference/rest/v1b3/projects.jobs#Job.JobState """ @@ -221,7 +222,7 @@ def __init__( def is_job_running(self) -> bool: """ - Helper method to check if jos is still running in dataflow + Helper method to check if job is still running in dataflow. :return: True if job is running. """ @@ -236,7 +237,7 @@ def is_job_running(self) -> bool: def _get_current_jobs(self) -> list[dict]: """ - Helper method to get list of jobs that start with job name or id + Helper method to get list of jobs that start with job name or id. :return: list of jobs including id's """ @@ -374,7 +375,7 @@ def _fetch_jobs_by_prefix_name(self, prefix_name: str) -> list[dict]: def _refresh_jobs(self) -> None: """ - Helper method to get all jobs by name + Helper method to get all jobs by name. :return: jobs """ @@ -393,7 +394,7 @@ def _refresh_jobs(self) -> None: def _check_dataflow_job_state(self, job) -> bool: """ Helper method to check the state of one job in dataflow for this task - if job failed raise exception + if job failed raise exception. :return: True if job is done. :raise: Exception @@ -467,7 +468,7 @@ def _wait_for_states(self, expected_states: set[str]): time.sleep(self._poll_sleep) def cancel(self) -> None: - """Cancels or drains current job""" + """Cancels or drains current job.""" self._jobs = [ job for job in self.get_jobs() if job["currentState"] not in DataflowJobStatus.TERMINAL_STATES ] @@ -898,7 +899,7 @@ def is_job_dataflow_running( variables: dict | None = None, ) -> bool: """ - Helper method to check if jos is still running in dataflow + Helper method to check if job is still running in dataflow. :param name: The name of the job. :param project_id: Optional, the Google Cloud project ID in which to start a job. diff --git a/airflow/providers/google/cloud/hooks/dataform.py b/airflow/providers/google/cloud/hooks/dataform.py index 8905f2546d261..67797a7ce32ba 100644 --- a/airflow/providers/google/cloud/hooks/dataform.py +++ b/airflow/providers/google/cloud/hooks/dataform.py @@ -277,7 +277,7 @@ def create_repository( metadata: Sequence[tuple[str, str]] = (), ) -> Repository: """ - Creates repository + Creates repository. :param project_id: Required. The ID of the Google Cloud project where repository should be. :param region: Required. The ID of the Google Cloud region where repository should be. @@ -606,7 +606,7 @@ def install_npm_packages( ) -> InstallNpmPackagesResponse: """ Installs npm dependencies in the provided workspace. 
Requires "package.json" - to be created in workspace + to be created in workspace. :param project_id: Required. The ID of the Google Cloud project where workspace located. :param region: Required. The ID of the Google Cloud region where workspace located. diff --git a/airflow/providers/google/cloud/hooks/datafusion.py b/airflow/providers/google/cloud/hooks/datafusion.py index 935b28fed4c3d..a736d11e9784c 100644 --- a/airflow/providers/google/cloud/hooks/datafusion.py +++ b/airflow/providers/google/cloud/hooks/datafusion.py @@ -40,7 +40,7 @@ class PipelineStates: - """Data Fusion pipeline states""" + """Data Fusion pipeline states.""" PENDING = "PENDING" STARTING = "STARTING" @@ -484,7 +484,7 @@ def stop_pipeline(self, pipeline_name: str, instance_url: str, namespace: str = class DataFusionAsyncHook(GoogleBaseAsyncHook): - """Class to get asynchronous hook for DataFusion""" + """Class to get asynchronous hook for DataFusion.""" sync_hook_class = DataFusionHook scopes = ["https://www.googleapis.com/auth/cloud-platform"] diff --git a/airflow/providers/google/cloud/hooks/dataprep.py b/airflow/providers/google/cloud/hooks/dataprep.py index c7cbc3b55157b..7e475ea6a7396 100644 --- a/airflow/providers/google/cloud/hooks/dataprep.py +++ b/airflow/providers/google/cloud/hooks/dataprep.py @@ -58,7 +58,9 @@ class JobGroupStatuses(str, Enum): class GoogleDataprepHook(BaseHook): """ Hook for connection with Dataprep API. + To get connection Dataprep with Airflow you need Dataprep token. + https://clouddataprep.com/documentation/api#section/Authentication It should be added to the Connection in Airflow in JSON format. @@ -120,9 +122,11 @@ def get_job_group(self, job_group_id: int, embed: str, include_deleted: bool) -> def run_job_group(self, body_request: dict) -> dict[str, Any]: """ Creates a ``jobGroup``, which launches the specified job as the authenticated user. + This performs the same action as clicking on the Run Job button in the application. + To get recipe_id please follow the Dataprep API documentation - https://clouddataprep.com/documentation/api#operation/runJobGroup + https://clouddataprep.com/documentation/api#operation/runJobGroup. :param body_request: The identifier for the recipe you would like to run. """ diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/airflow/providers/google/cloud/hooks/dataproc.py index 625193439047a..39670be95d56c 100644 --- a/airflow/providers/google/cloud/hooks/dataproc.py +++ b/airflow/providers/google/cloud/hooks/dataproc.py @@ -249,7 +249,7 @@ def get_job_client(self, region: str | None = None) -> JobControllerClient: ) def get_batch_client(self, region: str | None = None) -> BatchControllerClient: - """Returns BatchControllerClient""" + """Returns BatchControllerClient.""" client_options = None if region and region != "global": client_options = ClientOptions(api_endpoint=f"{region}-dataproc.googleapis.com:443") @@ -259,7 +259,7 @@ def get_batch_client(self, region: str | None = None) -> BatchControllerClient: ) def get_operations_client(self, region): - """Returns OperationsClient""" + """Returns OperationsClient.""" return self.get_batch_client(region=region).transport.operations_client def wait_for_operation( @@ -398,7 +398,7 @@ def diagnose_cluster( ): """ Gets cluster diagnostic information. After the operation completes GCS uri to - diagnose is returned + diagnose is returned. :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to. :param region: Required. 
The Cloud Dataproc region in which to handle the request. @@ -1106,7 +1106,7 @@ def get_job_client(self, region: str | None = None) -> JobControllerAsyncClient: return self._cached_client def get_batch_client(self, region: str | None = None) -> BatchControllerAsyncClient: - """Returns BatchControllerAsyncClient""" + """Returns BatchControllerAsyncClient.""" client_options = None if region and region != "global": client_options = ClientOptions(api_endpoint=f"{region}-dataproc.googleapis.com:443") @@ -1116,7 +1116,7 @@ def get_batch_client(self, region: str | None = None) -> BatchControllerAsyncCli ) def get_operations_client(self, region: str) -> OperationsClient: - """Returns OperationsClient""" + """Returns OperationsClient.""" return self.get_template_client(region=region).transport.operations_client @GoogleBaseHook.fallback_to_default_project_id @@ -1242,7 +1242,7 @@ async def diagnose_cluster( ): """ Gets cluster diagnostic information. After the operation completes GCS uri to - diagnose is returned + diagnose is returned. :param project_id: Required. The ID of the Google Cloud project that the cluster belongs to. :param region: Required. The Cloud Dataproc region in which to handle the request. diff --git a/airflow/providers/google/cloud/hooks/functions.py b/airflow/providers/google/cloud/hooks/functions.py index 8409b5a8eadf0..0a6a9b892df03 100644 --- a/airflow/providers/google/cloud/hooks/functions.py +++ b/airflow/providers/google/cloud/hooks/functions.py @@ -63,7 +63,7 @@ def __init__( def _full_location(project_id: str, location: str) -> str: """ Retrieve full location of the function in the form of - ``projects//locations/`` + ``projects//locations/``. :param project_id: The Google Cloud Project project_id where the function belongs. :param location: The location where the function is created. diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/airflow/providers/google/cloud/hooks/gcs.py index c0600dde8ee7a..f3cea1567dc2b 100644 --- a/airflow/providers/google/cloud/hooks/gcs.py +++ b/airflow/providers/google/cloud/hooks/gcs.py @@ -400,7 +400,7 @@ def provide_file( dir: str | None = None, ) -> Generator[IO[bytes], None, None]: """ - Downloads the file to a temporary directory and returns a file handle + Downloads the file to a temporary directory and returns a file handle. You can use this method by passing the bucket_name and object_name parameters or just object_url parameter. @@ -569,7 +569,7 @@ def exists(self, bucket_name: str, object_name: str, retry: Retry = DEFAULT_RETR def get_blob_update_time(self, bucket_name: str, object_name: str): """ - Get the update time of a file in Google Cloud Storage + Get the update time of a file in Google Cloud Storage. :param bucket_name: The Google Cloud Storage bucket where the object is. :param object_name: The name of the blob to get updated time from the Google cloud @@ -646,7 +646,7 @@ def is_updated_before(self, bucket_name: str, object_name: str, ts: datetime) -> def is_older_than(self, bucket_name: str, object_name: str, seconds: int) -> bool: """ - Check if object is older than given time + Check if object is older than given time. :param bucket_name: The Google Cloud Storage bucket where the object is. :param object_name: The name of the object to check in the Google cloud @@ -705,7 +705,7 @@ def list( delimiter: str | None = None, ): """ - List all objects from the bucket with the given a single prefix or multiple prefixes + List all objects from the bucket with the given a single prefix or multiple prefixes. 
:param bucket_name: bucket name :param versions: if true, list all versions of the objects @@ -747,7 +747,7 @@ def _list( delimiter: str | None = None, ) -> List: """ - List all objects from the bucket with the give string prefix in name + List all objects from the bucket with the give string prefix in name. :param bucket_name: bucket name :param versions: if true, list all versions of the objects @@ -965,6 +965,7 @@ def insert_bucket_acl( ) -> None: """ Creates a new ACL entry on the specified bucket_name. + See: https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert :param bucket_name: Name of a bucket_name. @@ -999,6 +1000,7 @@ def insert_object_acl( ) -> None: """ Creates a new ACL entry on the specified object. + See: https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert :param bucket_name: Name of a bucket_name. @@ -1030,7 +1032,7 @@ def insert_object_acl( def compose(self, bucket_name: str, source_objects: List[str], destination_object: str) -> None: """ - Composes a list of existing object into a new object in the same storage bucket_name + Composes a list of existing object into a new object in the same storage bucket_name. Currently it only supports up to 32 objects that can be concatenated in a single operation @@ -1243,7 +1245,7 @@ def _parse_gcs_url(gsurl: str) -> tuple[str, str]: class GCSAsyncHook(GoogleBaseAsyncHook): - """GCSAsyncHook run on the trigger worker, inherits from GoogleBaseHookAsync""" + """GCSAsyncHook run on the trigger worker, inherits from GoogleBaseHookAsync.""" sync_hook_class = GCSHook diff --git a/airflow/providers/google/cloud/hooks/kms.py b/airflow/providers/google/cloud/hooks/kms.py index 6dfa85335b849..3be5f09887ed2 100644 --- a/airflow/providers/google/cloud/hooks/kms.py +++ b/airflow/providers/google/cloud/hooks/kms.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module contains a Google Cloud KMS hook""" +"""This module contains a Google Cloud KMS hook.""" from __future__ import annotations import base64 @@ -30,12 +30,12 @@ def _b64encode(s: bytes) -> str: - """Base 64 encodes a bytes object to a string""" + """Base 64 encodes a bytes object to a string.""" return base64.b64encode(s).decode("ascii") def _b64decode(s: str) -> bytes: - """Base 64 decodes a string to bytes""" + """Base 64 decodes a string to bytes.""" return base64.b64decode(s.encode("utf-8")) diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/airflow/providers/google/cloud/hooks/kubernetes_engine.py index a40a1b597e551..789dd9cb972a3 100644 --- a/airflow/providers/google/cloud/hooks/kubernetes_engine.py +++ b/airflow/providers/google/cloud/hooks/kubernetes_engine.py @@ -116,7 +116,7 @@ def get_client(self) -> ClusterManagerClient: def wait_for_operation(self, operation: Operation, project_id: str | None = None) -> Operation: """ Given an operation, continuously fetches the status from Google Cloud until either - completion or an error occurring + completion or an error occurring. :param operation: The Operation to wait for :param project_id: Google Cloud project ID @@ -135,7 +135,7 @@ def wait_for_operation(self, operation: Operation, project_id: str | None = None def get_operation(self, operation_name: str, project_id: str | None = None) -> Operation: """ - Fetches the operation from Google Cloud + Fetches the operation from Google Cloud. 
:param operation_name: Name of operation to fetch :param project_id: Google Cloud project ID @@ -151,7 +151,7 @@ def get_operation(self, operation_name: str, project_id: str | None = None) -> O @staticmethod def _append_label(cluster_proto: Cluster, key: str, val: str) -> Cluster: """ - Append labels to provided Cluster Protobuf + Append labels to provided Cluster Protobuf. Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current airflow version string follows semantic versioning spec: x.y.z). @@ -274,7 +274,7 @@ def get_cluster( timeout: float | None = None, ) -> Cluster: """ - Gets details of specified cluster + Gets details of specified cluster. :param name: The name of the cluster to retrieve :param project_id: Google Cloud project ID diff --git a/airflow/providers/google/cloud/hooks/life_sciences.py b/airflow/providers/google/cloud/hooks/life_sciences.py index 186c959a88ccc..835837ef5fb81 100644 --- a/airflow/providers/google/cloud/hooks/life_sciences.py +++ b/airflow/providers/google/cloud/hooks/life_sciences.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""Hook for Google Cloud Life Sciences service""" +"""Hook for Google Cloud Life Sciences service.""" from __future__ import annotations import time @@ -84,7 +84,7 @@ def get_conn(self) -> build: @GoogleBaseHook.fallback_to_default_project_id def run_pipeline(self, body: dict, location: str, project_id: str) -> dict: """ - Runs a pipeline + Runs a pipeline. :param body: The request body. :param location: The location of the project. For example: "us-east1". diff --git a/airflow/providers/google/cloud/hooks/mlengine.py b/airflow/providers/google/cloud/hooks/mlengine.py index 83af18ecf4607..ff13546685c17 100644 --- a/airflow/providers/google/cloud/hooks/mlengine.py +++ b/airflow/providers/google/cloud/hooks/mlengine.py @@ -538,7 +538,7 @@ def _append_label(self, model: dict) -> None: class MLEngineAsyncHook(GoogleBaseAsyncHook): - """Class to get asynchronous hook for MLEngine""" + """Class to get asynchronous hook for MLEngine.""" sync_hook_class = MLEngineHook scopes = ["https://www.googleapis.com/auth/cloud-platform"] diff --git a/airflow/providers/google/cloud/hooks/os_login.py b/airflow/providers/google/cloud/hooks/os_login.py index 02ddadb5999d2..412740855f2f9 100644 --- a/airflow/providers/google/cloud/hooks/os_login.py +++ b/airflow/providers/google/cloud/hooks/os_login.py @@ -14,9 +14,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -""" -.. spelling:: +"""OS Login hooks. +.. spelling:: ImportSshPublicKeyResponse oslogin """ @@ -58,7 +58,7 @@ def __init__( self._conn: OsLoginServiceClient | None = None def get_conn(self) -> OsLoginServiceClient: - """Return OS Login service client""" + """Return OS Login service client.""" if self._conn: return self._conn diff --git a/airflow/providers/google/cloud/hooks/secret_manager.py b/airflow/providers/google/cloud/hooks/secret_manager.py index 1a6d143d84eba..484b147a8f7d9 100644 --- a/airflow/providers/google/cloud/hooks/secret_manager.py +++ b/airflow/providers/google/cloud/hooks/secret_manager.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-"""Hook for Secrets Manager service""" +"""Hook for Secrets Manager service.""" from __future__ import annotations from typing import Sequence diff --git a/airflow/providers/google/cloud/hooks/speech_to_text.py b/airflow/providers/google/cloud/hooks/speech_to_text.py index 4dc0568b2e7d0..33ddc653c37cf 100644 --- a/airflow/providers/google/cloud/hooks/speech_to_text.py +++ b/airflow/providers/google/cloud/hooks/speech_to_text.py @@ -80,7 +80,7 @@ def recognize_speech( timeout: float | None = None, ): """ - Recognizes audio input + Recognizes audio input. :param config: information to the recognizer that specifies how to process the request. https://googleapis.github.io/google-cloud-python/latest/speech/gapic/v1/types.html#google.cloud.speech_v1.types.RecognitionConfig diff --git a/airflow/providers/google/cloud/hooks/stackdriver.py b/airflow/providers/google/cloud/hooks/stackdriver.py index 3a5738e3d507c..517e211edd0df 100644 --- a/airflow/providers/google/cloud/hooks/stackdriver.py +++ b/airflow/providers/google/cloud/hooks/stackdriver.py @@ -34,7 +34,7 @@ class StackdriverHook(GoogleBaseHook): - """Stackdriver Hook for connecting with Google Cloud Stackdriver""" + """Stackdriver Hook for connecting with Google Cloud Stackdriver.""" def __init__( self, diff --git a/airflow/providers/google/cloud/hooks/text_to_speech.py b/airflow/providers/google/cloud/hooks/text_to_speech.py index 4e530f18c2e62..9adf4bb0695be 100644 --- a/airflow/providers/google/cloud/hooks/text_to_speech.py +++ b/airflow/providers/google/cloud/hooks/text_to_speech.py @@ -91,7 +91,7 @@ def synthesize_speech( timeout: float | None = None, ) -> SynthesizeSpeechResponse: """ - Synthesizes text input + Synthesizes text input. :param input_data: text input to be synthesized. See more: https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.SynthesisInput diff --git a/airflow/providers/google/cloud/hooks/translate.py b/airflow/providers/google/cloud/hooks/translate.py index 6025030379000..7dd8740f6c105 100644 --- a/airflow/providers/google/cloud/hooks/translate.py +++ b/airflow/providers/google/cloud/hooks/translate.py @@ -53,7 +53,7 @@ def __init__( def get_conn(self) -> Client: """ - Retrieves connection to Cloud Translate + Retrieves connection to Cloud Translate. :return: Google Cloud Translate client object. 
""" diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py b/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py index 77cef1710977e..2f2001e4d5b03 100644 --- a/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +++ b/airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py @@ -112,7 +112,7 @@ def get_job_service_client( self, region: str | None = None, ) -> JobServiceClient: - """Returns JobServiceClient""" + """Returns JobServiceClient.""" if region and region != "global": client_options = ClientOptions(api_endpoint=f"{region}-aiplatform.googleapis.com:443") else: @@ -137,7 +137,7 @@ def get_auto_ml_tabular_training_job( training_encryption_spec_key_name: str | None = None, model_encryption_spec_key_name: str | None = None, ) -> AutoMLTabularTrainingJob: - """Returns AutoMLTabularTrainingJob object""" + """Returns AutoMLTabularTrainingJob object.""" return AutoMLTabularTrainingJob( display_name=display_name, optimization_prediction_type=optimization_prediction_type, @@ -166,7 +166,7 @@ def get_auto_ml_forecasting_training_job( training_encryption_spec_key_name: str | None = None, model_encryption_spec_key_name: str | None = None, ) -> AutoMLForecastingTrainingJob: - """Returns AutoMLForecastingTrainingJob object""" + """Returns AutoMLForecastingTrainingJob object.""" return AutoMLForecastingTrainingJob( display_name=display_name, optimization_objective=optimization_objective, @@ -193,7 +193,7 @@ def get_auto_ml_image_training_job( training_encryption_spec_key_name: str | None = None, model_encryption_spec_key_name: str | None = None, ) -> AutoMLImageTrainingJob: - """Returns AutoMLImageTrainingJob object""" + """Returns AutoMLImageTrainingJob object.""" return AutoMLImageTrainingJob( display_name=display_name, prediction_type=prediction_type, @@ -220,7 +220,7 @@ def get_auto_ml_text_training_job( training_encryption_spec_key_name: str | None = None, model_encryption_spec_key_name: str | None = None, ) -> AutoMLTextTrainingJob: - """Returns AutoMLTextTrainingJob object""" + """Returns AutoMLTextTrainingJob object.""" return AutoMLTextTrainingJob( display_name=display_name, prediction_type=prediction_type, @@ -245,7 +245,7 @@ def get_auto_ml_video_training_job( training_encryption_spec_key_name: str | None = None, model_encryption_spec_key_name: str | None = None, ) -> AutoMLVideoTrainingJob: - """Returns AutoMLVideoTrainingJob object""" + """Returns AutoMLVideoTrainingJob object.""" return AutoMLVideoTrainingJob( display_name=display_name, prediction_type=prediction_type, @@ -277,7 +277,7 @@ def wait_for_operation(self, operation: Operation, timeout: float | None = None) raise AirflowException(error) def cancel_auto_ml_job(self) -> None: - """Cancel Auto ML Job for training pipeline""" + """Cancel Auto ML Job for training pipeline.""" if self._job: self._job.cancel() diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py b/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py index a3d0d205136b1..60436dffbf6a6 100644 --- a/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +++ b/airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py @@ -85,7 +85,7 @@ def extract_batch_prediction_job_id(obj: dict) -> str: return obj["name"].rpartition("/")[-1] def cancel_batch_prediction_job(self) -> None: - """Cancel BatchPredictionJob""" + """Cancel BatchPredictionJob.""" if self._batch_prediction_job: self._batch_prediction_job.cancel() @@ -291,7 +291,7 @@ def get_batch_prediction_job( 
metadata: Sequence[tuple[str, str]] = (), ) -> BatchPredictionJob: """ - Gets a BatchPredictionJob + Gets a BatchPredictionJob. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py b/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py index 04d1347ed5875..3686005713678 100644 --- a/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +++ b/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py @@ -83,7 +83,7 @@ def get_job_service_client( self, region: str | None = None, ) -> JobServiceClient: - """Returns JobServiceClient""" + """Returns JobServiceClient.""" if region and region != "global": client_options = ClientOptions(api_endpoint=f"{region}-aiplatform.googleapis.com:443") else: @@ -116,7 +116,7 @@ def get_custom_container_training_job( model_encryption_spec_key_name: str | None = None, staging_bucket: str | None = None, ) -> CustomContainerTrainingJob: - """Returns CustomContainerTrainingJob object""" + """Returns CustomContainerTrainingJob object.""" return CustomContainerTrainingJob( display_name=display_name, container_uri=container_uri, @@ -165,7 +165,7 @@ def get_custom_python_package_training_job( model_encryption_spec_key_name: str | None = None, staging_bucket: str | None = None, ): - """Returns CustomPythonPackageTrainingJob object""" + """Returns CustomPythonPackageTrainingJob object.""" return CustomPythonPackageTrainingJob( display_name=display_name, container_uri=container_uri, @@ -215,7 +215,7 @@ def get_custom_training_job( model_encryption_spec_key_name: str | None = None, staging_bucket: str | None = None, ): - """Returns CustomTrainingJob object""" + """Returns CustomTrainingJob object.""" return CustomTrainingJob( display_name=display_name, script_path=script_path, @@ -265,7 +265,7 @@ def wait_for_operation(self, operation: Operation, timeout: float | None = None) raise AirflowException(error) def cancel_job(self) -> None: - """Cancel Job for training pipeline""" + """Cancel Job for training pipeline.""" if self._job: self._job.cancel() @@ -302,7 +302,7 @@ def _run_job( tensorboard: str | None = None, sync=True, ) -> tuple[models.Model | None, str, str]: - """Run Job for training pipeline""" + """Run Job for training pipeline.""" model = job.run( dataset=dataset, annotation_schema_uri=annotation_schema_uri, @@ -627,7 +627,7 @@ def create_custom_container_training_job( sync=True, ) -> tuple[models.Model | None, str, str]: """ - Create Custom Container Training Job + Create Custom Container Training Job. :param display_name: Required. The user-defined name of this TrainingPipeline. :param command: The command to be invoked when the container is started. @@ -985,7 +985,7 @@ def create_custom_python_package_training_job( sync=True, ) -> tuple[models.Model | None, str, str]: """ - Create Custom Python Package Training Job + Create Custom Python Package Training Job. :param display_name: Required. The user-defined name of this TrainingPipeline. :param python_package_gcs_uri: Required: GCS location of the training python package. @@ -1343,7 +1343,7 @@ def create_custom_training_job( sync=True, ) -> tuple[models.Model | None, str, str]: """ - Create Custom Training Job + Create Custom Training Job. :param display_name: Required. The user-defined name of this TrainingPipeline. :param script_path: Required. Local path to training script. 
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py b/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py index ac898d7776913..9b69d1edd9a6e 100644 --- a/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +++ b/airflow/providers/google/cloud/hooks/vertex_ai/dataset.py @@ -304,7 +304,7 @@ def list_annotations( metadata: Sequence[tuple[str, str]] = (), ) -> ListAnnotationsPager: """ - Lists Annotations belongs to a data item + Lists Annotations belongs to a data item. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py b/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py index e51ccb79163d2..dbca7148d1004 100644 --- a/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +++ b/airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py @@ -88,7 +88,7 @@ def get_hyperparameter_tuning_job_object( labels: dict[str, str] | None = None, encryption_spec_key_name: str | None = None, ) -> HyperparameterTuningJob: - """Returns HyperparameterTuningJob object""" + """Returns HyperparameterTuningJob object.""" return HyperparameterTuningJob( display_name=display_name, custom_job=custom_job, @@ -117,7 +117,7 @@ def get_custom_job_object( encryption_spec_key_name: str | None = None, staging_bucket: str | None = None, ) -> CustomJob: - """Returns CustomJob object""" + """Returns CustomJob object.""" return CustomJob( display_name=display_name, worker_pool_specs=worker_pool_specs, @@ -144,7 +144,7 @@ def wait_for_operation(self, operation: Operation, timeout: float | None = None) raise AirflowException(error) def cancel_hyperparameter_tuning_job(self) -> None: - """Cancel HyperparameterTuningJob""" + """Cancel HyperparameterTuningJob.""" if self._hyperparameter_tuning_job: self._hyperparameter_tuning_job.cancel() @@ -313,7 +313,7 @@ def get_hyperparameter_tuning_job( metadata: Sequence[tuple[str, str]] = (), ) -> types.HyperparameterTuningJob: """ - Gets a HyperparameterTuningJob + Gets a HyperparameterTuningJob. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. 
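The hyperparameter-tuning hook hunks above are thin wrappers around the Vertex AI SDK's CustomJob and HyperparameterTuningJob objects. A rough sketch of what those helpers assemble, written as plain SDK usage rather than Airflow code (the project, region, image, and parameter values below are placeholders):

from google.cloud import aiplatform
from google.cloud.aiplatform import hyperparameter_tuning as hpt

# Placeholder project/region and worker pool spec, for illustration only.
aiplatform.init(project="example-project", location="us-central1")

custom_job = aiplatform.CustomJob(
    display_name="example-custom-job",
    worker_pool_specs=[
        {
            "machine_spec": {"machine_type": "n1-standard-4"},
            "replica_count": 1,
            "container_spec": {"image_uri": "gcr.io/example-project/trainer:latest"},
        }
    ],
)

hp_job = aiplatform.HyperparameterTuningJob(
    display_name="example-hp-tuning",
    custom_job=custom_job,
    metric_spec={"loss": "minimize"},
    parameter_spec={
        "learning_rate": hpt.DoubleParameterSpec(min=1e-4, max=1e-1, scale="log"),
    },
    max_trial_count=4,
    parallel_trial_count=2,
)

# hp_job.run() would submit the job; hp_job.cancel() mirrors
# cancel_hyperparameter_tuning_job in the hook above.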
diff --git a/airflow/providers/google/cloud/hooks/video_intelligence.py b/airflow/providers/google/cloud/hooks/video_intelligence.py index fa1d2f56e5410..498c04cc91b1a 100644 --- a/airflow/providers/google/cloud/hooks/video_intelligence.py +++ b/airflow/providers/google/cloud/hooks/video_intelligence.py @@ -69,7 +69,7 @@ def __init__( self._conn: VideoIntelligenceServiceClient | None = None def get_conn(self) -> VideoIntelligenceServiceClient: - """Returns Gcp Video Intelligence Service client""" + """Returns Gcp Video Intelligence Service client.""" if not self._conn: self._conn = VideoIntelligenceServiceClient( credentials=self.get_credentials(), client_info=CLIENT_INFO diff --git a/airflow/providers/google/cloud/hooks/vision.py b/airflow/providers/google/cloud/hooks/vision.py index 2b17dd2002db1..e7cc242e4b9b0 100644 --- a/airflow/providers/google/cloud/hooks/vision.py +++ b/airflow/providers/google/cloud/hooks/vision.py @@ -179,7 +179,7 @@ def create_product_set( ) -> str: """ For the documentation see: - :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductSetOperator` + :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductSetOperator`. """ client = self.get_conn() parent = f"projects/{project_id}/locations/{location}" @@ -214,7 +214,7 @@ def get_product_set( ) -> dict: """ For the documentation see: - :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionGetProductSetOperator` + :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionGetProductSetOperator`. """ client = self.get_conn() name = ProductSearchClient.product_set_path(project_id, location, product_set_id) @@ -238,7 +238,7 @@ def update_product_set( ) -> dict: """ For the documentation see: - :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductSetOperator` + :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductSetOperator`. """ client = self.get_conn() @@ -271,7 +271,7 @@ def delete_product_set( ) -> None: """ For the documentation see: - :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductSetOperator` + :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductSetOperator`. """ client = self.get_conn() name = ProductSearchClient.product_set_path(project_id, location, product_set_id) @@ -292,7 +292,7 @@ def create_product( ): """ For the documentation see: - :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductOperator` + :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionCreateProductOperator`. """ client = self.get_conn() parent = f"projects/{project_id}/locations/{location}" @@ -330,7 +330,7 @@ def get_product( ): """ For the documentation see: - :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionGetProductOperator` + :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionGetProductOperator`. """ client = self.get_conn() name = ProductSearchClient.product_path(project_id, location, product_id) @@ -354,7 +354,7 @@ def update_product( ): """ For the documentation see: - :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductOperator` + :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionUpdateProductOperator`. 
""" client = self.get_conn() @@ -385,7 +385,7 @@ def delete_product( ) -> None: """ For the documentation see: - :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductOperator` + :class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDeleteProductOperator`. """ client = self.get_conn() name = ProductSearchClient.product_path(project_id, location, product_id) @@ -407,7 +407,7 @@ def create_reference_image( ) -> str: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionCreateReferenceImageOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionCreateReferenceImageOperator`. """ client = self.get_conn() self.log.info("Creating ReferenceImage") @@ -449,7 +449,7 @@ def delete_reference_image( ) -> None: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDeleteReferenceImageOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDeleteReferenceImageOperator`. """ client = self.get_conn() self.log.info("Deleting ReferenceImage") @@ -479,7 +479,7 @@ def add_product_to_product_set( ) -> None: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionAddProductToProductSetOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionAddProductToProductSetOperator`. """ client = self.get_conn() @@ -507,7 +507,7 @@ def remove_product_from_product_set( ) -> None: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionRemoveProductFromProductSetOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionRemoveProductFromProductSetOperator`. """ client = self.get_conn() @@ -530,7 +530,7 @@ def annotate_image( ) -> dict: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionImageAnnotateOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionImageAnnotateOperator`. """ client = self.annotator_client @@ -551,7 +551,7 @@ def batch_annotate_images( ) -> dict: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionImageAnnotateOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionImageAnnotateOperator`. """ client = self.annotator_client @@ -575,7 +575,7 @@ def text_detection( ) -> dict: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDetectTextOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDetectTextOperator`. """ client = self.annotator_client @@ -605,7 +605,7 @@ def document_text_detection( ) -> dict: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionTextDetectOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionTextDetectOperator`. """ client = self.annotator_client @@ -635,7 +635,7 @@ def label_detection( ) -> dict: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDetectImageLabelsOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDetectImageLabelsOperator`. 
""" client = self.annotator_client @@ -665,7 +665,7 @@ def safe_search_detection( ) -> dict: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDetectImageSafeSearchOperator` + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionDetectImageSafeSearchOperator`. """ client = self.annotator_client diff --git a/airflow/providers/google/cloud/links/automl.py b/airflow/providers/google/cloud/links/automl.py index f2deee6642344..cbb527cd06644 100644 --- a/airflow/providers/google/cloud/links/automl.py +++ b/airflow/providers/google/cloud/links/automl.py @@ -44,7 +44,7 @@ class AutoMLDatasetLink(BaseGoogleLink): - """Helper class for constructing AutoML Dataset link""" + """Helper class for constructing AutoML Dataset link.""" name = "AutoML Dataset" key = "automl_dataset" @@ -65,7 +65,7 @@ def persist( class AutoMLDatasetListLink(BaseGoogleLink): - """Helper class for constructing AutoML Dataset List link""" + """Helper class for constructing AutoML Dataset List link.""" name = "AutoML Dataset List" key = "automl_dataset_list" @@ -87,7 +87,7 @@ def persist( class AutoMLModelLink(BaseGoogleLink): - """Helper class for constructing AutoML Model link""" + """Helper class for constructing AutoML Model link.""" name = "AutoML Model" key = "automl_model" @@ -114,7 +114,7 @@ def persist( class AutoMLModelTrainLink(BaseGoogleLink): - """Helper class for constructing AutoML Model Train link""" + """Helper class for constructing AutoML Model Train link.""" name = "AutoML Model Train" key = "automl_model_train" @@ -138,7 +138,7 @@ def persist( class AutoMLModelPredictLink(BaseGoogleLink): - """Helper class for constructing AutoML Model Predict link""" + """Helper class for constructing AutoML Model Predict link.""" name = "AutoML Model Predict" key = "automl_model_predict" diff --git a/airflow/providers/google/cloud/links/base.py b/airflow/providers/google/cloud/links/base.py index c898f9890efe5..f51588a7f4ee6 100644 --- a/airflow/providers/google/cloud/links/base.py +++ b/airflow/providers/google/cloud/links/base.py @@ -30,7 +30,10 @@ class BaseGoogleLink(BaseOperatorLink): - """:meta private:""" + """Base class for all Google links. 
+ + :meta private: + """ name: ClassVar[str] key: ClassVar[str] diff --git a/airflow/providers/google/cloud/links/bigquery.py b/airflow/providers/google/cloud/links/bigquery.py index 8c8795c2f7de7..0c22cfb9a9b49 100644 --- a/airflow/providers/google/cloud/links/bigquery.py +++ b/airflow/providers/google/cloud/links/bigquery.py @@ -37,7 +37,7 @@ class BigQueryDatasetLink(BaseGoogleLink): - """Helper class for constructing BigQuery Dataset Link""" + """Helper class for constructing BigQuery Dataset Link.""" name = "BigQuery Dataset" key = "bigquery_dataset" @@ -58,7 +58,7 @@ def persist( class BigQueryTableLink(BaseGoogleLink): - """Helper class for constructing BigQuery Table Link""" + """Helper class for constructing BigQuery Table Link.""" name = "BigQuery Table" key = "bigquery_table" diff --git a/airflow/providers/google/cloud/links/bigquery_dts.py b/airflow/providers/google/cloud/links/bigquery_dts.py index a7ebde5dd5b97..134982ea9cadc 100644 --- a/airflow/providers/google/cloud/links/bigquery_dts.py +++ b/airflow/providers/google/cloud/links/bigquery_dts.py @@ -31,7 +31,7 @@ class BigQueryDataTransferConfigLink(BaseGoogleLink): - """Helper class for constructing BigQuery Data Transfer Config Link""" + """Helper class for constructing BigQuery Data Transfer Config Link.""" name = "BigQuery Data Transfer Config" key = "bigquery_dts_config" diff --git a/airflow/providers/google/cloud/links/bigtable.py b/airflow/providers/google/cloud/links/bigtable.py index 962339a3256c1..47805ba348896 100644 --- a/airflow/providers/google/cloud/links/bigtable.py +++ b/airflow/providers/google/cloud/links/bigtable.py @@ -32,7 +32,7 @@ class BigtableInstanceLink(BaseGoogleLink): - """Helper class for constructing Bigtable Instance link""" + """Helper class for constructing Bigtable Instance link.""" name = "Bigtable Instance" key = "instance_key" @@ -54,7 +54,7 @@ def persist( class BigtableClusterLink(BaseGoogleLink): - """Helper class for constructing Bigtable Cluster link""" + """Helper class for constructing Bigtable Cluster link.""" name = "Bigtable Cluster" key = "cluster_key" @@ -77,7 +77,7 @@ def persist( class BigtableTablesLink(BaseGoogleLink): - """Helper class for constructing Bigtable Tables link""" + """Helper class for constructing Bigtable Tables link.""" name = "Bigtable Tables" key = "tables_key" diff --git a/airflow/providers/google/cloud/links/cloud_build.py b/airflow/providers/google/cloud/links/cloud_build.py index ceae038289ebd..b855dba89732b 100644 --- a/airflow/providers/google/cloud/links/cloud_build.py +++ b/airflow/providers/google/cloud/links/cloud_build.py @@ -37,7 +37,7 @@ class CloudBuildLink(BaseGoogleLink): - """Helper class for constructing Cloud Build link""" + """Helper class for constructing Cloud Build link.""" name = "Cloud Build Details" key = "cloud_build_key" @@ -63,7 +63,7 @@ def persist( class CloudBuildListLink(BaseGoogleLink): - """Helper class for constructing Cloud Build List link""" + """Helper class for constructing Cloud Build List link.""" name = "Cloud Builds List" key = "cloud_build_list_key" @@ -87,7 +87,7 @@ def persist( class CloudBuildTriggersListLink(BaseGoogleLink): - """Helper class for constructing Cloud Build Triggers List link""" + """Helper class for constructing Cloud Build Triggers List link.""" name = "Cloud Build Triggers List" key = "cloud_build_triggers_list_key" @@ -111,7 +111,7 @@ def persist( class CloudBuildTriggerDetailsLink(BaseGoogleLink): - """Helper class for constructing Cloud Build Trigger Details link""" + """Helper class 
for constructing Cloud Build Trigger Details link.""" name = "Cloud Build Triggers Details" key = "cloud_build_triggers_details_key" diff --git a/airflow/providers/google/cloud/links/cloud_functions.py b/airflow/providers/google/cloud/links/cloud_functions.py index 1cb8349607c93..078b2b9bb5147 100644 --- a/airflow/providers/google/cloud/links/cloud_functions.py +++ b/airflow/providers/google/cloud/links/cloud_functions.py @@ -38,7 +38,7 @@ class CloudFunctionsDetailsLink(BaseGoogleLink): - """Helper class for constructing Cloud Functions Details Link""" + """Helper class for constructing Cloud Functions Details Link.""" name = "Cloud Functions Details" key = "cloud_functions_details" @@ -61,7 +61,7 @@ def persist( class CloudFunctionsListLink(BaseGoogleLink): - """Helper class for constructing Cloud Functions Details Link""" + """Helper class for constructing Cloud Functions Details Link.""" name = "Cloud Functions List" key = "cloud_functions_list" diff --git a/airflow/providers/google/cloud/links/cloud_memorystore.py b/airflow/providers/google/cloud/links/cloud_memorystore.py index d91c4bdac2e56..8c43312fc5599 100644 --- a/airflow/providers/google/cloud/links/cloud_memorystore.py +++ b/airflow/providers/google/cloud/links/cloud_memorystore.py @@ -38,7 +38,7 @@ class MemcachedInstanceDetailsLink(BaseGoogleLink): - """Helper class for constructing Memorystore Memcached Instance Link""" + """Helper class for constructing Memorystore Memcached Instance Link.""" name = "Memorystore Memcached Instance" key = "memcached_instance" @@ -60,7 +60,7 @@ def persist( class MemcachedInstanceListLink(BaseGoogleLink): - """Helper class for constructing Memorystore Memcached List of Instances Link""" + """Helper class for constructing Memorystore Memcached List of Instances Link.""" name = "Memorystore Memcached List of Instances" key = "memcached_instances" @@ -80,7 +80,7 @@ def persist( class RedisInstanceDetailsLink(BaseGoogleLink): - """Helper class for constructing Memorystore Redis Instance Link""" + """Helper class for constructing Memorystore Redis Instance Link.""" name = "Memorystore Redis Instance" key = "redis_instance" @@ -102,7 +102,7 @@ def persist( class RedisInstanceListLink(BaseGoogleLink): - """Helper class for constructing Memorystore Redis List of Instances Link""" + """Helper class for constructing Memorystore Redis List of Instances Link.""" name = "Memorystore Redis List of Instances" key = "redis_instances" diff --git a/airflow/providers/google/cloud/links/cloud_sql.py b/airflow/providers/google/cloud/links/cloud_sql.py index 1b8f8028d08f4..6b21b0885d5ff 100644 --- a/airflow/providers/google/cloud/links/cloud_sql.py +++ b/airflow/providers/google/cloud/links/cloud_sql.py @@ -35,7 +35,7 @@ class CloudSQLInstanceLink(BaseGoogleLink): - """Helper class for constructing Cloud SQL Instance Link""" + """Helper class for constructing Cloud SQL Instance Link.""" name = "Cloud SQL Instance" key = "cloud_sql_instance" @@ -56,7 +56,7 @@ def persist( class CloudSQLInstanceDatabaseLink(BaseGoogleLink): - """Helper class for constructing Cloud SQL Instance Database Link""" + """Helper class for constructing Cloud SQL Instance Database Link.""" name = "Cloud SQL Instance Database" key = "cloud_sql_instance_database" diff --git a/airflow/providers/google/cloud/links/cloud_storage_transfer.py b/airflow/providers/google/cloud/links/cloud_storage_transfer.py index 4a7db25b64bf5..a289d8a54f7de 100644 --- a/airflow/providers/google/cloud/links/cloud_storage_transfer.py +++ 
b/airflow/providers/google/cloud/links/cloud_storage_transfer.py @@ -42,7 +42,7 @@ class CloudStorageTransferLinkHelper: - """Helper class for Storage Transfer links""" + """Helper class for Storage Transfer links.""" @staticmethod def extract_parts(operation_name: str | None): @@ -54,7 +54,7 @@ def extract_parts(operation_name: str | None): class CloudStorageTransferListLink(BaseGoogleLink): - """Helper class for constructing Cloud Storage Transfer Link""" + """Helper class for constructing Cloud Storage Transfer Link.""" name = "Cloud Storage Transfer" key = "cloud_storage_transfer" @@ -74,7 +74,7 @@ def persist( class CloudStorageTransferJobLink(BaseGoogleLink): - """Helper class for constructing Storage Transfer Job Link""" + """Helper class for constructing Storage Transfer Job Link.""" name = "Cloud Storage Transfer Job" key = "cloud_storage_transfer_job" @@ -101,7 +101,7 @@ def persist( class CloudStorageTransferDetailsLink(BaseGoogleLink): - """Helper class for constructing Cloud Storage Transfer Operation Link""" + """Helper class for constructing Cloud Storage Transfer Operation Link.""" name = "Cloud Storage Transfer Details" key = "cloud_storage_transfer_details" diff --git a/airflow/providers/google/cloud/links/cloud_tasks.py b/airflow/providers/google/cloud/links/cloud_tasks.py index 738b119c4ebc2..55f3979bb5b9d 100644 --- a/airflow/providers/google/cloud/links/cloud_tasks.py +++ b/airflow/providers/google/cloud/links/cloud_tasks.py @@ -32,7 +32,7 @@ class CloudTasksQueueLink(BaseGoogleLink): - """Helper class for constructing Cloud Task Queue Link""" + """Helper class for constructing Cloud Task Queue Link.""" name = "Cloud Tasks Queue" key = "cloud_task_queue" @@ -41,8 +41,9 @@ class CloudTasksQueueLink(BaseGoogleLink): @staticmethod def extract_parts(queue_name: str | None): """ - Extract project_id, location and queue id from queue name: - projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID + Extract project_id, location and queue id from queue name. + + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID. 
""" if not queue_name: return "", "", "" @@ -64,7 +65,7 @@ def persist( class CloudTasksLink(BaseGoogleLink): - """Helper class for constructing Cloud Task Link""" + """Helper class for constructing Cloud Task Link.""" name = "Cloud Tasks" key = "cloud_task" diff --git a/airflow/providers/google/cloud/links/compute.py b/airflow/providers/google/cloud/links/compute.py index c2f15b273004b..e754f7085825a 100644 --- a/airflow/providers/google/cloud/links/compute.py +++ b/airflow/providers/google/cloud/links/compute.py @@ -37,7 +37,7 @@ class ComputeInstanceDetailsLink(BaseGoogleLink): - """Helper class for constructing Compute Instance details Link""" + """Helper class for constructing Compute Instance details Link.""" name = "Compute Instance details" key = "compute_instance_details" @@ -63,7 +63,7 @@ def persist( class ComputeInstanceTemplateDetailsLink(BaseGoogleLink): - """Helper class for constructing Compute Instance Template details Link""" + """Helper class for constructing Compute Instance Template details Link.""" name = "Compute Instance Template details" key = "compute_instance_template_details" @@ -87,7 +87,7 @@ def persist( class ComputeInstanceGroupManagerDetailsLink(BaseGoogleLink): - """Helper class for constructing Compute Instance Group Manager details Link""" + """Helper class for constructing Compute Instance Group Manager details Link.""" name = "Compute Instance Group Manager" key = "compute_instance_group_manager_details" diff --git a/airflow/providers/google/cloud/links/data_loss_prevention.py b/airflow/providers/google/cloud/links/data_loss_prevention.py index 46c824f9074ab..4bc2f0e23460f 100644 --- a/airflow/providers/google/cloud/links/data_loss_prevention.py +++ b/airflow/providers/google/cloud/links/data_loss_prevention.py @@ -67,7 +67,7 @@ class CloudDLPDeidentifyTemplatesListLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Deidentify Templates List" key = "cloud_dlp_deidentify_templates_list_key" @@ -89,7 +89,7 @@ def persist( class CloudDLPDeidentifyTemplateDetailsLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Deidentify Template Details" key = "cloud_dlp_deidentify_template_details_key" @@ -113,7 +113,7 @@ def persist( class CloudDLPJobTriggersListLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Job Triggers List" key = "cloud_dlp_job_triggers_list_key" @@ -135,7 +135,7 @@ def persist( class CloudDLPJobTriggerDetailsLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Job Triggers Details" key = "cloud_dlp_job_trigger_details_key" @@ -159,7 +159,7 @@ def persist( class CloudDLPJobsListLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Jobs List" key = "cloud_dlp_jobs_list_key" @@ -181,7 +181,7 @@ def persist( class CloudDLPJobDetailsLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Job 
Details" key = "cloud_dlp_job_details_key" @@ -205,7 +205,7 @@ def persist( class CloudDLPInspectTemplatesListLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Inspect Templates List" key = "cloud_dlp_inspect_templates_list_key" @@ -227,7 +227,7 @@ def persist( class CloudDLPInspectTemplateDetailsLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Inspect Template Details" key = "cloud_dlp_inspect_template_details_key" @@ -251,7 +251,7 @@ def persist( class CloudDLPInfoTypesListLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Info Types List" key = "cloud_dlp_info_types_list_key" @@ -273,7 +273,7 @@ def persist( class CloudDLPInfoTypeDetailsLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Info Type Details" key = "cloud_dlp_info_type_details_key" @@ -297,7 +297,7 @@ def persist( class CloudDLPPossibleInfoTypesListLink(BaseGoogleLink): - """Helper class for constructing Cloud Data Loss Prevention link""" + """Helper class for constructing Cloud Data Loss Prevention link.""" name = "Cloud DLP Possible Info Types List" key = "cloud_dlp_possible_info_types_list_key" diff --git a/airflow/providers/google/cloud/links/datacatalog.py b/airflow/providers/google/cloud/links/datacatalog.py index a5e4fcef776ad..7547d6152adfd 100644 --- a/airflow/providers/google/cloud/links/datacatalog.py +++ b/airflow/providers/google/cloud/links/datacatalog.py @@ -43,7 +43,7 @@ class DataCatalogEntryGroupLink(BaseGoogleLink): - """Helper class for constructing Data Catalog Entry Group Link""" + """Helper class for constructing Data Catalog Entry Group Link.""" name = "Data Catalog Entry Group" key = "data_catalog_entry_group" @@ -65,7 +65,7 @@ def persist( class DataCatalogEntryLink(BaseGoogleLink): - """Helper class for constructing Data Catalog Entry Link""" + """Helper class for constructing Data Catalog Entry Link.""" name = "Data Catalog Entry" key = "data_catalog_entry" @@ -93,7 +93,7 @@ def persist( class DataCatalogTagTemplateLink(BaseGoogleLink): - """Helper class for constructing Data Catalog Tag Template Link""" + """Helper class for constructing Data Catalog Tag Template Link.""" name = "Data Catalog Tag Template" key = "data_catalog_tag_template" diff --git a/airflow/providers/google/cloud/links/dataflow.py b/airflow/providers/google/cloud/links/dataflow.py index b62e29041dd3b..e2422b2653c64 100644 --- a/airflow/providers/google/cloud/links/dataflow.py +++ b/airflow/providers/google/cloud/links/dataflow.py @@ -31,7 +31,7 @@ class DataflowJobLink(BaseGoogleLink): - """Helper class for constructing Dataflow Job Link""" + """Helper class for constructing Dataflow Job Link.""" name = "Dataflow Job" key = "dataflow_job_config" diff --git a/airflow/providers/google/cloud/links/dataform.py b/airflow/providers/google/cloud/links/dataform.py index 5e8e8bd658f9b..5744f22e1d5f4 100644 --- a/airflow/providers/google/cloud/links/dataform.py +++ b/airflow/providers/google/cloud/links/dataform.py @@ -46,7 +46,7 @@ class DataformWorkflowInvocationLink(BaseGoogleLink): - """Helper class for constructing 
Dataflow Job Link""" + """Helper class for constructing Dataflow Job Link.""" name = "Dataform Workflow Invocation" key = "dataform_workflow_invocation_config" diff --git a/airflow/providers/google/cloud/links/datafusion.py b/airflow/providers/google/cloud/links/datafusion.py index fae57f97fd36f..2410ec8feacac 100644 --- a/airflow/providers/google/cloud/links/datafusion.py +++ b/airflow/providers/google/cloud/links/datafusion.py @@ -37,7 +37,7 @@ class BaseGoogleLink(BaseOperatorLink): """ Override the base logic to prevent adding 'https://console.cloud.google.com' - in front of every link where uri is used + in front of every link where uri is used. """ name: ClassVar[str] @@ -59,7 +59,7 @@ def get_link( class DataFusionInstanceLink(BaseGoogleLink): - """Helper class for constructing Data Fusion Instance link""" + """Helper class for constructing Data Fusion Instance link.""" name = "Data Fusion Instance" key = "instance_conf" @@ -85,7 +85,7 @@ def persist( class DataFusionPipelineLink(BaseGoogleLink): - """Helper class for constructing Data Fusion Pipeline link""" + """Helper class for constructing Data Fusion Pipeline link.""" name = "Data Fusion Pipeline" key = "pipeline_conf" @@ -109,7 +109,7 @@ def persist( class DataFusionPipelinesLink(BaseGoogleLink): - """Helper class for constructing list of Data Fusion Pipelines link""" + """Helper class for constructing list of Data Fusion Pipelines link.""" name = "Data Fusion Pipelines List" key = "pipelines_conf" diff --git a/airflow/providers/google/cloud/links/dataplex.py b/airflow/providers/google/cloud/links/dataplex.py index dcf3c8755848a..1fe6af05f2bde 100644 --- a/airflow/providers/google/cloud/links/dataplex.py +++ b/airflow/providers/google/cloud/links/dataplex.py @@ -35,7 +35,7 @@ class DataplexTaskLink(BaseGoogleLink): - """Helper class for constructing Dataplex Task link""" + """Helper class for constructing Dataplex Task link.""" name = "Dataplex Task" key = "task_conf" @@ -59,7 +59,7 @@ def persist( class DataplexTasksLink(BaseGoogleLink): - """Helper class for constructing Dataplex Tasks link""" + """Helper class for constructing Dataplex Tasks link.""" name = "Dataplex Tasks" key = "tasks_conf" @@ -82,7 +82,7 @@ def persist( class DataplexLakeLink(BaseGoogleLink): - """Helper class for constructing Dataplex Lake link""" + """Helper class for constructing Dataplex Lake link.""" name = "Dataplex Lake" key = "dataplex_lake_key" diff --git a/airflow/providers/google/cloud/links/dataproc.py b/airflow/providers/google/cloud/links/dataproc.py index 4376d41cb0fc0..d560d2a5ee0cc 100644 --- a/airflow/providers/google/cloud/links/dataproc.py +++ b/airflow/providers/google/cloud/links/dataproc.py @@ -42,7 +42,7 @@ class DataprocLink(BaseOperatorLink): - """Helper class for constructing Dataproc resource link""" + """Helper class for constructing Dataproc resource link.""" name = "Dataproc resource" key = "conf" @@ -82,7 +82,7 @@ def get_link( class DataprocListLink(BaseOperatorLink): - """Helper class for constructing list of Dataproc resources link""" + """Helper class for constructing list of Dataproc resources link.""" name = "Dataproc resources" key = "list_conf" diff --git a/airflow/providers/google/cloud/links/datastore.py b/airflow/providers/google/cloud/links/datastore.py index 08f57862312e5..a75893d793e40 100644 --- a/airflow/providers/google/cloud/links/datastore.py +++ b/airflow/providers/google/cloud/links/datastore.py @@ -30,7 +30,7 @@ class CloudDatastoreImportExportLink(BaseGoogleLink): - """Helper class for constructing 
Cloud Datastore Import/Export Link""" + """Helper class for constructing Cloud Datastore Import/Export Link.""" name = "Import/Export Page" key = "import_export_conf" @@ -51,7 +51,7 @@ def persist( class CloudDatastoreEntitiesLink(BaseGoogleLink): - """Helper class for constructing Cloud Datastore Entities Link""" + """Helper class for constructing Cloud Datastore Entities Link.""" name = "Entities" key = "entities_conf" diff --git a/airflow/providers/google/cloud/links/kubernetes_engine.py b/airflow/providers/google/cloud/links/kubernetes_engine.py index 1beb4a0a84f4d..0703e2eb2cb34 100644 --- a/airflow/providers/google/cloud/links/kubernetes_engine.py +++ b/airflow/providers/google/cloud/links/kubernetes_engine.py @@ -37,7 +37,7 @@ class KubernetesEngineClusterLink(BaseGoogleLink): - """Helper class for constructing Kubernetes Engine Cluster Link""" + """Helper class for constructing Kubernetes Engine Cluster Link.""" name = "Kubernetes Cluster" key = "kubernetes_cluster_conf" @@ -60,7 +60,7 @@ def persist(context: Context, task_instance, cluster: dict | Cluster | None): class KubernetesEnginePodLink(BaseGoogleLink): - """Helper class for constructing Kubernetes Engine Pod Link""" + """Helper class for constructing Kubernetes Engine Pod Link.""" name = "Kubernetes Pod" key = "kubernetes_pod_conf" diff --git a/airflow/providers/google/cloud/links/life_sciences.py b/airflow/providers/google/cloud/links/life_sciences.py index 50d783da32506..948142023777d 100644 --- a/airflow/providers/google/cloud/links/life_sciences.py +++ b/airflow/providers/google/cloud/links/life_sciences.py @@ -28,7 +28,7 @@ class LifeSciencesLink(BaseGoogleLink): - """Helper class for constructing Life Sciences List link""" + """Helper class for constructing Life Sciences List link.""" name = "Life Sciences" key = "lifesciences_key" diff --git a/airflow/providers/google/cloud/links/mlengine.py b/airflow/providers/google/cloud/links/mlengine.py index bbfe0cc5385ce..20e2a447467dc 100644 --- a/airflow/providers/google/cloud/links/mlengine.py +++ b/airflow/providers/google/cloud/links/mlengine.py @@ -37,7 +37,7 @@ class MLEngineModelLink(BaseGoogleLink): - """Helper class for constructing ML Engine link""" + """Helper class for constructing ML Engine link.""" name = "MLEngine Model" key = "ml_engine_model" @@ -58,7 +58,7 @@ def persist( class MLEngineModelsListLink(BaseGoogleLink): - """Helper class for constructing ML Engine link""" + """Helper class for constructing ML Engine link.""" name = "MLEngine Models List" key = "ml_engine_models_list" @@ -78,7 +78,7 @@ def persist( class MLEngineJobDetailsLink(BaseGoogleLink): - """Helper class for constructing ML Engine link""" + """Helper class for constructing ML Engine link.""" name = "MLEngine Job Details" key = "ml_engine_job_details" @@ -99,7 +99,7 @@ def persist( class MLEngineModelVersionDetailsLink(BaseGoogleLink): - """Helper class for constructing ML Engine link""" + """Helper class for constructing ML Engine link.""" name = "MLEngine Version Details" key = "ml_engine_version_details" @@ -121,7 +121,7 @@ def persist( class MLEngineJobSListLink(BaseGoogleLink): - """Helper class for constructing ML Engine link""" + """Helper class for constructing ML Engine link.""" name = "MLEngine Jobs List" key = "ml_engine_jobs_list" diff --git a/airflow/providers/google/cloud/links/pubsub.py b/airflow/providers/google/cloud/links/pubsub.py index 83de5ba00769f..6ec47e7ea7012 100644 --- a/airflow/providers/google/cloud/links/pubsub.py +++ 
b/airflow/providers/google/cloud/links/pubsub.py @@ -32,7 +32,7 @@ class PubSubTopicLink(BaseGoogleLink): - """Helper class for constructing Pub/Sub Topic Link""" + """Helper class for constructing Pub/Sub Topic Link.""" name = "Pub/Sub Topic" key = "pubsub_topic" @@ -53,7 +53,7 @@ def persist( class PubSubSubscriptionLink(BaseGoogleLink): - """Helper class for constructing Pub/Sub Subscription Link""" + """Helper class for constructing Pub/Sub Subscription Link.""" name = "Pub/Sub Subscription" key = "pubsub_subscription" diff --git a/airflow/providers/google/cloud/links/spanner.py b/airflow/providers/google/cloud/links/spanner.py index 0306e46233c93..c83facb140750 100644 --- a/airflow/providers/google/cloud/links/spanner.py +++ b/airflow/providers/google/cloud/links/spanner.py @@ -34,7 +34,7 @@ class SpannerInstanceLink(BaseGoogleLink): - """Helper class for constructing Spanner Instance Link""" + """Helper class for constructing Spanner Instance Link.""" name = "Spanner Instance" key = "spanner_instance" @@ -55,7 +55,7 @@ def persist( class SpannerDatabaseLink(BaseGoogleLink): - """Helper class for constructing Spanner Database Link""" + """Helper class for constructing Spanner Database Link.""" name = "Spanner Database" key = "spanner_database" diff --git a/airflow/providers/google/cloud/links/stackdriver.py b/airflow/providers/google/cloud/links/stackdriver.py index 1dec31ccfed2b..4ca8580e99a89 100644 --- a/airflow/providers/google/cloud/links/stackdriver.py +++ b/airflow/providers/google/cloud/links/stackdriver.py @@ -32,7 +32,7 @@ class StackdriverNotificationsLink(BaseGoogleLink): - """Helper class for constructing Stackdriver Notifications Link""" + """Helper class for constructing Stackdriver Notifications Link.""" name = "Cloud Monitoring Notifications" key = "stackdriver_notifications" @@ -52,7 +52,7 @@ def persist( class StackdriverPoliciesLink(BaseGoogleLink): - """Helper class for constructing Stackdriver Policies Link""" + """Helper class for constructing Stackdriver Policies Link.""" name = "Cloud Monitoring Policies" key = "stackdriver_policies" diff --git a/airflow/providers/google/cloud/links/vertex_ai.py b/airflow/providers/google/cloud/links/vertex_ai.py index a251305c45e7d..c05c36ea6ce72 100644 --- a/airflow/providers/google/cloud/links/vertex_ai.py +++ b/airflow/providers/google/cloud/links/vertex_ai.py @@ -52,7 +52,7 @@ class VertexAIModelLink(BaseGoogleLink): - """Helper class for constructing Vertex AI Model link""" + """Helper class for constructing Vertex AI Model link.""" name = "Vertex AI Model" key = "model_conf" @@ -76,7 +76,7 @@ def persist( class VertexAIModelListLink(BaseGoogleLink): - """Helper class for constructing Vertex AI Models Link""" + """Helper class for constructing Vertex AI Models Link.""" name = "Model List" key = "models_conf" @@ -97,7 +97,7 @@ def persist( class VertexAIModelExportLink(BaseGoogleLink): - """Helper class for constructing Vertex AI Model Export Link""" + """Helper class for constructing Vertex AI Model Export Link.""" name = "Export Model" key = "export_conf" @@ -125,7 +125,7 @@ def persist( class VertexAITrainingLink(BaseGoogleLink): - """Helper class for constructing Vertex AI Training link""" + """Helper class for constructing Vertex AI Training link.""" name = "Vertex AI Training" key = "training_conf" @@ -149,7 +149,7 @@ def persist( class VertexAITrainingPipelinesLink(BaseGoogleLink): - """Helper class for constructing Vertex AI Training Pipelines link""" + """Helper class for constructing Vertex AI Training Pipelines 
link.""" name = "Vertex AI Training Pipelines" key = "pipelines_conf" @@ -170,7 +170,7 @@ def persist( class VertexAIDatasetLink(BaseGoogleLink): - """Helper class for constructing Vertex AI Dataset link""" + """Helper class for constructing Vertex AI Dataset link.""" name = "Dataset" key = "dataset_conf" @@ -190,7 +190,7 @@ def persist(context: Context, task_instance, dataset_id: str): class VertexAIDatasetListLink(BaseGoogleLink): - """Helper class for constructing Vertex AI Datasets Link""" + """Helper class for constructing Vertex AI Datasets Link.""" name = "Dataset List" key = "datasets_conf" @@ -211,7 +211,7 @@ def persist( class VertexAIHyperparameterTuningJobListLink(BaseGoogleLink): - """Helper class for constructing Vertex AI HyperparameterTuningJobs Link""" + """Helper class for constructing Vertex AI HyperparameterTuningJobs Link.""" name = "Hyperparameter Tuning Job List" key = "hyperparameter_tuning_jobs_conf" @@ -232,7 +232,7 @@ def persist( class VertexAIBatchPredictionJobLink(BaseGoogleLink): - """Helper class for constructing Vertex AI BatchPredictionJob link""" + """Helper class for constructing Vertex AI BatchPredictionJob link.""" name = "Batch Prediction Job" key = "batch_prediction_job_conf" @@ -256,7 +256,7 @@ def persist( class VertexAIBatchPredictionJobListLink(BaseGoogleLink): - """Helper class for constructing Vertex AI BatchPredictionJobList link""" + """Helper class for constructing Vertex AI BatchPredictionJobList link.""" name = "Batch Prediction Job List" key = "batch_prediction_jobs_conf" @@ -277,7 +277,7 @@ def persist( class VertexAIEndpointLink(BaseGoogleLink): - """Helper class for constructing Vertex AI Endpoint link""" + """Helper class for constructing Vertex AI Endpoint link.""" name = "Endpoint" key = "endpoint_conf" @@ -301,7 +301,7 @@ def persist( class VertexAIEndpointListLink(BaseGoogleLink): - """Helper class for constructing Vertex AI EndpointList link""" + """Helper class for constructing Vertex AI EndpointList link.""" name = "Endpoint List" key = "endpoints_conf" diff --git a/airflow/providers/google/cloud/links/workflows.py b/airflow/providers/google/cloud/links/workflows.py index 8563739a99f7f..160539579cb26 100644 --- a/airflow/providers/google/cloud/links/workflows.py +++ b/airflow/providers/google/cloud/links/workflows.py @@ -36,7 +36,7 @@ class WorkflowsWorkflowDetailsLink(BaseGoogleLink): - """Helper class for constructing Workflow details Link""" + """Helper class for constructing Workflow details Link.""" name = "Workflow details" key = "workflow_details" @@ -58,7 +58,7 @@ def persist( class WorkflowsListOfWorkflowsLink(BaseGoogleLink): - """Helper class for constructing list of Workflows Link""" + """Helper class for constructing list of Workflows Link.""" name = "List of workflows" key = "list_of_workflows" @@ -78,7 +78,7 @@ def persist( class WorkflowsExecutionLink(BaseGoogleLink): - """Helper class for constructing Workflows Execution Link""" + """Helper class for constructing Workflows Execution Link.""" name = "Workflow Execution" key = "workflow_execution" diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py index c8852a6e8ca3f..0c3f8a27c73c2 100644 --- a/airflow/providers/google/cloud/log/gcs_task_handler.py +++ b/airflow/providers/google/cloud/log/gcs_task_handler.py @@ -46,8 +46,9 @@ def get_default_delete_local_copy(): - """Load delete_local_logs conf if Airflow version > 2.6 and return False if not - TODO: delete this function when min airflow 
version >= 2.6 + """Load delete_local_logs conf if Airflow version > 2.6 and return False if not. + + TODO: delete this function when min airflow version >= 2.6. """ from airflow.version import version diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/airflow/providers/google/cloud/log/stackdriver_task_handler.py index a18a34a97b027..d1e95bc7b0ade 100644 --- a/airflow/providers/google/cloud/log/stackdriver_task_handler.py +++ b/airflow/providers/google/cloud/log/stackdriver_task_handler.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""Handler that integrates with Stackdriver""" +"""Handler that integrates with Stackdriver.""" from __future__ import annotations import logging @@ -119,7 +119,7 @@ def _credentials_and_project(self) -> tuple[Credentials, str]: @property def _client(self) -> gcp_logging.Client: - """The Cloud Library API client""" + """The Cloud Library API client.""" credentials, project = self._credentials_and_project client = gcp_logging.Client( credentials=credentials, @@ -140,13 +140,12 @@ def _logging_service_client(self) -> LoggingServiceV2Client: @cached_property def _transport(self) -> Transport: - """Object responsible for sending data to Stackdriver""" + """Object responsible for sending data to Stackdriver.""" # The Transport object is badly defined (no init) but in the docs client/name as constructor # arguments are a requirement for any class that derives from Transport class, hence ignore: return self.transport_type(self._client, self.name) # type: ignore[call-arg] def _get_labels(self, task_instance=None): - """When""" if task_instance: ti_labels = self._task_instance_to_labels(task_instance) else: @@ -179,7 +178,7 @@ def emit(self, record: logging.LogRecord) -> None: def set_context(self, task_instance: TaskInstance) -> None: """ - Configures the logger to add information with information about the current task + Configures the logger to add information with information about the current task. :param task_instance: Currently executed task """ @@ -350,8 +349,9 @@ def _resource_path(self): def get_external_log_url(self, task_instance: TaskInstance, try_number: int) -> str: """ Creates an address for an external log collecting service. + :param task_instance: task instance object - :param try_number: task instance try_number to read logs from. 
+ :param try_number: task instance try_number to read logs from :return: URL to the external log collection service """ _, project_id = self._credentials_and_project diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py index 467e376447335..30964f4af8798 100644 --- a/airflow/providers/google/cloud/operators/bigquery.py +++ b/airflow/providers/google/cloud/operators/bigquery.py @@ -62,7 +62,7 @@ class BigQueryUIColors(enum.Enum): - """Hex colors for BigQuery operators""" + """Hex colors for BigQuery operators.""" CHECK = "#C0D7FF" QUERY = "#A1BBFF" @@ -71,7 +71,7 @@ class BigQueryUIColors(enum.Enum): class IfExistAction(enum.Enum): - """Action to take if the resource exist""" + """Action to take if the resource exist.""" IGNORE = "ignore" LOG = "log" @@ -121,7 +121,7 @@ def get_link( class _BigQueryDbHookMixin: def get_db_hook(self: BigQueryCheckOperator) -> BigQueryHook: # type:ignore[misc] - """Get BigQuery DB Hook""" + """Get BigQuery DB Hook.""" return BigQueryHook( gcp_conn_id=self.gcp_conn_id, use_legacy_sql=self.use_legacy_sql, @@ -886,7 +886,8 @@ def _submit_job( def generate_query(self, hook: BigQueryHook) -> str: """ Generate a select query if selected fields are given or with * - for the given dataset and table id + for the given dataset and table id. + :param hook BigQuery Hook """ query = "select " @@ -1066,7 +1067,7 @@ class BigQueryExecuteQueryOperator(GoogleCloudBaseOperator): @property def operator_extra_links(self): - """Return operator extra links""" + """Return operator extra links.""" if isinstance(self.sql, str): return (BigQueryConsoleLink(),) return (BigQueryConsoleIndexableLink(i) for i, _ in enumerate(self.sql)) @@ -1725,6 +1726,7 @@ def execute(self, context: Context) -> None: class BigQueryDeleteDatasetOperator(GoogleCloudBaseOperator): """ This operator deletes an existing dataset from your Project in Big query. + https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/delete .. seealso:: @@ -1799,6 +1801,7 @@ def execute(self, context: Context) -> None: class BigQueryCreateEmptyDatasetOperator(GoogleCloudBaseOperator): """ This operator is used to create new dataset for your Project in BigQuery. + https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource .. seealso:: @@ -2272,7 +2275,7 @@ def execute(self, context: Context): class BigQueryDeleteTableOperator(GoogleCloudBaseOperator): """ - Deletes BigQuery tables + Deletes BigQuery tables. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -2331,7 +2334,7 @@ def execute(self, context: Context) -> None: class BigQueryUpsertTableOperator(GoogleCloudBaseOperator): """ - Upsert BigQuery table + Upsert BigQuery table. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -2513,6 +2516,7 @@ def execute(self, context: Context): class BigQueryInsertJobOperator(GoogleCloudBaseOperator): """ Executes a BigQuery job. Waits for the job to complete and returns job id. 
+ This operator work in the following way: - it calculates a unique hash of the job using job's configuration or uuid if ``force_rerun`` is True diff --git a/airflow/providers/google/cloud/operators/cloud_build.py b/airflow/providers/google/cloud/operators/cloud_build.py index 88d9b434bb544..4242f561c1003 100644 --- a/airflow/providers/google/cloud/operators/cloud_build.py +++ b/airflow/providers/google/cloud/operators/cloud_build.py @@ -1044,7 +1044,7 @@ def _reformat_storage_source(self) -> None: def process_body(self) -> Build: """ - Processes the body passed in the constructor + Processes the body passed in the constructor. :return: the body. """ @@ -1056,7 +1056,7 @@ def process_body(self) -> Build: @staticmethod def _convert_repo_url_to_dict(source: str) -> dict[str, Any]: """ - Convert url to repository in Google Cloud Source to a format supported by the API + Convert url to repository in Google Cloud Source to a format supported by the API. Example valid input: @@ -1090,7 +1090,7 @@ def _convert_repo_url_to_dict(source: str) -> dict[str, Any]: @staticmethod def _convert_storage_url_to_dict(storage_url: str) -> dict[str, Any]: """ - Convert url to object in Google Cloud Storage to a format supported by the API + Convert url to object in Google Cloud Storage to a format supported by the API. Example valid input: diff --git a/airflow/providers/google/cloud/operators/cloud_composer.py b/airflow/providers/google/cloud/operators/cloud_composer.py index 5cfc06d0c9b5c..d04a1606fc84f 100644 --- a/airflow/providers/google/cloud/operators/cloud_composer.py +++ b/airflow/providers/google/cloud/operators/cloud_composer.py @@ -44,7 +44,7 @@ class CloudComposerEnvironmentLink(BaseGoogleLink): - """Helper class for constructing Cloud Composer Environment Link""" + """Helper class for constructing Cloud Composer Environment Link.""" name = "Cloud Composer Environment" key = "composer_conf" @@ -71,7 +71,7 @@ def persist( class CloudComposerEnvironmentsLink(BaseGoogleLink): - """Helper class for constructing Cloud Composer Environment Link""" + """Helper class for constructing Cloud Composer Environment Link.""" name = "Cloud Composer Environment List" key = "composer_conf" diff --git a/airflow/providers/google/cloud/operators/cloud_sql.py b/airflow/providers/google/cloud/operators/cloud_sql.py index 67ad0caaa2890..aa8e7f526eeba 100644 --- a/airflow/providers/google/cloud/operators/cloud_sql.py +++ b/airflow/providers/google/cloud/operators/cloud_sql.py @@ -526,7 +526,7 @@ def execute(self, context: Context) -> bool | None: class CloudSQLCloneInstanceOperator(CloudSQLBaseOperator): """ - Clones an instance to a target instance + Clones an instance to a target instance. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -725,6 +725,7 @@ class CloudSQLPatchInstanceDatabaseOperator(CloudSQLBaseOperator): """ Updates a resource containing information about a database inside a Cloud SQL instance using patch semantics. + See: https://cloud.google.com/sql/docs/mysql/admin-api/how-tos/performance#patch .. 
seealso:: diff --git a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py index a284eeae02a33..dc7bfc65c5b86 100644 --- a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py @@ -122,12 +122,12 @@ def process_body(self) -> dict: @staticmethod def _convert_date_to_dict(field_date: date) -> dict: - """Convert native python ``datetime.date`` object to a format supported by the API""" + """Convert native python ``datetime.date`` object to a format supported by the API.""" return {DAY: field_date.day, MONTH: field_date.month, YEAR: field_date.year} @staticmethod def _convert_time_to_dict(time_object: time) -> dict: - """Convert native python ``datetime.time`` object to a format supported by the API""" + """Convert native python ``datetime.time`` object to a format supported by the API.""" return {HOURS: time_object.hour, MINUTES: time_object.minute, SECONDS: time_object.second} diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py index 5197fbcc2e3eb..ef9333db38705 100644 --- a/airflow/providers/google/cloud/operators/dataflow.py +++ b/airflow/providers/google/cloud/operators/dataflow.py @@ -47,7 +47,8 @@ class CheckJobRunning(Enum): """ - Helper enum for choosing what to do if job is already running + Helper enum for choosing what to do if job is already running. + IgnoreJob - do not check if running FinishIfRunning - finish current dag run with no action WaitForRun - wait for job to finish and then continue with new job diff --git a/airflow/providers/google/cloud/operators/dataform.py b/airflow/providers/google/cloud/operators/dataform.py index 184b7f0fc24bf..4b61f974c9e8c 100644 --- a/airflow/providers/google/cloud/operators/dataform.py +++ b/airflow/providers/google/cloud/operators/dataform.py @@ -1030,7 +1030,7 @@ def execute(self, context: Context) -> None: class DataformInstallNpmPackagesOperator(GoogleCloudBaseOperator): """ - Installs npm dependencies in the provided workspace. Requires "package.json" to be created in workspace + Installs npm dependencies in the provided workspace. Requires "package.json" to be created in workspace. :param project_id: Required. The ID of the Google Cloud project where workspace located. :param region: Required. The ID of the Google Cloud region where workspace located. diff --git a/airflow/providers/google/cloud/operators/datafusion.py b/airflow/providers/google/cloud/operators/datafusion.py index 635c8edd9586d..b214c36d7844d 100644 --- a/airflow/providers/google/cloud/operators/datafusion.py +++ b/airflow/providers/google/cloud/operators/datafusion.py @@ -38,7 +38,7 @@ class DataFusionPipelineLinkHelper: - """Helper class for Pipeline links""" + """Helper class for Pipeline links.""" @staticmethod def get_project_id(instance): diff --git a/airflow/providers/google/cloud/operators/dataprep.py b/airflow/providers/google/cloud/operators/dataprep.py index 61aa0a8487c64..7f19f6993b7ce 100644 --- a/airflow/providers/google/cloud/operators/dataprep.py +++ b/airflow/providers/google/cloud/operators/dataprep.py @@ -31,7 +31,8 @@ class DataprepGetJobsForJobGroupOperator(GoogleCloudBaseOperator): """ Get information about the batch jobs within a Cloud Dataprep job. 
- API documentation https://clouddataprep.com/documentation/api#section/Overview + + API documentation: https://clouddataprep.com/documentation/api#section/Overview. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -65,8 +66,10 @@ def execute(self, context: Context) -> dict: class DataprepGetJobGroupOperator(GoogleCloudBaseOperator): """ Get the specified job group. + A job group is a job that is executed from a specific node in a flow. - API documentation https://clouddataprep.com/documentation/api#section/Overview + + API documentation: https://clouddataprep.com/documentation/api#section/Overview. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -124,9 +127,11 @@ def execute(self, context: Context) -> dict: class DataprepRunJobGroupOperator(GoogleCloudBaseOperator): """ Create a ``jobGroup``, which launches the specified job as the authenticated user. + This performs the same action as clicking on the Run Job button in the application. - To get recipe_id please follow the Dataprep API documentation - https://clouddataprep.com/documentation/api#operation/runJobGroup + + To get recipe_id please follow the Dataprep API documentation: + https://clouddataprep.com/documentation/api#operation/runJobGroup. .. seealso:: For more information on how to use this operator, take a look at the guide: diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py index 985278d4d9d06..d460ad3800287 100644 --- a/airflow/providers/google/cloud/operators/dataproc.py +++ b/airflow/providers/google/cloud/operators/dataproc.py @@ -397,15 +397,14 @@ def make(self): class DataprocCreateClusterOperator(GoogleCloudBaseOperator): """ Create a new cluster on Google Cloud Dataproc. The operator will wait until the - creation is successful or an error occurs in the creation process. If the cluster - already exists and ``use_if_exists`` is True then the operator will: + creation is successful or an error occurs in the creation process. + If the cluster already exists and ``use_if_exists`` is True then the operator will: - if cluster state is ERROR then delete it if specified and raise error - if cluster state is CREATING wait for it and then check for ERROR state - if cluster state is DELETING wait for it and then create new cluster Please refer to - https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters for a detailed explanation on the different parameters. Most of the configuration @@ -1009,7 +1008,7 @@ def __init__( self.polling_interval_seconds = polling_interval_seconds def create_job_template(self) -> DataProcJobBuilder: - """Initialize `self.job_template` with default values""" + """Initialize `self.job_template` with default values.""" if self.project_id is None: raise AirflowException( "project id should either be set via project_id " @@ -1176,6 +1175,7 @@ def __init__( def generate_job(self): """ Helper method for easier migration to `DataprocSubmitJobOperator`. + :return: Dict representing Dataproc job """ job_template = self.create_job_template() @@ -1252,6 +1252,7 @@ def __init__( def generate_job(self): """ Helper method for easier migration to `DataprocSubmitJobOperator`. + :return: Dict representing Dataproc job """ job_template = self.create_job_template() @@ -1327,6 +1328,7 @@ def __init__( def generate_job(self): """ Helper method for easier migration to `DataprocSubmitJobOperator`. 
+ :return: Dict representing Dataproc job """ job_template = self.create_job_template() @@ -1404,6 +1406,7 @@ def __init__( def generate_job(self): """ Helper method for easier migration to `DataprocSubmitJobOperator`. + :return: Dict representing Dataproc job """ job_template = self.create_job_template() @@ -1477,6 +1480,7 @@ def __init__( def generate_job(self): """ Helper method for easier migration to `DataprocSubmitJobOperator`. + :return: Dict representing Dataproc job """ job_template = self.create_job_template() @@ -1575,6 +1579,7 @@ def __init__( def generate_job(self): """ Helper method for easier migration to `DataprocSubmitJobOperator`. + :return: Dict representing Dataproc job """ job_template = self.create_job_template() diff --git a/airflow/providers/google/cloud/operators/dataproc_metastore.py b/airflow/providers/google/cloud/operators/dataproc_metastore.py index c7fe6fccf335f..3cdc6cf9c9564 100644 --- a/airflow/providers/google/cloud/operators/dataproc_metastore.py +++ b/airflow/providers/google/cloud/operators/dataproc_metastore.py @@ -51,7 +51,7 @@ class DataprocMetastoreLink(BaseOperatorLink): - """Helper class for constructing Dataproc Metastore resource link""" + """Helper class for constructing Dataproc Metastore resource link.""" name = "Dataproc Metastore" key = "conf" @@ -99,7 +99,7 @@ def get_link( class DataprocMetastoreDetailedLink(BaseOperatorLink): - """Helper class for constructing Dataproc Metastore detailed resource link""" + """Helper class for constructing Dataproc Metastore detailed resource link.""" name = "Dataproc Metastore resource" key = "config" @@ -701,7 +701,7 @@ def _get_uri_from_destination(self, destination_uri: str): def _wait_for_export_metadata(self, hook: DataprocMetastoreHook): """ Workaround to check that export was created successfully. - We discovered a issue to parse result to MetadataExport inside the SDK + We discovered an issue to parse result to MetadataExport inside the SDK. """ for time_to_wait in exponential_sleep_generator(initial=10, maximum=120): sleep(time_to_wait) @@ -989,7 +989,7 @@ def execute(self, context: Context): def _wait_for_restore_service(self, hook: DataprocMetastoreHook): """ Workaround to check that restore service was finished successfully. - We discovered an issue to parse result to Restore inside the SDK + We discovered an issue to parse result to Restore inside the SDK. """ for time_to_wait in exponential_sleep_generator(initial=10, maximum=120): sleep(time_to_wait) diff --git a/airflow/providers/google/cloud/operators/datastore.py b/airflow/providers/google/cloud/operators/datastore.py index bcd651f11f5cf..c093296e003a4 100644 --- a/airflow/providers/google/cloud/operators/datastore.py +++ b/airflow/providers/google/cloud/operators/datastore.py @@ -36,7 +36,7 @@ class CloudDatastoreExportEntitiesOperator(GoogleCloudBaseOperator): """ - Export entities from Google Cloud Datastore to Cloud Storage + Export entities from Google Cloud Datastore to Cloud Storage. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -142,7 +142,7 @@ def execute(self, context: Context) -> dict: class CloudDatastoreImportEntitiesOperator(GoogleCloudBaseOperator): """ - Import entities from Cloud Storage to Google Cloud Datastore + Import entities from Cloud Storage to Google Cloud Datastore. ..
seealso:: For more information on how to use this operator, take a look at the guide: diff --git a/airflow/providers/google/cloud/operators/life_sciences.py b/airflow/providers/google/cloud/operators/life_sciences.py index ae0363adeb983..bb2b5329e018c 100644 --- a/airflow/providers/google/cloud/operators/life_sciences.py +++ b/airflow/providers/google/cloud/operators/life_sciences.py @@ -31,7 +31,7 @@ class LifeSciencesRunPipelineOperator(GoogleCloudBaseOperator): """ - Runs a Life Sciences Pipeline + Runs a Life Sciences Pipeline. .. seealso:: For more information on how to use this operator, take a look at the guide: diff --git a/airflow/providers/google/cloud/operators/mlengine.py b/airflow/providers/google/cloud/operators/mlengine.py index 88b833cdc3323..b776d20dff42d 100644 --- a/airflow/providers/google/cloud/operators/mlengine.py +++ b/airflow/providers/google/cloud/operators/mlengine.py @@ -426,7 +426,7 @@ def execute(self, context: Context): class MLEngineGetModelOperator(GoogleCloudBaseOperator): """ - Gets a particular model + Gets a particular model. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -679,7 +679,7 @@ def execute(self, context: Context): class MLEngineCreateVersionOperator(GoogleCloudBaseOperator): """ - Creates a new version in the model + Creates a new version in the model. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -843,7 +843,7 @@ def execute(self, context: Context): class MLEngineListVersionsOperator(GoogleCloudBaseOperator): """ - Lists all available versions of the model + Lists all available versions of the model. .. seealso:: For more information on how to use this operator, take a look at the guide: diff --git a/airflow/providers/google/cloud/operators/text_to_speech.py b/airflow/providers/google/cloud/operators/text_to_speech.py index b000baba3e256..90ca92576392b 100644 --- a/airflow/providers/google/cloud/operators/text_to_speech.py +++ b/airflow/providers/google/cloud/operators/text_to_speech.py @@ -37,7 +37,7 @@ class CloudTextToSpeechSynthesizeOperator(GoogleCloudBaseOperator): """ - Synthesizes text to speech and stores it in Google Cloud Storage + Synthesizes text to speech and stores it in Google Cloud Storage. .. 
seealso:: For more information on how to use this operator, take a look at the guide: diff --git a/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py b/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py index f933cfc376563..80e573a1d663c 100644 --- a/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +++ b/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py @@ -89,7 +89,7 @@ def on_kill(self) -> None: class CreateAutoMLForecastingTrainingJobOperator(AutoMLTrainingJobBaseOperator): - """Create AutoML Forecasting Training job""" + """Create AutoML Forecasting Training job.""" template_fields = ( "dataset_id", @@ -213,7 +213,7 @@ def execute(self, context: Context): class CreateAutoMLImageTrainingJobOperator(AutoMLTrainingJobBaseOperator): - """Create Auto ML Image Training job""" + """Create Auto ML Image Training job.""" template_fields = ( "dataset_id", @@ -293,7 +293,7 @@ def execute(self, context: Context): class CreateAutoMLTabularTrainingJobOperator(AutoMLTrainingJobBaseOperator): - """Create Auto ML Tabular Training job""" + """Create Auto ML Tabular Training job.""" template_fields = ( "dataset_id", @@ -399,7 +399,7 @@ def execute(self, context: Context): class CreateAutoMLTextTrainingJobOperator(AutoMLTrainingJobBaseOperator): - """Create Auto ML Text Training job""" + """Create Auto ML Text Training job.""" template_fields = [ "dataset_id", @@ -470,7 +470,7 @@ def execute(self, context: Context): class CreateAutoMLVideoTrainingJobOperator(AutoMLTrainingJobBaseOperator): - """Create Auto ML Video Training job""" + """Create Auto ML Video Training job.""" template_fields = ( "dataset_id", diff --git a/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py b/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py index dc4775fe24184..df92847c0cc48 100644 --- a/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +++ b/airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py @@ -346,7 +346,7 @@ def execute(self, context: Context): class GetBatchPredictionJobOperator(GoogleCloudBaseOperator): """ - Gets a BatchPredictionJob + Gets a BatchPredictionJob. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. diff --git a/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py b/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py index 38c3a2ea69445..7eb7866ff3671 100644 --- a/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +++ b/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py @@ -150,7 +150,7 @@ def __init__( class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator): - """Create Custom Container Training job + """Create Custom Container Training job. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. @@ -498,7 +498,7 @@ def on_kill(self) -> None: class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator): - """Create Custom Python Package Training job + """Create Custom Python Package Training job. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. 
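The Vertex AI training-job operators touched above are invoked like any other Google provider operator inside a DAG. Below is a minimal, hypothetical sketch: the import path, operator name, and the project_id/region parameters come from the hunks above, while display_name, container_uri and staging_bucket are assumptions drawn from the Vertex AI custom-job API and may differ in your provider version.

from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.vertex_ai.custom_job import (
    CreateCustomContainerTrainingJobOperator,
)

with DAG(dag_id="vertex_ai_custom_training", start_date=datetime(2023, 1, 1), schedule=None) as dag:
    # project_id and region are the parameters documented in the hunks above;
    # the remaining arguments are illustrative assumptions for a container-based job.
    create_training_job = CreateCustomContainerTrainingJobOperator(
        task_id="create_custom_container_training_job",
        project_id="my-project",                            # hypothetical project
        region="us-central1",
        display_name="example-training-job",                # assumed parameter
        container_uri="gcr.io/my-project/trainer:latest",   # assumed parameter
        staging_bucket="gs://my-staging-bucket",            # assumed parameter
    )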
@@ -848,7 +848,7 @@ def on_kill(self) -> None: class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator): - """Create Custom Training job + """Create Custom Training job. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. diff --git a/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py b/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py index 42be6fbadd20b..39a5b9bc6e2c7 100644 --- a/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +++ b/airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py @@ -50,7 +50,7 @@ class CreateHyperparameterTuningJobOperator(GoogleCloudBaseOperator): """ - Create Hyperparameter Tuning job + Create Hyperparameter Tuning job. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. @@ -274,7 +274,7 @@ def on_kill(self) -> None: class GetHyperparameterTuningJobOperator(GoogleCloudBaseOperator): """ - Gets a HyperparameterTuningJob + Gets a HyperparameterTuningJob. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. diff --git a/airflow/providers/google/cloud/operators/vision.py b/airflow/providers/google/cloud/operators/vision.py index 76eabe242060e..028ce8a0d7300 100644 --- a/airflow/providers/google/cloud/operators/vision.py +++ b/airflow/providers/google/cloud/operators/vision.py @@ -1208,7 +1208,7 @@ def execute(self, context: Context): class CloudVisionDetectTextOperator(GoogleCloudBaseOperator): """ - Detects Text in the image + Detects Text in the image. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -1290,7 +1290,7 @@ def execute(self, context: Context): class CloudVisionTextDetectOperator(GoogleCloudBaseOperator): """ - Detects Document Text in the image + Detects Document Text in the image. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -1371,7 +1371,7 @@ def execute(self, context: Context): class CloudVisionDetectImageLabelsOperator(GoogleCloudBaseOperator): """ - Detects Document Text in the image + Detects Document Text in the image. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -1442,7 +1442,7 @@ def execute(self, context: Context): class CloudVisionDetectImageSafeSearchOperator(GoogleCloudBaseOperator): """ - Detects Document Text in the image + Detects Document Text in the image. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -1516,7 +1516,7 @@ def prepare_additional_parameters( ) -> dict | None: """ Creates additional_properties parameter based on language_hints, web_detection_params and - additional_properties parameters specified by the user + additional_properties parameters specified by the user. 
""" if language_hints is None and web_detection_params is None: return additional_properties diff --git a/airflow/providers/google/cloud/secrets/secret_manager.py b/airflow/providers/google/cloud/secrets/secret_manager.py index 3deffaabb5c08..7269a17ef45f1 100644 --- a/airflow/providers/google/cloud/secrets/secret_manager.py +++ b/airflow/providers/google/cloud/secrets/secret_manager.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""Objects relating to sourcing connections from Google Cloud Secrets Manager""" +"""Objects relating to sourcing connections from Google Cloud Secrets Manager.""" from __future__ import annotations import logging @@ -42,7 +42,7 @@ def _parse_version(val): class CloudSecretManagerBackend(BaseSecretsBackend, LoggingMixin): """ - Retrieves Connection object from Google Cloud Secrets Manager + Retrieves Connection object from Google Cloud Secrets Manager. Configurable via ``airflow.cfg`` as follows: @@ -136,7 +136,7 @@ def _is_valid_prefix_and_sep(self) -> bool: def get_conn_value(self, conn_id: str) -> str | None: """ - Get serialized representation of Connection + Get serialized representation of Connection. :param conn_id: connection id """ @@ -165,7 +165,7 @@ def get_conn_uri(self, conn_id: str) -> str | None: def get_variable(self, key: str) -> str | None: """ - Get Airflow Variable from Environment Variable + Get Airflow Variable from Environment Variable. :param key: Variable Key :return: Variable Value @@ -177,7 +177,7 @@ def get_variable(self, key: str) -> str | None: def get_config(self, key: str) -> str | None: """ - Get Airflow Configuration + Get Airflow Configuration. :param key: Configuration Option Key :return: Configuration Option Value diff --git a/airflow/providers/google/cloud/sensors/cloud_composer.py b/airflow/providers/google/cloud/sensors/cloud_composer.py index f454d255a1693..238c9533051c3 100644 --- a/airflow/providers/google/cloud/sensors/cloud_composer.py +++ b/airflow/providers/google/cloud/sensors/cloud_composer.py @@ -31,7 +31,7 @@ class CloudComposerEnvironmentSensor(BaseSensorOperator): """ - Check the status of the Cloud Composer Environment task + Check the status of the Cloud Composer Environment task. :param project_id: Required. The ID of the Google Cloud project that the service belongs to. :param region: Required. The ID of the Google Cloud region that the service belongs to. diff --git a/airflow/providers/google/cloud/sensors/datafusion.py b/airflow/providers/google/cloud/sensors/datafusion.py index f1d441f4a8b5c..8297d60f4403a 100644 --- a/airflow/providers/google/cloud/sensors/datafusion.py +++ b/airflow/providers/google/cloud/sensors/datafusion.py @@ -30,7 +30,7 @@ class CloudDataFusionPipelineStateSensor(BaseSensorOperator): """ - Check the status of the pipeline in the Google Cloud Data Fusion + Check the status of the pipeline in the Google Cloud Data Fusion. :param pipeline_name: Your pipeline name. :param pipeline_id: Your pipeline ID. 
diff --git a/airflow/providers/google/cloud/sensors/dataplex.py b/airflow/providers/google/cloud/sensors/dataplex.py index 092fabd367154..d710d134c3754 100644 --- a/airflow/providers/google/cloud/sensors/dataplex.py +++ b/airflow/providers/google/cloud/sensors/dataplex.py @@ -31,7 +31,7 @@ class TaskState: - """Dataplex Task states""" + """Dataplex Task states.""" STATE_UNSPECIFIED = 0 ACTIVE = 1 @@ -42,7 +42,7 @@ class TaskState: class DataplexTaskStateSensor(BaseSensorOperator): """ - Check the status of the Dataplex task + Check the status of the Dataplex task. :param project_id: Required. The ID of the Google Cloud project that the task belongs to. :param region: Required. The ID of the Google Cloud region that the task belongs to. diff --git a/airflow/providers/google/cloud/sensors/gcs.py b/airflow/providers/google/cloud/sensors/gcs.py index a268002b79881..08fd37022dfe7 100644 --- a/airflow/providers/google/cloud/sensors/gcs.py +++ b/airflow/providers/google/cloud/sensors/gcs.py @@ -132,8 +132,9 @@ def execute_complete(self, context: Context, event: dict[str, str]) -> str: class GCSObjectExistenceAsyncSensor(GCSObjectExistenceSensor): """ Checks for the existence of a file in Google Cloud Storage. + Class `GCSObjectExistenceAsyncSensor` is deprecated and will be removed in a future release. - Please use `GCSObjectExistenceSensor` and set `deferrable` attribute to `True` instead + Please use `GCSObjectExistenceSensor` and set `deferrable` attribute to `True` instead. :param bucket: The Google Cloud Storage bucket where the object is. :param object: The name of the object to check in the Google cloud storage bucket. @@ -318,7 +319,7 @@ def poke(self, context: Context) -> bool: return bool(self._matches) def execute(self, context: Context): - """Overridden to allow matches to be passed""" + """Overridden to allow matches to be passed.""" self.log.info("Checking for existence of object: %s, %s", self.bucket, self.prefix) if not self.deferrable: super().execute(context) @@ -342,7 +343,7 @@ def execute(self, context: Context): def execute_complete(self, context: dict[str, Any], event: dict[str, str | list[str]]) -> str | list[str]: """ Callback for when the trigger fires; returns immediately. - Relies on trigger to throw a success event + Relies on trigger to throw a success event. """ self.log.info("Resuming from trigger and checking status") if event["status"] == "success": diff --git a/airflow/providers/google/cloud/sensors/pubsub.py b/airflow/providers/google/cloud/sensors/pubsub.py index 8ab0234d5bd50..dfe14b542b61d 100644 --- a/airflow/providers/google/cloud/sensors/pubsub.py +++ b/airflow/providers/google/cloud/sensors/pubsub.py @@ -113,7 +113,7 @@ def __init__( self._return_value = None def execute(self, context: Context) -> Any: - """Overridden to allow messages to be passed""" + """Overridden to allow messages to be passed.""" super().execute(context) return self._return_value diff --git a/airflow/providers/google/cloud/transfers/adls_to_gcs.py b/airflow/providers/google/cloud/transfers/adls_to_gcs.py index c4e9eb3035ff5..4810ab41af20d 100644 --- a/airflow/providers/google/cloud/transfers/adls_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/adls_to_gcs.py @@ -35,7 +35,7 @@ class ADLSToGCSOperator(ADLSListOperator): """ - Synchronizes an Azure Data Lake Storage path with a GCS bucket + Synchronizes an Azure Data Lake Storage path with a GCS bucket. 
:param src_adls: The Azure Data Lake path to find the objects (templated) :param dest_gcs: The Google Cloud Storage bucket and prefix to diff --git a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py index fb7711dd8a8e7..ffd3e3aa78373 100644 --- a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py @@ -45,7 +45,7 @@ class CassandraToGCSOperator(BaseOperator): """ - Copy data from Cassandra to Google Cloud Storage in JSON format + Copy data from Cassandra to Google Cloud Storage in JSON format. Note: Arrays of arrays are not supported. diff --git a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py index bda341bd2e73d..0b96c0735b3cd 100644 --- a/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py @@ -35,7 +35,7 @@ class FlushAction(Enum): - """Facebook Ads Export Options""" + """Facebook Ads Export Options.""" EXPORT_ONCE = "ExportAtOnce" EXPORT_EVERY_ACCOUNT = "ExportEveryAccount" @@ -45,7 +45,7 @@ class FacebookAdsReportToGcsOperator(BaseOperator): """ Fetches the results from the Facebook Ads API as desired in the params Converts and saves the data as a temporary JSON file - Uploads the JSON to Google Cloud Storage + Uploads the JSON to Google Cloud Storage. .. seealso:: For more information on the Facebook Ads API, take a look at the API docs: diff --git a/airflow/providers/google/cloud/transfers/local_to_gcs.py b/airflow/providers/google/cloud/transfers/local_to_gcs.py index ee3a5c867ce19..26e70109bc74d 100644 --- a/airflow/providers/google/cloud/transfers/local_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/local_to_gcs.py @@ -87,7 +87,7 @@ def __init__( self.impersonation_chain = impersonation_chain def execute(self, context: Context): - """Uploads a file or list of files to Google Cloud Storage""" + """Uploads a file or list of files to Google Cloud Storage.""" hook = GCSHook( gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain, diff --git a/airflow/providers/google/cloud/transfers/presto_to_gcs.py b/airflow/providers/google/cloud/transfers/presto_to_gcs.py index e66531825e332..1433435c7529f 100644 --- a/airflow/providers/google/cloud/transfers/presto_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/presto_to_gcs.py @@ -71,11 +71,11 @@ def description(self) -> list[tuple]: @property def rowcount(self) -> int: - """The read-only attribute specifies the number of rows""" + """The read-only attribute specifies the number of rows.""" return self.cursor.rowcount def close(self) -> None: - """Close the cursor now""" + """Close the cursor now.""" self.cursor.close() def execute(self, *args, **kwargs) -> PrestoResult: @@ -129,8 +129,9 @@ def fetchmany(self, size=None) -> list: def __next__(self) -> Any: """ Return the next row from the currently executing SQL statement using the same semantics as - ``.fetchone()``. A ``StopIteration`` exception is raised when the result set is exhausted. - :return: + ``.fetchone()``. + + A ``StopIteration`` exception is raised when the result set is exhausted. 
""" result = self.fetchone() if result is None: @@ -138,7 +139,7 @@ def __next__(self) -> Any: return result def __iter__(self) -> _PrestoToGCSPrestoCursorAdapter: - """Return self to make cursors compatible to the iteration protocol""" + """Return self to make cursors compatible to the iteration protocol.""" return self diff --git a/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py b/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py index 4764ed33d1cb4..a46d75d21f6b0 100644 --- a/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py @@ -30,7 +30,7 @@ class SalesforceToGcsOperator(BaseOperator): """ - Submits Salesforce query and uploads results to Google Cloud Storage + Submits Salesforce query and uploads results to Google Cloud Storage. .. seealso:: For more information on how to use this operator, take a look at the guide: diff --git a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py index 1531664999e4a..a60f5cb2798a3 100644 --- a/airflow/providers/google/cloud/transfers/sql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/sql_to_gcs.py @@ -322,7 +322,7 @@ def _write_local_data_files(self, cursor): yield file_to_upload def _get_file_to_upload(self, file_mime_type, file_no): - """Returns a dictionary that represents the file to upload""" + """Returns a dictionary that represents the file to upload.""" tmp_file_handle = NamedTemporaryFile(delete=True) return ( { diff --git a/airflow/providers/google/cloud/transfers/trino_to_gcs.py b/airflow/providers/google/cloud/transfers/trino_to_gcs.py index ce104dc565649..60dbecb4b73b9 100644 --- a/airflow/providers/google/cloud/transfers/trino_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/trino_to_gcs.py @@ -71,11 +71,11 @@ def description(self) -> list[tuple]: @property def rowcount(self) -> int: - """The read-only attribute specifies the number of rows""" + """The read-only attribute specifies the number of rows.""" return self.cursor.rowcount def close(self) -> None: - """Close the cursor now""" + """Close the cursor now.""" self.cursor.close() def execute(self, *args, **kwargs) -> TrinoResult: @@ -129,8 +129,9 @@ def fetchmany(self, size=None) -> list: def __next__(self) -> Any: """ Return the next row from the currently executing SQL statement using the same semantics as - ``.fetchone()``. A ``StopIteration`` exception is raised when the result set is exhausted. - :return: + ``.fetchone()``. + + A ``StopIteration`` exception is raised when the result set is exhausted. """ result = self.fetchone() if result is None: @@ -138,7 +139,7 @@ def __next__(self) -> Any: return result def __iter__(self) -> _TrinoToGCSTrinoCursorAdapter: - """Return self to make cursors compatible to the iteration protocol""" + """Return self to make cursors compatible to the iteration protocol.""" return self diff --git a/airflow/providers/google/cloud/triggers/bigquery.py b/airflow/providers/google/cloud/triggers/bigquery.py index c7b17af2ed5f5..0747b621f1f52 100644 --- a/airflow/providers/google/cloud/triggers/bigquery.py +++ b/airflow/providers/google/cloud/triggers/bigquery.py @@ -28,7 +28,7 @@ class BigQueryInsertJobTrigger(BaseTrigger): """ - BigQueryInsertJobTrigger run on the trigger worker to perform insert operation + BigQueryInsertJobTrigger run on the trigger worker to perform insert operation. 
:param conn_id: Reference to google cloud connection id :param job_id: The ID of the job. It will be suffixed with hash of job configuration @@ -72,7 +72,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] - """Gets current job execution status and yields a TriggerEvent""" + """Gets current job execution status and yields a TriggerEvent.""" hook = self._get_async_hook() while True: try: @@ -106,7 +106,7 @@ def _get_async_hook(self) -> BigQueryAsyncHook: class BigQueryCheckTrigger(BigQueryInsertJobTrigger): - """BigQueryCheckTrigger run on the trigger worker""" + """BigQueryCheckTrigger run on the trigger worker.""" def serialize(self) -> tuple[str, dict[str, Any]]: """Serializes BigQueryCheckTrigger arguments and classpath.""" @@ -123,7 +123,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] - """Gets current job execution status and yields a TriggerEvent""" + """Gets current job execution status and yields a TriggerEvent.""" hook = self._get_async_hook() while True: try: @@ -166,7 +166,7 @@ async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] class BigQueryGetDataTrigger(BigQueryInsertJobTrigger): """ - BigQueryGetDataTrigger run on the trigger worker, inherits from BigQueryInsertJobTrigger class + BigQueryGetDataTrigger run on the trigger worker, inherits from BigQueryInsertJobTrigger class. :param as_dict: if True returns the result as a list of dictionaries, otherwise as list of lists (default: False). @@ -192,7 +192,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] - """Gets current job execution status and yields a TriggerEvent with response data""" + """Gets current job execution status and yields a TriggerEvent with response data.""" hook = self._get_async_hook() while True: try: @@ -225,7 +225,7 @@ async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] class BigQueryIntervalCheckTrigger(BigQueryInsertJobTrigger): """ - BigQueryIntervalCheckTrigger run on the trigger worker, inherits from BigQueryInsertJobTrigger class + BigQueryIntervalCheckTrigger run on the trigger worker, inherits from BigQueryInsertJobTrigger class. :param conn_id: Reference to google cloud connection id :param first_job_id: The ID of the job 1 performed @@ -297,7 +297,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] - """Gets current job execution status and yields a TriggerEvent""" + """Gets current job execution status and yields a TriggerEvent.""" hook = self._get_async_hook() while True: try: @@ -370,7 +370,7 @@ async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] class BigQueryValueCheckTrigger(BigQueryInsertJobTrigger): """ - BigQueryValueCheckTrigger run on the trigger worker, inherits from BigQueryInsertJobTrigger class + BigQueryValueCheckTrigger run on the trigger worker, inherits from BigQueryInsertJobTrigger class. 
:param conn_id: Reference to google cloud connection id :param sql: the sql to be executed @@ -425,7 +425,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] - """Gets current job execution status and yields a TriggerEvent""" + """Gets current job execution status and yields a TriggerEvent.""" hook = self._get_async_hook() while True: try: @@ -454,7 +454,7 @@ async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] class BigQueryTableExistenceTrigger(BaseTrigger): """ - Initialize the BigQuery Table Existence Trigger with needed parameters + Initialize the BigQuery Table Existence Trigger with needed parameters. :param project_id: Google Cloud Project where the job is running :param dataset_id: The dataset ID of the requested table. @@ -543,7 +543,8 @@ async def _table_exists( class BigQueryTablePartitionExistenceTrigger(BigQueryTableExistenceTrigger): """ - Initialize the BigQuery Table Partition Existence Trigger with needed parameters + Initialize the BigQuery Table Partition Existence Trigger with needed parameters. + :param partition_id: The name of the partition to check the existence of. :param project_id: Google Cloud Project where the job is running :param dataset_id: The dataset ID of the requested table. diff --git a/airflow/providers/google/cloud/triggers/cloud_build.py b/airflow/providers/google/cloud/triggers/cloud_build.py index 3187dc0c1d342..5ec9eef3cab04 100644 --- a/airflow/providers/google/cloud/triggers/cloud_build.py +++ b/airflow/providers/google/cloud/triggers/cloud_build.py @@ -27,7 +27,7 @@ class CloudBuildCreateBuildTrigger(BaseTrigger): """ - CloudBuildCreateBuildTrigger run on the trigger worker to perform create Build operation + CloudBuildCreateBuildTrigger run on the trigger worker to perform create Build operation. :param id_: The ID of the build. 
:param project_id: Google Cloud Project where the job is running @@ -76,7 +76,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] - """Gets current build execution status and yields a TriggerEvent""" + """Gets current build execution status and yields a TriggerEvent.""" hook = self._get_async_hook() while True: try: diff --git a/airflow/providers/google/cloud/triggers/cloud_composer.py b/airflow/providers/google/cloud/triggers/cloud_composer.py index e6567fa02c6b5..254a87637cdc8 100644 --- a/airflow/providers/google/cloud/triggers/cloud_composer.py +++ b/airflow/providers/google/cloud/triggers/cloud_composer.py @@ -27,7 +27,7 @@ class CloudComposerExecutionTrigger(BaseTrigger): - """The trigger handles the async communication with the Google Cloud Composer""" + """The trigger handles the async communication with the Google Cloud Composer.""" def __init__( self, diff --git a/airflow/providers/google/cloud/triggers/datafusion.py b/airflow/providers/google/cloud/triggers/datafusion.py index 34fa7d025819f..153e28c9a8835 100644 --- a/airflow/providers/google/cloud/triggers/datafusion.py +++ b/airflow/providers/google/cloud/triggers/datafusion.py @@ -81,7 +81,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] - """Gets current pipeline status and yields a TriggerEvent""" + """Gets current pipeline status and yields a TriggerEvent.""" hook = self._get_async_hook() while True: try: diff --git a/airflow/providers/google/cloud/triggers/dataproc.py b/airflow/providers/google/cloud/triggers/dataproc.py index d896f1190d5e2..137a92388b0e2 100644 --- a/airflow/providers/google/cloud/triggers/dataproc.py +++ b/airflow/providers/google/cloud/triggers/dataproc.py @@ -31,7 +31,7 @@ class DataprocBaseTrigger(BaseTrigger): - """Base class for Dataproc triggers""" + """Base class for Dataproc triggers.""" def __init__( self, @@ -57,7 +57,7 @@ def get_async_hook(self): class DataprocSubmitTrigger(DataprocBaseTrigger): """ - DataprocSubmitTrigger run on the trigger worker to perform create Build operation + DataprocSubmitTrigger run on the trigger worker to perform create Build operation. :param job_id: The ID of a Dataproc job. :param project_id: Google Cloud Project where the job is running @@ -109,7 +109,7 @@ async def run(self): class DataprocClusterTrigger(DataprocBaseTrigger): """ - DataprocClusterTrigger run on the trigger worker to perform create Build operation + DataprocClusterTrigger run on the trigger worker to perform create Build operation. :param cluster_name: The name of the cluster. :param project_id: Google Cloud Project where the job is running @@ -162,7 +162,7 @@ async def run(self) -> AsyncIterator[TriggerEvent]: class DataprocBatchTrigger(DataprocBaseTrigger): """ - DataprocCreateBatchTrigger run on the trigger worker to perform create Build operation + DataprocCreateBatchTrigger run on the trigger worker to perform create Build operation. :param batch_id: The ID of the build. 
:param project_id: Google Cloud Project where the job is running @@ -262,7 +262,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: - """Wait until cluster is deleted completely""" + """Wait until cluster is deleted completely.""" while self.end_time > time.time(): try: cluster = await self.get_async_hook().get_cluster( diff --git a/airflow/providers/google/cloud/triggers/gcs.py b/airflow/providers/google/cloud/triggers/gcs.py index 34dc163e23884..f8b3ca0c64dfe 100644 --- a/airflow/providers/google/cloud/triggers/gcs.py +++ b/airflow/providers/google/cloud/triggers/gcs.py @@ -148,7 +148,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: - """Loop until the object updated time is greater than target datetime""" + """Loop until the object updated time is greater than target datetime.""" try: hook = self._get_async_hook() while True: diff --git a/airflow/providers/google/cloud/triggers/mlengine.py b/airflow/providers/google/cloud/triggers/mlengine.py index c221308f2aee4..76c542a5bf4d4 100644 --- a/airflow/providers/google/cloud/triggers/mlengine.py +++ b/airflow/providers/google/cloud/triggers/mlengine.py @@ -25,7 +25,7 @@ class MLEngineStartTrainingJobTrigger(BaseTrigger): """ - MLEngineStartTrainingJobTrigger run on the trigger worker to perform starting training job operation + MLEngineStartTrainingJobTrigger run on the trigger worker to perform starting training job operation. :param conn_id: Reference to google cloud connection id :param job_id: The ID of the job. It will be suffixed with hash of job configuration @@ -89,7 +89,7 @@ def serialize(self) -> tuple[str, dict[str, Any]]: ) async def run(self) -> AsyncIterator[TriggerEvent]: # type: ignore[override] - """Gets current job execution status and yields a TriggerEvent""" + """Gets current job execution status and yields a TriggerEvent.""" hook = self._get_async_hook() while True: try: diff --git a/airflow/providers/google/cloud/utils/credentials_provider.py b/airflow/providers/google/cloud/utils/credentials_provider.py index f147d8fff9d96..a5063cd738dcb 100644 --- a/airflow/providers/google/cloud/utils/credentials_provider.py +++ b/airflow/providers/google/cloud/utils/credentials_provider.py @@ -141,7 +141,9 @@ def provide_gcp_conn_and_credentials( project_id: str | None = None, ) -> Generator[None, None, None]: """ - Context manager that provides both: + Context manager that provides GCP connection and credentials. + + It provides both: - Google Cloud credentials for application supporting `Application Default Credentials (ADC) strategy`__. @@ -167,7 +169,7 @@ def provide_gcp_conn_and_credentials( class _CredentialProvider(LoggingMixin): """ - Prepare the Credentials object for Google API and the associated project_id + Prepare the Credentials object for Google API and the associated project_id. Only either `key_path` or `keyfile_dict` should be provided, or an exception will occur.
If neither of them are provided, return default credentials for the current environment diff --git a/airflow/providers/google/cloud/utils/helpers.py b/airflow/providers/google/cloud/utils/helpers.py index 28c7aa07c3f24..72216ec20b06e 100644 --- a/airflow/providers/google/cloud/utils/helpers.py +++ b/airflow/providers/google/cloud/utils/helpers.py @@ -19,5 +19,5 @@ def normalize_directory_path(source_object: str | None) -> str | None: - """Makes sure dir path ends with a slash""" + """Makes sure dir path ends with a slash.""" return source_object + "/" if source_object and not source_object.endswith("/") else source_object diff --git a/airflow/providers/google/common/hooks/base_google.py b/airflow/providers/google/common/hooks/base_google.py index e68cb074e6a57..3ee344a03a881 100644 --- a/airflow/providers/google/common/hooks/base_google.py +++ b/airflow/providers/google/common/hooks/base_google.py @@ -73,8 +73,8 @@ def is_soft_quota_exception(exception: Exception): """ API for Google services does not have a standardized way to report quota violation errors. - The function has been adapted by trial and error to the following services: + The function has been adapted by trial and error to the following services: * Google Translate * Google Vision * Google Text-to-Speech @@ -188,7 +188,7 @@ class GoogleBaseHook(BaseHook): @staticmethod def get_connection_form_widgets() -> dict[str, Any]: - """Returns connection widgets to add to connection form""" + """Returns connection widgets to add to connection form.""" from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget from flask_babel import lazy_gettext from wtforms import IntegerField, PasswordField, StringField @@ -218,7 +218,7 @@ def get_connection_form_widgets() -> dict[str, Any]: @staticmethod def get_ui_field_behaviour() -> dict[str, Any]: - """Returns custom field behaviour""" + """Returns custom field behaviour.""" return { "hidden_fields": ["host", "schema", "login", "password", "port", "extra"], "relabeling": {}, @@ -239,7 +239,7 @@ def __init__( self._cached_project_id: str | None = None def get_credentials_and_project_id(self) -> tuple[google.auth.credentials.Credentials, str | None]: - """Returns the Credentials object for Google API and the associated project_id""" + """Returns the Credentials object for Google API and the associated project_id.""" if self._cached_credentials is not None: return self._cached_credentials, self._cached_project_id @@ -284,12 +284,12 @@ def get_credentials_and_project_id(self) -> tuple[google.auth.credentials.Creden return credentials, project_id def get_credentials(self) -> google.auth.credentials.Credentials: - """Returns the Credentials object for Google API""" + """Returns the Credentials object for Google API.""" credentials, _ = self.get_credentials_and_project_id() return credentials def _get_access_token(self) -> str: - """Returns a valid access token from Google API Credentials""" + """Returns a valid access token from Google API Credentials.""" credentials = self.get_credentials() auth_req = google.auth.transport.requests.Request() # credentials.token is None @@ -300,7 +300,7 @@ def _get_access_token(self) -> str: @functools.lru_cache(maxsize=None) def _get_credentials_email(self) -> str: """ - Returns the email address associated with the currently logged in account + Returns the email address associated with the currently logged in account. If a service account is used, it returns the service account. If user authentication (e.g. 
gcloud auth) is used, it returns the e-mail account of that user. @@ -602,7 +602,7 @@ def download_content_from_request(file_handle, request: dict, chunk_size: int) - file_handle.flush() def test_connection(self): - """Test the Google cloud connectivity from UI""" + """Test the Google cloud connectivity from UI.""" status, message = False, "" try: token = self._get_access_token() @@ -619,7 +619,7 @@ def test_connection(self): class GoogleBaseAsyncHook(BaseHook): - """GoogleBaseAsyncHook inherits from BaseHook class, run on the trigger worker""" + """GoogleBaseAsyncHook inherits from BaseHook class, run on the trigger worker.""" sync_hook_class: Any = None diff --git a/airflow/providers/google/common/links/storage.py b/airflow/providers/google/common/links/storage.py index 42bb710937154..3252e552443bd 100644 --- a/airflow/providers/google/common/links/storage.py +++ b/airflow/providers/google/common/links/storage.py @@ -31,7 +31,7 @@ class StorageLink(BaseGoogleLink): - """Helper class for constructing GCS Storage link""" + """Helper class for constructing GCS Storage link.""" name = "GCS Storage" key = "storage_conf" @@ -47,7 +47,7 @@ def persist(context: Context, task_instance, uri: str, project_id: str | None): class FileDetailsLink(BaseGoogleLink): - """Helper class for constructing GCS file details link""" + """Helper class for constructing GCS file details link.""" name = "GCS File Details" key = "file_details" diff --git a/airflow/providers/google/firebase/hooks/firestore.py b/airflow/providers/google/firebase/hooks/firestore.py index 1702f6bdb94dc..93fc154e997af 100644 --- a/airflow/providers/google/firebase/hooks/firestore.py +++ b/airflow/providers/google/firebase/hooks/firestore.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""Hook for Google Cloud Firestore service""" +"""Hook for Google Cloud Firestore service.""" from __future__ import annotations import time diff --git a/airflow/providers/google/leveldb/hooks/leveldb.py b/airflow/providers/google/leveldb/hooks/leveldb.py index 730af48db9493..16d2773badd7d 100644 --- a/airflow/providers/google/leveldb/hooks/leveldb.py +++ b/airflow/providers/google/leveldb/hooks/leveldb.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""Hook for Level DB""" +"""Hook for Level DB.""" from __future__ import annotations from airflow.exceptions import AirflowException, AirflowOptionalProviderFeatureException @@ -30,12 +30,13 @@ class LevelDBHookException(AirflowException): - """Exception specific for LevelDB""" + """Exception specific for LevelDB.""" class LevelDBHook(BaseHook): """ - Plyvel Wrapper to Interact With LevelDB Database + Plyvel Wrapper to Interact With LevelDB Database. + `LevelDB Connection Documentation `__ """ @@ -52,7 +53,7 @@ def __init__(self, leveldb_conn_id: str = default_conn_name): def get_conn(self, name: str = "/tmp/testdb/", create_if_missing: bool = False, **kwargs) -> DB: """ - Creates `Plyvel DB `__ + Creates `Plyvel DB `__. :param name: path to create database e.g. 
`/tmp/testdb/`) :param create_if_missing: whether a new database should be created if needed @@ -65,7 +66,7 @@ def get_conn(self, name: str = "/tmp/testdb/", create_if_missing: bool = False, return self.db def close_conn(self) -> None: - """Closes connection""" + """Closes connection.""" db = self.db if db is not None: db.close() @@ -80,7 +81,7 @@ def run( values: list[bytes] | None = None, ) -> bytes | None: """ - Execute operation with leveldb + Execute operation with leveldb. :param command: command of plyvel(python wrap for leveldb) for DB object e.g. ``"put"``, ``"get"``, ``"delete"``, ``"write_batch"``. @@ -109,7 +110,7 @@ def run( def put(self, key: bytes, value: bytes): """ - Put a single value into a leveldb db by key + Put a single value into a leveldb db by key. :param key: key for put execution, e.g. ``b'key'``, ``b'another-key'`` :param value: value for put execution e.g. ``b'value'``, ``b'another-value'`` @@ -120,7 +121,7 @@ def put(self, key: bytes, value: bytes): def get(self, key: bytes) -> bytes: """ - Get a single value into a leveldb db by key + Get a single value from a leveldb db by key. :param key: key for get execution, e.g. ``b'key'``, ``b'another-key'`` :returns: value of key from db.get @@ -141,7 +142,7 @@ def delete(self, key: bytes): def write_batch(self, keys: list[bytes], values: list[bytes]): """ - Write batch of values in a leveldb db by keys + Write batch of values in a leveldb db by keys. :param keys: keys for write_batch execution e.g. ``[b'key', b'another-key']`` :param values: values for write_batch execution e.g. ``[b'value', b'another-value']`` diff --git a/airflow/providers/google/leveldb/operators/leveldb.py b/airflow/providers/google/leveldb/operators/leveldb.py index 7ff2da63ff066..9471e9a1317d4 100644 --- a/airflow/providers/google/leveldb/operators/leveldb.py +++ b/airflow/providers/google/leveldb/operators/leveldb.py @@ -27,7 +27,7 @@ class LevelDBOperator(BaseOperator): """ - Execute command in LevelDB + Execute command in LevelDB. .. seealso:: For more information on how to use this operator, take a look at the guide: @@ -72,7 +72,7 @@ def __init__( def execute(self, context: Context) -> str | None: """ - Execute command in LevelDB + Execute command in LevelDB. :returns: value from get(str, not bytes, to prevent error in json.dumps in serialize_value in xcom.py) or str | None diff --git a/airflow/providers/google/marketing_platform/hooks/analytics.py b/airflow/providers/google/marketing_platform/hooks/analytics.py index 524eaaa8717bc..05df51c2c1f57 100644 --- a/airflow/providers/google/marketing_platform/hooks/analytics.py +++ b/airflow/providers/google/marketing_platform/hooks/analytics.py @@ -119,7 +119,7 @@ def upload_data( resumable_upload: bool = False, ) -> None: """ - Uploads file to GA via the Data Import API + Uploads file to GA via the Data Import API. :param file_location: The path and name of the file to upload. :param account_id: The GA account Id to which the data upload belongs. @@ -156,7 +156,7 @@ def delete_upload_data( delete_request_body: dict[str, Any], ) -> None: """ - Deletes the uploaded data for a given account/property/dataset + Deletes the uploaded data for a given account/property/dataset. :param account_id: The GA account Id to which the data upload belongs. :param web_property_id: UA-string associated with the upload.
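Looking back at the ``LevelDBHook`` hunks above, the docstrings describe a small key/value API (``get_conn``, ``put``, ``get``, ``write_batch``, ``run``, ``close_conn``). A minimal usage sketch, assuming ``leveldb_default`` as the connection id (the hook's default connection name is not shown in this diff):

from airflow.providers.google.leveldb.hooks.leveldb import LevelDBHook

hook = LevelDBHook(leveldb_conn_id="leveldb_default")  # assumed connection id
db = hook.get_conn(name="/tmp/testdb/", create_if_missing=True)

hook.put(b"planet", b"earth")                       # store a single key/value pair
assert hook.get(b"planet") == b"earth"              # read the value back by key
hook.write_batch([b"k1", b"k2"], [b"v1", b"v2"])    # write several pairs at once
hook.run("delete", b"planet")                       # generic dispatcher documented above
hook.close_conn()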
@@ -180,7 +180,7 @@ def delete_upload_data( def list_uploads(self, account_id, web_property_id, custom_data_source_id) -> list[dict[str, Any]]: """ - Get list of data upload from GA + Get list of data upload from GA. :param account_id: The GA account Id to which the data upload belongs. :param web_property_id: UA-string associated with the upload. diff --git a/airflow/providers/google/marketing_platform/operators/analytics.py b/airflow/providers/google/marketing_platform/operators/analytics.py index 8767b6de1a916..2996838f12c2f 100644 --- a/airflow/providers/google/marketing_platform/operators/analytics.py +++ b/airflow/providers/google/marketing_platform/operators/analytics.py @@ -156,7 +156,7 @@ def execute(self, context: Context) -> dict[str, Any]: class GoogleAnalyticsRetrieveAdsLinksListOperator(BaseOperator): """ - Lists webProperty-Google Ads links for a given web property + Lists webProperty-Google Ads links for a given web property. .. seealso:: Check official API docs: diff --git a/airflow/providers/google/suite/hooks/calendar.py b/airflow/providers/google/suite/hooks/calendar.py index 485656716076c..b5b595067507a 100644 --- a/airflow/providers/google/suite/hooks/calendar.py +++ b/airflow/providers/google/suite/hooks/calendar.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module contains a Google Calendar API hook""" +"""This module contains a Google Calendar API hook.""" from __future__ import annotations from datetime import datetime @@ -29,9 +29,9 @@ class GoogleCalendarHook(GoogleBaseHook): """ - Interact with Google Calendar via Google Cloud connection - Reading and writing cells in Google Sheet: - https://developers.google.com/calendar/api/v3/reference + Interact with Google Calendar via Google Cloud connection. + + Reading and writing cells in Google Sheet: https://developers.google.com/calendar/api/v3/reference :param gcp_conn_id: The connection ID to use when fetching connection info. :param api_version: API Version. For example v3 @@ -97,7 +97,8 @@ def get_events( updated_min: datetime | None = None, ) -> list: """ - Gets events from Google Calendar from a single calendar_id + Gets events from Google Calendar from a single calendar_id. + https://developers.google.com/calendar/api/v3/reference/events/list :param calendar_id: The Google Calendar ID to interact with @@ -170,8 +171,9 @@ def create_event( supports_attachments: bool | None = False, ) -> dict: """ - Create event on the specified calendar - https://developers.google.com/calendar/api/v3/reference/events/insert + Create event on the specified calendar. + + https://developers.google.com/calendar/api/v3/reference/events/insert. :param calendar_id: The Google Calendar ID to interact with :param conference_data_version: Optional. Version number of conference data diff --git a/airflow/providers/google/suite/hooks/drive.py b/airflow/providers/google/suite/hooks/drive.py index 7519a9fd9b941..89d91378801c1 100644 --- a/airflow/providers/google/suite/hooks/drive.py +++ b/airflow/providers/google/suite/hooks/drive.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-"""Hook for Google Drive service""" +"""Hook for Google Drive service.""" from __future__ import annotations from typing import IO, Any, Sequence @@ -145,7 +145,7 @@ def exists( self, folder_id: str, file_name: str, drive_id: str | None = None, *, include_trashed: bool = True ) -> bool: """ - Checks to see if a file exists within a Google Drive folder + Checks to see if a file exists within a Google Drive folder. :param folder_id: The id of the Google Drive folder in which the file resides :param file_name: The name of a file in Google Drive @@ -162,7 +162,8 @@ def exists( def _get_file_info(self, file_id: str): """ - Returns Google API file_info object containing id, name, parents in the response + Returns Google API file_info object containing id, name, parents in the response. + https://developers.google.com/drive/api/v3/reference/files/get :param file_id: id as string representation of interested file @@ -182,7 +183,7 @@ def _get_file_info(self, file_id: str): def _resolve_file_path(self, file_id: str) -> str: """ - Returns the full Google Drive path for given file_id + Returns the full Google Drive path for given file_id. :param file_id: The id of a file in Google Drive :return: Google Drive full path for a file @@ -210,7 +211,7 @@ def get_file_id( self, folder_id: str, file_name: str, drive_id: str | None = None, *, include_trashed: bool = True ) -> dict: """ - Returns the file id of a Google Drive file + Returns the file id of a Google Drive file. :param folder_id: The id of the Google Drive folder in which the file resides :param file_name: The name of a file in Google Drive diff --git a/airflow/providers/google/suite/hooks/sheets.py b/airflow/providers/google/suite/hooks/sheets.py index a4aed79e0457f..7a4ae4479ec64 100644 --- a/airflow/providers/google/suite/hooks/sheets.py +++ b/airflow/providers/google/suite/hooks/sheets.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This module contains a Google Sheets API hook""" +"""This module contains a Google Sheets API hook.""" from __future__ import annotations from typing import Any, Sequence @@ -28,9 +28,9 @@ class GSheetsHook(GoogleBaseHook): """ - Interact with Google Sheets via Google Cloud connection - Reading and writing cells in Google Sheet: - https://developers.google.com/sheets/api/guides/values + Interact with Google Sheets via Google Cloud connection. + + Reading and writing cells in Google Sheet: https://developers.google.com/sheets/api/guides/values :param gcp_conn_id: The connection ID to use when fetching connection info. :param api_version: API Version @@ -85,7 +85,8 @@ def get_values( date_time_render_option: str = "SERIAL_NUMBER", ) -> list: """ - Gets values from Google Sheet from a single range + Gets values from Google Sheet from a single range. + https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/get :param spreadsheet_id: The Google Sheet ID to interact with @@ -124,7 +125,8 @@ def batch_get_values( date_time_render_option: str = "SERIAL_NUMBER", ) -> dict: """ - Gets values from Google Sheet from a list of ranges + Gets values from Google Sheet from a list of ranges. 
+ https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchGet :param spreadsheet_id: The Google Sheet ID to interact with @@ -166,7 +168,8 @@ def update_values( date_time_render_option: str = "SERIAL_NUMBER", ) -> dict: """ - Updates values from Google Sheet from a single range + Updates values from Google Sheet from a single range. + https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/update :param spreadsheet_id: The Google Sheet ID to interact with. @@ -216,7 +219,8 @@ def batch_update_values( date_time_render_option: str = "SERIAL_NUMBER", ) -> dict: """ - Updates values from Google Sheet for multiple ranges + Updates values from Google Sheet for multiple ranges. + https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchUpdate :param spreadsheet_id: The Google Sheet ID to interact with @@ -274,7 +278,8 @@ def append_values( date_time_render_option: str = "SERIAL_NUMBER", ) -> dict: """ - Append values from Google Sheet from a single range + Append values from Google Sheet from a single range. + https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/append :param spreadsheet_id: The Google Sheet ID to interact with @@ -317,7 +322,8 @@ def append_values( def clear(self, spreadsheet_id: str, range_: str) -> dict: """ - Clear values from Google Sheet from a single range + Clear values from Google Sheet from a single range. + https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/clear :param spreadsheet_id: The Google Sheet ID to interact with @@ -337,7 +343,8 @@ def clear(self, spreadsheet_id: str, range_: str) -> dict: def batch_clear(self, spreadsheet_id: str, ranges: list) -> dict: """ - Clear values from Google Sheet from a list of ranges + Clear values from Google Sheet from a list of ranges. + https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/batchClear :param spreadsheet_id: The Google Sheet ID to interact with diff --git a/airflow/providers/google/suite/transfers/local_to_drive.py b/airflow/providers/google/suite/transfers/local_to_drive.py index 7201732218297..490be7556120a 100644 --- a/airflow/providers/google/suite/transfers/local_to_drive.py +++ b/airflow/providers/google/suite/transfers/local_to_drive.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""This file contains Google Drive operators""" +"""This file contains Google Drive operators.""" from __future__ import annotations import os @@ -33,7 +33,7 @@ class LocalFilesystemToGoogleDriveOperator(BaseOperator): """ Upload a list of files to a Google Drive folder. This operator uploads a list of local files to a Google Drive folder. - The local files can be deleted after upload (optional) + The local files can be deleted after upload (optional). .. seealso:: For more information on how to use this operator, take a look at the guide: