diff --git a/google/cloud/aiplatform/base.py b/google/cloud/aiplatform/base.py
index 757cf2cf99..e0b0ae6312 100644
--- a/google/cloud/aiplatform/base.py
+++ b/google/cloud/aiplatform/base.py
@@ -686,6 +686,13 @@ def to_dict(self) -> Dict[str, Any]:
         """Returns the resource proto as a dictionary."""
         return json_format.MessageToDict(self.gca_resource._pb)

+    @classmethod
+    def _generate_display_name(cls, prefix: Optional[str] = None) -> str:
+        """Returns a display name containing class name and time string."""
+        if not prefix:
+            prefix = cls.__name__
+        return prefix + " " + datetime.datetime.now().isoformat(sep=" ")
+

 def optional_sync(
     construct_object_on_arg: Optional[str] = None,
diff --git a/google/cloud/aiplatform/datasets/dataset.py b/google/cloud/aiplatform/datasets/dataset.py
index 757af1922d..97651adefb 100644
--- a/google/cloud/aiplatform/datasets/dataset.py
+++ b/google/cloud/aiplatform/datasets/dataset.py
@@ -106,6 +106,7 @@ def _validate_metadata_schema_uri(self) -> None:
     @classmethod
     def create(
         cls,
+        # TODO(b/223262536): Make the display_name parameter optional in the next major release
         display_name: str,
         metadata_schema_uri: str,
         gcs_source: Optional[Union[str, Sequence[str]]] = None,
@@ -211,7 +212,8 @@ def create(
             dataset (Dataset):
                 Instantiated representation of the managed dataset resource.
         """
-
+        if not display_name:
+            display_name = cls._generate_display_name()
         utils.validate_display_name(display_name)
         if labels:
             utils.validate_labels(labels)
diff --git a/google/cloud/aiplatform/datasets/image_dataset.py b/google/cloud/aiplatform/datasets/image_dataset.py
index 98571f1d69..4d6db86a7a 100644
--- a/google/cloud/aiplatform/datasets/image_dataset.py
+++ b/google/cloud/aiplatform/datasets/image_dataset.py
@@ -36,7 +36,7 @@ class ImageDataset(datasets._Dataset):
     @classmethod
     def create(
         cls,
-        display_name: str,
+        display_name: Optional[str] = None,
         gcs_source: Optional[Union[str, Sequence[str]]] = None,
         import_schema_uri: Optional[str] = None,
         data_item_labels: Optional[Dict] = None,
@@ -54,7 +54,7 @@ def create(

         Args:
             display_name (str):
-                Required. The user-defined name of the Dataset.
+                Optional. The user-defined name of the Dataset.
                 The name can be up to 128 characters long and can be consist
                 of any UTF-8 characters.
             gcs_source (Union[str, Sequence[str]]):
@@ -129,6 +129,8 @@ def create(
             image_dataset (ImageDataset):
                 Instantiated representation of the managed image dataset resource.
         """
+        if not display_name:
+            display_name = cls._generate_display_name()
         utils.validate_display_name(display_name)

         if labels:
diff --git a/google/cloud/aiplatform/datasets/tabular_dataset.py b/google/cloud/aiplatform/datasets/tabular_dataset.py
index 62ddf43d9d..ec9769bb7f 100644
--- a/google/cloud/aiplatform/datasets/tabular_dataset.py
+++ b/google/cloud/aiplatform/datasets/tabular_dataset.py
@@ -36,7 +36,7 @@ class TabularDataset(datasets._ColumnNamesDataset):
     @classmethod
     def create(
         cls,
-        display_name: str,
+        display_name: Optional[str] = None,
         gcs_source: Optional[Union[str, Sequence[str]]] = None,
         bq_source: Optional[str] = None,
         project: Optional[str] = None,
@@ -52,7 +52,7 @@ def create(

         Args:
             display_name (str):
-                Required. The user-defined name of the Dataset.
+                Optional. The user-defined name of the Dataset.
                 The name can be up to 128 characters long and can be consist
                 of any UTF-8 characters.
             gcs_source (Union[str, Sequence[str]]):
@@ -110,7 +110,8 @@ def create(
             tabular_dataset (TabularDataset):
                 Instantiated representation of the managed tabular dataset resource.
""" - + if not display_name: + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/text_dataset.py b/google/cloud/aiplatform/datasets/text_dataset.py index 7126c4e53a..64fab743b3 100644 --- a/google/cloud/aiplatform/datasets/text_dataset.py +++ b/google/cloud/aiplatform/datasets/text_dataset.py @@ -36,7 +36,7 @@ class TextDataset(datasets._Dataset): @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, gcs_source: Optional[Union[str, Sequence[str]]] = None, import_schema_uri: Optional[str] = None, data_item_labels: Optional[Dict] = None, @@ -61,7 +61,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Dataset. + Optional. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. gcs_source (Union[str, Sequence[str]]): @@ -136,7 +136,8 @@ def create( text_dataset (TextDataset): Instantiated representation of the managed text dataset resource. """ - + if not display_name: + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/time_series_dataset.py b/google/cloud/aiplatform/datasets/time_series_dataset.py index aab96eda90..ec5546f12a 100644 --- a/google/cloud/aiplatform/datasets/time_series_dataset.py +++ b/google/cloud/aiplatform/datasets/time_series_dataset.py @@ -36,7 +36,7 @@ class TimeSeriesDataset(datasets._ColumnNamesDataset): @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, gcs_source: Optional[Union[str, Sequence[str]]] = None, bq_source: Optional[str] = None, project: Optional[str] = None, @@ -51,7 +51,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Dataset. + Optional. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. gcs_source (Union[str, Sequence[str]]): @@ -108,7 +108,8 @@ def create( Instantiated representation of the managed time series dataset resource. """ - + if not display_name: + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/datasets/video_dataset.py b/google/cloud/aiplatform/datasets/video_dataset.py index f98a70b596..c04f136d2d 100644 --- a/google/cloud/aiplatform/datasets/video_dataset.py +++ b/google/cloud/aiplatform/datasets/video_dataset.py @@ -36,7 +36,7 @@ class VideoDataset(datasets._Dataset): @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, gcs_source: Optional[Union[str, Sequence[str]]] = None, import_schema_uri: Optional[str] = None, data_item_labels: Optional[Dict] = None, @@ -54,7 +54,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Dataset. + Optional. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. gcs_source (Union[str, Sequence[str]]): @@ -129,7 +129,8 @@ def create( video_dataset (VideoDataset): Instantiated representation of the managed video dataset resource. 
""" - + if not display_name: + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) diff --git a/google/cloud/aiplatform/jobs.py b/google/cloud/aiplatform/jobs.py index 0a12d1b4fc..fc4f829882 100644 --- a/google/cloud/aiplatform/jobs.py +++ b/google/cloud/aiplatform/jobs.py @@ -349,6 +349,7 @@ def completion_stats(self) -> Optional[gca_completion_stats.CompletionStats]: @classmethod def create( cls, + # TODO(b/223262536): Make the job_display_name parameter optional in the next major release job_display_name: str, model_name: Union[str, "aiplatform.Model"], instances_format: str = "jsonl", @@ -537,6 +538,8 @@ def create( (jobs.BatchPredictionJob): Instantiated representation of the created batch prediction job. """ + if not job_display_name: + job_display_name = cls._generate_display_name() utils.validate_display_name(job_display_name) @@ -1032,6 +1035,7 @@ class CustomJob(_RunnableJob): def __init__( self, + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: str, worker_pool_specs: Union[List[Dict], List[aiplatform.gapic.WorkerPoolSpec]], base_output_dir: Optional[str] = None, @@ -1136,6 +1140,9 @@ def __init__( staging_bucket, "aiplatform-custom-job" ) + if not display_name: + display_name = self.__class__._generate_display_name() + self._gca_resource = gca_custom_job_compat.CustomJob( display_name=display_name, job_spec=gca_custom_job_compat.CustomJobSpec( @@ -1193,6 +1200,7 @@ def _log_web_access_uris(self): @classmethod def from_local_script( cls, + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: str, script_path: str, container_uri: str, @@ -1521,6 +1529,7 @@ class HyperparameterTuningJob(_RunnableJob): def __init__( self, + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: str, custom_job: CustomJob, metric_spec: Dict[str, str], @@ -1717,6 +1726,9 @@ def __init__( ], ) + if not display_name: + display_name = self.__class__._generate_display_name() + self._gca_resource = ( gca_hyperparameter_tuning_job_compat.HyperparameterTuningJob( display_name=display_name, diff --git a/google/cloud/aiplatform/models.py b/google/cloud/aiplatform/models.py index ae019029fa..ab22d20f10 100644 --- a/google/cloud/aiplatform/models.py +++ b/google/cloud/aiplatform/models.py @@ -197,7 +197,7 @@ def network(self) -> Optional[str]: @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, description: Optional[str] = None, labels: Optional[Dict[str, str]] = None, metadata: Optional[Sequence[Tuple[str, str]]] = (), @@ -212,7 +212,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Endpoint. + Optional. The user-defined name of the Endpoint. The name can be up to 128 characters long and can be consist of any UTF-8 characters. 
            project (str):
@@ -263,6 +263,9 @@ def create(

         api_client = cls._instantiate_client(location=location, credentials=credentials)

+        if not display_name:
+            display_name = cls._generate_display_name()
+
         utils.validate_display_name(display_name)
         if labels:
             utils.validate_labels(labels)
@@ -1654,7 +1657,6 @@ def update(
     @base.optional_sync()
     def upload(
         cls,
-        display_name: str,
         serving_container_image_uri: str,
         *,
         artifact_uri: Optional[str] = None,
@@ -1670,6 +1672,7 @@ def upload(
         prediction_schema_uri: Optional[str] = None,
         explanation_metadata: Optional[explain.ExplanationMetadata] = None,
         explanation_parameters: Optional[explain.ExplanationParameters] = None,
+        display_name: Optional[str] = None,
         project: Optional[str] = None,
         location: Optional[str] = None,
         credentials: Optional[auth_credentials.Credentials] = None,
@@ -1692,7 +1695,7 @@ def upload(

         Args:
             display_name (str):
-                Required. The display name of the Model. The name can be up to 128
+                Optional. The display name of the Model. The name can be up to 128
                 characters long and can be consist of any UTF-8 characters.
             serving_container_image_uri (str):
                 Required. The URI of the Model serving container.
@@ -1832,6 +1835,8 @@ def upload(
                 is specified.
                 Also if model directory does not contain a supported model file.
         """
+        if not display_name:
+            display_name = cls._generate_display_name()
         utils.validate_display_name(display_name)
         if labels:
             utils.validate_labels(labels)
@@ -2231,7 +2236,7 @@ def _deploy(

     def batch_predict(
         self,
-        job_display_name: str,
+        job_display_name: Optional[str] = None,
         gcs_source: Optional[Union[str, Sequence[str]]] = None,
         bigquery_source: Optional[str] = None,
         instances_format: str = "jsonl",
@@ -2269,7 +2274,7 @@ def batch_predict(

         Args:
             job_display_name (str):
-                Required. The user-defined name of the BatchPredictionJob.
+                Optional. The user-defined name of the BatchPredictionJob.
                 The name can be up to 128 characters long and can be consist
                 of any UTF-8 characters.
             gcs_source: Optional[Sequence[str]] = None
@@ -2636,7 +2641,7 @@ def upload_xgboost_model_file(
         cls,
         model_file_path: str,
         xgboost_version: Optional[str] = None,
-        display_name: str = "XGBoost model",
+        display_name: Optional[str] = None,
         description: Optional[str] = None,
         instance_schema_uri: Optional[str] = None,
         parameters_schema_uri: Optional[str] = None,
@@ -2769,6 +2774,9 @@ def upload_xgboost_model_file(
                 is specified.
                 Also if model directory does not contain a supported model file.
         """
+        if not display_name:
+            display_name = cls._generate_display_name("XGBoost model")
+
         XGBOOST_SUPPORTED_MODEL_FILE_EXTENSIONS = [
             ".pkl",
             ".joblib",
@@ -2835,7 +2843,7 @@ def upload_scikit_learn_model_file(
         cls,
         model_file_path: str,
         sklearn_version: Optional[str] = None,
-        display_name: str = "Scikit-learn model",
+        display_name: Optional[str] = None,
         description: Optional[str] = None,
         instance_schema_uri: Optional[str] = None,
         parameters_schema_uri: Optional[str] = None,
@@ -2969,6 +2977,9 @@ def upload_scikit_learn_model_file(
                 is specified.
                 Also if model directory does not contain a supported model file.
""" + if not display_name: + display_name = cls._generate_display_name("Scikit-Learn model") + SKLEARN_SUPPORTED_MODEL_FILE_EXTENSIONS = [ ".pkl", ".joblib", @@ -3034,7 +3045,7 @@ def upload_tensorflow_saved_model( saved_model_dir: str, tensorflow_version: Optional[str] = None, use_gpu: bool = False, - display_name: str = "Tensorflow model", + display_name: Optional[str] = None, description: Optional[str] = None, instance_schema_uri: Optional[str] = None, parameters_schema_uri: Optional[str] = None, @@ -3170,6 +3181,9 @@ def upload_tensorflow_saved_model( is specified. Also if model directory does not contain a supported model file. """ + if not display_name: + display_name = cls._generate_display_name("Tensorflow model") + container_image_uri = aiplatform.helpers.get_prebuilt_prediction_container_uri( region=location, framework="tensorflow", diff --git a/google/cloud/aiplatform/pipeline_jobs.py b/google/cloud/aiplatform/pipeline_jobs.py index bc2ddee0e8..3d3788fb8f 100644 --- a/google/cloud/aiplatform/pipeline_jobs.py +++ b/google/cloud/aiplatform/pipeline_jobs.py @@ -92,6 +92,7 @@ class PipelineJob(base.VertexAiStatefulResource): def __init__( self, + # TODO(b/223262536): Make the display_name parameter optional in the next major release display_name: str, template_path: str, job_id: Optional[str] = None, @@ -160,6 +161,8 @@ def __init__( Raises: ValueError: If job_id or labels have incorrect format. """ + if not display_name: + display_name = self.__class__._generate_display_name() utils.validate_display_name(display_name) if labels: diff --git a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py index 2bfab2c41b..b838ad039a 100644 --- a/google/cloud/aiplatform/tensorboard/tensorboard_resource.py +++ b/google/cloud/aiplatform/tensorboard/tensorboard_resource.py @@ -82,7 +82,7 @@ def __init__( @classmethod def create( cls, - display_name: str, + display_name: Optional[str] = None, description: Optional[str] = None, labels: Optional[Dict[str, str]] = None, project: Optional[str] = None, @@ -107,7 +107,7 @@ def create( Args: display_name (str): - Required. The user-defined name of the Tensorboard. + Optional. The user-defined name of the Tensorboard. The name can be up to 128 characters long and can be consist of any UTF-8 characters. description (str): @@ -151,6 +151,8 @@ def create( tensorboard (Tensorboard): Instantiated representation of the managed tensorboard resource. """ + if not display_name: + display_name = cls._generate_display_name() utils.validate_display_name(display_name) if labels: diff --git a/google/cloud/aiplatform/training_jobs.py b/google/cloud/aiplatform/training_jobs.py index dd64b7f2af..fe47ef3c5a 100644 --- a/google/cloud/aiplatform/training_jobs.py +++ b/google/cloud/aiplatform/training_jobs.py @@ -81,7 +81,7 @@ class _TrainingJob(base.VertexAiStatefulResource): def __init__( self, - display_name: str, + display_name: Optional[str] = None, project: Optional[str] = None, location: Optional[str] = None, credentials: Optional[auth_credentials.Credentials] = None, @@ -93,7 +93,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. project (str): Optional project to retrieve model from. If not set, project set in aiplatform.init will be used. @@ -138,6 +138,8 @@ def __init__( Overrides encryption_spec_key_name set in aiplatform.init. 
""" + if not display_name: + display_name = self.__class__._generate_display_name() utils.validate_display_name(display_name) if labels: utils.validate_labels(labels) @@ -1020,6 +1022,7 @@ class _CustomTrainingJob(_TrainingJob): def __init__( self, + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: str, container_uri: str, model_serving_container_image_uri: Optional[str] = None, @@ -1180,6 +1183,8 @@ def __init__( Bucket used to stage source and training artifacts. Overrides staging_bucket set in aiplatform.init. """ + if not display_name: + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -1564,6 +1569,7 @@ class CustomTrainingJob(_CustomTrainingJob): def __init__( self, + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: str, script_path: str, container_uri: str, @@ -1763,6 +1769,8 @@ def __init__( Bucket used to stage source and training artifacts. Overrides staging_bucket set in aiplatform.init. """ + if not display_name: + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -2401,6 +2409,7 @@ class CustomContainerTrainingJob(_CustomTrainingJob): def __init__( self, + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: str, container_uri: str, command: Sequence[str] = None, @@ -2598,6 +2607,8 @@ def __init__( Bucket used to stage source and training artifacts. Overrides staging_bucket set in aiplatform.init. """ + if not display_name: + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -3215,6 +3226,7 @@ class AutoMLTabularTrainingJob(_TrainingJob): def __init__( self, + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: str, optimization_prediction_type: str, optimization_objective: Optional[str] = None, @@ -3368,6 +3380,8 @@ def __init__( Raises: ValueError: If both column_transformations and column_specs were provided. """ + if not display_name: + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -3875,7 +3889,7 @@ class AutoMLForecastingTrainingJob(_TrainingJob): def __init__( self, - display_name: str, + display_name: Optional[str] = None, optimization_objective: Optional[str] = None, column_specs: Optional[Dict[str, str]] = None, column_transformations: Optional[List[Dict[str, Dict[str, str]]]] = None, @@ -3890,7 +3904,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. optimization_objective (str): Optional. Objective function the model is to be optimized towards. The training process creates a Model that optimizes the value of the objective @@ -3972,6 +3986,8 @@ def __init__( Raises: ValueError: If both column_transformations and column_specs were provided. 
""" + if not display_name: + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -4550,7 +4566,7 @@ class AutoMLImageTrainingJob(_TrainingJob): def __init__( self, - display_name: str, + display_name: Optional[str] = None, prediction_type: str = "classification", multi_label: bool = False, model_type: str = "CLOUD", @@ -4566,7 +4582,7 @@ def __init__( Args: display_name (str): - Required. The user-defined name of this TrainingPipeline. + Optional. The user-defined name of this TrainingPipeline. prediction_type (str): The type of prediction the Model is to produce, one of: "classification" - Predict one out of multiple target values is @@ -4662,6 +4678,8 @@ def __init__( Raises: ValueError: When an invalid prediction_type or model_type is provided. """ + if not display_name: + display_name = self.__class__._generate_display_name() valid_model_types = constants.AUTOML_IMAGE_PREDICTION_MODEL_TYPES.get( prediction_type, None @@ -5056,6 +5074,7 @@ class CustomPythonPackageTrainingJob(_CustomTrainingJob): def __init__( self, + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: str, python_package_gcs_uri: str, python_module_name: str, @@ -5258,6 +5277,8 @@ def __init__( Bucket used to stage source and training artifacts. Overrides staging_bucket set in aiplatform.init. """ + if not display_name: + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, @@ -5863,7 +5884,7 @@ class AutoMLVideoTrainingJob(_TrainingJob): def __init__( self, - display_name: str, + display_name: Optional[str] = None, prediction_type: str = "classification", model_type: str = "CLOUD", project: Optional[str] = None, @@ -5957,6 +5978,9 @@ def __init__( Raises: ValueError: When an invalid prediction_type and/or model_type is provided. """ + if not display_name: + display_name = self.__class__._generate_display_name() + valid_model_types = constants.AUTOML_VIDEO_PREDICTION_MODEL_TYPES.get( prediction_type, None ) @@ -6238,6 +6262,7 @@ class AutoMLTextTrainingJob(_TrainingJob): def __init__( self, + # TODO(b/223262536): Make display_name parameter fully optional in next major release display_name: str, prediction_type: str, multi_label: bool = False, @@ -6325,6 +6350,8 @@ def __init__( Overrides encryption_spec_key_name set in aiplatform.init. """ + if not display_name: + display_name = self.__class__._generate_display_name() super().__init__( display_name=display_name, project=project, diff --git a/tests/unit/aiplatform/test_datasets.py b/tests/unit/aiplatform/test_datasets.py index bab93280b5..db9d06a25b 100644 --- a/tests/unit/aiplatform/test_datasets.py +++ b/tests/unit/aiplatform/test_datasets.py @@ -1214,6 +1214,23 @@ def test_create_dataset(self, create_dataset_mock, sync): timeout=None, ) + @pytest.mark.usefixtures("get_dataset_tabular_bq_mock") + @pytest.mark.parametrize("sync", [True, False]) + def test_create_dataset_with_default_display_name(self, create_dataset_mock, sync): + + my_dataset = datasets.TabularDataset.create( + bq_source=_TEST_SOURCE_URI_BQ, + sync=sync, + ) + + if not sync: + my_dataset.wait() + + create_dataset_mock.assert_called_once() + create_dataset_mock.call_args[1]["dataset"].display_name.startswith( + "TabularDataset " + ) + @pytest.mark.usefixtures("get_dataset_tabular_bq_mock") def test_no_import_data_method(self):