From a26d650146aab43f9c3c15ecec76e33910849de0 Mon Sep 17 00:00:00 2001
From: "Xin Zou (AI PLATFORM)"
Date: Thu, 1 Aug 2019 10:59:39 +0800
Subject: [PATCH] built-in models, save sklearn & keras models

---
 builtin-models/builtin_models/__init__.py     |   0
 builtin-models/builtin_models/environment.py  |  66 +++++++++
 builtin-models/builtin_models/keras.py        |  67 +++++++++
 builtin-models/builtin_models/sklearn.py      |  71 ++++++++++
 builtin-models/setup.py                       |  12 ++
 builtin-models/test/__init__.py               |   0
 builtin-models/test/builtin_models_test.py    |  19 +++
 .../builtin_score/keras_score_module.py       |  31 +---
 .../builtin_score/sklearn_score_module.py     |  15 +-
 .../dstest/keras/saved_model_predict_test.py  |  17 +--
 .../dstest/keras/trainer_and_save_model_h5.py |  60 +------
 .../keras/trainer_and_save_model_weights.py   | 133 ------------------
 .../sklearn/load_saved_model_predict_test.py  |  26 ++++
 dstest/dstest/sklearn/trainer.py              |  36 +++--
 ...t_saved_model2.yaml => sklearn_train.yaml} |   8 +-
 15 files changed, 300 insertions(+), 261 deletions(-)
 create mode 100644 builtin-models/builtin_models/__init__.py
 create mode 100644 builtin-models/builtin_models/environment.py
 create mode 100644 builtin-models/builtin_models/keras.py
 create mode 100644 builtin-models/builtin_models/sklearn.py
 create mode 100644 builtin-models/setup.py
 create mode 100644 builtin-models/test/__init__.py
 create mode 100644 builtin-models/test/builtin_models_test.py
 delete mode 100644 dstest/dstest/keras/trainer_and_save_model_weights.py
 create mode 100644 dstest/dstest/sklearn/load_saved_model_predict_test.py
 rename dstest/module_specs/{keras_minist_saved_model2.yaml => sklearn_train.yaml} (75%)

diff --git a/builtin-models/builtin_models/__init__.py b/builtin-models/builtin_models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/builtin-models/builtin_models/environment.py b/builtin-models/builtin_models/environment.py
new file mode 100644
index 0000000..fe132ef
--- /dev/null
+++ b/builtin-models/builtin_models/environment.py
@@ -0,0 +1,66 @@
+import os
+import yaml
+import json
+from sys import version_info
+
+PYTHON_VERSION = "{major}.{minor}.{micro}".format(major=version_info.major,
+                                                  minor=version_info.minor,
+                                                  micro=version_info.micro)
+_conda_header = """\
+name: project_environment
+channels:
+  - defaults
+"""
+
+_extra_index_url = "--extra-index-url=https://test.pypi.org/simple"
+_alghost_pip = "alghost==0.0.59"
+_azureml_defaults_pip = "azureml-defaults"
+
+# temp solution, would remove later
+_data_type_file_name = "data_type.json"
+_data_ilearner_file_name = "data.ilearner"
+
+
+def _generate_conda_env(path=None, additional_conda_deps=None, additional_pip_deps=None,
+                        additional_conda_channels=None, install_alghost=True, install_azureml=True):
+    env = yaml.safe_load(_conda_header)
+    env["dependencies"] = ["python={}".format(PYTHON_VERSION), "git", "regex"]
+    pip_deps = ([_extra_index_url, _alghost_pip] if install_alghost else []) + (
+        [_azureml_defaults_pip] if install_azureml else []) + (
+        additional_pip_deps if additional_pip_deps else [])
+    if additional_conda_deps is not None:
+        env["dependencies"] += additional_conda_deps
+    env["dependencies"].append({"pip": pip_deps})
+    if additional_conda_channels is not None:
+        env["channels"] += additional_conda_channels
+
+    if path is not None:
+        with open(path, "w") as out:
+            yaml.safe_dump(env, stream=out, default_flow_style=False)
+        return None
+    else:
+        return env
+
+
+def _generate_ilearner_files(path):
+    # Dump data_type.json as a work around until SMT deploys
+    dct =
{ + "Id": "ILearnerDotNet", + "Name": "ILearner .NET file", + "ShortName": "Model", + "Description": "A .NET serialized ILearner", + "IsDirectory": False, + "Owner": "Microsoft Corporation", + "FileExtension": "ilearner", + "ContentType": "application/octet-stream", + "AllowUpload": False, + "AllowPromotion": False, + "AllowModelPromotion": True, + "AuxiliaryFileExtension": None, + "AuxiliaryContentType": None + } + with open(os.path.join(path, _data_type_file_name), 'w') as fp: + json.dump(dct, fp) + # Dump data.ilearner as a work around until data type design + with open(os.path.join(path, _data_ilearner_file_name), 'w') as fp: + fp.writelines('{}') diff --git a/builtin-models/builtin_models/keras.py b/builtin-models/builtin_models/keras.py new file mode 100644 index 0000000..7cf4a7c --- /dev/null +++ b/builtin-models/builtin_models/keras.py @@ -0,0 +1,67 @@ +import os +import yaml + +from builtin_models.environment import _generate_conda_env +from builtin_models.environment import _generate_ilearner_files + +FLAVOR_NAME = "keras" +model_file_name = "model.h5" +conda_file_name = "conda.yaml" +model_spec_file_name = "model_spec.yml" + +def _get_default_conda_env(): + import keras + import tensorflow as tf + + return _generate_conda_env( + additional_pip_deps=[ + "keras=={}".format(keras.__version__), + "tensorflow=={}".format(tf.__version__), + ]) + + +def _save_conda_env(path, conda_env=None): + if conda_env is None: + conda_env = _get_default_conda_env() + elif not isinstance(conda_env, dict): + with open(conda_env, "r") as f: # conda_env is a file + conda_env = yaml.safe_load(f) + with open(os.path.join(path, conda_file_name), "w") as f: + yaml.safe_dump(conda_env, stream=f, default_flow_style=False) + + +def _save_model_spec(path): + spec = { + 'flavor' : { + 'framework' : FLAVOR_NAME + }, + FLAVOR_NAME: { + 'model_file_path': model_file_name + }, + 'conda': { + 'conda_file_path': conda_file_name + }, + } + with open(os.path.join(path, model_spec_file_name), 'w') as fp: + yaml.dump(spec, fp, default_flow_style=False) + + +def load_model_from_local_file(path): + from keras.models import load_model + return load_model(path) + + +def save_model(keras_model, path, conda_env=None): + import keras + + if(not path.endswith('/')): + path += '/' + if not os.path.exists(path): + os.makedirs(path) + + keras_model.save(os.path.join(path, model_file_name)) + _save_conda_env(path, conda_env) + _save_model_spec(path) + _generate_ilearner_files(path) # temp solution, to remove later + + \ No newline at end of file diff --git a/builtin-models/builtin_models/sklearn.py b/builtin-models/builtin_models/sklearn.py new file mode 100644 index 0000000..5aabc3c --- /dev/null +++ b/builtin-models/builtin_models/sklearn.py @@ -0,0 +1,71 @@ +import os +import yaml +import pickle + +from builtin_models.environment import _generate_conda_env +from builtin_models.environment import _generate_ilearner_files + +FLAVOR_NAME = "sklearn" +model_file_name = "model.pkl" +conda_file_name = "conda.yaml" +model_spec_file_name = "model_spec.yml" + +def _get_default_conda_env(): + import sklearn + + return _generate_conda_env( + additional_pip_deps=[ + "scikit-learn=={}".format(sklearn.__version__) + ]) + + +def _save_conda_env(path, conda_env=None): + if conda_env is None: + conda_env = _get_default_conda_env() + elif not isinstance(conda_env, dict): + with open(conda_env, "r") as f: # conda_env is a file + conda_env = yaml.safe_load(f) + with open(os.path.join(path, conda_file_name), "w") as f: + yaml.safe_dump(conda_env, 
stream=f, default_flow_style=False) + + +def _save_model_spec(path): + spec = { + 'flavor' : { + 'framework' : FLAVOR_NAME + }, + FLAVOR_NAME: { + 'model_file_path': model_file_name + }, + 'conda': { + 'conda_file_path': conda_file_name + }, + } + with open(os.path.join(path, model_spec_file_name), 'w') as fp: + yaml.dump(spec, fp, default_flow_style=False) + + +def _save_model(sklearn_model, path): + with open(os.path.join(path, model_file_name), "wb") as fb: + pickle.dump(sklearn_model, fb) + + +def load_model_from_local_file(path): + with open(path, "rb") as f: + return pickle.load(f) + + +def save_model(sklearn_model, path, conda_env=None): + import sklearn + + if(not path.endswith('/')): + path += '/' + if not os.path.exists(path): + os.makedirs(path) + + _save_model(sklearn_model, path) + _save_conda_env(path, conda_env) + _save_model_spec(path) + _generate_ilearner_files(path) # temp solution, to remove later + + \ No newline at end of file diff --git a/builtin-models/setup.py b/builtin-models/setup.py new file mode 100644 index 0000000..1af1cda --- /dev/null +++ b/builtin-models/setup.py @@ -0,0 +1,12 @@ +from setuptools import setup + +# python setup.py install +setup( + name="builtin_models", + version="0.0.1", + description="builtin_models", + packages=["builtin_models"], + author="Xin Zou", + license="MIT", + include_package_data=True, +) \ No newline at end of file diff --git a/builtin-models/test/__init__.py b/builtin-models/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/builtin-models/test/builtin_models_test.py b/builtin-models/test/builtin_models_test.py new file mode 100644 index 0000000..3e19a9b --- /dev/null +++ b/builtin-models/test/builtin_models_test.py @@ -0,0 +1,19 @@ + +# python -m test.builtin_models_test +if __name__ == '__main__': + # keras test + from builtin_models.keras import * + print('---keras test---') + model = load_model_from_local_file('D:/GIT/CustomModules-migu-NewYamlTest2/dstest/model/keras-mnist/model.h5') + print('------') + save_model(model, "./test/outputModels/keras/") + print('********') + + #sklearn test + from builtin_models.sklearn import * + print('---sklearn test---') + model = load_model_from_local_file('D:/GIT/CustomModules-migu-NewYamlTest2/dstest/dstest/sklearn/model/sklearn/model.pkl') + print('------') + save_model(model, "./test/outputModels/sklearn/") + print('********') + diff --git a/builtin-score/builtin_score/keras_score_module.py b/builtin-score/builtin_score/keras_score_module.py index d0c1482..476e86b 100644 --- a/builtin-score/builtin_score/keras_score_module.py +++ b/builtin-score/builtin_score/keras_score_module.py @@ -12,13 +12,10 @@ class KerasScoreModule(object): def __init__(self, model_path, config): keras_conf = config["keras"] - serializer = keras_conf.get('serialization_format', 'load_model') - if serializer == 'load_model': - self.model = load_model(os.path.join(model_path, keras_conf[constants.MODEL_FILE_PATH_KEY])) - elif serializer == 'load_weights': - self.load_model_via_weights(model_path, keras_conf) + self.model = load_model(os.path.join(model_path, keras_conf[constants.MODEL_FILE_PATH_KEY])) print(f"Successfully loaded model from {model_path}") + def run(self, df): df_output = pd.DataFrame([]) for _, row in df.iterrows(): @@ -38,31 +35,7 @@ def run(self, df): return df_output - def load_model_via_weights(self, model_path, config): - model_yaml_file = config.get('model_yaml_file','') - model_json_file = config.get('model_json_file','') - model_data = '' - if model_json_file != 
'': - import json - from keras.models import model_from_json - with open(os.path.join(model_path, model_json_file), 'r') as f: - model_data = json.load(f) - self.model = model_from_json(model_data) - elif model_yaml_file != '': - import yaml - from keras.models import model_from_yaml - with open(os.path.join(model_path, model_yaml_file), 'r') as f: - model_data = yaml.safe_load(f) - self.model = model_from_yaml(model_data) - else: - raise Exception(f"Unable to load model, config = {config}") - - model_weights_file = config.get('model_weights_file','') - if model_weights_file == '': - raise Exception(f"model_weights_file is empty, config = {config}") - self.model.load_weights(os.path.join(model_path, model_weights_file)) - def is_image(self, row): # TO DO: if(len(row)>100): diff --git a/builtin-score/builtin_score/sklearn_score_module.py b/builtin-score/builtin_score/sklearn_score_module.py index fc1d01c..f409be8 100644 --- a/builtin-score/builtin_score/sklearn_score_module.py +++ b/builtin-score/builtin_score/sklearn_score_module.py @@ -14,18 +14,9 @@ class SklearnScoreModule(object): def __init__(self, model_path, config): sklearn_conf = config["sklearn"] model_file_path = os.path.join(model_path, sklearn_conf[constants.MODEL_FILE_PATH_KEY]) - DEFAULT_SERIALIZATION_METHOD = "pickle" - serialization_method = sklearn_conf.get(constants.SERIALIZATION_METHOD_KEY) - if serialization_method is None: - print(f"Using default deserializtion method: {DEFAULT_SERIALIZATION_METHOD}") - serialization_method = pickle - if serialization_method == "joblib": - self.model = joblib.load(model_file_path) - elif serialization_method == "pickle": - with open(model_file_path, "rb") as fp: - self.model = pickle.load(fp) - else: - raise Exception(f"Unrecognized serializtion format {serialization_method}") + with open(model_file_path, "rb") as fp: + self.model = pickle.load(fp) + def run(self, df): y = self.model.predict(df) diff --git a/dstest/dstest/keras/saved_model_predict_test.py b/dstest/dstest/keras/saved_model_predict_test.py index 01370c0..cfa7e75 100644 --- a/dstest/dstest/keras/saved_model_predict_test.py +++ b/dstest/dstest/keras/saved_model_predict_test.py @@ -5,15 +5,12 @@ import pandas as pd import numpy as np -# This is a placeholder for a Google-internal import. 
import tensorflow as tf from builtin_score.builtin_score_module import * -from builtin_score.keras_score_module import KerasScoreModule -from keras.models import load_model from keras.datasets import mnist -def load_model_then_predict1(model_spec_file = 'model/keras-mnist/model_spec.yml'): +def load_model_then_predict(model_path = "./model/keras-mnist/"): (x_train, y_train), (x_test, y_test) = mnist.load_data() x_test = x_test.reshape(x_test.shape[0], -1) # x_test shape [x_test.shape[0], 784] x_test = x_test[:8] # only pick 8 imgs @@ -22,14 +19,6 @@ def load_model_then_predict1(model_spec_file = 'model/keras-mnist/model_spec.yml df = pd.DataFrame(data=x_test, columns=['img']*784, dtype=np.float64) df.to_csv('mnist_kera_test_data.csv') - with open(model_spec_file) as fp: - config = yaml.safe_load(fp) - - model_path = "./model/keras-mnist/" - keras_model = KerasScoreModule(model_path, config) - result = keras_model.run(df) - print(result) - module = BuiltinScoreModule(model_path) result = module.run(df) print('=====buildinScoreModule=======') @@ -37,6 +26,4 @@ def load_model_then_predict1(model_spec_file = 'model/keras-mnist/model_spec.yml # python -m dstest.keras.saved_model_predict_test if __name__ == '__main__': - load_model_then_predict1() - print('\n===============another load model method================\n') - load_model_then_predict1('model/keras-mnist/model_weights_spec.yml') + load_model_then_predict() diff --git a/dstest/dstest/keras/trainer_and_save_model_h5.py b/dstest/dstest/keras/trainer_and_save_model_h5.py index 82f9329..79eb02a 100644 --- a/dstest/dstest/keras/trainer_and_save_model_h5.py +++ b/dstest/dstest/keras/trainer_and_save_model_h5.py @@ -8,6 +8,7 @@ from keras.layers import Dense from keras import backend as K +from builtin_models.keras import * # Test dynamic install package from pip._internal import main as pipmain @@ -21,58 +22,6 @@ logger = logging.getLogger(__name__) -def save_model_spec(model_path): - spec = { - 'flavor' : { - 'framework' : 'keras' - }, - 'keras': { - 'model_file_path': 'model.h5' - }, - } - - with open(os.path.join(model_path, "model_spec.yml"), 'w') as fp: - yaml.dump(spec, fp, default_flow_style=False) - - -def save_model(model_path, model): - if(not model_path.endswith('/')): - model_path += '/' - - if not os.path.exists(model_path): - logger.info(f"{model_path} not exists") - os.makedirs(model_path) - else: - logger.info(f"{model_path} exists") - - model.save(model_path + "model.h5") - - -def save_ilearner(model_path): - # Dump data_type.json as a work around until SMT deploys - dct = { - "Id": "ILearnerDotNet", - "Name": "ILearner .NET file", - "ShortName": "Model", - "Description": "A .NET serialized ILearner", - "IsDirectory": False, - "Owner": "Microsoft Corporation", - "FileExtension": "ilearner", - "ContentType": "application/octet-stream", - "AllowUpload": False, - "AllowPromotion": False, - "AllowModelPromotion": True, - "AuxiliaryFileExtension": None, - "AuxiliaryContentType": None - } - with open(os.path.join(model_path, 'data_type.json'), 'w') as f: - json.dump(dct, f) - # Dump data.ilearner as a work around until data type design - visualization = os.path.join(model_path, "data.ilearner") - with open(visualization, 'w') as file: - file.writelines('{}') - - @click.command() @click.option('--model_path', default="./model/") def run_pipeline( @@ -108,13 +57,12 @@ def run_pipeline( model.compile(loss=keras.losses.categorical_crossentropy, optimizer=keras.optimizers.Adadelta(), metrics=['accuracy']) model.fit(x_train, y_train, 
batch_size=batch_size, epochs=epochs, verbose=1, validation_data=(x_test, y_test)) - save_model(model_path, model) - save_model_spec(model_path) - save_ilearner(model_path) + + save_model(model, model_path, conda_env=None) logger.info(f"training finished") -# python -m dstest.keras.trainer --model_path model/keras-mnist +# python -m dstest.keras.trainer_and_save_model_h5 --model_path model/keras-mnist if __name__ == '__main__': run_pipeline() diff --git a/dstest/dstest/keras/trainer_and_save_model_weights.py b/dstest/dstest/keras/trainer_and_save_model_weights.py deleted file mode 100644 index fc3cfd8..0000000 --- a/dstest/dstest/keras/trainer_and_save_model_weights.py +++ /dev/null @@ -1,133 +0,0 @@ -import keras -import logging -import os -import json -import yaml -from keras.datasets import mnist -from keras.models import Sequential -from keras.layers import Dense -from keras import backend as K - - -# Test dynamic install package -from pip._internal import main as pipmain -pipmain(["install", "click"]) -import click - -logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s', - datefmt='%m/%d/%Y %H:%M:%S', - level=logging.INFO) -logging.info(f"in dstest echo") -logger = logging.getLogger(__name__) - -model_json_file_name = "model.json" -model_weights_file_name = "model.weights.h5" -model_spec_file_name = "model_weights_spec.yml" - -def save_model_spec(model_path): - spec = { - 'flavor' : { - 'framework' : 'keras' - }, - 'keras': { - 'serialization_format': 'load_weights', - 'model_json_file': model_json_file_name, - 'model_weights_file': model_weights_file_name - }, - } - - with open(os.path.join(model_path, "model_spec.yml"), 'w') as fp: - yaml.dump(spec, fp, default_flow_style=False) - - -def save_model(model_path, model): - if(not model_path.endswith('/')): - model_path += '/' - - if not os.path.exists(model_path): - logger.info(f"{model_path} not exists") - os.makedirs(model_path) - else: - logger.info(f"{model_path} exists") - - json_data = model.to_json() - with open(os.path.join(model_path, model_json_file_name), 'w') as f: - # TO DO: json data need to decode - json.dump(json_data, f) - - model.save_weights(model_path + model_weights_file_name) - - -def save_ilearner(model_path): - # Dump data_type.json as a work around until SMT deploys - dct = { - "Id": "ILearnerDotNet", - "Name": "ILearner .NET file", - "ShortName": "Model", - "Description": "A .NET serialized ILearner", - "IsDirectory": False, - "Owner": "Microsoft Corporation", - "FileExtension": "ilearner", - "ContentType": "application/octet-stream", - "AllowUpload": False, - "AllowPromotion": False, - "AllowModelPromotion": True, - "AuxiliaryFileExtension": None, - "AuxiliaryContentType": None - } - with open(os.path.join(model_path, 'data_type.json'), 'w') as f: - json.dump(dct, f) - # Dump data.ilearner as a work around until data type design - visualization = os.path.join(model_path, "data.ilearner") - with open(visualization, 'w') as file: - file.writelines('{}') - - -@click.command() -@click.option('--model_path', default="./model/") -def run_pipeline( - model_path - ): - batch_size = 64 - num_classes = 10 - epochs = 5 - # input image dimensions - img_rows, img_cols = 28, 28 - - # the data, split between train and test sets - (x_train, y_train), (x_test, y_test) = mnist.load_data() # x_train shape (60000, 28, 28), y_train shape (60000, 1). x_test(10000, 28, 28) - - x_train = x_train.reshape(x_train.shape[0], -1) # equals x_train.reshape(x_test.shape[0], 784). 
28*28=784
-    x_test = x_test.reshape(x_test.shape[0], 784) # 28*28=784
-
-    x_train = x_train.astype('float32')
-    x_test = x_test.astype('float32')
-    x_train /= 255 #value change to [0,1]
-    x_test /= 255
-
-    # convert class vectors to binary class matrices, e.g. y_train[0] = 5, img is 5,after to_categorical, y_train[0] is [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0]
-    y_train = keras.utils.to_categorical(y_train, num_classes)
-    y_test = keras.utils.to_categorical(y_test, num_classes)
-
-    model = Sequential()
-    model.add(Dense(512,activation='relu',input_shape=(784,)))
-    model.add(Dense(256,activation='relu'))
-    model.add(Dense(10,activation='softmax'))
-    #model.summary()
-
-    model.compile(loss=keras.losses.categorical_crossentropy, optimizer=keras.optimizers.Adadelta(), metrics=['accuracy'])
-
-    model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data=(x_test, y_test))
-    save_model(model_path, model)
-    save_model_spec(model_path)
-    save_ilearner(model_path)
-
-    logger.info(f"training finished")
-
-# python -m dstest.keras.trainer_model_weights --model_path model/keras-mnist
-if __name__ == '__main__':
-    run_pipeline()
-
-
-
-
diff --git a/dstest/dstest/sklearn/load_saved_model_predict_test.py b/dstest/dstest/sklearn/load_saved_model_predict_test.py
new file mode 100644
index 0000000..1a95a4c
--- /dev/null
+++ b/dstest/dstest/sklearn/load_saved_model_predict_test.py
@@ -0,0 +1,26 @@
+from __future__ import print_function
+
+import os
+import sys
+import pandas as pd
+import numpy as np
+
+from builtin_score.builtin_score_module import *
+from sklearn import datasets
+
+
+def load_model_then_predict(model_path = "./model/sklearn/"):
+    iris = datasets.load_iris()
+    x, y = iris.data, iris.target
+    x_test = x[:8] # only pick 8 test data
+    df = pd.DataFrame(data=x_test, columns=['input']*x.shape[1], dtype=np.float64)
+    #df.to_csv('iris_sklearn_test_data.csv')
+
+    module = BuiltinScoreModule(model_path)
+    result = module.run(df)
+    print('=====buildinScoreModule=======')
+    print(result)
+
+# python -m dstest.sklearn.load_saved_model_predict_test
+if __name__ == '__main__':
+    load_model_then_predict()
diff --git a/dstest/dstest/sklearn/trainer.py b/dstest/dstest/sklearn/trainer.py
index c078992..b78fad7 100644
--- a/dstest/dstest/sklearn/trainer.py
+++ b/dstest/dstest/sklearn/trainer.py
@@ -2,19 +2,31 @@
 from sklearn import datasets
 import pickle
 import os
+from builtin_models.sklearn import *
 
-#python -m dstest.sklearn.trainer
-clf = svm.SVC(gamma='scale')
-iris = datasets.load_iris()
-X, y = iris.data, iris.target
-# X: array([[5.1, 3.5, 1.4, 0.2],..., [5.9, 3. , 5.1, 1.8]])
-# y: array([0,...,2])
-clf.fit(X, y)
+
+# Test dynamic install package
+from pip._internal import main as pipmain
+pipmain(["install", "click"])
+import click
 
-model_path = './model/sklearn'
-if not os.path.exists(model_path):
-    os.makedirs(model_path)
 
-with open("./model/sklearn/model.pkl", "wb") as fp:
-    pickle.dump(clf, fp)
\ No newline at end of file
+@click.command()
+@click.option('--model_path', default="./model/")
+def run_pipeline(
+    model_path
+    ):
+    clf = svm.SVC(gamma='scale')
+    iris = datasets.load_iris()
+    X, y = iris.data, iris.target
+    # X: array([[5.1, 3.5, 1.4, 0.2],..., [5.9, 3. 
, 5.1, 1.8]])
+    # y: array([0,...,2])
+    clf.fit(X, y)
+
+    save_model(clf, model_path)
+
+# python -m dstest.sklearn.trainer --model_path model/
+if __name__ == '__main__':
+    run_pipeline()
+
+
diff --git a/dstest/module_specs/keras_minist_saved_model2.yaml b/dstest/module_specs/sklearn_train.yaml
similarity index 75%
rename from dstest/module_specs/keras_minist_saved_model2.yaml
rename to dstest/module_specs/sklearn_train.yaml
index 4c0f218..18d8379 100644
--- a/dstest/module_specs/keras_minist_saved_model2.yaml
+++ b/dstest/module_specs/sklearn_train.yaml
@@ -1,7 +1,7 @@
-name: Keras Train - MNIST Saved Model 2
-id: b390f3b5-af6b-4b1f-896f-cb359af74a23
+name: Sklearn Train - Xin builtin-model test
+id: 8ad10e1b-ac2d-47f8-aa96-78597f26ca48
 category: Machine Learning\Train
-description: DS keras train which outputs a keras model 2 with spec
+description: DS Sklearn train which outputs a Sklearn model with spec
 inputs:
 outputs:
 - name: Output Model
@@ -26,7 +26,7 @@ implementation:
   command:
   - python
   - -m
-  - dstest.keras.trainer_and_save_model_weights
+  - dstest.sklearn.trainer
 args:
 - --model_path
 - outputPath: Output Model
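
Usage note (reviewer sketch, not part of the patch): the round trip below shows how
the new builtin_models API introduced above is meant to be used together. It is a
minimal sketch assuming the builtin-models package and scikit-learn are installed;
the ./model/sklearn/ output directory is illustrative, not required by the API.

    from sklearn import datasets, svm
    from builtin_models.sklearn import load_model_from_local_file, save_model

    # Train a small classifier and persist it with the new helper, which writes
    # model.pkl, conda.yaml, model_spec.yml, and the temporary ilearner
    # placeholder files into the target directory.
    iris = datasets.load_iris()
    clf = svm.SVC(gamma='scale').fit(iris.data, iris.target)
    save_model(clf, './model/sklearn/')

    # load_model_from_local_file expects the path to the pickle file itself,
    # not the directory that save_model created.
    model = load_model_from_local_file('./model/sklearn/model.pkl')
    print(model.predict(iris.data[:8]))

The keras flavor mirrors this: builtin_models.keras.save_model writes model.h5 next
to the same conda.yaml and model_spec.yml, and its load_model_from_local_file
likewise takes the .h5 file path rather than the model directory.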