[SDK] Improve host name handling #3287

Merged 1 commit on Mar 17, 2020
19 changes: 14 additions & 5 deletions sdk/python/kfp/_client.py
@@ -113,6 +113,11 @@ def __init__(self, host=None, client_id=None, namespace='kubeflow', other_client

    def _load_config(self, host, client_id, namespace, other_client_id, other_client_secret):
        config = kfp_server_api.configuration.Configuration()

        host = host or ''
        # Preprocess the host endpoint to prevent some common user mistakes.
        host = host.lstrip(r'(https|http)://').rstrip('/')
Contributor:

I vaguely remember there were scenarios where this had to be an actual URI with non-empty path.
Something like http://1.2.3.4/something/#pipeline

Author:

Do you mean we have to preserve the http:// prefix?


        if host:
            config.host = host
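
Note for this thread: str.lstrip treats its argument as a set of characters rather than a literal prefix, so the expression above can also strip leading characters of the host itself (anything starting with one of h, t, p, s, (, ), |, : or /). A minimal sketch of scheme stripping that avoids that pitfall, assuming the intent is only to drop an explicit http:// or https:// prefix (the helper name is hypothetical, not part of this PR):

```python
import re

def _strip_scheme(host: str) -> str:
    # Remove a literal http:// or https:// prefix as a whole token, then any
    # trailing slash; path components such as '/something' survive intact.
    host = re.sub(r'^https?://', '', host or '')
    return host.rstrip('/')

# _strip_scheme('https://1.2.3.4/something/') -> '1.2.3.4/something'
# _strip_scheme('test.example.com')           -> 'test.example.com' (no leading chars eaten)
```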

@@ -159,6 +164,10 @@ def _load_config(self, host, client_id, namespace, other_client_id, other_client
    def _is_inverse_proxy_host(self, host):
        if host:
            return re.match(r'\S+.googleusercontent.com/{0,1}$', host)
        if re.match(r'\w+', host):
            warnings.warn(
                'The received host is %s, please include the full endpoint address '
                '(with ".(pipelines/notebooks).googleusercontent.com")' % host)
        return False

    def _is_ipython(self):
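
As a quick check of the pattern above (the dots are unescaped, so they match any character, but typical endpoints still classify as expected), a minimal sketch with placeholder hostnames:

```python
import re

# Pattern copied from the diff above.
pattern = r'\S+.googleusercontent.com/{0,1}$'

# A full inverse-proxy endpoint matches, with or without a trailing slash.
print(bool(re.match(pattern, '1234-dot-us-central1.pipelines.googleusercontent.com')))   # True
print(bool(re.match(pattern, '1234-dot-us-central1.notebooks.googleusercontent.com/')))  # True

# A bare deployment name does not match.
print(bool(re.match(pattern, '1234-dot-us-central1')))  # False
```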
@@ -409,14 +418,14 @@ def __repr__(self):
        run_info = self.run_pipeline(experiment.id, run_name, pipeline_file, arguments, namespace=namespace)
        return RunPipelineResult(self, run_info)

    def schedule_pipeline(self, experiment_id, job_name, pipeline_package_path=None, params={}, pipeline_id=None,
                          namespace=None, cron_schedule=None, description=None, max_concurrency=10, no_catchup=None):
        """Schedule a pipeline on Kubeflow to run based upon a cron job.
        Arguments:
            experiment_id {string} -- The experiment within which we would like kubeflow to run the job
            job_name {string} -- The name of the scheduled job
        Keyword Arguments:
            pipeline_package_path {string} -- The path to the pipeline package (default: {None})
            params {dict} -- The pipeline parameters (default: {{}})
@@ -457,7 +466,7 @@ def schedule_pipeline(self, experiment_id, job_name, pipeline_package_path=None,
            pipeline_id=pipeline_id,
            workflow_manifest=pipeline_json_string,
            parameters=api_params)

        trigger = kfp_server_api.models.api_cron_schedule.ApiCronSchedule(cron=cron_schedule)  # Example: cron_schedule="0 0 9 ? * 2-6"
        job_id = ''.join(random.choices(string.ascii_uppercase + string.digits, k=10))
        schedule_body = kfp_server_api.models.ApiJob(
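
To make the cron example in the inline comment concrete, a hypothetical call to schedule_pipeline could look like the following; the host, experiment name, and pipeline id are placeholders, not values from this PR:

```python
import kfp

# Placeholder inverse-proxy endpoint; substitute your own deployment's host.
client = kfp.Client(host='1234-dot-us-central1.pipelines.googleusercontent.com')

experiment = client.create_experiment('nightly-experiment')
client.schedule_pipeline(
    experiment_id=experiment.id,
    job_name='nightly-job',
    pipeline_id='my-pipeline-id',     # placeholder id of an already uploaded pipeline
    cron_schedule='0 0 9 ? * 2-6',    # 09:00 on days 2-6 of the week, as in the inline comment
    max_concurrency=10,
)
```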