Move running of bootstrap-pipelines to auto-rds-s3-setup (#685)
**Which issue is resolved by this Pull Request:**
Resolves #

**Description of your changes:**
The pipelines bootstrap should only run as part of deployments that use the RDS/S3 flavors, so it is now invoked from the `auto-rds-s3-setup` script.
The `bootstrap-pipelines` target is additionally kept in the Makefile to support users who deploy s3-only and/or do not wish to run the `auto-rds-s3-setup` script in its entirety (see the sketch below).
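
As an illustration only (not part of this diff), the retained `bootstrap-pipelines` target amounts to injecting the cluster name and region into `tests/e2e/utils/pipelines/config.yaml` and running the IRSA setup script. The PyYAML-based sketch below is a stand-in for the target's `yq` edits and assumes PyYAML is installed:

```python
# Rough equivalent of `make bootstrap-pipelines` (illustrative sketch, not project code).
import os
import subprocess

import yaml  # assumes PyYAML is available

CONFIG_PATH = "tests/e2e/utils/pipelines/config.yaml"


def bootstrap_pipelines(cluster_name: str, cluster_region: str) -> None:
    # Mirror the Makefile's `yq e '.cluster.name=...' -i` / `.cluster.region=...` edits.
    with open(CONFIG_PATH) as f:
        cfg = yaml.safe_load(f) or {}
    cluster = cfg.setdefault("cluster", {})
    cluster["name"] = cluster_name
    cluster["region"] = cluster_region
    with open(CONFIG_PATH, "w") as f:
        yaml.safe_dump(cfg, f)

    # Run the pipelines IRSA setup from tests/e2e, as the Makefile target does.
    subprocess.run(
        ["python3.8", "utils/pipelines/setup_pipelines_irsa.py"],
        cwd="tests/e2e",
        env={**os.environ, "PYTHONPATH": ".."},
        check=True,
    )


if __name__ == "__main__":
    bootstrap_pipelines(os.environ["CLUSTER_NAME"], os.environ["CLUSTER_REGION"])
```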

**Testing:**
- [ ] Unit tests pass
- [ ] e2e tests pass
- Details about new tests (If this PR adds a new feature)
- Details about any manual tests performed

By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
ryansteakley authored Apr 19, 2023
1 parent bae8e40 commit f91ae60
Showing 13 changed files with 275 additions and 281 deletions.
17 changes: 2 additions & 15 deletions Makefile
@@ -93,29 +93,16 @@ bootstrap-ack: verify-cluster-variables connect-to-eks-cluster
yq e '.cluster.region=env(CLUSTER_REGION)' -i tests/e2e/utils/ack_sm_controller_bootstrap/config.yaml
cd tests/e2e && PYTHONPATH=.. python3.8 utils/ack_sm_controller_bootstrap/setup_sm_controller_req.py

bootstrap-pipelines: verify-cluster-variables connect-to-eks-cluster
yq e '.cluster.name=env(CLUSTER_NAME)' -i tests/e2e/utils/pipelines/config.yaml
yq e '.cluster.region=env(CLUSTER_REGION)' -i tests/e2e/utils/pipelines/config.yaml
cd tests/e2e && PYTHONPATH=.. python3.8 utils/pipelines/setup_pipelines_irsa.py

cleanup-ack-req: verify-cluster-variables
yq e '.cluster.name=env(CLUSTER_NAME)' -i tests/e2e/utils/ack_sm_controller_bootstrap/config.yaml
yq e '.cluster.region=env(CLUSTER_REGION)' -i tests/e2e/utils/ack_sm_controller_bootstrap/config.yaml
cd tests/e2e && PYTHONPATH=.. python3.8 utils/ack_sm_controller_bootstrap/cleanup_sm_controller_req.py

cleanup-pipelines-req: verify-cluster-variables
yq e '.cluster.name=env(CLUSTER_NAME)' -i tests/e2e/utils/pipelines/config.yaml
yq e '.cluster.region=env(CLUSTER_REGION)' -i tests/e2e/utils/pipelines/config.yaml
cd tests/e2e && PYTHONPATH=.. python3.8 utils/pipelines/cleanup_pipelines_irsa.py

deploy-kubeflow: bootstrap-ack
$(eval DEPLOYMENT_OPTION:=vanilla)
$(eval INSTALLATION_OPTION:=kustomize)
$(eval CREDENTIALS_OPTION:=irsa)
if [ "$(CREDENTIALS_OPTION)" = "irsa" ]; then \
make bootstrap-pipelines; \
fi
cd tests/e2e && PYTHONPATH=.. python3.8 utils/kubeflow_installation.py --deployment_option $(DEPLOYMENT_OPTION) --installation_option $(INSTALLATION_OPTION) --credentials_option $(CREDENTIALS_OPTION) --cluster_name $(CLUSTER_NAME)
$(eval PIPELINE_S3_CREDENTIAL_OPTION:=irsa)
cd tests/e2e && PYTHONPATH=.. python3.8 utils/kubeflow_installation.py --deployment_option $(DEPLOYMENT_OPTION) --installation_option $(INSTALLATION_OPTION) --pipeline_s3_credential_option $(PIPELINE_S3_CREDENTIAL_OPTION) --cluster_name $(CLUSTER_NAME)

delete-kubeflow:
$(eval DEPLOYMENT_OPTION:=vanilla)
49 changes: 29 additions & 20 deletions tests/e2e/conftest.py
@@ -51,31 +51,32 @@ def pytest_addoption(parser):
parser.addoption(
"--installation_option",
action="store",
help="helm or kustomize, default is set to kustomize"
help="helm or kustomize, default is set to kustomize",
)
parser.addoption(
"--deployment_option",
action="store",
help="vanilla/cognito/rds-and-s3/rds-only/s3-only, default is set to vanilla"
help="vanilla/cognito/rds-and-s3/rds-only/s3-only, default is set to vanilla",
)
parser.addoption(
"--credentials_option",
"--pipeline_s3_credential_option",
action="store",
help="irsa or static, default is set to static"
help="irsa or static, default is set to static",
)




def keep_successfully_created_resource(request):
return request.config.getoption("--keepsuccess")


def clean_up_eks_cluster(request):
return request.config.getoption("--deletecluster")


def load_metadata_file(request):
return request.config.getoption("--metadata")


def get_accesskey(request):
access_key = request.config.getoption("--accesskey")
if not access_key:
@@ -89,18 +90,21 @@ def get_secretkey(request):
pytest.fail("--secretkey is required")
return secret_key


def get_installation_option(request):
installation_option = request.config.getoption("--installation_option")
if not installation_option:
installation_option = "kustomize"
return installation_option


def get_deployment_option(request):
deployment_option = request.config.getoption("--deployment_option")
if not deployment_option:
deployment_option = "vanilla"
return deployment_option


def get_root_domain_name(request):
return request.config.getoption("--root-domain-name")

@@ -120,6 +124,7 @@ def region(metadata, request):
metadata.insert("region", region)
return region


@pytest.fixture(scope="class")
def installation_option(metadata, request):
"""
@@ -130,11 +135,12 @@ def installation_option(metadata, request):

installation_option = request.config.getoption("--installation_option")
if not installation_option:
installation_option = 'kustomize'
installation_option = "kustomize"
metadata.insert("installation_option", installation_option)

return installation_option


@pytest.fixture(scope="class")
def deployment_option(metadata, request):
"""
@@ -145,25 +151,28 @@ def deployment_option(metadata, request):

deployment_option = request.config.getoption("--deployment_option")
if not deployment_option:
deployment_option = 'vanilla'
deployment_option = "vanilla"
metadata.insert("deployment_option", deployment_option)

return deployment_option


@pytest.fixture(scope="class")
def credentials_option(metadata, request):
def pipeline_s3_credential_option(metadata, request):
"""
Test credentials option.
"""
if metadata.get("credentials_option"):
return metadata.get("credentials_option")

credentials_option = request.config.getoption("--credentials_option")
if not credentials_option:
credentials_option = 'static'
metadata.insert("credentials_option", credentials_option)

return credentials_option
if metadata.get("pipeline_s3_credential_option"):
return metadata.get("pipeline_s3_credential_option")

pipeline_s3_credential_option = request.config.getoption(
"--pipeline_s3_credential_option"
)
if not pipeline_s3_credential_option:
pipeline_s3_credential_option = "static"
metadata.insert("pipeline_s3_credential_option", pipeline_s3_credential_option)

return pipeline_s3_credential_option


@pytest.fixture(scope="class")
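For context (hypothetical usage, not part of this diff), a test consumes the renamed fixture simply by naming it as a parameter:

```python
# Hypothetical test class showing how the renamed conftest fixture is requested by name.
class TestPipelineS3CredentialOption:
    def test_option_is_valid(self, pipeline_s3_credential_option):
        # The fixture falls back to "static" when --pipeline_s3_credential_option is omitted.
        assert pipeline_s3_credential_option in ("irsa", "static")
```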
17 changes: 13 additions & 4 deletions tests/e2e/fixtures/installation.py
@@ -76,7 +76,9 @@ def on_create():
)
ebs_csi_driver["role_name"] = ebs_csi_role_name
ebs_csi_driver["service_account_name"] = "ebs-csi-controller-sa"
create_addon("aws-ebs-csi-driver", cluster, account_id, ebs_csi_role_name, region)
create_addon(
"aws-ebs-csi-driver", cluster, account_id, ebs_csi_role_name, region
)
ebs_csi_driver["addon_name"] = "aws-ebs-csi-driver"
ebs_csi_driver["addon_account"] = account_id

@@ -101,7 +103,7 @@ def installation(
ebs_addon,
installation_path,
installation_option,
credentials_option,
pipeline_s3_credential_option,
request,
):
"""
@@ -115,10 +117,17 @@
"""

def on_create():
install_kubeflow(installation_option, deployment_option, cluster, credentials_option)
install_kubeflow(
installation_option,
deployment_option,
cluster,
pipeline_s3_credential_option,
)

def on_delete():
uninstall_kubeflow(installation_option, deployment_option, credentials_option)
uninstall_kubeflow(
installation_option, deployment_option, pipeline_s3_credential_option
)

configure_resource_fixture(
metadata, request, installation_path, "installation_path", on_create, on_delete
73 changes: 43 additions & 30 deletions tests/e2e/utils/kubeflow_installation.py
@@ -67,19 +67,22 @@ def install_kubeflow(
installation_option,
deployment_option,
cluster_name,
credentials_option,
pipeline_s3_credential_option,
aws_telemetry=True,
):
print(cluster_name)
if deployment_option == "vanilla":
installation_config = load_yaml_file(INSTALLATION_CONFIG_VANILLA)
elif deployment_option == "cognito":
installation_config = load_yaml_file(INSTALLATION_CONFIG_COGNITO)
elif deployment_option == "rds-s3" and credentials_option == "static":
elif deployment_option == "rds-s3" and pipeline_s3_credential_option == "static":
installation_config = load_yaml_file(INSTALLATION_CONFIG_RDS_S3_STATIC)
elif deployment_option == "s3-only" and credentials_option == "static":
elif deployment_option == "s3-only" and pipeline_s3_credential_option == "static":
installation_config = load_yaml_file(INSTALLATION_CONFIG_S3_ONLY_STATIC)
elif deployment_option == "cognito-rds-s3" and credentials_option == "static":
elif (
deployment_option == "cognito-rds-s3"
and pipeline_s3_credential_option == "static"
):
installation_config = load_yaml_file(INSTALLATION_CONFIG_COGNITO_RDS_S3_STATIC)
elif deployment_option == "rds-s3":
installation_config = load_yaml_file(INSTALLATION_CONFIG_RDS_S3)
@@ -91,7 +94,7 @@ def install_kubeflow(
installation_config = load_yaml_file(INSTALLATION_CONFIG_COGNITO_RDS_S3)

print_banner(
f"Installing kubeflow {deployment_option} deployment with {installation_option} with {credentials_option}"
f"Installing kubeflow {deployment_option} deployment with {installation_option} with {pipeline_s3_credential_option}"
)

for component in Install_Sequence:
@@ -100,7 +103,7 @@ def install_component(
component,
installation_config,
cluster_name,
credentials_option,
pipeline_s3_credential_option,
)

if aws_telemetry == True:
@@ -109,7 +112,7 @@
"aws-telemetry",
installation_config,
cluster_name,
credentials_option,
pipeline_s3_credential_option,
)


@@ -118,7 +121,7 @@ def install_component(
component_name,
installation_config,
cluster_name,
credentials_option,
pipeline_s3_credential_option,
crd_established=True,
):
# component not applicable for deployment option
@@ -153,7 +156,7 @@ def install_component(
component_name,
installation_paths,
installation_option,
credentials_option,
pipeline_s3_credential_option,
)
install_helm(component_name, installation_paths)
# kustomize
@@ -171,7 +174,7 @@ def install_component(
component_name,
installation_paths,
installation_option,
credentials_option,
pipeline_s3_credential_option,
)
for kustomize_path in installation_paths:
if not crd_established:
@@ -279,32 +282,42 @@ def install_ack_controller()


def configure_kubeflow_pipelines(
component_name, installation_paths, installation_option, credentials_option
component_name,
installation_paths,
installation_option,
pipeline_s3_credential_option,
):
if credentials_option == "static":
if pipeline_s3_credential_option == "static":
return

cfg = load_yaml_file(file_path="./utils/pipelines/config.yaml")
IAM_ROLE_ARN_FOR_IRSA = cfg["pipeline_oidc_role"]

cfg = load_yaml_file(file_path="./utils/rds-s3/metadata.yaml")
BACKEND_ROLE_ARN = cfg["S3"]["backEndRoleArn"]
PROFILE_ROLE_ARN = cfg["S3"]["profileRoleArn"]
if installation_option == "kustomize":
CHART_EXPORT_PATH = "../../awsconfigs/apps/pipeline/s3/service-account.yaml"
USER_NAMESPACE_PATH = (
"../../awsconfigs/common/user-namespace/overlay/profile.yaml"
)
exec_shell(
f'yq e \'.metadata.annotations."eks.amazonaws.com/role-arn"="{BACKEND_ROLE_ARN}"\' '
+ f"-i {CHART_EXPORT_PATH}"
)
exec_shell(
f'yq e \'.spec.plugins[0].spec."awsIamRole"="{PROFILE_ROLE_ARN}"\' '
+ f"-i {USER_NAMESPACE_PATH}"
)

else:
CHART_EXPORT_PATH = f"{installation_paths}/templates/ServiceAccount/ml-pipeline-kubeflow-ServiceAccount.yaml"
USER_NAMESPACE_PATH = "../../charts/common/user-namespace/templates/Profile/kubeflow-user-example-com-Profile.yaml"

exec_shell(
f'yq e \'.metadata.annotations."eks.amazonaws.com/role-arn"="{IAM_ROLE_ARN_FOR_IRSA}"\' '
+ f"-i {CHART_EXPORT_PATH}"
)
exec_shell(
f'yq e \'.spec.plugins[0].spec."awsIamRole"="{IAM_ROLE_ARN_FOR_IRSA}"\' '
+ f"-i {USER_NAMESPACE_PATH}"
)
CHART_EXPORT_PATH = f"{installation_paths}/values.yaml"
USER_NAMESPACE_PATH = "../../charts/common/user-namespace/values.yaml"
exec_shell(
f"yq e '.irsa.roleArn=\"{BACKEND_ROLE_ARN}\"' "
+ f"-i {CHART_EXPORT_PATH}"
)
exec_shell(
f"yq e '.irsa.roleArn=\"{PROFILE_ROLE_ARN}\"' "
+ f"-i {USER_NAMESPACE_PATH}"
)


if __name__ == "__main__":
@@ -349,11 +362,11 @@ def configure_kubeflow_pipelines(
help=f"EKS cluster Name",
required=True,
)
CREDENTIAL_OPTION_DEFAULT = "irsa"
PIPELINE_S3_CREDENTIAL_OPTION_DEFAULT = "irsa"
parser.add_argument(
"--credentials_option",
"--pipeline_s3_credential_option",
type=str,
default=CREDENTIAL_OPTION_DEFAULT,
default=PIPELINE_S3_CREDENTIAL_OPTION_DEFAULT,
choices=["irsa", "static"],
help=f"Kubeflow default credential option default is set to irsa",
required=False,
@@ -365,6 +378,6 @@ def configure_kubeflow_pipelines(
args.installation_option,
args.deployment_option,
args.cluster_name,
args.credentials_option,
args.pipeline_s3_credential_option,
args.aws_telemetry,
)
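
For reference (illustrative invocation, not part of this diff), the installer is driven with the renamed flag the same way the Makefile's `deploy-kubeflow` target does; the cluster name below is a placeholder:

```python
# Illustrative wrapper around the CLI call made by `make deploy-kubeflow`
# (placeholder cluster name; run from the repository root).
import os
import subprocess

subprocess.run(
    [
        "python3.8",
        "utils/kubeflow_installation.py",
        "--deployment_option", "vanilla",
        "--installation_option", "kustomize",
        "--pipeline_s3_credential_option", "irsa",
        "--cluster_name", "my-eks-cluster",  # placeholder
    ],
    cwd="tests/e2e",
    env={**os.environ, "PYTHONPATH": ".."},
    check=True,
)
```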