Skip to content

Commit

Permalink
Reorganizing render, deploy, destroy to unify stages input_vars,
tf_objects, checks, and state_imports
Browse files Browse the repository at this point in the history

Closes #1081
  • Loading branch information
costrouc committed Feb 22, 2022
1 parent 6736717 commit 8c4715d
Show file tree
Hide file tree
Showing 8 changed files with 868 additions and 1,002 deletions.
772 changes: 73 additions & 699 deletions qhub/deploy.py

Large diffs are not rendered by default.

214 changes: 17 additions & 197 deletions qhub/destroy.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import logging
import os
import tempfile

from qhub.utils import timer, check_cloud_credentials
from qhub.stages import input_vars, state_imports
from qhub.provider import terraform

logger = logging.getLogger(__name__)
Expand All @@ -11,207 +11,27 @@
def destroy_01_terraform_state(config):
    """Destroy the stage 01 remote terraform-state resources for the configured provider.

    Imports the existing remote-state resources (bucket, lock table, etc.)
    into a fresh terraform state, applies once so attributes that do not
    import cleanly are refreshed, then destroys them.

    Parameters
    ----------
    config : dict
        Parsed qhub configuration; ``config["provider"]`` selects the
        provider-specific terraform directory, and the per-provider
        input variables / state imports are derived from it by the
        ``qhub.stages`` helpers.

    Side effects
    ------------
    Runs terraform import/apply/destroy in
    ``stages/01-terraform-state/<provider>``.
    """
    directory = "stages/01-terraform-state"

    terraform.deploy(
        terraform_import=True,
        # acl and force_destroy do not import properly
        # and only get refreshed properly with an apply
        terraform_apply=True,
        terraform_destroy=True,
        directory=os.path.join(directory, config["provider"]),
        # per-provider branching now lives in qhub.stages.input_vars /
        # qhub.stages.state_imports instead of being inlined here
        input_vars=input_vars.stage_01_terraform_state({}, config),
        state_imports=state_imports.stage_01_terraform_state({}, config),
    )


def destroy_02_infrastructure(config):
    """Destroy the stage 02 kubernetes/infrastructure resources.

    Runs ``terraform destroy`` (no apply) in the provider-specific
    ``stages/02-infrastructure/<provider>`` directory, with the
    provider-specific input variables computed by
    ``qhub.stages.input_vars.stage_02_infrastructure``.

    Parameters
    ----------
    config : dict
        Parsed qhub configuration; ``config["provider"]`` selects the
        terraform directory.

    Side effects
    ------------
    Runs terraform destroy in ``stages/02-infrastructure/<provider>``.
    """
    directory = "stages/02-infrastructure"

    terraform.deploy(
        terraform_apply=False,
        terraform_destroy=True,
        directory=os.path.join(directory, config["provider"]),
        # provider-specific node_groups / kubeconfig variables are now
        # produced by the shared stages helper rather than inlined here
        input_vars=input_vars.stage_02_infrastructure({}, config),
    )


def destroy_configuration(config):
Expand Down
113 changes: 13 additions & 100 deletions qhub/render/__init__.py → qhub/render.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,9 @@

from ruamel.yaml import YAML

from qhub.provider.terraform import tf_render_objects
from qhub.stages import tf_objects
from qhub.deprecate import DEPRECATED_FILE_PATHS
from qhub.render.terraform import (
QHubKubernetesProvider,
QHubTerraformState,
QHubGCPProvider,
QHubAWSProvider,
)

from qhub.provider.cicd.github import gen_qhub_ops, gen_qhub_linter
from qhub.provider.cicd.gitlab import gen_gitlab_ci

Expand Down Expand Up @@ -143,100 +138,18 @@ def render_template(output_directory, config_filename, force=False, dry_run=Fals

def render_contents(config: Dict):
"""Dynamically generated contents from QHub configuration"""
contents = {}

if config["provider"] == "gcp":
contents.update(
{
"stages/01-terraform-state/gcp/_qhub.tf.json": tf_render_objects(
[
QHubGCPProvider(config),
]
),
"stages/02-infrastructure/gcp/_qhub.tf.json": tf_render_objects(
[
QHubGCPProvider(config),
QHubTerraformState("02-infrastructure", config),
]
),
}
)
elif config["provider"] == "do":
contents.update(
{
"stages/02-infrastructure/do/_qhub.tf.json": tf_render_objects(
[
QHubTerraformState("02-infrastructure", config),
]
)
}
)
elif config["provider"] == "azure":
contents.update(
{
"stages/02-infrastructure/azure/_qhub.tf.json": tf_render_objects(
[
QHubTerraformState("02-infrastructure", config),
]
),
}
)
elif config["provider"] == "aws":
contents.update(
{
"stages/01-terraform-state/aws/_qhub.tf.json": tf_render_objects(
[
QHubAWSProvider(config),
]
),
"stages/02-infrastructure/aws/_qhub.tf.json": tf_render_objects(
[
QHubAWSProvider(config),
QHubTerraformState("02-infrastructure", config),
]
),
}
)

contents.update(
{
"stages/03-kubernetes-initialize/_qhub.tf.json": tf_render_objects(
[
QHubTerraformState("03-kubernetes-initialize", config),
QHubKubernetesProvider(config),
]
),
"stages/04-kubernetes-ingress/_qhub.tf.json": tf_render_objects(
[
QHubTerraformState("04-kubernetes-ingress", config),
QHubKubernetesProvider(config),
]
),
"stages/05-kubernetes-keycloak/_qhub.tf.json": tf_render_objects(
[
QHubTerraformState("05-kubernetes-keycloak", config),
QHubKubernetesProvider(config),
]
),
"stages/06-kubernetes-keycloak-configuration/_qhub.tf.json": tf_render_objects(
[
QHubTerraformState("06-kubernetes-keycloak-configuration", config),
]
),
"stages/07-kubernetes-services/_qhub.tf.json": tf_render_objects(
[
QHubTerraformState("07-kubernetes-services", config),
QHubKubernetesProvider(config),
]
),
"stages/08-qhub-tf-extensions/_qhub.tf.json": tf_render_objects(
[
QHubTerraformState("08-qhub-tf-extensions", config),
QHubKubernetesProvider(config),
]
),
}
)
contents = {
**tf_objects.stage_01_terraform_state(config),
**tf_objects.stage_02_infrastructure(config),
**tf_objects.stage_03_kubernetes_initialize(config),
**tf_objects.stage_04_kubernetes_ingress(config),
**tf_objects.stage_05_kubernetes_keycloak(config),
**tf_objects.stage_06_kubernetes_keycloak_configuration(config),
**tf_objects.stage_07_kubernetes_services(config),
**tf_objects.stage_08_qhub_tf_extensions(config),
}

if config.get("ci_cd"):
for fn, workflow in gen_cicd(config).items():
contents.update(
Expand Down
Loading

0 comments on commit 8c4715d

Please sign in to comment.