Setup/Add integration/deployment tests via pytest (#922)
Setup/Add integration/deployment pytests
aktech authored Nov 16, 2021
1 parent 4e5190a commit f555bce
Showing 9 changed files with 104 additions and 2 deletions.
5 changes: 4 additions & 1 deletion .github/workflows/kubernetes_test.yaml
@@ -146,8 +146,11 @@ jobs:
./tests_e2e/cypress/screenshots/
./tests_e2e/cypress/videos/
### CLEANUP AFTER CYPRESS
- name: Deployment Pytests
run: |
pytest tests_deployment/ -v
### CLEANUP AFTER TESTS
- name: Cleanup qhub deployment
run: |
cd local-deployment
2 changes: 1 addition & 1 deletion .github/workflows/test.yaml
@@ -39,7 +39,7 @@ jobs:
- name: Test QHub
run: |
pytest --version
pytest
pytest --ignore=tests_deployment
test-render-providers:
name: 'Test QHub Provider'
11 changes: 11 additions & 0 deletions docs/source/dev_guide/testing.md
@@ -100,6 +100,17 @@ The final command above should open the Cypress UI where you can run the tests m

Note that tests are heavily state dependent, so any changes or use of the deployed QHub could affect the results.

## Deployment/Integration Tests

Deployment and integration tests make it easier to check various features of a deployed QHub
on Minikube, such as Dask Gateway, external integrations, and the state of the Kubernetes cluster,
from simple Python code. You can run the integration and deployment tests with the following command:

```
pytest tests_deployment/ -v
```
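
You can also run a single test module directly, for example `pytest tests_deployment/test_dask_gateway.py -v` for the Dask Gateway tests added in this commit.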


# Cloud Testing

Cloud testing on AWS, GCP, Azure, and Digital Ocean can be significantly more complicated and time consuming, but it is the only way to truly test the cloud deployments, including the infrastructure. To test on cloud Kubernetes, deploy QHub in the normal way on those clouds, but using the [linked pip install](./index.md) of the qhub package.
4 changes: 4 additions & 0 deletions environment-dev.yaml
@@ -16,3 +16,7 @@ dependencies:
- pytest
- diagrams
- jhub-client

# deployment/integration tests dependencies
- dask-gateway
- python-kubernetes
2 changes: 2 additions & 0 deletions setup.py
@@ -53,6 +53,8 @@
"diagrams",
"jhub-client",
"pre-commit",
"kubernetes",
"dask-gateway",
],
},
include_package_data=True,
Empty file added tests_deployment/__init__.py
5 changes: 5 additions & 0 deletions tests_deployment/constants.py
@@ -0,0 +1,5 @@
DASK_GATEWAY_JUPYTER_SECRET_NAME = "qhub-daskgateway-gateway"
JUPYTERHUB_TOKEN_SECRET_KEY_NAME = "jupyterhub_api_token"
NAMESPACE = 'dev'
QHUB_HOSTNAME = 'github-actions.qhub.dev'
GATEWAY_ENDPOINT = 'gateway'
32 changes: 32 additions & 0 deletions tests_deployment/test_dask_gateway.py
@@ -0,0 +1,32 @@
import dask_gateway
import os

import pytest
from tests_deployment import constants
from tests_deployment.utils import monkeypatch_ssl_context, get_jupyterhub_token

monkeypatch_ssl_context()


@pytest.fixture
def dask_gateway_object():
    """Connects to Dask Gateway cluster from outside the cluster."""
    os.environ['JUPYTERHUB_API_TOKEN'] = get_jupyterhub_token()
    return dask_gateway.Gateway(
        address=f'https://{constants.QHUB_HOSTNAME}/{constants.GATEWAY_ENDPOINT}',
        auth='jupyterhub',
        proxy_address=f'tcp://{constants.QHUB_HOSTNAME}:8786'
    )


def test_dask_gateway(dask_gateway_object):
    """This test checks if we're able to connect to dask gateway."""
    assert dask_gateway_object.list_clusters() == []


def test_dask_gateway_cluster_options(dask_gateway_object):
    """Tests Dask Gateway's cluster options."""
    cluster_options = dask_gateway_object.cluster_options()
    assert cluster_options.conda_environment == "dask"
    assert cluster_options.profile == "Small Worker"
    assert cluster_options.environment_vars == {}
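
For illustration, a further test along these lines could start a cluster through the same fixture, run a small computation, and clean up afterwards. This is a hedged sketch, not part of this commit: the test name `test_dask_gateway_cluster_creation` is hypothetical, and it would live in the same module so it can reuse the `dask_gateway_object` fixture.

```python
def test_dask_gateway_cluster_creation(dask_gateway_object):
    """Sketch: start a cluster with the default options and run a task on it."""
    cluster_options = dask_gateway_object.cluster_options()
    cluster = dask_gateway_object.new_cluster(cluster_options)
    try:
        cluster.scale(1)                   # request a single worker
        client = cluster.get_client()      # dask.distributed client bound to this cluster
        assert client.submit(lambda x: x + 1, 1).result(timeout=300) == 2
    finally:
        cluster.shutdown()                 # always tear the cluster down
```

Whether the computation succeeds depends on the worker image and profile being available in the deployment, so the timeout is deliberately generous.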
45 changes: 45 additions & 0 deletions tests_deployment/utils.py
@@ -0,0 +1,45 @@
import base64
import ssl

from kubernetes import client, config

from tests_deployment import constants


def get_kubernetes_api_instance():
    """Returns the v1 core Kubernetes api instance for making
    calls to the kubernetes cluster
    """
    config.load_kube_config()
    return client.CoreV1Api()


def get_jupyterhub_token():
    """
    It fetches the secret that has the JupyterHub token to be able to
    connect to dask gateway.
    """
    v1 = get_kubernetes_api_instance()
    secret = str(v1.read_namespaced_secret(
        constants.DASK_GATEWAY_JUPYTER_SECRET_NAME, constants.NAMESPACE
    ).data)
    base64_encoded_token = eval(secret)[constants.JUPYTERHUB_TOKEN_SECRET_KEY_NAME]
    return base64.b64decode(base64_encoded_token).decode()


def monkeypatch_ssl_context():
    """
    This is a workaround monkeypatch to disable ssl checking to avoid SSL
    failures.
    TODO: A better way to do this would be adding the Traefik's default certificate's
    CA public key to the trusted certificate authorities.
    """
    def create_default_context(context):
        def _inner(*args, **kwargs):
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE
            return context
        return _inner

    sslcontext = ssl.create_default_context()
    ssl.create_default_context = create_default_context(sslcontext)
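
As a pointer for the TODO above, a minimal sketch (not part of this commit) of the verified alternative would build an SSL context that trusts the Traefik CA instead of disabling checks; the CA bundle path is a hypothetical placeholder that would first have to be exported from the cluster.

```python
import ssl

# Hypothetical path to the Traefik default certificate's CA, exported as a PEM file.
TRAEFIK_CA_BUNDLE = "traefik-ca.pem"


def create_trusted_ssl_context():
    """Return an SSL context that verifies certificates against the Traefik CA."""
    context = ssl.create_default_context(cafile=TRAEFIK_CA_BUNDLE)
    context.check_hostname = True            # default for create_default_context, stated for clarity
    context.verify_mode = ssl.CERT_REQUIRED  # keep certificate verification enabled
    return context
```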
