ci: Fixing local integration tests, defaulting to test containers (#2927)
adchia authored and kevjumba committed Jul 18, 2022
1 parent f4f4894 commit 935b34c
Showing 10 changed files with 121 additions and 17 deletions.
63 changes: 63 additions & 0 deletions .github/workflows/pr_local_integration_tests.yml
@@ -0,0 +1,63 @@
name: pr-local-integration-tests
# This runs local tests with containerized stubs of online stores. This is the main dev workflow

on:
  pull_request_target:
    types:
      - opened
      - synchronize
      - labeled

jobs:
  integration-test-python-local:
    # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes.
    if:
      (github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) ||
      (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        python-version: [ "3.8" ]
        os: [ ubuntu-latest ]
    env:
      OS: ${{ matrix.os }}
      PYTHON: ${{ matrix.python-version }}
    steps:
      - uses: actions/checkout@v2
        with:
          # pull_request_target runs the workflow in the context of the base repo
          # as such, actions/checkout needs to be explicitly configured to retrieve
          # code from the PR.
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          submodules: recursive
      - name: Setup Python
        uses: actions/setup-python@v2
        id: setup-python
        with:
          python-version: ${{ matrix.python-version }}
          architecture: x64
      - name: Upgrade pip version
        run: |
          pip install --upgrade "pip>=21.3.1,<22.1"
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "::set-output name=dir::$(pip cache dir)"
      - name: pip cache
        uses: actions/cache@v2
        with:
          path: |
            ${{ steps.pip-cache.outputs.dir }}
            /opt/hostedtoolcache/Python
            /Users/runner/hostedtoolcache/Python
          key: ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-${{ hashFiles(format('**/py{0}-ci-requirements.txt', env.PYTHON)) }}
          restore-keys: |
            ${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-pip-
      - name: Install pip-tools
        run: pip install pip-tools
      - name: Install dependencies
        run: make install-python-ci-dependencies
      - name: Test local integration tests
        if: ${{ always() }} # this will guarantee that step won't be canceled and resources won't leak
        run: make test-python-integration-local
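For readers less used to GitHub Actions expressions, the `if:` gate above can be read as the following Python sketch (names here are illustrative only, not part of the commit):

```python
# Sketch of the workflow's label gate for pull_request_target events.
TRUSTED_LABELS = {"ok-to-test", "approved", "lgtm"}

def should_run_job(event_action: str, label_name: str, pr_label_names: list) -> bool:
    """Mirror of the workflow's `if:` expression."""
    if event_action == "labeled":
        # The job runs only when one of the trusted labels was just applied.
        return label_name in TRUSTED_LABELS
    # For opened/synchronize events, the PR must already carry a trusted label.
    return any(name in TRUSTED_LABELS for name in pr_label_names)

# Example: a newly synchronized PR that was previously approved still runs.
assert should_run_job("synchronize", "", ["approved"])
```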
24 changes: 17 additions & 7 deletions CONTRIBUTING.md
@@ -133,17 +133,19 @@ make test-python
### Integration Tests
There are two sets of tests you can run:
1. Local integration tests (for faster development)
1. Local integration tests (for faster development, tests file offline store & key online stores)
2. Full integration tests (requires cloud environment setups)

#### Local integration tests
To get local integration tests running, you'll need to have Redis setup:
For this approach to running tests, you'll need to have Docker set up locally: [Get Docker](https://docs.docker.com/get-docker/)

Redis
1. Install Redis: [Quickstart](https://redis.io/topics/quickstart)
2. Run `redis-server`
It leverages a file-based offline store to test against emulated versions of Datastore, DynamoDB, and Redis, using ephemeral containers.

Now run `make test-python-universal-local`
These tests create new temporary tables / datasets locally only, and they are cleaned up when the containers are torn down.

```sh
make test-python-integration-local
```
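As a rough illustration of the "ephemeral containers" mechanism, spinning up a disposable Redis with the testcontainers library and pointing an online store config at it looks roughly like the sketch below (a minimal sketch assuming a running Docker daemon; not the exact creator classes the test suite uses):

```python
from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_for_logs

# Start a throwaway Redis container for the duration of the test.
container = DockerContainer("redis:6.2").with_exposed_ports(6379)
container.start()
wait_for_logs(container=container, predicate="Ready to accept connections", timeout=10)

# Feed the dynamically mapped port into an online store config.
exposed_port = container.get_exposed_port(6379)
online_store_config = {"type": "redis", "connection_string": f"localhost:{exposed_port},db=0"}
print(online_store_config)

container.stop()
```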

#### Full integration tests
To test across clouds, on top of setting up Redis, you also need GCP / AWS / Snowflake setup.
@@ -166,7 +168,15 @@ To test across clouds, on top of setting up Redis, you also need GCP / AWS / Sno
2. Modify `RedshiftDataSourceCreator` to use your credentials

**Snowflake**
- See https://signup.snowflake.com/
1. See https://signup.snowflake.com/ to set up a trial.
2. Then, to run successfully, you'll need to set up some environment variables:
```sh
export SNOWFLAKE_CI_DEPLOYMENT='[snowflake_deployment]'
export SNOWFLAKE_CI_USER='[your user]'
export SNOWFLAKE_CI_PASSWORD='[your pw]'
export SNOWFLAKE_CI_ROLE='[your CI role e.g. SYSADMIN]'
export SNOWFLAKE_CI_WAREHOUSE='[your warehouse]'
```
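These variables are read by the Snowflake test fixtures at run time; a minimal sketch of that pattern (with a hypothetical helper name, not the exact Feast internals) is:

```python
import os

def snowflake_ci_config() -> dict:
    """Build a Snowflake connection config from the CI environment variables above."""
    required = [
        "SNOWFLAKE_CI_DEPLOYMENT",
        "SNOWFLAKE_CI_USER",
        "SNOWFLAKE_CI_PASSWORD",
        "SNOWFLAKE_CI_ROLE",
        "SNOWFLAKE_CI_WAREHOUSE",
    ]
    missing = [name for name in required if not os.getenv(name)]
    if missing:
        raise RuntimeError(f"Missing Snowflake CI environment variables: {missing}")
    # e.g. {"deployment": ..., "user": ..., "password": ..., "role": ..., "warehouse": ...}
    return {name.replace("SNOWFLAKE_CI_", "").lower(): os.environ[name] for name in required}
```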

Then run `make test-python-integration`. Note that for Snowflake / GCP / AWS, this will create new temporary tables / datasets.

25 changes: 20 additions & 5 deletions Makefile
@@ -68,8 +68,26 @@ test-python:
test-python-integration:
FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests

test-python-integration-local:
@(docker info > /dev/null 2>&1 && \
FEAST_USAGE=False \
IS_TEST=True \
FEAST_IS_LOCAL_TEST=True \
FEAST_LOCAL_ONLINE_CONTAINER=True \
python -m pytest -n 8 --integration \
-k "not test_apply_entity_integration and \
not test_apply_feature_view_integration and \
not test_apply_data_source_integration" \
sdk/python/tests \
) || echo "This script uses Docker, and it isn't running - please start the Docker Daemon and try again!";

test-python-integration-container:
FEAST_USAGE=False IS_TEST=True FEAST_LOCAL_ONLINE_CONTAINER=True python -m pytest -n 8 --integration sdk/python/tests
@(docker info > /dev/null 2>&1 && \
FEAST_USAGE=False \
IS_TEST=True \
FEAST_LOCAL_ONLINE_CONTAINER=True \
python -m pytest -n 8 --integration sdk/python/tests \
) || echo "This script uses Docker, and it isn't running - please start the Docker Daemon and try again!";

test-python-universal-contrib:
PYTHONPATH='.' \
@@ -104,14 +122,11 @@ test-python-universal-postgres:
not test_universal_types" \
sdk/python/tests

test-python-universal-local:
FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest -n 8 --integration sdk/python/tests

test-python-universal:
FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests

test-python-go-server: compile-go-lib
FEAST_USAGE=False IS_TEST=True FEAST_GO_FEATURE_RETRIEVAL=True pytest --integration --goserver sdk/python/tests
FEAST_USAGE=False IS_TEST=True pytest --integration --goserver sdk/python/tests

format-python:
# Sort
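The `docker info > /dev/null 2>&1 && ... || echo ...` guard in the `test-python-integration-local` and `test-python-integration-container` targets above simply refuses to run the containerized tests when no Docker daemon is reachable. The same check in Python form (an illustrative helper, not part of the commit) would be:

```python
import shutil
import subprocess

def docker_daemon_available() -> bool:
    """Return True only if the docker CLI exists and the daemon answers `docker info`."""
    if shutil.which("docker") is None:
        return False
    result = subprocess.run(
        ["docker", "info"],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    return result.returncode == 0

if not docker_daemon_available():
    print("This script uses Docker, and it isn't running - please start the Docker Daemon and try again!")
```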
5 changes: 4 additions & 1 deletion sdk/python/tests/conftest.py
@@ -110,7 +110,10 @@ def pytest_collection_modifyitems(config, items: List[Item]):
items.append(t)

goserver_tests = [t for t in items if "goserver" in t.keywords]
if should_run_goserver:
if not should_run_goserver:
for t in goserver_tests:
items.remove(t)
else:
items.clear()
for t in goserver_tests:
items.append(t)
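The conftest fix above makes test collection symmetrical: goserver-marked tests are dropped unless the goserver flag was passed, and only they run when it was. Isolated from pytest internals, the selection logic is roughly this simplified sketch (not the hook itself):

```python
from typing import List, Set

def select_tests(collected: List[str], goserver_tests: Set[str], should_run_goserver: bool) -> List[str]:
    """Either exclude goserver tests or run them exclusively."""
    if not should_run_goserver:
        return [t for t in collected if t not in goserver_tests]
    return [t for t in collected if t in goserver_tests]

# Example: without the goserver flag, goserver tests are filtered out of the run.
assert select_tests(["test_a", "test_go"], {"test_go"}, should_run_goserver=False) == ["test_a"]
```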
@@ -91,6 +91,7 @@
"sqlite": ({"type": "sqlite"}, None),
}

# Only configure Cloud DWH if running full integration tests
if os.getenv("FEAST_IS_LOCAL_TEST", "False") != "True":
AVAILABLE_OFFLINE_STORES.extend(
[
@@ -141,6 +142,7 @@
}


# Replace online stores with emulated online stores if we're running local integration tests
if os.getenv("FEAST_LOCAL_ONLINE_CONTAINER", "False").lower() == "true":
replacements: Dict[
str, Tuple[Union[str, Dict[str, str]], Optional[Type[OnlineStoreCreator]]]
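Both comments describe the same env-var gating pattern: `FEAST_IS_LOCAL_TEST` keeps the offline store matrix down to the file-based store, and `FEAST_LOCAL_ONLINE_CONTAINER` swaps the real online stores for containerized emulators. A self-contained sketch of that pattern (values and names here are illustrative, not the repo's actual configuration lists):

```python
import os

# Illustrative defaults; the real lists live in the test repo configuration module.
AVAILABLE_OFFLINE_STORES = ["file"]
AVAILABLE_ONLINE_STORES = {"sqlite": {"type": "sqlite"}}

# Only configure cloud data warehouses when running the full integration suite.
if os.getenv("FEAST_IS_LOCAL_TEST", "False") != "True":
    AVAILABLE_OFFLINE_STORES.extend(["bigquery", "redshift", "snowflake"])

# For local runs, point online stores at containerized emulators instead of managed services.
if os.getenv("FEAST_LOCAL_ONLINE_CONTAINER", "False").lower() == "true":
    AVAILABLE_ONLINE_STORES.update({
        "redis": {"type": "redis", "connection_string": "localhost:6379,db=0"},
        "datastore": {"type": "datastore", "project_id": "test-project"},
        "dynamodb": {"type": "dynamodb", "region": "us-west-2"},
    })

print(AVAILABLE_OFFLINE_STORES, AVAILABLE_ONLINE_STORES)
```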
@@ -27,7 +27,7 @@ def create_online_store(self) -> Dict[str, str]:
self.container.start()
log_string_to_wait_for = r"\[datastore\] Dev App Server is now running"
wait_for_logs(
container=self.container, predicate=log_string_to_wait_for, timeout=5
container=self.container, predicate=log_string_to_wait_for, timeout=10
)
exposed_port = self.container.get_exposed_port("8081")
os.environ[datastore.client.DATASTORE_EMULATOR_HOST] = f"0.0.0.0:{exposed_port}"
@@ -21,7 +21,7 @@ def create_online_store(self) -> Dict[str, str]:
"Initializing DynamoDB Local with the following configuration:"
)
wait_for_logs(
container=self.container, predicate=log_string_to_wait_for, timeout=5
container=self.container, predicate=log_string_to_wait_for, timeout=10
)
exposed_port = self.container.get_exposed_port("8000")
return {
@@ -19,7 +19,7 @@ def create_online_store(self) -> Dict[str, str]:
"Initializing Hbase Local with the following configuration:"
)
wait_for_logs(
container=self.container, predicate=log_string_to_wait_for, timeout=5
container=self.container, predicate=log_string_to_wait_for, timeout=10
)
exposed_port = self.container.get_exposed_port("9090")
return {"type": "hbase", "host": "127.0.0.1", "port": exposed_port}
@@ -17,7 +17,7 @@ def create_online_store(self) -> Dict[str, str]:
self.container.start()
log_string_to_wait_for = "Ready to accept connections"
wait_for_logs(
container=self.container, predicate=log_string_to_wait_for, timeout=5
container=self.container, predicate=log_string_to_wait_for, timeout=10
)
exposed_port = self.container.get_exposed_port("6379")
return {"type": "redis", "connection_string": f"localhost:{exposed_port},db=0"}
11 changes: 11 additions & 0 deletions sdk/python/tests/integration/registration/test_registry.py
@@ -571,7 +571,18 @@ def test_apply_feature_view_integration(test_registry):
@pytest.mark.parametrize(
"test_registry", [lazy_fixture("gcs_registry"), lazy_fixture("s3_registry")],
)
def test_apply_data_source_integration(test_registry: Registry):
run_test_data_source_apply(test_registry)


@pytest.mark.parametrize(
"test_registry", [lazy_fixture("local_registry")],
)
def test_apply_data_source(test_registry: Registry):
run_test_data_source_apply(test_registry)


def run_test_data_source_apply(test_registry: Registry):
# Create Feature Views
batch_source = FileSource(
name="test_source",
