diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index ddd51ef..0000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,88 +0,0 @@ -version: 2.1 - -orbs: - localstack: localstack/platform@dev:alpha - -jobs: - build: - # machine: - # image: ubuntu-2004:202101-01 - executor: localstack/default - environment: - DOWNLOAD_TEST_BIN: 1 - KINESIS_PROVIDER: kinesalite - steps: - - checkout - - # start LocalStack asynchronously - - localstack/start - - - run: git submodule update --init --recursive - - run: - name: "Installing prerequisites" - command: | - sudo apt-get update - sudo apt install -y python3.8 libsasl2-dev - - # download/install and cache aws.test and localstack pip - - restore_cache: - keys: - - localstack-cache - - # uncomment this line to force re-downloading of aws.test binary - - run: rm -f /home/circleci/.cache/localstack/aws.test - - - run: bin/install-aws-test - # - run: - # name: "Installing LocalStack" - # command: | - # cd localstack - # virtualenv --python=`which python3.8` .venv - # make install - # cd .. - - save_cache: - key: localstack-cache - paths: - - /home/circleci/.cache/localstack/ - - /home/circleci/.cache/pip/ - - # wait for LocalStack to become available - - localstack/wait - - # main test suite - - run: - name: "Run test suite" - # command: bin/run-tests -i localstack-tests.incl.txt - command: bin/run-tests -t localstack-tests.incl.txt - - # save build reports as artifacts - - run: - name: "Create reports" - when: always - command: | - pip3 install junit2html - bin/create-report || true - bin/create-report-html || true - mkdir -p /tmp/report/tests - mkdir -p /tmp/results/ - mv build/report.html /tmp/report || true - mv build/tests/*.html /tmp/report/tests || true - # temporarily also uploading XML files, for debugging - cp build/tests/*.xml /tmp/report/tests || true - mv build/tests/*.xml /tmp/results || true - - - store_test_results: - path: /tmp/results - - store_artifacts: - path: /tmp/report - - store_artifacts: - path: /tmp/report/tests - -workflows: - main: - when: - or: - - equal: [ build, << pipeline.git.branch >> ] - - equal: [ build-new, << pipeline.git.branch >> ] - jobs: - - build diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..599525d --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,20 @@ +on: [push, pull_request] + +name: Linting +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.10.5 + uses: actions/setup-python@v4 + with: + python-version: '3.10.5' + cache: 'pip' + - name: Install system dependencies + run: | + pip install --upgrade pip + pip install -r requirements.txt + - name: Run Linting + run: | + make lint diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..c07e4c3 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,84 @@ +on: + schedule: + - cron: '0 19 * * 6' + workflow_dispatch: + inputs: + services: + default: 'ls-community' + type: string + description: name of the service to execute tests for (e.g. 
"ls-community", "ls-pro", "ls-all", "s3,iam,ec2") + localstack-image: + required: false + type: string + default: 'localstack/localstack:latest' + description: localstack docker image name to test against + +name: Terraform Tests +jobs: + + prepare_list: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + submodules: 'true' + - id: set-matrix + run: echo "matrix=$(python -m terraform_pytest.get-services ${{ github.event.inputs.services || 'ls-community' }})" >> $GITHUB_OUTPUT + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + + test_service: + needs: prepare_list + strategy: + max-parallel: 10 + fail-fast: false + matrix: + service: ${{ fromJson(needs.prepare_list.outputs.matrix) }} + runs-on: ubuntu-latest + + steps: + + - uses: actions/checkout@v3 + with: + submodules: 'true' + + - uses: actions/setup-go@v3 + with: + go-version: '1.18.x' + cache: true + cache-dependency-path: terraform-provider-aws/go.sum + + - name: Set up Python 3.10.5 + uses: actions/setup-python@v4 + with: + python-version: '3.10.5' + cache: 'pip' + + - name: Install system dependencies + run: | + pip install --upgrade pip + pip install -r requirements.txt + + - name: Patch Terraform Provider + run: | + cd terraform-provider-aws && go mod vendor + cd ../ + python -m terraform_pytest.main patch + + - name: Build ${{ matrix.service }} Binary + run: | + python -m terraform_pytest.main build -s ${{ matrix.service }} + ls -la terraform-provider-aws/test-bin + + - name: Run ${{ matrix.service }} Tests + env: + PYTEST_PARALLEL_CONFIG: "${{ matrix.service == 'ec2' && '-n 2' || '' }}" + run: | + python -m pytest $PYTEST_PARALLEL_CONFIG --junitxml=target/reports/pytest.xml terraform-provider-aws/internal/service/${{ matrix.service }} -s -v --ls-start --ls-image ${{ github.event.inputs.localstack-image || 'localstack/localstack:latest' }} + + - name: Publish ${{ matrix.service }} Test Results + uses: EnricoMi/publish-unit-test-result-action@v2 + if: always() + with: + junit_files: target/reports/*.xml + check_name: ${{ matrix.service }} Terraform Test Results diff --git a/.gitignore b/.gitignore index f38249a..77c853a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,9 @@ -*~ -*.swp -*.log -*.bak - -.vscode +.DS_Store .idea -*.iml - -build/ +.venv +.pytest_cache +__pycache__ +target +**/*.test +report.xml +volume diff --git a/.gitmodules b/.gitmodules index 5613b14..b558fd7 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,9 +1,3 @@ -[submodule "localstack"] - path = localstack - url = https://github.com/localstack/localstack.git [submodule "terraform-provider-aws"] path = terraform-provider-aws url = https://github.com/hashicorp/terraform-provider-aws.git -[submodule "moto"] - path = moto - url = git@github.com:localstack/moto diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..271255b --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,19 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + + - repo: https://github.com/pycqa/isort + rev: 5.9.1 + hooks: + - id: isort + name: isort (python) + - id: isort + name: isort (cython) + types: [cython] + - id: isort + name: isort (pyi) + types: [pyi] diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..6fd2b21 --- /dev/null +++ b/Makefile @@ -0,0 +1,36 @@ +#!/bin/bash + +VENV_BIN ?= python3 -m venv +VENV_DIR ?= .venv +PIP_CMD ?= pip3 + +ifeq 
($(OS), Windows_NT) + VENV_ACTIVATE = $(VENV_DIR)/Scripts/activate +else + VENV_ACTIVATE = $(VENV_DIR)/bin/activate +endif + +usage: ## Show this help + @fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/:.*##\s*/##/g' | awk -F'##' '{ printf "%-25s %s\n", $$1, $$2 }' + +$(VENV_ACTIVATE): + test -d $(VENV_DIR) || $(VENV_BIN) $(VENV_DIR) + $(VENV_RUN); $(PIP_CMD) install --upgrade pip setuptools wheel plux + touch $(VENV_ACTIVATE) + +VENV_RUN = . $(VENV_ACTIVATE) + +venv: $(VENV_ACTIVATE) ## Create a new (empty) virtual environment + +install: ## Install the package in editable mode + $(VENV_RUN); $(PIP_CMD) install -r requirements.txt + +init-precommit: ## install te pre-commit hook into your local git repository + ($(VENV_RUN); pre-commit install) + +lint: ## Run linting + @echo "Running black... " + black --check . + +format: ## Run formatting + $(VENV_RUN); python -m isort .; python -m black . diff --git a/README.md b/README.md index 12b48ca..77be770 100644 --- a/README.md +++ b/README.md @@ -1,61 +1,43 @@ -Terraform vs LocalStack -======================= - -This repository contains scripts and CI configurations to to run the Terraform Acceptance test suite of the AWS provider against LocalStack - -## Utilities - -Some utilities for local development: - -* `bin/list-tests [--all]`: list the available tests by parsing the go test files. -* `bin/install-aws-test` creates the binary for running the test suite (and installs it into `$HOME/.cache/localstack/aws.test`. requires go 1.16 -* `bin/run-tests [test]` run a specific test. this installs and runs localstack in a background process. add the flag `-t` to test against an already running localstack instance. - -## Finding and running tests - -After running `bin/install-aws-test`, use `bin/run-tests [OPTIONS...] [TESTS...]` to run individual tests or entire test suites. - -Here are some examples: - -* `bin/run-tests TestAccAWSAPIGatewayResource` -* `bin/run-tests -t TestAccAWSAPIGatewayResource`: same as above, but does not start localstack -* `bin/run-tests TestAccAWSAPIGateway`: runs all tests that match `TestAccAWSAPIGateway` (run `bin/list-tests TestAccAWSAPIGateway` to see which ones will be executed) -* `bin/run-tests -e TestAccAWSAPIGatewayV2 TestAccAWSAPIGateway`: same as above, but excludes all tests that match `TestAccAWSAPIGatewayV2`. -* `bin/run-tests -i localstack-tests.incl.txt`: runs all tests listed in the text file - -You can use `bin/list-tests` with the same parameters to see which tests will be executed, -or to find specific tests based on patterns. - -For example: - -``` - % bin/list-tests Queue -TestAccAWSBatchJobQueue -TestAccAWSGameliftGameSessionQueue -TestAccAWSMediaConvertQueue -TestAccAWSSQSQueue -TestAccAWSSQSQueuePolicy -TestAccDataSourceAwsBatchJobQueue -TestAccDataSourceAwsSqsQueue -``` - -or - -``` - % bin/list-tests "Data.*Queue" -TestAccDataSourceAwsBatchJobQueue -TestAccDataSourceAwsSqsQueue -``` - -## Generating the test reports - -Test logs are aggregated into `build/tests/*.log`, the command `bin/create-report` will create junit-like xml reports. -These can then be rendered into html using `bin/create-report-html`, which also creates a summary page in `build/report.html`. -For rendering html, you need `junit2html`. - -## Travis config - -### Build cache - -The Travis-CI worker caches the built `aws.test` binary across builds. -The first build may therefore take a while. +# Localstack Terraform Test Runner + +This is a test runner for localstack and terraform. 
It runs test cases from the HashiCorp [terraform provider aws](https://github.com/hashicorp/terraform-provider-aws.git) against a LocalStack instance. + +The purpose of this project is to externalize the test cases from the localstack repo and run them against LocalStack to gather parity metrics. + +## Installation +1. Clone the repository with submodules + - `git clone git@github.com:localstack/localstack-terraform-test.git --recurse-submodules` + - Make sure you have the latest version of the submodules after switching to a different branch by running `git submodule update --init --recursive` +2. Run `make venv` to create a virtual environment +3. Run `make install` to install the dependencies + +## How to run? +1. Run `python -m terraform_pytest.main patch` to apply the patch to the terraform provider aws + - **Note: This operation is not idempotent. Please apply the patch only once.** +2. Run `python -m terraform_pytest.main build -s s3` to build the testing binary for the golang module +3. Now you are ready to use `python -m pytest` commands to list and run test cases from golang + +## How to run test cases? +- To list all the test cases of a specific service, run `python -m pytest terraform-provider-aws/internal/service/<service> --collect-only -q` +- To run a specific test case, run `python -m pytest terraform-provider-aws/internal/service/<service>/ -k <test-case> --ls-start` or `python -m pytest terraform-provider-aws/internal/service/<service>/<test-file>::<test-case> --ls-start` +- Additional environment variables can be added by prepending them to the command, e.g. `AWS_ALTERNATE_REGION='us-west-2' python -m pytest terraform-provider-aws/internal/service/<service>/<test-file>::<test-case> --ls-start` + +## Default environment variables for Terraform Tests +- **TF_ACC**: `1` +- **AWS_ACCESS_KEY_ID**: `test` +- **AWS_SECRET_ACCESS_KEY**: `test` +- **AWS_DEFAULT_REGION**: `us-west-1` +- **AWS_ALTERNATE_ACCESS_KEY_ID**: `test` +- **AWS_ALTERNATE_SECRET_ACCESS_KEY**: `test` +- **AWS_ALTERNATE_REGION**: `us-east-2` +- **AWS_THIRD_REGION**: `eu-west-1` + +## Environment variables for Localstack +- **DEBUG**: `1` +- **PROVIDER_OVERRIDE_S3**: `asf` +- **FAIL_FAST**: `1` + +## Options +- `--ls-start`: Start a localstack instance before running the test cases +- `--ls-image`: Specify the localstack image to use, default is `localstack/localstack:latest` \ No newline at end of file diff --git a/bin/create-report b/bin/create-report deleted file mode 100755 index dec6c14..0000000 --- a/bin/create-report +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/python3 - -import glob -import os - -from collections import defaultdict - -prefixes = { - 'go': '[ltt.gotest]', - 'lst': '[ltt.localstack]', - 'runner': '[ltt.runner]' -} - -def strip_special_chars(line): - # remove special characters, convert to str - if not isinstance(line, bytes): - line = line.replace('\x1b', '').encode('utf-8') - return line.decode('utf-8', 'ignore') - - -class TestCase: - def __init__(self, suite, test): - self.suite = suite - self.test = test - self.lines = list() - self.duration = 0 - self.result = None - - -class RunnerLogParser: - def __init__(self, suite): - self.suite = suite - self.tests = dict() - self.active_test = None - self.collecting = False - - def parse_duration(self, fragment): - seconds = fragment[1:-2] # (0.00s) -> 0.00 - return float(seconds) - - def add_line(self, src, line): - tests = self.tests - suite = self.suite - - line = strip_special_chars(line) - - output = line[len(prefixes[src]) + 1:] - - if src == 'runner': - if 
output.startswith('starting'): - if output.strip().split()[-1] == suite: - self.collecting = True - - if output.startswith('completed'): - if output.strip().split()[-1] == suite: - self.collecting = False - - if not self.collecting: - return - - if src == 'go': - if output.startswith('=== RUN'): - _ ,_ , test = output.split() - if test not in tests: - tests[test] = TestCase(suite, test) - - self.active_test = test - return - - elif output.startswith('=== PAUSE'): - _ ,_ , test = output.split() - self.active_test = None - return - - elif output.startswith('=== CONT'): - _ ,_ , test = output.split() - self.active_test = test - return - - elif output.strip().startswith('--- PASS'): - _ ,_ , test, duration = output.split() - self.active_test = None - tests[test].result = 'passed' - tests[test].duration = self.parse_duration(duration) - return - - elif output.strip().startswith('--- SKIP'): - _ ,_ , test, duration = output.split() - self.active_test = None - tests[test].result = 'skipped' - tests[test].duration = self.parse_duration(duration) - return - - elif output.strip().startswith('--- FAIL'): - _ ,_ , test, duration = output.split() - self.active_test = None - tests[test].result = 'failed' - tests[test].duration = self.parse_duration(duration) - return - - if self.active_test: - tests[self.active_test].lines.append(line) - - -def parse(suite, lines): - parser = RunnerLogParser(suite) - - for line in lines: - for src, prefix in prefixes.items(): - if line.startswith(prefix): - parser.add_line(src, line) - - for test in parser.tests.values(): - if test.result is None: - test.result = 'errored' - test.duration = 0. - - return parser - - -def parser_to_xml(parser): - - testsuite = { - 'name': parser.suite, - 'time': 0, - 'errors': 0, - 'failures': 0, - 'skipped': 0, - 'tests': 0, - } - - def find_fail_message(testcase: TestCase): - for line in testcase.lines: - if not line.startswith('[ltt.gotest]'): - continue - - if 'Step' in line and '.go' in line and 'error' in line: - return line.strip() - - return 'failure' - - - def test_to_junit_dict(testcase: TestCase): - d = { - 'name': testcase.test, - 'classname': testcase.test.split('/')[0], - 'time': "%.2f" % testcase.duration, - } - - testsuite['tests'] += 1 - testsuite['time'] += testcase.duration - - if testcase.result == 'errored': - d['error'] = { - 'type': 'error', - '__CDATA__': ''.join(testcase.lines) - } - testsuite['errors'] += 1 - - if testcase.result == 'failed': - d['failure'] = { - 'type': 'failure', - 'message': escape(find_fail_message(testcase)), - '__CDATA__': ''.join(testcase.lines) - } - testsuite['failures'] += 1 - - - if testcase.result == 'skipped': - d['skipped'] = {} - testsuite['skipped'] += 1 - - - return d - - testcases = [dict2xml(test_to_junit_dict(t), 'testcase') for t in parser.tests.values()] - - testsuite['properties'] = [{ - 'property': {'name': 'test-runner', 'value': 'localstack-terraform-test'} - }] - - testsuite['__XML__'] = testcases - - return dict2xml(testsuite, 'testsuite') - - -def escape(s): - s = s.replace("&", "&") - s = s.replace("<", "<") - s = s.replace(">", ">") - s = s.replace("\"", """) - s = s.replace("'", "'") - return s - - -def dict2xml(d, root_node=None): - wrap = False if None == root_node or isinstance(d, list) else True - root = 'objects' if None == root_node else root_node - root_singular = root[:-1] if 's' == root[-1] and None == root_node else root - xml = '' - children = [] - - if isinstance(d, dict): - for key, value in dict.items(d): - if key == '__CDATA__': - children.append('') 
- elif key.startswith('__XML__'): - if isinstance(value, list): - children.append('\n'.join(value)) - else: - children.append(value) - elif isinstance(value, dict): - children.append(dict2xml(value, key)) - elif isinstance(value, list): - children.append(dict2xml(value, key)) - else: - xml = xml + ' ' + key + '="' + str(value) + '"' - else: - for value in d: - children.append(dict2xml(value, root_singular)) - - end_tag = '>' if 0 < len(children) else '/>' - - if wrap or isinstance(d, dict): - xml = '<' + root + xml + end_tag - - if 0 < len(children): - for child in children: - xml = xml + child - - if wrap or isinstance(d, dict): - xml = xml + '' - - return xml - - - -def main(): - for f in glob.glob('build/tests/*.log'): - with open(f, 'rb') as fd: - lines = fd.readlines() - - lines = [strip_special_chars(line) for line in lines] - suite = os.path.basename(f)[:-4] # strip `.log` - parser = parse(suite, lines) - - f_xml = f[:-4] + '.xml' - d = os.path.dirname(f_xml) - f = os.path.basename(f_xml) - - f_xml = os.path.join(d, 'TEST-' + f) - - with open(f_xml, 'w') as fd: - print(f_xml) - fd.writelines(parser_to_xml(parser)) - - -if __name__ == '__main__': - main() diff --git a/bin/create-report-cli b/bin/create-report-cli deleted file mode 100755 index c92f30a..0000000 --- a/bin/create-report-cli +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/python3 - -import glob -import xml.etree.ElementTree as ET - - -def get_summary(reports): - passed = failures = errors = skipped = total = 0 - for f in reports: - try: - for event, elem in ET.iterparse(f, events=('start',)): - if elem.tag == 'testsuite': - d = dict(elem.attrib) - d['time'] = float(d['time']) - d['tests'] = float(d['tests']) - d['errors'] = float(d['errors']) - d['failures'] = float(d['failures']) - d['skipped'] = float(d['skipped']) - d['passed'] = d['tests'] - (d['errors'] + d['failures'] + d['skipped']) - print(d) - # - total += d["tests"] - passed += d["passed"] - failures += d["failures"] - errors += d["errors"] - except Exception as e: - print(e) - - print('===========================================') - print("Total cases: {}".format(total)) - for name, nr in [("Passed", passed), ("Failures", failures), ("Errors", errors), ("Skipped", skipped)]: - pct = round((nr / total) * 100, 2) - print("{}: {} ({}%)".format(name, nr, pct)) - - failed_tests = failures + errors - if failed_tests > 0: - raise Exception("{} Terraform tests failed!".format(failed_tests)) - - -def main(): - reports = glob.glob('build/tests/*.xml') - if not reports: - print('no reports, run bin/create-report first') - exit(1) - - reports.sort() - - print('===========================================') - get_summary(reports) - print('===========================================') - print() - - -if __name__ == '__main__': - main() diff --git a/bin/create-report-html b/bin/create-report-html deleted file mode 100755 index 3142a85..0000000 --- a/bin/create-report-html +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/python3 - -import glob -import os -import xml.etree.ElementTree as ET -from shutil import which - - -def render(src, dest): - os.system(f'junit2html {src} {dest}') - - -def create_test_reports(reports): - files = [] - for f in reports: - - f_html = f[:-4] + '.html' - - if os.path.exists(f_html): - print('skipping', f) - continue - - print(f) - render(f, f_html) - files.append(f_html) - - return files - - -def parse_testsuite_meta(source): - for event, elem in ET.iterparse(source, events=('start', )): - if elem.tag == 'testsuite': - - d = dict(elem.attrib) - d['time'] = 
float(d['time']) - d['tests'] = int(d['tests']) - d['errors'] = int(d['errors']) - d['failures'] = int(d['failures']) - d['skipped'] = int(d['skipped']) - d['passed'] = d['tests'] - (d['errors'] + d['failures'] + d['skipped']) - - return d - - return None - - -def create_summary(reports): - testsuites = [] - - for f in reports: - with open(f, 'r') as fd: - try: - ts = parse_testsuite_meta(fd) - if ts: - ts['report'] = os.path.basename(f[:-4] + '.html') - testsuites.append(ts) - except Exception as e: - print('Unable to parse test suite metadata from report %s: %s' % (f, e)) - - summary = {'tests': 0, 'skipped': 0, 'failures': 0, 'errors': 0, 'passed': 0, 'time': 0} - - for ts in testsuites: - for k in summary.keys(): - summary[k] += ts.get(k, 0) - - summary['time'] = round(summary['time'], 2) - - for k in ('skipped', 'failures', 'errors', 'passed'): - if summary['tests'] == 0: - summary[k + '%'] = 0 - else: - summary[k + '%'] = round((summary[k] / summary['tests']) * 100, 2) - - return testsuites, summary - - -def render_summary(testsuites, summary): - # TODO: use jinja templates and render a proper summary page - - html = 'Summary' - - html += '' - html += '' - for k in ('passed', 'failures', 'errors', 'skipped'): - html += f'' - - html += f'' - - html += '
ResultsNumber%
{k}{summary[k]}{summary[k+"%"]}
Total{summary["tests"]}100%

' - - html += '' - for ts in testsuites: - html += ''.format(**ts) - - html += '
TestTestsPassedFailedErroredSkipped
{name}{tests}{passed}{failures}{errors}{skipped}
' - - html += '' - return html - - -def main(): - reports = glob.glob('build/tests/*.xml') - if not reports: - print('no reports, run bin/create-report first') - exit(1) - - if not which('junit2html'): - print('junit2html not found in path, please install it with: pip install junit2html') - exit(1) - - reports.sort() - - create_test_reports(reports) - testsuites, summary = create_summary(reports) - - with open('build/report.html', 'w') as fd: - fd.write(render_summary(testsuites, summary)) - - print('===========================================') - print('report created') - print() - - if which('open'): - print('open build/report.html') - elif which('xdg-open'): - print('xdg-open build/report.html') - - -if __name__ == '__main__': - main() diff --git a/bin/install-aws-test b/bin/install-aws-test deleted file mode 100755 index ef80b06..0000000 --- a/bin/install-aws-test +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash - -pwd=`pwd` - -TEST_BIN=$HOME/.cache/localstack/aws.test - -TEST_BIN_URL=${TEST_BIN_URL:-"https://localstack-terraform-test.s3.eu-central-1.amazonaws.com/aws.test"} - -[ -f ${TEST_BIN} ] && { echo "aws.test already exists at ${TEST_BIN} skipping"; exit 0; } - -BIN_DIR=$(dirname ${TEST_BIN}) -mkdir -p ${BIN_DIR} - -if [ ! -z "${DOWNLOAD_TEST_BIN}" ] && [ ${DOWNLOAD_TEST_BIN} == 1 ]; then - echo "downloading test_binary from ${TEST_BIN_URL}" - if ! curl ${TEST_BIN_URL} --output ${TEST_BIN}; then - exit 1; - fi - chmod +x ${TEST_BIN} - ls -la ${TEST_BIN} - exit 0 -fi - -PATCH="etc/0001-add-simple-hardcoded-configuration-for-running-tests.patch" - -cd terraform-provider-aws -git apply $pwd/${PATCH} - -PATCH="etc/0002-route53-reduce-sync-time.patch" - -git apply $pwd/${PATCH} - -go get -u ./ - -echo "building ${TEST_BIN} with go test" -go test -c ./aws - -mv aws.test ${TEST_BIN} diff --git a/bin/list-tests b/bin/list-tests deleted file mode 100755 index 661d558..0000000 --- a/bin/list-tests +++ /dev/null @@ -1,84 +0,0 @@ -#!/bin/bash - -function usage() { - echo "USAGE" - echo " list-tests [OPTION...] 
[INCLUDE...]" - echo "" - echo "DESCRIPTION" - echo " list available acceptance tests in the terraform test suite and prepare arguments for the go test command" - echo "" - echo "OPTIONS" - echo " -h, --help" - echo " show this message" - echo "" - echo " -a, --all" - echo " list individual test cases instead of test groups" - echo "" - echo " -e PATTERN|FILE, --exclude PATTERN|FILE" - echo " repeatable option to exclude certain tests or test groups" - echo "" - echo " -i PATTERN|FILE, --include PATTERN|FILE" - echo " repeatable option to include certain tests or test groups" - echo "" - echo " -p, --prepare" - echo " prepare the list of tests for 'go test -run' command" -} - -function list_testacc_all() { - grep "^func TestAcc" terraform-provider-aws/aws/**_test.go \ - | cut -d':' -f2 | cut -d '(' -f1 | cut -c 6- \ - | sort -u | grep -v '^$' -} - -function list_testacc() { - list_testacc_all | cut -d'_' -f1 | sort -u | grep -v '^$' -} - -# some logic to invoke the correct command -function main() { - excluded=() - included=() - - cmd=list_testacc - - # parse options - while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) usage; exit 0; ;; - -a|--all) cmd=list_testacc_all; ;; - -e|--exclude) excluded+=("$2"); shift; ;; - -i|--include) included+=("$2"); shift; ;; - -p|--prepare) prepare=0; ;; - *) included+=("$1"); ;; - esac - shift - done - - # build grep filters - filters="" - for excl in "${excluded[@]}"; do - if [ -f $excl ]; then - filters="${filters}|grep -v -f "$excl"" - else - filters="${filters}|grep -v '"$excl"'" - fi - done - for incl in "${included[@]}"; do - if [ -f $incl ]; then - filters="${filters}|grep -f "$incl"" - else - filters="${filters}|grep '"$incl"'" - fi - done - - cmd=$(echo "${cmd} ${filters}") - - if [ ! -z $prepare ]; then - echo "($(eval $cmd | tr '\n' '|' | sed 's/|$/_/' | sed 's/|/_|/g'))" - else - eval $cmd - fi - -} - -main "$@" diff --git a/bin/publish-metrics b/bin/publish-metrics deleted file mode 100755 index 51a8694..0000000 --- a/bin/publish-metrics +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -export PROJECT_ROOT=$(pwd) # FIXME - -function publish_metrics() { - export AWS_DEFAULT_REGION=us-east-2 - # check if AWS credentials are configured - aws cloudwatch list-metrics --namespace test123 > /dev/null || return - echo "Publishing test results to CloudWatch Metrics ..." - - # generate report - bin/create-report - bin/create-report-html - - # prepare data - now=$(date +"%Y-%m-%dT%H:%M:%S") - reportFile=$PROJECT_ROOT/build/report.html - passed=$(sed -r 's|.*passed([^<]*).*|\1|' $reportFile) - failures=$(sed -r 's|.*failures([^<]*).*|\1|' $reportFile) - errors=$(sed -r 's|.*errors([^<]*).*|\1|' $reportFile) - skipped=$(sed -r 's|.*skipped([^<]*).*|\1|' $reportFile) - total=$(sed -r 's|.*Total([^<]*).*|\1|' $reportFile) - passedPercent=$(awk "BEGIN { print $passed / $total }") - - # publish data to CloudWatch - names=(testsPassed testsPassedPercent testsFailures testsErrors testsSkipped testsTotal) - values=($passed $passedPercent $failures $errors $skipped $total) - echo ${values[@]} - for (( i=0 ; i < ${#names[@]} ; i++ )) { - aws cloudwatch put-metric-data --namespace ls-tf-tests --metric-name ${names[i]} --value ${values[i]} --timestamp $now - } -} - -publish_metrics diff --git a/bin/run-tests b/bin/run-tests deleted file mode 100755 index aa4532b..0000000 --- a/bin/run-tests +++ /dev/null @@ -1,248 +0,0 @@ -#!/bin/bash - -function usage() { - echo "USAGE" - echo " run-tests [OPTIONS...] 
[TESTS...]" - echo "" - echo "DESCRIPTION" - echo " runs the Terraform test suite" - echo "" - echo "OPTIONS" - echo " -h, --help" - echo " show this message" - echo "" - echo " -e PATTERN|FILE, --exclude PATTERN|FILE" - echo " repeatable option to exclude certain tests or test groups" - echo "" - echo " -i PATTERN|FILE, --include PATTERN|FILE" - echo " repeatable option to include certain tests or test groups" - echo "" - echo " -t, --tests-only" - echo " run the tests against an already running instance of localstack" -} - -export PROJECT_ROOT=$(pwd) # FIXME - -export LST_DIR=${PROJECT_ROOT}/localstack -export BUILD_DIR=${PROJECT_ROOT}/build - -export LST_LOG=${BUILD_DIR}/localstack.log -export TEST_LOG=${BUILD_DIR}/test.log - -export AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION:-us-east-1} -export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-test} -export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-test} - -export INSTALL_LOCAL_MOTO=${INSTALL_LOCAL_MOTO:-true} - -export TF_ACC=1 -export TEST_PARALLEL=${TEST_PARALLEL:-1} - -export TIMEOUT_SECS=600 - -SUPPRESS_BIN_LOGS=${SUPPRESS_BIN_LOGS:-0} - -TEST_BIN=$HOME/.cache/localstack/aws.test -[ ! -f $TEST_BIN ] && { echo "aws.test not installed. please run bin/install-aws-test"; exit 1; } - -function run_localstack() { - cd ${LST_DIR} - source .venv/bin/activate - - if [ ${INSTALL_LOCAL_MOTO} == true ]; then - pip install -e ../moto - fi - - exec bin/localstack start --host -} - -function run_watchdog() { - log="${BUILD_DIR}/tests/$1.log" - - i=0 - while true; do - sleep 2 - i=$((i + 1)) - if [ $i -gt $TIMEOUT_SECS ]; then - # TODO remove duplicate code - pkill -f "aws.test" # FIXME - echo "[ltt.runner] terminated (timeout) $1" | tee -a ${log} - break - fi - if cat ${log} | strings | grep --max-count=1 -q "attempt 1/25" &> /dev/null; then - pkill -f "aws.test" # FIXME - echo "[ltt.runner] terminated $1" | tee -a ${log} - break - fi - done -} - -function forward_lst_log() { - trap 'kill $(jobs -p) 2> /dev/null' EXIT - log=$1 - tail -f -n0 ${LST_LOG} 2> /dev/null | stdbuf -oL awk '{print "[ltt.localstack] " $0}' | tee -a $log > /dev/null -} - -function forward_test_log() { - trap 'kill $(jobs -p) 2> /dev/null' EXIT - log=$1 - tail -f -n0 ${TEST_LOG} 2> /dev/null | stdbuf -oL awk '{print "[ltt.gotest] " $0}' | tee -a $log > /dev/null -} - -function run_test() { - cd terraform-provider-aws/aws - echo "running ${TEST_BIN} -test.v -test.parallel=${TEST_PARALLEL} -test.run $@" - if [ "$SUPPRESS_BIN_LOGS" = "1" ]; then - # can be useful for debugging, but seems to break certain API calls, e.g., large S3 file PUTs - ${TEST_BIN} -test.v -test.parallel=${TEST_PARALLEL} -test.run "$@" 2>&1 | tr -cd "[:print:][\011\012]" - else - ${TEST_BIN} -test.v -test.parallel=${TEST_PARALLEL} -test.run "$@" - fi - result=$? - echo - echo Test terminated with exit code $? 
- # clean up tmp dirs - rm -rf /tmp/plugintest* - return $result -} - -function list_tests() { - # without this function we could not run individual tests, but it's kinda hacky - cmd=$PROJECT_ROOT/bin/list-tests - args_all="" - args="" - - # parse options - while [[ "$#" -gt 0 ]]; do - if [[ $2 =~ "_" ]]; then - args_all="$args_all $1 $2" - else - args="$args $1 $2" - fi - shift - shift - done - - [ -z "$args" ] || $cmd $args - [ -z "$args_all" ] || $cmd --all $args_all -} - -function run_tests() { - tests=$(list_tests "$@") - - if [ -z "${tests}" ]; then - echo "no matching tests" - exit 1 - fi - - # kill jobs once process exits - trap 'kill $(jobs -p) 2> /dev/null' EXIT - - echo "" > ${TEST_LOG} - - tail -F ${TEST_LOG} 2> /dev/null | stdbuf -oL strings | egrep "(=== RUN|=== CONT|=== PAUSE|FAIL|PASS|SKIP)" & - - export TF_LOG=debug - - for t in ${tests}; do - # truncate test log - echo "" > ${TEST_LOG} - - # touch log for test run - log="${BUILD_DIR}/tests/${t}.log" - echo "" > ${log} - - echo "[ltt.runner] starting $t" | tee -a ${log} - - run_watchdog $t & - pid_watchdog=$! - - forward_lst_log $log & - pid_forward_lst_log=$! - - forward_test_log $log & - pid_forward_test_log=$! - - sleep 1 - - if [[ $t =~ "_" ]]; then - arg=$t - else - arg="${t}_" - fi - - run_test "${arg}" 2>&1 | stdbuf -oL tee -a ${TEST_LOG} > /dev/null - - kill $pid_watchdog &> /dev/null - kill $pid_forward_lst_log &> /dev/null - kill $pid_forward_test_log &> /dev/null - - wait $pid_watchdog $pid_forward_test_log $pid_forward_lst_log - echo "[ltt.runner] completed $t" | tee -a ${log} - sleep 1 - - done -} - -function run_lst_and_tests() { - rm -f ${LST_LOG} - # start localstack in the background - DEBUG=1 run_localstack &> ${LST_LOG} & - export lst_pid=$! - - # TODO: subprocesses will stay open if interrupted - - # wait for localstack to be ready - echo "[ltt.runner] waiting on localstack to start on process ${lst_pid}" - - while true; do - sleep 1 - - if `grep --max-count=1 -q "Ready\." ${LST_LOG}`; then - break - fi - if ! ps -p ${lst_pid} > /dev/null; then - echo "[ltt.runner] localstack terminated while waiting" - exit 1 - fi - done - - run_tests "$@" - ret=$? - - # kill the running localstack instance - echo "[ltt.runner] killing localstack ${lst_pid}" - kill ${lst_pid} - echo "[ltt.runner] waiting on localstack to end" - wait ${lst_pid} - - return ${ret} -} - -function main() { - list_test_args="" # test filters (handed to list-tests) - start_lst=true - - while [[ "$#" -gt 0 ]]; do - case $1 in - -h|--help) usage; exit 0 ;; - -i|--include) list_test_args="$list_test_args -i $2"; shift ;; - -e|--exclude) list_test_args="$list_test_args -e $2"; shift ;; - -t|--tests-only) start_lst=false; ;; - *) list_test_args="$list_test_args -i $1" ;; - esac - shift - done - - mkdir -p ${BUILD_DIR}/tests - - if [ $start_lst == false ]; then - run_tests $list_test_args - else - run_lst_and_tests $list_test_args - fi - - exit $? -} - -main "$@" diff --git a/bin/setup-on-ec2 b/bin/setup-on-ec2 deleted file mode 100755 index 818420e..0000000 --- a/bin/setup-on-ec2 +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash - -# ONLY RUN THESE COMMANDS ON FRESH EC2 INSTANCES, NOT IN YOUR OWN SYSTEM! 
- -sudo apt -y update -sudo apt -y install docker.io python3-pip npm -echo 'PATH=$PATH:~/.local/bin' >> ~/.profile - -sudo usermod -aG docker ubuntu - -pip install localstack junit2html awscli awscli-local -sudo npm install -g serve - -# enable swap memory -sudo fallocate -l 2G /swapfile -sudo chmod 600 /swapfile -sudo mkswap /swapfile -sudo swapon /swapfile - -test -e localstack-terraform-test || ( - git clone https://github.com/localstack/localstack-terraform-test - git submodule update --init terraform-provider-aws -) -( - cd localstack-terraform-test - DOWNLOAD_TEST_BIN=1 bin/install-aws-test -) - -# may need to logout, re-login to the machine for groups to become effective - -# STEP 1: -# TODO: set API key below -# export LOCALSTACK_API_KEY=test -# DOCKER_FLAGS=-d localstack start - -# STEP 2: -# start in screen session: -# bin/run-tests -t -i localstack-tests.incl.txt -# to create reports: -# bin/create-report -# bin/create-report-html - -# STEP 3: -# start in separate screen session: -# cd localstack-terraform-test/build -# sudo serve -l tcp://0.0.0.0:80 diff --git a/conftest.py b/conftest.py new file mode 100644 index 0000000..0dfc2d6 --- /dev/null +++ b/conftest.py @@ -0,0 +1,209 @@ +import os +import re +from os.path import dirname, realpath, relpath +from pathlib import Path + +import docker +import pytest +import requests +from requests.adapters import HTTPAdapter, Retry + +from terraform_pytest.utils import execute_command + + +def pytest_addoption(parser): + """Add command line options to pytest""" + parser.addoption( + "--ls-image", + action="store", + default="localstack/localstack:latest", + help="LocalStack Docker image to test against", + ) + parser.addoption( + "--ls-start", action="store_true", default=False, help="Start localstack service" + ) + + +def pytest_collect_file(parent, file_path): + """Collect test files from the test directory""" + if file_path.suffix == ".go" and file_path.name.endswith("_test.go"): + return GoFile.from_parent(parent, path=file_path) + + +class GoFile(pytest.File): + """class for collecting tests from a file.""" + + def collect(self): + """Collect test cases from the test file""" + raw = self.path.open().read() + fa = re.findall(r"^(func (TestAcc.*))\(.*\).*", raw, re.MULTILINE) + for _, name in fa: + yield GoItem.from_parent(self, name=name) + + +class GoItem(pytest.Item): + """class for individual test cases.""" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + def runtest(self): + """Run the test case""" + + tf_root_path = realpath(relpath(self.path).split(os.sep)[0]) + service_path = dirname(Path(*relpath(self.path).split(os.sep)[1:])) + service = service_path.split(os.sep)[-1] + + env = dict(os.environ) + env.update( + { + "TF_ACC": "1", + "AWS_ACCESS_KEY_ID": "test", + "AWS_SECRET_ACCESS_KEY": "test", + "AWS_DEFAULT_REGION": "us-west-1", + "AWS_ALTERNATE_ACCESS_KEY_ID": "test", + "AWS_ALTERNATE_SECRET_ACCESS_KEY": "test", + "AWS_ALTERNATE_REGION": "us-east-2", + "AWS_THIRD_REGION": "eu-west-1", + } + ) + + cmd = [ + f"./test-bin/{service}.test", + "-test.v", + "-test.parallel=1", + "-test.count=1", + "-test.timeout=60m", + f"-test.run={self.name}", + ] + return_code, stdout = execute_command(cmd, env, tf_root_path) + if return_code != 0: + raise GoException(returncode=return_code, stderr=stdout) + + def repr_failure(self, excinfo, **kwargs): + """Called when self.runtest() raises an exception. + + return: a representation of a collection failure. 
+ """ + if isinstance(excinfo.value, GoException): + return "\n".join( + [ + f"Execution failed with return code: {excinfo.value.returncode}", + f"Failure Reason:\n{excinfo.value.stderr}", + ] + ) + + def reportinfo(self): + """Get location information for this item for test reports. + + return: a tuple with three elements: + - The path of the test + - The line number of the test + - A name of the test to be shown in reports + """ + return self.path, 0, f"Test Case: {self.name}" + + +class GoException(Exception): + """Go test exception - raised when test cases failed""" + + def __init__(self, returncode, stderr): + self.returncode = returncode + self.stderr = stderr + + +def _docker_service_health(client): + """Check if the docker service is healthy""" + if not client.ping(): + print("\nPlease start docker daemon and try again") + raise Exception("Docker is not running") + + +def _start_docker_container(client, config, localstack_image): + """Start the docker container""" + env_vars = ["DEBUG=1", "PROVIDER_OVERRIDE_S3=asf", "FAIL_FAST=1"] + port_mappings = { + "53/tcp": ("127.0.0.1", 53), + "53/udp": ("127.0.0.1", 53), + "443": ("127.0.0.1", 443), + "4566": ("127.0.0.1", 4566), + "4571": ("127.0.0.1", 4571), + } + volumes = ["/var/run/docker.sock:/var/run/docker.sock"] + localstack_container = client.containers.run( + image=localstack_image, + detach=True, + ports=port_mappings, + name="localstack_main", + volumes=volumes, + auto_remove=True, + environment=env_vars, + ) + setattr(config, "localstack_container_id", localstack_container.id) + + +def _stop_docker_container(client, config): + """Stop the docker container""" + client.containers.get(getattr(config, "localstack_container_id")).stop() + print("LocalStack is stopped") + + +def _localstack_health_check(): + """Check if the localstack service is healthy""" + localstack_health_url = "http://localhost:4566/health" + session = requests.Session() + retry = Retry(connect=3, backoff_factor=2) + adapter = HTTPAdapter(max_retries=retry) + session.mount("http://", adapter) + session.mount("https://", adapter) + session.get(localstack_health_url) + session.close() + + +def _pull_docker_image(client, localstack_image): + """Pull the docker image""" + docker_image_list = client.images.list(name=localstack_image) + if len(docker_image_list) == 0: + print(f"Pulling image {localstack_image}") + client.images.pull(localstack_image) + docker_image_list = client.images.list(name=localstack_image) + print(f"Using LocalStack image: {docker_image_list[0].id}") + + +def pytest_sessionstart(session): + """Called after the Session object has been created and before performing collection and entering the run test loop.""" + is_collect_only = session.config.getoption(name="--collect-only") + is_localstack_start = session.config.getoption(name="--ls-start") + localstack_image = session.config.getoption(name="--ls-image") + + if getattr(session.config, "workerinput", None) is not None: + return + + if not is_collect_only and is_localstack_start: + print("\nStarting LocalStack...") + + client = docker.from_env() + _docker_service_health(client) + _pull_docker_image(client, localstack_image) + _start_docker_container(client, session.config, localstack_image) + _localstack_health_check() + client.close() + + print("LocalStack is ready...") + + +def pytest_sessionfinish(session, exitstatus): + """Called after whole test run finished, right before returning the exit status to the system.""" + is_collect_only = session.config.getoption(name="--collect-only") + 
is_localstack_start = session.config.getoption(name="--ls-start") + + # Only run on the master node + if getattr(session.config, "workerinput", None) is not None: + return + + if not is_collect_only and is_localstack_start: + print("\nStopping LocalStack...") + client = docker.from_env() + _stop_docker_container(client, session.config) + client.close() diff --git a/etc/0001-add-simple-hardcoded-configuration-for-running-tests.patch b/etc/0001-add-simple-hardcoded-configuration-for-running-tests.patch deleted file mode 100644 index a23c1c1..0000000 --- a/etc/0001-add-simple-hardcoded-configuration-for-running-tests.patch +++ /dev/null @@ -1,74 +0,0 @@ -From 0111e5997250b4d1ac56a218147de086c8456c08 Mon Sep 17 00:00:00 2001 -From: Waldemar Hummer -Date: Sun, 9 May 2021 15:34:37 +0200 -Subject: [PATCH] add simple/hardcoded configuration for running tests against - LocalStack - ---- - aws/config.go | 4 ++++ - aws/provider.go | 17 ++++++++++++++++- - 2 files changed, 20 insertions(+), 1 deletion(-) - -diff --git a/aws/config.go b/aws/config.go -index 20e04deb9..fd1857aa3 100644 ---- a/aws/config.go -+++ b/aws/config.go -@@ -456,6 +456,10 @@ func (c *Config) Client() (interface{}, error) { - dnsSuffix = p.DNSSuffix() - } - -+ // XXX: added by whummer -+ // insert custom endpoints -+ c.Endpoints = localEndpoints -+ - client := &AWSClient{ - accessanalyzerconn: accessanalyzer.New(sess.Copy(&aws.Config{Endpoint: aws.String(c.Endpoints["accessanalyzer"])})), - accountid: accountID, -diff --git a/aws/provider.go b/aws/provider.go -index 366fc2178..ee5654ead 100644 ---- a/aws/provider.go -+++ b/aws/provider.go -@@ -1169,6 +1169,8 @@ func Provider() *schema.Provider { - - var descriptions map[string]string - var endpointServiceNames []string -+const localEndpoint = "http://localhost:4566" -+var localEndpoints map[string]string - - func init() { - descriptions = map[string]string{ -@@ -1376,9 +1378,21 @@ func init() { - "workspaces", - "xray", - } -+ -+ // XXX: added by whummer -+ localEndpoints = map[string]string{} -+ for _, name := range endpointServiceNames { -+ if name == "s3" { -+ localEndpoints[name] = "http://s3.localhost.localstack.cloud:4566" -+ } else { -+ localEndpoints[name] = localEndpoint -+ } -+ } -+ - } - - func providerConfigure(d *schema.ResourceData, terraformVersion string) (interface{}, error) { -+ - config := Config{ - AccessKey: d.Get("access_key").(string), - SecretKey: d.Get("secret_key").(string), -@@ -1387,7 +1401,8 @@ func providerConfigure(d *schema.ResourceData, terraformVersion string) (interfa - Region: d.Get("region").(string), - CredsFilename: d.Get("shared_credentials_file").(string), - DefaultTagsConfig: expandProviderDefaultTags(d.Get("default_tags").([]interface{})), -- Endpoints: make(map[string]string), -+ // Endpoints: make(map[string]string), -+ Endpoints: localEndpoints, - MaxRetries: d.Get("max_retries").(int), - IgnoreTagsConfig: expandProviderIgnoreTags(d.Get("ignore_tags").([]interface{})), - Insecure: d.Get("insecure").(bool), --- -2.25.1 - diff --git a/etc/0002-route53-reduce-sync-time.patch b/etc/0002-route53-reduce-sync-time.patch deleted file mode 100644 index 1df26a1..0000000 --- a/etc/0002-route53-reduce-sync-time.patch +++ /dev/null @@ -1,27 +0,0 @@ -diff --git a/aws/resource_aws_route53_record.go b/aws/resource_aws_route53_record.go -index eeca299609..9fb7deb45e 100644 ---- a/aws/resource_aws_route53_record.go -+++ b/aws/resource_aws_route53_record.go -@@ -469,7 +469,7 @@ func resourceAwsRoute53RecordCreate(d *schema.ResourceData, meta interface{}) 
er - - func changeRoute53RecordSet(conn *route53.Route53, input *route53.ChangeResourceRecordSetsInput) (interface{}, error) { - var out *route53.ChangeResourceRecordSetsOutput -- err := resource.Retry(1*time.Minute, func() *resource.RetryError { -+ err := resource.Retry(5*time.Second, func() *resource.RetryError { - var err error - out, err = conn.ChangeResourceRecordSets(input) - if isAWSErr(err, route53.ErrCodeNoSuchHostedZone, "") { -@@ -494,10 +494,10 @@ func waitForRoute53RecordSetToSync(conn *route53.Route53, requestId string) erro - wait := resource.StateChangeConf{ - Pending: []string{route53.ChangeStatusPending}, - Target: []string{route53.ChangeStatusInsync}, -- Delay: time.Duration(rand.Int63n(20)+10) * time.Second, -+ Delay: time.Duration(rand.Int63n(2)+1) * time.Second, - MinTimeout: 5 * time.Second, -- PollInterval: 20 * time.Second, -- Timeout: 30 * time.Minute, -+ PollInterval: 2 * time.Second, -+ Timeout: 3 * time.Minute, - Refresh: func() (result interface{}, state string, err error) { - changeRequest := &route53.GetChangeInput{ - Id: aws.String(requestId), diff --git a/etc/001-hardcode-endpoint.patch b/etc/001-hardcode-endpoint.patch new file mode 100644 index 0000000..dd3f9ed --- /dev/null +++ b/etc/001-hardcode-endpoint.patch @@ -0,0 +1,27 @@ +diff --git a/internal/conns/config.go b/internal/conns/config.go +index 12240109bb..3940e4ce73 100644 +--- a/internal/conns/config.go ++++ b/internal/conns/config.go +@@ -77,8 +77,22 @@ type Config struct { + UseFIPSEndpoint bool + } + ++func GetLocalEndpoints() map[string]string { ++ const localEndpoint = "http://localhost:4566" ++ var localEndpoints = map[string]string{} ++ for _, name := range names.Aliases() { ++ if name == "s3" { ++ localEndpoints[name] = "http://s3.localhost.localstack.cloud:4566" ++ } else { ++ localEndpoints[name] = localEndpoint ++ } ++ } ++ return localEndpoints ++} ++ + // ConfigureProvider configures the provided provider Meta (instance data). 
+ func (c *Config) ConfigureProvider(ctx context.Context, client *AWSClient) (*AWSClient, diag.Diagnostics) { ++ c.Endpoints = GetLocalEndpoints() + awsbaseConfig := awsbase.Config{ + AccessKey: c.AccessKey, + APNInfo: StdUserAgentProducts(c.TerraformVersion), diff --git a/localstack b/localstack deleted file mode 160000 index 3a32202..0000000 --- a/localstack +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 3a3220268b962a97eeeebff44a3bb5383b1dbfc7 diff --git a/localstack-tests.excl.txt b/localstack-tests.excl.txt deleted file mode 100644 index 2ed9db4..0000000 --- a/localstack-tests.excl.txt +++ /dev/null @@ -1,6 +0,0 @@ -TestAccAWSEc2Fleet -TestAccAWSEc2TransitGateway -TestAccAWSEc2TransitGatewayVpcAttachment -TestAccAWSKinesisAnalytics -TestAccAWSSSMMaintenanceWindow -TestAccAWSSSMMaintenanceWindowTask diff --git a/localstack-tests.incl.txt b/localstack-tests.incl.txt deleted file mode 100644 index 5035061..0000000 --- a/localstack-tests.incl.txt +++ /dev/null @@ -1,32 +0,0 @@ -TestAccAWSAcm -TestAccAWSAmplify -# TestAccAWSAPIGateway -TestAccAWSAppsync -TestAccAWSCloudformation -TestAccAWSCloudTrail -TestAccAwsCloudWatch -TestAccAwsCloudWatch -TestAccAWSCloudWatch -TestAccAWSCloudwatch -TestAccAWSCognito -TestAccAWSDBCluster -TestAccAWSDynamoDb -TestAccAWSEc2 -# TestAccAWSGlue -# TestAccAWSIAM -TestAccAWSKinesis -TestAccAWSKms -TestAccAWSLambda -TestAccAWSRedshift -TestAccAWSResourceGroup -TestAccAWSRoute53 -TestAccAWSS3 -TestAccAwsSecrets -TestAccAWSSES -TestAccAWSSns -TestAccAWSSNS -TestAccAWSSQS -TestAccAWSSSM -TestAccAWSSsm -TestAccAWSStepFunctions -TestAccAWSSwfDomain diff --git a/moto b/moto deleted file mode 160000 index 2a44d6e..0000000 --- a/moto +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2a44d6e97ce0f0d199cb54a6e967776e14e85069 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..968281a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,15 @@ +# project configuration + +[tool.black] +line_length = 100 +extend_exclude = '(terraform-provider-aws)' + +[tool.isort] +profile = 'black' +extend_skip = ["terraform-provider-aws"] +line_length = 100 + +[tool.pytest.ini_options] +testpaths = [ + "terraform-provider-aws/internal/service/", +] diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..5de6103 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,8 @@ +click>=8.1.3 +pytest>=7.2.0 +docker>=6.0.1 +requests>=2.28.2 +black>=22.1 +isort>=5.10 +pytest-xdist>=3.1.0 +pre-commit>=2.21.0 diff --git a/terraform_pytest/get-services.py b/terraform_pytest/get-services.py new file mode 100644 index 0000000..a702011 --- /dev/null +++ b/terraform_pytest/get-services.py @@ -0,0 +1,15 @@ +import json +import sys + +from terraform_pytest.utils import get_services + +services = [] + +if len(sys.argv) > 1: + service = sys.argv[1] + services = get_services(service) + print(json.dumps(services)) + exit(0) +else: + print("No service provided") + exit(1) diff --git a/terraform_pytest/main.py b/terraform_pytest/main.py new file mode 100644 index 0000000..f67e122 --- /dev/null +++ b/terraform_pytest/main.py @@ -0,0 +1,51 @@ +from os.path import realpath +from timeit import default_timer as timer + +import click + +from terraform_pytest.utils import TF_REPO_NAME, build_test_bin, get_services, patch_repo + + +@click.group(name="pytest-golang", help="Golang Test Runner for localstack") +def cli(): + pass + + +@click.command(name="patch", help="Patch the golang test runner") +def patch(): + patch_repo() + + +@click.command(name="build", help="Build binary for 
testing") +@click.option( + "--service", + "-s", + default=None, + required=True, + help="""Service to build; use "ls-all", "ls-community", "ls-pro" to build all services, example: +--service=ls-all; --service=ec2; --service=ec2,iam""", +) +@click.option("--force-build", "-f", is_flag=True, default=False, help="Force rebuilds binary") +def build(service, force_build): + services = get_services(service) + + for service in services: + print(f"Building {service}...") + try: + start = timer() + build_test_bin( + service=service, tf_root_path=realpath(TF_REPO_NAME), force_build=force_build + ) + end = timer() + print(f"Build {service} in {end - start} seconds") + except KeyboardInterrupt: + print("Interrupted") + return + except Exception as e: + print(f"Failed to build binary for {service}: {e}") + + +if __name__ == "__main__": + cli.add_command(build) + cli.add_command(patch) + cli() diff --git a/terraform_pytest/utils.py b/terraform_pytest/utils.py new file mode 100644 index 0000000..016a835 --- /dev/null +++ b/terraform_pytest/utils.py @@ -0,0 +1,253 @@ +import signal +from os import chdir, chmod, getcwd, listdir, system +from os.path import exists, realpath +from uuid import uuid4 + +TF_REPO_NAME = "terraform-provider-aws" + +# absolute path to the terraform repo +TF_REPO_PATH = f"{realpath(TF_REPO_NAME)}" + +# list of patch files to apply to the terraform repo +TF_REPO_PATCH_FILES = ["etc/001-hardcode-endpoint.patch"] + +# folder name where the testing binaries are stored +TF_TEST_BINARY_FOLDER = "test-bin" +TF_REPO_SERVICE_FOLDER = "./internal/service" + +# list of services that are supported by the localstack community edition +LS_COMMUNITY_SERVICES = [ + "acm", + "apigateway", + "lambda", + "cloudformation", + "cloudwatch", + "configservice", + "dynamodb", + "ec2", + "elasticsearch", + "events", + "firehose", + "iam", + "kinesis", + "kms", + "logs", + "opensearch", + "redshift", + "resourcegroups", + "resourcegroupstaggingapi", + "route53", + "route53resolver", + "s3", + "s3control", + "secretsmanager", + "ses", + "sns", + "sqs", + "ssm", + "sts", + "swf", + "transcribe", +] +# list of services that are supported by the localstack pro edition +LS_PRO_SERVICES = [ + "amplify", + "apigateway", + "apigatewayv2", + "appconfig", + "appautoscaling", + "appsync", + "athena", + "autoscaling", + "backup", + "batch", + "cloudformation", + "cloudfront", + "cloudtrail", + "codecommit", + "cognitoidp", + "cognitoidentity", + "docdb", + "dynamodb", + "ec2", + "ecr", + "ecs", + "efs", + "eks", + "elasticache", + "elasticbeanstalk", + "elb", + "elbv2", + "emr", + "events", + "fis", + "glacier", + "glue", + "iam", + "iot", + "iotanalytics", + "kafka", + "kinesisanalytics", + "kms", + "lakeformation", + "lambda", + "logs", + "mediastore", + "mq", + "mwaa", + "neptune", + "organizations", + "qldb", + "rds", + "redshift", + "route53", + "s3", + "sagemaker", + "secretsmanager", + "serverlessrepo", + "ses", + "sns", + "sqs", + "ssm", + "sts", +] + +# list of services that doesn't contain any tests +BLACKLISTED_SERVICES = ["controltower", "greengrass"] + + +def execute_command(cmd, env=None, cwd=None): + """Execute a command and return the return code. 
+ + :param list(str) cmd: + command to execute + :param dict env: + environment variables + :param str cwd: + working directory + """ + _lwd = getcwd() + if isinstance(cmd, list): + cmd = " ".join(cmd) + else: + raise Exception("Please provide command as list(str)") + if cwd: + chdir(cwd) + if env: + _env = " ".join([f'{k}="{str(v)}"' for k, v in env.items()]) + cmd = f"{_env} {cmd}" + log_file: str = "/tmp/%s" % uuid4().hex + _err = system(f"{cmd} > {log_file} 2>&1") + if _err == signal.SIGINT: + print("SIGNINT is caught") + raise KeyboardInterrupt + _out = open(log_file, "r").read() + chdir(_lwd) + return _err, _out + + +def build_test_bin(service, tf_root_path, force_build=False): + """Build the test binary for a given service. + + :param str service: + service name + :param str tf_root_path: + path to the terraform repo + :param bool force_build: + force build the binary + + :return: int, str or None + return code and stdout + """ + _test_bin_abs_path = f"{TF_REPO_PATH}/{TF_TEST_BINARY_FOLDER}/{service}.test" + _tf_repo_service_folder = f"{TF_REPO_SERVICE_FOLDER}/{service}" + + if exists(_test_bin_abs_path) and not force_build: + return None + + cmd = ["go", "mod", "tidy"] + return_code, stdout = execute_command(cmd, cwd=tf_root_path) + if return_code != 0: + raise Exception(f"Error while building test binary for {service}\ntraceback: {stdout}") + + cmd = ["go", "mod", "vendor"] + return_code, stdout = execute_command(cmd, cwd=tf_root_path) + if return_code != 0: + raise Exception(f"Error while building test binary for {service}\ntraceback: {stdout}") + + cmd = ["go", "mod", "tidy"] + return_code, stdout = execute_command(cmd, cwd=tf_root_path) + if return_code != 0: + raise Exception(f"Error while building test binary for {service}\ntraceback: {stdout}") + + cmd = ["go", "mod", "vendor"] + return_code, stdout = execute_command(cmd, cwd=tf_root_path) + if return_code != 0: + raise Exception(f"Error while building test binary for {service}\ntraceback: {stdout}") + + cmd = [ + "go", + "test", + "-c", + _tf_repo_service_folder, + "-o", + _test_bin_abs_path, + ] + return_code, stdout = execute_command(cmd, cwd=tf_root_path) + if return_code != 0: + raise Exception(f"Error while building test binary for {service}\ntraceback: {stdout}") + + if exists(_test_bin_abs_path): + chmod(_test_bin_abs_path, 0o755) + + return return_code, stdout + + +def get_services(service): + """Get the list of services to test. + + :param: str service: + service names in comma separated format + example: ec2,lambda,iam or ls-community or ls-pro or ls-all + + :return: list: + list of services + """ + result = [] + if service == "ls-community": + services = LS_COMMUNITY_SERVICES + elif service == "ls-pro": + services = LS_PRO_SERVICES + elif service == "ls-all": + services = LS_COMMUNITY_SERVICES + LS_PRO_SERVICES + else: + if "," in service: + services = service.split(",") + services = [s for s in services if s] + else: + services = [service] + for s in services: + if s in LS_COMMUNITY_SERVICES + LS_PRO_SERVICES and s not in BLACKLISTED_SERVICES: + result.append(s) + else: + print(f"Service {s} is not supported...\nPlease check the service name") + return list(set(result)) + + +def patch_repo(): + """Patches terraform repo. 
+ + return: None + """ + print(f"Patching {TF_REPO_NAME}...") + for patch_file in TF_REPO_PATCH_FILES: + cmd = [ + "git", + "apply", + f"{realpath(patch_file)}", + ] + return_code, stdout = execute_command(cmd, cwd=realpath(TF_REPO_NAME)) + if return_code != 0: + print("----- error while patching repo -----") + if stdout: + print(f"stdout: {stdout}")
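The diff above wires the Go acceptance tests into pytest: conftest.py collects `TestAcc*` functions from `*_test.go` files and, for each selected case, runs the prebuilt `test-bin/<service>.test` binary with a fixed set of AWS/Terraform environment variables. As a rough illustration (not part of this change), the sketch below approximates what a single test invocation boils down to; the service (`s3`), binary path, and test case name are example values, it assumes the binary was already built via `python -m terraform_pytest.main build -s s3` and that LocalStack is reachable on localhost:4566, and the actual runner goes through `terraform_pytest.utils.execute_command` rather than `subprocess`.

```python
# Hypothetical standalone reproduction of a single GoItem.runtest invocation,
# useful for debugging a test binary outside of pytest.
import os
import subprocess

env = dict(os.environ)
env.update(
    {
        "TF_ACC": "1",
        "AWS_ACCESS_KEY_ID": "test",
        "AWS_SECRET_ACCESS_KEY": "test",
        "AWS_DEFAULT_REGION": "us-west-1",
        "AWS_ALTERNATE_ACCESS_KEY_ID": "test",
        "AWS_ALTERNATE_SECRET_ACCESS_KEY": "test",
        "AWS_ALTERNATE_REGION": "us-east-2",
        "AWS_THIRD_REGION": "eu-west-1",
    }
)

proc = subprocess.run(
    [
        "./test-bin/s3.test",              # binary produced by the build step
        "-test.v",
        "-test.parallel=1",
        "-test.count=1",
        "-test.timeout=60m",
        "-test.run=TestAccS3Bucket_basic",  # example test case name
    ],
    cwd="terraform-provider-aws",           # run from the provider repo root
    env=env,
    capture_output=True,
    text=True,
)
print(proc.stdout)
print(f"exit code: {proc.returncode}")
```

When `--ls-start` is passed, the plugin additionally pulls the chosen image and starts a `localstack_main` container before the run (see `_start_docker_container` in conftest.py); the sketch assumes an instance is already running.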