-
Notifications
You must be signed in to change notification settings - Fork 58
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing 2 changed files with 351 additions and 2 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -40,8 +40,6 @@ build | |
eggs | ||
.eggs | ||
parts | ||
bin | ||
!asdf/json/bin | ||
var | ||
sdist | ||
develop-eggs | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,351 @@ | ||
#! /usr/bin/env python3 | ||
from pathlib import Path | ||
from urllib.parse import urljoin | ||
import argparse | ||
import json | ||
import os | ||
import random | ||
import shutil | ||
import sys | ||
import textwrap | ||
import unittest | ||
import warnings | ||
|
||
try:
    import asdf._jsonschema.validators
except ImportError:
    # Validation is optional: without the library the sanity checks that
    # need a validator are skipped, so record its absence explicitly.
    jsonschema = None
    VALIDATORS = {}
else:
    jsonschema = asdf._jsonschema
    _validators = asdf._jsonschema.validators
    # Map each suite directory name to the validator class for that draft.
    VALIDATORS = {
        "draft3": _validators.Draft3Validator,
        "draft4": _validators.Draft4Validator,
        "draft6": _validators.Draft6Validator,
        "draft7": _validators.Draft7Validator,
        "draft2019-09": _validators.Draft201909Validator,
        "draft2020-12": _validators.Draft202012Validator,
        "latest": _validators.Draft202012Validator,
    }
|
||
|
||
# Repository layout: this script lives one directory below the repo root.
ROOT_DIR = Path(__file__).parent.parent
SUITE_ROOT_DIR = ROOT_DIR / "tests"

# Schemas referenced by remote-ref tests live in remotes/ and are assumed
# to be served at REMOTES_BASE_URL (see the `serve` command below).
REMOTES_DIR = ROOT_DIR / "remotes"
REMOTES_BASE_URL = "http://localhost:1234/"

# Meta-schema that every test file in the suite must itself validate against.
TESTSUITE_SCHEMA = json.loads((ROOT_DIR / "test-schema.json").read_text())
|
||
|
||
def files(paths):
    """
    Yield ``(path, parsed_contents)`` for each test file in *paths*.
    """
    for file_path in paths:
        contents = file_path.read_text()
        yield file_path, json.loads(contents)
|
||
|
||
def cases(paths):
    """
    Yield every test case from each file in the provided paths.
    """
    for _path, contents in files(paths):
        for case in contents:
            yield case
|
||
|
||
def tests(paths):
    """
    Yield each individual test within all cases in the provided paths.

    Each yielded test dict is augmented in place with its enclosing
    case's schema under the ``"schema"`` key.
    """
    for case in cases(paths):
        schema = case["schema"]
        for test in case["tests"]:
            test["schema"] = schema
            yield test
|
||
|
||
def collect(root_dir):
    """
    Return an iterator of all test file paths under *root_dir*, recursively.
    """
    # rglob("*.json") is equivalent to glob("**/*.json").
    return root_dir.rglob("*.json")
|
||
|
||
def url_for_path(path):
    """
    Return the assumed remote URL for a file in the remotes/ directory.

    Tests in the refRemote.json file reference this URL, and assume the
    corresponding contents are available at the URL.
    """
    # as_posix() yields forward-slash separators on every platform,
    # including Windows.
    relative = path.relative_to(REMOTES_DIR)
    return urljoin(REMOTES_BASE_URL, relative.as_posix())
|
||
|
||
class SanityTests(unittest.TestCase):
    """
    Sanity checks on the suite's own files: valid JSON, description style,
    uniqueness, and (when a validator is available) schema validity.
    """

    @classmethod
    def setUpClass(cls):
        # Collect the file lists once for every test; fail loudly if the
        # expected directory layout is missing.
        print(f"Looking for tests in {SUITE_ROOT_DIR}")
        print(f"Looking for remotes in {REMOTES_DIR}")

        cls.test_files = list(collect(SUITE_ROOT_DIR))
        assert cls.test_files, "Didn't find the test files!"
        print(f"Found {len(cls.test_files)} test files")

        cls.remote_files = list(collect(REMOTES_DIR))
        assert cls.remote_files, "Didn't find the remote files!"
        print(f"Found {len(cls.remote_files)} remote files")

    def assertUnique(self, iterable):
        """
        Assert that the elements of an iterable are unique.
        """
        seen, duplicated = set(), set()
        for each in iterable:
            if each in seen:
                duplicated.add(each)
            seen.add(each)
        self.assertFalse(duplicated, "Elements are not unique.")

    def assertFollowsDescriptionStyle(self, description):
        """
        Instead of saying "test that X frobs" or "X should frob" use "X frobs".

        See e.g. https://jml.io/pages/test-docstrings.html

        This test isn't comprehensive (it doesn't catch all the extra
        verbiage there), but it's just to catch whatever it manages to
        cover.
        """
        message = (
            "In descriptions, don't say 'Test that X frobs' or 'X should "
            "frob' or 'X should be valid'. Just say 'X frobs' or 'X is "
            "valid'. It's shorter, and the test suite is entirely about "
            "what *should* be already. "
            "See https://jml.io/pages/test-docstrings.html for help."
        )
        self.assertNotRegex(description, r"\bshould\b", message)
        self.assertNotRegex(description, r"(?i)\btest(s)? that\b", message)

    def test_all_test_files_are_valid_json(self):
        """
        All test files contain valid JSON.
        """
        for path in self.test_files:
            with self.subTest(path=path):
                try:
                    json.loads(path.read_text())
                except ValueError as error:
                    self.fail(f"{path} contains invalid JSON ({error})")

    def test_all_remote_files_are_valid_json(self):
        """
        All remote files contain valid JSON.
        """
        for path in self.remote_files:
            with self.subTest(path=path):
                try:
                    json.loads(path.read_text())
                except ValueError as error:
                    self.fail(f"{path} contains invalid JSON ({error})")

    def test_all_case_descriptions_have_reasonable_length(self):
        """
        All cases have reasonably long descriptions.
        """
        for case in cases(self.test_files):
            with self.subTest(description=case["description"]):
                self.assertLess(
                    len(case["description"]),
                    150,
                    "Description is too long (keep it to less than 150 chars)."
                )

    def test_all_test_descriptions_have_reasonable_length(self):
        """
        All tests have reasonably long descriptions.
        """
        # Start enumerate at 1 so the summary reports the true number of
        # tests (the default start of 0 under-counted by one), and
        # initialize count so an empty suite doesn't raise NameError.
        count = 0
        for count, test in enumerate(tests(self.test_files), start=1):
            with self.subTest(description=test["description"]):
                self.assertLess(
                    len(test["description"]),
                    70,
                    "Description is too long (keep it to less than 70 chars)."
                )
        print(f"Found {count} tests.")

    def test_all_case_descriptions_are_unique(self):
        """
        All cases have unique descriptions in their files.
        """
        # Loop variable renamed so it doesn't shadow the module-level
        # cases() helper.
        for path, cases_in_file in files(self.test_files):
            with self.subTest(path=path):
                self.assertUnique(
                    case["description"] for case in cases_in_file
                )

    def test_all_test_descriptions_are_unique(self):
        """
        All test cases have unique test descriptions in their tests.
        """
        # count handling mirrors the description-length test above.
        count = 0
        for count, case in enumerate(cases(self.test_files), start=1):
            with self.subTest(description=case["description"]):
                self.assertUnique(
                    test["description"] for test in case["tests"]
                )
        print(f"Found {count} test cases.")

    def test_case_descriptions_do_not_use_modal_verbs(self):
        """
        Case descriptions follow the declarative style, not modal verbs.
        """
        for case in cases(self.test_files):
            with self.subTest(description=case["description"]):
                self.assertFollowsDescriptionStyle(case["description"])

    def test_test_descriptions_do_not_use_modal_verbs(self):
        """
        Test descriptions follow the declarative style, not modal verbs.
        """
        for test in tests(self.test_files):
            with self.subTest(description=test["description"]):
                self.assertFollowsDescriptionStyle(test["description"])

    @unittest.skipIf(jsonschema is None, "Validation library not present!")
    def test_all_schemas_are_valid(self):
        """
        All schemas are valid under their metaschemas.
        """
        for version in SUITE_ROOT_DIR.iterdir():
            if not version.is_dir():
                continue

            Validator = VALIDATORS.get(version.name)
            if Validator is not None:
                test_files = collect(version)
                for case in cases(test_files):
                    with self.subTest(case=case):
                        try:
                            Validator.check_schema(case["schema"])
                        except asdf._jsonschema.SchemaError:
                            # Message previously read "schema.See" — space
                            # restored.
                            self.fail(
                                "Found an invalid schema. "
                                "See the traceback for details on why."
                            )
            else:
                warnings.warn(f"No schema validator for {version.name}")

    @unittest.skipIf(jsonschema is None, "Validation library not present!")
    def test_suites_are_valid(self):
        """
        All test files are valid under test-schema.json.
        """
        Validator = asdf._jsonschema.validators.validator_for(TESTSUITE_SCHEMA)
        validator = Validator(TESTSUITE_SCHEMA)
        # Loop variable renamed so it doesn't shadow the module-level
        # cases() helper.
        for path, contents in files(self.test_files):
            with self.subTest(path=path):
                try:
                    validator.validate(contents)
                except asdf._jsonschema.ValidationError as error:
                    self.fail(str(error))
|
||
|
||
def main(arguments):
    """
    Dispatch the parsed command-line *arguments* to the matching utility.

    Commands: ``check`` (run the sanity tests, exit 1 on failure),
    ``flatten`` (dump one version's cases as a single JSON array),
    ``remotes`` (dump the remote-ref URL -> schema mapping as JSON),
    ``dump_remotes`` (copy the remotes/ tree to a directory), and
    ``serve`` (serve remotes/ over HTTP via Flask).
    """
    if arguments.command == "check":
        suite = unittest.TestLoader().loadTestsFromTestCase(SanityTests)
        result = unittest.TextTestRunner().run(suite)
        # Exit status 0 on success, 1 on any test failure.
        sys.exit(not result.wasSuccessful())
    elif arguments.command == "flatten":
        # argparse hands us a plain string, but collect() needs a Path
        # (str has no .glob / .rglob), so convert before collecting.
        selected_cases = list(cases(collect(Path(arguments.version))))

        if arguments.randomize:
            random.shuffle(selected_cases)

        json.dump(selected_cases, sys.stdout, indent=4, sort_keys=True)
    elif arguments.command == "remotes":
        remotes = {
            url_for_path(path): json.loads(path.read_text())
            for path in collect(REMOTES_DIR)
        }
        json.dump(remotes, sys.stdout, indent=4, sort_keys=True)
    elif arguments.command == "dump_remotes":
        if arguments.update:
            # NOTE(review): with the default --out-dir (REMOTES_DIR itself),
            # --update would delete the source tree before copying it —
            # confirm callers always pass a distinct --out-dir here.
            shutil.rmtree(arguments.out_dir, ignore_errors=True)

        try:
            shutil.copytree(REMOTES_DIR, arguments.out_dir)
        except FileExistsError:
            print(f"{arguments.out_dir} already exists. Aborting.")
            sys.exit(1)
    elif arguments.command == "serve":
        try:
            import flask
        except ImportError:
            print(textwrap.dedent("""
                The Flask library is required to serve the remote schemas.

                You can install it by running `pip install Flask`.

                Alternatively, see the `jsonschema_suite remotes` or
                `jsonschema_suite dump_remotes` commands to create static files
                that can be served with your own web server.
            """.strip("\n")))
            sys.exit(1)

        app = flask.Flask(__name__)

        @app.route("/<path:path>")
        def serve_path(path):
            # Serve files straight out of remotes/ at the URL layout the
            # refRemote tests expect.
            return flask.send_from_directory(REMOTES_DIR, path)

        app.run(port=1234)
|
||
|
||
# Command-line interface: one subcommand per utility, selection required.
parser = argparse.ArgumentParser(
    description="JSON Schema Test Suite utilities",
)
subparsers = parser.add_subparsers(
    help="utility commands", dest="command", metavar="COMMAND",
)
subparsers.required = True

# `check` takes no further arguments.
check = subparsers.add_parser("check", help="Sanity check the test suite.")

flatten = subparsers.add_parser(
    "flatten",
    help="Output a flattened file containing a selected version's test cases."
)
flatten.add_argument(
    "--randomize",
    action="store_true",
    help="Randomize the order of the outputted cases.",
)
flatten.add_argument(
    "version",
    help="The directory containing the version to output",
)

remotes = subparsers.add_parser(
    "remotes",
    help="Output the expected URLs and their associated schemas for remote "
         "ref tests as a JSON object."
)

dump_remotes = subparsers.add_parser(
    "dump_remotes", help="Dump the remote ref schemas into a file tree",
)
dump_remotes.add_argument(
    "--update",
    action="store_true",
    help="Update the remotes in an existing directory.",
)
dump_remotes.add_argument(
    "--out-dir",
    default=REMOTES_DIR,
    type=os.path.abspath,
    help="The output directory to create as the root of the file tree",
)

serve = subparsers.add_parser(
    "serve",
    help="Start a webserver to serve schemas used by remote ref tests."
)

if __name__ == "__main__":
    main(parser.parse_args())