initial run at creating output tests #619

Merged
merged 21 commits on Dec 13, 2022

Commits (21)
e9f5400
initial run at creating output tests
gregsdennis Nov 23, 2022
87c184d
add .editorconfig; rearrange folders
gregsdennis Nov 23, 2022
fe25ba9
updated readme; fixed $schema typo; tests for 2020-12
gregsdennis Nov 23, 2022
147df48
add output schemas
gregsdennis Nov 23, 2022
efeee1d
update tests to reference output schemas
gregsdennis Nov 23, 2022
46bc7ac
add readme note about output-schema.json files
gregsdennis Nov 23, 2022
daf42a7
attempt to update ci for output tests
gregsdennis Nov 24, 2022
d263bf0
add blank lines at the end of all the files
gregsdennis Nov 28, 2022
691d039
update $dynamic* value in schemas
gregsdennis Nov 28, 2022
b40a6ca
Update bin/jsonschema_suite
gregsdennis Nov 28, 2022
b126759
Update bin/jsonschema_suite
gregsdennis Nov 28, 2022
dc6e820
Merge remote-tracking branch 'origin/main' into gregsdennis/output-tests
Julian Nov 29, 2022
3413863
Inline the relevant parts of the test schema to output tests.
Julian Nov 29, 2022
a8b2805
Minor style tweaks.
Julian Nov 29, 2022
f5197e0
Fix the output check to ignore output-schema.json.
Julian Nov 29, 2022
930e87e
add clarification on no changes between 2019 and 2020
gregsdennis Nov 30, 2022
1a860cf
added $id to all output schemas; reindexed test cases to 0 (instead o…
gregsdennis Dec 2, 2022
c883552
absoluteKeywordLocation is not required when there is no `$ref`
gregsdennis Dec 2, 2022
8ee4323
add some more detail in readme; add redundant keyword to some tests
gregsdennis Dec 4, 2022
c264401
Blacked.
Julian Nov 29, 2022
b538fe7
Bump the validator version used for suite sanity checks.
Julian Dec 6, 2022

Files changed
1 change: 1 addition & 0 deletions .editorconfig
@@ -0,0 +1 @@
insert_final_newline = true
110 changes: 74 additions & 36 deletions bin/jsonschema_suite
@@ -30,11 +30,15 @@ else:

ROOT_DIR = Path(__file__).parent.parent
SUITE_ROOT_DIR = ROOT_DIR / "tests"
OUTPUT_ROOT_DIR = ROOT_DIR / "output-tests"

REMOTES_DIR = ROOT_DIR / "remotes"
REMOTES_BASE_URL = "http://localhost:1234/"

TESTSUITE_SCHEMA = json.loads((ROOT_DIR / "test-schema.json").read_text())
TEST_SCHEMA = json.loads(ROOT_DIR.joinpath("test-schema.json").read_text())
OUTPUT_TEST_SCHEMA = json.loads(
ROOT_DIR.joinpath("output-test-schema.json").read_text(),
)


def files(paths):
@@ -67,7 +71,7 @@ def collect(root_dir):
"""
All of the test file paths within the given root directory, recursively.
"""
return root_dir.glob("**/*.json")
return root_dir.rglob("*.json")


def url_for_path(path):
@@ -80,20 +84,29 @@ def url_for_path(path):

return urljoin(
REMOTES_BASE_URL,
str(path.relative_to(REMOTES_DIR)).replace("\\", "/") # Windows...
str(path.relative_to(REMOTES_DIR)).replace("\\", "/"), # Windows...
)


class SanityTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
print(f"Looking for tests in {SUITE_ROOT_DIR}")
print(f"Looking for output tests in {OUTPUT_ROOT_DIR}")
print(f"Looking for remotes in {REMOTES_DIR}")

cls.test_files = list(collect(SUITE_ROOT_DIR))
assert cls.test_files, "Didn't find the test files!"
print(f"Found {len(cls.test_files)} test files")

cls.output_test_files = [
each
for each in collect(OUTPUT_ROOT_DIR)
if each.name != "output-schema.json"
]
assert cls.output_test_files, "Didn't find the output test files!"
print(f"Found {len(cls.output_test_files)} output test files")

cls.remote_files = list(collect(REMOTES_DIR))
assert cls.remote_files, "Didn't find the remote files!"
print(f"Found {len(cls.remote_files)} remote files")
@@ -131,22 +144,11 @@ class SanityTests(unittest.TestCase):
self.assertNotRegex(description, r"\bshould\b", message)
self.assertNotRegex(description, r"(?i)\btest(s)? that\b", message)

def test_all_test_files_are_valid_json(self):
"""
All test files contain valid JSON.
"""
for path in self.test_files:
with self.subTest(path=path):
try:
json.loads(path.read_text())
except ValueError as error:
self.fail(f"{path} contains invalid JSON ({error})")

def test_all_remote_files_are_valid_json(self):
def test_all_json_files_are_valid(self):
"""
All remote files contain valid JSON.
All files (tests, output tests, remotes, etc.) contain valid JSON.
"""
for path in self.remote_files:
for path in collect(ROOT_DIR):
with self.subTest(path=path):
try:
json.loads(path.read_text())
@@ -157,53 +159,57 @@ class SanityTests(unittest.TestCase):
"""
All cases have reasonably long descriptions.
"""
for case in cases(self.test_files):
for case in cases(self.test_files + self.output_test_files):
with self.subTest(description=case["description"]):
self.assertLess(
len(case["description"]),
150,
"Description is too long (keep it to less than 150 chars)."
"Description is too long (keep it to less than 150 chars).",
)

def test_all_test_descriptions_have_reasonable_length(self):
"""
All tests have reasonably long descriptions.
"""
for count, test in enumerate(tests(self.test_files)):
for count, test in enumerate(
tests(self.test_files + self.output_test_files)
):
with self.subTest(description=test["description"]):
self.assertLess(
len(test["description"]),
70,
"Description is too long (keep it to less than 70 chars)."
"Description is too long (keep it to less than 70 chars).",
)
print(f"Found {count} tests.")

def test_all_case_descriptions_are_unique(self):
"""
All cases have unique descriptions in their files.
"""
for path, cases in files(self.test_files):
for path, cases in files(self.test_files + self.output_test_files):
with self.subTest(path=path):
self.assertUnique(case["description"] for case in cases)

def test_all_test_descriptions_are_unique(self):
"""
All test cases have unique test descriptions in their tests.
"""
for count, case in enumerate(cases(self.test_files)):
for count, case in enumerate(
cases(self.test_files + self.output_test_files)
):
with self.subTest(description=case["description"]):
self.assertUnique(
test["description"] for test in case["tests"]
)
print(f"Found {count} test cases.")

def test_case_descriptions_do_not_use_modal_verbs(self):
for case in cases(self.test_files):
for case in cases(self.test_files + self.output_test_files):
with self.subTest(description=case["description"]):
self.assertFollowsDescriptionStyle(case["description"])

def test_test_descriptions_do_not_use_modal_verbs(self):
for test in tests(self.test_files):
for test in tests(self.test_files + self.output_test_files):
with self.subTest(description=test["description"]):
self.assertFollowsDescriptionStyle(test["description"])

@@ -218,14 +224,21 @@ class SanityTests(unittest.TestCase):

Validator = VALIDATORS.get(version.name)
if Validator is not None:
# Valid (optional test) schemas contain regexes which
# aren't valid Python regexes, so skip checking it
Validator.FORMAT_CHECKER.checkers.pop("regex", None)

test_files = collect(version)
for case in cases(test_files):
with self.subTest(case=case):
try:
Validator.check_schema(case["schema"])
Validator.check_schema(
case["schema"],
format_checker=Validator.FORMAT_CHECKER,
)
except jsonschema.SchemaError:
self.fail(
"Found an invalid schema."
"Found an invalid schema. "
"See the traceback for details on why."
)
else:
@@ -236,15 +249,32 @@ class SanityTests(unittest.TestCase):
"""
All test files are valid under test-schema.json.
"""
Validator = jsonschema.validators.validator_for(TESTSUITE_SCHEMA)
validator = Validator(TESTSUITE_SCHEMA)
Validator = jsonschema.validators.validator_for(TEST_SCHEMA)
validator = Validator(TEST_SCHEMA)
for path, cases in files(self.test_files):
with self.subTest(path=path):
try:
validator.validate(cases)
except jsonschema.ValidationError as error:
self.fail(str(error))

@unittest.skipIf(jsonschema is None, "Validation library not present!")
def test_output_suites_are_valid(self):
"""
All output test files are valid under output-test-schema.json.
"""
Validator = jsonschema.validators.validator_for(OUTPUT_TEST_SCHEMA)
validator = Validator(OUTPUT_TEST_SCHEMA)
for path, cases in files(self.output_test_files):
with self.subTest(path=path):
try:
validator.validate(cases)
except jsonschema.exceptions.RefResolutionError as error:
# python-jsonschema/jsonschema#884
pass
except jsonschema.ValidationError as error:
self.fail(str(error))


def main(arguments):
if arguments.command == "check":
@@ -277,15 +307,21 @@ def main(arguments):
try:
import flask
except ImportError:
print(textwrap.dedent("""
print(
textwrap.dedent(
"""
The Flask library is required to serve the remote schemas.

You can install it by running `pip install Flask`.

Alternatively, see the `jsonschema_suite remotes` or
`jsonschema_suite dump_remotes` commands to create static files
that can be served with your own web server.
""".strip("\n")))
""".strip(
"\n"
)
)
)
sys.exit(1)

app = flask.Flask(__name__)
@@ -309,25 +345,27 @@ check = subparsers.add_parser("check", help="Sanity check the test suite.")

flatten = subparsers.add_parser(
"flatten",
help="Output a flattened file containing a selected version's test cases."
help="Output a flattened file containing a selected version's test cases.",
)
flatten.add_argument(
"--randomize",
action="store_true",
help="Randomize the order of the outputted cases.",
)
flatten.add_argument(
"version", help="The directory containing the version to output",
"version",
help="The directory containing the version to output",
)

remotes = subparsers.add_parser(
"remotes",
help="Output the expected URLs and their associated schemas for remote "
"ref tests as a JSON object."
"ref tests as a JSON object.",
)

dump_remotes = subparsers.add_parser(
"dump_remotes", help="Dump the remote ref schemas into a file tree",
"dump_remotes",
help="Dump the remote ref schemas into a file tree",
)
dump_remotes.add_argument(
"--update",
Expand All @@ -343,7 +381,7 @@ dump_remotes.add_argument(

serve = subparsers.add_parser(
"serve",
help="Start a webserver to serve schemas used by remote ref tests."
help="Start a webserver to serve schemas used by remote ref tests.",
)

if __name__ == "__main__":
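
For context, the script above is run directly from the repository root. Assuming a Python environment with the optional jsonschema dependency installed, the sanity checks and the remote-schema server are invoked as follows (a usage sketch, not part of this diff):

    bin/jsonschema_suite check   # run the SanityTests defined above
    bin/jsonschema_suite serve   # serve remotes/ at http://localhost:1234/ (requires Flask)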
70 changes: 70 additions & 0 deletions output-test-schema.json
@@ -0,0 +1,70 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://json-schema.org/tests/output-test-schema",
"description": "A schema for files contained within this suite",

"type": "array",
"minItems": 1,
"items": {
"description": "An individual test case, containing multiple tests of a single schema's behavior",

"type": "object",
"required": [ "description", "schema", "tests" ],
"properties": {
"description": {
"description": "The test case description",
"type": "string"
},
"comment": {
"description": "Any additional comments about the test case",
"type": "string"
},
"schema": {
"description": "A valid JSON Schema (one written for the corresponding version directory that the file sits within)."
},
"tests": {
"description": "A set of related tests all using the same schema",
"type": "array",
"items": { "$ref": "#/$defs/test" },
"minItems": 1
}
},
"additionalProperties": false
},

"$defs": {
"test": {
"description": "A single output test",

"type": "object",
"required": [ "description", "data", "output" ],
"properties": {
"description": {
"description": "The test description, briefly explaining which behavior it exercises",
"type": "string"
},
"comment": {
"description": "Any additional comments about the test",
"type": "string"
},
"data": {
"description": "The instance which should be validated against the schema in \"schema\"."
},
"output": {
"description": "schemas that are used to verify output",
"type": "object",
"properties": {
"flag": { "$ref": "https://json-schema.org/draft/2020-12/schema" },
"basic": { "$ref": "https://json-schema.org/draft/2020-12/schema" },
"detailed": { "$ref": "https://json-schema.org/draft/2020-12/schema" },
"verbose": { "$ref": "https://json-schema.org/draft/2020-12/schema" },
"list": { "$ref": "https://json-schema.org/draft/2020-12/schema" },
"hierarchy": { "$ref": "https://json-schema.org/draft/2020-12/schema" }
},
"minProperties": 1,
"additionalProperties": false
}
}
}
}
}
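
For orientation, a minimal output test file that satisfies this schema might look like the following sketch (the case, instance, and expected output are illustrative, not taken from the suite):

    [
        {
            "description": "a trivial type check",
            "schema": {
                "$schema": "https://json-schema.org/draft/2020-12/schema",
                "type": "string"
            },
            "tests": [
                {
                    "description": "flag output for a failing instance",
                    "data": 42,
                    "output": {
                        "flag": {
                            "properties": {
                                "valid": { "const": false }
                            },
                            "required": [ "valid" ]
                        }
                    }
                }
            ]
        }
    ]

The values under "output" are themselves JSON Schemas: a harness runs the implementation on "data", captures its output in the named format (flag, basic, detailed, verbose, list, or hierarchy), and validates that output against the corresponding schema.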