Enable JSON response from the validator #502

Merged: 3 commits, Sep 17, 2020
Changes from 1 commit
7 changes: 7 additions & 0 deletions optimade/validator/__init__.py
@@ -45,6 +45,12 @@ def validate():
default=0,
help="""Increase the verbosity of the output.""",
)
parser.add_argument(
"-j",
"--json",
action="store_true",
help="""Only a JSON summary of the validator results will be printed to stdout.""",
)
parser.add_argument(
"-t",
"--as-type",
@@ -97,6 +103,7 @@ def validate():
validator = ImplementationValidator(
base_url=args["base_url"],
verbosity=args["verbosity"],
respond_json=args["json"],
as_type=args["as_type"],
index=args["index"],
run_optional_tests=not args["skip_optional"],
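For context, a minimal sketch (not part of the diff) of how the new flag behaves once parsed: `action="store_true"` makes `args["json"]` default to `False` and flip to `True` whenever `-j`/`--json` is given, and that boolean is forwarded to the validator as `respond_json`. The parser below is a simplified stand-in for the real `validate()` entry point; only the `--json` option mirrors the change above.

```python
import argparse

# Simplified stand-in parser; the real CLI defines many more options.
parser = argparse.ArgumentParser(description="Validate an OPTIMADE implementation.")
parser.add_argument("base_url", help="Base URL of the implementation to validate.")
parser.add_argument(
    "-j",
    "--json",
    action="store_true",
    help="Only a JSON summary of the validator results will be printed to stdout.",
)

# e.g. invoking the validator as: <validator> https://example.org/optimade --json
args = vars(parser.parse_args(["https://example.org/optimade", "--json"]))
assert args["json"] is True  # defaults to False when the flag is omitted

# The boolean is then passed straight through, as in the diff above:
# ImplementationValidator(base_url=args["base_url"], respond_json=args["json"], ...)
```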
32 changes: 18 additions & 14 deletions optimade/validator/utils.py
@@ -132,7 +132,6 @@ def get(self, request: str):
if status_code != 429:
break

print("Hit rate limit, sleeping for 1 s...")
time.sleep(1)

else:
@@ -242,19 +241,20 @@ def wrapper(
if not isinstance(result, Exception):
if not multistage:
if not optional:
validator.success_count += 1
validator.results["success_count"] += 1
else:
validator.optional_success_count += 1
validator.results["optional_success_count"] += 1
message = f"✔: {request} - {msg}"
if validator.verbosity > 0:
if optional:
print(message)
else:
print_success(message)
elif optional:
print(".", end="", flush=True)
else:
print_success(".", end="", flush=True)
elif validator.verbosity == 0:
if optional:
print(".", end="", flush=True)
else:
print_success(".", end="", flush=True)
else:
internal_error = False
request = request.replace("\n", "")
@@ -267,17 +267,21 @@

if isinstance(result, InternalError):
internal_error = True
validator.internal_failure_count += 1
validator.results["internal_failure_count"] += 1
summary = f"!: {request} - {test_fn.__name__} - failed with internal error"
validator.internal_failure_messages.append((summary, message))
validator.results["internal_failure_messages"].append(
(summary, message)
)
else:
summary = f"✖: {request} - {test_fn.__name__} - failed with error"
if not optional:
validator.failure_count += 1
validator.failure_messages.append((summary, message))
validator.results["failure_count"] += 1
validator.results["failure_messages"].append((summary, message))
else:
validator.optional_failure_count += 1
validator.optional_failure_messages.append((summary, message))
validator.results["optional_failure_count"] += 1
validator.results["optional_failure_messages"].append(
(summary, message)
)

if validator.verbosity > 0:
if internal_error:
@@ -292,7 +296,7 @@ def wrapper(
print_failure(summary)
for line in message:
print_warning(f"\t{line}")
else:
elif validator.verbosity == 0:
if internal_error:
print_notify("!", end="", flush=True)
elif optional:
62 changes: 40 additions & 22 deletions optimade/validator/validator.py
@@ -63,6 +63,7 @@ def __init__( # pylint: disable=too-many-arguments
client: Any = None,
base_url: str = None,
verbosity: int = 0,
respond_json: bool = False,
page_limit: int = 5,
max_retries: int = 5,
run_optional_tests: bool = True,
@@ -82,6 +83,8 @@ def __init__( # pylint: disable=too-many-arguments
base of the OPTIMADE implementation.
verbosity: The verbosity of the output and logging as an integer
(`0`: critical, `1`: warning, `2`: info, `3`: debug).
respond_json: If `True`, print only a JSON representation of the
results of validation to stdout.
page_limit: The default page limit to apply to filters.
max_retries: Argument is passed to the client for how many
attempts to make for a request before failing.
@@ -101,6 +104,7 @@ def __init__( # pylint: disable=too-many-arguments
self.index = index
self.run_optional_tests = run_optional_tests
self.fail_fast = fail_fast
self.respond_json = respond_json

if as_type is None:
self.as_type_cls = None
@@ -147,14 +151,16 @@ def __init__( # pylint: disable=too-many-arguments
self._test_id_by_type = {}
self._entry_info_by_type = {}

self.success_count = 0
self.failure_count = 0
self.internal_failure_count = 0
self.optional_success_count = 0
self.optional_failure_count = 0
self.failure_messages = []
self.internal_failure_messages = []
self.optional_failure_messages = []
self.results = {
"success_count": 0,
"failure_count": 0,
"internal_failure_count": 0,
"optional_success_count": 0,
"optional_failure_count": 0,
"failure_messages": [],
"internal_failure_messages": [],
"optional_failure_messages": [],
}

def _setup_log(self):
""" Define stdout log based on given verbosity. """
@@ -164,7 +170,12 @@ def _setup_log(self):
stdout_handler.setFormatter(
logging.Formatter("%(asctime)s - %(name)s | %(levelname)8s: %(message)s")
)
self._log.addHandler(stdout_handler)

if not self.respond_json:
self._log.addHandler(stdout_handler)
else:
self.verbosity = -1

if self.verbosity == 0:
self._log.setLevel(logging.CRITICAL)
elif self.verbosity == 1:
@@ -176,23 +187,27 @@ def _setup_log(self):

def print_summary(self):
""" Print a summary of the results of validation. """
if self.failure_messages:
if self.respond_json:
print(json.dumps(self.results, indent=2))
return

if self.results["failure_messages"]:
print("\n\nFAILURES")
print("========\n")
for message in self.failure_messages:
for message in self.results["failure_messages"]:
print_failure(message[0])
for line in message[1]:
print_warning("\t" + line)

if self.optional_failure_messages:
if self.results["optional_failure_messages"]:
print("\n\nOPTIONAL TEST FAILURES")
print("======================\n")
for message in self.optional_failure_messages:
for message in self.results["optional_failure_messages"]:
print_notify(message[0])
for line in message[1]:
print_warning("\t" + line)

if self.internal_failure_messages:
if self.results["internal_failure_messages"]:
print("\n\nINTERNAL FAILURES")
print("=================\n")
print(
@@ -201,22 +216,22 @@ def print_summary(self):
"https://github.com/Materials-Consortia/optimade-python-tools/issues/new.\n"
)

for message in self.internal_failure_messages:
for message in self.results["internal_failure_messages"]:
print_warning(message[0])
for line in message[1]:
print_warning("\t" + line)

if self.valid or (not self.valid and not self.fail_fast):
final_message = f"\n\nPassed {self.success_count} out of {self.success_count + self.failure_count + self.internal_failure_count} tests."
final_message = f"\n\nPassed {self.results['success_count']} out of {self.results['success_count'] + self.results['failure_count'] + self.results['internal_failure_count']} tests."
if not self.valid:
print_failure(final_message)
else:
print_success(final_message)

if self.run_optional_tests and not self.fail_fast:
print(
f"Additionally passed {self.optional_success_count} out of "
f"{self.optional_success_count + self.optional_failure_count} optional tests."
f"Additionally passed {self.results['optional_success_count']} out of "
f"{self.results['optional_success_count'] + self.results['optional_failure_count']} optional tests."
)

def validate_implementation(self):
@@ -238,11 +253,12 @@ def validate_implementation(self):
self.as_type_cls,
)
self._test_as_type()
self.valid = not bool(self.failure_count)
self.valid = not bool(self.results["failure_count"])
return

# Test entire implementation
print(f"Testing entire implementation at {self.base_url}...")
if self.verbosity >= 0:
print(f"Testing entire implementation at {self.base_url}")
info_endp = CONF.info_endpoint
self._log.debug("Testing base info endpoint of %s", info_endp)

@@ -297,7 +313,9 @@ def validate_implementation(self):
self._log.debug("Testing %s endpoint", CONF.links_endpoint)
self._test_info_or_links_endpoint(CONF.links_endpoint)

self.valid = not (self.failure_count or self.internal_failure_count)
self.valid = not (
self.results["failure_count"] or self.results["internal_failure_count"]
)

self.print_summary()

@@ -390,7 +408,7 @@ def _test_must_properties(
f"Some 'MUST' properties were missing from info/{endp}: {missing}"
)

return True, "Found all required properties in entry info for endpoint {endp}"
return True, f"Found all required properties in entry info for endpoint {endp}"

@test_case
def _get_archetypal_entry(self, endp: str) -> Tuple[Dict[str, Any], str]:
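Putting the changes together, here is a hedged sketch of consuming the JSON summary programmatically (assuming `ImplementationValidator` is exposed from `optimade.validator`, and using a placeholder base URL). With `respond_json=True`, the `print_summary()` call at the end of `validate_implementation()` writes only the JSON representation of the `results` dict to stdout, so the output can be captured and parsed with `json.loads`.

```python
import io
import json
from contextlib import redirect_stdout

from optimade.validator import ImplementationValidator  # assumed public import path

# Placeholder URL: point this at a running OPTIMADE implementation.
validator = ImplementationValidator(
    base_url="https://example.org/optimade",
    respond_json=True,
)

buffer = io.StringIO()
with redirect_stdout(buffer):  # the JSON summary is printed to stdout, not returned
    validator.validate_implementation()

summary = json.loads(buffer.getvalue())
print(summary["success_count"], summary["failure_count"])
print(summary["internal_failure_count"], summary["optional_failure_count"])
# The keys mirror validator.results: "failure_messages",
# "internal_failure_messages" and "optional_failure_messages" hold per-test details.
```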
24 changes: 24 additions & 0 deletions tests/server/test_server_validation.py
@@ -1,4 +1,5 @@
import os
import json
from traceback import print_exc

import pytest
@@ -21,6 +22,29 @@ def test_with_validator(both_fake_remote_clients):
assert validator.valid


def test_with_validator_json_response(both_fake_remote_clients, capsys):
""" Test that the validator writes compliant JSON when requested. """
from optimade.server.main_index import app

validator = ImplementationValidator(
client=both_fake_remote_clients,
index=both_fake_remote_clients.app == app,
respond_json=True,
)
try:
validator.validate_implementation()
except Exception:
print_exc()

captured = capsys.readouterr()
json_response = json.loads(captured.out)
assert json_response["failure_count"] == 0
assert json_response["internal_failure_count"] == 0
assert json_response["optional_failure_count"] == 0

assert validator.valid


def test_mongo_backend_package_used():
import pymongo
import mongomock