From 03bde0592d50ab7f99e5c93693841ed1f789c4c0 Mon Sep 17 00:00:00 2001
From: pederhan
Date: Wed, 9 Oct 2024 11:36:24 +0200
Subject: [PATCH] Add fault tolerance for invalid token file

---
 mreg_cli/tokenfile.py   | 21 +++++++++++++++++----
 tests/test_tokenfile.py | 40 +++++++++++++++++++++++++++++++++++-----
 2 files changed, 52 insertions(+), 9 deletions(-)

diff --git a/mreg_cli/tokenfile.py b/mreg_cli/tokenfile.py
index 79e2738..3cdbddd 100644
--- a/mreg_cli/tokenfile.py
+++ b/mreg_cli/tokenfile.py
@@ -5,9 +5,9 @@
 import json
 import os
 import sys
-from typing import Optional, Self
+from typing import Any, Optional, Self
 
-from pydantic import BaseModel, TypeAdapter
+from pydantic import BaseModel, TypeAdapter, ValidationError
 
 
 # The contents of the token file is:
@@ -36,9 +36,22 @@ class TokenFile:
 
     tokens_path: str = os.path.join(os.getenv("HOME", ""), ".mreg-cli_auth_token.json")
 
-    def __init__(self, tokens: Optional[list[dict[str, str]]] = None):
+    def __init__(self, tokens: Any = None):
         """Initialize the TokenFile instance."""
-        self.tokens = TokenList.validate_python(tokens)
+        self.tokens = self.validate_tokens(tokens)
+
+    def validate_tokens(self, tokens: Any) -> list[Token]:
+        """Convert deserialized JSON to list of Token objects."""
+        if tokens is None:
+            return []
+        try:
+            return TokenList.validate_python(tokens)
+        except ValidationError as e:
+            print(
+                f"Failed to validate tokens from token file {self.tokens_path}: {e}",
+                file=sys.stderr,
+            )
+            return []
 
     def _set_file_permissions(self, mode: int) -> None:
         """Set the file permissions for the token file."""
diff --git a/tests/test_tokenfile.py b/tests/test_tokenfile.py
index 5afd418..539be7a 100644
--- a/tests/test_tokenfile.py
+++ b/tests/test_tokenfile.py
@@ -8,6 +8,7 @@
 
 from __future__ import annotations
 
+import json
 from pathlib import Path
 from typing import Iterator
 
@@ -62,7 +63,7 @@ def reset_token_file_path() -> Iterator[None]:
     TokenFile.tokens_path = TOKENS_PATH_ORIGINAL
 
 
-def testload_file_nonexistent(tmp_path: Path) -> None:
+def test_load_file_nonexistent(tmp_path: Path) -> None:
     """Load from a nonexistent tokens file."""
     tokens_path = tmp_path / "does_not_exist.json"
     assert not tokens_path.exists()
@@ -71,7 +72,7 @@ def testload_file_nonexistent(tmp_path: Path) -> None:
     assert tokenfile.tokens == []
 
 
-def testload_file_empty(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+def test_load_file_empty(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
     """Load from an empty tokens file."""
     tokens_path = tmp_path / "empty.json"
     tokens_path.touch()
@@ -82,7 +83,7 @@ def testload_file_empty(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> N
     assert "Failed to decode JSON" in capsys.readouterr().err
 
 
-def testload_file_invalid(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+def test_load_file_invalid_json(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
     """Load from a tokens file with invalid JSON."""
     tokens_path = tmp_path / "invalid.json"
     tokens_path.write_text("not json")
@@ -93,7 +94,36 @@ def testload_file_invalid(tmp_path: Path, capsys: pytest.CaptureFixture[str]) ->
     assert "Failed to decode JSON" in capsys.readouterr().err
 
 
-def testload_file_single(tmp_path: Path) -> None:
+def test_load_file_invalid_tokenfile(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Load from a tokens file that is not in the correct format."""
+    tokens_path = tmp_path / "invalid_format.json"
+
+    # Contents is valid JSON, but contents are not in the correct format
+    tokens_path.write_text(
+        json.dumps(
+            {
+                "tokens": [
+                    {
+                        "invalid_key": 123,
+                        "url": "https://example.com",
+                        # missing token and username keys
+                    },
+                    {
+                        "token": "exampletoken123",
+                        "url": "https://example.com",
+                        "username": "exampleuser",
+                    },
+                ]
+            }
+        )
+    )
+    TokenFile.tokens_path = str(tokens_path)
+    tokenfile = TokenFile.load()
+    assert tokenfile.tokens == []
+    assert "Failed to validate tokens from token file" in capsys.readouterr().err
+
+
+def test_load_file_single(tmp_path: Path) -> None:
     """Load from a tokens file with a single token."""
     tokens_path = tmp_path / "single.json"
     tokens_path.write_text(TOKEN_FILE_SINGLE)
@@ -106,7 +136,7 @@ def testload_file_single(tmp_path: Path) -> None:
     assert tokens[0].username == snapshot("exampleuser")
 
 
-def testload_file_multiple(tmp_path: Path) -> None:
+def test_load_file_multiple(tmp_path: Path) -> None:
     """Load from a tokens file with multiple tokens."""
     tokens_path = tmp_path / "multiple.json"
     tokens_path.write_text(TOKEN_FILE_MULTIPLE)