Skip to content

Commit

Permalink
Add fault tolerance for invalid token file
Browse files Browse the repository at this point in the history
  • Loading branch information
pederhan committed Oct 9, 2024
1 parent 5f1d360 commit 03bde05
Show file tree
Hide file tree
Showing 2 changed files with 52 additions and 9 deletions.
21 changes: 17 additions & 4 deletions mreg_cli/tokenfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@
import json
import os
import sys
from typing import Optional, Self
from typing import Any, Optional, Self

from pydantic import BaseModel, TypeAdapter
from pydantic import BaseModel, TypeAdapter, ValidationError

# The contents of the token file is:

Expand Down Expand Up @@ -36,9 +36,22 @@ class TokenFile:

tokens_path: str = os.path.join(os.getenv("HOME", ""), ".mreg-cli_auth_token.json")

def __init__(self, tokens: Optional[list[dict[str, str]]] = None):
def __init__(self, tokens: Any = None):
"""Initialize the TokenFile instance."""
self.tokens = TokenList.validate_python(tokens)
self.tokens = self.validate_tokens(tokens)

def validate_tokens(self, tokens: Any) -> list[Token]:
    """Parse deserialized JSON into a list of ``Token`` objects.

    Returns an empty list when *tokens* is ``None`` or when validation
    fails; a validation failure is reported on stderr rather than raised,
    so a corrupt token file never prevents startup.
    """
    if tokens is None:
        return []
    try:
        validated = TokenList.validate_python(tokens)
    except ValidationError as exc:
        # Tolerate a malformed token file: warn and start with no tokens.
        print(
            f"Failed to validate tokens from token file {self.tokens_path}: {exc}",
            file=sys.stderr,
        )
        return []
    return validated

def _set_file_permissions(self, mode: int) -> None:
"""Set the file permissions for the token file."""
Expand Down
40 changes: 35 additions & 5 deletions tests/test_tokenfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@

from __future__ import annotations

import json
from pathlib import Path
from typing import Iterator

Expand Down Expand Up @@ -62,7 +63,7 @@ def reset_token_file_path() -> Iterator[None]:
TokenFile.tokens_path = TOKENS_PATH_ORIGINAL


def testload_file_nonexistent(tmp_path: Path) -> None:
def test_load_file_nonexistent(tmp_path: Path) -> None:
"""Load from a nonexistent tokens file."""
tokens_path = tmp_path / "does_not_exist.json"
assert not tokens_path.exists()
Expand All @@ -71,7 +72,7 @@ def testload_file_nonexistent(tmp_path: Path) -> None:
assert tokenfile.tokens == []


def testload_file_empty(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
def test_load_file_empty(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
"""Load from an empty tokens file."""
tokens_path = tmp_path / "empty.json"
tokens_path.touch()
Expand All @@ -82,7 +83,7 @@ def testload_file_empty(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> N
assert "Failed to decode JSON" in capsys.readouterr().err


def testload_file_invalid(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
def test_load_file_invalid_json(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
"""Load from a tokens file with invalid JSON."""
tokens_path = tmp_path / "invalid.json"
tokens_path.write_text("not json")
Expand All @@ -93,7 +94,36 @@ def testload_file_invalid(tmp_path: Path, capsys: pytest.CaptureFixture[str]) ->
assert "Failed to decode JSON" in capsys.readouterr().err


def testload_file_single(tmp_path: Path) -> None:
def test_load_file_invalid_tokenfile(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
    """Load from a tokens file that is not in the correct format."""
    tokens_path = tmp_path / "invalid_format.json"

    # Valid JSON on disk, but the first entry does not match the Token schema.
    payload = {
        "tokens": [
            {
                "invalid_key": 123,
                "url": "https://example.com",
                # missing token and username keys
            },
            {
                "token": "exampletoken123",
                "url": "https://example.com",
                "username": "exampleuser",
            },
        ]
    }
    tokens_path.write_text(json.dumps(payload))

    TokenFile.tokens_path = str(tokens_path)
    tokenfile = TokenFile.load()

    # The whole file is rejected: no tokens survive, and a warning is printed.
    assert tokenfile.tokens == []
    assert "Failed to validate tokens from token file" in capsys.readouterr().err


def test_load_file_single(tmp_path: Path) -> None:
"""Load from a tokens file with a single token."""
tokens_path = tmp_path / "single.json"
tokens_path.write_text(TOKEN_FILE_SINGLE)
Expand All @@ -106,7 +136,7 @@ def testload_file_single(tmp_path: Path) -> None:
assert tokens[0].username == snapshot("exampleuser")


def testload_file_multiple(tmp_path: Path) -> None:
def test_load_file_multiple(tmp_path: Path) -> None:
"""Load from a tokens file with multiple tokens."""
tokens_path = tmp_path / "multiple.json"
tokens_path.write_text(TOKEN_FILE_MULTIPLE)
Expand Down

0 comments on commit 03bde05

Please sign in to comment.