Skip to content

Commit

Permalink
Add pytest unit tests
Browse files Browse the repository at this point in the history
  • Loading branch information
pederhan committed May 30, 2024
1 parent 0c198c5 commit 4f3e500
Show file tree
Hide file tree
Showing 6 changed files with 228 additions and 20 deletions.
32 changes: 18 additions & 14 deletions mreg_cli/tokenfile.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@
import json
import os
import sys
from typing import Optional
from typing import Optional, Self

from pydantic import BaseModel
from pydantic import BaseModel, TypeAdapter

# The contents of the token file is:

Expand All @@ -28,42 +28,47 @@ class Token(BaseModel):
username: str


TokenList = TypeAdapter(list[Token])


class TokenFile:
"""A class for managing tokens in a JSON file."""

tokens_path: str = os.path.join(os.getenv("HOME", ""), ".mreg-cli_auth_token.json")

def __init__(self, tokens: Optional[list[dict[str, str]]] = None):
"""Initialize the TokenFile instance."""
self.tokens = [Token(**token) for token in tokens] if tokens else []
self.tokens = TokenList.validate_python(tokens)

@classmethod
def _load_tokens(cls) -> "TokenFile":
def _load_tokens(cls) -> Self:
"""Load tokens from a JSON file, returning a new instance of TokenFile."""
try:
with open(cls.tokens_path, "r") as file:
data = json.load(file)
return TokenFile(tokens=data["tokens"])
except (FileNotFoundError, KeyError):
return TokenFile(tokens=[])
return cls(tokens=data.get("tokens"))
except (FileNotFoundError, KeyError, json.JSONDecodeError) as e:
if isinstance(e, json.JSONDecodeError):
print(f"Failed to decode JSON in tokens file {cls.tokens_path}", file=sys.stderr)
return cls(tokens=[])

@classmethod
def _set_file_permissions(cls, mode: int) -> None:
"""Set the file permissions for the token file."""
try:
os.chmod(cls.tokens_path, mode)
except PermissionError:
print("Failed to set permissions on " + cls.tokens_path, file=sys.stderr)
print(f"Failed to set permissions on {cls.tokens_path}", file=sys.stderr)
except FileNotFoundError:
pass

@classmethod
def _save_tokens(cls, tokens: "TokenFile") -> None:
def _save_tokens(cls, tokens: Self) -> Self:
"""Save tokens to a JSON file."""
with open(cls.tokens_path, "w") as file:
json.dump({"tokens": [token.model_dump() for token in tokens.tokens]}, file, indent=4)

cls._set_file_permissions(0o600)
return tokens

@classmethod
def get_entry(cls, username: str, url: str) -> Optional[Token]:
Expand All @@ -75,15 +80,14 @@ def get_entry(cls, username: str, url: str) -> Optional[Token]:
return None

@classmethod
def set_entry(cls, username: str, url: str, new_token: str) -> None:
def set_entry(cls, username: str, url: str, new_token: str) -> Self:
"""Update or add a token based on the URL and username."""
tokens_file = cls._load_tokens()
for token in tokens_file.tokens:
if token.url == url and token.username == username:
token.token = new_token
cls._save_tokens(tokens_file)
return
return cls._save_tokens(tokens_file)

# If not found, add a new token
tokens_file.tokens.append(Token(token=new_token, url=url, username=username))
cls._save_tokens(tokens_file)
return cls._save_tokens(tokens_file)
8 changes: 4 additions & 4 deletions mreg_cli/utilities/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,14 +204,14 @@ def result_check(result: Response, operation_type: str, url: str) -> None:
cli_warning(message)


def _strip_none(data: dict[str, Any]) -> dict[str, Any]:
def strip_none(data: dict[str, Any]) -> dict[str, Any]:
"""Recursively strip None values from a dictionary."""
new: dict[str, Any] = {}
for key, value in data.items():
if value is not None:
if isinstance(value, dict):
v = _strip_none(value) # pyright: ignore[reportUnknownArgumentType]
if v:
v = strip_none(value) # pyright: ignore[reportUnknownArgumentType]
if v: # Only keep resulting dict if not empty
new[key] = v
else:
new[key] = value
Expand All @@ -233,7 +233,7 @@ def _request_wrapper(

# Strip None values from data
if data:
data = _strip_none(data)
data = strip_none(data)

result = getattr(session, operation_type)(
url,
Expand Down
3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@ dependencies = [
dynamic = ["version"]

[project.optional-dependencies]
dev = ["ruff", "tox", "pyinstaller"]
test = ["pytest", "inline-snapshot"]
dev = ["mreg_cli[test]", "ruff", "tox", "pyinstaller"]

[project.urls]
Repository = 'https://github.com/unioslo/mreg-cli/'
Expand Down
173 changes: 173 additions & 0 deletions tests/test_tokenfile.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,173 @@
from pathlib import Path
from typing import Iterator

import pytest
from inline_snapshot import snapshot

from mreg_cli.tokenfile import TokenFile

TOKENS_PATH_ORIGINAL = TokenFile.tokens_path


TOKEN_FILE_SINGLE = """
{
"tokens": [
{
"token": "exampletoken123",
"url": "https://example.com",
"username": "exampleuser"
}
]
}
"""


TOKEN_FILE_MULTIPLE = """
{
"tokens": [
{
"token": "exampletoken123",
"url": "https://example.com",
"username": "exampleuser"
},
{
"token": "footoken456",
"url": "https://foo.com",
"username": "foouser"
},
{
"token": "bartoken789",
"url": "https://bar.com",
"username": "baruser"
}
]
}
"""


@pytest.fixture(autouse=True)
def reset_token_file_path() -> Iterator[None]:
    """Restore the original class-level token file path once each test finishes."""
    try:
        yield
    finally:
        TokenFile.tokens_path = TOKENS_PATH_ORIGINAL


def test_load_tokens_file_nonexistent(tmp_path: Path) -> None:
    """A missing tokens file loads as an empty token list."""
    missing_file = tmp_path / "does_not_exist.json"
    assert not missing_file.exists()
    TokenFile.tokens_path = str(missing_file)
    loaded = TokenFile._load_tokens()
    assert loaded.tokens == []


def test_load_tokens_file_empty(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
    """A zero-byte tokens file is reported as bad JSON and loads no tokens."""
    empty_file = tmp_path / "empty.json"
    empty_file.touch()
    assert empty_file.read_text() == ""
    TokenFile.tokens_path = str(empty_file)
    loaded = TokenFile._load_tokens()
    assert loaded.tokens == []
    captured = capsys.readouterr()
    assert "Failed to decode JSON" in captured.err


def test_load_tokens_file_invalid(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
    """A tokens file with non-JSON content is reported and loads no tokens."""
    bad_file = tmp_path / "invalid.json"
    bad_file.write_text("not json")
    assert bad_file.read_text() == "not json"
    TokenFile.tokens_path = str(bad_file)
    loaded = TokenFile._load_tokens()
    assert loaded.tokens == []
    captured = capsys.readouterr()
    assert "Failed to decode JSON" in captured.err


def test_load_tokens_file_single(tmp_path: Path) -> None:
    """A tokens file with exactly one entry round-trips all three fields."""
    single_file = tmp_path / "single.json"
    single_file.write_text(TOKEN_FILE_SINGLE)
    TokenFile.tokens_path = str(single_file)
    loaded = TokenFile._load_tokens()
    assert len(loaded.tokens) == 1
    entry = loaded.tokens[0]
    assert entry.token == "exampletoken123"
    assert entry.url == "https://example.com"
    assert entry.username == "exampleuser"


def test_load_tokens_file_multiple(tmp_path: Path) -> None:
    """A tokens file with several entries loads them all."""
    multi_file = tmp_path / "multiple.json"
    multi_file.write_text(TOKEN_FILE_MULTIPLE)
    TokenFile.tokens_path = str(multi_file)
    loaded = TokenFile._load_tokens()
    assert len(loaded.tokens) == 3


def test_get_entry(tmp_path: Path) -> None:
    """Look up each stored token by (username, url); unknown users yield None."""
    store_file = tmp_path / "get_token.json"
    store_file.write_text(TOKEN_FILE_MULTIPLE)
    TokenFile.tokens_path = str(store_file)
    loaded = TokenFile._load_tokens()

    # Every (username, url, token) triple written to the file must be retrievable.
    expected_entries = [
        ("exampleuser", "https://example.com", "exampletoken123"),
        ("foouser", "https://foo.com", "footoken456"),
        ("baruser", "https://bar.com", "bartoken789"),
    ]
    for username, url, secret in expected_entries:
        entry = loaded.get_entry(username, url)
        assert entry is not None
        assert entry.token == secret
        assert entry.url == url
        assert entry.username == username

    # A username not present in the file yields no entry.
    assert loaded.get_entry("nonexistent", "https://example.com") is None


def test_set_entry_existing(tmp_path: Path) -> None:
    """Set a token in the token file that already exists.

    Updating an existing (username, url) pair must replace the stored token
    value in place without growing the token list.
    """
    # BUG FIX: this test previously added a brand-new ("newuser", "https://new.com")
    # entry, which duplicated test_set_entry_new and never exercised the
    # update-in-place branch of set_entry. It now updates an entry that is
    # already present in TOKEN_FILE_MULTIPLE.
    store_file = tmp_path / "set_existing.json"
    store_file.write_text(TOKEN_FILE_MULTIPLE)
    TokenFile.tokens_path = str(store_file)
    tokenfile = TokenFile._load_tokens()

    assert len(tokenfile.tokens) == 3
    tokenfile = tokenfile.set_entry("exampleuser", "https://example.com", "rotatedtoken999")
    # No new entry is added; the existing one is updated in place.
    assert len(tokenfile.tokens) == 3
    token = tokenfile.get_entry("exampleuser", "https://example.com")
    assert token is not None
    assert token.token == "rotatedtoken999"


@pytest.mark.parametrize("create_before", [True, False], ids=["create_before", "create_after"])
def test_set_entry_new(tmp_path: Path, create_before: bool) -> None:
    """Set a token in the token file that does not already exist."""
    store_file = tmp_path / "set_new.json"
    if create_before:
        store_file.touch()  # pre-create as an empty (zero-byte) file
    TokenFile.tokens_path = str(store_file)
    tokenfile = TokenFile._load_tokens()

    # Starting from no tokens, setting a new entry appends exactly one token.
    assert not tokenfile.tokens
    tokenfile = tokenfile.set_entry("newuser", "https://new.com", "newtoken123")
    assert len(tokenfile.tokens) == 1
    entry = tokenfile.get_entry("newuser", "https://new.com")
    assert entry is not None
    assert entry.token == "newtoken123"

    # Round-trip: re-reading the file yields the token that was just written.
    reloaded = TokenFile._load_tokens()
    assert len(reloaded.tokens) == 1
    entry = reloaded.get_entry("newuser", "https://new.com")
    assert entry is not None
    assert entry.token == "newtoken123"
22 changes: 22 additions & 0 deletions tests/utilities/test_api.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from typing import Any

import pytest

from mreg_cli.utilities.api import strip_none


@pytest.mark.parametrize(
    "inp,expect",
    [
        # Empty dict
        ({}, {}),
        # Mixed values
        ({"foo": "bar", "baz": None}, {"foo": "bar"}),
        # Multiple keys with None values
        ({"foo": None, "bar": None}, {}),
        # Nested dicts that become empty after stripping are dropped entirely
        ({"foo": {"bar": {"baz": None}}}, {}),
        # Nested dict keeps its non-None keys while None keys are dropped
        ({"foo": {"bar": 1, "baz": None}}, {"foo": {"bar": 1}}),
        # Falsy-but-not-None values (0, "") must be preserved
        ({"foo": 0, "bar": "", "baz": None}, {"foo": 0, "bar": ""}),
    ],
)
def test_strip_none(inp: dict[str, Any], expect: dict[str, Any]) -> None:
    """strip_none recursively removes None values and empty sub-dicts."""
    assert strip_none(inp) == expect
10 changes: 9 additions & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ toxworkdir = {env:TOX_WORKDIR:.tox}
envlist =
lint
python3{11,12}
pytest

[gh-actions]
python =
Expand All @@ -30,6 +31,14 @@ commands =
ruff check --fix mreg_cli
ruff format mreg_cli

[testenv:pytest]
skip_install = false
description = Run unit tests with pytest
allowlist_externals =
pytest
commands =
pytest -v

[testenv]
skip_install = false
description = Test building binary with pyinstaller
Expand All @@ -44,4 +53,3 @@ allowlist_externals =
commands =
python --version
pyinstaller --name mreg-cli run.py -F --hidden-import=mreg_cli.commands.host_submodules

0 comments on commit 4f3e500

Please sign in to comment.