Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Reworking logging in mreg. #515

Merged
merged 20 commits into from
Aug 28, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
69 changes: 40 additions & 29 deletions mreg/api/v1/history.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

import datetime

from django_logging import log
import structlog

from django.core.exceptions import ValidationError
from django.core.serializers.json import DjangoJSONEncoder
Expand All @@ -11,9 +11,10 @@

from mreg.models.base import History

log = structlog.get_logger(__name__)

class DjangoJSONModelEncoder(DjangoJSONEncoder):

class DjangoJSONModelEncoder(DjangoJSONEncoder):
def default(self, o):
if isinstance(o, Model):
return model_to_dict(o)
Expand All @@ -25,14 +26,14 @@ def default(self, o):


class HistoryLog:

def save_log(self, action, serializer, data, orig_data=None):
if serializer.Meta.model == self.model:
model_id = serializer.data['id']
name = serializer.data['name']
model_id = serializer.data["id"]
name = serializer.data["name"]
else:
obj = data.get(self.foreign_key_name,
serializer.data.get(self.foreign_key_name, None))
obj = data.get(
self.foreign_key_name, serializer.data.get(self.foreign_key_name, None)
)
if isinstance(obj, self.model):
pass
elif isinstance(obj, int):
Expand All @@ -42,17 +43,19 @@ def save_log(self, action, serializer, data, orig_data=None):
model_id = obj.id
name = obj.name
self.manipulate_data(action, serializer, data, orig_data)
if action == 'update':
data = {'current_data': orig_data, 'update': data}
if action == "update":
data = {"current_data": orig_data, "update": data}
model = serializer.Meta.model.__name__
json_data = self.get_jsondata(data)
history = History(user=self.request.user,
resource=self.log_resource,
name=name,
model_id=model_id,
model=model,
action=action,
data=json_data)
history = History(
user=self.request.user,
resource=self.log_resource,
name=name,
model_id=model_id,
model=model,
action=action,
data=json_data,
)

# We should never fail at performing a clean on the testdata itself.
try:
Expand All @@ -75,21 +78,27 @@ def save_log_create(self, serializer):
self.save_log("create", serializer, serializer.validated_data)

def save_log_update(self, serializer):
self.save_log("update", serializer, serializer.validated_data, orig_data=self.orig_data)
self.save_log(
"update", serializer, serializer.validated_data, orig_data=self.orig_data
)

def save_log_m2m_alteration(self, method, instance):
data = {"relation": self.m2m_field,
"id": str(instance.id),
"name": instance.name}
data = {
"relation": self.m2m_field,
"id": str(instance.id),
"name": instance.name,
}
model = instance.__class__.__name__
action = method.__name__
history = History(user=self.request.user,
resource=self.log_resource,
name=self.object.name,
model_id=self.object.id,
model=model,
action=action,
data=data)
history = History(
user=self.request.user,
resource=self.log_resource,
name=self.object.name,
model_id=self.object.id,
model=model,
action=action,
data=data,
)

# We should never fail at performing a clean on the testdata itself.
try:
Expand All @@ -105,10 +114,12 @@ def perform_create(self, serializer, **kwargs):

def perform_update(self, serializer):
# Make sure to get the original data before serializer.save()
if not hasattr(self, 'orig_data'):
if not hasattr(self, "orig_data"):
self.orig_data = self.get_serializer(self.get_object()).data
super().perform_update(serializer)
self.save_log("update", serializer, serializer.validated_data, orig_data=self.orig_data)
self.save_log(
"update", serializer, serializer.validated_data, orig_data=self.orig_data
)

def perform_destroy(self, instance):
serializer = self.get_serializer(instance)
Expand Down
227 changes: 227 additions & 0 deletions mreg/api/v1/tests/test_logging.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,227 @@
"""Test logging middleware and logging output."""


import io
from unittest.mock import MagicMock, patch

from django.http import HttpRequest, HttpResponse
from structlog import get_logger
from structlog.testing import capture_logs

from mreg.middleware.logging_http import LoggingMiddleware

from django.contrib.auth import get_user_model
from mreg.api.v1.tests.tests import MregAPITestCase
from mreg.log_processors import (
filter_sensitive_data,
RequestColorTracker,
collapse_request_id_processor,
reorder_keys_processor,
)


class TestLoggingInternals(MregAPITestCase):
    """Test internals in the logging framework.

    Covers the structlog processors (key reordering, request-ID collapsing,
    sensitive-data filtering) and the middleware's handling of binary
    request bodies.
    """

    def test_reorder_keys_processor(self) -> None:
        """Test that ``request_id`` is moved to the front of the event dict."""
        # Simulate a logging event with request_id deliberately not first.
        event = {
            "event": "Event 1",
            "request_id": "request_id",
            "another_key": "value",
        }

        # Process the event through the processor under test.
        processed_event = reorder_keys_processor(None, None, event)

        # request_id must now be the first key (dicts preserve insertion order).
        first_key = next(iter(processed_event.keys()))
        self.assertEqual(first_key, "request_id")

    def test_collapse_request_id_processor(self) -> None:
        """Test that the request ID is collapsed properly.

        IDs of ten characters or fewer are fully masked as ``...``; longer
        IDs keep only their first and last three characters.
        """
        testcases = [
            ("", "..."),  # length 0
            ("12345", "..."),  # length 5
            ("1234567890", "..."),  # length 10
            ("12345678901", "123...901"),  # length 11
            ("12345678901234567890", "123...890"),  # length 20
        ]

        for original_request_id, expected_request_id in testcases:
            # subTest reports every failing case individually instead of
            # aborting the whole test at the first mismatch.
            with self.subTest(request_id=original_request_id):
                event = {"event": "Event 1", "request_id": original_request_id}

                processed_event = collapse_request_id_processor(None, None, event)

                self.assertEqual(
                    processed_event["request_id"], expected_request_id
                )

    def test_filtering_of_sensitive_data(self) -> None:
        """Test that sensitive data is filtered correctly.

        Token and session identifiers in logged model data must be collapsed
        the same way as request IDs (full mask for short values, first and
        last three characters otherwise).
        """
        source_dicts = [
            {
                "model": "ExpiringToken",
                "_str": "1234567890123456789012345678901234567890",
                "id": "1234567890123456789012345678901234567890",
            },
            {
                "model": "ExpiringToken",
                "_str": "123456789",
                "id": "123456789",
            },
            {
                "model": "Session",
                "_str": "123456789012345678901234567890123456789a",
                "id": "123456789012345678901234567890123456789a",
            },
        ]

        expected_dicts = [
            {
                "model": "ExpiringToken",
                "_str": "123...890",
                "id": "123...890",
            },
            {
                "model": "ExpiringToken",
                "_str": "...",
                "id": "...",
            },
            {
                "model": "Session",
                "_str": "123...89a",
                "id": "123...89a",
            },
        ]

        for source_dict, expected_dict in zip(source_dicts, expected_dicts):
            # Identify each case by its model name so a failure pinpoints
            # the offending input instead of stopping at the first one.
            with self.subTest(model=source_dict["model"], value=source_dict["id"]):
                self.assertEqual(
                    filter_sensitive_data(None, None, source_dict), expected_dict
                )

    def test_binary_request_body(self) -> None:
        """Test logging of a request with a binary (non-UTF-8) body."""
        middleware = LoggingMiddleware(MagicMock())

        def mock_get_response(_):
            return HttpResponse(status=200)

        middleware.get_response = mock_get_response

        with capture_logs() as cap_logs:
            get_logger().bind()
            request = HttpRequest()
            # NOTE(review): _read_started/_stream are private Django request
            # internals; mocked here so request.body reads from our buffer.
            request._read_started = False
            request.user = get_user_model().objects.get(username="superuser")

            # Mock a binary request body that cannot be decoded as text.
            binary_body = b"\x80abc\x01\x02\x03\x04\x05"
            request._stream = io.BytesIO(binary_body)

            middleware(request)

        # The undecodable body must be logged as the placeholder string.
        self.assertEqual(cap_logs[0]["content"], "<Binary Data>")


class TestLoggingMiddleware(MregAPITestCase):
    """Test logging middleware.

    Exercises log-level escalation by request duration, error status
    logging, proxy-IP extraction, and per-request log coloring.
    """

    def test_run_time_ms_escalation(self) -> None:
        """Test run_time_ms escalation for logging levels.

        Slow requests are logged at increasingly severe levels: info below
        1 s, warning from 1 s, critical from 5 s (thresholds inferred from
        the cases below — confirm against the middleware implementation).
        """
        middleware = LoggingMiddleware(MagicMock())

        # Mock get_response so the middleware sees a fixed 200 response;
        # the elapsed time is controlled by patching time.time below.
        def mock_get_response(_):
            return HttpResponse(status=200)

        middleware.get_response = mock_get_response

        # (simulated elapsed seconds, expected log level) pairs.
        delay_responses = [
            (0.1, "info"),
            (0.5, "info"),
            (1.0, "warning"),
            (2.0, "warning"),
            (5.0, "critical"),
            (5.5, "critical"),
        ]

        for delay, expected_level in delay_responses:
            # subTest reports each delay/level pair individually instead of
            # aborting the whole test at the first failing threshold.
            with self.subTest(delay=delay, expected_level=expected_level):
                # NOTE(review): side_effect=[0, delay] assumes the middleware
                # calls time.time() exactly twice per request — a third call
                # would raise StopIteration. Confirm against the middleware.
                with patch("time.time", side_effect=[0, delay]):
                    with capture_logs() as cap_logs:
                        get_logger().bind()
                        request = HttpRequest()
                        request._body = b"Some request body"
                        request.user = get_user_model().objects.get(
                            username="superuser"
                        )
                        middleware(request)
                    # cap_logs[0] is the request, cap_logs[1] is the response
                    self.assertEqual(cap_logs[1]["log_level"], expected_level)

    def test_return_500_error(self) -> None:
        """Test that a 500 response's status code appears in the response log."""
        middleware = LoggingMiddleware(MagicMock())

        def mock_get_response(_):
            return HttpResponse(status=500)

        middleware.get_response = mock_get_response

        with capture_logs() as cap_logs:
            get_logger().bind()
            request = HttpRequest()
            request._read_started = False
            request._stream = io.BytesIO(b"request body")  # mock the _stream attribute
            request.user = get_user_model().objects.get(username="superuser")
            middleware(request)
        # cap_logs[0] is the request entry, cap_logs[1] the response entry.
        self.assertEqual(cap_logs[1]["status_code"], 500)

    def test_proxy_ip_in_logs(self) -> None:
        """Check that a proxy IP from X-Forwarded-For is logged."""
        middleware = LoggingMiddleware(MagicMock())

        def mock_get_response(_):
            return HttpResponse(status=500)

        middleware.get_response = mock_get_response

        with capture_logs() as cap_logs:
            get_logger().bind()
            request = HttpRequest()
            request._read_started = False
            request._stream = io.BytesIO(b"request body")
            request.user = get_user_model().objects.get(username="superuser")
            # 192.0.2.0 is from the TEST-NET-1 range reserved for examples.
            request.META["HTTP_X_FORWARDED_FOR"] = "192.0.2.0"  # set a proxy IP
            middleware(request)
        self.assertEqual(cap_logs[0]["proxy_ip"], "192.0.2.0")

    def test_request_color_tracker(self) -> None:
        """Test that the request color tracker assigns stable per-ID colors.

        Each distinct request_id gets the next color in COLORS, and repeated
        IDs reuse their previously assigned color.
        """
        color_tracker = RequestColorTracker()

        events = [
            {"request_id": "abc123", "event": "Event 1"},
            {"request_id": "def456", "event": "Event 2"},
            {"request_id": "abc123", "event": "Event 3"},
            {"request_id": "abc123", "event": "Event 3"},
            {"request_id": "ghi789", "event": "Event 3"},
        ]

        # Colors are assigned in order of first appearance of each ID.
        expected_colors = [
            color_tracker.COLORS[0],
            color_tracker.COLORS[1],
            color_tracker.COLORS[0],
            color_tracker.COLORS[0],
            color_tracker.COLORS[2],
        ]

        for i, event in enumerate(events):
            with self.subTest(index=i, request_id=event["request_id"]):
                expected_color = expected_colors[i]
                colored_bubble = color_tracker._colorize(expected_color, " • ")
                expected_event = colored_bubble + event["event"]

                colored_event = color_tracker(None, None, event)

                self.assertEqual(colored_event["event"], expected_event)
Loading