diff --git a/docs/task_headers_payloads.rst b/docs/task_headers_payloads.rst index 22ee06c..5cf2d46 100644 --- a/docs/task_headers_payloads.rst +++ b/docs/task_headers_payloads.rst @@ -352,3 +352,39 @@ Regular payloads and persistent payload keys have common namespace so persistent Because merging strategy is quite aggressive, it's not recommended to overuse that feature. They should be treated as "analysis-wide payload". It's recommended to set them only in initial task. Don't store any references to resources or other heavy objects here, unless you need to. Persistent payload is, as the name says, persistent, so it is propagated to the whole task subtree and **can't be removed** during analysis. Resource referenced by persistent payload won't be garbage-collected until the whole analysis (task subtree) ends, even if it's not needed by further analysis steps. + +Persistent headers +------------------ + +.. versionadded:: 5.2.0 + +Headers that are propagated to the whole task subtree, so consumers don't need to pass them to child tasks explicitly. + +Using persistent headers you can mark properties that are crucial for routing and should be kept for analysis artifacts as well: + +* Analysis volatility if we don't want to report and persist artifacts from analysis, so tasks are not routed to reporter services +* Analysis confidentiality if we shouldn't pass artifacts to 3rd party services and they should be considered internal +* Marking analysis as made for testing, so we can pass only testing analyses to testing consumers + +Semantics are similar to those of persistent payload: + +.. code-block:: python + + task = Task( + headers=..., + payload=..., + headers_persistent={ + "uploader": "psrok1" + } + ) + + +``headers_persistent`` passed to Task is merged into ``self.headers``, with its keys marked internally as persistent. 
+ +Header precedence is as follows: + +* ``headers_persistent`` from parent task (most important) +* ``headers_persistent`` from current task +* ``headers`` from current task (least important) + +Following these rules: persistent headers propagate to the whole subtree and always override other headers with the same key. diff --git a/karton/core/karton.py b/karton/core/karton.py index d85e13b..0f46c36 100644 --- a/karton/core/karton.py +++ b/karton/core/karton.py @@ -71,6 +71,7 @@ def send_task(self, task: Task) -> bool: if self.current_task is not None: task.set_task_parent(self.current_task) task.merge_persistent_payload(self.current_task) + task.merge_persistent_headers(self.current_task) task.priority = self.current_task.priority task.last_update = time.time() diff --git a/karton/core/task.py b/karton/core/task.py index f706549..846d68f 100644 --- a/karton/core/task.py +++ b/karton/core/task.py @@ -46,6 +46,8 @@ class Task(object): Systems filter by these. :param payload: Any instance of :py:class:`dict` - contains resources \ and additional informations + :param headers_persistent: Persistent headers for whole task subtree, \ + propagated from initial task. 
:param payload_persistent: Persistent payload set for whole task subtree, \ propagated from initial task :param priority: Priority of whole task subtree, \ @@ -64,6 +66,7 @@ def __init__( self, headers: Dict[str, Any], payload: Optional[Dict[str, Any]] = None, + headers_persistent: Optional[Dict[str, Any]] = None, payload_persistent: Optional[Dict[str, Any]] = None, priority: Optional[TaskPriority] = None, parent_uid: Optional[str] = None, @@ -74,10 +77,14 @@ def __init__( ) -> None: payload = payload or {} payload_persistent = payload_persistent or {} + headers_persistent = headers_persistent or {} + if not isinstance(payload, dict): raise ValueError("Payload should be an instance of a dict") if not isinstance(payload_persistent, dict): raise ValueError("Persistent payload should be an instance of a dict") + if not isinstance(headers_persistent, dict): + raise ValueError("Persistent headers should be an instance of a dict") if uid is None: self.uid = str(uuid.uuid4()) @@ -93,7 +100,8 @@ def __init__( self.parent_uid = parent_uid self.error = error - self.headers = headers + self.headers = {**headers, **headers_persistent} + self._headers_persistent_keys = set(headers_persistent.keys()) self.status = TaskState.DECLARED self.last_update: float = time.time() @@ -102,6 +110,12 @@ def __init__( self.payload = dict(payload) self.payload_persistent = dict(payload_persistent) + @property + def headers_persistent(self) -> Dict[str, Any]: + return { + k: v for k, v in self.headers.items() if k in self._headers_persistent_keys + } + def fork_task(self) -> "Task": """ Fork task to transfer single task to many queues (but use different UID). 
@@ -114,6 +128,7 @@ def fork_task(self) -> "Task": """ new_task = Task( headers=self.headers, + headers_persistent=self.headers_persistent, payload=self.payload, payload_persistent=self.payload_persistent, priority=self.priority, @@ -160,6 +175,7 @@ def process(self, task: Task) -> None: """ new_task = Task( headers=headers, + headers_persistent=self.headers_persistent, payload=self.payload, payload_persistent=self.payload_persistent, ) @@ -227,6 +243,19 @@ def merge_persistent_payload(self, other_task: "Task") -> None: # Delete conflicting non-persistent payload del self.payload[name] + def merge_persistent_headers(self, other_task: "Task") -> None: + """ + Merge persistent headers from another task + + :param other_task: Task from which to merge persistent headers + + :meta private: + """ + self.headers.update(other_task.headers_persistent) + self._headers_persistent_keys = self._headers_persistent_keys.union( + other_task._headers_persistent_keys + ) + def to_dict(self) -> Dict[str, Any]: """ Transform task data into dictionary @@ -245,6 +274,16 @@ def serialize_resources(obj): else: return obj + headers_persistent = self.headers_persistent + payload_persistent = { + **self.payload_persistent, + # Compatibility with Karton <5.2.0 + # Consumers <5.2.0 are not merging headers_persistent + # from previous task, so we need to hide it there to + # let karton-system fix it for us during deserialization + "__headers_persistent": headers_persistent, + } + return { "uid": self.uid, "root_uid": self.root_uid, @@ -254,8 +293,9 @@ def serialize_resources(obj): "priority": self.priority.value, "last_update": self.last_update, "payload": serialize_resources(self.payload), - "payload_persistent": serialize_resources(self.payload_persistent), + "payload_persistent": serialize_resources(payload_persistent), "headers": self.headers, + "headers_persistent": headers_persistent, "error": self.error, } @@ -350,7 +390,18 @@ def unserialize_resources(value: Any) -> Any: task_data = 
json.loads(data, object_hook=unserialize_resources) - task = Task(task_data["headers"]) + # Compatibility with Karton <5.2.0 + headers_persistent_fallback = task_data["payload_persistent"].get( + "__headers_persistent", None + ) + headers_persistent = task_data.get( + "headers_persistent", headers_persistent_fallback + ) + + task = Task( + task_data["headers"], + headers_persistent=headers_persistent, + ) task.uid = task_data["uid"] task.root_uid = task_data["root_uid"] task.parent_uid = task_data["parent_uid"] @@ -460,6 +511,15 @@ def has_payload(self, name: str) -> bool: """ return name in self.payload or name in self.payload_persistent + def is_header_persistent(self, name: str) -> bool: + """ + Checks whether header exists and is persistent + + :param name: Name of the header to be checked + :return: If task's header with given name is persistent + """ + return name in self._headers_persistent_keys + def is_payload_persistent(self, name: str) -> bool: """ Checks whether payload exists and is persistent