# File: ledgerdock/backend/app/schemas/processing_logs.py (58 lines, 1.8 KiB, Python)
"""Pydantic schemas for processing pipeline log API payloads."""
from datetime import datetime
from typing import Any
from uuid import UUID

from pydantic import BaseModel, ConfigDict, Field, field_validator

from app.models.processing_log import (
    sanitize_processing_log_payload_value,
    sanitize_processing_log_text,
)
class ProcessingLogEntryResponse(BaseModel):
    """Represents one persisted processing log event with already-redacted sensitive fields.

    The ``mode="before"`` validators re-apply redaction on the way out as a
    safety net, so even values loaded straight from the ORM pass through the
    sanitizers before serialization.
    """

    # Pydantic v2 idiom for ORM parsing; replaces the deprecated inner
    # `class Config` with `from_attributes = True` used previously.
    model_config = ConfigDict(from_attributes=True)

    id: int
    created_at: datetime
    level: str
    stage: str
    event: str
    document_id: UUID | None
    document_filename: str | None
    provider_id: str | None
    model_name: str | None
    prompt_text: str | None
    response_text: str | None
    payload_json: dict[str, Any]

    @field_validator("prompt_text", "response_text", mode="before")
    @classmethod
    def _sanitize_text_fields(cls, value: Any) -> str | None:
        """Ensures log text fields are redacted in API responses.

        ``None`` passes through unchanged; any other value is coerced to
        ``str`` and run through the shared log-text sanitizer.
        """
        if value is None:
            return None
        return sanitize_processing_log_text(str(value))

    @field_validator("payload_json", mode="before")
    @classmethod
    def _sanitize_payload_field(cls, value: Any) -> dict[str, Any]:
        """Ensures payload fields are redacted in API responses.

        Non-dict inputs collapse to an empty dict, and the sanitizer's
        result is only trusted if it is still a dict — otherwise the field
        is emptied rather than leaking an unexpected shape.
        """
        if not isinstance(value, dict):
            return {}
        sanitized = sanitize_processing_log_payload_value(value)
        return sanitized if isinstance(sanitized, dict) else {}
class ProcessingLogListResponse(BaseModel):
    """One page of processing log records plus the overall record count."""

    # Count of matching records (not just the length of `items`,
    # presumably — confirm against the endpoint that builds this).
    total: int
    # Entries for the current page; defaults to an empty page.
    items: list[ProcessingLogEntryResponse] = Field(default_factory=list)