# File: ledgerdock/backend/app/api/routes_processing_logs.py
# Snapshot: 2026-02-21 09:44:18 -03:00 (67 lines, 2.0 KiB, Python)
"""Read-only API endpoints for processing pipeline event logs."""
from uuid import UUID
from fastapi import APIRouter, Depends, Query
from sqlalchemy.orm import Session
from app.db.base import get_session
from app.schemas.processing_logs import ProcessingLogEntryResponse, ProcessingLogListResponse
from app.services.processing_logs import (
cleanup_processing_logs,
clear_processing_logs,
count_processing_logs,
list_processing_logs,
)
router = APIRouter()
@router.get("", response_model=ProcessingLogListResponse)
def get_processing_logs(
    offset: int = Query(default=0, ge=0),
    limit: int = Query(default=120, ge=1, le=400),
    document_id: UUID | None = Query(default=None),
    session: Session = Depends(get_session),
) -> ProcessingLogListResponse:
    """Fetch one page of processing log events, newest first.

    Both the page and the total are computed with the same optional
    ``document_id`` filter so pagination counts stay consistent.
    """
    matching_total = count_processing_logs(session=session, document_id=document_id)
    page = list_processing_logs(
        session=session,
        limit=limit,
        offset=offset,
        document_id=document_id,
    )
    serialized = [ProcessingLogEntryResponse.model_validate(entry) for entry in page]
    return ProcessingLogListResponse(total=matching_total, items=serialized)
@router.post("/trim")
def trim_processing_logs(
    keep_document_sessions: int = Query(default=2, ge=0, le=20),
    keep_unbound_entries: int = Query(default=80, ge=0, le=400),
    session: Session = Depends(get_session),
) -> dict[str, int]:
    """Prune aged processing logs.

    Retains the most recent ``keep_document_sessions`` per-document sessions
    and a bounded tail of ``keep_unbound_entries`` events not tied to any
    document; everything older is deleted. Returns the service's deletion
    counters.
    """
    deletion_stats = cleanup_processing_logs(
        session=session,
        keep_document_sessions=keep_document_sessions,
        keep_unbound_entries=keep_unbound_entries,
    )
    # The cleanup service only stages deletes; persist them before responding.
    session.commit()
    return deletion_stats
@router.post("/clear")
def clear_all_processing_logs(session: Session = Depends(get_session)) -> dict[str, int]:
    """Delete every processing log entry, fully resetting the diagnostics timeline."""
    removal_stats = clear_processing_logs(session=session)
    # Commit so the wipe is durable before the counters are returned.
    session.commit()
    return removal_stats