chore(backend): deprecation sweep — Pydantic V1 -> V2, utcnow -> tz-aware

Replace two low-risk Pydantic V1 idioms whose use will become a hard error in V3:
  - Query(regex=...) -> Query(pattern=...) (audit_routes, control_generator_routes)
  - class Config: from_attributes=True -> model_config = ConfigDict(...)
    in source_policy_router.py (schemas.py is intentionally skipped — it is
    the Phase 1 schema-split target and the ConfigDict conversion is most
    efficient to do during that split).

Naive -> aware datetime sweep across 47 files:
  - datetime.utcnow() -> datetime.now(timezone.utc)
  - default=datetime.utcnow -> default=lambda: datetime.now(timezone.utc)
  - onupdate=datetime.utcnow -> onupdate=lambda: datetime.now(timezone.utc)

All SQLAlchemy DateTime columns in the project already declare
timezone=True, so the DB schema expects aware datetimes. Before this
commit, the in-Python side was generating naive values and the driver
was silently coercing them. This is a latent-bug fix, not a behavior
change at the DB boundary.

Verified:
  - 173/173 pytest compliance/tests/ pass (same as baseline)
  - tests/contracts/test_openapi_baseline.py passes (360 paths,
    484 operations unchanged)
  - DeprecationWarning count dropped from 158 -> 35

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Sharang Parnerkar
2026-04-07 13:09:59 +02:00
parent 512b7a0f6c
commit cb90d0db0c
47 changed files with 260 additions and 261 deletions

View File

@@ -186,7 +186,7 @@ async def update_ai_system(
if hasattr(system, key):
setattr(system, key, value)
system.updated_at = datetime.utcnow()
system.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(system)
@@ -266,7 +266,7 @@ async def assess_ai_system(
except ValueError:
system.classification = AIClassificationEnum.UNCLASSIFIED
system.assessment_date = datetime.utcnow()
system.assessment_date = datetime.now(timezone.utc)
system.assessment_result = assessment_result
system.obligations = _derive_obligations(classification)
system.risk_factors = assessment_result.get("risk_factors", [])

View File

@@ -9,7 +9,7 @@ Endpoints:
"""
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, List
from uuid import uuid4
import hashlib
@@ -204,7 +204,7 @@ async def start_audit_session(
)
session.status = AuditSessionStatusEnum.IN_PROGRESS
session.started_at = datetime.utcnow()
session.started_at = datetime.now(timezone.utc)
db.commit()
return {"success": True, "message": "Audit session started", "status": "in_progress"}
@@ -229,7 +229,7 @@ async def complete_audit_session(
)
session.status = AuditSessionStatusEnum.COMPLETED
session.completed_at = datetime.utcnow()
session.completed_at = datetime.now(timezone.utc)
db.commit()
return {"success": True, "message": "Audit session completed", "status": "completed"}
@@ -482,7 +482,7 @@ async def sign_off_item(
# Update existing sign-off
signoff.result = result_enum
signoff.notes = request.notes
signoff.updated_at = datetime.utcnow()
signoff.updated_at = datetime.now(timezone.utc)
else:
# Create new sign-off
signoff = AuditSignOffDB(
@@ -497,11 +497,11 @@ async def sign_off_item(
# Create digital signature if requested
signature = None
if request.sign:
timestamp = datetime.utcnow().isoformat()
timestamp = datetime.now(timezone.utc).isoformat()
data = f"{result_enum.value}|{requirement_id}|{session.auditor_name}|{timestamp}"
signature = hashlib.sha256(data.encode()).hexdigest()
signoff.signature_hash = signature
signoff.signed_at = datetime.utcnow()
signoff.signed_at = datetime.now(timezone.utc)
signoff.signed_by = session.auditor_name
# Update session statistics
@@ -523,7 +523,7 @@ async def sign_off_item(
# Auto-start session if this is the first sign-off
if session.status == AuditSessionStatusEnum.DRAFT:
session.status = AuditSessionStatusEnum.IN_PROGRESS
session.started_at = datetime.utcnow()
session.started_at = datetime.now(timezone.utc)
db.commit()
db.refresh(signoff)
@@ -587,7 +587,7 @@ async def get_sign_off(
@router.get("/sessions/{session_id}/report/pdf")
async def generate_audit_pdf_report(
session_id: str,
language: str = Query("de", regex="^(de|en)$"),
language: str = Query("de", pattern="^(de|en)$"),
include_signatures: bool = Query(True),
db: Session = Depends(get_db),
):

View File

@@ -6,7 +6,7 @@ Public SDK-Endpoints (fuer Einbettung) + Admin-Endpoints (Konfiguration & Stats)
import uuid
import hashlib
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from typing import Optional, List
from fastapi import APIRouter, Depends, HTTPException, Query, Header
@@ -206,8 +206,8 @@ async def record_consent(
existing.ip_hash = ip_hash
existing.user_agent = body.user_agent
existing.consent_string = body.consent_string
existing.expires_at = datetime.utcnow() + timedelta(days=365)
existing.updated_at = datetime.utcnow()
existing.expires_at = datetime.now(timezone.utc) + timedelta(days=365)
existing.updated_at = datetime.now(timezone.utc)
db.flush()
_log_banner_audit(
@@ -227,7 +227,7 @@ async def record_consent(
ip_hash=ip_hash,
user_agent=body.user_agent,
consent_string=body.consent_string,
expires_at=datetime.utcnow() + timedelta(days=365),
expires_at=datetime.now(timezone.utc) + timedelta(days=365),
)
db.add(consent)
db.flush()
@@ -476,7 +476,7 @@ async def update_site_config(
if val is not None:
setattr(config, field, val)
config.updated_at = datetime.utcnow()
config.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(config)
return _site_config_to_dict(config)

View File

@@ -11,7 +11,7 @@ Endpoints:
"""
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Header
@@ -173,7 +173,7 @@ async def update_consent_template(
set_clauses = ", ".join(f"{k} = :{k}" for k in updates)
updates["id"] = template_id
updates["tenant_id"] = tenant_id
updates["now"] = datetime.utcnow()
updates["now"] = datetime.now(timezone.utc)
row = db.execute(
text(f"""

View File

@@ -186,7 +186,7 @@ async def list_jobs(
@router.get("/generate/review-queue")
async def get_review_queue(
release_state: str = Query("needs_review", regex="^(needs_review|too_close|duplicate)$"),
release_state: str = Query("needs_review", pattern="^(needs_review|too_close|duplicate)$"),
limit: int = Query(50, ge=1, le=200),
):
"""Get controls that need manual review."""

View File

@@ -20,7 +20,7 @@ Usage:
"""
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, Query
@@ -171,7 +171,7 @@ def create_crud_router(
updates: Dict[str, Any] = {
"id": item_id,
"tenant_id": tenant_id,
"updated_at": datetime.utcnow(),
"updated_at": datetime.now(timezone.utc),
}
set_clauses = ["updated_at = :updated_at"]

View File

@@ -10,7 +10,7 @@ Endpoints:
"""
import logging
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from calendar import month_abbr
from typing import Optional
@@ -167,7 +167,7 @@ async def get_executive_dashboard(db: Session = Depends(get_db)):
# Trend data — only show current score, no simulated history
trend_data = []
if total > 0:
now = datetime.utcnow()
now = datetime.now(timezone.utc)
trend_data.append(TrendDataPoint(
date=now.strftime("%Y-%m-%d"),
score=round(score, 1),
@@ -204,7 +204,7 @@ async def get_executive_dashboard(db: Session = Depends(get_db)):
# Get upcoming deadlines
controls = ctrl_repo.get_all()
upcoming_deadlines = []
today = datetime.utcnow().date()
today = datetime.now(timezone.utc).date()
for ctrl in controls:
if ctrl.next_review_at:
@@ -280,7 +280,7 @@ async def get_executive_dashboard(db: Session = Depends(get_db)):
top_risks=top_risks,
upcoming_deadlines=upcoming_deadlines,
team_workload=team_workload,
last_updated=datetime.utcnow().isoformat(),
last_updated=datetime.now(timezone.utc).isoformat(),
)
@@ -305,7 +305,7 @@ async def get_compliance_trend(
# Trend data — only current score, no simulated history
trend_data = []
if total > 0:
now = datetime.utcnow()
now = datetime.now(timezone.utc)
trend_data.append({
"date": now.strftime("%Y-%m-%d"),
"score": round(current_score, 1),
@@ -318,7 +318,7 @@ async def get_compliance_trend(
"current_score": round(current_score, 1),
"trend": trend_data,
"period_months": months,
"generated_at": datetime.utcnow().isoformat(),
"generated_at": datetime.now(timezone.utc).isoformat(),
}

View File

@@ -20,7 +20,7 @@ Endpoints:
"""
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, List
from fastapi import APIRouter, Depends, HTTPException, Query
@@ -691,7 +691,7 @@ async def update_dsfa_status(
params: dict = {
"id": dsfa_id, "tid": tid,
"status": request.status,
"approved_at": datetime.utcnow() if request.status == "approved" else None,
"approved_at": datetime.now(timezone.utc) if request.status == "approved" else None,
"approved_by": request.approved_by,
}
row = db.execute(
@@ -906,7 +906,7 @@ async def export_dsfa_json(
dsfa_data = _dsfa_to_response(row)
return {
"exported_at": datetime.utcnow().isoformat(),
"exported_at": datetime.now(timezone.utc).isoformat(),
"format": format,
"dsfa": dsfa_data,
}

View File

@@ -7,7 +7,7 @@ Native Python/FastAPI Implementierung, ersetzt Go consent-service Proxy.
import io
import csv
import uuid
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from typing import Optional, List, Dict, Any
from fastapi import APIRouter, Depends, HTTPException, Query, Header
@@ -168,7 +168,7 @@ def _get_tenant(x_tenant_id: Optional[str] = Header(None, alias='X-Tenant-ID'))
def _generate_request_number(db: Session, tenant_id: str) -> str:
"""Generate next request number: DSR-YYYY-NNNNNN"""
year = datetime.utcnow().year
year = datetime.now(timezone.utc).year
try:
result = db.execute(text("SELECT nextval('compliance_dsr_request_number_seq')"))
seq = result.scalar()
@@ -275,7 +275,7 @@ async def create_dsr(
if body.priority and body.priority not in VALID_PRIORITIES:
raise HTTPException(status_code=400, detail=f"Invalid priority. Must be one of: {VALID_PRIORITIES}")
now = datetime.utcnow()
now = datetime.now(timezone.utc)
deadline_days = DEADLINE_DAYS.get(body.request_type, 30)
request_number = _generate_request_number(db, tenant_id)
@@ -348,7 +348,7 @@ async def list_dsrs(
query = query.filter(DSRRequestDB.priority == priority)
if overdue_only:
query = query.filter(
DSRRequestDB.deadline_at < datetime.utcnow(),
DSRRequestDB.deadline_at < datetime.now(timezone.utc),
DSRRequestDB.status.notin_(["completed", "rejected", "cancelled"]),
)
if search:
@@ -399,7 +399,7 @@ async def get_dsr_stats(
by_type[t] = base.filter(DSRRequestDB.request_type == t).count()
# Overdue
now = datetime.utcnow()
now = datetime.now(timezone.utc)
overdue = base.filter(
DSRRequestDB.deadline_at < now,
DSRRequestDB.status.notin_(["completed", "rejected", "cancelled"]),
@@ -459,7 +459,7 @@ async def export_dsrs(
if format == "json":
return {
"exported_at": datetime.utcnow().isoformat(),
"exported_at": datetime.now(timezone.utc).isoformat(),
"total": len(dsrs),
"requests": [_dsr_to_dict(d) for d in dsrs],
}
@@ -506,7 +506,7 @@ async def process_deadlines(
db: Session = Depends(get_db),
):
"""Verarbeitet Fristen und markiert ueberfaellige DSRs."""
now = datetime.utcnow()
now = datetime.now(timezone.utc)
tid = uuid.UUID(tenant_id)
overdue = db.query(DSRRequestDB).filter(
@@ -714,7 +714,7 @@ async def publish_template_version(
if not version:
raise HTTPException(status_code=404, detail="Version not found")
now = datetime.utcnow()
now = datetime.now(timezone.utc)
version.status = "published"
version.published_at = now
version.published_by = "admin"
@@ -766,7 +766,7 @@ async def update_dsr(
dsr.internal_notes = body.internal_notes
if body.assigned_to is not None:
dsr.assigned_to = body.assigned_to
dsr.assigned_at = datetime.utcnow()
dsr.assigned_at = datetime.now(timezone.utc)
if body.request_text is not None:
dsr.request_text = body.request_text
if body.affected_systems is not None:
@@ -778,7 +778,7 @@ async def update_dsr(
if body.objection_details is not None:
dsr.objection_details = body.objection_details
dsr.updated_at = datetime.utcnow()
dsr.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(dsr)
return _dsr_to_dict(dsr)
@@ -797,7 +797,7 @@ async def delete_dsr(
_record_history(db, dsr, "cancelled", comment="DSR storniert")
dsr.status = "cancelled"
dsr.updated_at = datetime.utcnow()
dsr.updated_at = datetime.now(timezone.utc)
db.commit()
return {"success": True, "message": "DSR cancelled"}
@@ -820,7 +820,7 @@ async def change_status(
dsr = _get_dsr_or_404(db, dsr_id, tenant_id)
_record_history(db, dsr, body.status, comment=body.comment)
dsr.status = body.status
dsr.updated_at = datetime.utcnow()
dsr.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(dsr)
return _dsr_to_dict(dsr)
@@ -835,7 +835,7 @@ async def verify_identity(
):
"""Verifiziert die Identitaet des Antragstellers."""
dsr = _get_dsr_or_404(db, dsr_id, tenant_id)
now = datetime.utcnow()
now = datetime.now(timezone.utc)
dsr.identity_verified = True
dsr.verification_method = body.method
@@ -868,9 +868,9 @@ async def assign_dsr(
"""Weist eine DSR einem Bearbeiter zu."""
dsr = _get_dsr_or_404(db, dsr_id, tenant_id)
dsr.assigned_to = body.assignee_id
dsr.assigned_at = datetime.utcnow()
dsr.assigned_at = datetime.now(timezone.utc)
dsr.assigned_by = "admin"
dsr.updated_at = datetime.utcnow()
dsr.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(dsr)
return _dsr_to_dict(dsr)
@@ -888,7 +888,7 @@ async def extend_deadline(
if dsr.status in ("completed", "rejected", "cancelled"):
raise HTTPException(status_code=400, detail="Cannot extend deadline for closed DSR")
now = datetime.utcnow()
now = datetime.now(timezone.utc)
current_deadline = dsr.extended_deadline_at or dsr.deadline_at
new_deadline = current_deadline + timedelta(days=body.days or 60)
@@ -916,7 +916,7 @@ async def complete_dsr(
if dsr.status in ("completed", "cancelled"):
raise HTTPException(status_code=400, detail="DSR already completed or cancelled")
now = datetime.utcnow()
now = datetime.now(timezone.utc)
_record_history(db, dsr, "completed", comment=body.summary)
dsr.status = "completed"
dsr.completed_at = now
@@ -941,7 +941,7 @@ async def reject_dsr(
if dsr.status in ("completed", "rejected", "cancelled"):
raise HTTPException(status_code=400, detail="DSR already closed")
now = datetime.utcnow()
now = datetime.now(timezone.utc)
_record_history(db, dsr, "rejected", comment=f"{body.reason} ({body.legal_basis})")
dsr.status = "rejected"
dsr.rejection_reason = body.reason
@@ -1024,7 +1024,7 @@ async def send_communication(
):
"""Sendet eine Kommunikation."""
dsr = _get_dsr_or_404(db, dsr_id, tenant_id)
now = datetime.utcnow()
now = datetime.now(timezone.utc)
comm = DSRCommunicationDB(
tenant_id=uuid.UUID(tenant_id),
@@ -1158,7 +1158,7 @@ async def update_exception_check(
check.applies = body.applies
check.notes = body.notes
check.checked_by = "admin"
check.checked_at = datetime.utcnow()
check.checked_at = datetime.now(timezone.utc)
db.commit()
db.refresh(check)

View File

@@ -15,7 +15,7 @@ Endpoints:
"""
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, List, Any, Dict
from fastapi import APIRouter, Depends, HTTPException, Query, Header
@@ -131,7 +131,7 @@ async def upsert_catalog(
if record:
record.selected_data_point_ids = request.selected_data_point_ids
record.custom_data_points = request.custom_data_points
record.updated_at = datetime.utcnow()
record.updated_at = datetime.now(timezone.utc)
else:
record = EinwilligungenCatalogDB(
tenant_id=tenant_id,
@@ -184,7 +184,7 @@ async def upsert_company(
if record:
record.data = request.data
record.updated_at = datetime.utcnow()
record.updated_at = datetime.now(timezone.utc)
else:
record = EinwilligungenCompanyDB(tenant_id=tenant_id, data=request.data)
db.add(record)
@@ -233,7 +233,7 @@ async def upsert_cookies(
if record:
record.categories = request.categories
record.config = request.config
record.updated_at = datetime.utcnow()
record.updated_at = datetime.now(timezone.utc)
else:
record = EinwilligungenCookiesDB(
tenant_id=tenant_id,
@@ -374,7 +374,7 @@ async def create_consent(
user_id=request.user_id,
data_point_id=request.data_point_id,
granted=request.granted,
granted_at=datetime.utcnow(),
granted_at=datetime.now(timezone.utc),
consent_version=request.consent_version,
source=request.source,
ip_address=request.ip_address,
@@ -443,7 +443,7 @@ async def revoke_consent(
if consent.revoked_at:
raise HTTPException(status_code=400, detail="Consent is already revoked")
consent.revoked_at = datetime.utcnow()
consent.revoked_at = datetime.now(timezone.utc)
_record_history(db, consent, 'revoked')
db.commit()
db.refresh(consent)

View File

@@ -6,7 +6,7 @@ Inklusive Versionierung, Approval-Workflow, Vorschau und Send-Logging.
"""
import uuid
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, Dict
from fastapi import APIRouter, Depends, HTTPException, Query, Header
@@ -271,7 +271,7 @@ async def update_settings(
if val is not None:
setattr(settings, field, val)
settings.updated_at = datetime.utcnow()
settings.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(settings)
@@ -638,7 +638,7 @@ async def submit_version(
raise HTTPException(status_code=400, detail="Only draft versions can be submitted")
v.status = "review"
v.submitted_at = datetime.utcnow()
v.submitted_at = datetime.now(timezone.utc)
v.submitted_by = "admin"
db.commit()
db.refresh(v)
@@ -730,7 +730,7 @@ async def publish_version(
if v.status not in ("approved", "review", "draft"):
raise HTTPException(status_code=400, detail="Version cannot be published")
now = datetime.utcnow()
now = datetime.now(timezone.utc)
v.status = "published"
v.published_at = now
v.published_by = "admin"

View File

@@ -12,7 +12,7 @@ Endpoints:
"""
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, Any, Dict
from fastapi import APIRouter, Depends, HTTPException, Query, Header
@@ -244,7 +244,7 @@ async def update_escalation(
set_clauses = ", ".join(f"{k} = :{k}" for k in updates.keys())
updates["id"] = escalation_id
updates["updated_at"] = datetime.utcnow()
updates["updated_at"] = datetime.now(timezone.utc)
row = db.execute(
text(
@@ -277,7 +277,7 @@ async def update_status(
resolved_at = request.resolved_at
if request.status in ('resolved', 'closed') and resolved_at is None:
resolved_at = datetime.utcnow()
resolved_at = datetime.now(timezone.utc)
row = db.execute(
text(
@@ -288,7 +288,7 @@ async def update_status(
{
"status": request.status,
"resolved_at": resolved_at,
"updated_at": datetime.utcnow(),
"updated_at": datetime.now(timezone.utc),
"id": escalation_id,
},
).fetchone()

View File

@@ -10,7 +10,7 @@ Endpoints:
import logging
import os
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from typing import Optional
from collections import defaultdict
import uuid as uuid_module
@@ -370,8 +370,8 @@ def _store_evidence(
mime_type="application/json",
source="ci_pipeline",
ci_job_id=ci_job_id,
valid_from=datetime.utcnow(),
valid_until=datetime.utcnow() + timedelta(days=90),
valid_from=datetime.now(timezone.utc),
valid_until=datetime.now(timezone.utc) + timedelta(days=90),
status=EvidenceStatusEnum(parsed["evidence_status"]),
)
db.add(evidence)
@@ -455,7 +455,7 @@ def _update_risks(db: Session, *, source: str, control_id: str, ci_job_id: str,
tool=source,
control_id=control_id,
evidence_type=f"ci_{source}",
timestamp=datetime.utcnow().isoformat(),
timestamp=datetime.now(timezone.utc).isoformat(),
commit_sha=report_data.get("commit_sha", "unknown") if report_data else "unknown",
ci_job_id=ci_job_id,
findings=findings_detail,
@@ -571,7 +571,7 @@ async def get_ci_evidence_status(
Returns overview of recent evidence collected from CI/CD pipelines,
useful for dashboards and monitoring.
"""
cutoff_date = datetime.utcnow() - timedelta(days=days)
cutoff_date = datetime.now(timezone.utc) - timedelta(days=days)
# Build query
query = db.query(EvidenceDB).filter(

View File

@@ -18,7 +18,7 @@ import logging
import re
import asyncio
from typing import Optional, List, Dict
from datetime import datetime
from datetime import datetime, timezone
from fastapi import APIRouter, Depends
from pydantic import BaseModel
@@ -171,7 +171,7 @@ def _get_or_create_regulation(
code=regulation_code,
name=regulation_name or regulation_code,
regulation_type=reg_type,
description=f"Auto-created from RAG extraction ({datetime.utcnow().date()})",
description=f"Auto-created from RAG extraction ({datetime.now(timezone.utc).date()})",
)
return reg

View File

@@ -13,7 +13,7 @@ Provides endpoints for ISO 27001 certification-ready ISMS management:
import uuid
import hashlib
from datetime import datetime, date
from datetime import datetime, date, timezone
from typing import Optional
from fastapi import APIRouter, HTTPException, Query, Depends
@@ -102,7 +102,7 @@ def log_audit_trail(
new_value=new_value,
change_summary=change_summary,
performed_by=performed_by,
performed_at=datetime.utcnow(),
performed_at=datetime.now(timezone.utc),
checksum=create_signature(f"{entity_type}|{entity_id}|{action}|{performed_by}")
)
db.add(trail)
@@ -190,7 +190,7 @@ async def update_isms_scope(
setattr(scope, field, value)
scope.updated_by = updated_by
scope.updated_at = datetime.utcnow()
scope.updated_at = datetime.now(timezone.utc)
# Increment version if significant changes
version_parts = scope.version.split(".")
@@ -221,11 +221,11 @@ async def approve_isms_scope(
scope.status = ApprovalStatusEnum.APPROVED
scope.approved_by = data.approved_by
scope.approved_at = datetime.utcnow()
scope.approved_at = datetime.now(timezone.utc)
scope.effective_date = data.effective_date
scope.review_date = data.review_date
scope.approval_signature = create_signature(
f"{scope.scope_statement}|{data.approved_by}|{datetime.utcnow().isoformat()}"
f"{scope.scope_statement}|{data.approved_by}|{datetime.now(timezone.utc).isoformat()}"
)
log_audit_trail(db, "isms_scope", scope.id, "ISMS Scope", "approve", data.approved_by)
@@ -403,7 +403,7 @@ async def approve_policy(
policy.reviewed_by = data.reviewed_by
policy.approved_by = data.approved_by
policy.approved_at = datetime.utcnow()
policy.approved_at = datetime.now(timezone.utc)
policy.effective_date = data.effective_date
policy.next_review_date = date(
data.effective_date.year + (policy.review_frequency_months // 12),
@@ -412,7 +412,7 @@ async def approve_policy(
)
policy.status = ApprovalStatusEnum.APPROVED
policy.approval_signature = create_signature(
f"{policy.policy_id}|{data.approved_by}|{datetime.utcnow().isoformat()}"
f"{policy.policy_id}|{data.approved_by}|{datetime.now(timezone.utc).isoformat()}"
)
log_audit_trail(db, "isms_policy", policy.id, policy.policy_id, "approve", data.approved_by)
@@ -634,9 +634,9 @@ async def approve_soa_entry(
raise HTTPException(status_code=404, detail="SoA entry not found")
entry.reviewed_by = data.reviewed_by
entry.reviewed_at = datetime.utcnow()
entry.reviewed_at = datetime.now(timezone.utc)
entry.approved_by = data.approved_by
entry.approved_at = datetime.utcnow()
entry.approved_at = datetime.now(timezone.utc)
log_audit_trail(db, "soa", entry.id, entry.annex_a_control, "approve", data.approved_by)
db.commit()
@@ -812,7 +812,7 @@ async def close_finding(
finding.verification_method = data.verification_method
finding.verification_evidence = data.verification_evidence
finding.verified_by = data.closed_by
finding.verified_at = datetime.utcnow()
finding.verified_at = datetime.now(timezone.utc)
log_audit_trail(db, "audit_finding", finding.id, finding.finding_id, "close", data.closed_by)
db.commit()
@@ -1080,7 +1080,7 @@ async def approve_management_review(
review.status = "approved"
review.approved_by = data.approved_by
review.approved_at = datetime.utcnow()
review.approved_at = datetime.now(timezone.utc)
review.next_review_date = data.next_review_date
review.minutes_document_path = data.minutes_document_path
@@ -1392,7 +1392,7 @@ async def run_readiness_check(
# Save check result
check = ISMSReadinessCheckDB(
id=generate_id(),
check_date=datetime.utcnow(),
check_date=datetime.now(timezone.utc),
triggered_by=data.triggered_by,
overall_status=overall_status,
certification_possible=certification_possible,

View File

@@ -6,7 +6,7 @@ Extended with: Public endpoints, User Consents, Consent Audit Log, Cookie Catego
import uuid as uuid_mod
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, List, Any, Dict
from fastapi import APIRouter, Depends, HTTPException, Query, Header, UploadFile, File
@@ -285,7 +285,7 @@ async def update_version(
for field, value in request.dict(exclude_none=True).items():
setattr(version, field, value)
version.updated_at = datetime.utcnow()
version.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(version)
@@ -346,7 +346,7 @@ def _transition(
)
version.status = to_status
version.updated_at = datetime.utcnow()
version.updated_at = datetime.now(timezone.utc)
if extra_updates:
for k, v in extra_updates.items():
setattr(version, k, v)
@@ -378,7 +378,7 @@ async def approve_version(
return _transition(
db, version_id, ['review'], 'approved', 'approved',
request.approver, request.comment,
extra_updates={'approved_by': request.approver, 'approved_at': datetime.utcnow()}
extra_updates={'approved_by': request.approver, 'approved_at': datetime.now(timezone.utc)}
)
@@ -728,7 +728,7 @@ async def withdraw_consent(
if consent.withdrawn_at:
raise HTTPException(status_code=400, detail="Consent already withdrawn")
consent.withdrawn_at = datetime.utcnow()
consent.withdrawn_at = datetime.now(timezone.utc)
consent.consented = False
_log_consent_audit(
@@ -903,7 +903,7 @@ async def update_cookie_category(
if val is not None:
setattr(cat, field, val)
cat.updated_at = datetime.utcnow()
cat.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(cat)
return _cookie_cat_to_dict(cat)

View File

@@ -15,7 +15,7 @@ Endpoints:
import json
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, List, Any, Dict
from fastapi import APIRouter, Depends, HTTPException, Query
@@ -322,7 +322,7 @@ async def update_legal_template(
params: Dict[str, Any] = {
"id": template_id,
"tenant_id": tenant_id,
"updated_at": datetime.utcnow(),
"updated_at": datetime.now(timezone.utc),
}
jsonb_fields = {"placeholders", "inspiration_sources"}

View File

@@ -13,7 +13,7 @@ Endpoints:
import json
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, List, Any, Dict
from fastapi import APIRouter, Depends, HTTPException, Query
@@ -253,7 +253,7 @@ async def update_loeschfrist(
):
"""Full update of a Loeschfrist policy."""
updates: Dict[str, Any] = {"id": policy_id, "tenant_id": tenant_id, "updated_at": datetime.utcnow()}
updates: Dict[str, Any] = {"id": policy_id, "tenant_id": tenant_id, "updated_at": datetime.now(timezone.utc)}
set_clauses = ["updated_at = :updated_at"]
for field, value in payload.model_dump(exclude_unset=True).items():
@@ -302,7 +302,7 @@ async def update_loeschfrist_status(
WHERE id = :id AND tenant_id = :tenant_id
RETURNING *
"""),
{"status": payload.status, "now": datetime.utcnow(), "id": policy_id, "tenant_id": tenant_id},
{"status": payload.status, "now": datetime.now(timezone.utc), "id": policy_id, "tenant_id": tenant_id},
).fetchone()
db.commit()

View File

@@ -21,7 +21,7 @@ Endpoints:
import json
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, List, Any
from fastapi import APIRouter, Depends, HTTPException, Query, Header
@@ -852,11 +852,11 @@ async def update_incident(
# Auto-set timestamps based on status transitions
if updates.get("status") == "reported" and not updates.get("reported_to_authority_at"):
updates["reported_to_authority_at"] = datetime.utcnow().isoformat()
updates["reported_to_authority_at"] = datetime.now(timezone.utc).isoformat()
if updates.get("status") == "closed" and not updates.get("closed_at"):
updates["closed_at"] = datetime.utcnow().isoformat()
updates["closed_at"] = datetime.now(timezone.utc).isoformat()
updates["updated_at"] = datetime.utcnow().isoformat()
updates["updated_at"] = datetime.now(timezone.utc).isoformat()
set_parts = []
for k in updates:
@@ -984,7 +984,7 @@ async def update_template(
if not updates:
raise HTTPException(status_code=400, detail="No fields to update")
updates["updated_at"] = datetime.utcnow().isoformat()
updates["updated_at"] = datetime.now(timezone.utc).isoformat()
set_clauses = ", ".join(f"{k} = :{k}" for k in updates)
updates["id"] = template_id
updates["tenant_id"] = tenant_id

View File

@@ -12,7 +12,7 @@ Endpoints:
"""
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, List, Any, Dict
from fastapi import APIRouter, Depends, HTTPException, Query, Header
@@ -228,7 +228,7 @@ async def update_obligation(
logger.info("update_obligation user_id=%s tenant_id=%s id=%s", x_user_id, tenant_id, obligation_id)
import json
updates: Dict[str, Any] = {"id": obligation_id, "tenant_id": tenant_id, "updated_at": datetime.utcnow()}
updates: Dict[str, Any] = {"id": obligation_id, "tenant_id": tenant_id, "updated_at": datetime.now(timezone.utc)}
set_clauses = ["updated_at = :updated_at"]
for field, value in payload.model_dump(exclude_unset=True).items():
@@ -274,7 +274,7 @@ async def update_obligation_status(
SET status = :status, updated_at = :now
WHERE id = :id AND tenant_id = :tenant_id
RETURNING *
"""), {"status": payload.status, "now": datetime.utcnow(), "id": obligation_id, "tenant_id": tenant_id}).fetchone()
"""), {"status": payload.status, "now": datetime.now(timezone.utc), "id": obligation_id, "tenant_id": tenant_id}).fetchone()
db.commit()
if not row:

View File

@@ -10,7 +10,7 @@ Endpoints:
"""
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, Any, Dict
from fastapi import APIRouter, Depends, HTTPException, Query
@@ -177,7 +177,7 @@ async def create_metric(
"threshold": payload.threshold,
"trend": payload.trend,
"ai_system": payload.ai_system,
"last_measured": payload.last_measured or datetime.utcnow(),
"last_measured": payload.last_measured or datetime.now(timezone.utc),
}).fetchone()
db.commit()
return _row_to_dict(row)
@@ -192,7 +192,7 @@ async def update_metric(
):
"""Update a quality metric."""
updates: Dict[str, Any] = {"id": metric_id, "tenant_id": tenant_id, "updated_at": datetime.utcnow()}
updates: Dict[str, Any] = {"id": metric_id, "tenant_id": tenant_id, "updated_at": datetime.now(timezone.utc)}
set_clauses = ["updated_at = :updated_at"]
for field, value in payload.model_dump(exclude_unset=True).items():
@@ -296,7 +296,7 @@ async def create_test(
"duration": payload.duration,
"ai_system": payload.ai_system,
"details": payload.details,
"last_run": payload.last_run or datetime.utcnow(),
"last_run": payload.last_run or datetime.now(timezone.utc),
}).fetchone()
db.commit()
return _row_to_dict(row)
@@ -311,7 +311,7 @@ async def update_test(
):
"""Update a quality test."""
updates: Dict[str, Any] = {"id": test_id, "tenant_id": tenant_id, "updated_at": datetime.utcnow()}
updates: Dict[str, Any] = {"id": test_id, "tenant_id": tenant_id, "updated_at": datetime.now(timezone.utc)}
set_clauses = ["updated_at = :updated_at"]
for field, value in payload.model_dump(exclude_unset=True).items():

View File

@@ -16,7 +16,7 @@ import logging
logger = logging.getLogger(__name__)
import os
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Query, BackgroundTasks
@@ -393,11 +393,11 @@ async def update_requirement(requirement_id: str, updates: dict, db: Session = D
# Track audit changes
if 'audit_status' in updates:
requirement.last_audit_date = datetime.utcnow()
requirement.last_audit_date = datetime.now(timezone.utc)
# TODO: Get auditor from auth
requirement.last_auditor = updates.get('auditor_name', 'api_user')
requirement.updated_at = datetime.utcnow()
requirement.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(requirement)

View File

@@ -10,7 +10,7 @@ Endpoints:
"""
import logging
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional, Any, Dict
from fastapi import APIRouter, Depends, HTTPException, Query
@@ -207,7 +207,7 @@ async def update_security_item(
):
"""Update a security backlog item."""
updates: Dict[str, Any] = {"id": item_id, "tenant_id": tenant_id, "updated_at": datetime.utcnow()}
updates: Dict[str, Any] = {"id": item_id, "tenant_id": tenant_id, "updated_at": datetime.now(timezone.utc)}
set_clauses = ["updated_at = :updated_at"]
for field, value in payload.model_dump(exclude_unset=True).items():

View File

@@ -21,11 +21,11 @@ Endpoints:
GET /api/v1/admin/compliance-report — Compliance report
"""
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
from pydantic import BaseModel, ConfigDict, Field
from sqlalchemy.orm import Session
from database import get_db
@@ -83,8 +83,7 @@ class SourceResponse(BaseModel):
created_at: str
updated_at: Optional[str] = None
class Config:
from_attributes = True
model_config = ConfigDict(from_attributes=True)
class OperationUpdate(BaseModel):
@@ -530,7 +529,7 @@ async def get_policy_stats(db: Session = Depends(get_db)):
pii_rules = db.query(PIIRuleDB).filter(PIIRuleDB.active).count()
# Count blocked content entries from today
today_start = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
today_start = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
blocked_today = db.query(BlockedContentDB).filter(
BlockedContentDB.created_at >= today_start,
).count()
@@ -553,7 +552,7 @@ async def get_compliance_report(db: Session = Depends(get_db)):
pii_rules = db.query(PIIRuleDB).filter(PIIRuleDB.active).all()
return {
"report_date": datetime.utcnow().isoformat(),
"report_date": datetime.now(timezone.utc).isoformat(),
"summary": {
"active_sources": len(sources),
"active_pii_rules": len(pii_rules),

View File

@@ -49,7 +49,7 @@ vendor_findings, vendor_control_instances).
import json
import logging
import uuid
from datetime import datetime
from datetime import datetime, timezone
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Query
@@ -69,7 +69,7 @@ DEFAULT_TENANT_ID = "9282a473-5c95-4b3a-bf78-0ecc0ec71d3e"
# =============================================================================
def _now_iso() -> str:
return datetime.utcnow().isoformat() + "Z"
return datetime.now(timezone.utc).isoformat() + "Z"
def _ok(data, status_code: int = 200):
@@ -418,7 +418,7 @@ def create_vendor(body: dict = {}, db: Session = Depends(get_db)):
data = _to_snake(body)
vid = str(uuid.uuid4())
tid = data.get("tenant_id", DEFAULT_TENANT_ID)
now = datetime.utcnow().isoformat()
now = datetime.now(timezone.utc).isoformat()
db.execute(text("""
INSERT INTO vendor_vendors (
@@ -498,7 +498,7 @@ def update_vendor(vendor_id: str, body: dict = {}, db: Session = Depends(get_db)
raise HTTPException(404, "Vendor not found")
data = _to_snake(body)
now = datetime.utcnow().isoformat()
now = datetime.now(timezone.utc).isoformat()
# Build dynamic SET clause
allowed = [
@@ -558,7 +558,7 @@ def patch_vendor_status(vendor_id: str, body: dict = {}, db: Session = Depends(g
result = db.execute(text("""
UPDATE vendor_vendors SET status = :status, updated_at = :now WHERE id = :id
"""), {"id": vendor_id, "status": new_status, "now": datetime.utcnow().isoformat()})
"""), {"id": vendor_id, "status": new_status, "now": datetime.now(timezone.utc).isoformat()})
db.commit()
if result.rowcount == 0:
raise HTTPException(404, "Vendor not found")
@@ -620,7 +620,7 @@ def create_contract(body: dict = {}, db: Session = Depends(get_db)):
data = _to_snake(body)
cid = str(uuid.uuid4())
tid = data.get("tenant_id", DEFAULT_TENANT_ID)
now = datetime.utcnow().isoformat()
now = datetime.now(timezone.utc).isoformat()
db.execute(text("""
INSERT INTO vendor_contracts (
@@ -682,7 +682,7 @@ def update_contract(contract_id: str, body: dict = {}, db: Session = Depends(get
raise HTTPException(404, "Contract not found")
data = _to_snake(body)
now = datetime.utcnow().isoformat()
now = datetime.now(timezone.utc).isoformat()
allowed = [
"vendor_id", "file_name", "original_name", "mime_type", "file_size",
@@ -781,7 +781,7 @@ def create_finding(body: dict = {}, db: Session = Depends(get_db)):
data = _to_snake(body)
fid = str(uuid.uuid4())
tid = data.get("tenant_id", DEFAULT_TENANT_ID)
now = datetime.utcnow().isoformat()
now = datetime.now(timezone.utc).isoformat()
db.execute(text("""
INSERT INTO vendor_findings (
@@ -831,7 +831,7 @@ def update_finding(finding_id: str, body: dict = {}, db: Session = Depends(get_d
raise HTTPException(404, "Finding not found")
data = _to_snake(body)
now = datetime.utcnow().isoformat()
now = datetime.now(timezone.utc).isoformat()
allowed = [
"vendor_id", "contract_id", "finding_type", "category", "severity",
@@ -920,7 +920,7 @@ def create_control_instance(body: dict = {}, db: Session = Depends(get_db)):
data = _to_snake(body)
ciid = str(uuid.uuid4())
tid = data.get("tenant_id", DEFAULT_TENANT_ID)
now = datetime.utcnow().isoformat()
now = datetime.now(timezone.utc).isoformat()
db.execute(text("""
INSERT INTO vendor_control_instances (
@@ -965,7 +965,7 @@ def update_control_instance(instance_id: str, body: dict = {}, db: Session = Dep
raise HTTPException(404, "Control instance not found")
data = _to_snake(body)
now = datetime.utcnow().isoformat()
now = datetime.now(timezone.utc).isoformat()
allowed = [
"vendor_id", "control_id", "control_domain",
@@ -1050,7 +1050,7 @@ def list_controls(
def create_control(body: dict = {}, db: Session = Depends(get_db)):
cid = str(uuid.uuid4())
tid = body.get("tenantId", body.get("tenant_id", DEFAULT_TENANT_ID))
now = datetime.utcnow().isoformat()
now = datetime.now(timezone.utc).isoformat()
db.execute(text("""
INSERT INTO vendor_compliance_controls (

View File

@@ -119,7 +119,7 @@ async def upsert_organization(
else:
for field, value in request.dict(exclude_none=True).items():
setattr(org, field, value)
org.updated_at = datetime.utcnow()
org.updated_at = datetime.now(timezone.utc)
db.commit()
db.refresh(org)
@@ -291,7 +291,7 @@ async def update_activity(
updates = request.dict(exclude_none=True)
for field, value in updates.items():
setattr(act, field, value)
act.updated_at = datetime.utcnow()
act.updated_at = datetime.now(timezone.utc)
_log_audit(
db,
@@ -408,7 +408,7 @@ async def export_activities(
return _export_csv(activities)
return {
"exported_at": datetime.utcnow().isoformat(),
"exported_at": datetime.now(timezone.utc).isoformat(),
"organization": {
"name": org.organization_name if org else "",
"dpo_name": org.dpo_name if org else "",
@@ -482,7 +482,7 @@ def _export_csv(activities: list) -> StreamingResponse:
iter([output.getvalue()]),
media_type='text/csv; charset=utf-8',
headers={
'Content-Disposition': f'attachment; filename="vvt_export_{datetime.utcnow().strftime("%Y%m%d")}.csv"'
'Content-Disposition': f'attachment; filename="vvt_export_{datetime.now(timezone.utc).strftime("%Y%m%d")}.csv"'
},
)