mirror of
https://github.com/complexcaresolutions/dak.c2s.git
synced 2026-03-17 20:43:41 +00:00
294 lines
9.1 KiB
Python
294 lines
9.1 KiB
Python
"""Import API routes — CSV upload/preview/confirm, ICD xlsx upload, import log."""
|
|
|
|
import logging
|
|
from datetime import datetime
|
|
from typing import Optional
|
|
|
|
from fastapi import APIRouter, Depends, File, HTTPException, Query, UploadFile, status
|
|
from pydantic import BaseModel
|
|
from sqlalchemy.orm import Session
|
|
|
|
from app.config import get_settings
|
|
from app.core.dependencies import get_current_user, require_admin
|
|
from app.database import get_db
|
|
from app.models.audit import ImportLog
|
|
from app.models.user import User
|
|
from app.schemas.import_schemas import ImportPreview, ImportResult
|
|
from app.services.audit_service import log_action
|
|
from app.services.csv_parser import parse_csv
|
|
from app.services.icd_service import import_icd_from_xlsx
|
|
from app.services.import_service import confirm_import, preview_import
|
|
|
|
logger = logging.getLogger(__name__)
|
|
settings = get_settings()
|
|
|
|
router = APIRouter()
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Response schemas (co-located for simplicity)
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
class ICDImportResponse(BaseModel):
    """Result of importing ICD codes from an xlsx file.

    Mirrors the dict returned by ``import_icd_from_xlsx`` — the endpoint
    builds this response via ``ICDImportResponse(**result)``, so the keys
    ``updated`` and ``errors`` must match.
    """

    updated: int  # count of cases whose ICD code was written/updated
    errors: list[str] = []  # per-row error messages; empty on a clean import
|
|
|
|
|
|
class ImportLogResponse(BaseModel):
    """Single import-log entry, serialized from an ``ImportLog`` ORM row."""

    id: int
    filename: str
    import_type: str  # e.g. "icd_xlsx" (set by the upload endpoints)
    cases_imported: int
    cases_skipped: int
    cases_updated: int
    errors: Optional[str] = None  # "; "-joined error messages, or None
    imported_by: Optional[int] = None  # id of the user who ran the import
    imported_at: datetime

    # Pydantic v2: allow constructing directly from SQLAlchemy ORM objects
    # (the /log endpoint passes ORM rows straight into `items`).
    model_config = {"from_attributes": True}
|
|
|
|
|
|
class ImportLogListResponse(BaseModel):
    """Paginated import-log response returned by ``GET /log``."""

    items: list[ImportLogResponse]  # one page of log entries, newest first
    total: int  # total matching rows across all pages
    page: int  # 1-based page number echoed back from the request
    per_page: int  # page size echoed back from the request
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# CSV Import — preview (dry-run)
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
@router.post("/csv", response_model=ImportPreview)
async def upload_csv_preview(
    file: UploadFile = File(...),
    db: Session = Depends(get_db),
    user: User = Depends(require_admin),
):
    """Upload a CRM CSV file and preview what will be imported.

    Returns a list of rows with duplicate detection. No data is written
    to the database — the caller must confirm via ``POST /csv/confirm``.

    Raises:
        HTTPException: 400 for a non-``.csv`` filename or an unparseable
            file, 413 when the upload exceeds ``settings.MAX_UPLOAD_SIZE``.
    """
    # Cheap filename check first — avoids reading the body on obvious misuse.
    if not file.filename or not file.filename.lower().endswith(".csv"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="File must be a .csv file",
        )

    content = await file.read()
    if len(content) > settings.MAX_UPLOAD_SIZE:
        raise HTTPException(
            status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
            detail=f"File exceeds maximum size of {settings.MAX_UPLOAD_SIZE // (1024 * 1024)}MB",
        )

    try:
        parsed = parse_csv(content, file.filename)
    except Exception as exc:
        logger.exception("CSV parsing failed for %s", file.filename)
        # Chain the parser error so the original traceback is preserved
        # as __cause__ instead of being discarded (PEP 3134 / ruff B904).
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"CSV parsing failed: {exc}",
        ) from exc

    # Dry run: preview_import performs duplicate detection only, no writes.
    return preview_import(db, parsed, file.filename)
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# CSV Import — confirm (write to DB)
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
@router.post("/csv/confirm", response_model=ImportResult)
async def confirm_csv_import(
    file: UploadFile = File(...),
    db: Session = Depends(get_db),
    user: User = Depends(require_admin),
):
    """Re-upload the same CSV file to confirm the import.

    The file is re-parsed and non-duplicate cases are inserted into the
    database. An entry is written to the ``import_log`` table.

    Raises:
        HTTPException: 400 for a non-``.csv`` filename or an unparseable
            file, 413 when the upload exceeds ``settings.MAX_UPLOAD_SIZE``.
    """
    # Same validation sequence as the preview endpoint, kept in sync.
    if not file.filename or not file.filename.lower().endswith(".csv"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="File must be a .csv file",
        )

    content = await file.read()
    if len(content) > settings.MAX_UPLOAD_SIZE:
        raise HTTPException(
            status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
            detail=f"File exceeds maximum size of {settings.MAX_UPLOAD_SIZE // (1024 * 1024)}MB",
        )

    try:
        parsed = parse_csv(content, file.filename)
    except Exception as exc:
        logger.exception("CSV parsing failed for %s", file.filename)
        # Chain the parser error so the original traceback is preserved
        # as __cause__ instead of being discarded (PEP 3134 / ruff B904).
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"CSV parsing failed: {exc}",
        ) from exc

    # Writes cases and the import_log row; returns counts for the response.
    result = confirm_import(db, parsed, file.filename, user.id)
    # Audit trail entry, separate from the import_log table.
    log_action(
        db,
        user_id=user.id,
        action="csv_imported",
        entity_type="import",
        new_values={
            "filename": file.filename,
            "imported": result.imported,
            "skipped": result.skipped,
            "updated": result.updated,
        },
    )
    return result
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# ICD Excel upload
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
@router.post("/icd-xlsx", response_model=ICDImportResponse)
async def upload_icd_xlsx(
    file: UploadFile = File(...),
    db: Session = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Upload a filled-in ICD coding Excel template.

    Expects columns: Case_ID (col 1), ICD (col 7). Accessible to both
    admin and dak_mitarbeiter users.

    Raises:
        HTTPException: 400 for a non-Excel filename or a failed import,
            413 when the upload exceeds ``settings.MAX_UPLOAD_SIZE``.
    """
    # NOTE(review): ".xls" is accepted here but the error message (and
    # presumably the xlsx parser) only mention ".xlsx" — confirm whether
    # legacy .xls files are actually supported by import_icd_from_xlsx.
    if not file.filename or not file.filename.lower().endswith((".xlsx", ".xls")):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="File must be an Excel file (.xlsx)",
        )

    content = await file.read()
    if len(content) > settings.MAX_UPLOAD_SIZE:
        raise HTTPException(
            status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
            detail=f"File exceeds maximum size of {settings.MAX_UPLOAD_SIZE // (1024 * 1024)}MB",
        )

    try:
        result = import_icd_from_xlsx(db, content, user.id)
    except Exception as exc:
        logger.exception("ICD xlsx import failed for %s", file.filename)
        # Chain the underlying error so the original traceback is preserved
        # as __cause__ instead of being discarded (PEP 3134 / ruff B904).
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"ICD import failed: {exc}",
        ) from exc

    # Record the import in the import_log table (cases_imported/skipped are
    # always 0 for ICD updates — this flow only updates existing cases).
    log = ImportLog(
        filename=file.filename,
        import_type="icd_xlsx",
        cases_imported=0,
        cases_skipped=0,
        cases_updated=result["updated"],
        errors="; ".join(result["errors"]) if result["errors"] else None,
        imported_by=user.id,
    )
    db.add(log)
    db.commit()

    # Audit trail entry, separate from the import_log table.
    log_action(
        db,
        user_id=user.id,
        action="icd_xlsx_imported",
        entity_type="import",
        new_values={
            "filename": file.filename,
            "updated": result["updated"],
            "errors": len(result["errors"]),
        },
    )

    return ICDImportResponse(**result)
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Historical Excel upload (placeholder — full logic in Task 12)
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
@router.post("/historical", response_model=ImportResult)
async def upload_historical_excel(
    file: UploadFile = File(...),
    db: Session = Depends(get_db),
    user: User = Depends(require_admin),
):
    """Upload the Abrechnung_DAK.xlsx for historical data import (admin only).

    This is a placeholder endpoint. The full import logic will be
    implemented in Task 12 — every request currently ends in a 501 after
    the standard upload validation.
    """
    # Validate the extension before touching the request body.
    filename = (file.filename or "").lower()
    if not filename.endswith((".xlsx", ".xls")):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="File must be an Excel file (.xlsx)",
        )

    # Enforce the size cap even though the content is discarded for now,
    # so clients see the same limits as the real endpoints.
    uploaded = await file.read()
    if len(uploaded) > settings.MAX_UPLOAD_SIZE:
        raise HTTPException(
            status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
            detail=f"File exceeds maximum size of {settings.MAX_UPLOAD_SIZE // (1024 * 1024)}MB",
        )

    # TODO: Implement historical import in Task 12
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Historical Excel import is not yet implemented",
    )
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Import log (history)
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
@router.get("/log", response_model=ImportLogListResponse)
def get_import_log(
    page: int = Query(1, ge=1),
    per_page: int = Query(20, ge=1, le=100),
    import_type: Optional[str] = Query(None),
    db: Session = Depends(get_db),
    user: User = Depends(require_admin),
):
    """Return a paginated list of past imports (admin only)."""
    # Base query, optionally narrowed to a single import type.
    base_query = db.query(ImportLog)
    if import_type:
        base_query = base_query.filter(ImportLog.import_type == import_type)

    # Count before pagination so `total` reflects all matching rows.
    total_count = base_query.count()

    offset = (page - 1) * per_page
    rows = (
        base_query.order_by(ImportLog.imported_at.desc())
        .offset(offset)
        .limit(per_page)
        .all()
    )

    return ImportLogListResponse(
        items=rows,
        total=total_count,
        page=page,
        per_page=per_page,
    )
|