First commit

api/app/main.py (new file, 321 lines)

@@ -0,0 +1,321 @@
"""
Main API for the cheque information extraction service
"""

import os
import time
import uuid
import shutil
import ast  # safe parsing of worker results stored in Redis (see get_job_result)
import logging
from datetime import datetime

from fastapi import FastAPI, File, UploadFile, HTTPException, Depends, BackgroundTasks, Query, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from fastapi.staticfiles import StaticFiles
import redis
from rq import Queue
from rq.registry import StartedJobRegistry

from .config import settings
from .schemas import (
    UploadResponse, JobStatusResponse, JobResult,
    ExtractionResult, HealthCheck, ErrorResponse, JobStatus
)
from .dependencies import verify_api_key, get_redis_connection

# Logging configuration
logging.basicConfig(
    level=logging.DEBUG if settings.DEBUG else logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger("cheque_scanner_api")

# FastAPI application
app = FastAPI(
    title=settings.APP_NAME,
    description="API for extracting information from cheque images",
    version=settings.API_VERSION,
    docs_url=f"{settings.API_PREFIX}/docs",
    redoc_url=f"{settings.API_PREFIX}/redoc",
    openapi_url=f"{settings.API_PREFIX}/openapi.json"
)

# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # restrict to known origins in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Serve result images as static files
app.mount("/static", StaticFiles(directory=settings.RESULT_FOLDER), name="static")

# Startup timestamp, used to compute uptime in the health check
start_time = time.time()
@app.get(f"{settings.API_PREFIX}/health", response_model=HealthCheck, tags=["Système"])
|
||||
async def health_check(redis_conn: redis.Redis = Depends(get_redis_connection)):
|
||||
"""
|
||||
Vérifie l'état de santé de l'API et des services associés
|
||||
"""
|
||||
# Vérifier la connexion Redis
|
||||
try:
|
||||
redis_conn.ping()
|
||||
redis_status = "ok"
|
||||
except Exception as e:
|
||||
logger.error(f"Erreur Redis: {str(e)}")
|
||||
redis_status = f"error: {str(e)}"
|
||||
|
||||
# Obtenir le nombre de workers actifs
|
||||
registry = StartedJobRegistry(settings.QUEUE_NAME, connection=redis_conn)
|
||||
worker_count = len(registry.get_job_ids())
|
||||
|
||||
# Obtenir la taille de la file d'attente
|
||||
queue = Queue(settings.QUEUE_NAME, connection=redis_conn)
|
||||
queue_size = len(queue.get_job_ids())
|
||||
|
||||
# Calculer le temps de fonctionnement
|
||||
uptime_seconds = time.time() - start_time
|
||||
days, remainder = divmod(uptime_seconds, 86400)
|
||||
hours, remainder = divmod(remainder, 3600)
|
||||
minutes, seconds = divmod(remainder, 60)
|
||||
uptime = f"{int(days)}d {int(hours)}h {int(minutes)}m {int(seconds)}s"
|
||||
|
||||
return HealthCheck(
|
||||
status="ok",
|
||||
version=settings.API_VERSION,
|
||||
redis_status=redis_status,
|
||||
worker_count=worker_count,
|
||||
queue_size=queue_size,
|
||||
uptime=uptime
|
||||
)
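
Note: StartedJobRegistry tracks jobs that are currently executing, so worker_count above is really a count of busy jobs rather than of worker processes. If the health check should report registered workers instead, RQ exposes that directly; a minimal sketch using the same connection:

    from rq import Worker

    # Number of RQ worker processes registered in Redis (across all queues)
    worker_count = Worker.count(connection=redis_conn)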

@app.post(
    f"{settings.API_PREFIX}/upload",
    response_model=UploadResponse,
    tags=["Extraction"],
    status_code=202
)
async def upload_image(
    background_tasks: BackgroundTasks,
    file: UploadFile = File(...),
    priority: bool = Query(False, description="Process with high priority"),
    api_key: str = Depends(verify_api_key),
    redis_conn: redis.Redis = Depends(get_redis_connection)
):
    """
    Upload a cheque image and start processing it
    """
    # Validate the file extension
    file_ext = os.path.splitext(file.filename)[1].lower().lstrip(".")
    if file_ext not in settings.ALLOWED_EXTENSIONS:
        raise HTTPException(
            status_code=400,
            detail=f"Unsupported file format. Accepted formats: {', '.join(settings.ALLOWED_EXTENSIONS)}"
        )

    # Generate a unique job identifier
    job_id = str(uuid.uuid4())

    # Build the file path
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"{timestamp}_{job_id}.{file_ext}"
    file_path = os.path.join(settings.UPLOAD_FOLDER, filename)

    # Save the file
    try:
        with open(file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)
    except Exception as e:
        logger.error(f"Error saving file: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"Error saving the image: {str(e)}"
        )

    # Pick the queue based on the requested priority
    queue_name = settings.HIGH_PRIORITY_QUEUE_NAME if priority else settings.QUEUE_NAME
    queue = Queue(queue_name, connection=redis_conn)

    # Create an RQ job
    try:
        # The worker resolves the task function by its dotted name
        queue_job = queue.enqueue(
            'tasks.process_cheque_image',
            job_id,
            file_path,
            job_id=job_id,  # reuse our id so get_job_status can locate the job in the queue
            result_ttl=settings.RESULT_TTL,
            job_timeout=settings.JOB_TIMEOUT  # 'timeout' was renamed to 'job_timeout' in RQ 1.0
        )

        # Store job metadata in Redis
        redis_conn.hset(f"job:{job_id}", mapping={
            "status": JobStatus.PENDING.value,
            "created_at": datetime.now().isoformat(),
            "file_path": file_path,
            "filename": file.filename,
            "priority": str(priority).lower()
        })

        logger.info(f"Job created: {job_id} - File: {file.filename}")

        return UploadResponse(
            job_id=job_id,
            status=JobStatus.PENDING,
            message=f"Image queued for processing ({'high-priority' if priority else 'standard'} queue)"
        )

    except Exception as e:
        logger.error(f"Error creating job: {str(e)}")
        # Remove the file on failure
        if os.path.exists(file_path):
            os.remove(file_path)
        raise HTTPException(
            status_code=500,
            detail=f"Error creating job: {str(e)}"
        )
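
For reference, a minimal client sketch for this endpoint. The host, the '/api/v1' prefix, and the X-API-Key header name are assumptions; the real values come from config.py and the verify_api_key dependency:

    import requests

    API = "http://localhost:8000/api/v1"      # assumed host and prefix
    HEADERS = {"X-API-Key": "my-secret-key"}  # assumed header name

    with open("cheque.jpg", "rb") as f:
        resp = requests.post(
            f"{API}/upload",
            headers=HEADERS,
            files={"file": ("cheque.jpg", f, "image/jpeg")},
            params={"priority": "false"},
        )
    resp.raise_for_status()
    job_id = resp.json()["job_id"]  # the 202 response carries the job id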

@app.get(
    f"{settings.API_PREFIX}/status/{{job_id}}",
    response_model=JobStatusResponse,
    tags=["Extraction"]
)
async def get_job_status(
    job_id: str,
    api_key: str = Depends(verify_api_key),
    redis_conn: redis.Redis = Depends(get_redis_connection)
):
    """
    Check the state of an extraction job
    """
    # Make sure the job exists
    if not redis_conn.exists(f"job:{job_id}"):
        raise HTTPException(
            status_code=404,
            detail=f"Job not found: {job_id}"
        )

    # Fetch the job metadata
    job_data = {k.decode(): v.decode() for k, v in redis_conn.hgetall(f"job:{job_id}").items()}

    # Look in the queue the job was enqueued on
    queue_name = settings.HIGH_PRIORITY_QUEUE_NAME if job_data.get("priority") == "true" else settings.QUEUE_NAME
    queue = Queue(queue_name, connection=redis_conn)

    status = job_data.get("status", JobStatus.PENDING.value)
    message = job_data.get("message", "Waiting to be processed")

    progress = None
    queue_position = None

    # If the job is pending, report its position in the queue
    if status == JobStatus.PENDING.value:
        job_ids = queue.get_job_ids()
        if job_id in job_ids:
            queue_position = job_ids.index(job_id) + 1

    # If the job is running, report its progress
    elif status == JobStatus.PROCESSING.value:
        progress = job_data.get("progress")
        if progress:
            progress = int(progress)

    return JobStatusResponse(
        job_id=job_id,
        status=JobStatus(status),
        message=message,
        created_at=datetime.fromisoformat(job_data.get("created_at")),
        updated_at=datetime.fromisoformat(job_data.get("updated_at")) if "updated_at" in job_data else None,
        progress=progress,
        queue_position=queue_position
    )
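
Continuing the client sketch above, a caller polls this endpoint until the job settles. The lowercase status strings are an assumption about how JobStatus serializes:

    import time

    while True:
        data = requests.get(f"{API}/status/{job_id}", headers=HEADERS).json()
        if data["status"] in ("completed", "failed"):
            break
        # queue_position is set while pending, progress while processing
        time.sleep(2)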

@app.get(
    f"{settings.API_PREFIX}/result/{{job_id}}",
    response_model=JobResult,
    tags=["Extraction"]
)
async def get_job_result(
    job_id: str,
    api_key: str = Depends(verify_api_key),
    redis_conn: redis.Redis = Depends(get_redis_connection)
):
    """
    Fetch the results of a finished extraction job
    """
    # Make sure the job exists
    if not redis_conn.exists(f"job:{job_id}"):
        raise HTTPException(
            status_code=404,
            detail=f"Job not found: {job_id}"
        )

    # Fetch the job metadata
    job_data = {k.decode(): v.decode() for k, v in redis_conn.hgetall(f"job:{job_id}").items()}

    # Make sure the job is finished
    status = JobStatus(job_data.get("status", JobStatus.PENDING.value))
    if status not in (JobStatus.COMPLETED, JobStatus.FAILED):
        raise HTTPException(
            status_code=400,
            detail=f"The job is not finished yet. Current status: {status.value}"
        )

    # Load the extraction results if available
    result = None
    texte_brut = None

    if status == JobStatus.COMPLETED:
        # Load the results the worker stored in Redis
        result_data = job_data.get("result")
        if result_data:
            # ast.literal_eval safely parses the dict literal stored by the
            # worker; eval would execute arbitrary code
            result_dict = ast.literal_eval(result_data)
            result = ExtractionResult(**result_dict)

        texte_brut = job_data.get("texte_brut")

    return JobResult(
        job_id=job_id,
        status=status,
        created_at=datetime.fromisoformat(job_data.get("created_at")),
        completed_at=datetime.fromisoformat(job_data.get("completed_at")) if "completed_at" in job_data else None,
        image_path=job_data.get("file_path"),
        result=result,
        texte_brut=texte_brut,
        methode=job_data.get("methode", "inconnu"),
        erreur=job_data.get("erreur") if status == JobStatus.FAILED else None
    )
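
The worker apparently stores the result as the repr of a Python dict, which is why the parse step above needs ast.literal_eval. Serializing to JSON on both sides would be more robust; a sketch, assuming the worker's write can be changed as well:

    import json

    # Worker side: store the extraction result as JSON
    redis_conn.hset(f"job:{job_id}", "result", json.dumps(result_dict))

    # API side: parse it back before validation
    result = ExtractionResult(**json.loads(job_data["result"]))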

@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    """Global exception handler"""
    logger.error(f"Unhandled exception: {str(exc)}")
    return JSONResponse(
        status_code=500,
        content=ErrorResponse(
            message="Internal server error",
            error_code="INTERNAL_SERVER_ERROR",
            details={"error": str(exc)}
        ).dict()
    )
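
FastAPI handles HTTPException on its own, so the handler above only catches truly unexpected errors. If the 4xx responses raised throughout this file should use the same ErrorResponse envelope, a handler can be registered for them too; a sketch, assuming ErrorResponse.details is optional:

    from fastapi import HTTPException

    @app.exception_handler(HTTPException)
    async def http_exception_handler(request: Request, exc: HTTPException):
        # Wrap expected errors in the same envelope as unexpected ones
        return JSONResponse(
            status_code=exc.status_code,
            content=ErrorResponse(
                message=str(exc.detail),
                error_code="HTTP_ERROR",
                details=None
            ).dict()
        )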

if __name__ == "__main__":
    import uvicorn

    # Run from the api/ directory (e.g. python -m app.main) so the
    # package-relative imports above resolve
    uvicorn.run(
        "app.main:app",
        host=settings.HOST,
        port=settings.PORT,
        reload=settings.DEBUG
    )
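
The API only enqueues work; a separate RQ worker process has to consume 'tasks.process_cheque_image'. A minimal launcher sketch, with assumed queue names standing in for settings.QUEUE_NAME and settings.HIGH_PRIORITY_QUEUE_NAME, run from a directory where the tasks module is importable:

    import redis
    from rq import Queue, Worker

    conn = redis.Redis()  # mirror the API's Redis connection settings

    # Workers drain queues in the order given, so the priority queue goes first
    queues = [
        Queue("cheques_high", connection=conn),  # assumed HIGH_PRIORITY_QUEUE_NAME
        Queue("cheques", connection=conn),       # assumed QUEUE_NAME
    ]
    Worker(queues, connection=conn).work()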