refactor(universign): remove emojis from logs and clean up admin routes

parent d6ed8792cc
commit 18f9a45ef6

7 changed files with 94 additions and 469 deletions

api.py (2 changed lines)
@@ -2446,7 +2446,7 @@ async def creer_sortie_stock(sortie: SortieStock):
     if sortie_data.get("date_sortie"):
         sortie_data["date_sortie"] = sortie_data["date_sortie"].isoformat()

-    logger.info(f"📤 Création sortie stock: {len(sortie.lignes)} ligne(s)")
+    logger.info(f"Création sortie stock: {len(sortie.lignes)} ligne(s)")

     resultat = sage_client.creer_sortie_stock(sortie_data)
@@ -1,11 +1,10 @@
 from fastapi import APIRouter, Depends, HTTPException, Query, Request
 from fastapi.responses import FileResponse
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy import false, select, func, or_, and_, true
+from sqlalchemy import select, func, and_
 from sqlalchemy.orm import selectinload
 from typing import List, Optional
 from datetime import datetime, timedelta
-from pydantic import BaseModel, EmailStr
 import logging
 from data.data import templates_signature_email
 from email_queue import email_queue
@@ -15,19 +14,21 @@ from database import (
     UniversignSigner,
     UniversignSyncLog,
     LocalDocumentStatus,
-    SageDocumentType,
 )
 import os
 from pathlib import Path
 import json
-from services.universign_document import UniversignDocumentService
 from services.universign_sync import UniversignSyncService
 from config.config import settings
 from utils.generic_functions import normaliser_type_doc
 from utils.universign_status_mapping import get_status_message, map_universign_to_local

 from database.models.email import EmailLog
 from database.enum.status import StatutEmail
+from schemas import (
+    SyncStatsResponse,
+    CreateSignatureRequest,
+    TransactionResponse,
+)


 logger = logging.getLogger(__name__)
@@ -38,60 +39,13 @@ sync_service = UniversignSyncService(
 )


-class CreateSignatureRequest(BaseModel):
-    """Demande de création d'une signature"""
-
-    sage_document_id: str
-    sage_document_type: SageDocumentType
-    signer_email: EmailStr
-    signer_name: str
-    document_name: Optional[str] = None
-
-
-class TransactionResponse(BaseModel):
-    """Réponse détaillée d'une transaction"""
-
-    id: str
-    transaction_id: str
-    sage_document_id: str
-    sage_document_type: str
-    universign_status: str
-    local_status: str
-    local_status_label: str
-    signer_url: Optional[str]
-    document_url: Optional[str]
-    created_at: datetime
-    sent_at: Optional[datetime]
-    signed_at: Optional[datetime]
-    last_synced_at: Optional[datetime]
-    needs_sync: bool
-    signers: List[dict]
-
-    signed_document_available: bool = False
-    signed_document_downloaded_at: Optional[datetime] = None
-    signed_document_size_kb: Optional[float] = None
-
-
-class SyncStatsResponse(BaseModel):
-    """Statistiques de synchronisation"""
-
-    total_transactions: int
-    pending_sync: int
-    signed: int
-    in_progress: int
-    refused: int
-    expired: int
-    last_sync_at: Optional[datetime]
-
-
 @router.post("/signatures/create", response_model=TransactionResponse)
 async def create_signature(
     request: CreateSignatureRequest, session: AsyncSession = Depends(get_session)
 ):
     try:
-        # === VÉRIFICATION DOUBLON RENFORCÉE ===
         logger.info(
-            f"🔍 Vérification doublon pour: {request.sage_document_id} "
+            f"Vérification doublon pour: {request.sage_document_id} "
             f"(type: {request.sage_document_type.name})"
         )
@@ -107,7 +61,6 @@ async def create_signature(
             f"{len(all_existing)} transaction(s) existante(s) trouvée(s)"
         )

-        # Filtrer les transactions non-finales
         active_txs = [
             tx
             for tx in all_existing
@@ -137,8 +90,7 @@ async def create_signature(
                 "Toutes les transactions existantes sont finales, création autorisée"
             )

-        # Génération PDF
-        logger.info(f"📄 Génération PDF: {request.sage_document_id}")
+        logger.info(f"Génération PDF: {request.sage_document_id}")
         pdf_bytes = email_queue._generate_pdf(
             request.sage_document_id, normaliser_type_doc(request.sage_document_type)
         )
@@ -148,13 +100,12 @@ async def create_signature(

         logger.info(f"PDF généré: {len(pdf_bytes)} octets")

-        # === CRÉATION TRANSACTION UNIVERSIGN ===
         import requests
         import uuid

         auth = (settings.universign_api_key, "")

-        logger.info("🔄 Création transaction Universign...")
+        logger.info("Création transaction Universign...")

         resp = requests.post(
             f"{settings.universign_api_url}/transactions",
@@ -174,8 +125,7 @@ async def create_signature(
         universign_tx_id = resp.json().get("id")
         logger.info(f"Transaction Universign créée: {universign_tx_id}")

-        # Upload PDF
-        logger.info("📤 Upload PDF...")
+        logger.info("Upload PDF...")
         files = {
             "file": (f"{request.sage_document_id}.pdf", pdf_bytes, "application/pdf")
         }
@@ -190,8 +140,7 @@ async def create_signature(
         file_id = resp.json().get("id")
         logger.info(f"PDF uploadé: {file_id}")

-        # Attachement document
-        logger.info("🔗 Attachement document...")
+        logger.info("Attachement document...")
         resp = requests.post(
             f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents",
             auth=auth,
@@ -204,8 +153,7 @@ async def create_signature(

         document_id = resp.json().get("id")

-        # Création champ signature
-        logger.info("✍️ Création champ signature...")
+        logger.info("Création champ signature...")
         resp = requests.post(
             f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents/{document_id}/fields",
             auth=auth,
@@ -218,8 +166,7 @@ async def create_signature(

         field_id = resp.json().get("id")

-        # Liaison signataire
-        logger.info(f"👤 Liaison signataire: {request.signer_email}")
+        logger.info(f"Liaison signataire: {request.signer_email}")
         resp = requests.post(
             f"{settings.universign_api_url}/transactions/{universign_tx_id}/signatures",
             auth=auth,
@@ -230,8 +177,7 @@ async def create_signature(
         if resp.status_code not in [200, 201]:
             raise HTTPException(500, "Erreur liaison signataire")

-        # Démarrage transaction
-        logger.info("🚀 Démarrage transaction...")
+        logger.info("Démarrage transaction...")
         resp = requests.post(
             f"{settings.universign_api_url}/transactions/{universign_tx_id}/start",
             auth=auth,
@@ -243,7 +189,6 @@ async def create_signature(

         final_data = resp.json()

-        # Extraction URL de signature
         signer_url = ""
         if final_data.get("actions"):
             for action in final_data["actions"]:
@@ -256,12 +201,11 @@ async def create_signature(

         logger.info("URL de signature obtenue")

-        # === ENREGISTREMENT LOCAL ===
         local_id = str(uuid.uuid4())

         transaction = UniversignTransaction(
             id=local_id,
-            transaction_id=universign_tx_id,  # Utiliser l'ID Universign, ne jamais le changer
+            transaction_id=universign_tx_id,
             sage_document_id=request.sage_document_id,
             sage_document_type=request.sage_document_type,
             universign_status=UniversignTransactionStatus.STARTED,
@@ -291,10 +235,9 @@ async def create_signature(
         await session.commit()

         logger.info(
-            f"💾 Transaction sauvegardée: {local_id} (Universign: {universign_tx_id})"
+            f"Transaction sauvegardée: {local_id} (Universign: {universign_tx_id})"
         )

-        # === ENVOI EMAIL AVEC TEMPLATE ===
         template = templates_signature_email["demande_signature"]

         type_labels = {
@@ -349,7 +292,6 @@ async def create_signature(

         email_queue.enqueue(email_log.id)

-        # === MISE À JOUR STATUT SAGE (Confirmé = 1) ===
         try:
             from sage_client import sage_client
@@ -364,7 +306,6 @@ async def create_signature(
         except Exception as e:
             logger.warning(f"Impossible de mettre à jour le statut Sage: {e}")

-        # === RÉPONSE ===
         return TransactionResponse(
             id=transaction.id,
             transaction_id=transaction.transaction_id,
@@ -444,7 +385,6 @@ async def list_transactions(
                 }
                 for s in tx.signers
             ],
-            # ✅ NOUVEAUX CHAMPS
             signed_document_available=bool(
                 tx.signed_document_path and Path(tx.signed_document_path).exists()
             ),
@@ -500,7 +440,6 @@ async def get_transaction(
             }
             for s in tx.signers
         ],
-        # ✅ NOUVEAUX CHAMPS
         signed_document_available=bool(
             tx.signed_document_path and Path(tx.signed_document_path).exists()
         ),
@@ -566,17 +505,11 @@ async def webhook_universign(
     try:
         payload = await request.json()

-        # 📋 LOG COMPLET du payload pour débogage
-        logger.info(
-            f"📥 Webhook Universign reçu - Type: {payload.get('type', 'unknown')}"
-        )
+        logger.info(f"Webhook Universign reçu - Type: {payload.get('type', 'unknown')}")
         logger.debug(f"Payload complet: {json.dumps(payload, indent=2)}")

-        # EXTRACTION CORRECTE DU TRANSACTION_ID
         transaction_id = None

-        # 🔍 Structure 1 : Événements avec payload imbriqué (la plus courante)
-        # Exemple : transaction.lifecycle.created, transaction.lifecycle.started, etc.
         if payload.get("type", "").startswith("transaction.") and "payload" in payload:
             # Le transaction_id est dans payload.object.id
             nested_object = payload.get("payload", {}).get("object", {})
@@ -586,9 +519,7 @@ async def webhook_universign(
                 f"Transaction ID extrait de payload.object.id: {transaction_id}"
             )

-        # 🔍 Structure 2 : Action événements (action.opened, action.completed)
         elif payload.get("type", "").startswith("action."):
-            # Le transaction_id est directement dans payload.object.transaction_id
             transaction_id = (
                 payload.get("payload", {}).get("object", {}).get("transaction_id")
             )
@@ -596,17 +527,14 @@ async def webhook_universign(
                 f"Transaction ID extrait de payload.object.transaction_id: {transaction_id}"
             )

-        # 🔍 Structure 3 : Transaction directe (fallback)
         elif payload.get("object") == "transaction":
             transaction_id = payload.get("id")
             logger.info(f"Transaction ID extrait direct: {transaction_id}")

-        # 🔍 Structure 4 : Ancien format (pour rétro-compatibilité)
         elif "transaction" in payload:
             transaction_id = payload.get("transaction", {}).get("id")
             logger.info(f"Transaction ID extrait de transaction.id: {transaction_id}")

-        # Échec d'extraction
         if not transaction_id:
             logger.error(
                 f"Transaction ID introuvable dans webhook\n"
@@ -621,9 +549,8 @@ async def webhook_universign(
                 "event_id": payload.get("id"),
             }, 400

-        logger.info(f"🎯 Transaction ID identifié: {transaction_id}")
+        logger.info(f"Transaction ID identifié: {transaction_id}")

-        # Vérifier si la transaction existe localement
         query = select(UniversignTransaction).where(
             UniversignTransaction.transaction_id == transaction_id
         )
@@ -643,7 +570,6 @@ async def webhook_universign(
                 "event_type": payload.get("type"),
             }

-        # Traiter le webhook
         success, error = await sync_service.process_webhook(
             session, payload, transaction_id
         )
@@ -656,7 +582,6 @@ async def webhook_universign(
                 "transaction_id": transaction_id,
             }, 500

-        # Succès
         logger.info(
             f"Webhook traité avec succès\n"
             f"Transaction: {transaction_id}\n"
@@ -673,7 +598,7 @@ async def webhook_universign(
         }

     except Exception as e:
-        logger.error(f"💥 Erreur critique webhook: {e}", exc_info=True)
+        logger.error(f"Erreur critique webhook: {e}", exc_info=True)
         return {"status": "error", "message": str(e)}, 500
@@ -681,17 +606,14 @@ async def webhook_universign(
 async def get_sync_stats(session: AsyncSession = Depends(get_session)):
     """Statistiques globales de synchronisation"""

-    # Total
     total_query = select(func.count(UniversignTransaction.id))
     total = (await session.execute(total_query)).scalar()

-    # En attente de sync
     pending_query = select(func.count(UniversignTransaction.id)).where(
         UniversignTransaction.needs_sync
     )
     pending = (await session.execute(pending_query)).scalar()

-    # Par statut
     signed_query = select(func.count(UniversignTransaction.id)).where(
         UniversignTransaction.local_status == LocalDocumentStatus.SIGNED
     )
@@ -712,7 +634,6 @@ async def get_sync_stats(session: AsyncSession = Depends(get_session)):
     )
     expired = (await session.execute(expired_query)).scalar()

-    # Dernière sync
     last_sync_query = select(func.max(UniversignTransaction.last_synced_at))
     last_sync = (await session.execute(last_sync_query)).scalar()
@@ -733,7 +654,6 @@ async def get_transaction_logs(
     limit: int = Query(50, le=500),
     session: AsyncSession = Depends(get_session),
 ):
-    # Trouver la transaction
     tx_query = select(UniversignTransaction).where(
         UniversignTransaction.transaction_id == transaction_id
     )
@@ -743,7 +663,6 @@ async def get_transaction_logs(

     if not tx:
         raise HTTPException(404, "Transaction introuvable")

-    # Logs
     logs_query = (
         select(UniversignSyncLog)
         .where(UniversignSyncLog.transaction_id == tx.id)
@@ -772,9 +691,6 @@ async def get_transaction_logs(
     }


-# Ajouter ces routes dans universign.py
-
-
 @router.get("/documents/{sage_document_id}/signatures")
 async def get_signatures_for_document(
     sage_document_id: str,
@@ -816,10 +732,6 @@ async def cleanup_duplicate_signatures(
     ),
     session: AsyncSession = Depends(get_session),
 ):
-    """
-    Supprime les doublons de signatures pour un document.
-    Garde une seule transaction (la plus récente ou ancienne selon le paramètre).
-    """
     query = (
         select(UniversignTransaction)
         .where(UniversignTransaction.sage_document_id == sage_document_id)
@@ -841,7 +753,6 @@ async def cleanup_duplicate_signatures(
             "deleted_count": 0,
         }

-    # Garder la première (selon l'ordre), supprimer les autres
     to_keep = transactions[0]
     to_delete = transactions[1:]
@@ -901,13 +812,8 @@ async def delete_transaction(
 async def cleanup_all_duplicates(
     session: AsyncSession = Depends(get_session),
 ):
-    """
-    Nettoie tous les doublons dans la base.
-    Pour chaque document avec plusieurs transactions, garde la plus récente non-erreur ou la plus récente.
-    """
     from sqlalchemy import func

-    # Trouver les documents avec plusieurs transactions
     subquery = (
         select(
             UniversignTransaction.sage_document_id,
@@ -925,7 +831,6 @@ async def cleanup_all_duplicates(
     cleanup_details = []

     for doc_id in duplicate_docs:
-        # Récupérer toutes les transactions pour ce document
         tx_query = (
             select(UniversignTransaction)
             .where(UniversignTransaction.sage_document_id == doc_id)
@@ -934,7 +839,6 @@ async def cleanup_all_duplicates(
         tx_result = await session.execute(tx_query)
         transactions = tx_result.scalars().all()

-        # Priorité: SIGNE > EN_COURS > EN_ATTENTE > autres
         priority = {"SIGNE": 0, "EN_COURS": 1, "EN_ATTENTE": 2}

         def sort_key(tx):
@@ -972,115 +876,11 @@ async def cleanup_all_duplicates(
     }


-@router.get("/admin/diagnostic", tags=["Admin"])
-async def diagnostic_complet(session: AsyncSession = Depends(get_session)):
-    """
-    Diagnostic complet de l'état des transactions Universign
-    """
-    try:
-        # Statistiques générales
-        total_query = select(func.count(UniversignTransaction.id))
-        total = (await session.execute(total_query)).scalar()
-
-        # Par statut local
-        statuts_query = select(
-            UniversignTransaction.local_status, func.count(UniversignTransaction.id)
-        ).group_by(UniversignTransaction.local_status)
-        statuts_result = await session.execute(statuts_query)
-        statuts = {status.value: count for status, count in statuts_result.all()}
-
-        # Transactions sans sync récente
-        date_limite = datetime.now() - timedelta(hours=1)
-        sans_sync_query = select(func.count(UniversignTransaction.id)).where(
-            and_(
-                UniversignTransaction.needs_sync,
-                or_(
-                    UniversignTransaction.last_synced_at < date_limite,
-                    UniversignTransaction.last_synced_at.is_(None),
-                ),
-            )
-        )
-        sans_sync = (await session.execute(sans_sync_query)).scalar()
-
-        # Doublons potentiels
-        doublons_query = (
-            select(
-                UniversignTransaction.sage_document_id,
-                func.count(UniversignTransaction.id).label("count"),
-            )
-            .group_by(UniversignTransaction.sage_document_id)
-            .having(func.count(UniversignTransaction.id) > 1)
-        )
-        doublons_result = await session.execute(doublons_query)
-        doublons = doublons_result.fetchall()
-
-        # Transactions avec erreurs de sync
-        erreurs_query = select(func.count(UniversignTransaction.id)).where(
-            UniversignTransaction.sync_error.isnot(None)
-        )
-        erreurs = (await session.execute(erreurs_query)).scalar()
-
-        # Transactions sans webhook reçu
-        sans_webhook_query = select(func.count(UniversignTransaction.id)).where(
-            and_(
-                not UniversignTransaction.webhook_received,
-                UniversignTransaction.local_status != LocalDocumentStatus.PENDING,
-            )
-        )
-        sans_webhook = (await session.execute(sans_webhook_query)).scalar()
-
-        diagnostic = {
-            "timestamp": datetime.now().isoformat(),
-            "total_transactions": total,
-            "repartition_statuts": statuts,
-            "problemes_detectes": {
-                "sans_sync_recente": sans_sync,
-                "doublons_possibles": len(doublons),
-                "erreurs_sync": erreurs,
-                "sans_webhook": sans_webhook,
-            },
-            "documents_avec_doublons": [
-                {"document_id": doc_id, "nombre_transactions": count}
-                for doc_id, count in doublons
-            ],
-            "recommandations": [],
-        }
-
-        # Recommandations
-        if sans_sync > 0:
-            diagnostic["recommandations"].append(
-                f"🔄 {sans_sync} transaction(s) à synchroniser. "
-                f"Utilisez POST /universign/sync/all"
-            )
-
-        if len(doublons) > 0:
-            diagnostic["recommandations"].append(
-                f"{len(doublons)} document(s) avec doublons. "
-                f"Utilisez POST /universign/cleanup/all-duplicates"
-            )
-
-        if erreurs > 0:
-            diagnostic["recommandations"].append(
-                f"{erreurs} transaction(s) en erreur. "
-                f"Vérifiez les logs avec GET /universign/transactions?status=ERREUR"
-            )
-
-        return diagnostic
-
-    except Exception as e:
-        logger.error(f"Erreur diagnostic: {e}")
-        raise HTTPException(500, str(e))
-
-
 @router.post("/admin/force-sync-all", tags=["Admin"])
 async def forcer_sync_toutes_transactions(
     max_transactions: int = Query(200, le=500),
     session: AsyncSession = Depends(get_session),
 ):
-    """
-    Force la synchronisation de TOUTES les transactions (même finales)
-    À utiliser pour réparer les incohérences
-    """
     try:
         query = (
             select(UniversignTransaction)
@@ -1105,7 +905,7 @@ async def forcer_sync_toutes_transactions(
             previous_status = transaction.local_status.value

             logger.info(
-                f"🔄 Force sync: {transaction.transaction_id} (statut: {previous_status})"
+                f"Force sync: {transaction.transaction_id} (statut: {previous_status})"
             )

             success, error = await sync_service.sync_transaction(
@@ -1154,9 +954,6 @@ async def forcer_sync_toutes_transactions(
 async def reparer_transaction(
     transaction_id: str, session: AsyncSession = Depends(get_session)
 ):
-    """
-    Répare une transaction spécifique en la re-synchronisant depuis Universign
-    """
     try:
         query = select(UniversignTransaction).where(
             UniversignTransaction.transaction_id == transaction_id
@@ -1174,7 +971,6 @@ async def reparer_transaction(
             else None
         )

-        # Force sync
         success, error = await sync_service.sync_transaction(
             session, transaction, force=True
         )
@@ -1211,11 +1007,7 @@ async def reparer_transaction(
 async def trouver_transactions_inconsistantes(
     session: AsyncSession = Depends(get_session),
 ):
-    """
-    Trouve les transactions dont le statut local ne correspond pas au statut Universign
-    """
     try:
-        # Toutes les transactions non-finales
         query = select(UniversignTransaction).where(
             UniversignTransaction.local_status.in_(
                 [LocalDocumentStatus.PENDING, LocalDocumentStatus.IN_PROGRESS]
@@ -1229,7 +1021,6 @@ async def trouver_transactions_inconsistantes(

         for tx in transactions:
             try:
-                # Récupérer le statut depuis Universign
                 universign_data = sync_service.fetch_transaction_status(
                     tx.transaction_id
                 )
@@ -1298,9 +1089,6 @@ async def nettoyer_transactions_erreur(
     ),
     session: AsyncSession = Depends(get_session),
 ):
-    """
-    Nettoie les transactions en erreur anciennes
-    """
     try:
         date_limite = datetime.now() - timedelta(days=age_jours)
@@ -1344,9 +1132,6 @@ async def nettoyer_transactions_erreur(
 async def voir_dernier_webhook(
     transaction_id: str, session: AsyncSession = Depends(get_session)
 ):
-    """
-    Affiche le dernier payload webhook reçu pour une transaction
-    """
     try:
         query = select(UniversignTransaction).where(
             UniversignTransaction.transaction_id == transaction_id
@@ -1357,7 +1142,6 @@ async def voir_dernier_webhook(

         if not tx:
             raise HTTPException(404, "Transaction introuvable")

-        # Récupérer le dernier log de type webhook
         logs_query = (
             select(UniversignSyncLog)
             .where(
@@ -1407,9 +1191,6 @@ async def voir_dernier_webhook(
 async def telecharger_document_signe(
     transaction_id: str, session: AsyncSession = Depends(get_session)
 ):
-    """
-    Télécharge le document signé localement stocké
-    """
     try:
         query = select(UniversignTransaction).where(
             UniversignTransaction.transaction_id == transaction_id
@@ -1430,7 +1211,6 @@ async def telecharger_document_signe(
         file_path = Path(transaction.signed_document_path)

         if not file_path.exists():
-            # Document perdu, on peut tenter de le retélécharger
             logger.warning(f"Fichier perdu : {file_path}")
             raise HTTPException(
                 404,
@@ -1438,7 +1218,6 @@ async def telecharger_document_signe(
                 "Utilisez POST /admin/download-missing-documents pour le récupérer.",
             )

-        # Génération du nom de fichier pour le téléchargement
         download_name = (
             f"{transaction.sage_document_id}_"
             f"{transaction.sage_document_type.name}_"
@@ -1503,218 +1282,3 @@ async def info_document_signe(
     except Exception as e:
         logger.error(f"Erreur info document : {e}")
         raise HTTPException(500, str(e))
-
-
-@router.post("/admin/download-missing-documents", tags=["Admin"])
-async def telecharger_documents_manquants(
-    force_redownload: bool = Query(
-        False, description="Forcer le retéléchargement même si déjà présent"
-    ),
-    session: AsyncSession = Depends(get_session),
-):
-    """
-    Télécharge tous les documents signés manquants pour les transactions SIGNE
-    """
-    try:
-        # Transactions signées sans document local
-        query = select(UniversignTransaction).where(
-            UniversignTransaction.local_status == LocalDocumentStatus.SIGNED,
-            or_(
-                UniversignTransaction.signed_document_path.is_(None),
-                force_redownload,
-            ),
-        )
-
-        result = await session.execute(query)
-        transactions = result.scalars().all()
-
-        logger.info(f"📥 {len(transactions)} document(s) à télécharger")
-
-        document_service = UniversignDocumentService(
-            api_url=settings.universign_api_url,
-            api_key=settings.universign_api_key, timeout=60
-        )
-
-        results = {"total": len(transactions), "success": 0, "failed": 0, "details": []}
-
-        for transaction in transactions:
-            try:
-                (
-                    success,
-                    error,
-                ) = await document_service.download_and_store_signed_document(
-                    session=session, transaction=transaction, force=force_redownload
-                )
-
-                if success:
-                    results["success"] += 1
-                    results["details"].append(
-                        {
-                            "transaction_id": transaction.transaction_id,
-                            "sage_document_id": transaction.sage_document_id,
-                            "status": "success",
-                        }
-                    )
-                else:
-                    results["failed"] += 1
-                    results["details"].append(
-                        {
-                            "transaction_id": transaction.transaction_id,
-                            "sage_document_id": transaction.sage_document_id,
-                            "status": "failed",
-                            "error": error,
-                        }
-                    )
-
-            except Exception as e:
-                logger.error(f"Erreur téléchargement {transaction.transaction_id}: {e}")
-                results["failed"] += 1
-                results["details"].append(
-                    {"transaction_id": transaction.transaction_id, "error": str(e)}
-                )
-
-        await session.commit()
-
-        logger.info(
-            f"Téléchargement terminé : {results['success']}/{results['total']} réussis"
-        )
-
-        return results
-
-    except Exception as e:
-        logger.error(f"Erreur téléchargement batch : {e}", exc_info=True)
-        raise HTTPException(500, str(e))
-
-
-@router.post("/admin/cleanup-old-documents", tags=["Admin"])
-async def nettoyer_anciens_documents(
-    days_to_keep: int = Query(
-        90, ge=7, le=365, description="Nombre de jours à conserver"
-    ),
-):
-    """
-    Supprime les documents signés de plus de X jours (par défaut 90)
-    """
-    try:
-        document_service = UniversignDocumentService(
-            api_url=settings.universign_api_url,
-            api_key=settings.universign_api_key
-        )
-
-        deleted, size_freed_mb = await document_service.cleanup_old_documents(
-            days_to_keep=days_to_keep
-        )
-
-        return {
-            "success": True,
-            "files_deleted": deleted,
-            "space_freed_mb": size_freed_mb,
-            "days_kept": days_to_keep,
-        }
-
-    except Exception as e:
-        logger.error(f"Erreur nettoyage : {e}")
-        raise HTTPException(500, str(e))
-
-
-@router.get("/transactions/{transaction_id}/diagnose", tags=["Debug"])
-async def diagnose_transaction(
-    transaction_id: str, session: AsyncSession = Depends(get_session)
-):
-    """
-    Diagnostic complet d'une transaction Universign
-    Utile pour débugger les problèmes de récupération de documents
-    """
-    from services.universign_document import UniversignDocumentService
-
-    try:
-        # Récupérer la transaction locale
-        query = select(UniversignTransaction).where(
-            UniversignTransaction.transaction_id == transaction_id
-        )
-        result = await session.execute(query)
-        transaction = result.scalar_one_or_none()
-
-        local_info = None
-        if transaction:
-            local_info = {
-                "id": transaction.id,
-                "sage_document_id": transaction.sage_document_id,
-                "local_status": transaction.local_status.value,
-                "document_url": transaction.document_url,
-                "signed_document_path": transaction.signed_document_path,
-                "download_attempts": transaction.download_attempts,
-                "download_error": transaction.download_error,
-            }
-
-        # Diagnostic API Universign
-        document_service = UniversignDocumentService(
-            api_url=settings.universign_api_url,
-            api_key=settings.universign_api_key,
-            timeout=30,
-        )
-
-        api_diagnosis = document_service.diagnose_transaction(transaction_id)
-
-        return {
-            "transaction_id": transaction_id,
-            "local_data": local_info,
-            "api_diagnosis": api_diagnosis,
-            "recommendations": _generate_recommendations(local_info, api_diagnosis),
-        }
-
-    except Exception as e:
-        logger.error(f"Erreur diagnostic: {e}", exc_info=True)
-        raise HTTPException(500, str(e))
-
-
-def _generate_recommendations(local_info, api_diagnosis):
-    """Génère des recommandations basées sur le diagnostic"""
-    recommendations = []
-
-    if not local_info:
-        recommendations.append(
-            "Transaction introuvable localement. Vérifiez le transaction_id."
-        )
-        return recommendations
-
-    if not api_diagnosis.get("success"):
-        recommendations.append(
-            f"Erreur API Universign: {api_diagnosis.get('error')}. "
-            f"Vérifiez la connectivité et les credentials."
-        )
-        return recommendations
-
-    state = api_diagnosis.get("checks", {}).get("transaction_data", {}).get("state")
-
-    if state not in ["completed", "closed"]:
-        recommendations.append(
-            f"La transaction n'est pas encore signée (state={state}). "
-            f"Attendez que le signataire complète la signature."
-        )
-
-    docs = api_diagnosis.get("checks", {}).get("documents", [])
-    if not docs:
-        recommendations.append("Aucun document trouvé dans la transaction Universign.")
-    else:
-        for doc in docs:
-            dl_check = doc.get("download_check", {})
-            if not dl_check.get("accessible"):
-                recommendations.append(
-                    f"Document {doc.get('id')} non accessible: "
-                    f"status_code={dl_check.get('status_code')}. "
-                    f"Vérifiez que la signature est complète."
-                )
-
-    if local_info.get("download_error"):
-        recommendations.append(
-            f"Dernière erreur de téléchargement: {local_info['download_error']}"
-        )
-
-    if not recommendations:
-        recommendations.append(
-            "Tout semble correct. Essayez POST /admin/download-missing-documents "
-            "avec force_redownload=true"
-        )
-
-    return recommendations
@@ -26,7 +26,13 @@ from schemas.documents.documents import TypeDocument, TypeDocumentSQL
 from schemas.documents.email import StatutEmail, EmailEnvoi
 from schemas.documents.factures import FactureCreate, FactureUpdate
 from schemas.documents.livraisons import LivraisonCreate, LivraisonUpdate
-from schemas.documents.universign import Signature, StatutSignature
+from schemas.documents.universign import (
+    Signature,
+    StatutSignature,
+    SyncStatsResponse,
+    CreateSignatureRequest,
+    TransactionResponse,
+)
 from schemas.articles.articles import (
     ArticleCreate,
     Article,
@@ -105,4 +111,7 @@ __all__ = [
     "SageGatewayTest",
     "SageGatewayStatsResponse",
     "CurrentGatewayInfo",
+    "SyncStatsResponse",
+    "CreateSignatureRequest",
+    "TransactionResponse",
 ]
@@ -1,6 +1,12 @@
 from pydantic import BaseModel, EmailStr
 from enum import Enum
 from schemas.documents.documents import TypeDocument
+from database import (
+    SageDocumentType,
+)
+
+from typing import List, Optional
+from datetime import datetime


 class StatutSignature(str, Enum):
@@ -16,3 +22,49 @@ class Signature(BaseModel):
     type_doc: TypeDocument
     email_signataire: EmailStr
     nom_signataire: str
+
+
+class CreateSignatureRequest(BaseModel):
+    """Demande de création d'une signature"""
+
+    sage_document_id: str
+    sage_document_type: SageDocumentType
+    signer_email: EmailStr
+    signer_name: str
+    document_name: Optional[str] = None
+
+
+class TransactionResponse(BaseModel):
+    """Réponse détaillée d'une transaction"""
+
+    id: str
+    transaction_id: str
+    sage_document_id: str
+    sage_document_type: str
+    universign_status: str
+    local_status: str
+    local_status_label: str
+    signer_url: Optional[str]
+    document_url: Optional[str]
+    created_at: datetime
+    sent_at: Optional[datetime]
+    signed_at: Optional[datetime]
+    last_synced_at: Optional[datetime]
+    needs_sync: bool
+    signers: List[dict]
+
+    signed_document_available: bool = False
+    signed_document_downloaded_at: Optional[datetime] = None
+    signed_document_size_kb: Optional[float] = None
+
+
+class SyncStatsResponse(BaseModel):
+    """Statistiques de synchronisation"""
+
+    total_transactions: int
+    pending_sync: int
+    signed: int
+    in_progress: int
+    refused: int
+    expired: int
+    last_sync_at: Optional[datetime]
@@ -76,7 +76,7 @@ class UniversignDocumentService:
                 f"/documents/{document_id}/download"
             )

-            logger.info(f"📥 Téléchargement depuis: {download_url}")
+            logger.info(f"Téléchargement depuis: {download_url}")

             response = requests.get(
                 download_url,
@@ -155,7 +155,7 @@ class UniversignDocumentService:
         try:
             # ÉTAPE 1: Récupérer les documents de la transaction
             logger.info(
-                f"🔄 Récupération document signé pour: {transaction.transaction_id}"
+                f"Récupération document signé pour: {transaction.transaction_id}"
             )

             documents = self.fetch_transaction_documents(transaction.transaction_id)
@@ -177,7 +177,7 @@ class UniversignDocumentService:
                 if doc_status in ["signed", "completed", "closed"]:
                     document_id = doc_id
                     logger.info(
-                        f"📄 Document signé trouvé: {doc_id} (status: {doc_status})"
+                        f"Document signé trouvé: {doc_id} (status: {doc_status})"
                     )
                     break
@@ -309,7 +309,7 @@ class UniversignDocumentService:

         try:
             # Test 1: Récupération de la transaction
-            logger.info(f"🔍 Diagnostic transaction: {transaction_id}")
+            logger.info(f"Diagnostic transaction: {transaction_id}")

             response = requests.get(
                 f"{self.api_url}/transactions/{transaction_id}",
@@ -341,7 +341,7 @@ class UniversignSyncService:
             return True, None

         # Récupération du statut distant
-        logger.info(f"🔄 Synchronisation: {transaction.transaction_id}")
+        logger.info(f"Synchronisation: {transaction.transaction_id}")

         result = self.fetch_transaction_status(transaction.transaction_id)
@@ -365,7 +365,7 @@ class UniversignSyncService:
         previous_local_status = transaction.local_status.value

         logger.info(
-            f"🔄 Mapping: {universign_status_raw} (Universign) → "
+            f"Mapping: {universign_status_raw} (Universign) → "
             f"{new_local_status} (Local) | Actuel: {previous_local_status}"
         )
@@ -433,13 +433,13 @@ class UniversignSyncService:
         if documents:
             first_doc = documents[0]
             logger.info(
-                f"📄 Document Universign trouvé: id={first_doc.get('id')}, "
+                f"Document Universign trouvé: id={first_doc.get('id')}, "
                 f"status={first_doc.get('status')}"
             )

         # Téléchargement automatique du document signé
         if new_local_status == "SIGNE" and not transaction.signed_document_path:
-            logger.info("📥 Déclenchement téléchargement document signé...")
+            logger.info("Déclenchement téléchargement document signé...")

             try:
                 (
@@ -537,7 +537,7 @@ class UniversignSyncService:
                 transaction.universign_document_id = first_doc_id

                 logger.info(
-                    f"📄 Document Universign: id={first_doc_id}, "
+                    f"Document Universign: id={first_doc_id}, "
                     f"name={first_doc.get('name')}, status={first_doc.get('status')}"
                 )
             else:
@@ -546,7 +546,7 @@ class UniversignSyncService:
         # Téléchargement automatique si signé
         if new_local_status == "SIGNE":
             if not transaction.signed_document_path:
-                logger.info("📥 Déclenchement téléchargement document signé...")
+                logger.info("Déclenchement téléchargement document signé...")

                 try:
                     (