feat(universign): add signed document download and storage functionality
This commit is contained in:
parent
74c0d73294
commit
983e960b9b
17 changed files with 903 additions and 348 deletions
85
Dockerfile
85
Dockerfile
|
|
@ -1,23 +1,78 @@
|
|||
# Backend Dockerfile
|
||||
FROM python:3.12-slim
|
||||
|
||||
# ================================
|
||||
# Base
|
||||
# ================================
|
||||
FROM python:3.12-slim AS base
|
||||
WORKDIR /app
|
||||
|
||||
# Copier et installer les dépendances
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir --upgrade pip \
|
||||
&& pip install --no-cache-dir -r requirements.txt
|
||||
# Installer dépendances système si nécessaire
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir --upgrade pip
|
||||
|
||||
# ================================
|
||||
# DEV
|
||||
# ================================
|
||||
FROM base AS dev
|
||||
ENV PYTHONUNBUFFERED=1 \
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
ENV=development
|
||||
|
||||
# Installer dépendances dev (si vous avez un requirements.dev.txt)
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Créer dossiers
|
||||
RUN mkdir -p /app/data /app/logs && chmod -R 777 /app/data /app/logs
|
||||
|
||||
# Copier le reste du projet
|
||||
COPY . .
|
||||
|
||||
# Créer dossier persistant pour SQLite avec bonnes permissions
|
||||
RUN mkdir -p /app/data && chmod 777 /app/data
|
||||
|
||||
# Exposer le port
|
||||
EXPOSE 8000
|
||||
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
|
||||
|
||||
# Lancer l'API et initialiser la DB au démarrage
|
||||
# CMD ["sh", "-c", "uvicorn api:app --host 0.0.0.0 --port 8000"]
|
||||
# ================================
|
||||
# STAGING
|
||||
# ================================
|
||||
FROM base AS staging
|
||||
ENV PYTHONUNBUFFERED=1 \
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
ENV=staging
|
||||
|
||||
CMD ["sh", "-c", "python init_db.py && uvicorn api:app --host 0.0.0.0 --port 8000"]
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
RUN mkdir -p /app/data /app/logs && chmod -R 755 /app/data /app/logs
|
||||
|
||||
COPY . .
|
||||
|
||||
# Initialiser la DB au build
|
||||
RUN python init_db.py || true
|
||||
|
||||
EXPOSE 8002
|
||||
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8002", "--log-level", "info"]
|
||||
|
||||
# ================================
|
||||
# PROD
|
||||
# ================================
|
||||
FROM base AS prod
|
||||
ENV PYTHONUNBUFFERED=1 \
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
ENV=production
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Créer utilisateur non-root pour la sécurité
|
||||
RUN useradd -m -u 1000 appuser && \
|
||||
mkdir -p /app/data /app/logs && \
|
||||
chown -R appuser:appuser /app
|
||||
|
||||
COPY --chown=appuser:appuser . .
|
||||
|
||||
# Initialiser la DB au build
|
||||
RUN python init_db.py || true
|
||||
|
||||
USER appuser
|
||||
|
||||
EXPOSE 8004
|
||||
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8004", "--workers", "4"]
|
||||
26
api.py
26
api.py
|
|
@ -176,10 +176,10 @@ app.include_router(universign_router)
|
|||
@app.get("/clients", response_model=List[ClientDetails], tags=["Clients"])
|
||||
async def obtenir_clients(
|
||||
query: Optional[str] = Query(None),
|
||||
sage: SageGatewayClient = Depends(get_sage_client_for_user),
|
||||
#sage: SageGatewayClient = Depends(get_sage_client_for_user),
|
||||
):
|
||||
try:
|
||||
clients = sage.lister_clients(filtre=query or "")
|
||||
clients = sage_client.lister_clients(filtre=query or "")
|
||||
return [ClientDetails(**c) for c in clients]
|
||||
except Exception as e:
|
||||
logger.error(f"Erreur recherche clients: {e}")
|
||||
|
|
@ -391,7 +391,7 @@ async def creer_devis(devis: DevisRequest):
|
|||
resultat = sage_client.creer_devis(devis_data)
|
||||
|
||||
logger.info(
|
||||
f"✅ Devis créé: {resultat.get('numero_devis')} "
|
||||
f"Devis créé: {resultat.get('numero_devis')} "
|
||||
f"({resultat.get('total_ttc')}€ TTC)"
|
||||
)
|
||||
|
||||
|
|
@ -405,7 +405,7 @@ async def creer_devis(devis: DevisRequest):
|
|||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Erreur création devis: {e}")
|
||||
logger.error(f"Erreur création devis: {e}")
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
|
|
@ -474,7 +474,7 @@ async def creer_commande(
|
|||
resultat = sage_client.creer_commande(commande_data)
|
||||
|
||||
logger.info(
|
||||
f"✅ Commande créée: {resultat.get('numero_commande')} "
|
||||
f"Commande créée: {resultat.get('numero_commande')} "
|
||||
f"({resultat.get('total_ttc')}€ TTC)"
|
||||
)
|
||||
|
||||
|
|
@ -496,7 +496,7 @@ async def creer_commande(
|
|||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Erreur création commande: {e}")
|
||||
logger.error(f"Erreur création commande: {e}")
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
|
|
@ -1420,7 +1420,7 @@ async def creer_facture(
|
|||
resultat = sage_client.creer_facture(facture_data)
|
||||
|
||||
logger.info(
|
||||
f"✅ Facture créée: {resultat.get('numero_facture')} "
|
||||
f"Facture créée: {resultat.get('numero_facture')} "
|
||||
f"({resultat.get('total_ttc')}€ TTC)"
|
||||
)
|
||||
|
||||
|
|
@ -1442,7 +1442,7 @@ async def creer_facture(
|
|||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Erreur création facture: {e}")
|
||||
logger.error(f"Erreur création facture: {e}")
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
|
|
@ -1943,7 +1943,7 @@ async def creer_avoir(avoir: AvoirCreate, session: AsyncSession = Depends(get_se
|
|||
resultat = sage_client.creer_avoir(avoir_data)
|
||||
|
||||
logger.info(
|
||||
f"✅ Avoir créé: {resultat.get('numero_avoir')} "
|
||||
f"Avoir créé: {resultat.get('numero_avoir')} "
|
||||
f"({resultat.get('total_ttc')}€ TTC)"
|
||||
)
|
||||
|
||||
|
|
@ -1965,7 +1965,7 @@ async def creer_avoir(avoir: AvoirCreate, session: AsyncSession = Depends(get_se
|
|||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Erreur création avoir: {e}")
|
||||
logger.error(f"Erreur création avoir: {e}")
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
|
|
@ -2070,7 +2070,7 @@ async def creer_livraison(
|
|||
resultat = sage_client.creer_livraison(livraison_data)
|
||||
|
||||
logger.info(
|
||||
f"✅ Livraison créée: {resultat.get('numero_livraison')} "
|
||||
f"Livraison créée: {resultat.get('numero_livraison')} "
|
||||
f"({resultat.get('total_ttc')}€ TTC)"
|
||||
)
|
||||
|
||||
|
|
@ -2092,7 +2092,7 @@ async def creer_livraison(
|
|||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Erreur création livraison: {e}")
|
||||
logger.error(f"Erreur création livraison: {e}")
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
|
|
@ -2446,7 +2446,7 @@ async def creer_sortie_stock(sortie: SortieStock):
|
|||
if sortie_data.get("date_sortie"):
|
||||
sortie_data["date_sortie"] = sortie_data["date_sortie"].isoformat()
|
||||
|
||||
logger.info(f"📤 Création sortie stock: {len(sortie.lignes)} ligne(s)")
|
||||
logger.info(f"Création sortie stock: {len(sortie.lignes)} ligne(s)")
|
||||
|
||||
resultat = sage_client.creer_sortie_stock(sortie_data)
|
||||
|
||||
|
|
|
|||
18
data/status_latest.py
Normal file
18
data/status_latest.py
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
import enum
|
||||
|
||||
|
||||
class StatutEmail(str, enum.Enum):
|
||||
EN_ATTENTE = "EN_ATTENTE"
|
||||
EN_COURS = "EN_COURS"
|
||||
ENVOYE = "ENVOYE"
|
||||
OUVERT = "OUVERT"
|
||||
ERREUR = "ERREUR"
|
||||
BOUNCE = "BOUNCE"
|
||||
|
||||
|
||||
class StatutSignature(str, enum.Enum):
|
||||
EN_ATTENTE = "EN_ATTENTE"
|
||||
ENVOYE = "ENVOYE"
|
||||
SIGNE = "SIGNE"
|
||||
REFUSE = "REFUSE"
|
||||
EXPIRE = "EXPIRE"
|
||||
|
|
@ -7,7 +7,7 @@ from database.models.generic_model import Base
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite+aiosqlite:///./data/sage_dataven.db")
|
||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||
|
||||
engine = create_async_engine(
|
||||
DATABASE_URL,
|
||||
|
|
|
|||
|
|
@ -106,6 +106,23 @@ class UniversignTransaction(Base):
|
|||
# === URLS ET MÉTADONNÉES UNIVERSIGN ===
|
||||
signer_url = Column(Text, nullable=True, comment="URL de signature")
|
||||
document_url = Column(Text, nullable=True, comment="URL du document signé")
|
||||
|
||||
signed_document_path = Column(
|
||||
Text, nullable=True, comment="Chemin local du PDF signé"
|
||||
)
|
||||
signed_document_downloaded_at = Column(
|
||||
DateTime, nullable=True, comment="Date de téléchargement du document"
|
||||
)
|
||||
signed_document_size_bytes = Column(
|
||||
Integer, nullable=True, comment="Taille du fichier en octets"
|
||||
)
|
||||
download_attempts = Column(
|
||||
Integer, default=0, comment="Nombre de tentatives de téléchargement"
|
||||
)
|
||||
download_error = Column(
|
||||
Text, nullable=True, comment="Dernière erreur de téléchargement"
|
||||
)
|
||||
|
||||
certificate_url = Column(Text, nullable=True, comment="URL du certificat")
|
||||
|
||||
# === SIGNATAIRES ===
|
||||
|
|
@ -268,7 +285,7 @@ class UniversignConfig(Base):
|
|||
)
|
||||
|
||||
api_url = Column(String(500), nullable=False)
|
||||
api_key = Column(String(500), nullable=False, comment="⚠️ À chiffrer")
|
||||
api_key = Column(String(500), nullable=False, comment="À chiffrer")
|
||||
|
||||
# === OPTIONS ===
|
||||
webhook_url = Column(String(500), nullable=True)
|
||||
|
|
|
|||
24
docker-compose.dev.yml
Normal file
24
docker-compose.dev.yml
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
services:
|
||||
backend:
|
||||
container_name: dev-sage-api
|
||||
build:
|
||||
context: .
|
||||
target: dev
|
||||
env_file: .env
|
||||
volumes:
|
||||
- .:/app
|
||||
- /app/__pycache__
|
||||
- ./data:/app/data
|
||||
- ./logs:/app/logs
|
||||
ports:
|
||||
- "8000:8000"
|
||||
environment:
|
||||
ENV: development
|
||||
DEBUG: "true"
|
||||
DATABASE_URL: "sqlite+aiosqlite:///./data/sage_dataven.db"
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
23
docker-compose.prod.yml
Normal file
23
docker-compose.prod.yml
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
services:
|
||||
backend:
|
||||
container_name: prod_sage_api
|
||||
build:
|
||||
context: .
|
||||
target: prod
|
||||
env_file: .env.production
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
- ./logs:/app/logs
|
||||
ports:
|
||||
- "8004:8004"
|
||||
environment:
|
||||
ENV: production
|
||||
DEBUG: "false"
|
||||
DATABASE_URL: "sqlite+aiosqlite:///./data/sage_prod.db"
|
||||
restart: always
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8004/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
start_period: 40s
|
||||
22
docker-compose.staging.yml
Normal file
22
docker-compose.staging.yml
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
services:
|
||||
backend:
|
||||
container_name: staging_sage_api
|
||||
build:
|
||||
context: .
|
||||
target: staging
|
||||
env_file: .env.staging
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
- ./logs:/app/logs
|
||||
ports:
|
||||
- "8002:8002"
|
||||
environment:
|
||||
ENV: staging
|
||||
DEBUG: "false"
|
||||
DATABASE_URL: "sqlite+aiosqlite:///./data/sage_staging.db"
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8002/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
|
|
@ -1,11 +1,4 @@
|
|||
services:
|
||||
vps-sage-api:
|
||||
build: .
|
||||
container_name: vps-sage-api
|
||||
env_file: .env
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
- ./logs:/app/logs
|
||||
ports:
|
||||
- "8000:8000"
|
||||
restart: unless-stopped
|
||||
backend:
|
||||
build:
|
||||
context: .
|
||||
21
init_db.py
21
init_db.py
|
|
@ -14,33 +14,14 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
async def main():
|
||||
print("\n" + "=" * 60)
|
||||
print("Initialisation de la base de données délocalisée")
|
||||
print("=" * 60 + "\n")
|
||||
|
||||
try:
|
||||
logger.info("Debut de l'initialisation")
|
||||
await init_db()
|
||||
logger.info("Initialisation terminee")
|
||||
print("\nInitialisation terminee")
|
||||
|
||||
print("\nBase de données créée avec succès !")
|
||||
print("Fichier: sage_dataven.db")
|
||||
|
||||
print("\nTables créées:")
|
||||
print(" |- email_logs (Journalisation emails)")
|
||||
print(" |- signature_logs (Suivi signatures Universign)")
|
||||
print(" |- workflow_logs (Transformations documents)")
|
||||
print(" |- cache_metadata (Métadonnées cache)")
|
||||
print(" |- audit_logs (Journal d'audit)")
|
||||
|
||||
print("\nProchaines étapes:")
|
||||
print(" 1. Configurer le fichier .env avec les credentials")
|
||||
print(" 2. Lancer la gateway Windows sur la machine Sage")
|
||||
print(" 3. Lancer l'API VPS: uvicorn api:app --host 0.0.0.0 --port 8000")
|
||||
print(" 4. Ou avec Docker : docker-compose up -d")
|
||||
print(" 5. Tester: http://IP_DU_VPS:8000/docs")
|
||||
|
||||
print("\n" + "=" * 60 + "\n")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
from fastapi import APIRouter, Depends, HTTPException, Query, Request
|
||||
from fastapi.responses import FileResponse
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func, or_, and_
|
||||
from sqlalchemy import select, func, and_
|
||||
from sqlalchemy.orm import selectinload
|
||||
from typing import List, Optional
|
||||
from datetime import datetime, timedelta
|
||||
from pydantic import BaseModel, EmailStr
|
||||
import logging
|
||||
from data.data import templates_signature_email
|
||||
from email_queue import email_queue
|
||||
|
|
@ -14,16 +14,21 @@ from database import (
|
|||
UniversignSigner,
|
||||
UniversignSyncLog,
|
||||
LocalDocumentStatus,
|
||||
SageDocumentType,
|
||||
)
|
||||
import os
|
||||
from pathlib import Path
|
||||
import json
|
||||
from services.universign_sync import UniversignSyncService
|
||||
from config.config import settings
|
||||
from utils.generic_functions import normaliser_type_doc
|
||||
from utils.universign_status_mapping import get_status_message, map_universign_to_local
|
||||
|
||||
from database.models.email import EmailLog
|
||||
from database.enum.status import StatutEmail
|
||||
from schemas import (
|
||||
SyncStatsResponse,
|
||||
CreateSignatureRequest,
|
||||
TransactionResponse,
|
||||
)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -34,56 +39,13 @@ sync_service = UniversignSyncService(
|
|||
)
|
||||
|
||||
|
||||
class CreateSignatureRequest(BaseModel):
|
||||
"""Demande de création d'une signature"""
|
||||
|
||||
sage_document_id: str
|
||||
sage_document_type: SageDocumentType
|
||||
signer_email: EmailStr
|
||||
signer_name: str
|
||||
document_name: Optional[str] = None
|
||||
|
||||
|
||||
class TransactionResponse(BaseModel):
|
||||
"""Réponse détaillée d'une transaction"""
|
||||
|
||||
id: str
|
||||
transaction_id: str
|
||||
sage_document_id: str
|
||||
sage_document_type: str
|
||||
universign_status: str
|
||||
local_status: str
|
||||
local_status_label: str
|
||||
signer_url: Optional[str]
|
||||
document_url: Optional[str]
|
||||
created_at: datetime
|
||||
sent_at: Optional[datetime]
|
||||
signed_at: Optional[datetime]
|
||||
last_synced_at: Optional[datetime]
|
||||
needs_sync: bool
|
||||
signers: List[dict]
|
||||
|
||||
|
||||
class SyncStatsResponse(BaseModel):
|
||||
"""Statistiques de synchronisation"""
|
||||
|
||||
total_transactions: int
|
||||
pending_sync: int
|
||||
signed: int
|
||||
in_progress: int
|
||||
refused: int
|
||||
expired: int
|
||||
last_sync_at: Optional[datetime]
|
||||
|
||||
|
||||
@router.post("/signatures/create", response_model=TransactionResponse)
|
||||
async def create_signature(
|
||||
request: CreateSignatureRequest, session: AsyncSession = Depends(get_session)
|
||||
):
|
||||
try:
|
||||
# === VÉRIFICATION DOUBLON RENFORCÉE ===
|
||||
logger.info(
|
||||
f"🔍 Vérification doublon pour: {request.sage_document_id} "
|
||||
f"Vérification doublon pour: {request.sage_document_id} "
|
||||
f"(type: {request.sage_document_type.name})"
|
||||
)
|
||||
|
||||
|
|
@ -96,10 +58,9 @@ async def create_signature(
|
|||
|
||||
if all_existing:
|
||||
logger.warning(
|
||||
f"⚠️ {len(all_existing)} transaction(s) existante(s) trouvée(s)"
|
||||
f"{len(all_existing)} transaction(s) existante(s) trouvée(s)"
|
||||
)
|
||||
|
||||
# Filtrer les transactions non-finales
|
||||
active_txs = [
|
||||
tx
|
||||
for tx in all_existing
|
||||
|
|
@ -115,7 +76,7 @@ async def create_signature(
|
|||
if active_txs:
|
||||
active_tx = active_txs[0]
|
||||
logger.error(
|
||||
f"❌ Transaction active existante: {active_tx.transaction_id} "
|
||||
f"Transaction active existante: {active_tx.transaction_id} "
|
||||
f"(statut: {active_tx.local_status.value})"
|
||||
)
|
||||
raise HTTPException(
|
||||
|
|
@ -126,11 +87,10 @@ async def create_signature(
|
|||
)
|
||||
|
||||
logger.info(
|
||||
"✅ Toutes les transactions existantes sont finales, création autorisée"
|
||||
"Toutes les transactions existantes sont finales, création autorisée"
|
||||
)
|
||||
|
||||
# Génération PDF
|
||||
logger.info(f"📄 Génération PDF: {request.sage_document_id}")
|
||||
logger.info(f"Génération PDF: {request.sage_document_id}")
|
||||
pdf_bytes = email_queue._generate_pdf(
|
||||
request.sage_document_id, normaliser_type_doc(request.sage_document_type)
|
||||
)
|
||||
|
|
@ -138,15 +98,14 @@ async def create_signature(
|
|||
if not pdf_bytes:
|
||||
raise HTTPException(400, "Échec génération PDF")
|
||||
|
||||
logger.info(f"✅ PDF généré: {len(pdf_bytes)} octets")
|
||||
logger.info(f"PDF généré: {len(pdf_bytes)} octets")
|
||||
|
||||
# === CRÉATION TRANSACTION UNIVERSIGN ===
|
||||
import requests
|
||||
import uuid
|
||||
|
||||
auth = (settings.universign_api_key, "")
|
||||
|
||||
logger.info("🔄 Création transaction Universign...")
|
||||
logger.info("Création transaction Universign...")
|
||||
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions",
|
||||
|
|
@ -160,14 +119,13 @@ async def create_signature(
|
|||
)
|
||||
|
||||
if resp.status_code != 200:
|
||||
logger.error(f"❌ Erreur Universign (création): {resp.text}")
|
||||
logger.error(f"Erreur Universign (création): {resp.text}")
|
||||
raise HTTPException(500, f"Erreur Universign: {resp.status_code}")
|
||||
|
||||
universign_tx_id = resp.json().get("id")
|
||||
logger.info(f"✅ Transaction Universign créée: {universign_tx_id}")
|
||||
logger.info(f"Transaction Universign créée: {universign_tx_id}")
|
||||
|
||||
# Upload PDF
|
||||
logger.info("📤 Upload PDF...")
|
||||
logger.info("Upload PDF...")
|
||||
files = {
|
||||
"file": (f"{request.sage_document_id}.pdf", pdf_bytes, "application/pdf")
|
||||
}
|
||||
|
|
@ -176,14 +134,13 @@ async def create_signature(
|
|||
)
|
||||
|
||||
if resp.status_code not in [200, 201]:
|
||||
logger.error(f"❌ Erreur upload: {resp.text}")
|
||||
logger.error(f"Erreur upload: {resp.text}")
|
||||
raise HTTPException(500, "Erreur upload PDF")
|
||||
|
||||
file_id = resp.json().get("id")
|
||||
logger.info(f"✅ PDF uploadé: {file_id}")
|
||||
logger.info(f"PDF uploadé: {file_id}")
|
||||
|
||||
# Attachement document
|
||||
logger.info("🔗 Attachement document...")
|
||||
logger.info("Attachement document...")
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents",
|
||||
auth=auth,
|
||||
|
|
@ -196,8 +153,7 @@ async def create_signature(
|
|||
|
||||
document_id = resp.json().get("id")
|
||||
|
||||
# Création champ signature
|
||||
logger.info("✍️ Création champ signature...")
|
||||
logger.info("Création champ signature...")
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents/{document_id}/fields",
|
||||
auth=auth,
|
||||
|
|
@ -210,8 +166,7 @@ async def create_signature(
|
|||
|
||||
field_id = resp.json().get("id")
|
||||
|
||||
# Liaison signataire
|
||||
logger.info(f"👤 Liaison signataire: {request.signer_email}")
|
||||
logger.info(f"Liaison signataire: {request.signer_email}")
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions/{universign_tx_id}/signatures",
|
||||
auth=auth,
|
||||
|
|
@ -222,8 +177,7 @@ async def create_signature(
|
|||
if resp.status_code not in [200, 201]:
|
||||
raise HTTPException(500, "Erreur liaison signataire")
|
||||
|
||||
# Démarrage transaction
|
||||
logger.info("🚀 Démarrage transaction...")
|
||||
logger.info("Démarrage transaction...")
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions/{universign_tx_id}/start",
|
||||
auth=auth,
|
||||
|
|
@ -235,7 +189,6 @@ async def create_signature(
|
|||
|
||||
final_data = resp.json()
|
||||
|
||||
# Extraction URL de signature
|
||||
signer_url = ""
|
||||
if final_data.get("actions"):
|
||||
for action in final_data["actions"]:
|
||||
|
|
@ -246,14 +199,13 @@ async def create_signature(
|
|||
if not signer_url:
|
||||
raise HTTPException(500, "URL de signature non retournée")
|
||||
|
||||
logger.info("✅ URL de signature obtenue")
|
||||
logger.info("URL de signature obtenue")
|
||||
|
||||
# === ENREGISTREMENT LOCAL ===
|
||||
local_id = str(uuid.uuid4())
|
||||
|
||||
transaction = UniversignTransaction(
|
||||
id=local_id,
|
||||
transaction_id=universign_tx_id, # ⚠️ Utiliser l'ID Universign, ne jamais le changer
|
||||
transaction_id=universign_tx_id,
|
||||
sage_document_id=request.sage_document_id,
|
||||
sage_document_type=request.sage_document_type,
|
||||
universign_status=UniversignTransactionStatus.STARTED,
|
||||
|
|
@ -283,10 +235,9 @@ async def create_signature(
|
|||
await session.commit()
|
||||
|
||||
logger.info(
|
||||
f"💾 Transaction sauvegardée: {local_id} (Universign: {universign_tx_id})"
|
||||
f"Transaction sauvegardée: {local_id} (Universign: {universign_tx_id})"
|
||||
)
|
||||
|
||||
# === ENVOI EMAIL AVEC TEMPLATE ===
|
||||
template = templates_signature_email["demande_signature"]
|
||||
|
||||
type_labels = {
|
||||
|
|
@ -341,7 +292,6 @@ async def create_signature(
|
|||
|
||||
email_queue.enqueue(email_log.id)
|
||||
|
||||
# === MISE À JOUR STATUT SAGE (Confirmé = 1) ===
|
||||
try:
|
||||
from sage_client import sage_client
|
||||
|
||||
|
|
@ -356,7 +306,6 @@ async def create_signature(
|
|||
except Exception as e:
|
||||
logger.warning(f"Impossible de mettre à jour le statut Sage: {e}")
|
||||
|
||||
# === RÉPONSE ===
|
||||
return TransactionResponse(
|
||||
id=transaction.id,
|
||||
transaction_id=transaction.transaction_id,
|
||||
|
|
@ -436,6 +385,15 @@ async def list_transactions(
|
|||
}
|
||||
for s in tx.signers
|
||||
],
|
||||
signed_document_available=bool(
|
||||
tx.signed_document_path and Path(tx.signed_document_path).exists()
|
||||
),
|
||||
signed_document_downloaded_at=tx.signed_document_downloaded_at,
|
||||
signed_document_size_kb=(
|
||||
tx.signed_document_size_bytes / 1024
|
||||
if tx.signed_document_size_bytes
|
||||
else None
|
||||
),
|
||||
)
|
||||
for tx in transactions
|
||||
]
|
||||
|
|
@ -482,6 +440,15 @@ async def get_transaction(
|
|||
}
|
||||
for s in tx.signers
|
||||
],
|
||||
signed_document_available=bool(
|
||||
tx.signed_document_path and Path(tx.signed_document_path).exists()
|
||||
),
|
||||
signed_document_downloaded_at=tx.signed_document_downloaded_at,
|
||||
signed_document_size_kb=(
|
||||
tx.signed_document_size_bytes / 1024
|
||||
if tx.signed_document_size_bytes
|
||||
else None
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -538,52 +505,39 @@ async def webhook_universign(
|
|||
try:
|
||||
payload = await request.json()
|
||||
|
||||
# 📋 LOG COMPLET du payload pour débogage
|
||||
logger.info(
|
||||
f"📥 Webhook Universign reçu - Type: {payload.get('type', 'unknown')}"
|
||||
)
|
||||
logger.info(f"Webhook Universign reçu - Type: {payload.get('type', 'unknown')}")
|
||||
logger.debug(f"Payload complet: {json.dumps(payload, indent=2)}")
|
||||
|
||||
# ✅ EXTRACTION CORRECTE DU TRANSACTION_ID
|
||||
transaction_id = None
|
||||
|
||||
# 🔍 Structure 1 : Événements avec payload imbriqué (la plus courante)
|
||||
# Exemple : transaction.lifecycle.created, transaction.lifecycle.started, etc.
|
||||
if payload.get("type", "").startswith("transaction.") and "payload" in payload:
|
||||
# Le transaction_id est dans payload.object.id
|
||||
nested_object = payload.get("payload", {}).get("object", {})
|
||||
if nested_object.get("object") == "transaction":
|
||||
transaction_id = nested_object.get("id")
|
||||
logger.info(
|
||||
f"✅ Transaction ID extrait de payload.object.id: {transaction_id}"
|
||||
f"Transaction ID extrait de payload.object.id: {transaction_id}"
|
||||
)
|
||||
|
||||
# 🔍 Structure 2 : Action événements (action.opened, action.completed)
|
||||
elif payload.get("type", "").startswith("action."):
|
||||
# Le transaction_id est directement dans payload.object.transaction_id
|
||||
transaction_id = (
|
||||
payload.get("payload", {}).get("object", {}).get("transaction_id")
|
||||
)
|
||||
logger.info(
|
||||
f"✅ Transaction ID extrait de payload.object.transaction_id: {transaction_id}"
|
||||
f"Transaction ID extrait de payload.object.transaction_id: {transaction_id}"
|
||||
)
|
||||
|
||||
# 🔍 Structure 3 : Transaction directe (fallback)
|
||||
elif payload.get("object") == "transaction":
|
||||
transaction_id = payload.get("id")
|
||||
logger.info(f"✅ Transaction ID extrait direct: {transaction_id}")
|
||||
logger.info(f"Transaction ID extrait direct: {transaction_id}")
|
||||
|
||||
# 🔍 Structure 4 : Ancien format (pour rétro-compatibilité)
|
||||
elif "transaction" in payload:
|
||||
transaction_id = payload.get("transaction", {}).get("id")
|
||||
logger.info(
|
||||
f"✅ Transaction ID extrait de transaction.id: {transaction_id}"
|
||||
)
|
||||
logger.info(f"Transaction ID extrait de transaction.id: {transaction_id}")
|
||||
|
||||
# ❌ Échec d'extraction
|
||||
if not transaction_id:
|
||||
logger.error(
|
||||
f"❌ Transaction ID introuvable dans webhook\n"
|
||||
f"Transaction ID introuvable dans webhook\n"
|
||||
f"Type d'événement: {payload.get('type', 'unknown')}\n"
|
||||
f"Clés racine: {list(payload.keys())}\n"
|
||||
f"Payload simplifié: {json.dumps({k: v if k != 'payload' else '...' for k, v in payload.items()})}"
|
||||
|
|
@ -595,9 +549,8 @@ async def webhook_universign(
|
|||
"event_id": payload.get("id"),
|
||||
}, 400
|
||||
|
||||
logger.info(f"🎯 Transaction ID identifié: {transaction_id}")
|
||||
logger.info(f"Transaction ID identifié: {transaction_id}")
|
||||
|
||||
# Vérifier si la transaction existe localement
|
||||
query = select(UniversignTransaction).where(
|
||||
UniversignTransaction.transaction_id == transaction_id
|
||||
)
|
||||
|
|
@ -606,7 +559,7 @@ async def webhook_universign(
|
|||
|
||||
if not tx:
|
||||
logger.warning(
|
||||
f"⚠️ Transaction {transaction_id} inconnue en local\n"
|
||||
f"Transaction {transaction_id} inconnue en local\n"
|
||||
f"Type d'événement: {payload.get('type')}\n"
|
||||
f"Elle sera synchronisée au prochain polling"
|
||||
)
|
||||
|
|
@ -617,22 +570,20 @@ async def webhook_universign(
|
|||
"event_type": payload.get("type"),
|
||||
}
|
||||
|
||||
# Traiter le webhook
|
||||
success, error = await sync_service.process_webhook(
|
||||
session, payload, transaction_id
|
||||
)
|
||||
|
||||
if not success:
|
||||
logger.error(f"❌ Erreur traitement webhook: {error}")
|
||||
logger.error(f"Erreur traitement webhook: {error}")
|
||||
return {
|
||||
"status": "error",
|
||||
"message": error,
|
||||
"transaction_id": transaction_id,
|
||||
}, 500
|
||||
|
||||
# ✅ Succès
|
||||
logger.info(
|
||||
f"✅ Webhook traité avec succès\n"
|
||||
f"Webhook traité avec succès\n"
|
||||
f"Transaction: {transaction_id}\n"
|
||||
f"Nouveau statut: {tx.local_status.value if tx else 'unknown'}\n"
|
||||
f"Type d'événement: {payload.get('type')}"
|
||||
|
|
@ -647,7 +598,7 @@ async def webhook_universign(
|
|||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"💥 Erreur critique webhook: {e}", exc_info=True)
|
||||
logger.error(f"Erreur critique webhook: {e}", exc_info=True)
|
||||
return {"status": "error", "message": str(e)}, 500
|
||||
|
||||
|
||||
|
|
@ -655,17 +606,14 @@ async def webhook_universign(
|
|||
async def get_sync_stats(session: AsyncSession = Depends(get_session)):
|
||||
"""Statistiques globales de synchronisation"""
|
||||
|
||||
# Total
|
||||
total_query = select(func.count(UniversignTransaction.id))
|
||||
total = (await session.execute(total_query)).scalar()
|
||||
|
||||
# En attente de sync
|
||||
pending_query = select(func.count(UniversignTransaction.id)).where(
|
||||
UniversignTransaction.needs_sync
|
||||
)
|
||||
pending = (await session.execute(pending_query)).scalar()
|
||||
|
||||
# Par statut
|
||||
signed_query = select(func.count(UniversignTransaction.id)).where(
|
||||
UniversignTransaction.local_status == LocalDocumentStatus.SIGNED
|
||||
)
|
||||
|
|
@ -686,7 +634,6 @@ async def get_sync_stats(session: AsyncSession = Depends(get_session)):
|
|||
)
|
||||
expired = (await session.execute(expired_query)).scalar()
|
||||
|
||||
# Dernière sync
|
||||
last_sync_query = select(func.max(UniversignTransaction.last_synced_at))
|
||||
last_sync = (await session.execute(last_sync_query)).scalar()
|
||||
|
||||
|
|
@ -707,7 +654,6 @@ async def get_transaction_logs(
|
|||
limit: int = Query(50, le=500),
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
# Trouver la transaction
|
||||
tx_query = select(UniversignTransaction).where(
|
||||
UniversignTransaction.transaction_id == transaction_id
|
||||
)
|
||||
|
|
@ -717,7 +663,6 @@ async def get_transaction_logs(
|
|||
if not tx:
|
||||
raise HTTPException(404, "Transaction introuvable")
|
||||
|
||||
# Logs
|
||||
logs_query = (
|
||||
select(UniversignSyncLog)
|
||||
.where(UniversignSyncLog.transaction_id == tx.id)
|
||||
|
|
@ -746,9 +691,6 @@ async def get_transaction_logs(
|
|||
}
|
||||
|
||||
|
||||
# Ajouter ces routes dans universign.py
|
||||
|
||||
|
||||
@router.get("/documents/{sage_document_id}/signatures")
|
||||
async def get_signatures_for_document(
|
||||
sage_document_id: str,
|
||||
|
|
@ -790,10 +732,6 @@ async def cleanup_duplicate_signatures(
|
|||
),
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
"""
|
||||
Supprime les doublons de signatures pour un document.
|
||||
Garde une seule transaction (la plus récente ou ancienne selon le paramètre).
|
||||
"""
|
||||
query = (
|
||||
select(UniversignTransaction)
|
||||
.where(UniversignTransaction.sage_document_id == sage_document_id)
|
||||
|
|
@ -815,7 +753,6 @@ async def cleanup_duplicate_signatures(
|
|||
"deleted_count": 0,
|
||||
}
|
||||
|
||||
# Garder la première (selon l'ordre), supprimer les autres
|
||||
to_keep = transactions[0]
|
||||
to_delete = transactions[1:]
|
||||
|
||||
|
|
@ -875,13 +812,8 @@ async def delete_transaction(
|
|||
async def cleanup_all_duplicates(
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
"""
|
||||
Nettoie tous les doublons dans la base.
|
||||
Pour chaque document avec plusieurs transactions, garde la plus récente non-erreur ou la plus récente.
|
||||
"""
|
||||
from sqlalchemy import func
|
||||
|
||||
# Trouver les documents avec plusieurs transactions
|
||||
subquery = (
|
||||
select(
|
||||
UniversignTransaction.sage_document_id,
|
||||
|
|
@ -899,7 +831,6 @@ async def cleanup_all_duplicates(
|
|||
cleanup_details = []
|
||||
|
||||
for doc_id in duplicate_docs:
|
||||
# Récupérer toutes les transactions pour ce document
|
||||
tx_query = (
|
||||
select(UniversignTransaction)
|
||||
.where(UniversignTransaction.sage_document_id == doc_id)
|
||||
|
|
@ -908,7 +839,6 @@ async def cleanup_all_duplicates(
|
|||
tx_result = await session.execute(tx_query)
|
||||
transactions = tx_result.scalars().all()
|
||||
|
||||
# Priorité: SIGNE > EN_COURS > EN_ATTENTE > autres
|
||||
priority = {"SIGNE": 0, "EN_COURS": 1, "EN_ATTENTE": 2}
|
||||
|
||||
def sort_key(tx):
|
||||
|
|
@ -946,115 +876,11 @@ async def cleanup_all_duplicates(
|
|||
}
|
||||
|
||||
|
||||
@router.get("/admin/diagnostic", tags=["Admin"])
async def diagnostic_complet(session: AsyncSession = Depends(get_session)):
    """Full health diagnostic of the Universign transactions table.

    Aggregates counts (total, per local status), detects stale syncs,
    duplicate documents, sync errors and missing webhooks, and returns a
    JSON report with actionable recommendations (French, user-facing).

    Raises:
        HTTPException: 500 on any unexpected database error.
    """
    try:
        # Overall transaction count.
        total_query = select(func.count(UniversignTransaction.id))
        total = (await session.execute(total_query)).scalar()

        # Breakdown by local status (enum value -> count).
        statuts_query = select(
            UniversignTransaction.local_status, func.count(UniversignTransaction.id)
        ).group_by(UniversignTransaction.local_status)
        statuts_result = await session.execute(statuts_query)
        statuts = {status.value: count for status, count in statuts_result.all()}

        # Transactions flagged for sync whose last sync is older than 1 hour
        # (or that were never synced at all).
        date_limite = datetime.now() - timedelta(hours=1)
        sans_sync_query = select(func.count(UniversignTransaction.id)).where(
            and_(
                UniversignTransaction.needs_sync == True,
                or_(
                    UniversignTransaction.last_synced_at < date_limite,
                    UniversignTransaction.last_synced_at.is_(None),
                ),
            )
        )
        sans_sync = (await session.execute(sans_sync_query)).scalar()

        # Potential duplicates: sage documents with more than one transaction.
        doublons_query = (
            select(
                UniversignTransaction.sage_document_id,
                func.count(UniversignTransaction.id).label("count"),
            )
            .group_by(UniversignTransaction.sage_document_id)
            .having(func.count(UniversignTransaction.id) > 1)
        )
        doublons_result = await session.execute(doublons_query)
        doublons = doublons_result.fetchall()

        # Transactions whose last sync attempt recorded an error.
        erreurs_query = select(func.count(UniversignTransaction.id)).where(
            UniversignTransaction.sync_error.isnot(None)
        )
        erreurs = (await session.execute(erreurs_query)).scalar()

        # Non-pending transactions that never received a webhook —
        # a status change happened without Universign notifying us.
        sans_webhook_query = select(func.count(UniversignTransaction.id)).where(
            and_(
                UniversignTransaction.webhook_received == False,
                UniversignTransaction.local_status != LocalDocumentStatus.PENDING,
            )
        )
        sans_webhook = (await session.execute(sans_webhook_query)).scalar()

        # Assemble the report payload (keys are part of the API contract).
        diagnostic = {
            "timestamp": datetime.now().isoformat(),
            "total_transactions": total,
            "repartition_statuts": statuts,
            "problemes_detectes": {
                "sans_sync_recente": sans_sync,
                "doublons_possibles": len(doublons),
                "erreurs_sync": erreurs,
                "sans_webhook": sans_webhook,
            },
            "documents_avec_doublons": [
                {"document_id": doc_id, "nombre_transactions": count}
                for doc_id, count in doublons
            ],
            "recommandations": [],
        }

        # Human-readable remediation hints, one per detected problem class.
        if sans_sync > 0:
            diagnostic["recommandations"].append(
                f"🔄 {sans_sync} transaction(s) à synchroniser. "
                f"Utilisez POST /universign/sync/all"
            )

        if len(doublons) > 0:
            diagnostic["recommandations"].append(
                f"⚠️ {len(doublons)} document(s) avec doublons. "
                f"Utilisez POST /universign/cleanup/all-duplicates"
            )

        if erreurs > 0:
            diagnostic["recommandations"].append(
                f"❌ {erreurs} transaction(s) en erreur. "
                f"Vérifiez les logs avec GET /universign/transactions?status=ERREUR"
            )

        return diagnostic

    except Exception as e:
        logger.error(f"Erreur diagnostic: {e}")
        raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
@router.post("/admin/force-sync-all", tags=["Admin"])
|
||||
async def forcer_sync_toutes_transactions(
|
||||
max_transactions: int = Query(200, le=500),
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
"""
|
||||
Force la synchronisation de TOUTES les transactions (même finales)
|
||||
À utiliser pour réparer les incohérences
|
||||
"""
|
||||
try:
|
||||
query = (
|
||||
select(UniversignTransaction)
|
||||
|
|
@ -1079,7 +905,7 @@ async def forcer_sync_toutes_transactions(
|
|||
previous_status = transaction.local_status.value
|
||||
|
||||
logger.info(
|
||||
f"🔄 Force sync: {transaction.transaction_id} (statut: {previous_status})"
|
||||
f"Force sync: {transaction.transaction_id} (statut: {previous_status})"
|
||||
)
|
||||
|
||||
success, error = await sync_service.sync_transaction(
|
||||
|
|
@ -1128,9 +954,6 @@ async def forcer_sync_toutes_transactions(
|
|||
async def reparer_transaction(
|
||||
transaction_id: str, session: AsyncSession = Depends(get_session)
|
||||
):
|
||||
"""
|
||||
Répare une transaction spécifique en la re-synchronisant depuis Universign
|
||||
"""
|
||||
try:
|
||||
query = select(UniversignTransaction).where(
|
||||
UniversignTransaction.transaction_id == transaction_id
|
||||
|
|
@ -1148,7 +971,6 @@ async def reparer_transaction(
|
|||
else None
|
||||
)
|
||||
|
||||
# Force sync
|
||||
success, error = await sync_service.sync_transaction(
|
||||
session, transaction, force=True
|
||||
)
|
||||
|
|
@ -1185,11 +1007,7 @@ async def reparer_transaction(
|
|||
async def trouver_transactions_inconsistantes(
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
"""
|
||||
Trouve les transactions dont le statut local ne correspond pas au statut Universign
|
||||
"""
|
||||
try:
|
||||
# Toutes les transactions non-finales
|
||||
query = select(UniversignTransaction).where(
|
||||
UniversignTransaction.local_status.in_(
|
||||
[LocalDocumentStatus.PENDING, LocalDocumentStatus.IN_PROGRESS]
|
||||
|
|
@ -1203,7 +1021,6 @@ async def trouver_transactions_inconsistantes(
|
|||
|
||||
for tx in transactions:
|
||||
try:
|
||||
# Récupérer le statut depuis Universign
|
||||
universign_data = sync_service.fetch_transaction_status(
|
||||
tx.transaction_id
|
||||
)
|
||||
|
|
@ -1272,9 +1089,6 @@ async def nettoyer_transactions_erreur(
|
|||
),
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
"""
|
||||
Nettoie les transactions en erreur anciennes
|
||||
"""
|
||||
try:
|
||||
date_limite = datetime.now() - timedelta(days=age_jours)
|
||||
|
||||
|
|
@ -1318,9 +1132,6 @@ async def nettoyer_transactions_erreur(
|
|||
async def voir_dernier_webhook(
|
||||
transaction_id: str, session: AsyncSession = Depends(get_session)
|
||||
):
|
||||
"""
|
||||
Affiche le dernier payload webhook reçu pour une transaction
|
||||
"""
|
||||
try:
|
||||
query = select(UniversignTransaction).where(
|
||||
UniversignTransaction.transaction_id == transaction_id
|
||||
|
|
@ -1331,7 +1142,6 @@ async def voir_dernier_webhook(
|
|||
if not tx:
|
||||
raise HTTPException(404, "Transaction introuvable")
|
||||
|
||||
# Récupérer le dernier log de type webhook
|
||||
logs_query = (
|
||||
select(UniversignSyncLog)
|
||||
.where(
|
||||
|
|
@ -1373,3 +1183,102 @@ async def voir_dernier_webhook(
|
|||
except Exception as e:
|
||||
logger.error(f"Erreur debug webhook: {e}")
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
@router.get(
    "/transactions/{transaction_id}/document/download", tags=["Documents Signés"]
)
async def telecharger_document_signe(
    transaction_id: str, session: AsyncSession = Depends(get_session)
):
    """Stream the locally stored signed PDF for a transaction.

    Responds 404 when the transaction is unknown, when no signed document
    has been downloaded yet, or when the recorded file is missing from
    disk. Unexpected failures surface as 500.
    """
    try:
        stmt = select(UniversignTransaction).where(
            UniversignTransaction.transaction_id == transaction_id
        )
        tx = (await session.execute(stmt)).scalar_one_or_none()

        if tx is None:
            raise HTTPException(404, f"Transaction {transaction_id} introuvable")

        if not tx.signed_document_path:
            raise HTTPException(
                404,
                "Document signé non disponible localement. "
                "Utilisez POST /admin/download-missing-documents pour le récupérer.",
            )

        pdf_path = Path(tx.signed_document_path)
        if not pdf_path.exists():
            # The DB points at a file that is gone (volume wipe, manual delete).
            logger.warning(f"Fichier perdu : {pdf_path}")
            raise HTTPException(
                404,
                "Fichier introuvable sur le serveur. "
                "Utilisez POST /admin/download-missing-documents pour le récupérer.",
            )

        # Friendly download name: <doc id>_<doc type>_signe.pdf
        suggested_name = f"{tx.sage_document_id}_{tx.sage_document_type.name}_signe.pdf"

        return FileResponse(
            path=str(pdf_path),
            media_type="application/pdf",
            filename=suggested_name,
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Erreur téléchargement document : {e}", exc_info=True)
        raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
@router.get("/transactions/{transaction_id}/document/info", tags=["Documents Signés"])
async def info_document_signe(
    transaction_id: str, session: AsyncSession = Depends(get_session)
):
    """Return metadata about the signed document of a transaction.

    Reports local availability, download timestamp, file size, download
    attempts and last error, without streaming the file itself.

    Raises:
        HTTPException: 404 when the transaction is unknown, 500 on
            unexpected errors.
    """
    try:
        query = select(UniversignTransaction).where(
            UniversignTransaction.transaction_id == transaction_id
        )
        result = await session.execute(query)
        transaction = result.scalar_one_or_none()

        if not transaction:
            raise HTTPException(404, f"Transaction {transaction_id} introuvable")

        file_exists = False
        file_size_mb = None

        if transaction.signed_document_path:
            file_path = Path(transaction.signed_document_path)
            file_exists = file_path.exists()

            if file_exists:
                # Path.stat() avoids a second os.path round-trip.
                file_size_mb = file_path.stat().st_size / (1024 * 1024)

        return {
            "transaction_id": transaction_id,
            "document_available_locally": file_exists,
            "document_url_universign": transaction.document_url,
            "downloaded_at": (
                transaction.signed_document_downloaded_at.isoformat()
                if transaction.signed_document_downloaded_at
                else None
            ),
            # BUG FIX: the previous `if file_size_mb` truthiness test reported
            # an existing 0-byte file as having no size; compare against None
            # so 0.0 is returned faithfully.
            "file_size_mb": (
                round(file_size_mb, 2) if file_size_mb is not None else None
            ),
            "download_attempts": transaction.download_attempts,
            "last_download_error": transaction.download_error,
            "local_path": transaction.signed_document_path if file_exists else None,
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Erreur info document : {e}")
        raise HTTPException(500, str(e))
||||
|
|
|
|||
|
|
@ -408,7 +408,7 @@ class SageGatewayClient:
|
|||
return self._post(
|
||||
"/sage/collaborateurs/list",
|
||||
{
|
||||
"filtre": filtre or "", # ⚠️ Convertir None en ""
|
||||
"filtre": filtre or "", # Convertir None en ""
|
||||
"actifs_seulement": actifs_seulement,
|
||||
},
|
||||
).get("data", [])
|
||||
|
|
|
|||
|
|
@ -26,7 +26,13 @@ from schemas.documents.documents import TypeDocument, TypeDocumentSQL
|
|||
from schemas.documents.email import StatutEmail, EmailEnvoi
|
||||
from schemas.documents.factures import FactureCreate, FactureUpdate
|
||||
from schemas.documents.livraisons import LivraisonCreate, LivraisonUpdate
|
||||
from schemas.documents.universign import Signature, StatutSignature
|
||||
from schemas.documents.universign import (
|
||||
Signature,
|
||||
StatutSignature,
|
||||
SyncStatsResponse,
|
||||
CreateSignatureRequest,
|
||||
TransactionResponse,
|
||||
)
|
||||
from schemas.articles.articles import (
|
||||
ArticleCreate,
|
||||
Article,
|
||||
|
|
@ -105,4 +111,7 @@ __all__ = [
|
|||
"SageGatewayTest",
|
||||
"SageGatewayStatsResponse",
|
||||
"CurrentGatewayInfo",
|
||||
"SyncStatsResponse",
|
||||
"CreateSignatureRequest",
|
||||
"TransactionResponse",
|
||||
]
|
||||
|
|
|
|||
|
|
@ -1,6 +1,12 @@
|
|||
from pydantic import BaseModel, EmailStr
|
||||
from enum import Enum
|
||||
from schemas.documents.documents import TypeDocument
|
||||
from database import (
|
||||
SageDocumentType,
|
||||
)
|
||||
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class StatutSignature(str, Enum):
|
||||
|
|
@ -16,3 +22,49 @@ class Signature(BaseModel):
|
|||
type_doc: TypeDocument
|
||||
email_signataire: EmailStr
|
||||
nom_signataire: str
|
||||
|
||||
|
||||
class CreateSignatureRequest(BaseModel):
    """Request payload for creating a signature transaction."""

    # Sage document to be sent for signature.
    sage_document_id: str
    # Document kind, as declared by the database layer's SageDocumentType enum.
    sage_document_type: SageDocumentType
    # Contact details of the signer.
    signer_email: EmailStr
    signer_name: str
    # Optional display name for the document.
    document_name: Optional[str] = None
|
||||
|
||||
class TransactionResponse(BaseModel):
    """Detailed API representation of a Universign transaction."""

    # Identity.
    id: str
    transaction_id: str
    sage_document_id: str
    sage_document_type: str

    # Status, both on Universign's side and locally (plus a display label).
    universign_status: str
    local_status: str
    local_status_label: str

    # Universign URLs (absent until the transaction provides them).
    signer_url: Optional[str]
    document_url: Optional[str]

    # Lifecycle timestamps.
    created_at: datetime
    sent_at: Optional[datetime]
    signed_at: Optional[datetime]
    last_synced_at: Optional[datetime]
    needs_sync: bool
    signers: List[dict]

    # Local signed-PDF storage metadata (defaults apply until the file
    # has actually been downloaded).
    signed_document_available: bool = False
    signed_document_downloaded_at: Optional[datetime] = None
    signed_document_size_kb: Optional[float] = None
|
||||
|
||||
|
||||
class SyncStatsResponse(BaseModel):
    """Aggregate synchronization statistics for Universign transactions."""

    # Total number of tracked transactions.
    total_transactions: int
    # Transactions still awaiting a sync pass.
    pending_sync: int
    # Counts per terminal / in-flight local status.
    signed: int
    in_progress: int
    refused: int
    expired: int
    # Timestamp of the most recent sync, if any has run.
    last_sync_at: Optional[datetime]
|
||||
|
|
|
|||
|
|
@ -55,7 +55,7 @@ class SageGatewayService:
|
|||
and_(
|
||||
SageGatewayConfig.id == gateway_id,
|
||||
SageGatewayConfig.user_id == user_id,
|
||||
# SageGatewayConfig.is_deleted.is_(false()),
|
||||
SageGatewayConfig.is_deleted == false(),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
|
@ -67,7 +67,7 @@ class SageGatewayService:
|
|||
query = select(SageGatewayConfig).where(SageGatewayConfig.user_id == user_id)
|
||||
|
||||
if not include_deleted:
|
||||
query = query.where(SageGatewayConfig.is_deleted.is_(false()))
|
||||
query = query.where(SageGatewayConfig.is_deleted == false())
|
||||
|
||||
query = query.order_by(
|
||||
SageGatewayConfig.is_active.desc(),
|
||||
|
|
@ -167,7 +167,7 @@ class SageGatewayService:
|
|||
and_(
|
||||
SageGatewayConfig.user_id == user_id,
|
||||
SageGatewayConfig.is_active,
|
||||
SageGatewayConfig.is_deleted.is_(true()),
|
||||
SageGatewayConfig.is_deleted == false(),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
|
|
|||
378
services/universign_document.py
Normal file
378
services/universign_document.py
Normal file
|
|
@ -0,0 +1,378 @@
|
|||
import os
|
||||
import logging
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from typing import Optional, Tuple, Dict, List
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
# Module-level logger for this service.
logger = logging.getLogger(__name__)

# Storage directory for downloaded signed PDFs; overridable through the
# SIGNED_DOCS_PATH environment variable. Created eagerly at import time.
SIGNED_DOCS_DIR = Path(os.getenv("SIGNED_DOCS_PATH", "/app/data/signed_documents"))
SIGNED_DOCS_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
class UniversignDocumentService:
    """Manage signed Universign documents: fetch, download, store, diagnose.

    Talks to the Universign REST API with HTTP basic auth (API key as the
    user, empty password) via synchronous ``requests`` calls, and persists
    downloaded PDFs under ``SIGNED_DOCS_DIR``.
    """

    def __init__(self, api_url: str, api_key: str, timeout: int = 60):
        """Store API base URL (trailing slash stripped), key and timeout."""
        self.api_url = api_url.rstrip("/")
        self.api_key = api_key
        self.timeout = timeout
        # Basic-auth pair: API key as username, empty password.
        self.auth = (api_key, "")

    def fetch_transaction_documents(self, transaction_id: str) -> Optional[List[Dict]]:
        """Fetch the document list of a transaction from Universign.

        Returns the raw ``documents`` array from the API response, or
        ``None`` on 404, non-200 status, timeout, or any other failure
        (errors are logged, never raised).
        """
        try:
            logger.info(f"📋 Récupération documents pour transaction: {transaction_id}")

            response = requests.get(
                f"{self.api_url}/transactions/{transaction_id}",
                auth=self.auth,
                timeout=self.timeout,
                headers={"Accept": "application/json"},
            )

            if response.status_code == 200:
                data = response.json()
                documents = data.get("documents", [])

                logger.info(f"✅ {len(documents)} document(s) trouvé(s)")

                # Per-document debug trace (id / name / status).
                for idx, doc in enumerate(documents):
                    logger.debug(
                        f" Document {idx}: id={doc.get('id')}, "
                        f"name={doc.get('name')}, status={doc.get('status')}"
                    )

                return documents

            elif response.status_code == 404:
                logger.warning(
                    f"Transaction {transaction_id} introuvable sur Universign"
                )
                return None

            else:
                # Truncate the body to keep log lines bounded.
                logger.error(
                    f"Erreur HTTP {response.status_code} pour {transaction_id}: "
                    f"{response.text[:500]}"
                )
                return None

        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout récupération transaction {transaction_id}")
            return None
        except Exception as e:
            logger.error(f"❌ Erreur fetch documents: {e}", exc_info=True)
            return None

    def download_signed_document(
        self, transaction_id: str, document_id: str
    ) -> Optional[bytes]:
        """Download a signed document's PDF bytes from Universign.

        Returns the raw content, or ``None`` on HTTP error, timeout, or
        when the payload is implausibly small (< 1 KiB). Never raises.
        """
        try:
            download_url = (
                f"{self.api_url}/transactions/{transaction_id}"
                f"/documents/{document_id}/download"
            )

            logger.info(f"Téléchargement depuis: {download_url}")

            response = requests.get(
                download_url,
                auth=self.auth,
                timeout=self.timeout,
                stream=True,
            )

            if response.status_code == 200:
                content_type = response.headers.get("Content-Type", "")
                content_length = response.headers.get("Content-Length", "unknown")

                logger.info(
                    f"✅ Téléchargement réussi: "
                    f"Content-Type={content_type}, Size={content_length}"
                )

                # Content-type sanity check; accept pdf or octet-stream,
                # warn (but still read) anything else.
                if (
                    "pdf" not in content_type.lower()
                    and "octet-stream" not in content_type.lower()
                ):
                    logger.warning(
                        f"⚠️ Type de contenu inattendu: {content_type}. "
                        f"Tentative de lecture quand même..."
                    )

                # .content reads the full body despite stream=True.
                content = response.content

                # A real signed PDF is never this small — treat as failure.
                if len(content) < 1024:
                    logger.error(f"❌ Document trop petit: {len(content)} octets")
                    return None

                return content

            elif response.status_code == 404:
                logger.error(
                    f"❌ Document {document_id} introuvable pour transaction {transaction_id}"
                )
                return None

            elif response.status_code == 403:
                # 403 typically means the document is not yet downloadable.
                logger.error(
                    f"❌ Accès refusé au document {document_id}. "
                    f"Vérifiez que la transaction est bien signée."
                )
                return None

            else:
                logger.error(
                    f"❌ Erreur HTTP {response.status_code}: {response.text[:500]}"
                )
                return None

        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout téléchargement document {document_id}")
            return None
        except Exception as e:
            logger.error(f"❌ Erreur téléchargement: {e}", exc_info=True)
            return None

    async def download_and_store_signed_document(
        self, session: AsyncSession, transaction, force: bool = False
    ) -> Tuple[bool, Optional[str]]:
        """Download a transaction's signed PDF and persist it locally.

        Updates the transaction row (path, timestamp, size, error fields)
        and commits at every outcome. Returns ``(True, None)`` on success
        or when the file is already present (unless ``force``), otherwise
        ``(False, error_message)``.
        """
        # Idempotency: skip when already downloaded and the file still exists.
        if not force and transaction.signed_document_path:
            if os.path.exists(transaction.signed_document_path):
                logger.debug(
                    f"✅ Document déjà téléchargé: {transaction.transaction_id}"
                )
                return True, None

        # Count the attempt regardless of outcome.
        transaction.download_attempts += 1

        try:
            # STEP 1: fetch the transaction's document list.
            logger.info(
                f"Récupération document signé pour: {transaction.transaction_id}"
            )

            documents = self.fetch_transaction_documents(transaction.transaction_id)

            if not documents:
                error = "Aucun document trouvé dans la transaction Universign"
                logger.warning(f"⚠️ {error}")
                transaction.download_error = error
                await session.commit()
                return False, error

            # STEP 2: pick the first explicitly signed/completed document,
            # falling back to the first document in the list.
            document_id = None
            for doc in documents:
                doc_id = doc.get("id")
                doc_status = doc.get("status", "").lower()

                if doc_status in ["signed", "completed", "closed"]:
                    document_id = doc_id
                    logger.info(
                        f"Document signé trouvé: {doc_id} (status: {doc_status})"
                    )
                    break

                # Only set once, so the fallback is the FIRST document seen.
                if document_id is None:
                    document_id = doc_id

            if not document_id:
                error = "Impossible de déterminer l'ID du document à télécharger"
                logger.error(f"❌ {error}")
                transaction.download_error = error
                await session.commit()
                return False, error

            # Record the Universign document id when the model supports it
            # (hasattr guard keeps older schemas working).
            if hasattr(transaction, "universign_document_id"):
                transaction.universign_document_id = document_id

            # STEP 3: download the signed PDF bytes.
            pdf_content = self.download_signed_document(
                transaction_id=transaction.transaction_id, document_id=document_id
            )

            if not pdf_content:
                error = f"Échec téléchargement document {document_id}"
                logger.error(f"❌ {error}")
                transaction.download_error = error
                await session.commit()
                return False, error

            # STEP 4: persist the file under SIGNED_DOCS_DIR.
            filename = self._generate_filename(transaction)
            file_path = SIGNED_DOCS_DIR / filename

            with open(file_path, "wb") as f:
                f.write(pdf_content)

            file_size = os.path.getsize(file_path)

            # Update the transaction row with storage metadata.
            transaction.signed_document_path = str(file_path)
            transaction.signed_document_downloaded_at = datetime.now()
            transaction.signed_document_size_bytes = file_size
            transaction.download_error = None

            # Also keep the Universign download URL for reference.
            transaction.document_url = (
                f"{self.api_url}/transactions/{transaction.transaction_id}"
                f"/documents/{document_id}/download"
            )

            await session.commit()

            # NOTE(review): the literal "(unknown)" below looks like a lost
            # placeholder (probably meant to be the filename) — confirm.
            logger.info(
                f"✅ Document signé téléchargé: (unknown) ({file_size / 1024:.1f} KB)"
            )

            return True, None

        except OSError as e:
            # Filesystem failure (disk full, permissions, bad path).
            error = f"Erreur filesystem: {str(e)}"
            logger.error(f"❌ {error}")
            transaction.download_error = error
            await session.commit()
            return False, error

        except Exception as e:
            error = f"Erreur inattendue: {str(e)}"
            logger.error(f"❌ {error}", exc_info=True)
            transaction.download_error = error
            await session.commit()
            return False, error

    def _generate_filename(self, transaction) -> str:
        """Build a unique filename: <doc id>_<tx id>_<timestamp>_signed.pdf."""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        # Drop the "tr_" prefix from the Universign transaction id.
        tx_id = transaction.transaction_id.replace("tr_", "")
        filename = f"{transaction.sage_document_id}_{tx_id}_{timestamp}_signed.pdf"
        return filename

    def get_document_path(self, transaction) -> Optional[Path]:
        """Return the local path of the signed document if it exists on disk."""
        if not transaction.signed_document_path:
            return None
        path = Path(transaction.signed_document_path)
        if path.exists():
            return path
        return None

    async def cleanup_old_documents(self, days_to_keep: int = 90) -> Tuple[int, int]:
        """Delete signed PDFs older than ``days_to_keep`` days.

        Returns ``(deleted_count, freed_megabytes)``; per-file deletion
        errors are logged and skipped.
        """
        from datetime import timedelta

        cutoff_date = datetime.now() - timedelta(days=days_to_keep)
        deleted = 0
        size_freed = 0  # bytes

        for file_path in SIGNED_DOCS_DIR.glob("*.pdf"):
            try:
                # Age is judged by filesystem mtime.
                file_time = datetime.fromtimestamp(os.path.getmtime(file_path))
                if file_time < cutoff_date:
                    size_freed += os.path.getsize(file_path)
                    os.remove(file_path)
                    deleted += 1
                    logger.info(f"🗑️ Supprimé: {file_path.name}")
            except Exception as e:
                logger.error(f"Erreur suppression {file_path}: {e}")

        size_freed_mb = size_freed / (1024 * 1024)
        logger.info(
            f"Nettoyage terminé: {deleted} fichiers supprimés "
            f"({size_freed_mb:.2f} MB libérés)"
        )

        # Second element is truncated to whole megabytes.
        return deleted, int(size_freed_mb)

    # === DIAGNOSTIC HELPERS ===

    def diagnose_transaction(self, transaction_id: str) -> Dict:
        """Run a full connectivity/availability diagnostic for debugging.

        Probes the transaction endpoint, summarises its documents, and
        HEAD-checks each document's download URL. Returns a dict report;
        never raises (failures are captured under ``error`` keys).
        """
        result = {
            "transaction_id": transaction_id,
            "api_url": self.api_url,
            "timestamp": datetime.now().isoformat(),
            "checks": {},
        }

        try:
            # Check 1: can we fetch the transaction at all?
            logger.info(f"Diagnostic transaction: {transaction_id}")

            response = requests.get(
                f"{self.api_url}/transactions/{transaction_id}",
                auth=self.auth,
                timeout=self.timeout,
            )

            result["checks"]["transaction_fetch"] = {
                "status_code": response.status_code,
                "success": response.status_code == 200,
            }

            if response.status_code != 200:
                result["checks"]["transaction_fetch"]["error"] = response.text[:500]
                return result

            data = response.json()

            # Summary of the transaction payload.
            result["checks"]["transaction_data"] = {
                "state": data.get("state"),
                "documents_count": len(data.get("documents", [])),
                "participants_count": len(data.get("participants", [])),
            }

            # Check 2: per-document availability.
            documents = data.get("documents", [])
            result["checks"]["documents"] = []

            for doc in documents:
                doc_info = {
                    "id": doc.get("id"),
                    "name": doc.get("name"),
                    "status": doc.get("status"),
                }

                # HEAD-probe the download URL (no body transfer).
                if doc.get("id"):
                    download_url = (
                        f"{self.api_url}/transactions/{transaction_id}"
                        f"/documents/{doc['id']}/download"
                    )

                    try:
                        dl_response = requests.head(
                            download_url,
                            auth=self.auth,
                            timeout=10,
                        )
                        doc_info["download_check"] = {
                            "url": download_url,
                            "status_code": dl_response.status_code,
                            # 302 counts as accessible (redirect to storage).
                            "accessible": dl_response.status_code in [200, 302],
                            "content_type": dl_response.headers.get("Content-Type"),
                        }
                    except Exception as e:
                        doc_info["download_check"] = {"error": str(e)}

                result["checks"]["documents"].append(doc_info)

            result["success"] = True

        except Exception as e:
            result["success"] = False
            result["error"] = str(e)

        return result
|
||||
|
|
@ -19,6 +19,7 @@ from database import (
|
|||
StatutEmail,
|
||||
)
|
||||
from data.data import templates_signature_email
|
||||
from services.universign_document import UniversignDocumentService
|
||||
from utils.universign_status_mapping import (
|
||||
map_universign_to_local,
|
||||
is_transition_allowed,
|
||||
|
|
@ -39,6 +40,9 @@ class UniversignSyncService:
|
|||
self.sage_client = None
|
||||
self.email_queue = None
|
||||
self.settings = None
|
||||
self.document_service = UniversignDocumentService(
|
||||
api_url=api_url, api_key=api_key, timeout=60
|
||||
)
|
||||
|
||||
def configure(self, sage_client, email_queue, settings):
|
||||
self.sage_client = sage_client
|
||||
|
|
@ -201,7 +205,7 @@ class UniversignSyncService:
|
|||
transaction = result.scalar_one_or_none()
|
||||
|
||||
if not transaction:
|
||||
logger.warning(f"⚠️ Transaction {transaction_id} inconnue localement")
|
||||
logger.warning(f"Transaction {transaction_id} inconnue localement")
|
||||
return False, "Transaction inconnue"
|
||||
|
||||
# Marquer comme webhook reçu
|
||||
|
|
@ -218,7 +222,7 @@ class UniversignSyncService:
|
|||
# Log du changement de statut
|
||||
if success and transaction.local_status.value != old_status:
|
||||
logger.info(
|
||||
f"✅ Webhook traité: {transaction_id} | "
|
||||
f"Webhook traité: {transaction_id} | "
|
||||
f"{old_status} → {transaction.local_status.value}"
|
||||
)
|
||||
|
||||
|
|
@ -233,7 +237,7 @@ class UniversignSyncService:
|
|||
new_status=transaction.local_status.value,
|
||||
changes=json.dumps(
|
||||
payload, default=str
|
||||
), # ✅ Ajout default=str pour éviter les erreurs JSON
|
||||
), # Ajout default=str pour éviter les erreurs JSON
|
||||
)
|
||||
|
||||
await session.commit()
|
||||
|
|
@ -267,7 +271,7 @@ class UniversignSyncService:
|
|||
logger.warning(f"Signataire sans email à l'index {idx}, ignoré")
|
||||
continue
|
||||
|
||||
# ✅ PROTECTION : gérer les statuts inconnus
|
||||
# PROTECTION : gérer les statuts inconnus
|
||||
raw_status = signer_data.get("status") or signer_data.get(
|
||||
"state", "waiting"
|
||||
)
|
||||
|
|
@ -298,7 +302,7 @@ class UniversignSyncService:
|
|||
if signer_data.get("name") and not signer.name:
|
||||
signer.name = signer_data.get("name")
|
||||
else:
|
||||
# ✅ Nouveau signer avec gestion d'erreur intégrée
|
||||
# Nouveau signer avec gestion d'erreur intégrée
|
||||
try:
|
||||
signer = UniversignSigner(
|
||||
id=f"{transaction.id}_signer_{idx}_{int(datetime.now().timestamp())}",
|
||||
|
|
@ -318,39 +322,34 @@ class UniversignSyncService:
|
|||
except Exception as e:
|
||||
logger.error(f"Erreur création signer {email}: {e}")
|
||||
|
||||
# CORRECTION 3 : Amélioration du logging dans sync_transaction
|
||||
async def sync_transaction(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
transaction: UniversignTransaction,
|
||||
session,
|
||||
transaction,
|
||||
force: bool = False,
|
||||
) -> Tuple[bool, Optional[str]]:
|
||||
"""
|
||||
CORRECTION : Meilleur logging et gestion d'erreurs
|
||||
"""
|
||||
):
|
||||
import json
|
||||
|
||||
# Si statut final et pas de force, skip
|
||||
if is_final_status(transaction.local_status.value) and not force:
|
||||
logger.debug(
|
||||
f"⏭️ Skip {transaction.transaction_id}: statut final {transaction.local_status.value}"
|
||||
f"⏭️ Skip {transaction.transaction_id}: statut final "
|
||||
f"{transaction.local_status.value}"
|
||||
)
|
||||
transaction.needs_sync = False
|
||||
await session.commit()
|
||||
return True, None
|
||||
|
||||
# Récupération du statut distant
|
||||
logger.info(f"🔄 Synchronisation: {transaction.transaction_id}")
|
||||
logger.info(f"Synchronisation: {transaction.transaction_id}")
|
||||
|
||||
result = self.fetch_transaction_status(transaction.transaction_id)
|
||||
|
||||
if not result:
|
||||
error = "Échec récupération données Universign"
|
||||
logger.error(f"❌ {error}: {transaction.transaction_id}")
|
||||
|
||||
# ✅ CORRECTION : Incrémenter les tentatives MÊME en cas d'échec
|
||||
transaction.sync_attempts += 1
|
||||
transaction.sync_error = error
|
||||
|
||||
await self._log_sync_attempt(session, transaction, "polling", False, error)
|
||||
await session.commit()
|
||||
return False, error
|
||||
|
|
@ -366,7 +365,7 @@ class UniversignSyncService:
|
|||
previous_local_status = transaction.local_status.value
|
||||
|
||||
logger.info(
|
||||
f"🔄 Mapping: {universign_status_raw} (Universign) → "
|
||||
f"Mapping: {universign_status_raw} (Universign) → "
|
||||
f"{new_local_status} (Local) | Actuel: {previous_local_status}"
|
||||
)
|
||||
|
||||
|
|
@ -378,9 +377,7 @@ class UniversignSyncService:
|
|||
new_local_status = resolve_status_conflict(
|
||||
previous_local_status, new_local_status
|
||||
)
|
||||
logger.info(
|
||||
f"✅ Résolution conflit: statut résolu = {new_local_status}"
|
||||
)
|
||||
logger.info(f"Résolution conflit: statut résolu = {new_local_status}")
|
||||
|
||||
status_changed = previous_local_status != new_local_status
|
||||
|
||||
|
|
@ -396,7 +393,6 @@ class UniversignSyncService:
|
|||
)
|
||||
except ValueError:
|
||||
logger.warning(f"⚠️ Statut Universign inconnu: {universign_status_raw}")
|
||||
# Fallback intelligent
|
||||
if new_local_status == "SIGNE":
|
||||
transaction.universign_status = (
|
||||
UniversignTransactionStatus.COMPLETED
|
||||
|
|
@ -408,7 +404,7 @@ class UniversignSyncService:
|
|||
else:
|
||||
transaction.universign_status = UniversignTransactionStatus.STARTED
|
||||
|
||||
# ✅ Mise à jour du statut local
|
||||
# Mise à jour du statut local
|
||||
transaction.local_status = LocalDocumentStatus(new_local_status)
|
||||
transaction.universign_status_updated_at = datetime.now()
|
||||
|
||||
|
|
@ -429,14 +425,40 @@ class UniversignSyncService:
|
|||
transaction.expired_at = datetime.now()
|
||||
logger.info("⏰ Date d'expiration mise à jour")
|
||||
|
||||
# Mise à jour des URLs
|
||||
if (
|
||||
universign_data.get("documents")
|
||||
and len(universign_data["documents"]) > 0
|
||||
):
|
||||
first_doc = universign_data["documents"][0]
|
||||
if first_doc.get("url"):
|
||||
transaction.document_url = first_doc["url"]
|
||||
# === SECTION CORRIGÉE: Gestion des documents ===
|
||||
# Ne plus chercher document_url dans la réponse (elle n'existe pas!)
|
||||
# Le téléchargement se fait via le service document qui utilise le bon endpoint
|
||||
|
||||
documents = universign_data.get("documents", [])
|
||||
if documents:
|
||||
first_doc = documents[0]
|
||||
logger.info(
|
||||
f"Document Universign trouvé: id={first_doc.get('id')}, "
|
||||
f"status={first_doc.get('status')}"
|
||||
)
|
||||
|
||||
# Téléchargement automatique du document signé
|
||||
if new_local_status == "SIGNE" and not transaction.signed_document_path:
|
||||
logger.info("Déclenchement téléchargement document signé...")
|
||||
|
||||
try:
|
||||
(
|
||||
download_success,
|
||||
download_error,
|
||||
) = await self.document_service.download_and_store_signed_document(
|
||||
session=session, transaction=transaction, force=False
|
||||
)
|
||||
|
||||
if download_success:
|
||||
logger.info("✅ Document signé téléchargé et stocké")
|
||||
else:
|
||||
logger.warning(f"⚠️ Échec téléchargement: {download_error}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"❌ Erreur téléchargement document: {e}", exc_info=True
|
||||
)
|
||||
# === FIN SECTION CORRIGÉE ===
|
||||
|
||||
# Synchroniser les signataires
|
||||
await self._sync_signers(session, transaction, universign_data)
|
||||
|
|
@ -445,7 +467,7 @@ class UniversignSyncService:
|
|||
transaction.last_synced_at = datetime.now()
|
||||
transaction.sync_attempts += 1
|
||||
transaction.needs_sync = not is_final_status(new_local_status)
|
||||
transaction.sync_error = None # ✅ Effacer l'erreur précédente
|
||||
transaction.sync_error = None
|
||||
|
||||
# Log de la tentative
|
||||
await self._log_sync_attempt(
|
||||
|
|
@ -460,9 +482,10 @@ class UniversignSyncService:
|
|||
{
|
||||
"status_changed": status_changed,
|
||||
"universign_raw": universign_status_raw,
|
||||
"documents_count": len(documents),
|
||||
"response_time_ms": result.get("response_time_ms"),
|
||||
},
|
||||
default=str, # ✅ Éviter les erreurs de sérialisation
|
||||
default=str,
|
||||
),
|
||||
)
|
||||
|
||||
|
|
@ -486,7 +509,7 @@ class UniversignSyncService:
|
|||
error_msg = f"Erreur lors de la synchronisation: {str(e)}"
|
||||
logger.error(f"❌ {error_msg}", exc_info=True)
|
||||
|
||||
transaction.sync_error = error_msg[:1000] # Tronquer si trop long
|
||||
transaction.sync_error = error_msg[:1000]
|
||||
transaction.sync_attempts += 1
|
||||
|
||||
await self._log_sync_attempt(
|
||||
|
|
@ -496,6 +519,57 @@ class UniversignSyncService:
|
|||
|
||||
return False, error_msg
|
||||
|
||||
# CORRECTION 3 : Amélioration du logging dans sync_transaction
|
||||
async def _sync_transaction_documents_corrected(
|
||||
self, session, transaction, universign_data: dict, new_local_status: str
|
||||
):
|
||||
# Récupérer et stocker les infos documents
|
||||
documents = universign_data.get("documents", [])
|
||||
|
||||
if documents:
|
||||
# Stocker le premier document_id pour référence
|
||||
first_doc = documents[0]
|
||||
first_doc_id = first_doc.get("id")
|
||||
|
||||
if first_doc_id:
|
||||
# Stocker l'ID du document (si le champ existe dans le modèle)
|
||||
if hasattr(transaction, "universign_document_id"):
|
||||
transaction.universign_document_id = first_doc_id
|
||||
|
||||
logger.info(
|
||||
f"Document Universign: id={first_doc_id}, "
|
||||
f"name={first_doc.get('name')}, status={first_doc.get('status')}"
|
||||
)
|
||||
else:
|
||||
logger.debug("Aucun document dans la réponse Universign")
|
||||
|
||||
# Téléchargement automatique si signé
|
||||
if new_local_status == "SIGNE":
|
||||
if not transaction.signed_document_path:
|
||||
logger.info("Déclenchement téléchargement document signé...")
|
||||
|
||||
try:
|
||||
(
|
||||
download_success,
|
||||
download_error,
|
||||
) = await self.document_service.download_and_store_signed_document(
|
||||
session=session, transaction=transaction, force=False
|
||||
)
|
||||
|
||||
if download_success:
|
||||
logger.info("✅ Document signé téléchargé avec succès")
|
||||
else:
|
||||
logger.warning(f"⚠️ Échec téléchargement: {download_error}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"❌ Erreur téléchargement document: {e}", exc_info=True
|
||||
)
|
||||
else:
|
||||
logger.debug(
|
||||
f"Document déjà téléchargé: {transaction.signed_document_path}"
|
||||
)
|
||||
|
||||
async def _log_sync_attempt(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
|
|
|
|||
Loading…
Reference in a new issue