feat(universign): add transaction diagnosis endpoint and improve document handling
parent a9aff7b386
commit 24d7a49a73
3 changed files with 491 additions and 109 deletions
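For manual testing, the new diagnosis endpoint can be exercised with any HTTP client. A minimal sketch, assuming the API is served locally on port 8000 and the router is mounted without an extra prefix (both assumptions, not part of this commit):

import requests

BASE_URL = "http://localhost:8000"   # hypothetical deployment URL
transaction_id = "tr_example123"     # hypothetical transaction id

# Call the diagnosis endpoint added in this commit
resp = requests.get(f"{BASE_URL}/transactions/{transaction_id}/diagnose", timeout=30)
resp.raise_for_status()
report = resp.json()
print(report["recommendations"])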
@@ -1613,3 +1613,106 @@ async def nettoyer_anciens_documents(
    except Exception as e:
        logger.error(f"Erreur nettoyage : {e}")
        raise HTTPException(500, str(e))


@router.get("/transactions/{transaction_id}/diagnose", tags=["Debug"])
async def diagnose_transaction(
    transaction_id: str, session: AsyncSession = Depends(get_session)
):
    """
    Diagnostic complet d'une transaction Universign

    Utile pour débugger les problèmes de récupération de documents
    """
    from services.universign_document import UniversignDocumentService

    try:
        # Récupérer la transaction locale
        query = select(UniversignTransaction).where(
            UniversignTransaction.transaction_id == transaction_id
        )
        result = await session.execute(query)
        transaction = result.scalar_one_or_none()

        local_info = None
        if transaction:
            local_info = {
                "id": transaction.id,
                "sage_document_id": transaction.sage_document_id,
                "local_status": transaction.local_status.value,
                "document_url": transaction.document_url,
                "signed_document_path": transaction.signed_document_path,
                "download_attempts": transaction.download_attempts,
                "download_error": transaction.download_error,
            }

        # Diagnostic API Universign
        document_service = UniversignDocumentService(
            api_url=settings.universign_api_url,
            api_key=settings.universign_api_key,
            timeout=30,
        )

        api_diagnosis = document_service.diagnose_transaction(transaction_id)

        return {
            "transaction_id": transaction_id,
            "local_data": local_info,
            "api_diagnosis": api_diagnosis,
            "recommendations": _generate_recommendations(local_info, api_diagnosis),
        }

    except Exception as e:
        logger.error(f"Erreur diagnostic: {e}", exc_info=True)
        raise HTTPException(500, str(e))


def _generate_recommendations(local_info, api_diagnosis):
    """Génère des recommandations basées sur le diagnostic"""
    recommendations = []

    if not local_info:
        recommendations.append(
            "Transaction introuvable localement. Vérifiez le transaction_id."
        )
        return recommendations

    if not api_diagnosis.get("success"):
        recommendations.append(
            f"Erreur API Universign: {api_diagnosis.get('error')}. "
            f"Vérifiez la connectivité et les credentials."
        )
        return recommendations

    state = api_diagnosis.get("checks", {}).get("transaction_data", {}).get("state")

    if state not in ["completed", "closed"]:
        recommendations.append(
            f"La transaction n'est pas encore signée (state={state}). "
            f"Attendez que le signataire complète la signature."
        )

    docs = api_diagnosis.get("checks", {}).get("documents", [])
    if not docs:
        recommendations.append("Aucun document trouvé dans la transaction Universign.")
    else:
        for doc in docs:
            dl_check = doc.get("download_check", {})
            if not dl_check.get("accessible"):
                recommendations.append(
                    f"Document {doc.get('id')} non accessible: "
                    f"status_code={dl_check.get('status_code')}. "
                    f"Vérifiez que la signature est complète."
                )

    if local_info.get("download_error"):
        recommendations.append(
            f"Dernière erreur de téléchargement: {local_info['download_error']}"
        )

    if not recommendations:
        recommendations.append(
            "Tout semble correct. Essayez POST /admin/download-missing-documents "
            "avec force_redownload=true"
        )

    return recommendations
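For reference, a sketch of the JSON shape this endpoint returns. Keys mirror the return dict and diagnose_transaction() above; every value is purely illustrative, not taken from a real transaction:

# Illustrative response shape only -- all values are invented.
example_response = {
    "transaction_id": "tr_example123",
    "local_data": {
        "id": 42,
        "sage_document_id": "DOC-0001",
        "local_status": "SIGNE",
        "document_url": None,
        "signed_document_path": None,
        "download_attempts": 2,
        "download_error": None,
    },
    "api_diagnosis": {
        "transaction_id": "tr_example123",
        "success": True,
        "checks": {
            "transaction_data": {"state": "completed", "documents_count": 1, "participants_count": 1},
            "documents": [
                {
                    "id": "doc_1",
                    "name": "contrat.pdf",
                    "status": "signed",
                    "download_check": {"status_code": 200, "accessible": True},
                },
            ],
        },
    },
    "recommendations": [
        "Tout semble correct. Essayez POST /admin/download-missing-documents avec force_redownload=true"
    ],
}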
@@ -3,7 +3,7 @@ import logging
import requests
from pathlib import Path
from datetime import datetime
from typing import Optional, Tuple
from typing import Optional, Tuple, Dict, List
from sqlalchemy.ext.asyncio import AsyncSession

logger = logging.getLogger(__name__)
@@ -13,144 +13,366 @@ SIGNED_DOCS_DIR.mkdir(parents=True, exist_ok=True)


class UniversignDocumentService:
    """Service de gestion des documents signés Universign"""
    """Service de gestion des documents signés Universign - VERSION CORRIGÉE"""

    def __init__(self, api_key: str, timeout: int = 60):
    def __init__(self, api_url: str, api_key: str, timeout: int = 60):
        self.api_url = api_url.rstrip("/")
        self.api_key = api_key
        self.timeout = timeout
        self.auth = (api_key, "")

    async def download_and_store_signed_document(
        self, session: AsyncSession, transaction, force: bool = False
    ) -> Tuple[bool, Optional[str]]:
        if not force and transaction.signed_document_path:
            if os.path.exists(transaction.signed_document_path):
                logger.debug(f"Document déjà téléchargé : {transaction.transaction_id}")
                return True, None

        if not transaction.document_url:
            error = "Aucune URL de document disponible"
            logger.warning(f"{error} pour {transaction.transaction_id}")
            transaction.download_error = error
            await session.commit()
            return False, error

    def fetch_transaction_documents(self, transaction_id: str) -> Optional[List[Dict]]:
        try:
            logger.info(f"Téléchargement document signé : {transaction.transaction_id}")

            transaction.download_attempts += 1
            logger.info(f"📋 Récupération documents pour transaction: {transaction_id}")

            response = requests.get(
                transaction.document_url,
                f"{self.api_url}/transactions/{transaction_id}",
                auth=self.auth,
                timeout=self.timeout,
                headers={"Accept": "application/json"},
            )

            if response.status_code == 200:
                data = response.json()
                documents = data.get("documents", [])

                logger.info(f"✅ {len(documents)} document(s) trouvé(s)")

                # Log détaillé de chaque document
                for idx, doc in enumerate(documents):
                    logger.debug(
                        f" Document {idx}: id={doc.get('id')}, "
                        f"name={doc.get('name')}, status={doc.get('status')}"
                    )

                return documents

            elif response.status_code == 404:
                logger.warning(
                    f"Transaction {transaction_id} introuvable sur Universign"
                )
                return None

            else:
                logger.error(
                    f"Erreur HTTP {response.status_code} pour {transaction_id}: "
                    f"{response.text[:500]}"
                )
                return None

        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout récupération transaction {transaction_id}")
            return None
        except Exception as e:
            logger.error(f"❌ Erreur fetch documents: {e}", exc_info=True)
            return None

    def download_signed_document(
        self, transaction_id: str, document_id: str
    ) -> Optional[bytes]:
        try:
            download_url = (
                f"{self.api_url}/transactions/{transaction_id}"
                f"/documents/{document_id}/download"
            )

            logger.info(f"📥 Téléchargement depuis: {download_url}")

            response = requests.get(
                download_url,
                auth=self.auth,
                timeout=self.timeout,
                stream=True,
            )

            response.raise_for_status()
            if response.status_code == 200:
                content_type = response.headers.get("Content-Type", "")
                content_length = response.headers.get("Content-Length", "unknown")

            content_type = response.headers.get("Content-Type", "")
            if "pdf" not in content_type.lower():
                error = f"Type de contenu invalide : {content_type}"
                logger.error(error)
                logger.info(
                    f"✅ Téléchargement réussi: "
                    f"Content-Type={content_type}, Size={content_length}"
                )

                # Vérification du type de contenu
                if (
                    "pdf" not in content_type.lower()
                    and "octet-stream" not in content_type.lower()
                ):
                    logger.warning(
                        f"⚠️ Type de contenu inattendu: {content_type}. "
                        f"Tentative de lecture quand même..."
                    )

                # Lecture du contenu
                content = response.content

                if len(content) < 1024:
                    logger.error(f"❌ Document trop petit: {len(content)} octets")
                    return None

                return content

            elif response.status_code == 404:
                logger.error(
                    f"❌ Document {document_id} introuvable pour transaction {transaction_id}"
                )
                return None

            elif response.status_code == 403:
                logger.error(
                    f"❌ Accès refusé au document {document_id}. "
                    f"Vérifiez que la transaction est bien signée."
                )
                return None

            else:
                logger.error(
                    f"❌ Erreur HTTP {response.status_code}: {response.text[:500]}"
                )
                return None

        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout téléchargement document {document_id}")
            return None
        except Exception as e:
            logger.error(f"❌ Erreur téléchargement: {e}", exc_info=True)
            return None

    async def download_and_store_signed_document(
        self, session: AsyncSession, transaction, force: bool = False
    ) -> Tuple[bool, Optional[str]]:
        # Vérification si déjà téléchargé
        if not force and transaction.signed_document_path:
            if os.path.exists(transaction.signed_document_path):
                logger.debug(
                    f"✅ Document déjà téléchargé: {transaction.transaction_id}"
                )
                return True, None

        transaction.download_attempts += 1

        try:
            # ÉTAPE 1: Récupérer les documents de la transaction
            logger.info(
                f"🔄 Récupération document signé pour: {transaction.transaction_id}"
            )

            documents = self.fetch_transaction_documents(transaction.transaction_id)

            if not documents:
                error = "Aucun document trouvé dans la transaction Universign"
                logger.warning(f"⚠️ {error}")
                transaction.download_error = error
                await session.commit()
                return False, error

            # ÉTAPE 2: Récupérer le premier document (ou chercher celui qui est signé)
            document_id = None
            for doc in documents:
                doc_id = doc.get("id")
                doc_status = doc.get("status", "").lower()

                # Priorité aux documents marqués comme signés/complétés
                if doc_status in ["signed", "completed", "closed"]:
                    document_id = doc_id
                    logger.info(
                        f"📄 Document signé trouvé: {doc_id} (status: {doc_status})"
                    )
                    break

                # Fallback sur le premier document si aucun n'est explicitement signé
                if document_id is None:
                    document_id = doc_id

            if not document_id:
                error = "Impossible de déterminer l'ID du document à télécharger"
                logger.error(f"❌ {error}")
                transaction.download_error = error
                await session.commit()
                return False, error

            # Stocker le document_id pour référence future
            if hasattr(transaction, "universign_document_id"):
                transaction.universign_document_id = document_id

            # ÉTAPE 3: Télécharger le document signé
            pdf_content = self.download_signed_document(
                transaction_id=transaction.transaction_id, document_id=document_id
            )

            if not pdf_content:
                error = f"Échec téléchargement document {document_id}"
                logger.error(f"❌ {error}")
                transaction.download_error = error
                await session.commit()
                return False, error

            # ÉTAPE 4: Stocker le fichier localement
            filename = self._generate_filename(transaction)
            file_path = SIGNED_DOCS_DIR / filename

            with open(file_path, "wb") as f:
                for chunk in response.iter_content(chunk_size=8192):
                    f.write(chunk)
                f.write(pdf_content)

            file_size = os.path.getsize(file_path)

            if file_size < 1024:  # Moins de 1 KB = suspect
                error = f"Fichier trop petit : {file_size} octets"
                logger.error(error)
                os.remove(file_path)
                transaction.download_error = error
                await session.commit()
                return False, error

            # Mise à jour de la transaction
            transaction.signed_document_path = str(file_path)
            transaction.signed_document_downloaded_at = datetime.now()
            transaction.signed_document_size_bytes = file_size
            transaction.download_error = None

            # Stocker aussi l'URL de téléchargement pour référence
            transaction.document_url = (
                f"{self.api_url}/transactions/{transaction.transaction_id}"
                f"/documents/{document_id}/download"
            )

            await session.commit()

            logger.info(f"Document téléchargé : {filename} ({file_size / 1024:.1f} KB)")
            logger.info(
                f"✅ Document signé téléchargé: {filename} ({file_size / 1024:.1f} KB)"
            )

            return True, None

        except requests.exceptions.RequestException as e:
            error = f"Erreur HTTP : {str(e)}"
            logger.error(f"{error} pour {transaction.transaction_id}")
            transaction.download_error = error
            await session.commit()
            return False, error

        except OSError as e:
            error = f"Erreur filesystem : {str(e)}"
            logger.error(f"{error}")
            error = f"Erreur filesystem: {str(e)}"
            logger.error(f"❌ {error}")
            transaction.download_error = error
            await session.commit()
            return False, error

        except Exception as e:
            error = f"Erreur inattendue : {str(e)}"
            logger.error(f"{error}", exc_info=True)
            error = f"Erreur inattendue: {str(e)}"
            logger.error(f"❌ {error}", exc_info=True)
            transaction.download_error = error
            await session.commit()
            return False, error

    def _generate_filename(self, transaction) -> str:
        """Génère un nom de fichier unique pour le document signé"""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")

        tx_id = transaction.transaction_id.replace("tr_", "")

        filename = f"{transaction.sage_document_id}_{tx_id}_{timestamp}.pdf"
        filename = f"{transaction.sage_document_id}_{tx_id}_{timestamp}_signed.pdf"
        return filename

    def get_document_path(self, transaction) -> Optional[Path]:
        """Retourne le chemin du document signé s'il existe"""
        if not transaction.signed_document_path:
            return None

        path = Path(transaction.signed_document_path)
        if path.exists():
            return path

        return None

    async def cleanup_old_documents(self, days_to_keep: int = 90) -> Tuple[int, int]:
        """Supprime les anciens documents signés"""
        from datetime import timedelta

        cutoff_date = datetime.now() - timedelta(days=days_to_keep)

        deleted = 0
        size_freed = 0

        for file_path in SIGNED_DOCS_DIR.glob("*.pdf"):
            try:
                file_time = datetime.fromtimestamp(os.path.getmtime(file_path))

                if file_time < cutoff_date:
                    size_freed += os.path.getsize(file_path)
                    os.remove(file_path)
                    deleted += 1
                    logger.info(f"🗑️ Supprimé : {file_path.name}")
                    logger.info(f"🗑️ Supprimé: {file_path.name}")
            except Exception as e:
                logger.error(f"Erreur suppression {file_path}: {e}")

        size_freed_mb = size_freed / (1024 * 1024)

        logger.info(
            f"Nettoyage terminé : {deleted} fichiers supprimés "
            f"Nettoyage terminé: {deleted} fichiers supprimés "
            f"({size_freed_mb:.2f} MB libérés)"
        )

        return deleted, int(size_freed_mb)

    # === MÉTHODES DE DIAGNOSTIC ===

    def diagnose_transaction(self, transaction_id: str) -> Dict:
        """
        Diagnostic complet d'une transaction pour debug
        """
        result = {
            "transaction_id": transaction_id,
            "api_url": self.api_url,
            "timestamp": datetime.now().isoformat(),
            "checks": {},
        }

        try:
            # Test 1: Récupération de la transaction
            logger.info(f"🔍 Diagnostic transaction: {transaction_id}")

            response = requests.get(
                f"{self.api_url}/transactions/{transaction_id}",
                auth=self.auth,
                timeout=self.timeout,
            )

            result["checks"]["transaction_fetch"] = {
                "status_code": response.status_code,
                "success": response.status_code == 200,
            }

            if response.status_code != 200:
                result["checks"]["transaction_fetch"]["error"] = response.text[:500]
                return result

            data = response.json()

            result["checks"]["transaction_data"] = {
                "state": data.get("state"),
                "documents_count": len(data.get("documents", [])),
                "participants_count": len(data.get("participants", [])),
            }

            # Test 2: Documents disponibles
            documents = data.get("documents", [])
            result["checks"]["documents"] = []

            for doc in documents:
                doc_info = {
                    "id": doc.get("id"),
                    "name": doc.get("name"),
                    "status": doc.get("status"),
                }

                # Test téléchargement
                if doc.get("id"):
                    download_url = (
                        f"{self.api_url}/transactions/{transaction_id}"
                        f"/documents/{doc['id']}/download"
                    )

                    try:
                        dl_response = requests.head(
                            download_url,
                            auth=self.auth,
                            timeout=10,
                        )
                        doc_info["download_check"] = {
                            "url": download_url,
                            "status_code": dl_response.status_code,
                            "accessible": dl_response.status_code in [200, 302],
                            "content_type": dl_response.headers.get("Content-Type"),
                        }
                    except Exception as e:
                        doc_info["download_check"] = {"error": str(e)}

                result["checks"]["documents"].append(doc_info)

            result["success"] = True

        except Exception as e:
            result["success"] = False
            result["error"] = str(e)

        return result
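A minimal usage sketch of the reworked service, composing the two-step flow shown above (list the transaction's documents, then download the signed one). The URL, key and ids are placeholders, not real values:

from services.universign_document import UniversignDocumentService

service = UniversignDocumentService(
    api_url="https://api.universign.example/v1",  # placeholder URL
    api_key="ua_test_xxx",                        # placeholder key
    timeout=30,
)

docs = service.fetch_transaction_documents("tr_example123") or []
signed = None
for doc in docs:
    # Prefer a document explicitly marked as signed, mirroring the selection logic above
    if doc.get("status", "").lower() in ["signed", "completed", "closed"]:
        signed = doc
        break
    if signed is None:
        signed = doc

if signed and signed.get("id"):
    pdf_bytes = service.download_signed_document("tr_example123", signed["id"])
    if pdf_bytes:
        with open("signed_example.pdf", "wb") as f:
            f.write(pdf_bytes)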
@@ -40,7 +40,9 @@ class UniversignSyncService:
        self.sage_client = None
        self.email_queue = None
        self.settings = None
        self.document_service = UniversignDocumentService(api_key=api_key, timeout=60)
        self.document_service = UniversignDocumentService(
            api_url=api_url, api_key=api_key, timeout=60
        )

    def configure(self, sage_client, email_queue, settings):
        self.sage_client = sage_client
@@ -320,21 +322,20 @@ class UniversignSyncService:
        except Exception as e:
            logger.error(f"Erreur création signer {email}: {e}")

    # CORRECTION 3 : Amélioration du logging dans sync_transaction

    async def sync_transaction(
        self,
        session: AsyncSession,
        transaction: UniversignTransaction,
        session,
        transaction,
        force: bool = False,
    ) -> Tuple[bool, Optional[str]]:
        """
        CORRECTION : Meilleur logging et gestion d'erreurs
        """
    ):
        import json

        # Si statut final et pas de force, skip
        if is_final_status(transaction.local_status.value) and not force:
            logger.debug(
                f"⏭️ Skip {transaction.transaction_id}: statut final {transaction.local_status.value}"
                f"⏭️ Skip {transaction.transaction_id}: statut final "
                f"{transaction.local_status.value}"
            )
            transaction.needs_sync = False
            await session.commit()
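The call shape is unchanged despite the relaxed annotations; a hedged sketch of a call site (the sync service instance, session and transaction are assumed to come from the caller):

# Hypothetical call site -- sync_service, session and transaction exist in the caller's scope.
ok, error = await sync_service.sync_transaction(session, transaction, force=True)
if not ok:
    logger.warning(f"Sync échouée: {error}")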
@@ -347,12 +348,9 @@ class UniversignSyncService:

        if not result:
            error = "Échec récupération données Universign"
            logger.error(f"{error}: {transaction.transaction_id}")

            # CORRECTION : Incrémenter les tentatives MÊME en cas d'échec
            logger.error(f"❌ {error}: {transaction.transaction_id}")
            transaction.sync_attempts += 1
            transaction.sync_error = error

            await self._log_sync_attempt(session, transaction, "polling", False, error)
            await session.commit()
            return False, error
@@ -375,7 +373,7 @@ class UniversignSyncService:
        # Vérifier la transition
        if not is_transition_allowed(previous_local_status, new_local_status):
            logger.warning(
                f"Transition refusée: {previous_local_status} → {new_local_status}"
                f"⚠️ Transition refusée: {previous_local_status} → {new_local_status}"
            )
            new_local_status = resolve_status_conflict(
                previous_local_status, new_local_status
@@ -395,12 +393,9 @@ class UniversignSyncService:
                universign_status_raw
            )
        except ValueError:
            logger.warning(f"Statut Universign inconnu: {universign_status_raw}")
            # Fallback intelligent
            logger.warning(f"⚠️ Statut Universign inconnu: {universign_status_raw}")
            if new_local_status == "SIGNE":
                transaction.universign_status = (
                    UniversignTransactionStatus.COMPLETED
                )
                transaction.universign_status = UniversignTransactionStatus.COMPLETED
            elif new_local_status == "REFUSE":
                transaction.universign_status = UniversignTransactionStatus.REFUSED
            elif new_local_status == "EXPIRE":
@@ -419,41 +414,53 @@ class UniversignSyncService:

        if new_local_status == "SIGNE" and not transaction.signed_at:
            transaction.signed_at = datetime.now()
            logger.info("Date de signature mise à jour")
            logger.info("✅ Date de signature mise à jour")

        if new_local_status == "REFUSE" and not transaction.refused_at:
            transaction.refused_at = datetime.now()
            logger.info("Date de refus mise à jour")
            logger.info("❌ Date de refus mise à jour")

        if new_local_status == "EXPIRE" and not transaction.expired_at:
            transaction.expired_at = datetime.now()
            logger.info("⏰ Date d'expiration mise à jour")

        # Mise à jour des URLs
        if (
            universign_data.get("documents")
            and len(universign_data["documents"]) > 0
        ):
            first_doc = universign_data["documents"][0]
            if first_doc.get("url"):
                transaction.document_url = first_doc["url"]
        # === SECTION CORRIGÉE: Gestion des documents ===
        # Ne plus chercher document_url dans la réponse (elle n'existe pas!)
        # Le téléchargement se fait via le service document qui utilise le bon endpoint

        # NOUVEAU : Téléchargement automatique du document signé
        if new_local_status == "SIGNE" and transaction.document_url:
            if not transaction.signed_document_path:
                logger.info("Déclenchement téléchargement document signé")
        documents = universign_data.get("documents", [])
        if documents:
            first_doc = documents[0]
            logger.info(
                f"📄 Document Universign trouvé: id={first_doc.get('id')}, "
                f"status={first_doc.get('status')}"
            )

        # Téléchargement automatique du document signé
        if new_local_status == "SIGNE" and not transaction.signed_document_path:
            logger.info("📥 Déclenchement téléchargement document signé...")

            try:
                (
                    download_success,
                    download_error,
                ) = await self.document_service.download_and_store_signed_document(
                    session=session, transaction=transaction, force=False
                    session=session,
                    transaction=transaction,
                    force=False
                )

                if download_success:
                    logger.info("Document signé téléchargé avec succès")
                    logger.info("✅ Document signé téléchargé et stocké")
                else:
                    logger.warning(f"Échec téléchargement : {download_error}")
                    logger.warning(f"⚠️ Échec téléchargement: {download_error}")

            except Exception as e:
                logger.error(
                    f"❌ Erreur téléchargement document: {e}",
                    exc_info=True
                )
        # === FIN SECTION CORRIGÉE ===

        # Synchroniser les signataires
        await self._sync_signers(session, transaction, universign_data)
@@ -462,7 +469,7 @@ class UniversignSyncService:
        transaction.last_synced_at = datetime.now()
        transaction.sync_attempts += 1
        transaction.needs_sync = not is_final_status(new_local_status)
        transaction.sync_error = None  # Effacer l'erreur précédente
        transaction.sync_error = None

        # Log de la tentative
        await self._log_sync_attempt(
@@ -477,9 +484,10 @@ class UniversignSyncService:
                {
                    "status_changed": status_changed,
                    "universign_raw": universign_status_raw,
                    "documents_count": len(documents),
                    "response_time_ms": result.get("response_time_ms"),
                },
                default=str,  # Éviter les erreurs de sérialisation
                default=str,
            ),
        )

@@ -488,12 +496,10 @@ class UniversignSyncService:
        # Exécuter les actions post-changement
        if status_changed:
            logger.info(f"🎬 Exécution actions pour statut: {new_local_status}")
            await self._execute_status_actions(
                session, transaction, new_local_status
            )
            await self._execute_status_actions(session, transaction, new_local_status)

        logger.info(
            f"Sync terminée: {transaction.transaction_id} | "
            f"✅ Sync terminée: {transaction.transaction_id} | "
            f"{previous_local_status} → {new_local_status}"
        )

@@ -501,9 +507,9 @@ class UniversignSyncService:

        except Exception as e:
            error_msg = f"Erreur lors de la synchronisation: {str(e)}"
            logger.error(f"{error_msg}", exc_info=True)
            logger.error(f"❌ {error_msg}", exc_info=True)

            transaction.sync_error = error_msg[:1000]  # Tronquer si trop long
            transaction.sync_error = error_msg[:1000]
            transaction.sync_attempts += 1

            await self._log_sync_attempt(
@@ -513,6 +519,57 @@ class UniversignSyncService:

            return False, error_msg

    # CORRECTION 3 : Amélioration du logging dans sync_transaction
    async def _sync_transaction_documents_corrected(
        self, session, transaction, universign_data: dict, new_local_status: str
    ):
        # Récupérer et stocker les infos documents
        documents = universign_data.get("documents", [])

        if documents:
            # Stocker le premier document_id pour référence
            first_doc = documents[0]
            first_doc_id = first_doc.get("id")

            if first_doc_id:
                # Stocker l'ID du document (si le champ existe dans le modèle)
                if hasattr(transaction, "universign_document_id"):
                    transaction.universign_document_id = first_doc_id

                logger.info(
                    f"📄 Document Universign: id={first_doc_id}, "
                    f"name={first_doc.get('name')}, status={first_doc.get('status')}"
                )
        else:
            logger.debug("Aucun document dans la réponse Universign")

        # Téléchargement automatique si signé
        if new_local_status == "SIGNE":
            if not transaction.signed_document_path:
                logger.info("📥 Déclenchement téléchargement document signé...")

                try:
                    (
                        download_success,
                        download_error,
                    ) = await self.document_service.download_and_store_signed_document(
                        session=session, transaction=transaction, force=False
                    )

                    if download_success:
                        logger.info("✅ Document signé téléchargé avec succès")
                    else:
                        logger.warning(f"⚠️ Échec téléchargement: {download_error}")

                except Exception as e:
                    logger.error(
                        f"❌ Erreur téléchargement document: {e}", exc_info=True
                    )
            else:
                logger.debug(
                    f"Document déjà téléchargé: {transaction.signed_document_path}"
                )

    async def _log_sync_attempt(
        self,
        session: AsyncSession,
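The recommendation text in the diagnosis endpoint points to a bulk re-download; a hedged sketch of triggering it, assuming the route lives at /admin/download-missing-documents and takes force_redownload as a query parameter (both taken from the message text, not verified against the router):

import requests

# Hypothetical host; endpoint path and flag name come from the recommendation message above.
resp = requests.post(
    "http://localhost:8000/admin/download-missing-documents",
    params={"force_redownload": "true"},
    timeout=120,
)
print(resp.status_code, resp.json())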