feat(universign): improve transaction and signer management

- Block new signature requests only while an existing transaction is in a non-terminal status
- Add endpoints to list, deduplicate, and delete signature transactions per document
- Update signers in place (matched by email) instead of deleting and recreating them on each sync
Fanilo-Nantenaina 2026-01-06 18:31:53 +03:00
parent 7d1a68f4e5
commit ba4a9d4773
2 changed files with 239 additions and 24 deletions


@@ -84,8 +84,13 @@ async def create_signature(
     existing_query = select(UniversignTransaction).where(
         UniversignTransaction.sage_document_id == request.sage_document_id,
         UniversignTransaction.sage_document_type == request.sage_document_type,
-        UniversignTransaction.local_status.in_(
-            [LocalDocumentStatus.PENDING, LocalDocumentStatus.IN_PROGRESS]
+        ~UniversignTransaction.local_status.in_(
+            [
+                LocalDocumentStatus.SIGNED,
+                LocalDocumentStatus.REJECTED,
+                LocalDocumentStatus.EXPIRED,
+                LocalDocumentStatus.ERROR,
+            ]
         ),
     )
     existing_result = await session.execute(existing_query)
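
The duplicate check above is now a blacklist of terminal statuses rather than a whitelist of in-flight ones: any transaction whose status is not terminal blocks a new request, including statuses that may be added later. A minimal sketch of the intended predicate (illustration only, not part of the commit; plain strings stand in for the LocalDocumentStatus members, and "SOME_FUTURE_STATUS" is hypothetical):

# Terminal set mirrors the statuses listed in the new ~.in_() clause
TERMINAL_STATUSES = {"SIGNED", "REJECTED", "EXPIRED", "ERROR"}

def blocks_new_request(local_status: str) -> bool:
    """True when an existing transaction should prevent creating a new one."""
    return local_status not in TERMINAL_STATUSES

assert blocks_new_request("PENDING")             # in-flight statuses still block, as before
assert not blocks_new_request("SIGNED")          # terminal statuses never block
assert blocks_new_request("SOME_FUTURE_STATUS")  # unknown, non-terminal statuses now block by default
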
@@ -95,7 +100,7 @@ async def create_signature(
         raise HTTPException(
             400,
             f"Une demande de signature est déjà en cours pour {request.sage_document_id} "
-            f"(transaction: {existing_tx.transaction_id})",
+            f"(transaction: {existing_tx.transaction_id}, statut: {existing_tx.local_status.value})",
         )

     pdf_bytes = email_queue._generate_pdf(
@@ -591,3 +596,200 @@ async def get_transaction_logs(
             for log in logs
         ],
     }
+
+
+@router.get("/documents/{sage_document_id}/signatures")
+async def get_signatures_for_document(
+    sage_document_id: str,
+    session: AsyncSession = Depends(get_session),
+):
+    """List every signature transaction recorded for a Sage document"""
+    query = (
+        select(UniversignTransaction)
+        .options(selectinload(UniversignTransaction.signers))
+        .where(UniversignTransaction.sage_document_id == sage_document_id)
+        .order_by(UniversignTransaction.created_at.desc())
+    )
+    result = await session.execute(query)
+    transactions = result.scalars().all()
+    return [
+        {
+            "id": tx.id,
+            "transaction_id": tx.transaction_id,
+            "local_status": tx.local_status.value,
+            "universign_status": tx.universign_status.value
+            if tx.universign_status
+            else None,
+            "created_at": tx.created_at.isoformat(),
+            "signed_at": tx.signed_at.isoformat() if tx.signed_at else None,
+            "signer_url": tx.signer_url,
+            "signers_count": len(tx.signers),
+        }
+        for tx in transactions
+    ]
+
+
+@router.delete("/documents/{sage_document_id}/duplicates")
+async def cleanup_duplicate_signatures(
+    sage_document_id: str,
+    keep_latest: bool = Query(
+        True, description="Garder la plus récente (True) ou la plus ancienne (False)"
+    ),
+    session: AsyncSession = Depends(get_session),
+):
+    """
+    Remove duplicate signature transactions for a document.
+    Keeps a single transaction (the most recent or the oldest, depending on the parameter).
+    """
+    query = (
+        select(UniversignTransaction)
+        .where(UniversignTransaction.sage_document_id == sage_document_id)
+        .order_by(
+            UniversignTransaction.created_at.desc()
+            if keep_latest
+            else UniversignTransaction.created_at.asc()
+        )
+    )
+    result = await session.execute(query)
+    transactions = result.scalars().all()
+    if len(transactions) <= 1:
+        return {
+            "success": True,
+            "message": "Aucun doublon trouvé",
+            "kept": transactions[0].transaction_id if transactions else None,
+            "deleted_count": 0,
+        }
+    # Keep the first one (per the chosen ordering), delete the rest
+    to_keep = transactions[0]
+    to_delete = transactions[1:]
+    deleted_ids = []
+    for tx in to_delete:
+        deleted_ids.append(tx.transaction_id)
+        await session.delete(tx)
+    await session.commit()
+    logger.info(
+        f"Nettoyage doublons {sage_document_id}: gardé {to_keep.transaction_id}, supprimé {deleted_ids}"
+    )
+    return {
+        "success": True,
+        "document_id": sage_document_id,
+        "kept": {
+            "id": to_keep.id,
+            "transaction_id": to_keep.transaction_id,
+            "status": to_keep.local_status.value,
+            "created_at": to_keep.created_at.isoformat(),
+        },
+        "deleted_count": len(deleted_ids),
+        "deleted_transaction_ids": deleted_ids,
+    }
+
+
+@router.delete("/transactions/{transaction_id}")
+async def delete_transaction(
+    transaction_id: str,
+    session: AsyncSession = Depends(get_session),
+):
+    """Delete a specific transaction by its Universign ID"""
+    query = select(UniversignTransaction).where(
+        UniversignTransaction.transaction_id == transaction_id
+    )
+    result = await session.execute(query)
+    tx = result.scalar_one_or_none()
+    if not tx:
+        raise HTTPException(404, f"Transaction {transaction_id} introuvable")
+    await session.delete(tx)
+    await session.commit()
+    logger.info(f"Transaction {transaction_id} supprimée")
+    return {
+        "success": True,
+        "deleted_transaction_id": transaction_id,
+        "document_id": tx.sage_document_id,
+    }
+
+
+@router.post("/cleanup/all-duplicates")
+async def cleanup_all_duplicates(
+    session: AsyncSession = Depends(get_session),
+):
+    """
+    Clean up every duplicate in the database.
+    For each document with several transactions, keeps the highest-priority transaction
+    (SIGNE > EN_COURS > EN_ATTENTE > others), most recent first.
+    """
+    from sqlalchemy import func
+
+    # Find documents that have more than one transaction
+    subquery = (
+        select(
+            UniversignTransaction.sage_document_id,
+            func.count(UniversignTransaction.id).label("count"),
+        )
+        .group_by(UniversignTransaction.sage_document_id)
+        .having(func.count(UniversignTransaction.id) > 1)
+    ).subquery()
+    duplicates_query = select(subquery.c.sage_document_id)
+    duplicates_result = await session.execute(duplicates_query)
+    duplicate_docs = [row[0] for row in duplicates_result.fetchall()]
+    total_deleted = 0
+    cleanup_details = []
+    for doc_id in duplicate_docs:
+        # Fetch every transaction for this document
+        tx_query = (
+            select(UniversignTransaction)
+            .where(UniversignTransaction.sage_document_id == doc_id)
+            .order_by(UniversignTransaction.created_at.desc())
+        )
+        tx_result = await session.execute(tx_query)
+        transactions = tx_result.scalars().all()
+        # Priority: SIGNE > EN_COURS > EN_ATTENTE > others
+        priority = {"SIGNE": 0, "EN_COURS": 1, "EN_ATTENTE": 2}
+
+        def sort_key(tx):
+            status_priority = priority.get(tx.local_status.value, 99)
+            return (status_priority, -tx.created_at.timestamp())
+
+        sorted_txs = sorted(transactions, key=sort_key)
+        to_keep = sorted_txs[0]
+        to_delete = sorted_txs[1:]
+        for tx in to_delete:
+            await session.delete(tx)
+            total_deleted += 1
+        cleanup_details.append(
+            {
+                "document_id": doc_id,
+                "kept": to_keep.transaction_id,
+                "kept_status": to_keep.local_status.value,
+                "deleted_count": len(to_delete),
+            }
+        )
+    await session.commit()
+    logger.info(
+        f"Nettoyage global: {total_deleted} doublons supprimés sur {len(duplicate_docs)} documents"
+    )
+    return {
+        "success": True,
+        "documents_processed": len(duplicate_docs),
+        "total_deleted": total_deleted,
+        "details": cleanup_details,
+    }
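
A hypothetical client-side call sequence for the new endpoints (illustration only; the base URL, router prefix, and document id below are assumptions, not taken from the diff):

import asyncio
import httpx

BASE_URL = "http://localhost:8000"  # assumption: adjust to the real service and router prefix

async def inspect_and_cleanup(doc_id: str) -> None:
    async with httpx.AsyncClient(base_url=BASE_URL) as client:
        # List every signature transaction recorded for the Sage document
        resp = await client.get(f"/documents/{doc_id}/signatures")
        resp.raise_for_status()
        for tx in resp.json():
            print(tx["transaction_id"], tx["local_status"])

        # Drop duplicates, keeping the most recent transaction
        resp = await client.delete(
            f"/documents/{doc_id}/duplicates", params={"keep_latest": True}
        )
        resp.raise_for_status()
        print(resp.json()["deleted_count"], "duplicate(s) removed")

asyncio.run(inspect_and_cleanup("DOC-0001"))  # hypothetical document id
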


@@ -1,8 +1,3 @@
-"""
-Service de synchronisation Universign
-Architecture : polling + webhooks avec retry et gestion d'erreurs
-"""
-
 import requests
 import json
 import logging
@@ -329,27 +324,45 @@ class UniversignSyncService:
    ):
        signers_data = universign_data.get("signers", [])
-        existing_ids = {s.id for s in transaction.signers}
-        for signer in list(transaction.signers):
-            await session.delete(signer)
+        # Do not touch existing signers if Universign does not return any
+        if not signers_data:
+            return
+        # Update existing signers or create new ones
+        existing_signers = {s.email: s for s in transaction.signers}
        for idx, signer_data in enumerate(signers_data):
-            signer = UniversignSigner(
-                id=f"{transaction.id}_signer_{idx}",
-                transaction_id=transaction.id,
-                email=signer_data.get("email", ""),
-                name=signer_data.get("name"),
-                status=UniversignSignerStatus(signer_data.get("status", "waiting")),
-                order_index=idx,
-                viewed_at=self._parse_date(signer_data.get("viewed_at")),
-                signed_at=self._parse_date(signer_data.get("signed_at")),
-                refused_at=self._parse_date(signer_data.get("refused_at")),
-            )
-            session.add(signer)
+            email = signer_data.get("email", "")
+            if email in existing_signers:
+                # Update the existing signer
+                signer = existing_signers[email]
+                signer.status = UniversignSignerStatus(
+                    signer_data.get("status", "waiting")
+                )
+                signer.viewed_at = (
+                    self._parse_date(signer_data.get("viewed_at")) or signer.viewed_at
+                )
+                signer.signed_at = (
+                    self._parse_date(signer_data.get("signed_at")) or signer.signed_at
+                )
+                signer.refused_at = (
+                    self._parse_date(signer_data.get("refused_at")) or signer.refused_at
+                )
+            else:
+                # New signer
+                signer = UniversignSigner(
+                    id=f"{transaction.id}_signer_{idx}_{int(datetime.now().timestamp())}",
+                    transaction_id=transaction.id,
+                    email=email,
+                    name=signer_data.get("name"),
+                    status=UniversignSignerStatus(signer_data.get("status", "waiting")),
+                    order_index=idx,
+                    viewed_at=self._parse_date(signer_data.get("viewed_at")),
+                    signed_at=self._parse_date(signer_data.get("signed_at")),
+                    refused_at=self._parse_date(signer_data.get("refused_at")),
+                )
+                session.add(signer)

    async def _log_sync_attempt(
        self,
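
The signer sync above now matches incoming Universign signers to existing rows by email and updates them in place, instead of deleting and recreating every signer on each poll, so previously recorded timestamps survive when the API omits them. A minimal sketch of that merge rule, with plain dicts standing in for the ORM rows (illustration only, not part of the commit):

def merge_signers(existing_by_email: dict, incoming: list) -> None:
    """existing_by_email maps email -> signer record; incoming mimics universign_data['signers']."""
    for idx, data in enumerate(incoming):
        email = data.get("email", "")
        if email in existing_by_email:
            row = existing_by_email[email]
            row["status"] = data.get("status", "waiting")
            # Keep previously recorded timestamps when the API omits them
            for field in ("viewed_at", "signed_at", "refused_at"):
                row[field] = data.get(field) or row.get(field)
        else:
            existing_by_email[email] = {**data, "order_index": idx}

# Example: a second poll updates the existing signer instead of duplicating it
signers = {"a@example.fr": {"status": "waiting", "signed_at": None}}
merge_signers(signers, [{"email": "a@example.fr", "status": "signed", "signed_at": "2026-01-06"}])
assert signers["a@example.fr"]["status"] == "signed"
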