Added docker config
parent 5e06aafff7
commit a8ee3fe492
10 changed files with 765 additions and 7 deletions
Dockerfile (new file, 21 lines)
@@ -0,0 +1,21 @@
# Backend Dockerfile
FROM python:3.12-slim

WORKDIR /app

# Copy and install the dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip \
    && pip install --no-cache-dir -r requirements.txt

# Copy the rest of the project
COPY . .

# ✅ Create a persistent folder for SQLite with the right permissions
RUN mkdir -p /app/data && chmod 777 /app/data

# Expose the port
EXPOSE 8000

# Start the API and initialize the DB on startup
CMD ["sh", "-c", "python init_db.py && uvicorn api:app --host 0.0.0.0 --port 8000 --reload"]
api.py (10 changed lines)
@@ -27,9 +27,11 @@ logger = logging.getLogger(__name__)
 # Local imports
 from config import settings
-from database.config import init_db, async_session_factory, get_session
-from database.models import (
-    EmailLog as EmailLogModel,
+from database import (
+    init_db,
+    async_session_factory,
+    get_session,
+    EmailLog,
     StatutEmail as StatutEmailEnum,
     WorkflowLog,
     SignatureLog,
@@ -384,7 +386,7 @@ async def envoyer_devis_email(
     email_logs = []
 
     for dest in tous_destinataires:
-        email_log = EmailLogModel(
+        email_log = EmailLog(
             id=str(uuid.uuid4()),
             destinataire=dest,
             sujet=request.sujet,
config.py (changed)
@@ -25,11 +25,11 @@ class Settings(BaseSettings):
     # === Universign ===
     universign_api_key: str
-    universign_api_url: str = "https://api.universign.com/v1"
+    universign_api_url: str
 
     # === API ===
-    api_host: str = "0.0.0.0"
-    api_port: int = 8002
+    api_host: str
+    api_port: int
     api_reload: bool = False
 
     # === Email Queue ===
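Note: with the defaults removed above, universign_api_url, api_host and api_port must now come from the environment (docker-compose loads them via env_file: .env). A minimal .env sketch; the variable names are derived from the Settings fields shown here and from the settings.* attributes used in email_queue.py, assuming the default pydantic-settings name mapping, and every value is a placeholder:

UNIVERSIGN_API_KEY=changeme
UNIVERSIGN_API_URL=https://api.universign.com/v1
API_HOST=0.0.0.0
API_PORT=8000
SMTP_HOST=smtp.example.com
SMTP_PORT=587
SMTP_USE_TLS=true
SMTP_USER=mailer@example.com
SMTP_PASSWORD=changeme
SMTP_FROM=mailer@example.com
MAX_RETRY_ATTEMPTS=3
RETRY_DELAY_SECONDS=60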
database/__init__.py (new file, 39 lines)
@@ -0,0 +1,39 @@
from database.db_config import (
    engine,
    async_session_factory,
    init_db,
    get_session,
    close_db
)

from database.models import (
    Base,
    EmailLog,
    SignatureLog,
    WorkflowLog,
    CacheMetadata,
    AuditLog,
    StatutEmail,
    StatutSignature
)

__all__ = [
    # Config
    'engine',
    'async_session_factory',
    'init_db',
    'get_session',
    'close_db',

    # Models
    'Base',
    'EmailLog',
    'SignatureLog',
    'WorkflowLog',
    'CacheMetadata',
    'AuditLog',

    # Enums
    'StatutEmail',
    'StatutSignature',
]
database/db_config.py (new file, 56 lines)
@@ -0,0 +1,56 @@
import os
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.pool import StaticPool
from database.models import Base
import logging

logger = logging.getLogger(__name__)

DATABASE_URL = os.getenv("DATABASE_URL", "sqlite+aiosqlite:///./sage_dataven.db")

engine = create_async_engine(
    DATABASE_URL,
    echo=False,
    future=True,
    connect_args={"check_same_thread": False},
    poolclass=StaticPool,
)

async_session_factory = async_sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
    autoflush=False,
)


async def init_db():
    """
    Creates all the tables in the database.
    ⚠️ Uses create_all, which only creates the MISSING tables.
    """
    try:
        async with engine.begin() as conn:
            await conn.run_sync(Base.metadata.create_all)

        logger.info("✅ Base de données initialisée avec succès")
        logger.info(f"📍 Fichier DB: {DATABASE_URL}")

    except Exception as e:
        logger.error(f"❌ Erreur initialisation DB: {e}")
        raise


async def get_session() -> AsyncSession:
    """FastAPI dependency that yields a DB session"""
    async with async_session_factory() as session:
        try:
            yield session
        finally:
            await session.close()


async def close_db():
    """Cleanly closes all DB connections"""
    await engine.dispose()
    logger.info("🔌 Connexions DB fermées")
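get_session is written as a FastAPI dependency; a minimal usage sketch (the route shown is illustrative, not part of this commit):

from fastapi import APIRouter, Depends
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from database import EmailLog, get_session

router = APIRouter()

@router.get("/emails/{email_id}")
async def read_email_log(email_id: str, session: AsyncSession = Depends(get_session)):
    # Look up a single EmailLog row by primary key
    result = await session.execute(select(EmailLog).where(EmailLog.id == email_id))
    email = result.scalar_one_or_none()
    return {"found": email is not None, "statut": email.statut.value if email else None}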
database/models.py (new file, 204 lines)
@@ -0,0 +1,204 @@
from sqlalchemy import Column, Integer, String, DateTime, Float, Text, Boolean, Enum as SQLEnum
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime
import enum

Base = declarative_base()

# ============================================================================
# Enums
# ============================================================================

class StatutEmail(str, enum.Enum):
    """Possible statuses of an email"""
    EN_ATTENTE = "EN_ATTENTE"
    EN_COURS = "EN_COURS"
    ENVOYE = "ENVOYE"
    OUVERT = "OUVERT"
    ERREUR = "ERREUR"
    BOUNCE = "BOUNCE"

class StatutSignature(str, enum.Enum):
    """Possible statuses of an electronic signature"""
    EN_ATTENTE = "EN_ATTENTE"
    ENVOYE = "ENVOYE"
    SIGNE = "SIGNE"
    REFUSE = "REFUSE"
    EXPIRE = "EXPIRE"

# ============================================================================
# Tables
# ============================================================================

class EmailLog(Base):
    """
    Log of emails sent through the API.
    Enables tracking and automatic retry.
    """
    __tablename__ = "email_logs"

    # Identifier
    id = Column(String(36), primary_key=True)

    # Recipients
    destinataire = Column(String(255), nullable=False, index=True)
    cc = Column(Text, nullable=True)  # JSON-stringified
    cci = Column(Text, nullable=True)  # JSON-stringified

    # Content
    sujet = Column(String(500), nullable=False)
    corps_html = Column(Text, nullable=False)

    # Attached documents
    document_ids = Column(Text, nullable=True)  # Comma-separated
    type_document = Column(Integer, nullable=True)

    # Status
    statut = Column(SQLEnum(StatutEmail), default=StatutEmail.EN_ATTENTE, index=True)

    # Time tracking
    date_creation = Column(DateTime, default=datetime.now, nullable=False)
    date_envoi = Column(DateTime, nullable=True)
    date_ouverture = Column(DateTime, nullable=True)

    # Automatic retry
    nb_tentatives = Column(Integer, default=0)
    derniere_erreur = Column(Text, nullable=True)
    prochain_retry = Column(DateTime, nullable=True)

    # Metadata
    ip_envoi = Column(String(45), nullable=True)
    user_agent = Column(String(500), nullable=True)

    def __repr__(self):
        return f"<EmailLog {self.id} to={self.destinataire} status={self.statut.value}>"


class SignatureLog(Base):
    """
    Log of Universign signature requests.
    Enables tracking of the signature workflow.
    """
    __tablename__ = "signature_logs"

    # Identifier
    id = Column(String(36), primary_key=True)

    # Associated Sage document
    document_id = Column(String(100), nullable=False, index=True)
    type_document = Column(Integer, nullable=False)

    # Universign
    transaction_id = Column(String(100), unique=True, index=True, nullable=True)
    signer_url = Column(String(500), nullable=True)

    # Signer
    email_signataire = Column(String(255), nullable=False, index=True)
    nom_signataire = Column(String(255), nullable=False)

    # Status
    statut = Column(SQLEnum(StatutSignature), default=StatutSignature.EN_ATTENTE, index=True)
    date_envoi = Column(DateTime, default=datetime.now)
    date_signature = Column(DateTime, nullable=True)
    date_refus = Column(DateTime, nullable=True)

    # Reminders
    est_relance = Column(Boolean, default=False)
    nb_relances = Column(Integer, default=0)

    # Metadata
    raison_refus = Column(Text, nullable=True)
    ip_signature = Column(String(45), nullable=True)

    def __repr__(self):
        return f"<SignatureLog {self.id} doc={self.document_id} status={self.statut.value}>"


class WorkflowLog(Base):
    """
    Log of document transformations (Devis → Commande → Facture).
    Provides traceability of the sales workflow.
    """
    __tablename__ = "workflow_logs"

    # Identifier
    id = Column(String(36), primary_key=True)

    # Documents
    document_source = Column(String(100), nullable=False, index=True)
    type_source = Column(Integer, nullable=False)  # 0=Devis, 3=Commande, etc.

    document_cible = Column(String(100), nullable=False, index=True)
    type_cible = Column(Integer, nullable=False)

    # Transformation metadata
    nb_lignes = Column(Integer, nullable=True)
    montant_ht = Column(Float, nullable=True)
    montant_ttc = Column(Float, nullable=True)

    # Tracking
    date_transformation = Column(DateTime, default=datetime.now, nullable=False)
    utilisateur = Column(String(100), nullable=True)

    # Result
    succes = Column(Boolean, default=True)
    erreur = Column(Text, nullable=True)
    duree_ms = Column(Integer, nullable=True)  # Duration in milliseconds

    def __repr__(self):
        return f"<WorkflowLog {self.document_source} → {self.document_cible}>"


class CacheMetadata(Base):
    """
    Metadata about the Sage cache (clients, articles).
    Enables monitoring of the cache managed by the Windows gateway.
    """
    __tablename__ = "cache_metadata"

    id = Column(Integer, primary_key=True, autoincrement=True)

    # Cache type
    cache_type = Column(String(50), unique=True, nullable=False)  # 'clients' or 'articles'

    # Statistics
    last_refresh = Column(DateTime, default=datetime.now)
    item_count = Column(Integer, default=0)
    refresh_duration_ms = Column(Float, nullable=True)

    # Health
    last_error = Column(Text, nullable=True)
    error_count = Column(Integer, default=0)

    def __repr__(self):
        return f"<CacheMetadata type={self.cache_type} items={self.item_count}>"


class AuditLog(Base):
    """
    Audit log for security and compliance.
    Traces every significant action in the API.
    """
    __tablename__ = "audit_logs"

    id = Column(Integer, primary_key=True, autoincrement=True)

    # Action
    action = Column(String(100), nullable=False, index=True)  # 'CREATE_DEVIS', 'SEND_EMAIL', etc.
    ressource_type = Column(String(50), nullable=True)  # 'devis', 'facture', etc.
    ressource_id = Column(String(100), nullable=True, index=True)

    # User (if authentication is added later)
    utilisateur = Column(String(100), nullable=True)
    ip_address = Column(String(45), nullable=True)

    # Result
    succes = Column(Boolean, default=True)
    details = Column(Text, nullable=True)  # JSON-stringified
    erreur = Column(Text, nullable=True)

    # Timestamp
    date_action = Column(DateTime, default=datetime.now, nullable=False, index=True)

    def __repr__(self):
        return f"<AuditLog {self.action} on {self.ressource_type}/{self.ressource_id}>"
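To illustrate how the retry columns on EmailLog fit together, a small sketch (not part of this commit) that selects failed emails whose scheduled retry time has passed:

from datetime import datetime
from sqlalchemy import select

from database import EmailLog, StatutEmail, async_session_factory

async def emails_due_for_retry():
    # Failed emails whose prochain_retry timestamp is already in the past
    async with async_session_factory() as session:
        stmt = select(EmailLog).where(
            EmailLog.statut == StatutEmail.ERREUR,
            EmailLog.prochain_retry.is_not(None),
            EmailLog.prochain_retry <= datetime.now(),
        )
        result = await session.execute(stmt)
        return result.scalars().all()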
docker-compose.yml (new file, 13 lines)
@@ -0,0 +1,13 @@
version: "3.9"

services:
  vps-sage-api:
    build: .
    container_name: vps-sage-api
    env_file: .env
    volumes:
      # ✅ Mount an entire DIRECTORY instead of a single file
      - ./data:/app/data
    ports:
      - "8000:8000"
    restart: unless-stopped
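Typical usage is docker compose up -d --build. One caveat worth flagging: db_config.py defaults DATABASE_URL to sqlite+aiosqlite:///./sage_dataven.db, which resolves to /app/sage_dataven.db inside the container rather than the mounted /app/data volume, so the database file would not survive a container rebuild. Assuming the mounted folder is the intended location, the .env would need something like:

DATABASE_URL=sqlite+aiosqlite:////app/data/sage_dataven.db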
email_queue.py (new file, 346 lines)
@@ -0,0 +1,346 @@
# -*- coding: utf-8 -*-
"""
Email sending queue with threading and PDF generation.
VPS Linux version - uses sage_client to fetch the data.
"""

import threading
import queue
import time
import asyncio
from datetime import datetime, timedelta
from typing import Optional
from tenacity import retry, stop_after_attempt, wait_exponential
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.application import MIMEApplication
from config import settings
import logging

logger = logging.getLogger(__name__)


class EmailQueue:
    """
    Email queue with threaded workers and automatic retry
    """

    def __init__(self):
        self.queue = queue.Queue()
        self.workers = []
        self.running = False
        self.session_factory = None
        self.sage_client = None  # Injected from api.py

    def start(self, num_workers: int = 3):
        """Starts the workers"""
        if self.running:
            logger.warning("Queue déjà démarrée")
            return

        self.running = True
        for i in range(num_workers):
            worker = threading.Thread(
                target=self._worker,
                name=f"EmailWorker-{i}",
                daemon=True
            )
            worker.start()
            self.workers.append(worker)

        logger.info(f"✅ Queue email démarrée avec {num_workers} worker(s)")

    def stop(self):
        """Stops the workers cleanly"""
        logger.info("🛑 Arrêt de la queue email...")
        self.running = False

        # Wait for the queue to drain (max 30s)
        try:
            self.queue.join()
            logger.info("✅ Queue email arrêtée proprement")
        except:
            logger.warning("⚠️ Timeout lors de l'arrêt de la queue")

    def enqueue(self, email_log_id: str):
        """Adds an email to the queue"""
        self.queue.put(email_log_id)
        logger.debug(f"📨 Email {email_log_id} ajouté à la queue")

    def _worker(self):
        """Worker that processes emails in a thread"""
        # Create an event loop for this thread
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

        try:
            while self.running:
                try:
                    # Pull an email from the queue (1s timeout)
                    email_log_id = self.queue.get(timeout=1)

                    # Process the email
                    loop.run_until_complete(self._process_email(email_log_id))

                    # Mark it as handled
                    self.queue.task_done()

                except queue.Empty:
                    continue
                except Exception as e:
                    logger.error(f"❌ Erreur worker: {e}", exc_info=True)
                    try:
                        self.queue.task_done()
                    except:
                        pass
        finally:
            loop.close()

    async def _process_email(self, email_log_id: str):
        """Processes one email with automatic retry"""
        from database import EmailLog, StatutEmail
        from sqlalchemy import select

        if not self.session_factory:
            logger.error("❌ session_factory non configuré")
            return

        async with self.session_factory() as session:
            # Load the email log
            result = await session.execute(
                select(EmailLog).where(EmailLog.id == email_log_id)
            )
            email_log = result.scalar_one_or_none()

            if not email_log:
                logger.error(f"❌ Email log {email_log_id} introuvable")
                return

            # Mark as in progress
            email_log.statut = StatutEmail.EN_COURS
            email_log.nb_tentatives += 1
            await session.commit()

            try:
                # Send with automatic retry
                await self._send_with_retry(email_log)

                # Success
                email_log.statut = StatutEmail.ENVOYE
                email_log.date_envoi = datetime.now()
                email_log.derniere_erreur = None
                logger.info(f"✅ Email envoyé: {email_log.destinataire}")

            except Exception as e:
                # Failure
                email_log.statut = StatutEmail.ERREUR
                email_log.derniere_erreur = str(e)[:1000]  # Cap the size

                # Schedule a retry if < max attempts
                if email_log.nb_tentatives < settings.max_retry_attempts:
                    delay = settings.retry_delay_seconds * (2 ** (email_log.nb_tentatives - 1))
                    email_log.prochain_retry = datetime.now() + timedelta(seconds=delay)

                    # Schedule the retry
                    timer = threading.Timer(delay, self.enqueue, args=[email_log_id])
                    timer.daemon = True
                    timer.start()

                    logger.warning(f"⚠️ Retry prévu dans {delay}s pour {email_log.destinataire}")
                else:
                    logger.error(f"❌ Échec définitif: {email_log.destinataire} - {e}")

            await session.commit()

    @retry(
        stop=stop_after_attempt(3),
        wait=wait_exponential(multiplier=1, min=4, max=10)
    )
    async def _send_with_retry(self, email_log):
        """SMTP send with Tenacity retry + PDF generation"""
        # Build the message
        msg = MIMEMultipart()
        msg['From'] = settings.smtp_from
        msg['To'] = email_log.destinataire
        msg['Subject'] = email_log.sujet

        # HTML body
        msg.attach(MIMEText(email_log.corps_html, 'html'))

        # 📎 GENERATE AND ATTACH THE PDFs
        if email_log.document_ids:
            document_ids = email_log.document_ids.split(',')
            type_doc = email_log.type_document

            for doc_id in document_ids:
                doc_id = doc_id.strip()
                if not doc_id:
                    continue

                try:
                    # Generate the PDF (blocking call in a separate thread)
                    pdf_bytes = await asyncio.to_thread(
                        self._generate_pdf,
                        doc_id,
                        type_doc
                    )

                    if pdf_bytes:
                        # Attach the PDF
                        part = MIMEApplication(pdf_bytes, Name=f"{doc_id}.pdf")
                        part['Content-Disposition'] = f'attachment; filename="{doc_id}.pdf"'
                        msg.attach(part)
                        logger.info(f"📎 PDF attaché: {doc_id}.pdf")

                except Exception as e:
                    logger.error(f"❌ Erreur génération PDF {doc_id}: {e}")
                    # Keep going with the other PDFs

        # SMTP send (blocking, but run in a separate thread)
        await asyncio.to_thread(self._send_smtp, msg)

    def _generate_pdf(self, doc_id: str, type_doc: int) -> bytes:
        """
        PDF generation via ReportLab + sage_client

        ⚠️ This method is called from a worker thread
        """
        from reportlab.lib.pagesizes import A4
        from reportlab.pdfgen import canvas
        from reportlab.lib.units import cm
        from io import BytesIO

        if not self.sage_client:
            logger.error("❌ sage_client non configuré")
            raise Exception("sage_client non disponible")

        # 📡 Fetch the document from the Windows gateway over HTTP
        try:
            doc = self.sage_client.lire_document(doc_id, type_doc)
        except Exception as e:
            logger.error(f"❌ Erreur récupération document {doc_id}: {e}")
            raise Exception(f"Document {doc_id} inaccessible")

        if not doc:
            raise Exception(f"Document {doc_id} introuvable")

        # 📄 Build the PDF with ReportLab
        buffer = BytesIO()
        pdf = canvas.Canvas(buffer, pagesize=A4)
        width, height = A4

        # === HEADER ===
        pdf.setFont("Helvetica-Bold", 20)
        pdf.drawString(2*cm, height - 3*cm, f"Document N° {doc_id}")

        # Document type
        type_labels = {
            0: "DEVIS",
            1: "BON DE LIVRAISON",
            2: "BON DE RETOUR",
            3: "COMMANDE",
            4: "PRÉPARATION",
            5: "FACTURE"
        }
        type_label = type_labels.get(type_doc, "DOCUMENT")

        pdf.setFont("Helvetica", 12)
        pdf.drawString(2*cm, height - 4*cm, f"Type: {type_label}")

        # === CLIENT INFORMATION ===
        y = height - 5*cm
        pdf.setFont("Helvetica-Bold", 14)
        pdf.drawString(2*cm, y, "CLIENT")

        y -= 0.8*cm
        pdf.setFont("Helvetica", 11)
        pdf.drawString(2*cm, y, f"Code: {doc.get('client_code', '')}")
        y -= 0.6*cm
        pdf.drawString(2*cm, y, f"Nom: {doc.get('client_intitule', '')}")
        y -= 0.6*cm
        pdf.drawString(2*cm, y, f"Date: {doc.get('date', '')}")

        # === LINE ITEMS ===
        y -= 1.5*cm
        pdf.setFont("Helvetica-Bold", 14)
        pdf.drawString(2*cm, y, "ARTICLES")

        y -= 1*cm
        pdf.setFont("Helvetica-Bold", 10)
        pdf.drawString(2*cm, y, "Désignation")
        pdf.drawString(10*cm, y, "Qté")
        pdf.drawString(12*cm, y, "Prix Unit.")
        pdf.drawString(15*cm, y, "Total HT")

        y -= 0.5*cm
        pdf.line(2*cm, y, width - 2*cm, y)

        y -= 0.7*cm
        pdf.setFont("Helvetica", 9)

        for ligne in doc.get('lignes', []):
            # New page if needed
            if y < 3*cm:
                pdf.showPage()
                y = height - 3*cm
                pdf.setFont("Helvetica", 9)

            designation = ligne.get('designation', '')[:50]
            pdf.drawString(2*cm, y, designation)
            pdf.drawString(10*cm, y, str(ligne.get('quantite', 0)))
            pdf.drawString(12*cm, y, f"{ligne.get('prix_unitaire', 0):.2f}€")
            pdf.drawString(15*cm, y, f"{ligne.get('montant_ht', 0):.2f}€")
            y -= 0.6*cm

        # === TOTALS ===
        y -= 1*cm
        pdf.line(12*cm, y, width - 2*cm, y)

        y -= 0.8*cm
        pdf.setFont("Helvetica-Bold", 11)
        pdf.drawString(12*cm, y, "Total HT:")
        pdf.drawString(15*cm, y, f"{doc.get('total_ht', 0):.2f}€")

        y -= 0.6*cm
        pdf.drawString(12*cm, y, "TVA (20%):")
        tva = doc.get('total_ttc', 0) - doc.get('total_ht', 0)
        pdf.drawString(15*cm, y, f"{tva:.2f}€")

        y -= 0.6*cm
        pdf.setFont("Helvetica-Bold", 14)
        pdf.drawString(12*cm, y, "Total TTC:")
        pdf.drawString(15*cm, y, f"{doc.get('total_ttc', 0):.2f}€")

        # === FOOTER ===
        pdf.setFont("Helvetica", 8)
        pdf.drawString(2*cm, 2*cm, f"Généré le {datetime.now().strftime('%d/%m/%Y %H:%M')}")
        pdf.drawString(2*cm, 1.5*cm, "Sage 100c - API Dataven")

        # Finalize
        pdf.save()
        buffer.seek(0)

        logger.info(f"✅ PDF généré: {doc_id}.pdf")
        return buffer.read()

    def _send_smtp(self, msg):
        """Blocking SMTP send (called via asyncio.to_thread)"""
        try:
            with smtplib.SMTP(settings.smtp_host, settings.smtp_port, timeout=30) as server:
                if settings.smtp_use_tls:
                    server.starttls()

                if settings.smtp_user and settings.smtp_password:
                    server.login(settings.smtp_user, settings.smtp_password)

                server.send_message(msg)

        except smtplib.SMTPException as e:
            raise Exception(f"Erreur SMTP: {str(e)}")
        except Exception as e:
            raise Exception(f"Erreur envoi: {str(e)}")


# Global instance
email_queue = EmailQueue()
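session_factory and sage_client are deliberately left as None and, per the comments, injected from api.py. A sketch of what that wiring could look like with a FastAPI lifespan hook; build_sage_client() is an assumed helper, not something defined in this commit:

from contextlib import asynccontextmanager
from fastapi import FastAPI

from database import async_session_factory, close_db, init_db
from email_queue import email_queue

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Give the queue what it needs, then start its workers
    await init_db()
    email_queue.session_factory = async_session_factory
    email_queue.sage_client = build_sage_client()  # assumed helper, not in this commit
    email_queue.start(num_workers=3)
    yield
    email_queue.stop()
    await close_db()

app = FastAPI(lifespan=lifespan)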
init_db.py (new file, 63 lines)
@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
"""
Initialization script for the SQLite database.
Run this script before the first start of the API.

Usage:
    python init_db.py
"""

import asyncio
import sys
from pathlib import Path

# Add the parent directory to the path for imports
sys.path.insert(0, str(Path(__file__).parent))

from database import init_db  # ✅ Import from database/__init__.py
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


async def main():
    """Creates all the tables in sage_dataven.db"""

    print("\n" + "="*60)
    print("🚀 Initialisation de la base de données Sage Dataven")
    print("="*60 + "\n")

    try:
        # Create the tables
        await init_db()

        print("\n✅ Base de données créée avec succès!")
        print(f"📍 Fichier: sage_dataven.db")

        print("\n📊 Tables créées:")
        print(" ├─ email_logs (Journalisation emails)")
        print(" ├─ signature_logs (Suivi signatures Universign)")
        print(" ├─ workflow_logs (Transformations documents)")
        print(" ├─ cache_metadata (Métadonnées cache)")
        print(" └─ audit_logs (Journal d'audit)")

        print("\n📝 Prochaines étapes:")
        print(" 1. Configurer le fichier .env avec vos credentials")
        print(" 2. Lancer la gateway Windows sur la machine Sage")
        print(" 3. Lancer l'API VPS: uvicorn api:app --host 0.0.0.0 --port 8000")
        print(" 4. Ou avec Docker: docker-compose up -d")
        print(" 5. Tester: http://votre-vps:8000/docs")

        print("\n" + "="*60 + "\n")
        return True

    except Exception as e:
        print(f"\n❌ Erreur lors de l'initialisation: {e}")
        logger.exception("Détails de l'erreur:")
        return False


if __name__ == "__main__":
    result = asyncio.run(main())
    sys.exit(0 if result else 1)
requirements.txt (new file, 14 lines)
@@ -0,0 +1,14 @@
fastapi
uvicorn[standard]
pydantic
pydantic-settings
reportlab
requests
msal
python-multipart
email-validator
python-dotenv

sqlalchemy
aiosqlite
tenacity