Compare commits
No commits in common. "main" and "feat/updates_containers" have entirely different histories.
main...feat/updates_containers
48 changed files with 1487 additions and 5397 deletions
9  .gitignore  vendored
@@ -39,12 +39,3 @@ data/*.db.bak
*.db

tools/

.trunk
.env.staging
.env.production

.trunk

*clean*.py

@ -7,6 +7,7 @@ class Settings(BaseSettings):
|
|||
env_file=".env", env_file_encoding="utf-8", case_sensitive=False, extra="ignore"
|
||||
)
|
||||
|
||||
# === JWT & Auth ===
|
||||
jwt_secret: str
|
||||
jwt_algorithm: str
|
||||
access_token_expire_minutes: int
|
||||
|
|
@ -20,12 +21,15 @@ class Settings(BaseSettings):
|
|||
SAGE_TYPE_BON_AVOIR: int = 50
|
||||
SAGE_TYPE_FACTURE: int = 60
|
||||
|
||||
# === Sage Gateway (Windows) ===
|
||||
sage_gateway_url: str
|
||||
sage_gateway_token: str
|
||||
frontend_url: str
|
||||
|
||||
# === Base de données ===
|
||||
database_url: str = "sqlite+aiosqlite:///./data/sage_dataven.db"
|
||||
|
||||
# === SMTP ===
|
||||
smtp_host: str
|
||||
smtp_port: int = 587
|
||||
smtp_user: str
|
||||
|
|
@ -33,17 +37,21 @@ class Settings(BaseSettings):
|
|||
smtp_from: str
|
||||
smtp_use_tls: bool = True
|
||||
|
||||
# === Universign ===
|
||||
universign_api_key: str
|
||||
universign_api_url: str
|
||||
|
||||
# === API ===
|
||||
api_host: str
|
||||
api_port: int
|
||||
api_reload: bool = False
|
||||
|
||||
# === Email Queue ===
|
||||
max_email_workers: int = 3
|
||||
max_retry_attempts: int = 3
|
||||
retry_delay_seconds: int = 3
|
||||
|
||||
# === CORS ===
|
||||
cors_origins: List[str] = ["*"]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,125 +0,0 @@
-from fastapi import FastAPI
-from fastapi.middleware.cors import CORSMiddleware
-from typing import List
-import os
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-def configure_cors_open(app: FastAPI):
-    app.add_middleware(
-        CORSMiddleware,
-        allow_origins=["*"],
-        allow_credentials=False,
-        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
-        allow_headers=["*"],
-        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
-        max_age=3600,
-    )
-
-    logger.info(" CORS configuré: Mode OUVERT (sécurisé par API Keys)")
-    logger.info(" - Origins: * (toutes)")
-    logger.info(" - Headers: * (dont X-API-Key)")
-    logger.info(" - Credentials: False")
-
-
-def configure_cors_whitelist(app: FastAPI):
-    allowed_origins_str = os.getenv("CORS_ALLOWED_ORIGINS", "")
-
-    if allowed_origins_str:
-        allowed_origins = [
-            origin.strip()
-            for origin in allowed_origins_str.split(",")
-            if origin.strip()
-        ]
-    else:
-        allowed_origins = ["*"]
-
-    app.add_middleware(
-        CORSMiddleware,
-        allow_origins=allowed_origins,
-        allow_credentials=True,
-        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
-        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
-        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
-        max_age=3600,
-    )
-
-    logger.info(" CORS configuré: Mode WHITELIST")
-    logger.info(f" - Origins autorisées: {len(allowed_origins)}")
-    for origin in allowed_origins:
-        logger.info(f"   • {origin}")
-
-
-def configure_cors_regex(app: FastAPI):
-    origin_regex = r"*"
-
-    app.add_middleware(
-        CORSMiddleware,
-        allow_origin_regex=origin_regex,
-        allow_credentials=True,
-        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
-        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
-        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
-        max_age=3600,
-    )
-
-    logger.info(" CORS configuré: Mode REGEX")
-    logger.info(f" - Pattern: {origin_regex}")
-
-
-def configure_cors_hybrid(app: FastAPI):
-    from starlette.middleware.base import BaseHTTPMiddleware
-
-    class HybridCORSMiddleware(BaseHTTPMiddleware):
-        def __init__(self, app, known_origins: List[str]):
-            super().__init__(app)
-            self.known_origins = set(known_origins)
-
-        async def dispatch(self, request, call_next):
-            origin = request.headers.get("origin")
-
-            if origin in self.known_origins:
-                response = await call_next(request)
-                response.headers["Access-Control-Allow-Origin"] = origin
-                response.headers["Access-Control-Allow-Credentials"] = "true"
-                response.headers["Access-Control-Allow-Methods"] = (
-                    "GET, POST, PUT, DELETE, PATCH, OPTIONS"
-                )
-                response.headers["Access-Control-Allow-Headers"] = (
-                    "Content-Type, Authorization, X-API-Key"
-                )
-                return response
-
-            response = await call_next(request)
-            response.headers["Access-Control-Allow-Origin"] = "*"
-            response.headers["Access-Control-Allow-Methods"] = (
-                "GET, POST, PUT, DELETE, PATCH, OPTIONS"
-            )
-            response.headers["Access-Control-Allow-Headers"] = "*"
-            return response
-
-    known_origins = ["*"]
-
-    app.add_middleware(HybridCORSMiddleware, known_origins=known_origins)
-
-    logger.info(" CORS configuré: Mode HYBRIDE")
-    logger.info(f" - Whitelist: {len(known_origins)} domaines")
-    logger.info(" - Fallback: * (ouvert)")
-
-
-def setup_cors(app: FastAPI, mode: str = "open"):
-    if mode == "open":
-        configure_cors_open(app)
-    elif mode == "whitelist":
-        configure_cors_whitelist(app)
-    elif mode == "regex":
-        configure_cors_regex(app)
-    elif mode == "hybrid":
-        configure_cors_hybrid(app)
-    else:
-        logger.warning(
-            f" Mode CORS inconnu: {mode}. Utilisation de 'open' par défaut."
-        )
-        configure_cors_open(app)

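Reviewer note: in the removed configure_cors_regex above, origin_regex = r"*" is not a valid regular expression — a bare * has nothing to repeat, and Starlette's CORSMiddleware compiles allow_origin_regex with re.compile, so this mode would have raised re.error at startup. If a regex mode were ever reintroduced, a valid pattern would be needed; a sketch (example.com is a placeholder, not a value from this repo):

    import re

    # A valid "allow everything" pattern; r"*" would raise re.error here.
    origin_regex = r".*"

    # More useful in practice: allow only known subdomains (hypothetical domain).
    origin_regex = r"https://.*\.example\.com"
    re.compile(origin_regex)  # sanity-check the pattern at import time
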
@@ -1,75 +1,48 @@
-from fastapi import Depends, HTTPException, status, Request
+from fastapi import Depends, HTTPException, status
 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select
-from typing import Optional
-from jwt.exceptions import InvalidTokenError
 
 from database import get_session, User
 from security.auth import decode_token
+from typing import Optional
 from datetime import datetime
 
-security = HTTPBearer(auto_error=False)
+security = HTTPBearer()
 
 
-async def get_current_user_hybrid(
-    request: Request,
-    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
+async def get_current_user(
+    credentials: HTTPAuthorizationCredentials = Depends(security),
     session: AsyncSession = Depends(get_session),
 ) -> User:
-    api_key_obj = getattr(request.state, "api_key", None)
+    token = credentials.credentials
 
-    if api_key_obj:
-        if api_key_obj.user_id:
-            result = await session.execute(
-                select(User).where(User.id == api_key_obj.user_id)
-            )
-            user = result.scalar_one_or_none()
-
-            if user:
-                user._is_api_key_user = True
-                user._api_key_obj = api_key_obj
-                return user
-
-        virtual_user = User(
-            id=f"api_key_{api_key_obj.id}",
-            email=f"api_key_{api_key_obj.id}@virtual.local",
-            nom=api_key_obj.name,
-            prenom="API",
-            hashed_password="",
-            role="api_client",
-            is_active=True,
-            is_verified=True,
-        )
-
-        virtual_user._is_api_key_user = True
-        virtual_user._api_key_obj = api_key_obj
-
-        return virtual_user
-
-    if not credentials:
+    payload = decode_token(token)
+    if not payload:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Authentification requise (JWT ou API Key)",
+            detail="Token invalide ou expiré",
             headers={"WWW-Authenticate": "Bearer"},
         )
 
-    token = credentials.credentials
-
-    try:
-        payload = decode_token(token)
-        user_id: str = payload.get("sub")
-
-        if user_id is None:
+    if payload.get("type") != "access":
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Token invalide: user_id manquant",
+            detail="Type de token incorrect",
             headers={"WWW-Authenticate": "Bearer"},
         )
 
+    user_id: str = payload.get("sub")
+    if not user_id:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Token malformé",
+            headers={"WWW-Authenticate": "Bearer"},
+        )
+
     result = await session.execute(select(User).where(User.id == user_id))
     user = result.scalar_one_or_none()
 
-    if user is None:
+    if not user:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
             detail="Utilisateur introuvable",

@@ -77,42 +50,45 @@ async def get_current_user_hybrid(
         )
 
     if not user.is_active:
         raise HTTPException(
+            status_code=status.HTTP_403_FORBIDDEN, detail="Compte désactivé"
+        )
+
+    if not user.is_verified:
+        raise HTTPException(
             status_code=status.HTTP_403_FORBIDDEN,
-            detail="Utilisateur inactif",
+            detail="Email non vérifié. Consultez votre boîte de réception.",
         )
 
+    if user.locked_until and user.locked_until > datetime.now():
+        raise HTTPException(
+            status_code=status.HTTP_403_FORBIDDEN,
+            detail="Compte temporairement verrouillé suite à trop de tentatives échouées",
+        )
+
     return user
 
-    except InvalidTokenError as e:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail=f"Token invalide: {str(e)}",
-            headers={"WWW-Authenticate": "Bearer"},
-        )
-
+
+async def get_current_user_optional(
+    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
+    session: AsyncSession = Depends(get_session),
+) -> Optional[User]:
+    if not credentials:
+        return None
+
+    try:
+        return await get_current_user(credentials, session)
+    except HTTPException:
+        return None
+
+
-def require_role_hybrid(*allowed_roles: str):
-    async def role_checker(user: User = Depends(get_current_user_hybrid)) -> User:
+def require_role(*allowed_roles: str):
+    async def role_checker(user: User = Depends(get_current_user)) -> User:
         if user.role not in allowed_roles:
             raise HTTPException(
                 status_code=status.HTTP_403_FORBIDDEN,
-                detail=f"Accès interdit. Rôles autorisés: {', '.join(allowed_roles)}",
+                detail=f"Accès refusé. Rôles requis: {', '.join(allowed_roles)}",
             )
         return user
 
     return role_checker
 
 
-def is_api_key_user(user: User) -> bool:
-    """Vérifie si l'utilisateur est authentifié via API Key"""
-    return getattr(user, "_is_api_key_user", False)
-
-
-def get_api_key_from_user(user: User):
-    """Récupère l'objet ApiKey depuis un utilisateur (si applicable)"""
-    return getattr(user, "_api_key_obj", None)
-
-
-get_current_user = get_current_user_hybrid
-require_role = require_role_hybrid

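For context, a minimal sketch of how the rewritten get_current_user and require_role dependencies would be consumed by a route; the router prefix and handler names below are illustrative, not taken from this compare:

    from fastapi import APIRouter, Depends

    from core.dependencies import get_current_user, require_role
    from database import User

    router = APIRouter(prefix="/admin", tags=["Admin"])  # hypothetical router


    @router.get("/stats", dependencies=[Depends(require_role("admin", "super_admin"))])
    async def admin_stats():
        # Reached only when the JWT user has one of the allowed roles;
        # otherwise require_role raises a 403 before the handler runs.
        return {"ok": True}


    @router.get("/me")
    async def who_am_i(user: User = Depends(get_current_user)):
        # get_current_user validates the Bearer token and loads the User row.
        return {"email": user.email, "role": user.role}

Note that with HTTPBearer() (auto_error=True), a missing Authorization header is now rejected by the security scheme itself before any of the custom 401 branches run.
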
@@ -19,6 +19,7 @@ async def create_admin():
     print(" Création d'un compte administrateur")
     print("=" * 60 + "\n")
 
+    # Saisie des informations
     email = input("Email de l'admin: ").strip().lower()
     if not email or "@" not in email:
         print(" Email invalide")

@@ -31,6 +32,7 @@ async def create_admin():
         print(" Prénom et nom requis")
         return False
 
+    # Mot de passe avec validation
     while True:
         password = input(
             "Mot de passe (min 8 car., 1 maj, 1 min, 1 chiffre, 1 spécial): "

@@ -56,6 +58,7 @@ async def create_admin():
         print(f"\n Un utilisateur avec l'email {email} existe déjà")
         return False
 
+    # Créer l'admin
     admin = User(
         id=str(uuid.uuid4()),
         email=email,

@@ -152,7 +152,7 @@ templates_signature_email = {
     </table>
 
     <p style="color: #718096; font-size: 13px; line-height: 1.5; margin: 0;">
-        <strong> Signature électronique sécurisée</strong><br>
+        <strong>🔒 Signature électronique sécurisée</strong><br>
         Votre signature est protégée par notre partenaire de confiance <strong>Universign</strong>,
         certifié eIDAS et conforme au RGPD. Votre identité sera vérifiée et le document sera
         horodaté de manière infalsifiable.

@@ -1,18 +0,0 @@
-import enum
-
-
-class StatutEmail(str, enum.Enum):
-    EN_ATTENTE = "EN_ATTENTE"
-    EN_COURS = "EN_COURS"
-    ENVOYE = "ENVOYE"
-    OUVERT = "OUVERT"
-    ERREUR = "ERREUR"
-    BOUNCE = "BOUNCE"
-
-
-class StatutSignature(str, enum.Enum):
-    EN_ATTENTE = "EN_ATTENTE"
-    ENVOYE = "ENVOYE"
-    SIGNE = "SIGNE"
-    REFUSE = "REFUSE"
-    EXPIRE = "EXPIRE"

@@ -1,49 +1,20 @@
 import os
 from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
 from sqlalchemy.pool import NullPool
-from sqlalchemy import event, text
 import logging
 
-from config.config import settings
 from database.models.generic_model import Base
 
 logger = logging.getLogger(__name__)
 
-DATABASE_URL = settings.database_url
-
-
-def _configure_sqlite_connection(dbapi_connection, connection_record):
-    cursor = dbapi_connection.cursor()
-    cursor.execute("PRAGMA journal_mode=WAL")
-    cursor.execute("PRAGMA busy_timeout=30000")
-    cursor.execute("PRAGMA synchronous=NORMAL")
-    cursor.execute("PRAGMA cache_size=-64000")  # 64MB
-    cursor.execute("PRAGMA foreign_keys=ON")
-    cursor.execute("PRAGMA locking_mode=NORMAL")
-    cursor.close()
-
-    logger.debug("SQLite configuré avec WAL mode et busy_timeout=30s")
-
-
-engine_kwargs = {
-    "echo": False,
-    "future": True,
-    "poolclass": NullPool,
-}
-
-if DATABASE_URL and "sqlite" in DATABASE_URL:
-    engine_kwargs["connect_args"] = {
-        "check_same_thread": False,
-        "timeout": 30,
-    }
-
-engine = create_async_engine(DATABASE_URL, **engine_kwargs)
-
-if DATABASE_URL and "sqlite" in DATABASE_URL:
-
-    @event.listens_for(engine.sync_engine, "connect")
-    def set_sqlite_pragma(dbapi_connection, connection_record):
-        _configure_sqlite_connection(dbapi_connection, connection_record)
+DATABASE_URL = os.getenv("DATABASE_URL")
+
+engine = create_async_engine(
+    DATABASE_URL,
+    echo=False,
+    future=True,
+    poolclass=NullPool,
+)
 
 async_session_factory = async_sessionmaker(
     engine,

@@ -59,12 +30,6 @@ async def init_db():
     logger.info("Tentative de connexion")
     async with engine.begin() as conn:
         logger.info("Connexion etablie")
 
-        if DATABASE_URL and "sqlite" in DATABASE_URL:
-            result = await conn.execute(text("PRAGMA journal_mode"))
-            journal_mode = result.scalar()
-            logger.info(f"SQLite journal_mode: {journal_mode}")
-
         await conn.run_sync(Base.metadata.create_all)
         logger.info("create_all execute")

@@ -84,57 +49,3 @@ async def get_session() -> AsyncSession:
 async def close_db():
     await engine.dispose()
     logger.info("Connexions DB fermées")
-
-
-async def execute_with_sqlite_retry(
-    session: AsyncSession, statement, max_retries: int = 5, base_delay: float = 0.1
-):
-    import asyncio
-    from sqlalchemy.exc import OperationalError
-
-    last_error = None
-
-    for attempt in range(max_retries):
-        try:
-            result = await session.execute(statement)
-            return result
-        except OperationalError as e:
-            last_error = e
-            if "database is locked" in str(e).lower():
-                delay = base_delay * (2**attempt)
-                logger.warning(
-                    f"SQLite locked, tentative {attempt + 1}/{max_retries}, "
-                    f"retry dans {delay:.2f}s"
-                )
-                await asyncio.sleep(delay)
-            else:
-                raise
-
-    raise last_error
-
-
-async def commit_with_retry(
-    session: AsyncSession, max_retries: int = 5, base_delay: float = 0.1
-):
-    import asyncio
-    from sqlalchemy.exc import OperationalError
-
-    last_error = None
-
-    for attempt in range(max_retries):
-        try:
-            await session.commit()
-            return
-        except OperationalError as e:
-            last_error = e
-            if "database is locked" in str(e).lower():
-                delay = base_delay * (2**attempt)
-                logger.warning(
-                    f"SQLite locked lors du commit, tentative {attempt + 1}/{max_retries}, "
-                    f"retry dans {delay:.2f}s"
-                )
-                await asyncio.sleep(delay)
-            else:
-                raise
-
-    raise last_error

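One consequence of this simplification: DATABASE_URL now comes only from the environment, and os.getenv returns None when the variable is unset, which makes create_async_engine fail at import time with an unhelpful error. The compose files in this compare do inject DATABASE_URL, but a fail-fast guard is a cheap safety net — a sketch, assuming no default URL is wanted:

    import os

    DATABASE_URL = os.getenv("DATABASE_URL")
    if DATABASE_URL is None:
        # Fail with a clear message instead of create_async_engine(None).
        raise RuntimeError("DATABASE_URL environment variable is required")

Also worth noting: the WAL/busy_timeout pragmas and the locked-database retry helpers are removed while NullPool and SQLite stay, so concurrent writers may again surface "database is locked" errors under load.
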
@@ -1,73 +0,0 @@
-from sqlalchemy import Column, String, Boolean, DateTime, Integer, Text
-from typing import Optional, List
-import json
-from datetime import datetime
-import uuid
-
-from database.models.generic_model import Base
-
-
-class ApiKey(Base):
-    """Modèle pour les clés API publiques"""
-
-    __tablename__ = "api_keys"
-
-    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
-    key_hash = Column(String(64), unique=True, nullable=False, index=True)
-    key_prefix = Column(String(10), nullable=False)
-
-    name = Column(String(255), nullable=False)
-    description = Column(Text, nullable=True)
-
-    user_id = Column(String(36), nullable=True)
-    created_by = Column(String(255), nullable=False)
-
-    is_active = Column(Boolean, default=True, nullable=False)
-    rate_limit_per_minute = Column(Integer, default=60, nullable=False)
-    allowed_endpoints = Column(Text, nullable=True)
-
-    total_requests = Column(Integer, default=0, nullable=False)
-    last_used_at = Column(DateTime, nullable=True)
-
-    created_at = Column(DateTime, default=datetime.now, nullable=False)
-    expires_at = Column(DateTime, nullable=True)
-    revoked_at = Column(DateTime, nullable=True)
-
-    def __repr__(self):
-        return f"<ApiKey(name='{self.name}', prefix='{self.key_prefix}', active={self.is_active})>"
-
-
-class SwaggerUser(Base):
-    """Modèle pour les utilisateurs autorisés à accéder au Swagger"""
-
-    __tablename__ = "swagger_users"
-
-    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
-    username = Column(String(100), unique=True, nullable=False, index=True)
-    hashed_password = Column(String(255), nullable=False)
-
-    full_name = Column(String(255), nullable=True)
-    email = Column(String(255), nullable=True)
-
-    is_active = Column(Boolean, default=True, nullable=False)
-
-    allowed_tags = Column(Text, nullable=True)
-
-    created_at = Column(DateTime, default=datetime.now, nullable=False)
-    last_login = Column(DateTime, nullable=True)
-
-    @property
-    def allowed_tags_list(self) -> Optional[List[str]]:
-        if self.allowed_tags:
-            try:
-                return json.loads(self.allowed_tags)
-            except json.JSONDecodeError:
-                return None
-        return None
-
-    @allowed_tags_list.setter
-    def allowed_tags_list(self, tags: Optional[List[str]]):
-        self.allowed_tags = json.dumps(tags) if tags is not None else None
-
-    def __repr__(self):
-        return f"<SwaggerUser(username='{self.username}', active={self.is_active})>"

@@ -61,7 +61,8 @@ class SageDocumentType(int, Enum):
 class UniversignTransaction(Base):
     __tablename__ = "universign_transactions"
 
-    id = Column(String(36), primary_key=True)
+    # === IDENTIFIANTS ===
+    id = Column(String(36), primary_key=True)  # UUID local
     transaction_id = Column(
         String(255),
         unique=True,

@@ -70,6 +71,7 @@ class UniversignTransaction(Base):
         comment="ID Universign (ex: tr_abc123)",
     )
 
+    # === LIEN AVEC LE DOCUMENT SAGE ===
     sage_document_id = Column(
         String(50),
         nullable=False,

@@ -80,6 +82,7 @@ class UniversignTransaction(Base):
         SQLEnum(SageDocumentType), nullable=False, comment="Type de document Sage"
     )
 
+    # === STATUTS UNIVERSIGN (SOURCE DE VÉRITÉ) ===
     universign_status = Column(
         SQLEnum(UniversignTransactionStatus),
         nullable=False,

@@ -91,6 +94,7 @@ class UniversignTransaction(Base):
         DateTime, nullable=True, comment="Dernière MAJ du statut Universign"
     )
 
+    # === STATUT LOCAL (DÉDUIT) ===
     local_status = Column(
         SQLEnum(LocalDocumentStatus),
         nullable=False,

@@ -99,6 +103,7 @@ class UniversignTransaction(Base):
         comment="Statut métier simplifié pour l'UI",
     )
 
+    # === URLS ET MÉTADONNÉES UNIVERSIGN ===
     signer_url = Column(Text, nullable=True, comment="URL de signature")
     document_url = Column(Text, nullable=True, comment="URL du document signé")
 

@@ -120,14 +125,17 @@ class UniversignTransaction(Base):
 
     certificate_url = Column(Text, nullable=True, comment="URL du certificat")
 
+    # === SIGNATAIRES ===
     signers_data = Column(
         Text, nullable=True, comment="JSON des signataires (snapshot)"
     )
 
+    # === INFORMATIONS MÉTIER ===
     requester_email = Column(String(255), nullable=True)
     requester_name = Column(String(255), nullable=True)
     document_name = Column(String(500), nullable=True)
 
+    # === DATES CLÉS ===
     created_at = Column(
         DateTime,
         default=datetime.now,

@@ -142,12 +150,14 @@ class UniversignTransaction(Base):
     expired_at = Column(DateTime, nullable=True)
     canceled_at = Column(DateTime, nullable=True)
 
+    # === SYNCHRONISATION ===
     last_synced_at = Column(
         DateTime, nullable=True, comment="Dernière sync réussie avec Universign"
     )
     sync_attempts = Column(Integer, default=0, comment="Nombre de tentatives de sync")
     sync_error = Column(Text, nullable=True)
 
+    # === FLAGS ===
     is_test = Column(
         Boolean, default=False, comment="Transaction en environnement .alpha"
     )

@@ -156,6 +166,7 @@ class UniversignTransaction(Base):
     )
     webhook_received = Column(Boolean, default=False, comment="Webhook Universign reçu")
 
+    # === RELATION ===
     signers = relationship(
         "UniversignSigner", back_populates="transaction", cascade="all, delete-orphan"
     )

@@ -163,6 +174,7 @@ class UniversignTransaction(Base):
         "UniversignSyncLog", back_populates="transaction", cascade="all, delete-orphan"
     )
 
+    # === INDEXES COMPOSITES ===
     __table_args__ = (
         Index("idx_sage_doc", "sage_document_id", "sage_document_type"),
         Index("idx_sync_status", "needs_sync", "universign_status"),

@@ -178,6 +190,10 @@ class UniversignTransaction(Base):
 
 
 class UniversignSigner(Base):
+    """
+    Détail de chaque signataire d'une transaction
+    """
+
     __tablename__ = "universign_signers"
 
     id = Column(String(36), primary_key=True)

@@ -188,27 +204,33 @@ class UniversignSigner(Base):
         index=True,
     )
 
+    # === DONNÉES SIGNATAIRE ===
     email = Column(String(255), nullable=False, index=True)
     name = Column(String(255), nullable=True)
     phone = Column(String(50), nullable=True)
 
+    # === STATUT ===
     status = Column(
         SQLEnum(UniversignSignerStatus),
         default=UniversignSignerStatus.WAITING,
         nullable=False,
     )
 
+    # === ACTIONS ===
     viewed_at = Column(DateTime, nullable=True)
     signed_at = Column(DateTime, nullable=True)
     refused_at = Column(DateTime, nullable=True)
     refusal_reason = Column(Text, nullable=True)
 
+    # === MÉTADONNÉES ===
     ip_address = Column(String(45), nullable=True)
     user_agent = Column(Text, nullable=True)
     signature_method = Column(String(50), nullable=True)
 
+    # === ORDRE ===
     order_index = Column(Integer, default=0)
 
+    # === RELATION ===
     transaction = relationship("UniversignTransaction", back_populates="signers")
 
     def __repr__(self):

@@ -216,6 +238,10 @@ class UniversignSigner(Base):
 
 
 class UniversignSyncLog(Base):
+    """
+    Journal de toutes les synchronisations (audit trail)
+    """
+
     __tablename__ = "universign_sync_logs"
 
     id = Column(Integer, primary_key=True, autoincrement=True)

@@ -226,18 +252,22 @@ class UniversignSyncLog(Base):
         index=True,
     )
 
+    # === SYNC INFO ===
     sync_type = Column(String(50), nullable=False, comment="webhook, polling, manual")
     sync_timestamp = Column(DateTime, default=datetime.now, nullable=False, index=True)
 
+    # === CHANGEMENTS DÉTECTÉS ===
     previous_status = Column(String(50), nullable=True)
     new_status = Column(String(50), nullable=True)
     changes_detected = Column(Text, nullable=True, comment="JSON des changements")
 
+    # === RÉSULTAT ===
     success = Column(Boolean, default=True)
     error_message = Column(Text, nullable=True)
     http_status_code = Column(Integer, nullable=True)
     response_time_ms = Column(Integer, nullable=True)
 
+    # === RELATION ===
     transaction = relationship("UniversignTransaction", back_populates="sync_logs")
 
     def __repr__(self):

@@ -257,6 +287,7 @@ class UniversignConfig(Base):
     api_url = Column(String(500), nullable=False)
     api_key = Column(String(500), nullable=False, comment="À chiffrer")
 
+    # === OPTIONS ===
     webhook_url = Column(String(500), nullable=True)
     webhook_secret = Column(String(255), nullable=True)

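A sketch of the kind of lookup the idx_sage_doc composite index serves — filtering on (sage_document_id, sage_document_type) in the index's column order. FACTURE is assumed as an enum member name based on the SAGE_TYPE_FACTURE setting above; adjust to the real members of SageDocumentType:

    from sqlalchemy import select

    from database import UniversignTransaction, SageDocumentType


    async def find_transactions_for_document(session, doc_id: str):
        # Filters match the column order of Index("idx_sage_doc", ...),
        # so the composite index can satisfy this query.
        stmt = select(UniversignTransaction).where(
            UniversignTransaction.sage_document_id == doc_id,
            UniversignTransaction.sage_document_type == SageDocumentType.FACTURE,
        )
        result = await session.execute(stmt)
        return result.scalars().all()
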
@@ -1,6 +1,6 @@
 services:
   backend:
-    container_name: dev-sage-api
+    container_name: vps-sage-api
     build:
       context: .
       target: dev

@@ -18,7 +18,7 @@ services:
       DATABASE_URL: "sqlite+aiosqlite:///./data/sage_dataven.db"
     restart: unless-stopped
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8000/"]
+      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
       interval: 30s
       timeout: 10s
       retries: 3

@@ -16,7 +16,7 @@ services:
       DATABASE_URL: "sqlite+aiosqlite:///./data/sage_prod.db"
     restart: always
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8004/"]
+      test: ["CMD", "curl", "-f", "http://localhost:8004/health"]
       interval: 30s
       timeout: 10s
       retries: 5

@@ -16,7 +16,7 @@ services:
       DATABASE_URL: "sqlite+aiosqlite:///./data/sage_staging.db"
     restart: unless-stopped
     healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8002/"]
+      test: ["CMD", "curl", "-f", "http://localhost:8002/health"]
       interval: 30s
       timeout: 10s
       retries: 3

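All three compose files now probe /health instead of /, so the application must expose that route or every container will cycle as unhealthy. The actual handler is not visible in this compare; a minimal sketch of what it needs to provide:

    from fastapi import FastAPI

    app = FastAPI()


    @app.get("/health")
    async def health():
        # Kept dependency-free so the Docker healthcheck stays cheap and
        # never blocks on the database or external services.
        return {"status": "ok"}

(/health also appears in the removed middleware's EXCLUDED_PATHS below, so the route was already expected to be reachable without authentication.)
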
1113  email_queue.py
File diff suppressed because it is too large.
@@ -1,295 +0,0 @@
-from fastapi import Request, status
-from fastapi.responses import JSONResponse
-from fastapi.security import HTTPBasic, HTTPBasicCredentials
-from starlette.middleware.base import BaseHTTPMiddleware
-from starlette.types import ASGIApp, Receive, Send
-from sqlalchemy import select
-from typing import Callable, Optional
-from datetime import datetime
-import logging
-import base64
-import json
-
-logger = logging.getLogger(__name__)
-
-security = HTTPBasic()
-
-
-class SwaggerAuthMiddleware:
-    PROTECTED_PATHS = ["/docs", "/redoc", "/openapi.json"]
-
-    def __init__(self, app: ASGIApp):
-        self.app = app
-
-    async def __call__(self, scope, receive: Receive, send: Send):
-        if scope["type"] != "http":
-            await self.app(scope, receive, send)
-            return
-
-        request = Request(scope, receive=receive)
-        path = request.url.path
-
-        if not any(path.startswith(p) for p in self.PROTECTED_PATHS):
-            await self.app(scope, receive, send)
-            return
-
-        auth_header = request.headers.get("Authorization")
-
-        if not auth_header or not auth_header.startswith("Basic "):
-            response = JSONResponse(
-                status_code=status.HTTP_401_UNAUTHORIZED,
-                content={"detail": "Authentification requise pour la documentation"},
-                headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
-            )
-            await response(scope, receive, send)
-            return
-
-        try:
-            encoded_credentials = auth_header.split(" ")[1]
-            decoded_credentials = base64.b64decode(encoded_credentials).decode("utf-8")
-            username, password = decoded_credentials.split(":", 1)
-
-            credentials = HTTPBasicCredentials(username=username, password=password)
-
-            swagger_user = await self._verify_credentials(credentials)
-
-            if not swagger_user:
-                response = JSONResponse(
-                    status_code=status.HTTP_401_UNAUTHORIZED,
-                    content={"detail": "Identifiants invalides"},
-                    headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
-                )
-                await response(scope, receive, send)
-                return
-
-            if "state" not in scope:
-                scope["state"] = {}
-
-            scope["state"]["swagger_user"] = swagger_user
-
-            logger.info(
-                f"✓ Swagger auth: {swagger_user['username']} - tags: {swagger_user.get('allowed_tags', 'ALL')}"
-            )
-
-        except Exception as e:
-            logger.error(f" Erreur parsing auth header: {e}")
-            response = JSONResponse(
-                status_code=status.HTTP_401_UNAUTHORIZED,
-                content={"detail": "Format d'authentification invalide"},
-                headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
-            )
-            await response(scope, receive, send)
-            return
-
-        await self.app(scope, receive, send)
-
-    async def _verify_credentials(
-        self, credentials: HTTPBasicCredentials
-    ) -> Optional[dict]:
-        from database.db_config import async_session_factory
-        from database.models.api_key import SwaggerUser
-        from security.auth import verify_password
-
-        try:
-            async with async_session_factory() as session:
-                result = await session.execute(
-                    select(SwaggerUser).where(
-                        SwaggerUser.username == credentials.username
-                    )
-                )
-                swagger_user = result.scalar_one_or_none()
-
-                if swagger_user and swagger_user.is_active:
-                    if verify_password(
-                        credentials.password, swagger_user.hashed_password
-                    ):
-                        swagger_user.last_login = datetime.now()
-                        await session.commit()
-
-                        logger.info(f"✓ Accès Swagger autorisé: {credentials.username}")
-
-                        return {
-                            "id": swagger_user.id,
-                            "username": swagger_user.username,
-                            "allowed_tags": swagger_user.allowed_tags_list,
-                            "is_active": swagger_user.is_active,
-                        }
-
-                logger.warning(f"✗ Accès Swagger refusé: {credentials.username}")
-                return None
-
-        except Exception as e:
-            logger.error(f" Erreur vérification credentials: {e}", exc_info=True)
-            return None
-
-
-class ApiKeyMiddlewareHTTP(BaseHTTPMiddleware):
-    EXCLUDED_PATHS = [
-        "/docs",
-        "/redoc",
-        "/openapi.json",
-        "/",
-        "/health",
-        "/auth",
-        "/api-keys/verify",
-        "/universign/webhook",
-    ]
-
-    def _is_excluded_path(self, path: str) -> bool:
-        """Vérifie si le chemin est exclu de l'authentification API Key"""
-        if path == "/":
-            return True
-
-        for excluded in self.EXCLUDED_PATHS:
-            if excluded == "/":
-                continue
-            if path == excluded or path.startswith(excluded + "/"):
-                return True
-
-        return False
-
-    async def dispatch(self, request: Request, call_next: Callable):
-        path = request.url.path
-        method = request.method
-
-        if self._is_excluded_path(path):
-            return await call_next(request)
-
-        auth_header = request.headers.get("Authorization")
-        api_key_header = request.headers.get("X-API-Key")
-
-        if api_key_header:
-            api_key_header = api_key_header.strip()
-            if not api_key_header or api_key_header == "":
-                api_key_header = None
-
-        if auth_header and auth_header.startswith("Bearer "):
-            token = auth_header.split(" ", 1)[1].strip()
-
-            if token.startswith("sdk_live_"):
-                logger.warning(
-                    " API Key envoyée dans Authorization au lieu de X-API-Key"
-                )
-                return await self._handle_api_key_auth(
-                    request, token, path, method, call_next
-                )
-
-            logger.debug(f"JWT détecté pour {method} {path} → délégation à FastAPI")
-            request.state.authenticated_via = "jwt"
-            return await call_next(request)
-
-        if api_key_header:
-            logger.debug(f" API Key détectée pour {method} {path}")
-            return await self._handle_api_key_auth(
-                request, api_key_header, path, method, call_next
-            )
-
-        logger.debug(f" Aucune auth pour {method} {path} → délégation à FastAPI")
-        return await call_next(request)
-
-    async def _handle_api_key_auth(
-        self,
-        request: Request,
-        api_key: str,
-        path: str,
-        method: str,
-        call_next: Callable,
-    ):
-        try:
-            from database.db_config import async_session_factory
-            from services.api_key import ApiKeyService
-
-            async with async_session_factory() as session:
-                service = ApiKeyService(session)
-
-                api_key_obj = await service.verify_api_key(api_key)
-
-                if not api_key_obj:
-                    logger.warning(f"🔒 Clé API invalide: {method} {path}")
-                    return JSONResponse(
-                        status_code=status.HTTP_401_UNAUTHORIZED,
-                        content={
-                            "detail": "Clé API invalide ou expirée",
-                            "hint": "Vérifiez votre clé X-API-Key",
-                        },
-                    )
-
-                is_allowed, rate_info = await service.check_rate_limit(api_key_obj)
-                if not is_allowed:
-                    logger.warning(f"⏱️ Rate limit: {api_key_obj.name}")
-                    return JSONResponse(
-                        status_code=status.HTTP_429_TOO_MANY_REQUESTS,
-                        content={"detail": "Rate limit dépassé"},
-                        headers={
-                            "X-RateLimit-Limit": str(rate_info["limit"]),
-                            "X-RateLimit-Remaining": "0",
-                        },
-                    )
-
-                has_access = await service.check_endpoint_access(api_key_obj, path)
-
-                if not has_access:
-                    allowed = (
-                        json.loads(api_key_obj.allowed_endpoints)
-                        if api_key_obj.allowed_endpoints
-                        else ["Tous"]
-                    )
-
-                    logger.warning(
-                        f"🚫 ACCÈS REFUSÉ: {api_key_obj.name}\n"
-                        f"   Endpoint demandé: {path}\n"
-                        f"   Endpoints autorisés: {allowed}"
-                    )
-
-                    return JSONResponse(
-                        status_code=status.HTTP_403_FORBIDDEN,
-                        content={
-                            "detail": "Accès non autorisé à cet endpoint",
-                            "endpoint_requested": path,
-                            "api_key_name": api_key_obj.name,
-                            "allowed_endpoints": allowed,
-                            "hint": "Cette clé API n'a pas accès à cet endpoint.",
-                        },
-                    )
-
-                request.state.api_key = api_key_obj
-                request.state.authenticated_via = "api_key"
-
-                logger.info(f" ACCÈS AUTORISÉ: {api_key_obj.name} → {method} {path}")
-
-                return await call_next(request)
-
-        except Exception as e:
-            logger.error(f"💥 Erreur validation API Key: {e}", exc_info=True)
-            return JSONResponse(
-                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-                content={"detail": f"Erreur interne: {str(e)}"},
-            )
-
-
-ApiKeyMiddleware = ApiKeyMiddlewareHTTP
-
-
-def get_api_key_from_request(request: Request) -> Optional:
-    """Récupère l'objet ApiKey depuis la requête si présent"""
-    return getattr(request.state, "api_key", None)
-
-
-def get_auth_method(request: Request) -> str:
-    """Retourne la méthode d'authentification utilisée"""
-    return getattr(request.state, "authenticated_via", "none")
-
-
-def get_swagger_user_from_request(request: Request) -> Optional[dict]:
-    """Récupère l'utilisateur Swagger depuis la requête"""
-    return getattr(request.state, "swagger_user", None)
-
-
-__all__ = [
-    "SwaggerAuthMiddleware",
-    "ApiKeyMiddlewareHTTP",
-    "ApiKeyMiddleware",
-    "get_api_key_from_request",
-    "get_auth_method",
-    "get_swagger_user_from_request",
-]

@@ -1,154 +0,0 @@
-from fastapi import APIRouter, Depends, HTTPException, status, Query
-from sqlalchemy.ext.asyncio import AsyncSession
-import logging
-
-from database import get_session, User
-from core.dependencies import get_current_user, require_role
-from services.api_key import ApiKeyService, api_key_to_response
-from schemas.api_key import (
-    ApiKeyCreate,
-    ApiKeyCreatedResponse,
-    ApiKeyResponse,
-    ApiKeyList,
-)
-
-logger = logging.getLogger(__name__)
-router = APIRouter(prefix="/api-keys", tags=["API Keys Management"])
-
-
-@router.post(
-    "",
-    response_model=ApiKeyCreatedResponse,
-    status_code=status.HTTP_201_CREATED,
-    dependencies=[Depends(require_role("admin", "super_admin"))],
-)
-async def create_api_key(
-    data: ApiKeyCreate,
-    session: AsyncSession = Depends(get_session),
-    user: User = Depends(get_current_user),
-):
-    service = ApiKeyService(session)
-
-    api_key_obj, api_key_plain = await service.create_api_key(
-        name=data.name,
-        description=data.description,
-        created_by=user.email,
-        user_id=user.id,
-        expires_in_days=data.expires_in_days,
-        rate_limit_per_minute=data.rate_limit_per_minute,
-        allowed_endpoints=data.allowed_endpoints,
-    )
-
-    logger.info(f" Clé API créée par {user.email}: {data.name}")
-
-    response_data = api_key_to_response(api_key_obj)
-    response_data["api_key"] = api_key_plain
-
-    return ApiKeyCreatedResponse(**response_data)
-
-
-@router.get("", response_model=ApiKeyList)
-async def list_api_keys(
-    include_revoked: bool = Query(False, description="Inclure les clés révoquées"),
-    session: AsyncSession = Depends(get_session),
-    user: User = Depends(get_current_user),
-):
-    service = ApiKeyService(session)
-
-    user_id = None if user.role in ["admin", "super_admin"] else user.id
-
-    keys = await service.list_api_keys(include_revoked=include_revoked, user_id=user_id)
-
-    items = [ApiKeyResponse(**api_key_to_response(k)) for k in keys]
-
-    return ApiKeyList(total=len(items), items=items)
-
-
-@router.get("/{key_id}", response_model=ApiKeyResponse)
-async def get_api_key(
-    key_id: str,
-    session: AsyncSession = Depends(get_session),
-    user: User = Depends(get_current_user),
-):
-    """Récupérer une clé API par son ID"""
-    service = ApiKeyService(session)
-
-    api_key_obj = await service.get_by_id(key_id)
-
-    if not api_key_obj:
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"Clé API {key_id} introuvable",
-        )
-
-    if user.role not in ["admin", "super_admin"]:
-        if api_key_obj.user_id != user.id:
-            raise HTTPException(
-                status_code=status.HTTP_403_FORBIDDEN,
-                detail="Accès refusé à cette clé",
-            )
-
-    return ApiKeyResponse(**api_key_to_response(api_key_obj))
-
-
-@router.delete("/{key_id}", status_code=status.HTTP_200_OK)
-async def revoke_api_key(
-    key_id: str,
-    session: AsyncSession = Depends(get_session),
-    user: User = Depends(get_current_user),
-):
-    service = ApiKeyService(session)
-
-    api_key_obj = await service.get_by_id(key_id)
-
-    if not api_key_obj:
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"Clé API {key_id} introuvable",
-        )
-
-    if user.role not in ["admin", "super_admin"]:
-        if api_key_obj.user_id != user.id:
-            raise HTTPException(
-                status_code=status.HTTP_403_FORBIDDEN,
-                detail="Accès refusé à cette clé",
-            )
-
-    success = await service.revoke_api_key(key_id)
-
-    if not success:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Erreur lors de la révocation",
-        )
-
-    logger.info(f" Clé API révoquée par {user.email}: {api_key_obj.name}")
-
-    return {
-        "success": True,
-        "message": f"Clé API '{api_key_obj.name}' révoquée avec succès",
-    }
-
-
-@router.post("/verify", status_code=status.HTTP_200_OK)
-async def verify_api_key_endpoint(
-    api_key: str = Query(..., description="Clé API à vérifier"),
-    session: AsyncSession = Depends(get_session),
-):
-    service = ApiKeyService(session)
-
-    api_key_obj = await service.verify_api_key(api_key)
-
-    if not api_key_obj:
-        return {
-            "valid": False,
-            "message": "Clé API invalide, expirée ou révoquée",
-        }
-
-    return {
-        "valid": True,
-        "message": "Clé API valide",
-        "key_name": api_key_obj.name,
-        "rate_limit": api_key_obj.rate_limit_per_minute,
-        "expires_at": api_key_obj.expires_at,
-    }

@@ -7,7 +7,6 @@ from typing import Optional
 import uuid
 
 from database import get_session, User, RefreshToken, LoginAttempt
-from core.dependencies import get_current_user
 from security.auth import (
     hash_password,
     verify_password,

@@ -20,6 +19,7 @@ from security.auth import (
     hash_token,
 )
 from services.email_service import AuthEmailService
+from core.dependencies import get_current_user
 from config.config import settings
 import logging
 

@@ -101,7 +101,7 @@ async def check_rate_limit(
     )
     failed_attempts = result.scalars().all()
 
-    if len(failed_attempts) >= 15:
+    if len(failed_attempts) >= 5:
         return False, "Trop de tentatives échouées. Réessayez dans 15 minutes."
 
     return True, ""

@@ -286,7 +286,7 @@ async def login(
     if user:
         user.failed_login_attempts += 1
 
-        if user.failed_login_attempts >= 15:
+        if user.failed_login_attempts >= 5:
             user.locked_until = datetime.now() + timedelta(minutes=15)
             await session.commit()
             raise HTTPException(

@@ -510,7 +510,7 @@ async def logout(
     token_record.revoked_at = datetime.now()
     await session.commit()
 
-    logger.info(f" Déconnexion: {user.email}")
+    logger.info(f"👋 Déconnexion: {user.email}")
 
     return {"success": True, "message": "Déconnexion réussie"}

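The lockout threshold drops from 15 to 5 failed attempts in both the per-IP rate check and the per-account counter, with the 15-minute lock window unchanged. A compact sketch of the resulting account-side behavior (constant names here are illustrative):

    from datetime import datetime, timedelta

    MAX_FAILED_ATTEMPTS = 5          # was 15 on main
    LOCK_WINDOW = timedelta(minutes=15)


    def register_failed_login(user) -> None:
        # Mirrors the login handler: increment, then lock on the 5th failure.
        user.failed_login_attempts += 1
        if user.failed_login_attempts >= MAX_FAILED_ATTEMPTS:
            user.locked_until = datetime.now() + LOCK_WINDOW
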
@@ -1,158 +0,0 @@
-from fastapi import APIRouter, HTTPException, Query, Path
-import httpx
-import logging
-from datetime import datetime
-
-from schemas import EntrepriseSearch, EntrepriseSearchResponse
-from utils.enterprise import (
-    calculer_tva_intracommunautaire,
-    mapper_resultat_api,
-    rechercher_entreprise_api,
-)
-
-logger = logging.getLogger(__name__)
-router = APIRouter(prefix="/entreprises", tags=["Entreprises"])
-
-
-@router.get("/search", response_model=EntrepriseSearchResponse)
-async def rechercher_entreprise(
-    q: str = Query(..., min_length=2, description="Nom d'entreprise, SIREN ou SIRET"),
-    per_page: int = Query(5, ge=1, le=25, description="Nombre de résultats (max 25)"),
-):
-    try:
-        logger.info(f" Recherche entreprise: '{q}'")
-
-        api_response = await rechercher_entreprise_api(q, per_page)
-
-        resultats_api = api_response.get("results", [])
-
-        if not resultats_api:
-            logger.info(f"Aucun résultat pour: {q}")
-            return EntrepriseSearchResponse(total_results=0, results=[], query=q)
-
-        entreprises = []
-        for data in resultats_api:
-            entreprise = mapper_resultat_api(data)
-            if entreprise:
-                entreprises.append(entreprise)
-
-        logger.info(f" {len(entreprises)} résultat(s) trouvé(s)")
-
-        return EntrepriseSearchResponse(
-            total_results=len(entreprises), results=entreprises, query=q
-        )
-
-    except HTTPException:
-        raise
-
-    except Exception as e:
-        logger.error(f"Erreur recherche entreprise: {e}", exc_info=True)
-        raise HTTPException(
-            status_code=500, detail=f"Erreur lors de la recherche: {str(e)}"
-        )
-
-
-@router.get("/siren/{siren}", response_model=EntrepriseSearch)
-async def lire_entreprise_par_siren(
-    siren: str = Path(
-        ...,
-        min_length=9,
-        max_length=9,
-        pattern=r"^\d{9}$",
-        description="Numéro SIREN (9 chiffres)",
-    ),
-):
-    try:
-        logger.info(f"Lecture entreprise SIREN: {siren}")
-
-        api_response = await rechercher_entreprise_api(siren, per_page=1)
-
-        resultats = api_response.get("results", [])
-
-        if not resultats:
-            raise HTTPException(
-                status_code=404,
-                detail=f"Aucune entreprise trouvée pour le SIREN {siren}",
-            )
-
-        entreprise_data = resultats[0]
-
-        if entreprise_data.get("siren") != siren:
-            raise HTTPException(status_code=404, detail=f"SIREN {siren} introuvable")
-
-        entreprise = mapper_resultat_api(entreprise_data)
-
-        if not entreprise:
-            raise HTTPException(
-                status_code=500,
-                detail="Erreur lors du traitement des données entreprise",
-            )
-
-        if not entreprise.is_active:
-            logger.warning(f" Entreprise CESSÉE: {siren}")
-
-        return entreprise
-
-    except HTTPException:
-        raise
-
-    except Exception as e:
-        logger.error(f"Erreur lecture SIREN {siren}: {e}", exc_info=True)
-        raise HTTPException(
-            status_code=500, detail=f"Erreur lors de la récupération: {str(e)}"
-        )
-
-
-@router.get("/tva/{siren}")
-async def calculer_tva(
-    siren: str = Path(
-        ...,
-        min_length=9,
-        max_length=9,
-        pattern=r"^\d{9}$",
-        description="Numéro SIREN (9 chiffres)",
-    ),
-):
-    tva_number = calculer_tva_intracommunautaire(siren)
-
-    if not tva_number:
-        raise HTTPException(status_code=400, detail=f"SIREN invalide: {siren}")
-
-    return {
-        "siren": siren,
-        "vat_number": tva_number,
-        "format": "FR + Clé (2 chiffres) + SIREN (9 chiffres)",
-    }
-
-
-@router.get("/health")
-async def health_check_api_sirene():
-    try:
-        async with httpx.AsyncClient(timeout=5.0) as client:
-            response = await client.get(
-                "https://recherche-entreprises.api.gouv.fr/search",
-                params={"q": "test", "per_page": 1},
-            )
-
-        if response.status_code == 200:
-            return {
-                "status": "healthy",
-                "api_sirene": "disponible",
-                "response_time_ms": response.elapsed.total_seconds() * 1000,
-                "timestamp": datetime.now().isoformat(),
-            }
-        else:
-            return {
-                "status": "degraded",
-                "api_sirene": f"statut {response.status_code}",
-                "timestamp": datetime.now().isoformat(),
-            }
-
-    except Exception as e:
-        logger.error(f"Health check failed: {e}")
-        return {
-            "status": "unhealthy",
-            "api_sirene": "indisponible",
-            "error": str(e),
-            "timestamp": datetime.now().isoformat(),
-        }

@ -1,11 +1,12 @@
|
|||
from fastapi import APIRouter, Depends, HTTPException, Query, Request
|
||||
from fastapi.responses import FileResponse
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func
|
||||
from sqlalchemy import false, select, func, or_, and_, true
|
||||
from sqlalchemy.orm import selectinload
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta
|
||||
from pydantic import BaseModel, EmailStr
|
||||
import logging
|
||||
from core.dependencies import get_current_user
|
||||
from data.data import templates_signature_email
|
||||
from email_queue import email_queue
|
||||
from database import UniversignSignerStatus, UniversignTransactionStatus, get_session
|
||||
|
|
@ -14,41 +15,83 @@ from database import (
|
|||
UniversignSigner,
|
||||
UniversignSyncLog,
|
||||
LocalDocumentStatus,
|
||||
SageDocumentType,
|
||||
)
|
||||
import os
|
||||
from pathlib import Path
|
||||
import json
|
||||
from services.universign_document import UniversignDocumentService
|
||||
from services.universign_sync import UniversignSyncService
|
||||
from config.config import settings
|
||||
from utils.generic_functions import normaliser_type_doc
|
||||
from utils.universign_status_mapping import get_status_message, map_universign_to_local
|
||||
|
||||
from database.models.email import EmailLog
|
||||
from database.enum.status import StatutEmail
|
||||
from schemas import (
|
||||
SyncStatsResponse,
|
||||
CreateSignatureRequest,
|
||||
TransactionResponse,
|
||||
)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter(
|
||||
prefix="/universign",
|
||||
tags=["Universign"],
|
||||
)
|
||||
router = APIRouter(prefix="/universign", tags=["Universign"])
|
||||
|
||||
sync_service = UniversignSyncService(
|
||||
api_url=settings.universign_api_url, api_key=settings.universign_api_key
|
||||
)
|
||||
|
||||
|
||||
class CreateSignatureRequest(BaseModel):
|
||||
"""Demande de création d'une signature"""
|
||||
|
||||
sage_document_id: str
|
||||
sage_document_type: SageDocumentType
|
||||
signer_email: EmailStr
|
||||
signer_name: str
|
||||
document_name: Optional[str] = None
|
||||
|
||||
|
||||
class TransactionResponse(BaseModel):
|
||||
"""Réponse détaillée d'une transaction"""
|
||||
|
||||
id: str
|
||||
transaction_id: str
|
||||
sage_document_id: str
|
||||
sage_document_type: str
|
||||
universign_status: str
|
||||
local_status: str
|
||||
local_status_label: str
|
||||
signer_url: Optional[str]
|
||||
document_url: Optional[str]
|
||||
created_at: datetime
|
||||
sent_at: Optional[datetime]
|
||||
signed_at: Optional[datetime]
|
||||
last_synced_at: Optional[datetime]
|
||||
needs_sync: bool
|
||||
signers: List[dict]
|
||||
|
||||
signed_document_available: bool = False
|
||||
signed_document_downloaded_at: Optional[datetime] = None
|
||||
signed_document_size_kb: Optional[float] = None
|
||||
|
||||
|
||||
class SyncStatsResponse(BaseModel):
|
||||
"""Statistiques de synchronisation"""
|
||||
|
||||
total_transactions: int
|
||||
pending_sync: int
|
||||
signed: int
|
||||
in_progress: int
|
||||
refused: int
|
||||
expired: int
|
||||
last_sync_at: Optional[datetime]
|
||||
|
||||
|
||||
@router.post("/signatures/create", response_model=TransactionResponse)
|
||||
async def create_signature(
|
||||
request: CreateSignatureRequest, session: AsyncSession = Depends(get_session)
|
||||
):
|
||||
try:
|
||||
# === VÉRIFICATION DOUBLON RENFORCÉE ===
|
||||
logger.info(
|
||||
f"Vérification doublon pour: {request.sage_document_id} "
|
||||
f"🔍 Vérification doublon pour: {request.sage_document_id} "
|
||||
f"(type: {request.sage_document_type.name})"
|
||||
)
|
||||
|
||||
|
|
@ -64,6 +107,7 @@ async def create_signature(
|
|||
f"{len(all_existing)} transaction(s) existante(s) trouvée(s)"
|
||||
)
|
||||
|
||||
# Filtrer les transactions non-finales
|
||||
active_txs = [
|
||||
tx
|
||||
for tx in all_existing
|
||||
|
|
@ -93,7 +137,8 @@ async def create_signature(
|
|||
"Toutes les transactions existantes sont finales, création autorisée"
|
||||
)
|
||||
|
||||
logger.info(f"Génération PDF: {request.sage_document_id}")
|
||||
# Génération PDF
|
||||
logger.info(f"📄 Génération PDF: {request.sage_document_id}")
|
||||
pdf_bytes = email_queue._generate_pdf(
|
||||
request.sage_document_id, normaliser_type_doc(request.sage_document_type)
|
||||
)
|
||||
|
|
@ -103,12 +148,13 @@ async def create_signature(
|
|||
|
||||
logger.info(f"PDF généré: {len(pdf_bytes)} octets")
|
||||
|
||||
# === CRÉATION TRANSACTION UNIVERSIGN ===
|
||||
import requests
|
||||
import uuid
|
||||
|
||||
auth = (settings.universign_api_key, "")
|
||||
|
||||
logger.info("Création transaction Universign...")
|
||||
logger.info("🔄 Création transaction Universign...")
|
||||
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions",
|
||||
|
|
@ -128,7 +174,8 @@ async def create_signature(
|
|||
universign_tx_id = resp.json().get("id")
|
||||
logger.info(f"Transaction Universign créée: {universign_tx_id}")
|
||||
|
||||
logger.info("Upload PDF...")
|
||||
# Upload PDF
|
||||
logger.info("📤 Upload PDF...")
|
||||
files = {
|
||||
"file": (f"{request.sage_document_id}.pdf", pdf_bytes, "application/pdf")
|
||||
}
|
||||
|
|
@ -143,7 +190,8 @@ async def create_signature(
|
|||
file_id = resp.json().get("id")
|
||||
logger.info(f"PDF uploadé: {file_id}")
|
||||
|
||||
logger.info("Attachement document...")
|
||||
# Attachement document
|
||||
logger.info("🔗 Attachement document...")
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents",
|
||||
auth=auth,
|
||||
|
|
@ -156,7 +204,8 @@ async def create_signature(
|
|||
|
||||
document_id = resp.json().get("id")
|
||||
|
||||
logger.info("Création champ signature...")
|
||||
# Création champ signature
|
||||
logger.info("✍️ Création champ signature...")
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents/{document_id}/fields",
|
||||
auth=auth,
|
||||
|
|
@ -169,7 +218,8 @@ async def create_signature(
|
|||
|
||||
field_id = resp.json().get("id")
|
||||
|
||||
logger.info(f"Liaison signataire: {request.signer_email}")
|
||||
# Liaison signataire
|
||||
logger.info(f"👤 Liaison signataire: {request.signer_email}")
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions/{universign_tx_id}/signatures",
|
||||
auth=auth,
|
||||
|
|
@ -180,7 +230,8 @@ async def create_signature(
|
|||
if resp.status_code not in [200, 201]:
|
||||
raise HTTPException(500, "Erreur liaison signataire")
|
||||
|
||||
logger.info("Démarrage transaction...")
|
||||
# Démarrage transaction
|
||||
logger.info("🚀 Démarrage transaction...")
|
||||
resp = requests.post(
|
||||
f"{settings.universign_api_url}/transactions/{universign_tx_id}/start",
|
||||
auth=auth,
|
||||
|
|
@ -192,6 +243,7 @@ async def create_signature(
|
|||
|
||||
final_data = resp.json()
|
||||
|
||||
# Extraction URL de signature
|
||||
signer_url = ""
|
||||
if final_data.get("actions"):
|
||||
for action in final_data["actions"]:
|
||||
|
|
@@ -204,11 +256,12 @@ async def create_signature(

            logger.info("URL de signature obtenue")

        # === ENREGISTREMENT LOCAL ===
        local_id = str(uuid.uuid4())

        transaction = UniversignTransaction(
            id=local_id,
-            transaction_id=universign_tx_id,
+            transaction_id=universign_tx_id,  # Utiliser l'ID Universign, ne jamais le changer
            sage_document_id=request.sage_document_id,
            sage_document_type=request.sage_document_type,
            universign_status=UniversignTransactionStatus.STARTED,
@@ -238,9 +291,10 @@ async def create_signature(

        await session.commit()

        logger.info(
-            f"Transaction sauvegardée: {local_id} (Universign: {universign_tx_id})"
+            f"💾 Transaction sauvegardée: {local_id} (Universign: {universign_tx_id})"
        )

        # === ENVOI EMAIL AVEC TEMPLATE ===
        template = templates_signature_email["demande_signature"]

        type_labels = {
@@ -295,6 +349,7 @@ async def create_signature(

        email_queue.enqueue(email_log.id)

+        # === MISE À JOUR STATUT SAGE (Confirmé = 1) ===
        try:
            from sage_client import sage_client
@@ -309,6 +364,7 @@ async def create_signature(

        except Exception as e:
            logger.warning(f"Impossible de mettre à jour le statut Sage: {e}")

+        # === RÉPONSE ===
        return TransactionResponse(
            id=transaction.id,
            transaction_id=transaction.transaction_id,
@@ -388,6 +444,7 @@ async def list_transactions(

                }
                for s in tx.signers
            ],
+            # ✅ NOUVEAUX CHAMPS
            signed_document_available=bool(
                tx.signed_document_path and Path(tx.signed_document_path).exists()
            ),
@@ -443,6 +500,7 @@ async def get_transaction(

            }
            for s in tx.signers
        ],
+        # ✅ NOUVEAUX CHAMPS
        signed_document_available=bool(
            tx.signed_document_path and Path(tx.signed_document_path).exists()
        ),
@@ -497,20 +555,30 @@ async def sync_all_transactions(

    return {"success": True, "stats": stats, "timestamp": datetime.now().isoformat()}


-@router.post("/webhook", dependencies=[])
-@router.post("/webhook/", dependencies=[])
+@router.post("/webhook")
+@router.post("/webhook/")
async def webhook_universign(
    request: Request, session: AsyncSession = Depends(get_session)
):
    """
    CORRECTION : Extraction correcte du transaction_id selon la structure réelle d'Universign
    """
    try:
        payload = await request.json()

-        logger.info(f"Webhook Universign reçu - Type: {payload.get('type', 'unknown')}")
+        # 📋 LOG COMPLET du payload pour débogage
+        logger.info(
+            f"📥 Webhook Universign reçu - Type: {payload.get('type', 'unknown')}"
+        )
+        logger.debug(f"Payload complet: {json.dumps(payload, indent=2)}")

        # EXTRACTION CORRECTE DU TRANSACTION_ID
        transaction_id = None

        # 🔍 Structure 1 : Événements avec payload imbriqué (la plus courante)
        # Exemple : transaction.lifecycle.created, transaction.lifecycle.started, etc.
        if payload.get("type", "").startswith("transaction.") and "payload" in payload:
            # Le transaction_id est dans payload.object.id
            nested_object = payload.get("payload", {}).get("object", {})
            if nested_object.get("object") == "transaction":
                transaction_id = nested_object.get("id")
@@ -518,7 +586,9 @@ async def webhook_universign(

                    f"Transaction ID extrait de payload.object.id: {transaction_id}"
                )

+        # 🔍 Structure 2 : Action événements (action.opened, action.completed)
        elif payload.get("type", "").startswith("action."):
+            # Le transaction_id est directement dans payload.object.transaction_id
            transaction_id = (
                payload.get("payload", {}).get("object", {}).get("transaction_id")
            )
@@ -526,14 +596,17 @@ async def webhook_universign(

                f"Transaction ID extrait de payload.object.transaction_id: {transaction_id}"
            )

+        # 🔍 Structure 3 : Transaction directe (fallback)
        elif payload.get("object") == "transaction":
            transaction_id = payload.get("id")
            logger.info(f"Transaction ID extrait direct: {transaction_id}")

+        # 🔍 Structure 4 : Ancien format (pour rétro-compatibilité)
        elif "transaction" in payload:
            transaction_id = payload.get("transaction", {}).get("id")
            logger.info(f"Transaction ID extrait de transaction.id: {transaction_id}")

+        # Échec d'extraction
        if not transaction_id:
            logger.error(
                f"Transaction ID introuvable dans webhook\n"
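For reference, the four payload shapes the if/elif chain above expects look roughly like this. Only the keys the code actually reads are real; the event names and ids are illustrative values:

    # Structure 1 — transaction.* event with a nested object
    {"type": "transaction.lifecycle.started",
     "payload": {"object": {"object": "transaction", "id": "tx_123"}}}

    # Structure 2 — action.* event; the id sits under transaction_id
    {"type": "action.completed",
     "payload": {"object": {"transaction_id": "tx_123"}}}

    # Structure 3 — bare transaction object (fallback)
    {"object": "transaction", "id": "tx_123"}

    # Structure 4 — legacy format, kept for backward compatibility
    {"transaction": {"id": "tx_123"}}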
@@ -548,8 +621,9 @@ async def webhook_universign(

                "event_id": payload.get("id"),
            }, 400

-        logger.info(f"Transaction ID identifié: {transaction_id}")
+        logger.info(f"🎯 Transaction ID identifié: {transaction_id}")

        # Vérifier si la transaction existe localement
        query = select(UniversignTransaction).where(
            UniversignTransaction.transaction_id == transaction_id
        )
@@ -569,6 +643,7 @@ async def webhook_universign(

                "event_type": payload.get("type"),
            }

+        # Traiter le webhook
        success, error = await sync_service.process_webhook(
            session, payload, transaction_id
        )
@@ -581,6 +656,7 @@ async def webhook_universign(

                "transaction_id": transaction_id,
            }, 500

+        # Succès
        logger.info(
            f"Webhook traité avec succès\n"
            f"Transaction: {transaction_id}\n"
@@ -597,7 +673,7 @@ async def webhook_universign(

        }

    except Exception as e:
-        logger.error(f"Erreur critique webhook: {e}", exc_info=True)
+        logger.error(f"💥 Erreur critique webhook: {e}", exc_info=True)
        return {"status": "error", "message": str(e)}, 500
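To exercise the handler locally, replay one of the shapes above against the route. The host, port, and /universign prefix are assumptions about how this router is mounted; adjust to your deployment:

    import requests

    payload = {
        "type": "transaction.lifecycle.started",
        "payload": {"object": {"object": "transaction", "id": "tx_123"}},
    }
    resp = requests.post(
        "http://localhost:8000/universign/webhook", json=payload, timeout=10
    )
    print(resp.status_code, resp.json())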
@@ -605,14 +681,17 @@ async def webhook_universign(

async def get_sync_stats(session: AsyncSession = Depends(get_session)):
    """Statistiques globales de synchronisation"""

+    # Total
    total_query = select(func.count(UniversignTransaction.id))
    total = (await session.execute(total_query)).scalar()

+    # En attente de sync
    pending_query = select(func.count(UniversignTransaction.id)).where(
        UniversignTransaction.needs_sync
    )
    pending = (await session.execute(pending_query)).scalar()

+    # Par statut
    signed_query = select(func.count(UniversignTransaction.id)).where(
        UniversignTransaction.local_status == LocalDocumentStatus.SIGNED
    )
@@ -633,6 +712,7 @@ async def get_sync_stats(session: AsyncSession = Depends(get_session)):

    )
    expired = (await session.execute(expired_query)).scalar()

+    # Dernière sync
    last_sync_query = select(func.max(UniversignTransaction.last_synced_at))
    last_sync = (await session.execute(last_sync_query)).scalar()
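Each per-status figure here costs one COUNT round trip. The same numbers can come back in a single query with a GROUP BY, the pattern the /admin/diagnostic route further down also uses; a sketch with the models already imported in this file:

    from sqlalchemy import func, select

    async def compter_par_statut(session):
        # One grouped query instead of one COUNT per status
        rows = await session.execute(
            select(
                UniversignTransaction.local_status,
                func.count(UniversignTransaction.id),
            ).group_by(UniversignTransaction.local_status)
        )
        return {status: count for status, count in rows.all()}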
@@ -653,6 +733,7 @@ async def get_transaction_logs(

    limit: int = Query(50, le=500),
    session: AsyncSession = Depends(get_session),
):
+    # Trouver la transaction
    tx_query = select(UniversignTransaction).where(
        UniversignTransaction.transaction_id == transaction_id
    )
@@ -662,6 +743,7 @@ async def get_transaction_logs(

    if not tx:
        raise HTTPException(404, "Transaction introuvable")

+    # Logs
    logs_query = (
        select(UniversignSyncLog)
        .where(UniversignSyncLog.transaction_id == tx.id)
@@ -690,6 +772,9 @@ async def get_transaction_logs(

    }


+# Ajouter ces routes dans universign.py


@router.get("/documents/{sage_document_id}/signatures")
async def get_signatures_for_document(
    sage_document_id: str,
@@ -731,6 +816,10 @@ async def cleanup_duplicate_signatures(

    ),
    session: AsyncSession = Depends(get_session),
):
+    """
+    Supprime les doublons de signatures pour un document.
+    Garde une seule transaction (la plus récente ou ancienne selon le paramètre).
+    """
    query = (
        select(UniversignTransaction)
        .where(UniversignTransaction.sage_document_id == sage_document_id)
@@ -752,6 +841,7 @@ async def cleanup_duplicate_signatures(

        "deleted_count": 0,
    }

+    # Garder la première (selon l'ordre), supprimer les autres
    to_keep = transactions[0]
    to_delete = transactions[1:]
@@ -811,8 +901,13 @@ async def delete_transaction(

async def cleanup_all_duplicates(
    session: AsyncSession = Depends(get_session),
):
+    """
+    Nettoie tous les doublons dans la base.
+    Pour chaque document avec plusieurs transactions, garde la plus récente non-erreur ou la plus récente.
+    """
    from sqlalchemy import func

+    # Trouver les documents avec plusieurs transactions
    subquery = (
        select(
            UniversignTransaction.sage_document_id,
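Driving the global cleanup from a script is a one-liner. The path comes from the recommendation string emitted by /admin/diagnostic further down; the base URL is an assumption:

    import requests

    res = requests.post(
        "http://localhost:8000/universign/cleanup/all-duplicates", timeout=120
    ).json()
    print(res)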
@@ -830,6 +925,7 @@ async def cleanup_all_duplicates(

    cleanup_details = []

    for doc_id in duplicate_docs:
+        # Récupérer toutes les transactions pour ce document
        tx_query = (
            select(UniversignTransaction)
            .where(UniversignTransaction.sage_document_id == doc_id)
@@ -838,6 +934,7 @@ async def cleanup_all_duplicates(

        tx_result = await session.execute(tx_query)
        transactions = tx_result.scalars().all()

+        # Priorité: SIGNE > EN_COURS > EN_ATTENTE > autres
        priority = {"SIGNE": 0, "EN_COURS": 1, "EN_ATTENTE": 2}

        def sort_key(tx):
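The diff cuts off inside sort_key. Given the priority map just above, a plausible body (hypothetical; the real implementation is not shown in the diff) ranks by status first and recency second:

    def sort_key(tx):
        # Unknown statuses sort last; among equals, newest first
        rank = priority.get(tx.local_status.value, 99)
        return (rank, -tx.created_at.timestamp())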
@@ -875,11 +972,115 @@ async def cleanup_all_duplicates(

    }


+@router.get("/admin/diagnostic", tags=["Admin"])
+async def diagnostic_complet(session: AsyncSession = Depends(get_session)):
+    """
+    Diagnostic complet de l'état des transactions Universign
+    """
+    try:
+        # Statistiques générales
+        total_query = select(func.count(UniversignTransaction.id))
+        total = (await session.execute(total_query)).scalar()
+
+        # Par statut local
+        statuts_query = select(
+            UniversignTransaction.local_status, func.count(UniversignTransaction.id)
+        ).group_by(UniversignTransaction.local_status)
+        statuts_result = await session.execute(statuts_query)
+        statuts = {status.value: count for status, count in statuts_result.all()}
+
+        # Transactions sans sync récente
+        date_limite = datetime.now() - timedelta(hours=1)
+        sans_sync_query = select(func.count(UniversignTransaction.id)).where(
+            and_(
+                UniversignTransaction.needs_sync,
+                or_(
+                    UniversignTransaction.last_synced_at < date_limite,
+                    UniversignTransaction.last_synced_at.is_(None),
+                ),
+            )
+        )
+        sans_sync = (await session.execute(sans_sync_query)).scalar()
+
+        # Doublons potentiels
+        doublons_query = (
+            select(
+                UniversignTransaction.sage_document_id,
+                func.count(UniversignTransaction.id).label("count"),
+            )
+            .group_by(UniversignTransaction.sage_document_id)
+            .having(func.count(UniversignTransaction.id) > 1)
+        )
+        doublons_result = await session.execute(doublons_query)
+        doublons = doublons_result.fetchall()
+
+        # Transactions avec erreurs de sync
+        erreurs_query = select(func.count(UniversignTransaction.id)).where(
+            UniversignTransaction.sync_error.isnot(None)
+        )
+        erreurs = (await session.execute(erreurs_query)).scalar()
+
+        # Transactions sans webhook reçu
+        # FIX: the committed code used `not UniversignTransaction.webhook_received`,
+        # which Python evaluates eagerly and never reaches SQL; use .is_(False)
+        sans_webhook_query = select(func.count(UniversignTransaction.id)).where(
+            and_(
+                UniversignTransaction.webhook_received.is_(False),
+                UniversignTransaction.local_status != LocalDocumentStatus.PENDING,
+            )
+        )
+        sans_webhook = (await session.execute(sans_webhook_query)).scalar()
+
+        diagnostic = {
+            "timestamp": datetime.now().isoformat(),
+            "total_transactions": total,
+            "repartition_statuts": statuts,
+            "problemes_detectes": {
+                "sans_sync_recente": sans_sync,
+                "doublons_possibles": len(doublons),
+                "erreurs_sync": erreurs,
+                "sans_webhook": sans_webhook,
+            },
+            "documents_avec_doublons": [
+                {"document_id": doc_id, "nombre_transactions": count}
+                for doc_id, count in doublons
+            ],
+            "recommandations": [],
+        }
+
+        # Recommandations
+        if sans_sync > 0:
+            diagnostic["recommandations"].append(
+                f"🔄 {sans_sync} transaction(s) à synchroniser. "
+                f"Utilisez POST /universign/sync/all"
+            )
+
+        if len(doublons) > 0:
+            diagnostic["recommandations"].append(
+                f"{len(doublons)} document(s) avec doublons. "
+                f"Utilisez POST /universign/cleanup/all-duplicates"
+            )
+
+        if erreurs > 0:
+            diagnostic["recommandations"].append(
+                f"{erreurs} transaction(s) en erreur. "
+                f"Vérifiez les logs avec GET /universign/transactions?status=ERREUR"
+            )
+
+        return diagnostic
+
+    except Exception as e:
+        logger.error(f"Erreur diagnostic: {e}")
+        raise HTTPException(500, str(e))


@router.post("/admin/force-sync-all", tags=["Admin"])
async def forcer_sync_toutes_transactions(
    max_transactions: int = Query(200, le=500),
    session: AsyncSession = Depends(get_session),
):
    """
    Force la synchronisation de TOUTES les transactions (même finales)
    À utiliser pour réparer les incohérences
    """
    try:
        query = (
            select(UniversignTransaction)
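A quick smoke test for the new diagnostic route. The response keys are the ones built above; the base URL is an assumption:

    import requests

    data = requests.get(
        "http://localhost:8000/universign/admin/diagnostic", timeout=30
    ).json()
    print(data["total_transactions"], data["problemes_detectes"])
    for reco in data["recommandations"]:
        print("-", reco)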
@@ -904,7 +1105,7 @@ async def forcer_sync_toutes_transactions(

            previous_status = transaction.local_status.value

            logger.info(
-                f"Force sync: {transaction.transaction_id} (statut: {previous_status})"
+                f"🔄 Force sync: {transaction.transaction_id} (statut: {previous_status})"
            )

            success, error = await sync_service.sync_transaction(
@@ -953,6 +1154,9 @@ async def forcer_sync_toutes_transactions(

async def reparer_transaction(
    transaction_id: str, session: AsyncSession = Depends(get_session)
):
+    """
+    Répare une transaction spécifique en la re-synchronisant depuis Universign
+    """
    try:
        query = select(UniversignTransaction).where(
            UniversignTransaction.transaction_id == transaction_id
@@ -970,6 +1174,7 @@ async def reparer_transaction(

            else None
        )

+        # Force sync
        success, error = await sync_service.sync_transaction(
            session, transaction, force=True
        )
@@ -1006,7 +1211,11 @@ async def reparer_transaction(

async def trouver_transactions_inconsistantes(
    session: AsyncSession = Depends(get_session),
):
+    """
+    Trouve les transactions dont le statut local ne correspond pas au statut Universign
+    """
    try:
+        # Toutes les transactions non-finales
        query = select(UniversignTransaction).where(
            UniversignTransaction.local_status.in_(
                [LocalDocumentStatus.PENDING, LocalDocumentStatus.IN_PROGRESS]
@@ -1020,6 +1229,7 @@ async def trouver_transactions_inconsistantes(

        for tx in transactions:
            try:
                # Récupérer le statut depuis Universign
                universign_data = sync_service.fetch_transaction_status(
                    tx.transaction_id
                )
@@ -1081,6 +1291,171 @@ async def trouver_transactions_inconsistantes(

        raise HTTPException(500, str(e))


+@router.post("/admin/nettoyer-transactions-erreur", tags=["Admin"])
+async def nettoyer_transactions_erreur(
+    age_jours: int = Query(
+        7, description="Supprimer les transactions en erreur de plus de X jours"
+    ),
+    session: AsyncSession = Depends(get_session),
+):
+    """
+    Nettoie les transactions en erreur anciennes
+    """
+    try:
+        date_limite = datetime.now() - timedelta(days=age_jours)
+
+        query = select(UniversignTransaction).where(
+            and_(
+                UniversignTransaction.local_status == LocalDocumentStatus.ERROR,
+                UniversignTransaction.created_at < date_limite,
+            )
+        )
+
+        result = await session.execute(query)
+        transactions = result.scalars().all()
+
+        supprimees = []
+        for tx in transactions:
+            supprimees.append(
+                {
+                    "transaction_id": tx.transaction_id,
+                    "document_id": tx.sage_document_id,
+                    "date_creation": tx.created_at.isoformat(),
+                    "erreur": tx.sync_error,
+                }
+            )
+            await session.delete(tx)
+
+        await session.commit()
+
+        return {
+            "success": True,
+            "transactions_supprimees": len(supprimees),
+            "age_limite_jours": age_jours,
+            "details": supprimees,
+        }
+
+    except Exception as e:
+        logger.error(f"Erreur nettoyage: {e}")
+        raise HTTPException(500, str(e))
+
+
+@router.get("/debug/webhook-payload/{transaction_id}", tags=["Debug"])
+async def voir_dernier_webhook(
+    transaction_id: str, session: AsyncSession = Depends(get_session)
+):
+    """
+    Affiche le dernier payload webhook reçu pour une transaction
+    """
+    try:
+        query = select(UniversignTransaction).where(
+            UniversignTransaction.transaction_id == transaction_id
+        )
+        result = await session.execute(query)
+        tx = result.scalar_one_or_none()
+
+        if not tx:
+            raise HTTPException(404, "Transaction introuvable")
+
+        # Récupérer le dernier log de type webhook
+        logs_query = (
+            select(UniversignSyncLog)
+            .where(
+                and_(
+                    UniversignSyncLog.transaction_id == tx.id,
+                    UniversignSyncLog.sync_type.like("webhook:%"),
+                )
+            )
+            .order_by(UniversignSyncLog.sync_timestamp.desc())
+            .limit(1)
+        )
+
+        logs_result = await session.execute(logs_query)
+        last_webhook_log = logs_result.scalar_one_or_none()
+
+        if not last_webhook_log:
+            return {
+                "transaction_id": transaction_id,
+                "webhook_recu": tx.webhook_received,
+                "dernier_payload": None,
+                "message": "Aucun webhook reçu pour cette transaction",
+            }
+
+        return {
+            "transaction_id": transaction_id,
+            "webhook_recu": tx.webhook_received,
+            "dernier_webhook": {
+                "timestamp": last_webhook_log.sync_timestamp.isoformat(),
+                "type": last_webhook_log.sync_type,
+                "success": last_webhook_log.success,
+                "payload": json.loads(last_webhook_log.changes_detected)
+                if last_webhook_log.changes_detected
+                else None,
+            },
+        }
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Erreur debug webhook: {e}")
+        raise HTTPException(500, str(e))
+
+
+@router.get(
+    "/transactions/{transaction_id}/document/download", tags=["Documents Signés"]
+)
+async def telecharger_document_signe(
+    transaction_id: str, session: AsyncSession = Depends(get_session)
+):
+    """
+    Télécharge le document signé localement stocké
+    """
+    try:
+        query = select(UniversignTransaction).where(
+            UniversignTransaction.transaction_id == transaction_id
+        )
+        result = await session.execute(query)
+        transaction = result.scalar_one_or_none()
+
+        if not transaction:
+            raise HTTPException(404, f"Transaction {transaction_id} introuvable")
+
+        if not transaction.signed_document_path:
+            raise HTTPException(
+                404,
+                "Document signé non disponible localement. "
+                "Utilisez POST /admin/download-missing-documents pour le récupérer.",
+            )
+
+        file_path = Path(transaction.signed_document_path)
+
+        if not file_path.exists():
+            # Document perdu, on peut tenter de le retélécharger
+            logger.warning(f"Fichier perdu : {file_path}")
+            raise HTTPException(
+                404,
+                "Fichier introuvable sur le serveur. "
+                "Utilisez POST /admin/download-missing-documents pour le récupérer.",
+            )
+
+        # Génération du nom de fichier pour le téléchargement
+        download_name = (
+            f"{transaction.sage_document_id}_"
+            f"{transaction.sage_document_type.name}_"
+            f"signe.pdf"
+        )
+
+        return FileResponse(
+            path=str(file_path), media_type="application/pdf", filename=download_name
+        )
+
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f"Erreur téléchargement document : {e}", exc_info=True)
+        raise HTTPException(500, str(e))


@router.get("/transactions/{transaction_id}/document/info", tags=["Documents Signés"])
async def info_document_signe(
    transaction_id: str, session: AsyncSession = Depends(get_session)
@@ -1128,3 +1503,113 @@ async def info_document_signe(

    except Exception as e:
        logger.error(f"Erreur info document : {e}")
        raise HTTPException(500, str(e))


+@router.post("/admin/download-missing-documents", tags=["Admin"])
+async def telecharger_documents_manquants(
+    force_redownload: bool = Query(
+        False, description="Forcer le retéléchargement même si déjà présent"
+    ),
+    session: AsyncSession = Depends(get_session),
+):
+    """
+    Télécharge tous les documents signés manquants pour les transactions SIGNE
+    """
+    try:
+        # Transactions signées sans document local
+        query = select(UniversignTransaction).where(
+            UniversignTransaction.local_status == LocalDocumentStatus.SIGNED,
+            or_(
+                UniversignTransaction.signed_document_path.is_(None),
+                force_redownload,
+            ),
+        )
+
+        result = await session.execute(query)
+        transactions = result.scalars().all()
+
+        logger.info(f"📥 {len(transactions)} document(s) à télécharger")
+
+        document_service = UniversignDocumentService(
+            api_key=settings.universign_api_key, timeout=60
+        )
+
+        results = {"total": len(transactions), "success": 0, "failed": 0, "details": []}
+
+        for transaction in transactions:
+            try:
+                (
+                    success,
+                    error,
+                ) = await document_service.download_and_store_signed_document(
+                    session=session, transaction=transaction, force=force_redownload
+                )
+
+                if success:
+                    results["success"] += 1
+                    results["details"].append(
+                        {
+                            "transaction_id": transaction.transaction_id,
+                            "sage_document_id": transaction.sage_document_id,
+                            "status": "success",
+                        }
+                    )
+                else:
+                    results["failed"] += 1
+                    results["details"].append(
+                        {
+                            "transaction_id": transaction.transaction_id,
+                            "sage_document_id": transaction.sage_document_id,
+                            "status": "failed",
+                            "error": error,
+                        }
+                    )
+
+            except Exception as e:
+                logger.error(f"Erreur téléchargement {transaction.transaction_id}: {e}")
+                results["failed"] += 1
+                results["details"].append(
+                    {"transaction_id": transaction.transaction_id, "error": str(e)}
+                )
+
+        await session.commit()
+
+        logger.info(
+            f"Téléchargement terminé : {results['success']}/{results['total']} réussis"
+        )
+
+        return results
+
+    except Exception as e:
+        logger.error(f"Erreur téléchargement batch : {e}", exc_info=True)
+        raise HTTPException(500, str(e))
+
+
+@router.post("/admin/cleanup-old-documents", tags=["Admin"])
+async def nettoyer_anciens_documents(
+    days_to_keep: int = Query(
+        90, ge=7, le=365, description="Nombre de jours à conserver"
+    ),
+):
+    """
+    Supprime les documents signés de plus de X jours (par défaut 90)
+    """
+    try:
+        document_service = UniversignDocumentService(
+            api_key=settings.universign_api_key
+        )
+
+        deleted, size_freed_mb = await document_service.cleanup_old_documents(
+            days_to_keep=days_to_keep
+        )
+
+        return {
+            "success": True,
+            "files_deleted": deleted,
+            "space_freed_mb": size_freed_mb,
+            "days_kept": days_to_keep,
+        }
+
+    except Exception as e:
+        logger.error(f"Erreur nettoyage : {e}")
+        raise HTTPException(500, str(e))
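The admin and document routes above compose into a simple recovery loop: fetch the missing files in batch, then pull any single signed PDF by transaction id. A usage sketch (base URL assumed, transaction id hypothetical):

    import requests

    base = "http://localhost:8000/universign"

    # 1. Batch-download everything that is SIGNED but has no local file
    res = requests.post(
        f"{base}/admin/download-missing-documents",
        params={"force_redownload": False},
        timeout=300,
    ).json()
    print(f"{res['success']}/{res['total']} documents récupérés")

    # 2. Retrieve one signed PDF through the download route
    tx_id = "tx_123"  # hypothetical
    pdf = requests.get(f"{base}/transactions/{tx_id}/document/download")
    if pdf.ok:
        with open(f"{tx_id}_signe.pdf", "wb") as f:
            f.write(pdf.content)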
Binary file not shown.
Binary file not shown.
Binary file not shown.
Before Width: | Height: | Size: 27 KiB

161 sage_client.py
@@ -1,3 +1,4 @@

+# sage_client.py
import requests
from typing import Dict, List, Optional
from config.config import settings
@@ -407,7 +408,7 @@ class SageGatewayClient:

        return self._post(
            "/sage/collaborateurs/list",
            {
-                "filtre": filtre or "",
+                "filtre": filtre or "",  # Convertir None en ""
                "actifs_seulement": actifs_seulement,
            },
        ).get("data", [])
@@ -426,155 +427,6 @@ class SageGatewayClient:

            "/sage/collaborateurs/update", {"numero": numero, **data}
        ).get("data")

-    def lire_informations_societe(self) -> Optional[Dict]:
-        """Lit les informations de la société depuis P_DOSSIER"""
-        return self._get("/sage/societe/info").get("data")
-
-    def valider_facture(self, numero_facture: str) -> dict:
-        response = self._post(f"/sage/factures/{numero_facture}/valider", {})
-        return response.get("data", {})
-
-    def devalider_facture(self, numero_facture: str) -> dict:
-        response = self._post(f"/sage/factures/{numero_facture}/devalider", {})
-        return response.get("data", {})
-
-    def get_statut_validation(self, numero_facture: str) -> dict:
-        response = self._get(f"/sage/factures/{numero_facture}/statut-validation")
-        return response.get("data", {})
-
-    def regler_facture(
-        self,
-        numero_facture: str,
-        montant: float,
-        mode_reglement: int = 0,
-        date_reglement: str = None,
-        reference: str = "",
-        libelle: str = "",
-        code_journal: str = None,
-        devise_code: int = 0,
-        cours_devise: float = 1.0,
-        tva_encaissement: bool = False,
-        compte_general: str = None,
-    ) -> dict:
-        """Règle une facture"""
-        payload = {
-            "montant": montant,
-            "mode_reglement": mode_reglement,
-            "reference": reference,
-            "libelle": libelle,
-            "devise_code": devise_code,
-            "cours_devise": cours_devise,
-            "tva_encaissement": tva_encaissement,
-        }
-
-        if date_reglement:
-            payload["date_reglement"] = date_reglement
-        if code_journal:
-            payload["code_journal"] = code_journal
-        if compte_general:
-            payload["compte_general"] = compte_general
-
-        return self._post(f"/sage/factures/{numero_facture}/regler", payload).get(
-            "data", {}
-        )
-
-    def regler_factures_client(
-        self,
-        client_code: str,
-        montant_total: float,
-        mode_reglement: int = 0,
-        date_reglement: str = None,
-        reference: str = "",
-        libelle: str = "",
-        code_journal: str = None,
-        numeros_factures: list = None,
-        devise_code: int = 0,
-        cours_devise: float = 1.0,
-        tva_encaissement: bool = False,
-    ) -> dict:
-        """Règle plusieurs factures d'un client"""
-        payload = {
-            "client_code": client_code,
-            "montant_total": montant_total,
-            "mode_reglement": mode_reglement,
-            "reference": reference,
-            "libelle": libelle,
-            "devise_code": devise_code,
-            "cours_devise": cours_devise,
-            "tva_encaissement": tva_encaissement,
-        }
-
-        if date_reglement:
-            payload["date_reglement"] = date_reglement
-        if code_journal:
-            payload["code_journal"] = code_journal
-        if numeros_factures:
-            payload["numeros_factures"] = numeros_factures
-
-        return self._post("/sage/reglements/multiple", payload).get("data", {})
-
-    def get_reglements_facture(self, numero_facture: str) -> dict:
-        """Récupère les règlements d'une facture"""
-        return self._get(f"/sage/factures/{numero_facture}/reglements").get("data", {})
-
-    def get_reglements_client(
-        self,
-        client_code: str,
-        date_debut: str = None,
-        date_fin: str = None,
-        inclure_soldees: bool = True,
-    ) -> dict:
-        """Récupère les règlements d'un client"""
-        params = {"inclure_soldees": inclure_soldees}
-        if date_debut:
-            params["date_debut"] = date_debut
-        if date_fin:
-            params["date_fin"] = date_fin
-
-        return self._get(f"/sage/clients/{client_code}/reglements", params=params).get(
-            "data", {}
-        )
-
-    def get_journaux_banque(self) -> dict:
-        return self._get("/sage/journaux/banque").get("data", {})
-
-    def get_modes_reglement(self) -> List[dict]:
-        """Récupère les modes de règlement depuis Sage"""
-        return self._get("/sage/reglements/modes").get("data", {}).get("modes", [])
-
-    def get_devises(self) -> List[dict]:
-        """Récupère les devises disponibles"""
-        return self._get("/sage/devises").get("data", {}).get("devises", [])
-
-    def get_journaux_tresorerie(self) -> List[dict]:
-        """Récupère les journaux de trésorerie (banque + caisse)"""
-        return (
-            self._get("/sage/journaux/tresorerie").get("data", {}).get("journaux", [])
-        )
-
-    def get_comptes_generaux(
-        self, prefixe: str = None, type_compte: str = None
-    ) -> List[dict]:
-        params = {}
-        if prefixe:
-            params["prefixe"] = prefixe
-        if type_compte:
-            params["type_compte"] = type_compte
-
-        return (
-            self._get("/sage/comptes-generaux", params=params)
-            .get("data", {})
-            .get("comptes", [])
-        )
-
-    def get_tva_taux(self) -> List[dict]:
-        """Récupère les taux de TVA"""
-        return self._get("/sage/tva/taux").get("data", {}).get("taux", [])
-
-    def get_parametres_encaissement(self) -> dict:
-        """Récupère les paramètres TVA sur encaissement"""
-        return self._get("/sage/parametres/encaissement").get("data", {})

    def refresh_cache(self) -> Dict:
        return self._post("/sage/cache/refresh")
@@ -588,14 +440,5 @@ class SageGatewayClient:

        except Exception:
            return {"status": "down"}

-    def get_tous_reglements(self, params=None):
-        return self._get("/sage/reglements", params=params)
-
-    def get_reglement_facture_detail(self, facture_no):
-        return self._get(f"/sage/reglements/facture/{facture_no}")
-
-    def get_reglement_detail(self, rg_no):
-        return self._get(f"/sage/reglements/{rg_no}")
-

sage_client = SageGatewayClient()
@@ -26,13 +26,7 @@ from schemas.documents.documents import TypeDocument, TypeDocumentSQL

from schemas.documents.email import StatutEmail, EmailEnvoi
from schemas.documents.factures import FactureCreate, FactureUpdate
from schemas.documents.livraisons import LivraisonCreate, LivraisonUpdate
-from schemas.documents.universign import (
-    Signature,
-    StatutSignature,
-    SyncStatsResponse,
-    CreateSignatureRequest,
-    TransactionResponse,
-)
+from schemas.documents.universign import Signature, StatutSignature
from schemas.articles.articles import (
    ArticleCreate,
    Article,
@@ -59,10 +53,6 @@ from schemas.sage.sage_gateway import (

    CurrentGatewayInfo,
)

-from schemas.society.societe import SocieteInfo
-
-from schemas.society.enterprise import EntrepriseSearch, EntrepriseSearchResponse
-
__all__ = [
    "TiersDetails",
    "TypeTiers",
@@ -115,10 +105,4 @@ __all__ = [

    "SageGatewayTest",
    "SageGatewayStatsResponse",
    "CurrentGatewayInfo",
-    "SyncStatsResponse",
-    "CreateSignatureRequest",
-    "TransactionResponse",
-    "SocieteInfo",
-    "EntrepriseSearch",
-    "EntrepriseSearchResponse",
]
@@ -1,77 +0,0 @@

-from pydantic import BaseModel, Field
-from typing import Optional, List
-from datetime import datetime
-
-
-class ApiKeyCreate(BaseModel):
-    """Schema pour créer une clé API"""
-
-    name: str = Field(..., min_length=3, max_length=255, description="Nom de la clé")
-    description: Optional[str] = Field(None, description="Description de l'usage")
-    expires_in_days: Optional[int] = Field(
-        None, ge=1, le=3650, description="Expiration en jours (max 10 ans)"
-    )
-    rate_limit_per_minute: int = Field(
-        60, ge=1, le=1000, description="Limite de requêtes par minute"
-    )
-    allowed_endpoints: Optional[List[str]] = Field(
-        None, description="Endpoints autorisés ([] = tous, ['/clients*'] = wildcard)"
-    )
-
-
-class ApiKeyResponse(BaseModel):
-    """Schema de réponse pour une clé API"""
-
-    id: str
-    name: str
-    description: Optional[str]
-    key_prefix: str
-    is_active: bool
-    is_expired: bool
-    rate_limit_per_minute: int
-    allowed_endpoints: Optional[List[str]]
-    total_requests: int
-    last_used_at: Optional[datetime]
-    created_at: datetime
-    expires_at: Optional[datetime]
-    revoked_at: Optional[datetime]
-    created_by: str
-
-
-class ApiKeyCreatedResponse(ApiKeyResponse):
-    """Schema de réponse après création (inclut la clé en clair)"""
-
-    api_key: str = Field(
-        ..., description=" Clé API en clair - à sauvegarder immédiatement"
-    )
-
-
-class ApiKeyList(BaseModel):
-    """Liste de clés API"""
-
-    total: int
-    items: List[ApiKeyResponse]
-
-
-class SwaggerUserCreate(BaseModel):
-    """Schema pour créer un utilisateur Swagger"""
-
-    username: str = Field(..., min_length=3, max_length=100)
-    password: str = Field(..., min_length=8)
-    full_name: Optional[str] = None
-    email: Optional[str] = None
-
-
-class SwaggerUserResponse(BaseModel):
-    """Schema de réponse pour un utilisateur Swagger"""
-
-    id: str
-    username: str
-    full_name: Optional[str]
-    email: Optional[str]
-    is_active: bool
-    created_at: datetime
-    last_login: Optional[datetime]
-
-    class Config:
-        from_attributes = True
@@ -76,6 +76,7 @@ class Article(BaseModel):

    )
    nb_emplacements: int = Field(0, description="Nombre d'emplacements")

+    # Champs énumérés normalisés
    suivi_stock: Optional[int] = Field(
        None,
        description="Type de suivi de stock (AR_SuiviStock): 0=Aucun, 1=CMUP, 2=FIFO/LIFO, 3=Sérialisé",
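The suivi_stock description packs the AR_SuiviStock codes into prose; if call sites need names instead of magic numbers, an enum along these lines (illustrative, not part of the diff) keeps them aligned:

    from enum import IntEnum

    class SuiviStock(IntEnum):
        # Values as documented in the field description above
        AUCUN = 0
        CMUP = 1
        FIFO_LIFO = 2
        SERIALISE = 3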
@@ -4,7 +4,6 @@ from datetime import datetime

from schemas.documents.ligne_document import LigneDocument


class FactureCreate(BaseModel):
    client_id: str
    date_facture: Optional[datetime] = None
@@ -1,109 +0,0 @@

-from pydantic import BaseModel, Field, field_validator
-from typing import List, Optional
-import logging
-from decimal import Decimal
-from datetime import date
-
-logger = logging.getLogger(__name__)
-
-
-class ReglementFactureCreate(BaseModel):
-    """Requête de règlement d'une facture côté VPS"""
-
-    montant: Decimal = Field(..., gt=0, description="Montant à régler")
-    devise_code: Optional[int] = Field(0, description="Code devise (0=EUR par défaut)")
-    cours_devise: Optional[Decimal] = Field(1.0, description="Cours de la devise")
-
-    mode_reglement: int = Field(
-        ..., ge=0, description="Code mode règlement depuis /reglements/modes"
-    )
-    code_journal: str = Field(
-        ..., min_length=1, description="Code journal depuis /journaux/tresorerie"
-    )
-
-    date_reglement: Optional[date] = Field(
-        None, description="Date du règlement (défaut: aujourd'hui)"
-    )
-    date_echeance: Optional[date] = Field(None, description="Date d'échéance")
-
-    reference: Optional[str] = Field(
-        "", max_length=17, description="Référence pièce règlement"
-    )
-    libelle: Optional[str] = Field(
-        "", max_length=35, description="Libellé du règlement"
-    )
-
-    tva_encaissement: Optional[bool] = Field(
-        False, description="Appliquer TVA sur encaissement"
-    )
-    compte_general: Optional[str] = Field(None)
-
-    @field_validator("montant")
-    def validate_montant(cls, v):
-        if v <= 0:
-            raise ValueError("Le montant doit être positif")
-        return round(v, 2)
-
-    class Config:
-        json_schema_extra = {
-            "example": {
-                "montant": 375.12,
-                "mode_reglement": 2,
-                "reference": "CHQ-001",
-                "code_journal": "BEU",
-                "date_reglement": "2024-01-01",
-                "libelle": "Règlement multiple",
-                "tva_encaissement": False,
-                "devise_code": 0,
-                "cours_devise": 1.0,
-                "date_echeance": "2024-01-31",
-            }
-        }
-
-
-class ReglementMultipleCreate(BaseModel):
-    """Requête de règlement multiple côté VPS"""
-
-    client_id: str = Field(..., description="Code client")
-    montant_total: Decimal = Field(..., gt=0)
-
-    devise_code: Optional[int] = Field(0)
-    cours_devise: Optional[Decimal] = Field(1.0)
-    mode_reglement: int = Field(...)
-    code_journal: str = Field(...)
-    date_reglement: Optional[date] = None
-    reference: Optional[str] = Field("")
-    libelle: Optional[str] = Field("")
-    tva_encaissement: Optional[bool] = Field(False)
-
-    numeros_factures: Optional[List[str]] = Field(
-        None, description="Si vide, règle les plus anciennes en premier"
-    )
-
-    @field_validator("client_id", mode="before")
-    def strip_client_id(cls, v):
-        return v.replace("\xa0", "").strip() if v else v
-
-    @field_validator("montant_total")
-    def validate_montant(cls, v):
-        if v <= 0:
-            raise ValueError("Le montant doit être positif")
-        return round(v, 2)
-
-    class Config:
-        json_schema_extra = {
-            "example": {
-                "client_id": "CLI000001",
-                "montant_total": 1000.00,
-                "mode_reglement": 2,
-                "numeros_factures": ["FA00081", "FA00082"],
-                "reference": "CHQ-001",
-                "code_journal": "BEU",
-                "date_reglement": "2024-01-01",
-                "libelle": "Règlement multiple",
-                "tva_encaissement": False,
-                "devise_code": 0,
-                "cours_devise": 1.0,
-                "date_echeance": "2024-01-31",
-            }
-        }
@@ -1,12 +1,6 @@

from pydantic import BaseModel, EmailStr
from enum import Enum
from schemas.documents.documents import TypeDocument
-from database import (
-    SageDocumentType,
-)
-
from typing import List, Optional
from datetime import datetime


class StatutSignature(str, Enum):
@@ -22,49 +16,3 @@ class Signature(BaseModel):

    type_doc: TypeDocument
    email_signataire: EmailStr
    nom_signataire: str
-
-
-class CreateSignatureRequest(BaseModel):
-    """Demande de création d'une signature"""
-
-    sage_document_id: str
-    sage_document_type: SageDocumentType
-    signer_email: EmailStr
-    signer_name: str
-    document_name: Optional[str] = None
-
-
-class TransactionResponse(BaseModel):
-    """Réponse détaillée d'une transaction"""
-
-    id: str
-    transaction_id: str
-    sage_document_id: str
-    sage_document_type: str
-    universign_status: str
-    local_status: str
-    local_status_label: str
-    signer_url: Optional[str]
-    document_url: Optional[str]
-    created_at: datetime
-    sent_at: Optional[datetime]
-    signed_at: Optional[datetime]
-    last_synced_at: Optional[datetime]
-    needs_sync: bool
-    signers: List[dict]
-
-    signed_document_available: bool = False
-    signed_document_downloaded_at: Optional[datetime] = None
-    signed_document_size_kb: Optional[float] = None
-
-
-class SyncStatsResponse(BaseModel):
-    """Statistiques de synchronisation"""
-
-    total_transactions: int
-    pending_sync: int
-    signed: int
-    in_progress: int
-    refused: int
-    expired: int
-    last_sync_at: Optional[datetime]
@@ -10,6 +10,7 @@ class GatewayHealthStatus(str, Enum):

    UNKNOWN = "unknown"


+# === CREATE ===
class SageGatewayCreate(BaseModel):

    name: str = Field(
@@ -70,6 +71,7 @@ class SageGatewayUpdate(BaseModel):

        return v.rstrip("/") if v else v


+# === RESPONSE ===
class SageGatewayResponse(BaseModel):

    id: str
@@ -1,24 +0,0 @@

-from pydantic import BaseModel, Field
-from typing import Optional, List
-
-
-class EntrepriseSearch(BaseModel):
-    """Modèle de réponse pour une entreprise trouvée"""
-
-    company_name: str = Field(..., description="Raison sociale complète")
-    siren: str = Field(..., description="Numéro SIREN (9 chiffres)")
-    vat_number: str = Field(..., description="Numéro de TVA intracommunautaire")
-    address: str = Field(..., description="Adresse complète du siège")
-    naf_code: str = Field(..., description="Code NAF/APE")
-    is_active: bool = Field(..., description="True si entreprise active")
-    siret_siege: Optional[str] = Field(None, description="SIRET du siège")
-    code_postal: Optional[str] = None
-    ville: Optional[str] = None
-
-
-class EntrepriseSearchResponse(BaseModel):
-    """Réponse globale de la recherche"""
-
-    total_results: int
-    results: List[EntrepriseSearch]
-    query: str
@@ -1,46 +0,0 @@

-from pydantic import BaseModel
-from typing import Optional, List
-
-
-class ExerciceComptable(BaseModel):
-    numero: int
-    debut: str
-    fin: Optional[str] = None
-
-
-class SocieteInfo(BaseModel):
-    raison_sociale: str
-    numero_dossier: str
-    siret: Optional[str] = None
-    code_ape: Optional[str] = None
-    numero_tva: Optional[str] = None
-
-    adresse: Optional[str] = None
-    complement_adresse: Optional[str] = None
-    code_postal: Optional[str] = None
-    ville: Optional[str] = None
-    code_region: Optional[str] = None
-    pays: Optional[str] = None
-
-    telephone: Optional[str] = None
-    telecopie: Optional[str] = None
-    email: Optional[str] = None
-    email_societe: Optional[str] = None
-    site_web: Optional[str] = None
-
-    capital: float = 0.0
-    forme_juridique: Optional[str] = None
-
-    exercices: List[ExerciceComptable] = []
-
-    devise_compte: int = 0
-    devise_equivalent: int = 0
-    longueur_compte_general: int = 0
-    longueur_compte_analytique: int = 0
-    regime_fec: int = 0
-
-    base_modele: Optional[str] = None
-    marqueur: int = 0
-
-    logo_base64: Optional[str] = None
-    logo_content_type: Optional[str] = None
@@ -9,6 +9,7 @@ class CollaborateurBase(BaseModel):

    prenom: Optional[str] = Field(None, max_length=50)
    fonction: Optional[str] = Field(None, max_length=50)

+    # Adresse
    adresse: Optional[str] = Field(None, max_length=100)
    complement: Optional[str] = Field(None, max_length=100)
    code_postal: Optional[str] = Field(None, max_length=10)
@@ -16,6 +17,7 @@ class CollaborateurBase(BaseModel):

    code_region: Optional[str] = Field(None, max_length=50)
    pays: Optional[str] = Field(None, max_length=50)

+    # Services
    service: Optional[str] = Field(None, max_length=50)
    vendeur: bool = Field(default=False)
    caissier: bool = Field(default=False)
@@ -23,15 +25,18 @@ class CollaborateurBase(BaseModel):

    chef_ventes: bool = Field(default=False)
    numero_chef_ventes: Optional[int] = None

+    # Contact
    telephone: Optional[str] = Field(None, max_length=20)
    telecopie: Optional[str] = Field(None, max_length=20)
    email: Optional[EmailStr] = None
    tel_portable: Optional[str] = Field(None, max_length=20)

+    # Réseaux sociaux
    facebook: Optional[str] = Field(None, max_length=100)
    linkedin: Optional[str] = Field(None, max_length=100)
    skype: Optional[str] = Field(None, max_length=100)

+    # Autres
    matricule: Optional[str] = Field(None, max_length=20)
    sommeil: bool = Field(default=False)
@@ -14,6 +14,7 @@ class TypeTiersInt(IntEnum):


class TiersDetails(BaseModel):
+    # IDENTIFICATION
    numero: Optional[str] = Field(None, description="Code tiers (CT_Num)")
    intitule: Optional[str] = Field(
        None, description="Raison sociale ou Nom complet (CT_Intitule)"
@@ -36,6 +37,7 @@ class TiersDetails(BaseModel):

    )
    code_naf: Optional[str] = Field(None, description="Code NAF/APE (CT_Ape)")

+    # ADRESSE
    contact: Optional[str] = Field(
        None, description="Nom du contact principal (CT_Contact)"
    )
@@ -48,6 +50,7 @@ class TiersDetails(BaseModel):

    region: Optional[str] = Field(None, description="Région/État (CT_CodeRegion)")
    pays: Optional[str] = Field(None, description="Pays (CT_Pays)")

+    # TELECOM
    telephone: Optional[str] = Field(None, description="Téléphone fixe (CT_Telephone)")
    telecopie: Optional[str] = Field(None, description="Fax (CT_Telecopie)")
    email: Optional[str] = Field(None, description="Email principal (CT_EMail)")
@@ -55,11 +58,13 @@ class TiersDetails(BaseModel):

    facebook: Optional[str] = Field(None, description="Profil Facebook (CT_Facebook)")
    linkedin: Optional[str] = Field(None, description="Profil LinkedIn (CT_LinkedIn)")

+    # TAUX
    taux01: Optional[float] = Field(None, description="Taux personnalisé 1 (CT_Taux01)")
    taux02: Optional[float] = Field(None, description="Taux personnalisé 2 (CT_Taux02)")
    taux03: Optional[float] = Field(None, description="Taux personnalisé 3 (CT_Taux03)")
    taux04: Optional[float] = Field(None, description="Taux personnalisé 4 (CT_Taux04)")

+    # STATISTIQUES
    statistique01: Optional[str] = Field(
        None, description="Statistique 1 (CT_Statistique01)"
    )
@@ -91,6 +96,7 @@ class TiersDetails(BaseModel):

        None, description="Statistique 10 (CT_Statistique10)"
    )

+    # COMMERCIAL
    encours_autorise: Optional[float] = Field(
        None, description="Encours maximum autorisé (CT_Encours)"
    )
@@ -107,6 +113,7 @@ class TiersDetails(BaseModel):

        None, description="Détails du commercial/collaborateur"
    )

+    # FACTURATION
    lettrage_auto: Optional[bool] = Field(
        None, description="Lettrage automatique (CT_Lettrage)"
    )
@@ -139,6 +146,7 @@ class TiersDetails(BaseModel):

        None, description="Bon à payer obligatoire (CT_BonAPayer)"
    )

+    # LOGISTIQUE
    priorite_livraison: Optional[int] = Field(
        None, description="Priorité livraison (CT_PrioriteLivr)"
    )
@@ -152,14 +160,17 @@ class TiersDetails(BaseModel):

        None, description="Délai appro jours (CT_DelaiAppro)"
    )

+    # COMMENTAIRE
    commentaire: Optional[str] = Field(
        None, description="Commentaire libre (CT_Commentaire)"
    )

+    # ANALYTIQUE
    section_analytique: Optional[str] = Field(
        None, description="Section analytique (CA_Num)"
    )

+    # ORGANISATION / SURVEILLANCE
    mode_reglement_code: Optional[int] = Field(
        None, description="Code mode règlement (MR_No)"
    )
@@ -189,6 +200,7 @@ class TiersDetails(BaseModel):

        None, description="Résultat financier (CT_SvResultat)"
    )

+    # COMPTE GENERAL ET CATEGORIES
    compte_general: Optional[str] = Field(
        None, description="Compte général principal (CG_NumPrinc)"
    )
@@ -199,6 +211,7 @@ class TiersDetails(BaseModel):

        None, description="Catégorie comptable (N_CatCompta)"
    )

+    # CONTACTS
    contacts: Optional[List[Contact]] = Field(
        default_factory=list, description="Liste des contacts du tiers"
    )
@@ -1,651 +0,0 @@

-import sys
-import os
-from pathlib import Path
-import asyncio
-import argparse
-import logging
-from datetime import datetime
-from typing import Optional, List
-import json
-from sqlalchemy import select
-
-_current_file = Path(__file__).resolve()
-_script_dir = _current_file.parent
-_app_dir = _script_dir.parent
-
-print(f"DEBUG: Script path: {_current_file}")
-print(f"DEBUG: App dir: {_app_dir}")
-print(f"DEBUG: Current working dir: {os.getcwd()}")
-
-if str(_app_dir) in sys.path:
-    sys.path.remove(str(_app_dir))
-sys.path.insert(0, str(_app_dir))
-
-os.chdir(str(_app_dir))
-
-print(f"DEBUG: sys.path[0]: {sys.path[0]}")
-print(f"DEBUG: New working dir: {os.getcwd()}")
-
-_test_imports = [
-    "database",
-    "database.db_config",
-    "database.models",
-    "services",
-    "security",
-]
-
-print("\nDEBUG: Vérification des imports...")
-for module in _test_imports:
-    try:
-        __import__(module)
-        print(f"  ✓ {module}")
-    except ImportError as e:
-        print(f"  ✗ {module}: {e}")
-
-try:
-    from database.db_config import async_session_factory
-    from database.models.api_key import SwaggerUser, ApiKey
-    from services.api_key import ApiKeyService
-    from security.auth import hash_password
-except ImportError as e:
-    print(f"\n ERREUR D'IMPORT: {e}")
-    print(" Vérifiez que vous êtes dans /app")
-    print(" Commande correcte: cd /app && python scripts/manage_security.py ...")
-    sys.exit(1)
-
-logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
-logger = logging.getLogger(__name__)
-
-
-AVAILABLE_TAGS = {
-    "Authentication": " Authentification et gestion des comptes",
-    "API Keys Management": "🔑 Gestion des clés API",
-    "Clients": "👥 Gestion des clients",
-    "Fournisseurs": "🏭 Gestion des fournisseurs",
-    "Prospects": "🎯 Gestion des prospects",
-    "Tiers": "📋 Gestion générale des tiers",
-    "Contacts": "📞 Contacts des tiers",
-    "Articles": "📦 Catalogue articles",
-    "Familles": "🏷️ Familles d'articles",
-    "Stock": "📊 Mouvements de stock",
-    "Devis": "📄 Devis",
-    "Commandes": "🛒 Commandes",
-    "Livraisons": "🚚 Bons de livraison",
-    "Factures": "💰 Factures",
-    "Avoirs": "↩️ Avoirs",
-    "Règlements": "💳 Règlements et encaissements",
-    "Workflows": " Transformations de documents",
-    "Documents": "📑 Gestion documents (PDF)",
-    "Emails": "📧 Envoi d'emails",
-    "Validation": " Validations métier",
-    "Collaborateurs": "👔 Collaborateurs internes",
-    "Société": "🏢 Informations société",
-    "Référentiels": "📚 Données de référence",
-    "System": "⚙️ Système et santé",
-    "Admin": "🛠️ Administration",
-    "Debug": "🐛 Debug et diagnostics",
-}
-
-PRESET_PROFILES = {
-    "commercial": [
-        "Clients",
-        "Contacts",
-        "Devis",
-        "Commandes",
-        "Factures",
-        "Articles",
-        "Documents",
-        "Emails",
-    ],
-    "comptable": [
-        "Clients",
-        "Fournisseurs",
-        "Factures",
-        "Avoirs",
-        "Règlements",
-        "Documents",
-        "Emails",
-    ],
-    "logistique": [
-        "Articles",
-        "Stock",
-        "Commandes",
-        "Livraisons",
-        "Fournisseurs",
-        "Documents",
-    ],
-    "readonly": ["Clients", "Articles", "Devis", "Commandes", "Factures", "Documents"],
-    "developer": [
-        "Authentication",
-        "API Keys Management",
-        "System",
-        "Clients",
-        "Articles",
-        "Devis",
-        "Commandes",
-        "Factures",
-    ],
-}
-
-
-async def add_swagger_user(
-    username: str,
-    password: str,
-    full_name: str = None,
-    tags: Optional[List[str]] = None,
-    preset: Optional[str] = None,
-):
-    """Ajouter un utilisateur Swagger avec configuration avancée"""
-    async with async_session_factory() as session:
-        result = await session.execute(
-            select(SwaggerUser).where(SwaggerUser.username == username)
-        )
-        existing = result.scalar_one_or_none()
-
-        if existing:
-            logger.error(f" L'utilisateur '{username}' existe déjà")
-            return
-
-        if preset:
-            if preset not in PRESET_PROFILES:
-                logger.error(
-                    f" Preset '{preset}' inconnu. Disponibles: {list(PRESET_PROFILES.keys())}"
-                )
-                return
-            tags = PRESET_PROFILES[preset]
-            logger.info(f"📋 Application du preset '{preset}': {len(tags)} tags")
-
-        swagger_user = SwaggerUser(
-            username=username,
-            hashed_password=hash_password(password),
-            full_name=full_name or username,
-            is_active=True,
-            allowed_tags=json.dumps(tags) if tags else None,
-        )
-
-        session.add(swagger_user)
-        await session.commit()
-
-        logger.info(f" Utilisateur Swagger créé: {username}")
-        logger.info(f" Nom complet: {swagger_user.full_name}")
-
-        if tags:
-            logger.info(f" 🏷️ Tags autorisés ({len(tags)}):")
-            for tag in tags:
-                desc = AVAILABLE_TAGS.get(tag, "")
-                logger.info(f"   • {tag} {desc}")
-        else:
-            logger.info(" 👑 Accès ADMIN COMPLET (tous les tags)")
-
-
-async def list_swagger_users():
-    """Lister tous les utilisateurs Swagger avec détails"""
-    async with async_session_factory() as session:
-        result = await session.execute(select(SwaggerUser))
-        users = result.scalars().all()
-
-        if not users:
-            logger.info("🔭 Aucun utilisateur Swagger")
-            return
-
-        logger.info(f"\n👥 {len(users)} utilisateur(s) Swagger:\n")
-        logger.info("=" * 80)
-
-        for user in users:
-            status = " ACTIF" if user.is_active else " NON ACTIF"
-            logger.info(f"\n{status} {user.username}")
-            logger.info(f"📛 Nom: {user.full_name}")
-            logger.info(f"🆔 ID: {user.id}")
-            logger.info(f"📅 Créé: {user.created_at}")
-            logger.info(f"🕐 Dernière connexion: {user.last_login or 'Jamais'}")
-
-            if user.allowed_tags:
-                try:
-                    tags = json.loads(user.allowed_tags)
-                    if tags:
-                        logger.info(f"🏷️ Tags autorisés ({len(tags)}):")
-                        for tag in tags:
-                            desc = AVAILABLE_TAGS.get(tag, "")
-                            logger.info(f"   • {tag} {desc}")
-
-                        auth_schemes = []
-                        if "Authentication" in tags:
-                            auth_schemes.append("JWT (Bearer)")
-                        if "API Keys Management" in tags or len(tags) > 3:
-                            auth_schemes.append("X-API-Key")
-                        if not auth_schemes:
-                            auth_schemes.append("JWT (Bearer)")
-
-                        logger.info(
-                            f" Authentification autorisée: {', '.join(auth_schemes)}"
-                        )
-                    else:
-                        logger.info("👑 Tags autorisés: ADMIN COMPLET (tous)")
-                        logger.info(" Authentification: JWT + X-API-Key (tout)")
-                except json.JSONDecodeError:
-                    logger.info(" Tags: Erreur format")
-            else:
-                logger.info("👑 Tags autorisés: ADMIN COMPLET (tous)")
-                logger.info(" Authentification: JWT + X-API-Key (tout)")
-
-        logger.info("\n" + "=" * 80)
-
-
-async def update_swagger_user(
-    username: str,
-    add_tags: Optional[List[str]] = None,
-    remove_tags: Optional[List[str]] = None,
-    set_tags: Optional[List[str]] = None,
-    preset: Optional[str] = None,
-    active: Optional[bool] = None,
-):
-    """Mettre à jour un utilisateur Swagger"""
-    async with async_session_factory() as session:
-        result = await session.execute(
-            select(SwaggerUser).where(SwaggerUser.username == username)
-        )
-        user = result.scalar_one_or_none()
-
-        if not user:
-            logger.error(f" Utilisateur '{username}' introuvable")
-            return
-
-        modified = False
-
-        if preset:
-            if preset not in PRESET_PROFILES:
-                logger.error(f" Preset '{preset}' inconnu")
-                return
-            user.allowed_tags = json.dumps(PRESET_PROFILES[preset])
-            logger.info(f"📋 Preset '{preset}' appliqué")
-            modified = True
-
-        elif set_tags is not None:
-            user.allowed_tags = json.dumps(set_tags) if set_tags else None
-            logger.info(f" Tags remplacés: {len(set_tags) if set_tags else 0}")
-            modified = True
-
-        elif add_tags or remove_tags:
-            current_tags = []
-            if user.allowed_tags:
-                try:
-                    current_tags = json.loads(user.allowed_tags)
-                except json.JSONDecodeError:
-                    current_tags = []
-
-            if add_tags:
-                for tag in add_tags:
-                    if tag not in current_tags:
-                        current_tags.append(tag)
-                        logger.info(f"➕ Tag ajouté: {tag}")
-                        modified = True
-
-            if remove_tags:
-                for tag in remove_tags:
-                    if tag in current_tags:
-                        current_tags.remove(tag)
-                        logger.info(f"➖ Tag retiré: {tag}")
-                        modified = True
-
-            user.allowed_tags = json.dumps(current_tags) if current_tags else None
-
-        if active is not None:
-            user.is_active = active
-            logger.info(f" Statut: {'ACTIF' if active else 'INACTIF'}")
-            modified = True
-
-        if modified:
-            await session.commit()
-            logger.info(f" Utilisateur '{username}' mis à jour")
-        else:
-            logger.info(" Aucune modification effectuée")
-
-
-async def delete_swagger_user(username: str):
-    """Supprimer un utilisateur Swagger"""
-    async with async_session_factory() as session:
-        result = await session.execute(
-            select(SwaggerUser).where(SwaggerUser.username == username)
-        )
-        user = result.scalar_one_or_none()
-
-        if not user:
-            logger.error(f" Utilisateur '{username}' introuvable")
-            return
-
-        await session.delete(user)
-        await session.commit()
-        logger.info(f"🗑️ Utilisateur Swagger supprimé: {username}")
-
-
-async def list_available_tags():
-    """Liste tous les tags disponibles avec description"""
-    logger.info("\n🏷️ TAGS DISPONIBLES:\n")
-    logger.info("=" * 80)
-
-    for tag, desc in AVAILABLE_TAGS.items():
-        logger.info(f" {desc}")
-        logger.info(f"   Nom: {tag}\n")
-
-    logger.info("=" * 80)
-    logger.info("\n📦 PRESETS DISPONIBLES:\n")
-
-    for preset_name, tags in PRESET_PROFILES.items():
-        logger.info(f" {preset_name}:")
-        logger.info(f"   {', '.join(tags)}\n")
-
-    logger.info("=" * 80)
-
-
-async def create_api_key(
-    name: str,
-    description: str = None,
-    expires_in_days: int = 365,
-    rate_limit: int = 60,
-    endpoints: list = None,
-):
-    """Créer une clé API"""
-    async with async_session_factory() as session:
-        service = ApiKeyService(session)
-
-        api_key_obj, api_key_plain = await service.create_api_key(
-            name=name,
-            description=description,
-            created_by="cli",
-            expires_in_days=expires_in_days,
-            rate_limit_per_minute=rate_limit,
-            allowed_endpoints=endpoints,
-        )
-
-        logger.info("=" * 70)
-        logger.info("🔑 Clé API créée avec succès")
-        logger.info("=" * 70)
logger.info(f" ID: {api_key_obj.id}")
|
||||
logger.info(f" Nom: {api_key_obj.name}")
|
||||
logger.info(f" Clé: {api_key_plain}")
|
||||
logger.info(f" Préfixe: {api_key_obj.key_prefix}")
|
||||
logger.info(f" Rate limit: {api_key_obj.rate_limit_per_minute} req/min")
|
||||
logger.info(f" Expire le: {api_key_obj.expires_at}")
|
||||
|
||||
if api_key_obj.allowed_endpoints:
|
||||
try:
|
||||
endpoints_list = json.loads(api_key_obj.allowed_endpoints)
|
||||
logger.info(f" Endpoints: {', '.join(endpoints_list)}")
|
||||
except Exception:
|
||||
logger.info(f" Endpoints: {api_key_obj.allowed_endpoints}")
|
||||
else:
|
||||
logger.info(" Endpoints: Tous (aucune restriction)")
|
||||
|
||||
logger.info("=" * 70)
|
||||
logger.info(" SAUVEGARDEZ CETTE CLÉ - Elle ne sera plus affichée !")
|
||||
logger.info("=" * 70)
|
||||
|
||||
|
||||
async def list_api_keys():
|
||||
"""Lister toutes les clés API"""
|
||||
async with async_session_factory() as session:
|
||||
service = ApiKeyService(session)
|
||||
keys = await service.list_api_keys()
|
||||
|
||||
if not keys:
|
||||
logger.info("🔭 Aucune clé API")
|
||||
return
|
||||
|
||||
logger.info(f"🔑 {len(keys)} clé(s) API:\n")
|
||||
|
||||
for key in keys:
|
||||
is_valid = key.is_active and (
|
||||
not key.expires_at or key.expires_at > datetime.now()
|
||||
)
|
||||
status = "" if is_valid else ""
|
||||
|
||||
logger.info(f" {status} {key.name:<30} ({key.key_prefix}...)")
|
||||
logger.info(f" ID: {key.id}")
|
||||
logger.info(f" Rate limit: {key.rate_limit_per_minute} req/min")
|
||||
logger.info(f" Requêtes: {key.total_requests}")
|
||||
logger.info(f" Expire: {key.expires_at or 'Jamais'}")
|
||||
logger.info(f" Dernière utilisation: {key.last_used_at or 'Jamais'}")
|
||||
|
||||
if key.allowed_endpoints:
|
||||
try:
|
||||
endpoints = json.loads(key.allowed_endpoints)
|
||||
display = ", ".join(endpoints[:4])
|
||||
if len(endpoints) > 4:
|
||||
display += f"... (+{len(endpoints) - 4})"
|
||||
logger.info(f" Endpoints: {display}")
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
logger.info(" Endpoints: Tous")
|
||||
logger.info("")
|
||||
|
||||
|
||||
async def revoke_api_key(key_id: str):
|
||||
"""Révoquer une clé API"""
|
||||
async with async_session_factory() as session:
|
||||
result = await session.execute(select(ApiKey).where(ApiKey.id == key_id))
|
||||
key = result.scalar_one_or_none()
|
||||
|
||||
if not key:
|
||||
logger.error(f" Clé API '{key_id}' introuvable")
|
||||
return
|
||||
|
||||
key.is_active = False
|
||||
key.revoked_at = datetime.now()
|
||||
await session.commit()
|
||||
|
||||
logger.info(f"🗑️ Clé API révoquée: {key.name}")
|
||||
logger.info(f" ID: {key.id}")
|
||||
|
||||
|
||||
async def verify_api_key(api_key: str):
|
||||
"""Vérifier une clé API"""
|
||||
async with async_session_factory() as session:
|
||||
service = ApiKeyService(session)
|
||||
key = await service.verify_api_key(api_key)
|
||||
|
||||
if not key:
|
||||
logger.error(" Clé API invalide ou expirée")
|
||||
return
|
||||
|
||||
logger.info("=" * 60)
|
||||
logger.info(" Clé API valide")
|
||||
logger.info("=" * 60)
|
||||
logger.info(f" Nom: {key.name}")
|
||||
logger.info(f" ID: {key.id}")
|
||||
logger.info(f" Rate limit: {key.rate_limit_per_minute} req/min")
|
||||
logger.info(f" Requêtes totales: {key.total_requests}")
|
||||
logger.info(f" Expire: {key.expires_at or 'Jamais'}")
|
||||
|
||||
if key.allowed_endpoints:
|
||||
try:
|
||||
endpoints = json.loads(key.allowed_endpoints)
|
||||
logger.info(f" Endpoints autorisés: {endpoints}")
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
logger.info(" Endpoints autorisés: Tous")
|
||||
logger.info("=" * 60)
|
||||
|
||||
|
||||
async def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Gestion avancée des utilisateurs Swagger et clés API",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
epilog="""
|
||||
EXEMPLES D'UTILISATION:
|
||||
|
||||
=== UTILISATEURS SWAGGER ===
|
||||
|
||||
1. Créer un utilisateur avec preset:
|
||||
python scripts/manage_security.py swagger add commercial Pass123! --preset commercial
|
||||
|
||||
2. Créer un admin complet:
|
||||
python scripts/manage_security.py swagger add admin AdminPass
|
||||
|
||||
3. Créer avec tags spécifiques:
|
||||
python scripts/manage_security.py swagger add client Pass123! --tags Clients Devis Factures
|
||||
|
||||
4. Mettre à jour un utilisateur (ajouter des tags):
|
||||
python scripts/manage_security.py swagger update client --add-tags Commandes Livraisons
|
||||
|
||||
5. Changer complètement les tags:
|
||||
python scripts/manage_security.py swagger update client --set-tags Clients Articles
|
||||
|
||||
6. Appliquer un preset:
|
||||
python scripts/manage_security.py swagger update client --preset comptable
|
||||
|
||||
7. Lister les tags disponibles:
|
||||
python scripts/manage_security.py swagger tags
|
||||
|
||||
8. Désactiver temporairement:
|
||||
python scripts/manage_security.py swagger update client --inactive
|
||||
|
||||
=== CLÉS API ===
|
||||
|
||||
9. Créer une clé API:
|
||||
python scripts/manage_security.py apikey create "Mon App" --days 365 --rate-limit 100
|
||||
|
||||
10. Créer avec endpoints restreints:
|
||||
python scripts/manage_security.py apikey create "SDK-ReadOnly" --endpoints "/clients" "/clients/*" "/devis" "/devis/*"
|
||||
|
||||
11. Lister les clés:
|
||||
python scripts/manage_security.py apikey list
|
||||
|
||||
12. Vérifier une clé:
|
||||
python scripts/manage_security.py apikey verify sdk_live_xxxxx
|
||||
|
||||
13. Révoquer une clé:
|
||||
python scripts/manage_security.py apikey revoke <key_id>
|
||||
""",
|
||||
)
|
||||
|
||||
subparsers = parser.add_subparsers(dest="command", help="Commandes")
|
||||
|
||||
swagger_parser = subparsers.add_parser("swagger", help="Gestion Swagger")
|
||||
swagger_sub = swagger_parser.add_subparsers(dest="swagger_command")
|
||||
|
||||
add_p = swagger_sub.add_parser("add", help="Ajouter utilisateur")
|
||||
add_p.add_argument("username", help="Nom d'utilisateur")
|
||||
add_p.add_argument("password", help="Mot de passe")
|
||||
add_p.add_argument("--full-name", help="Nom complet", default=None)
|
||||
add_p.add_argument(
|
||||
"--tags",
|
||||
nargs="*",
|
||||
help="Tags autorisés. Vide = admin complet",
|
||||
default=None,
|
||||
)
|
||||
add_p.add_argument(
|
||||
"--preset",
|
||||
choices=list(PRESET_PROFILES.keys()),
|
||||
help="Appliquer un preset de tags",
|
||||
)
|
||||
|
||||
update_p = swagger_sub.add_parser("update", help="Mettre à jour utilisateur")
|
||||
update_p.add_argument("username", help="Nom d'utilisateur")
|
||||
update_p.add_argument("--add-tags", nargs="+", help="Ajouter des tags")
|
||||
update_p.add_argument("--remove-tags", nargs="+", help="Retirer des tags")
|
||||
update_p.add_argument("--set-tags", nargs="*", help="Définir les tags (remplace)")
|
||||
update_p.add_argument(
|
||||
"--preset", choices=list(PRESET_PROFILES.keys()), help="Appliquer preset"
|
||||
)
|
||||
update_p.add_argument("--active", action="store_true", help="Activer l'utilisateur")
|
||||
update_p.add_argument(
|
||||
"--inactive", action="store_true", help="Désactiver l'utilisateur"
|
||||
)
|
||||
|
||||
swagger_sub.add_parser("list", help="Lister utilisateurs")
|
||||
|
||||
del_p = swagger_sub.add_parser("delete", help="Supprimer utilisateur")
|
||||
del_p.add_argument("username", help="Nom d'utilisateur")
|
||||
|
||||
swagger_sub.add_parser("tags", help="Lister les tags disponibles")
|
||||
|
||||
apikey_parser = subparsers.add_parser("apikey", help="Gestion clés API")
|
||||
apikey_sub = apikey_parser.add_subparsers(dest="apikey_command")
|
||||
|
||||
create_p = apikey_sub.add_parser("create", help="Créer clé API")
|
||||
create_p.add_argument("name", help="Nom de la clé")
|
||||
create_p.add_argument("--description", help="Description")
|
||||
create_p.add_argument("--days", type=int, default=365, help="Expiration (jours)")
|
||||
create_p.add_argument("--rate-limit", type=int, default=60, help="Req/min")
|
||||
create_p.add_argument("--endpoints", nargs="+", help="Endpoints autorisés")
|
||||
|
||||
apikey_sub.add_parser("list", help="Lister clés")
|
||||
|
||||
rev_p = apikey_sub.add_parser("revoke", help="Révoquer clé")
|
||||
rev_p.add_argument("key_id", help="ID de la clé")
|
||||
|
||||
ver_p = apikey_sub.add_parser("verify", help="Vérifier clé")
|
||||
ver_p.add_argument("api_key", help="Clé API complète")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if not args.command:
|
||||
parser.print_help()
|
||||
return
|
||||
|
||||
if args.command == "swagger":
|
||||
if args.swagger_command == "add":
|
||||
await add_swagger_user(
|
||||
args.username,
|
||||
args.password,
|
||||
args.full_name,
|
||||
args.tags,
|
||||
args.preset,
|
||||
)
|
||||
elif args.swagger_command == "update":
|
||||
active = None
|
||||
if args.active:
|
||||
active = True
|
||||
elif args.inactive:
|
||||
active = False
|
||||
|
||||
await update_swagger_user(
|
||||
args.username,
|
||||
add_tags=args.add_tags,
|
||||
remove_tags=args.remove_tags,
|
||||
set_tags=args.set_tags,
|
||||
preset=args.preset,
|
||||
active=active,
|
||||
)
|
||||
elif args.swagger_command == "list":
|
||||
await list_swagger_users()
|
||||
elif args.swagger_command == "delete":
|
||||
await delete_swagger_user(args.username)
|
||||
elif args.swagger_command == "tags":
|
||||
await list_available_tags()
|
||||
else:
|
||||
swagger_parser.print_help()
|
||||
|
||||
elif args.command == "apikey":
|
||||
if args.apikey_command == "create":
|
||||
await create_api_key(
|
||||
name=args.name,
|
||||
description=args.description,
|
||||
expires_in_days=args.days,
|
||||
rate_limit=args.rate_limit,
|
||||
endpoints=args.endpoints,
|
||||
)
|
||||
elif args.apikey_command == "list":
|
||||
await list_api_keys()
|
||||
elif args.apikey_command == "revoke":
|
||||
await revoke_api_key(args.key_id)
|
||||
elif args.apikey_command == "verify":
|
||||
await verify_api_key(args.api_key)
|
||||
else:
|
||||
apikey_parser.print_help()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
asyncio.run(main())
|
||||
except KeyboardInterrupt:
|
||||
print("\n Interrupted")
|
||||
sys.exit(0)
|
||||
except Exception as e:
|
||||
logger.error(f" Erreur: {e}")
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||

@@ -1,354 +0,0 @@
import requests
import argparse
import sys
from typing import Tuple


class SecurityTester:
    def __init__(self, base_url: str):
        self.base_url = base_url.rstrip("/")
        self.results = {"passed": 0, "failed": 0, "tests": []}

    def log_test(self, name: str, passed: bool, details: str = ""):
        """Record a test result."""
        status = " PASS" if passed else " FAIL"
        print(f"{status} - {name}")
        if details:
            print(f"   {details}")

        self.results["tests"].append(
            {"name": name, "passed": passed, "details": details}
        )

        if passed:
            self.results["passed"] += 1
        else:
            self.results["failed"] += 1

    def test_swagger_without_auth(self) -> bool:
        """Test 1: Swagger UI should require authentication."""
        print("\n Test 1: Protection Swagger UI")

        try:
            response = requests.get(f"{self.base_url}/docs", timeout=5)

            if response.status_code == 401:
                self.log_test(
                    "Swagger protégé",
                    True,
                    "Code 401 retourné sans authentification",
                )
                return True
            else:
                self.log_test(
                    "Swagger protégé",
                    False,
                    f"Code {response.status_code} au lieu de 401",
                )
                return False

        except Exception as e:
            self.log_test("Swagger protégé", False, f"Erreur: {str(e)}")
            return False

    def test_swagger_with_auth(self, username: str, password: str) -> bool:
        """Test 2: Swagger UI should be reachable with valid credentials."""
        print("\n Test 2: Accès Swagger avec authentification")

        try:
            response = requests.get(
                f"{self.base_url}/docs", auth=(username, password), timeout=5
            )

            if response.status_code == 200:
                self.log_test(
                    "Accès Swagger avec auth",
                    True,
                    f"Authentifié comme {username}",
                )
                return True
            else:
                self.log_test(
                    "Accès Swagger avec auth",
                    False,
                    f"Code {response.status_code}, credentials invalides?",
                )
                return False

        except Exception as e:
            self.log_test("Accès Swagger avec auth", False, f"Erreur: {str(e)}")
            return False

    def test_api_without_auth(self) -> bool:
        """Test 3: API endpoints should require authentication."""
        print("\n Test 3: Protection des endpoints API")

        test_endpoints = ["/api/v1/clients", "/api/v1/documents"]

        all_protected = True
        for endpoint in test_endpoints:
            try:
                response = requests.get(f"{self.base_url}{endpoint}", timeout=5)

                if response.status_code == 401:
                    print(f"   {endpoint} protégé (401)")
                else:
                    print(
                        f"   {endpoint} accessible sans auth (code {response.status_code})"
                    )
                    all_protected = False

            except Exception as e:
                print(f"   {endpoint} erreur: {str(e)}")
                all_protected = False

        self.log_test("Endpoints API protégés", all_protected)
        return all_protected

    def test_health_endpoint_public(self) -> bool:
        """Test 4: The /health endpoint should be reachable without auth."""
        print("\n Test 4: Endpoint /health public")

        try:
            response = requests.get(f"{self.base_url}/health", timeout=5)

            if response.status_code == 200:
                self.log_test("/health accessible", True, "Endpoint public fonctionne")
                return True
            else:
                self.log_test(
                    "/health accessible",
                    False,
                    f"Code {response.status_code} inattendu",
                )
                return False

        except Exception as e:
            self.log_test("/health accessible", False, f"Erreur: {str(e)}")
            return False

    def test_api_key_creation(self, username: str, password: str) -> Tuple[bool, str]:
        """Test 5: Create an API key through the endpoint."""
        print("\n Test 5: Création d'une clé API")

        try:
            login_response = requests.post(
                f"{self.base_url}/api/v1/auth/login",
                json={"email": username, "password": password},
                timeout=5,
            )

            if login_response.status_code != 200:
                self.log_test(
                    "Création clé API",
                    False,
                    "Impossible de se connecter pour obtenir un JWT",
                )
                return False, ""

            jwt_token = login_response.json().get("access_token")

            create_response = requests.post(
                f"{self.base_url}/api/v1/api-keys",
                headers={"Authorization": f"Bearer {jwt_token}"},
                json={
                    "name": "Test API Key",
                    "description": "Clé de test automatisé",
                    "rate_limit_per_minute": 60,
                    "expires_in_days": 30,
                },
                timeout=5,
            )

            if create_response.status_code == 201:
                api_key = create_response.json().get("api_key")
                self.log_test("Création clé API", True, f"Clé créée: {api_key[:20]}...")
                return True, api_key
            else:
                self.log_test(
                    "Création clé API",
                    False,
                    f"Code {create_response.status_code}",
                )
                return False, ""

        except Exception as e:
            self.log_test("Création clé API", False, f"Erreur: {str(e)}")
            return False, ""

    def test_api_key_usage(self, api_key: str) -> bool:
        """Test 6: Use an API key to reach an endpoint."""
        print("\n Test 6: Utilisation d'une clé API")

        if not api_key:
            self.log_test("Utilisation clé API", False, "Pas de clé disponible")
            return False

        try:
            response = requests.get(
                f"{self.base_url}/api/v1/clients",
                headers={"X-API-Key": api_key},
                timeout=5,
            )

            if response.status_code == 200:
                self.log_test("Utilisation clé API", True, "Clé acceptée")
                return True
            else:
                self.log_test(
                    "Utilisation clé API",
                    False,
                    f"Code {response.status_code}, clé refusée?",
                )
                return False

        except Exception as e:
            self.log_test("Utilisation clé API", False, f"Erreur: {str(e)}")
            return False

    def test_invalid_api_key(self) -> bool:
        """Test 7: An invalid key should be rejected."""
        print("\n Test 7: Rejet de clé API invalide")

        invalid_key = "sdk_live_invalid_key_12345"

        try:
            response = requests.get(
                f"{self.base_url}/api/v1/clients",
                headers={"X-API-Key": invalid_key},
                timeout=5,
            )

            if response.status_code == 401:
                self.log_test("Clé invalide rejetée", True, "Code 401 comme attendu")
                return True
            else:
                self.log_test(
                    "Clé invalide rejetée",
                    False,
                    f"Code {response.status_code} au lieu de 401",
                )
                return False

        except Exception as e:
            self.log_test("Clé invalide rejetée", False, f"Erreur: {str(e)}")
            return False

    def test_rate_limiting(self, api_key: str) -> bool:
        """Test 8: Rate limiting (optional, can take a while)."""
        print("\n Test 8: Rate limiting (test simple)")

        if not api_key:
            self.log_test("Rate limiting", False, "Pas de clé disponible")
            return False

        print("   Envoi de 70 requêtes rapides...")

        rate_limited = False
        for i in range(70):
            try:
                response = requests.get(
                    f"{self.base_url}/health",
                    headers={"X-API-Key": api_key},
                    timeout=1,
                )

                if response.status_code == 429:
                    rate_limited = True
                    print(f"   Rate limit atteint à la requête {i + 1}")
                    break

            except Exception:
                pass

        if rate_limited:
            self.log_test("Rate limiting", True, "Rate limit détecté")
            return True
        else:
            self.log_test(
                "Rate limiting",
                True,
                "Aucun rate limit détecté (peut être normal si pas implémenté)",
            )
            return True

    def print_summary(self):
        """Print the test summary and return an exit code."""
        print("\n" + "=" * 60)
        print(" RÉSUMÉ DES TESTS")
        print("=" * 60)

        total = self.results["passed"] + self.results["failed"]
        success_rate = (self.results["passed"] / total * 100) if total > 0 else 0

        print(f"\nTotal: {total} tests")
        print(f" Réussis: {self.results['passed']}")
        print(f" Échoués: {self.results['failed']}")
        print(f"Taux de réussite: {success_rate:.1f}%\n")

        if self.results["failed"] == 0:
            print("🎉 Tous les tests sont passés ! Sécurité OK.")
            return 0
        else:
            print(" Certains tests ont échoué. Vérifiez la configuration.")
            return 1


def main():
    parser = argparse.ArgumentParser(
        description="Test automatisé de la sécurité de l'API"
    )

    parser.add_argument(
        "--url",
        required=True,
        help="URL de base de l'API (ex: http://localhost:8000)",
    )

    parser.add_argument(
        "--swagger-user", required=True, help="Utilisateur Swagger pour les tests"
    )

    parser.add_argument(
        "--swagger-pass", required=True, help="Mot de passe Swagger pour les tests"
    )

    parser.add_argument(
        "--skip-rate-limit",
        action="store_true",
        help="Sauter le test de rate limiting (long)",
    )

    args = parser.parse_args()

    print(" Démarrage des tests de sécurité")
    print(f" URL cible: {args.url}\n")

    tester = SecurityTester(args.url)

    tester.test_swagger_without_auth()
    tester.test_swagger_with_auth(args.swagger_user, args.swagger_pass)
    tester.test_api_without_auth()
    tester.test_health_endpoint_public()

    success, api_key = tester.test_api_key_creation(
        args.swagger_user, args.swagger_pass
    )

    if success and api_key:
        tester.test_api_key_usage(api_key)
        tester.test_invalid_api_key()

        if not args.skip_rate_limit:
            tester.test_rate_limiting(api_key)
        else:
            print("\n Test de rate limiting sauté")
    else:
        print("\n Tests avec clé API sautés (création échouée)")

    exit_code = tester.print_summary()
    sys.exit(exit_code)


if __name__ == "__main__":
    main()

@@ -5,12 +5,10 @@ import jwt
import secrets
import hashlib

from config.config import settings

SECRET_KEY = settings.jwt_secret
ALGORITHM = settings.jwt_algorithm
ACCESS_TOKEN_EXPIRE_MINUTES = settings.access_token_expire_minutes
REFRESH_TOKEN_EXPIRE_DAYS = settings.refresh_token_expire_days
SECRET_KEY = "VOTRE_SECRET_KEY_A_METTRE_EN_.ENV"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 10080
REFRESH_TOKEN_EXPIRE_DAYS = 7

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

@@ -69,13 +67,9 @@ def decode_token(token: str) -> Optional[Dict]:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        return payload
    except jwt.ExpiredSignatureError:
        raise jwt.InvalidTokenError("Token expiré")
    except jwt.DecodeError:
        raise jwt.InvalidTokenError("Token invalide (format incorrect)")
    except jwt.InvalidTokenError as e:
        raise jwt.InvalidTokenError(f"Token invalide: {str(e)}")
    except Exception as e:
        raise jwt.InvalidTokenError(f"Erreur lors du décodage du token: {str(e)}")
        return None
    except jwt.JWTError:
        return None
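    # Note: with PyJWT (the plain `import jwt` used above), there is no
    # `jwt.JWTError`; the base exception is `jwt.PyJWTError`, so the clause
    # above would itself raise an AttributeError the first time a bad token
    # arrives (`JWTError` belongs to the python-jose package). A quiet
    # variant that catches the whole PyJWT hierarchy would look like:
    #
    #   try:
    #       return jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    #   except jwt.PyJWTError:
    #       return None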


def validate_password_strength(password: str) -> tuple[bool, str]:

@@ -1,223 +0,0 @@
import secrets
import hashlib
import json
from datetime import datetime, timedelta
from typing import Optional, List, Dict
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, or_
import logging

from database.models.api_key import ApiKey

logger = logging.getLogger(__name__)


class ApiKeyService:
    """API key management service."""

    def __init__(self, session: AsyncSession):
        self.session = session

    @staticmethod
    def generate_api_key() -> str:
        """Generate a unique, cryptographically secure API key."""
        random_part = secrets.token_urlsafe(32)
        return f"sdk_live_{random_part}"

    @staticmethod
    def hash_api_key(api_key: str) -> str:
        """Hash the API key for secure storage."""
        return hashlib.sha256(api_key.encode()).hexdigest()

    @staticmethod
    def get_key_prefix(api_key: str) -> str:
        """Extract the key prefix used for identification."""
        return api_key[:12] if len(api_key) >= 12 else api_key
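
    # Storage pattern: only the SHA-256 digest of the key is persisted
    # (ApiKey.key_hash), plus a short prefix for display; verification
    # re-hashes the presented key and looks the digest up, so a database
    # leak never exposes usable keys. Illustrative, hypothetical values:
    #   generate_api_key()  -> "sdk_live_<43 url-safe chars>" (32 bytes of entropy)
    #   hash_api_key(key)   -> 64-character hex digest stored in the database
    #   get_key_prefix(key) -> "sdk_live_..." (first 12 characters) shown in listings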

    async def create_api_key(
        self,
        name: str,
        description: Optional[str] = None,
        created_by: str = "system",
        user_id: Optional[str] = None,
        expires_in_days: Optional[int] = None,
        rate_limit_per_minute: int = 60,
        allowed_endpoints: Optional[List[str]] = None,
    ) -> tuple[ApiKey, str]:
        api_key_plain = self.generate_api_key()
        key_hash = self.hash_api_key(api_key_plain)
        key_prefix = self.get_key_prefix(api_key_plain)

        expires_at = None
        if expires_in_days:
            expires_at = datetime.now() + timedelta(days=expires_in_days)

        api_key_obj = ApiKey(
            key_hash=key_hash,
            key_prefix=key_prefix,
            name=name,
            description=description,
            created_by=created_by,
            user_id=user_id,
            expires_at=expires_at,
            rate_limit_per_minute=rate_limit_per_minute,
            allowed_endpoints=json.dumps(allowed_endpoints)
            if allowed_endpoints
            else None,
        )

        self.session.add(api_key_obj)
        await self.session.commit()
        await self.session.refresh(api_key_obj)

        logger.info(f" Clé API créée: {name} (prefix: {key_prefix})")

        return api_key_obj, api_key_plain

    async def verify_api_key(self, api_key_plain: str) -> Optional[ApiKey]:
        key_hash = self.hash_api_key(api_key_plain)

        result = await self.session.execute(
            select(ApiKey).where(
                and_(
                    ApiKey.key_hash == key_hash,
                    ApiKey.is_active,
                    ApiKey.revoked_at.is_(None),
                    or_(
                        ApiKey.expires_at.is_(None), ApiKey.expires_at > datetime.now()
                    ),
                )
            )
        )

        api_key_obj = result.scalar_one_or_none()

        if api_key_obj:
            api_key_obj.total_requests += 1
            api_key_obj.last_used_at = datetime.now()
            await self.session.commit()

            logger.debug(f" Clé API validée: {api_key_obj.name}")
        else:
            logger.warning(" Clé API invalide ou expirée")

        return api_key_obj
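
    # Side effect worth noting: verify_api_key increments total_requests and
    # commits on every successful lookup. A hypothetical FastAPI dependency
    # built on it (sketch only; `get_session` is assumed to exist elsewhere):
    #
    #   from fastapi import Depends, Header, HTTPException
    #
    #   async def require_api_key(
    #       x_api_key: str = Header(...),
    #       session: AsyncSession = Depends(get_session),
    #   ) -> ApiKey:
    #       key = await ApiKeyService(session).verify_api_key(x_api_key)
    #       if not key:
    #           raise HTTPException(status_code=401, detail="Clé API invalide")
    #       return key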

    async def list_api_keys(
        self,
        include_revoked: bool = False,
        user_id: Optional[str] = None,
    ) -> List[ApiKey]:
        """List API keys."""
        query = select(ApiKey)

        if not include_revoked:
            query = query.where(ApiKey.revoked_at.is_(None))

        if user_id:
            query = query.where(ApiKey.user_id == user_id)

        query = query.order_by(ApiKey.created_at.desc())

        result = await self.session.execute(query)
        return list(result.scalars().all())

    async def revoke_api_key(self, key_id: str) -> bool:
        """Revoke an API key."""
        result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
        api_key_obj = result.scalar_one_or_none()

        if not api_key_obj:
            return False

        api_key_obj.is_active = False
        api_key_obj.revoked_at = datetime.now()
        await self.session.commit()

        logger.info(f"🗑️ Clé API révoquée: {api_key_obj.name}")
        return True

    async def get_by_id(self, key_id: str) -> Optional[ApiKey]:
        """Fetch an API key by its ID."""
        result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
        return result.scalar_one_or_none()

    async def check_rate_limit(self, api_key_obj: ApiKey) -> tuple[bool, Dict]:
        return True, {
            "allowed": True,
            "limit": api_key_obj.rate_limit_per_minute,
            "remaining": api_key_obj.rate_limit_per_minute,
        }
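
    # check_rate_limit above is a stub: it always allows the request and
    # reports the full quota as remaining. A minimal in-memory sliding-window
    # sketch (hypothetical, per-process only, no persistence):
    #
    #   import time
    #   from collections import defaultdict, deque
    #
    #   _hits: dict = defaultdict(deque)
    #
    #   def allow(key_id: str, limit_per_minute: int) -> bool:
    #       now = time.monotonic()
    #       window = _hits[key_id]
    #       while window and now - window[0] > 60:  # drop hits older than 60 s
    #           window.popleft()
    #       if len(window) >= limit_per_minute:
    #           return False
    #       window.append(now)
    #       return True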

    async def check_endpoint_access(self, api_key_obj: ApiKey, endpoint: str) -> bool:
        if not api_key_obj.allowed_endpoints:
            logger.debug(
                f"🔓 API Key {api_key_obj.name}: Aucune restriction d'endpoint"
            )
            return True

        try:
            allowed = json.loads(api_key_obj.allowed_endpoints)

            if "*" in allowed or "/*" in allowed:
                logger.debug(f"🔓 API Key {api_key_obj.name}: Accès global autorisé")
                return True

            for pattern in allowed:
                if pattern == endpoint:
                    logger.debug(f" Match exact: {pattern} == {endpoint}")
                    return True

                if pattern.endswith("/*"):
                    base = pattern[:-2]  # "/clients/*" → "/clients"
                    if endpoint == base or endpoint.startswith(base + "/"):
                        logger.debug(f" Match wildcard: {pattern} ↔ {endpoint}")
                        return True

                elif pattern.endswith("*"):
                    base = pattern[:-1]  # "/clients*" → "/clients"
                    if endpoint.startswith(base):
                        logger.debug(f" Match prefix: {pattern} ↔ {endpoint}")
                        return True

            logger.warning(
                f" API Key {api_key_obj.name}: Accès refusé à {endpoint}\n"
                f" Endpoints autorisés: {allowed}"
            )
            return False

        except json.JSONDecodeError:
            logger.error(f" Erreur parsing allowed_endpoints pour {api_key_obj.id}")
            return False
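
    # Matching examples for the rules above (illustrative):
    #   "/clients"   matches only the exact path "/clients"
    #   "/clients/*" matches "/clients" and "/clients/42", but not "/clients2"
    #   "/clients*"  is a prefix match: "/clients", "/clients2", "/clients/42"
    #   "*" or "/*"  disables the restriction entirely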


def api_key_to_response(api_key_obj: ApiKey, show_key: bool = False) -> Dict:
    """Convert an ApiKey object into an API response dict."""

    allowed_endpoints = None
    if api_key_obj.allowed_endpoints:
        try:
            allowed_endpoints = json.loads(api_key_obj.allowed_endpoints)
        except json.JSONDecodeError:
            pass

    is_expired = False
    if api_key_obj.expires_at:
        is_expired = api_key_obj.expires_at < datetime.now()

    return {
        "id": api_key_obj.id,
        "name": api_key_obj.name,
        "description": api_key_obj.description,
        "key_prefix": api_key_obj.key_prefix,
        "is_active": api_key_obj.is_active,
        "is_expired": is_expired,
        "rate_limit_per_minute": api_key_obj.rate_limit_per_minute,
        "allowed_endpoints": allowed_endpoints,
        "total_requests": api_key_obj.total_requests,
        "last_used_at": api_key_obj.last_used_at,
        "created_at": api_key_obj.created_at,
        "expires_at": api_key_obj.expires_at,
        "revoked_at": api_key_obj.revoked_at,
        "created_by": api_key_obj.created_by,
    }

@@ -6,7 +6,7 @@ import httpx
from datetime import datetime
from typing import Optional, Tuple, List
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import false, select, update, and_
from sqlalchemy import false, select, true, update, and_
import logging

from config.config import settings

@@ -3,7 +3,7 @@ import logging
import requests
from pathlib import Path
from datetime import datetime
from typing import Optional, Tuple, Dict, List
from typing import Optional, Tuple
from sqlalchemy.ext.asyncio import AsyncSession

logger = logging.getLogger(__name__)

@@ -13,187 +13,46 @@ SIGNED_DOCS_DIR.mkdir(parents=True, exist_ok=True)


class UniversignDocumentService:
    """Universign signed-documents management service - FIXED VERSION"""
    """Universign signed-documents management service"""

    def __init__(self, api_url: str, api_key: str, timeout: int = 60):
        self.api_url = api_url.rstrip("/")
    def __init__(self, api_key: str, timeout: int = 60):
        self.api_key = api_key
        self.timeout = timeout
        self.auth = (api_key, "")

    def fetch_transaction_documents(self, transaction_id: str) -> Optional[List[Dict]]:
        try:
            logger.info(f" Récupération documents pour transaction: {transaction_id}")

            response = requests.get(
                f"{self.api_url}/transactions/{transaction_id}",
                auth=self.auth,
                timeout=self.timeout,
                headers={"Accept": "application/json"},
            )

            if response.status_code == 200:
                data = response.json()
                documents = data.get("documents", [])

                logger.info(f"{len(documents)} document(s) trouvé(s)")

                for idx, doc in enumerate(documents):
                    logger.debug(
                        f" Document {idx}: id={doc.get('id')}, "
                        f"name={doc.get('name')}, status={doc.get('status')}"
                    )

                return documents

            elif response.status_code == 404:
                logger.warning(
                    f"Transaction {transaction_id} introuvable sur Universign"
                )
                return None

            else:
                logger.error(
                    f"Erreur HTTP {response.status_code} pour {transaction_id}: "
                    f"{response.text[:500]}"
                )
                return None

        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout récupération transaction {transaction_id}")
            return None
        except Exception as e:
            logger.error(f" Erreur fetch documents: {e}", exc_info=True)
            return None

    def download_signed_document(
        self, transaction_id: str, document_id: str
    ) -> Optional[bytes]:
        try:
            download_url = (
                f"{self.api_url}/transactions/{transaction_id}"
                f"/documents/{document_id}/download"
            )

            logger.info(f"Téléchargement depuis: {download_url}")

            response = requests.get(
                download_url,
                auth=self.auth,
                timeout=self.timeout,
                stream=True,
            )

            if response.status_code == 200:
                content_type = response.headers.get("Content-Type", "")
                content_length = response.headers.get("Content-Length", "unknown")

                logger.info(
                    f"Téléchargement réussi: "
                    f"Content-Type={content_type}, Size={content_length}"
                )

                if (
                    "pdf" not in content_type.lower()
                    and "octet-stream" not in content_type.lower()
                ):
                    logger.warning(
                        f"Type de contenu inattendu: {content_type}. "
                        f"Tentative de lecture quand même..."
                    )

                content = response.content

                if len(content) < 1024:
                    logger.error(f" Document trop petit: {len(content)} octets")
                    return None

                return content

            elif response.status_code == 404:
                logger.error(
                    f" Document {document_id} introuvable pour transaction {transaction_id}"
                )
                return None

            elif response.status_code == 403:
                logger.error(
                    f" Accès refusé au document {document_id}. "
                    f"Vérifiez que la transaction est bien signée."
                )
                return None

            else:
                logger.error(
                    f" Erreur HTTP {response.status_code}: {response.text[:500]}"
                )
                return None

        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout téléchargement document {document_id}")
            return None
        except Exception as e:
            logger.error(f" Erreur téléchargement: {e}", exc_info=True)
            return None

    async def download_and_store_signed_document(
        self, session: AsyncSession, transaction, force: bool = False
    ) -> Tuple[bool, Optional[str]]:
        if not force and transaction.signed_document_path:
            if os.path.exists(transaction.signed_document_path):
                logger.debug(
                    f"Document déjà téléchargé: {transaction.transaction_id}"
                )
                logger.debug(f"Document déjà téléchargé : {transaction.transaction_id}")
                return True, None

        if not transaction.document_url:
            error = "Aucune URL de document disponible"
            logger.warning(f"{error} pour {transaction.transaction_id}")
            transaction.download_error = error
            await session.commit()
            return False, error

        try:
            logger.info(f"Téléchargement document signé : {transaction.transaction_id}")

            transaction.download_attempts += 1

            try:
                logger.info(
                    f"Récupération document signé pour: {transaction.transaction_id}"
                response = requests.get(
                    transaction.document_url,
                    auth=self.auth,
                    timeout=self.timeout,
                    stream=True,
                )

            documents = self.fetch_transaction_documents(transaction.transaction_id)
            response.raise_for_status()

            if not documents:
                error = "Aucun document trouvé dans la transaction Universign"
                logger.warning(f"{error}")
                transaction.download_error = error
                await session.commit()
                return False, error

            document_id = None
            for doc in documents:
                doc_id = doc.get("id")
                doc_status = doc.get("status", "").lower()

                if doc_status in ["signed", "completed", "closed"]:
                    document_id = doc_id
                    logger.info(
                        f"Document signé trouvé: {doc_id} (status: {doc_status})"
                    )
                    break

                if document_id is None:
                    document_id = doc_id

            if not document_id:
                error = "Impossible de déterminer l'ID du document à télécharger"
                logger.error(f" {error}")
                transaction.download_error = error
                await session.commit()
                return False, error

            if hasattr(transaction, "universign_document_id"):
                transaction.universign_document_id = document_id

            pdf_content = self.download_signed_document(
                transaction_id=transaction.transaction_id, document_id=document_id
            )

            if not pdf_content:
                error = f"Échec téléchargement document {document_id}"
                logger.error(f" {error}")
            content_type = response.headers.get("Content-Type", "")
            if "pdf" not in content_type.lower():
                error = f"Type de contenu invalide : {content_type}"
                logger.error(error)
                transaction.download_error = error
                await session.commit()
                return False, error

@@ -202,28 +61,37 @@ class UniversignDocumentService:
            file_path = SIGNED_DOCS_DIR / filename

            with open(file_path, "wb") as f:
                f.write(pdf_content)
                for chunk in response.iter_content(chunk_size=8192):
                    f.write(chunk)

            file_size = os.path.getsize(file_path)

            if file_size < 1024:  # less than 1 KB is suspicious
                error = f"Fichier trop petit : {file_size} octets"
                logger.error(error)
                os.remove(file_path)
                transaction.download_error = error
                await session.commit()
                return False, error

            transaction.signed_document_path = str(file_path)
            transaction.signed_document_downloaded_at = datetime.now()
            transaction.signed_document_size_bytes = file_size
            transaction.download_error = None

            transaction.document_url = (
                f"{self.api_url}/transactions/{transaction.transaction_id}"
                f"/documents/{document_id}/download"
            )

            await session.commit()

            logger.info(
                f"Document signé téléchargé: {filename} ({file_size / 1024:.1f} KB)"
            )
            logger.info(f"Document téléchargé : {filename} ({file_size / 1024:.1f} KB)")

            return True, None

        except requests.exceptions.RequestException as e:
            error = f"Erreur HTTP : {str(e)}"
            logger.error(f"{error} pour {transaction.transaction_id}")
            transaction.download_error = error
            await session.commit()
            return False, error

        except OSError as e:
            error = f"Erreur filesystem : {str(e)}"
            logger.error(f"{error}")

@@ -239,123 +107,50 @@ class UniversignDocumentService:
            return False, error

    def _generate_filename(self, transaction) -> str:
        """Generate a unique filename for the signed document."""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")

        tx_id = transaction.transaction_id.replace("tr_", "")
        filename = f"{transaction.sage_document_id}_{tx_id}_{timestamp}_signed.pdf"

        filename = f"{transaction.sage_document_id}_{tx_id}_{timestamp}.pdf"

        return filename
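
    # Example output (illustrative values only): transaction "tr_abc123" for
    # Sage document "BC00042", saved on 2025-01-31 at 14:05:09, yields
    # "BC00042_abc123_20250131_140509_signed.pdf" on one side of this diff
    # and "BC00042_abc123_20250131_140509.pdf" on the other.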

    def get_document_path(self, transaction) -> Optional[Path]:
        """Return the signed document's path if it exists."""
        if not transaction.signed_document_path:
            return None

        path = Path(transaction.signed_document_path)
        if path.exists():
            return path

        return None

    async def cleanup_old_documents(self, days_to_keep: int = 90) -> Tuple[int, int]:
        """Delete old signed documents."""
        from datetime import timedelta

        cutoff_date = datetime.now() - timedelta(days=days_to_keep)

        deleted = 0
        size_freed = 0

        for file_path in SIGNED_DOCS_DIR.glob("*.pdf"):
            try:
                file_time = datetime.fromtimestamp(os.path.getmtime(file_path))

                if file_time < cutoff_date:
                    size_freed += os.path.getsize(file_path)
                    os.remove(file_path)
                    deleted += 1
                    logger.info(f"🗑️ Supprimé : {file_path.name}")

            except Exception as e:
                logger.error(f"Erreur suppression {file_path}: {e}")

        size_freed_mb = size_freed / (1024 * 1024)

        logger.info(
            f"Nettoyage terminé : {deleted} fichiers supprimés "
            f"({size_freed_mb:.2f} MB libérés)"
        )

        return deleted, int(size_freed_mb)
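
    # cleanup_old_documents is not invoked anywhere in the hunks shown here;
    # it would need a periodic trigger. Hypothetical asyncio sketch:
    #
    #   async def cleanup_loop(service, interval_hours: int = 24):
    #       while True:
    #           await service.cleanup_old_documents(days_to_keep=90)
    #           await asyncio.sleep(interval_hours * 3600)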

    def diagnose_transaction(self, transaction_id: str) -> Dict:
        """Full diagnostic of a transaction, for debugging."""
        result = {
            "transaction_id": transaction_id,
            "api_url": self.api_url,
            "timestamp": datetime.now().isoformat(),
            "checks": {},
        }

        try:
            logger.info(f"Diagnostic transaction: {transaction_id}")

            response = requests.get(
                f"{self.api_url}/transactions/{transaction_id}",
                auth=self.auth,
                timeout=self.timeout,
            )

            result["checks"]["transaction_fetch"] = {
                "status_code": response.status_code,
                "success": response.status_code == 200,
            }

            if response.status_code != 200:
                result["checks"]["transaction_fetch"]["error"] = response.text[:500]
                return result

            data = response.json()

            result["checks"]["transaction_data"] = {
                "state": data.get("state"),
                "documents_count": len(data.get("documents", [])),
                "participants_count": len(data.get("participants", [])),
            }

            documents = data.get("documents", [])
            result["checks"]["documents"] = []

            for doc in documents:
                doc_info = {
                    "id": doc.get("id"),
                    "name": doc.get("name"),
                    "status": doc.get("status"),
                }

                if doc.get("id"):
                    download_url = (
                        f"{self.api_url}/transactions/{transaction_id}"
                        f"/documents/{doc['id']}/download"
                    )

                    try:
                        dl_response = requests.head(
                            download_url,
                            auth=self.auth,
                            timeout=10,
                        )
                        doc_info["download_check"] = {
                            "url": download_url,
                            "status_code": dl_response.status_code,
                            "accessible": dl_response.status_code in [200, 302],
                            "content_type": dl_response.headers.get("Content-Type"),
                        }
                    except Exception as e:
                        doc_info["download_check"] = {"error": str(e)}

                result["checks"]["documents"].append(doc_info)

            result["success"] = True

        except Exception as e:
            result["success"] = False
            result["error"] = str(e)

        return result

@@ -40,9 +40,7 @@ class UniversignSyncService:
        self.sage_client = None
        self.email_queue = None
        self.settings = None
        self.document_service = UniversignDocumentService(
            api_url=api_url, api_key=api_key, timeout=60
        )
        self.document_service = UniversignDocumentService(api_key=api_key, timeout=60)

    def configure(self, sage_client, email_queue, settings):
        self.sage_client = sage_client

@ -159,6 +157,7 @@ class UniversignSyncService:
|
|||
|
||||
return stats
|
||||
|
||||
# CORRECTION 1 : process_webhook dans universign_sync.py
|
||||
async def process_webhook(
|
||||
self, session: AsyncSession, payload: Dict, transaction_id: str = None
|
||||
) -> Tuple[bool, Optional[str]]:
|
||||
|
|
@ -166,7 +165,9 @@ class UniversignSyncService:
|
|||
Traite un webhook Universign - CORRECTION : meilleure gestion des payloads
|
||||
"""
|
||||
try:
|
||||
# Si transaction_id n'est pas fourni, essayer de l'extraire
|
||||
if not transaction_id:
|
||||
# Même logique que dans universign.py
|
||||
if (
|
||||
payload.get("type", "").startswith("transaction.")
|
||||
and "payload" in payload
|
||||
|
|
@ -192,6 +193,7 @@ class UniversignSyncService:
|
|||
f"📨 Traitement webhook: transaction={transaction_id}, event={event_type}"
|
||||
)
|
||||
|
||||
# Récupérer la transaction locale
|
||||
query = (
|
||||
select(UniversignTransaction)
|
||||
.options(selectinload(UniversignTransaction.signers))
|
||||
|
|
@ -204,20 +206,25 @@ class UniversignSyncService:
|
|||
logger.warning(f"Transaction {transaction_id} inconnue localement")
|
||||
return False, "Transaction inconnue"
|
||||
|
||||
# Marquer comme webhook reçu
|
||||
transaction.webhook_received = True
|
||||
|
||||
# Stocker l'ancien statut pour comparaison
|
||||
old_status = transaction.local_status.value
|
||||
|
||||
# Force la synchronisation complète
|
||||
success, error = await self.sync_transaction(
|
||||
session, transaction, force=True
|
||||
)
|
||||
|
||||
# Log du changement de statut
|
||||
if success and transaction.local_status.value != old_status:
|
||||
logger.info(
|
||||
f"Webhook traité: {transaction_id} | "
|
||||
f"{old_status} → {transaction.local_status.value}"
|
||||
)
|
||||
|
||||
# Enregistrer le log du webhook
|
||||
await self._log_sync_attempt(
|
||||
session=session,
|
||||
transaction=transaction,
|
||||
|
|
@ -239,6 +246,7 @@ class UniversignSyncService:
|
|||
logger.error(f"💥 Erreur traitement webhook: {e}", exc_info=True)
|
||||
return False, str(e)
|
||||
|
||||
# CORRECTION 2 : _sync_signers - Ne pas écraser les signers existants
|
||||
async def _sync_signers(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
|
|
@ -261,6 +269,7 @@ class UniversignSyncService:
|
|||
logger.warning(f"Signataire sans email à l'index {idx}, ignoré")
|
||||
continue
|
||||
|
||||
# PROTECTION : gérer les statuts inconnus
|
||||
raw_status = signer_data.get("status") or signer_data.get(
|
||||
"state", "waiting"
|
||||
)
|
||||
|
|
@ -291,6 +300,7 @@ class UniversignSyncService:
|
|||
if signer_data.get("name") and not signer.name:
|
||||
signer.name = signer_data.get("name")
|
||||
else:
|
||||
# Nouveau signer avec gestion d'erreur intégrée
|
||||
try:
|
||||
signer = UniversignSigner(
|
||||
id=f"{transaction.id}_signer_{idx}_{int(datetime.now().timestamp())}",
|
||||
|
|
@ -310,32 +320,39 @@ class UniversignSyncService:
|
|||
except Exception as e:
|
||||
logger.error(f"Erreur création signer {email}: {e}")
|
||||
|
||||
# CORRECTION 3 : Amélioration du logging dans sync_transaction
|
||||
async def sync_transaction(
|
||||
self,
|
||||
session,
|
||||
transaction,
|
||||
session: AsyncSession,
|
||||
transaction: UniversignTransaction,
|
||||
force: bool = False,
|
||||
):
|
||||
import json
|
||||
) -> Tuple[bool, Optional[str]]:
|
||||
"""
|
||||
CORRECTION : Meilleur logging et gestion d'erreurs
|
||||
"""
|
||||
|
||||
# Si statut final et pas de force, skip
|
||||
if is_final_status(transaction.local_status.value) and not force:
|
||||
logger.debug(
|
||||
f"⏭️ Skip {transaction.transaction_id}: statut final "
|
||||
f"{transaction.local_status.value}"
|
||||
f"⏭️ Skip {transaction.transaction_id}: statut final {transaction.local_status.value}"
|
||||
)
|
||||
transaction.needs_sync = False
|
||||
await session.commit()
|
||||
return True, None
|
||||
|
||||
logger.info(f"Synchronisation: {transaction.transaction_id}")
|
||||
# Récupération du statut distant
|
||||
logger.info(f"🔄 Synchronisation: {transaction.transaction_id}")
|
||||
|
||||
result = self.fetch_transaction_status(transaction.transaction_id)
|
||||
|
||||
if not result:
|
||||
error = "Échec récupération données Universign"
|
||||
logger.error(f"{error}: {transaction.transaction_id}")
|
||||
|
||||
# CORRECTION : Incrémenter les tentatives MÊME en cas d'échec
|
||||
transaction.sync_attempts += 1
|
||||
transaction.sync_error = error
|
||||
|
||||
await self._log_sync_attempt(session, transaction, "polling", False, error)
|
||||
await session.commit()
|
||||
return False, error
|
||||
|
|
@ -344,16 +361,18 @@ class UniversignSyncService:
|
|||
universign_data = result["transaction"]
|
||||
universign_status_raw = universign_data.get("state", "draft")
|
||||
|
||||
logger.info(f" Statut Universign brut: {universign_status_raw}")
|
||||
logger.info(f"📊 Statut Universign brut: {universign_status_raw}")
|
||||
|
||||
# Convertir le statut
|
||||
new_local_status = map_universign_to_local(universign_status_raw)
|
||||
previous_local_status = transaction.local_status.value
|
||||
|
||||
logger.info(
|
||||
f"Mapping: {universign_status_raw} (Universign) → "
|
||||
f"🔄 Mapping: {universign_status_raw} (Universign) → "
|
||||
f"{new_local_status} (Local) | Actuel: {previous_local_status}"
|
||||
)
|
||||
|
||||
# Vérifier la transition
|
||||
if not is_transition_allowed(previous_local_status, new_local_status):
|
||||
logger.warning(
|
||||
f"Transition refusée: {previous_local_status} → {new_local_status}"
|
||||
|
|
@ -367,15 +386,17 @@ class UniversignSyncService:
|
|||
|
||||
if status_changed:
|
||||
logger.info(
|
||||
f"CHANGEMENT DÉTECTÉ: {previous_local_status} → {new_local_status}"
|
||||
f"🔔 CHANGEMENT DÉTECTÉ: {previous_local_status} → {new_local_status}"
|
||||
)
|
||||
|
||||
# Mise à jour du statut Universign brut
|
||||
try:
|
||||
transaction.universign_status = UniversignTransactionStatus(
|
||||
universign_status_raw
|
||||
)
|
||||
except ValueError:
|
||||
logger.warning(f"Statut Universign inconnu: {universign_status_raw}")
|
||||
# Fallback intelligent
|
||||
if new_local_status == "SIGNE":
|
||||
transaction.universign_status = (
|
||||
UniversignTransactionStatus.COMPLETED
|
||||
|
|
@ -387,12 +408,14 @@ class UniversignSyncService:
|
|||
else:
|
||||
transaction.universign_status = UniversignTransactionStatus.STARTED
|
||||
|
||||
# Mise à jour du statut local
|
||||
transaction.local_status = LocalDocumentStatus(new_local_status)
|
||||
transaction.universign_status_updated_at = datetime.now()
|
||||
|
||||
# Mise à jour des dates
|
||||
if new_local_status == "EN_COURS" and not transaction.sent_at:
|
||||
transaction.sent_at = datetime.now()
|
||||
logger.info("Date d'envoi mise à jour")
|
||||
logger.info("📅 Date d'envoi mise à jour")
|
||||
|
||||
if new_local_status == "SIGNE" and not transaction.signed_at:
|
||||
transaction.signed_at = datetime.now()
|
||||
|
|
@ -404,20 +427,22 @@ class UniversignSyncService:
|
|||
|
||||
if new_local_status == "EXPIRE" and not transaction.expired_at:
|
||||
transaction.expired_at = datetime.now()
|
||||
logger.info("Date d'expiration mise à jour")
|
||||
logger.info("⏰ Date d'expiration mise à jour")
|
||||
|
||||
documents = universign_data.get("documents", [])
|
||||
if documents:
|
||||
first_doc = documents[0]
|
||||
logger.info(
|
||||
f"Document Universign trouvé: id={first_doc.get('id')}, "
|
||||
f"status={first_doc.get('status')}"
|
||||
)
|
||||
# Mise à jour des URLs
|
||||
if (
|
||||
universign_data.get("documents")
|
||||
and len(universign_data["documents"]) > 0
|
||||
):
|
||||
first_doc = universign_data["documents"][0]
|
||||
if first_doc.get("url"):
|
||||
transaction.document_url = first_doc["url"]
|
||||
|
||||
if new_local_status == "SIGNE" and not transaction.signed_document_path:
|
||||
logger.info("Déclenchement téléchargement document signé...")
|
||||
# NOUVEAU : Téléchargement automatique du document signé
|
||||
if new_local_status == "SIGNE" and transaction.document_url:
|
||||
if not transaction.signed_document_path:
|
||||
logger.info("Déclenchement téléchargement document signé")
|
||||
|
||||
try:
|
||||
(
|
||||
download_success,
|
||||
download_error,
|
||||
|
|
@ -426,20 +451,20 @@ class UniversignSyncService:
|
|||
)
|
||||
|
||||
if download_success:
|
||||
logger.info("Document signé téléchargé et stocké")
|
||||
logger.info("Document signé téléchargé avec succès")
|
||||
else:
|
||||
logger.warning(f"Échec téléchargement : {download_error}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f" Erreur téléchargement document: {e}", exc_info=True)
|
||||
|
||||
# Synchroniser les signataires
|
||||
await self._sync_signers(session, transaction, universign_data)
|
||||
|
||||
# Mise à jour des métadonnées de sync
|
||||
transaction.last_synced_at = datetime.now()
|
||||
transaction.sync_attempts += 1
|
||||
transaction.needs_sync = not is_final_status(new_local_status)
|
||||
transaction.sync_error = None
|
||||
transaction.sync_error = None # Effacer l'erreur précédente
|
||||
|
||||
# Log de la tentative
|
||||
await self._log_sync_attempt(
|
||||
session=session,
|
||||
transaction=transaction,
|
||||
|
|
@@ -452,15 +477,15 @@ class UniversignSyncService:
                {
                    "status_changed": status_changed,
                    "universign_raw": universign_status_raw,
                    "documents_count": len(documents),
                    "response_time_ms": result.get("response_time_ms"),
                },
                default=str,
                default=str,  # Avoid serialization errors
            ),
        )

        await session.commit()

        # Run the post-change actions
        if status_changed:
            logger.info(f"🎬 Exécution actions pour statut: {new_local_status}")
            await self._execute_status_actions(
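The `default=str` argument is what keeps `json.dumps` from raising on values it cannot serialise natively, datetimes in particular. A standalone illustration (the payload keys are borrowed from the hunk above):

import json
from datetime import datetime

payload = {"status_changed": True, "synced_at": datetime.now()}

# Without default=str this raises "TypeError: Object of type datetime
# is not JSON serializable"; with it, the datetime is stringified.
print(json.dumps(payload, default=str))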
@@ -478,7 +503,7 @@ class UniversignSyncService:
            error_msg = f"Erreur lors de la synchronisation: {str(e)}"
            logger.error(error_msg, exc_info=True)

            transaction.sync_error = error_msg[:1000]
            transaction.sync_error = error_msg[:1000]  # Truncate if too long
            transaction.sync_attempts += 1

            await self._log_sync_attempt(
@@ -488,50 +513,6 @@ class UniversignSyncService:

        return False, error_msg

    async def _sync_transaction_documents_corrected(
        self, session, transaction, universign_data: dict, new_local_status: str
    ):
        documents = universign_data.get("documents", [])

        if documents:
            first_doc = documents[0]
            first_doc_id = first_doc.get("id")

            if first_doc_id:
                if hasattr(transaction, "universign_document_id"):
                    transaction.universign_document_id = first_doc_id

                logger.info(
                    f"Document Universign: id={first_doc_id}, "
                    f"name={first_doc.get('name')}, status={first_doc.get('status')}"
                )
        else:
            logger.debug("Aucun document dans la réponse Universign")

        if new_local_status == "SIGNE":
            if not transaction.signed_document_path:
                logger.info("Déclenchement téléchargement document signé...")

                try:
                    (
                        download_success,
                        download_error,
                    ) = await self.document_service.download_and_store_signed_document(
                        session=session, transaction=transaction, force=False
                    )

                    if download_success:
                        logger.info("Document signé téléchargé avec succès")
                    else:
                        logger.warning(f"Échec téléchargement: {download_error}")

                except Exception as e:
                    logger.error(f"Erreur téléchargement document: {e}", exc_info=True)
            else:
                logger.debug(
                    f"Document déjà téléchargé: {transaction.signed_document_path}"
                )

    async def _log_sync_attempt(
        self,
        session: AsyncSession,
@@ -1,15 +0,0 @@
from pathlib import Path


def supprimer_commentaires_ligne(fichier):
    path = Path(fichier)
    lignes = path.read_text(encoding="utf-8").splitlines()
    # Drop every line whose first non-blank character is '#'
    # (note: this also removes shebang and encoding declaration lines).
    lignes_sans_commentaires = [
        line for line in lignes if not line.lstrip().startswith("#")
    ]
    path.write_text("\n".join(lignes_sans_commentaires), encoding="utf-8")


if __name__ == "__main__":
    base_dir = Path(__file__).resolve().parent.parent
    fichier_api = base_dir / "data/data.py"

    supprimer_commentaires_ligne(fichier_api)
@@ -1,52 +0,0 @@
import ast
import os
import textwrap

SOURCE_FILE = "main.py"
MODELS_DIR = "../models"

os.makedirs(MODELS_DIR, exist_ok=True)

with open(SOURCE_FILE, "r", encoding="utf-8") as f:
    source_code = f.read()

tree = ast.parse(source_code)

pydantic_classes = []
other_nodes = []

# Split top-level nodes: Pydantic models go to their own files,
# everything else stays in main.py.
for node in tree.body:
    if isinstance(node, ast.ClassDef):
        if any(
            isinstance(base, ast.Name) and base.id == "BaseModel" for base in node.bases
        ):
            pydantic_classes.append(node)
            continue
    other_nodes.append(node)

imports = """
from pydantic import BaseModel, Field
from typing import Optional, List
"""

for cls in pydantic_classes:
    class_name = cls.name
    file_name = f"{class_name.lower()}.py"
    file_path = os.path.join(MODELS_DIR, file_name)

    class_code = ast.get_source_segment(source_code, cls)
    class_code = textwrap.dedent(class_code)

    with open(file_path, "w", encoding="utf-8") as f:
        f.write(imports.strip() + "\n\n")
        f.write(class_code)

    print(f"✅ Modèle extrait : {class_name} → {file_path}")

# Rewrite main.py without the extracted classes
# (ast.unparse requires Python 3.9+).
new_tree = ast.Module(body=other_nodes, type_ignores=[])
new_source = ast.unparse(new_tree)

with open(SOURCE_FILE, "w", encoding="utf-8") as f:
    f.write(new_source)

print("\n🎉 Extraction terminée")
@@ -1,136 +0,0 @@
from fastapi import HTTPException
from typing import Optional
import httpx
import logging

from schemas import EntrepriseSearch


logger = logging.getLogger(__name__)


def calculer_tva_intracommunautaire(siren: str) -> Optional[str]:
    try:
        siren_clean = siren.replace(" ", "").strip()

        if not siren_clean.isdigit() or len(siren_clean) != 9:
            logger.warning(f"SIREN invalide: {siren}")
            return None

        siren_int = int(siren_clean)

        # French intra-community VAT key: (12 + 3 * (SIREN mod 97)) mod 97
        cle = (12 + 3 * (siren_int % 97)) % 97

        cle_str = f"{cle:02d}"

        return f"FR{cle_str}{siren_clean}"

    except Exception as e:
        logger.error(f"Erreur calcul TVA pour SIREN {siren}: {e}")
        return None
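A quick worked example of the key computation above, using the placeholder SIREN 123456789 (not a real company):

# 123456789 % 97 = 39, and (12 + 3 * 39) % 97 = 129 % 97 = 32,
# so the key is "32"; spaces in the input are stripped first.
assert calculer_tva_intracommunautaire("123 456 789") == "FR32123456789"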
def formater_adresse(siege_data: dict) -> str:
    try:
        adresse_parts = []

        if siege_data.get("numero_voie"):
            adresse_parts.append(siege_data["numero_voie"])

        if siege_data.get("type_voie"):
            adresse_parts.append(siege_data["type_voie"])

        if siege_data.get("libelle_voie"):
            adresse_parts.append(siege_data["libelle_voie"])

        if siege_data.get("code_postal"):
            adresse_parts.append(siege_data["code_postal"])

        if siege_data.get("libelle_commune"):
            adresse_parts.append(siege_data["libelle_commune"].upper())

        return " ".join(adresse_parts)

    except Exception as e:
        logger.error(f"Erreur formatage adresse: {e}")
        return ""
async def rechercher_entreprise_api(query: str, per_page: int = 5) -> dict:
    api_url = "https://recherche-entreprises.api.gouv.fr/search"

    params = {
        "q": query,
        "per_page": per_page,
        "limite_etablissements": 5,
    }

    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.get(api_url, params=params)

            if response.status_code == 429:
                logger.warning("Rate limit atteint (7 req/s)")
                raise HTTPException(
                    status_code=429,
                    detail="Trop de requêtes. Veuillez réessayer dans 1 seconde.",
                )

            if response.status_code == 503:
                logger.error("API Sirene indisponible (503)")
                raise HTTPException(
                    status_code=503,
                    detail="Service de recherche momentanément indisponible.",
                )

            response.raise_for_status()
            return response.json()

    except httpx.TimeoutException:
        logger.error(f"Timeout lors de la recherche: {query}")
        raise HTTPException(
            status_code=504, detail="Délai d'attente dépassé pour l'API de recherche."
        )

    except httpx.HTTPError as e:
        logger.error(f"Erreur HTTP API Sirene: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Erreur lors de la communication avec l'API: {str(e)}",
        )
def mapper_resultat_api(entreprise_data: dict) -> Optional[EntrepriseSearch]:
    try:
        siren = entreprise_data.get("siren")

        if not siren:
            logger.warning("Entreprise sans SIREN, ignorée")
            return None

        tva_number = calculer_tva_intracommunautaire(siren)

        if not tva_number:
            logger.warning(f"Impossible de calculer TVA pour SIREN: {siren}")
            return None

        siege = entreprise_data.get("siege", {})

        etat_admin = entreprise_data.get("etat_administratif", "A")
        is_active = etat_admin == "A"

        return EntrepriseSearch(
            company_name=entreprise_data.get("nom_complet", ""),
            siren=siren,
            vat_number=tva_number,
            address=formater_adresse(siege),
            naf_code=entreprise_data.get("activite_principale", ""),
            is_active=is_active,
            siret_siege=siege.get("siret"),
            code_postal=siege.get("code_postal"),
            ville=siege.get("libelle_commune"),
        )

    except Exception as e:
        logger.error(f"Erreur mapping entreprise: {e}", exc_info=True)
        return None
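For context, a sketch of how these three helpers chain together in a search endpoint. The function name and the "results" key of the API response are assumptions, not shown in this diff:

# Hypothetical wiring of the helpers above.
async def search_entreprises(q: str) -> list:
    raw = await rechercher_entreprise_api(q, per_page=5)
    resultats = [mapper_resultat_api(e) for e in raw.get("results", [])]
    # Drop entries that could not be mapped (missing SIREN, bad VAT key).
    return [r for r in resultats if r is not None]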
@@ -290,11 +290,15 @@ def _preparer_lignes_document(lignes: List) -> List[Dict]:


UNIVERSIGN_TO_LOCAL: Dict[str, str] = {
    # Initial states
    "draft": "EN_ATTENTE",
    "ready": "EN_ATTENTE",
    # In progress
    "started": "EN_COURS",
    # Final states (success)
    "completed": "SIGNE",
    "closed": "SIGNE",
    # Final states (failure)
    "refused": "REFUSE",
    "expired": "EXPIRE",
    "canceled": "REFUSE",
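A minimal sketch of how such a table is typically consumed; the helper name and the "ERREUR" fallback are assumptions (only the mapping itself comes from the diff):

def map_universign_status(raw_status: str) -> str:
    # Hypothetical lookup: normalise case and fall back to a generic
    # error state for any status the table does not cover.
    return UNIVERSIGN_TO_LOCAL.get(raw_status.lower(), "ERREUR")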
@@ -419,13 +423,13 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
    "SIGNE": {
        "fr": "Signé avec succès",
        "en": "Successfully signed",
        "icon": "",
        "icon": "✅",
        "color": "green",
    },
    "REFUSE": {
        "fr": "Signature refusée",
        "en": "Signature refused",
        "icon": "",
        "icon": "❌",
        "color": "red",
    },
    "EXPIRE": {
@@ -437,7 +441,7 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
    "ERREUR": {
        "fr": "Erreur technique",
        "en": "Technical error",
        "icon": "",
        "icon": "⚠️",
        "color": "red",
    },
}
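The dict doubles as a message catalogue; a short illustrative lookup (the function is hypothetical, only the STATUS_MESSAGES structure comes from the diff):

def format_status(status: str, lang: str = "fr") -> str:
    # Hypothetical formatter over STATUS_MESSAGES as defined above.
    entry = STATUS_MESSAGES.get(status, STATUS_MESSAGES["ERREUR"])
    return f"{entry['icon']} {entry[lang]}".strip()

# format_status("SIGNE")             -> "✅ Signé avec succès"
# format_status("REFUSE", lang="en") -> "❌ Signature refused"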
@@ -90,13 +90,13 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
    "SIGNE": {
        "fr": "Signé avec succès",
        "en": "Successfully signed",
        "icon": "",
        "icon": "✅",
        "color": "green",
    },
    "REFUSE": {
        "fr": "Signature refusée",
        "en": "Signature refused",
        "icon": "",
        "icon": "❌",
        "color": "red",
    },
    "EXPIRE": {

@@ -108,7 +108,7 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
    "ERREUR": {
        "fr": "Erreur technique",
        "en": "Technical error",
        "icon": "",
        "icon": "⚠️",
        "color": "red",
    },
}