Compare commits

44 commits: 89510537b3 ... 5eec115d1d

| Author | SHA1 | Date |
|---|---|---|
| | 5eec115d1d | |
| | d89c9fd35b | |
| | 211dd4fd23 | |
| | 67ef83c4e3 | |
| | 82d1d92e58 | |
| | 28c8fb3008 | |
| | f8cec7ebc5 | |
| | 1a08894b47 | |
| | 3cdb490ee5 | |
| | c84e4ddc20 | |
| | 41ca202d4b | |
| | 918f5d3f19 | |
| | fa95d0d117 | |
| | a1150390f4 | |
| | 0001dbe634 | |
| | 5b584bf969 | |
| | 022149c237 | |
| | 72d1ac58d1 | |
| | cce1cdf76a | |
| | e51a5e0a0b | |
| | dd65ae4d96 | |
| | cc0062b3bc | |
| | 9bd0f62459 | |
| | e0f08fd83a | |
| | f59e56490c | |
| | 2aafd525cd | |
| | 17a4251eea | |
| | abc9ff820a | |
| | b85bd26dbe | |
| | 1164c7975a | |
| | a10fda072c | |
| | 9f6c1de8ef | |
| | 09eae50952 | |
| | 9f12727bd3 | |
| | 18603ded6e | |
| | 18d72b3bf9 | |
| | fdf359738b | |
| | ba9e474109 | |
| | c5c17fdd9b | |
| | c389129ae7 | |
| | 6b6246b6e5 | |
| | b3419eafaa | |
| | 795b848dff | |
| | e990cbdc08 | |
31 changed files with 2338 additions and 815 deletions
.gitignore (vendored): 4 changes

@@ -45,4 +45,6 @@ tools/
 .env.staging
 .env.production
 
 .trunk
+
+*clean*.py
@@ -7,7 +7,6 @@ class Settings(BaseSettings):
         env_file=".env", env_file_encoding="utf-8", case_sensitive=False, extra="ignore"
     )
 
-    # === JWT & Auth ===
     jwt_secret: str
     jwt_algorithm: str
     access_token_expire_minutes: int
@@ -21,15 +20,12 @@ class Settings(BaseSettings):
     SAGE_TYPE_BON_AVOIR: int = 50
     SAGE_TYPE_FACTURE: int = 60
 
-    # === Sage Gateway (Windows) ===
     sage_gateway_url: str
     sage_gateway_token: str
     frontend_url: str
 
-    # === Base de données ===
     database_url: str = "sqlite+aiosqlite:///./data/sage_dataven.db"
 
-    # === SMTP ===
     smtp_host: str
     smtp_port: int = 587
     smtp_user: str
@@ -37,21 +33,17 @@ class Settings(BaseSettings):
     smtp_from: str
     smtp_use_tls: bool = True
 
-    # === Universign ===
     universign_api_key: str
     universign_api_url: str
 
-    # === API ===
     api_host: str
     api_port: int
     api_reload: bool = False
 
-    # === Email Queue ===
     max_email_workers: int = 3
     max_retry_attempts: int = 3
     retry_delay_seconds: int = 3
 
-    # === CORS ===
     cors_origins: List[str] = ["*"]
 
 
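A minimal usage sketch, not part of the diff: the fields above are consumed elsewhere in this changeset through `from config.config import settings`, with values supplied by the `.env` file named in the model config.

```python
# Sketch only: reading the Settings instance used across this changeset.
# Assumes the mandatory variables (jwt_secret, smtp_host, ...) are provided
# by the environment or by the .env file referenced above.
from config.config import settings

print(settings.database_url)       # defaults to sqlite+aiosqlite:///./data/sage_dataven.db
print(settings.max_email_workers)  # defaults to 3
```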
config/cors_config.py (new file, 125 lines)

@@ -0,0 +1,125 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from typing import List
import os
import logging

logger = logging.getLogger(__name__)


def configure_cors_open(app: FastAPI):
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=False,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["*"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )

    logger.info(" CORS configuré: Mode OUVERT (sécurisé par API Keys)")
    logger.info(" - Origins: * (toutes)")
    logger.info(" - Headers: * (dont X-API-Key)")
    logger.info(" - Credentials: False")


def configure_cors_whitelist(app: FastAPI):
    allowed_origins_str = os.getenv("CORS_ALLOWED_ORIGINS", "")

    if allowed_origins_str:
        allowed_origins = [
            origin.strip()
            for origin in allowed_origins_str.split(",")
            if origin.strip()
        ]
    else:
        allowed_origins = ["*"]

    app.add_middleware(
        CORSMiddleware,
        allow_origins=allowed_origins,
        allow_credentials=True,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )

    logger.info(" CORS configuré: Mode WHITELIST")
    logger.info(f" - Origins autorisées: {len(allowed_origins)}")
    for origin in allowed_origins:
        logger.info(f"   • {origin}")


def configure_cors_regex(app: FastAPI):
    origin_regex = r"*"

    app.add_middleware(
        CORSMiddleware,
        allow_origin_regex=origin_regex,
        allow_credentials=True,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )

    logger.info(" CORS configuré: Mode REGEX")
    logger.info(f" - Pattern: {origin_regex}")


def configure_cors_hybrid(app: FastAPI):
    from starlette.middleware.base import BaseHTTPMiddleware

    class HybridCORSMiddleware(BaseHTTPMiddleware):
        def __init__(self, app, known_origins: List[str]):
            super().__init__(app)
            self.known_origins = set(known_origins)

        async def dispatch(self, request, call_next):
            origin = request.headers.get("origin")

            if origin in self.known_origins:
                response = await call_next(request)
                response.headers["Access-Control-Allow-Origin"] = origin
                response.headers["Access-Control-Allow-Credentials"] = "true"
                response.headers["Access-Control-Allow-Methods"] = (
                    "GET, POST, PUT, DELETE, PATCH, OPTIONS"
                )
                response.headers["Access-Control-Allow-Headers"] = (
                    "Content-Type, Authorization, X-API-Key"
                )
                return response

            response = await call_next(request)
            response.headers["Access-Control-Allow-Origin"] = "*"
            response.headers["Access-Control-Allow-Methods"] = (
                "GET, POST, PUT, DELETE, PATCH, OPTIONS"
            )
            response.headers["Access-Control-Allow-Headers"] = "*"
            return response

    known_origins = ["*"]

    app.add_middleware(HybridCORSMiddleware, known_origins=known_origins)

    logger.info(" CORS configuré: Mode HYBRIDE")
    logger.info(f" - Whitelist: {len(known_origins)} domaines")
    logger.info(" - Fallback: * (ouvert)")


def setup_cors(app: FastAPI, mode: str = "open"):
    if mode == "open":
        configure_cors_open(app)
    elif mode == "whitelist":
        configure_cors_whitelist(app)
    elif mode == "regex":
        configure_cors_regex(app)
    elif mode == "hybrid":
        configure_cors_hybrid(app)
    else:
        logger.warning(
            f" Mode CORS inconnu: {mode}. Utilisation de 'open' par défaut."
        )
        configure_cors_open(app)
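A hedged sketch of how `setup_cors` could be wired into the application; the actual entry point is not part of this diff, so the `CORS_MODE` variable name and the `app` instance are assumptions.

```python
# Sketch only: selecting one of the four CORS modes defined above at startup.
import os
from fastapi import FastAPI

from config.cors_config import setup_cors

app = FastAPI()
setup_cors(app, mode=os.getenv("CORS_MODE", "open"))  # "open", "whitelist", "regex" or "hybrid"
```

Note that `configure_cors_regex` passes `allow_origin_regex=r"*"`, which is not a valid regular expression (`re.compile("*")` raises an error); a pattern such as `r".*"` is presumably intended.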
@@ -1,94 +1,118 @@
-from fastapi import Depends, HTTPException, status
+from fastapi import Depends, HTTPException, status, Request
 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select
+from typing import Optional
+from jwt.exceptions import InvalidTokenError
 
 from database import get_session, User
 from security.auth import decode_token
-from typing import Optional
-from datetime import datetime
 
-security = HTTPBearer()
+security = HTTPBearer(auto_error=False)
 
 
-async def get_current_user(
-    credentials: HTTPAuthorizationCredentials = Depends(security),
-    session: AsyncSession = Depends(get_session),
-) -> User:
-    token = credentials.credentials
-
-    payload = decode_token(token)
-    if not payload:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Token invalide ou expiré",
-            headers={"WWW-Authenticate": "Bearer"},
-        )
-
-    if payload.get("type") != "access":
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Type de token incorrect",
-            headers={"WWW-Authenticate": "Bearer"},
-        )
-
-    user_id: str = payload.get("sub")
-    if not user_id:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Token malformé",
-            headers={"WWW-Authenticate": "Bearer"},
-        )
-
-    result = await session.execute(select(User).where(User.id == user_id))
-    user = result.scalar_one_or_none()
-
-    if not user:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Utilisateur introuvable",
-            headers={"WWW-Authenticate": "Bearer"},
-        )
-
-    if not user.is_active:
-        raise HTTPException(
-            status_code=status.HTTP_403_FORBIDDEN, detail="Compte désactivé"
-        )
-
-    if not user.is_verified:
-        raise HTTPException(
-            status_code=status.HTTP_403_FORBIDDEN,
-            detail="Email non vérifié. Consultez votre boîte de réception.",
-        )
-
-    if user.locked_until and user.locked_until > datetime.now():
-        raise HTTPException(
-            status_code=status.HTTP_403_FORBIDDEN,
-            detail="Compte temporairement verrouillé suite à trop de tentatives échouées",
-        )
-
-    return user
-
-
-async def get_current_user_optional(
+async def get_current_user_hybrid(
+    request: Request,
     credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
     session: AsyncSession = Depends(get_session),
-) -> Optional[User]:
+) -> User:
+    api_key_obj = getattr(request.state, "api_key", None)
+
+    if api_key_obj:
+        if api_key_obj.user_id:
+            result = await session.execute(
+                select(User).where(User.id == api_key_obj.user_id)
+            )
+            user = result.scalar_one_or_none()
+
+            if user:
+                user._is_api_key_user = True
+                user._api_key_obj = api_key_obj
+                return user
+
+        virtual_user = User(
+            id=f"api_key_{api_key_obj.id}",
+            email=f"api_key_{api_key_obj.id}@virtual.local",
+            nom=api_key_obj.name,
+            prenom="API",
+            hashed_password="",
+            role="api_client",
+            is_active=True,
+            is_verified=True,
+        )
+
+        virtual_user._is_api_key_user = True
+        virtual_user._api_key_obj = api_key_obj
+
+        return virtual_user
+
     if not credentials:
-        return None
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Authentification requise (JWT ou API Key)",
+            headers={"WWW-Authenticate": "Bearer"},
+        )
+
+    token = credentials.credentials
 
     try:
-        return await get_current_user(credentials, session)
-    except HTTPException:
-        return None
+        payload = decode_token(token)
+        user_id: str = payload.get("sub")
+
+        if user_id is None:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail="Token invalide: user_id manquant",
+                headers={"WWW-Authenticate": "Bearer"},
+            )
+
+        result = await session.execute(select(User).where(User.id == user_id))
+        user = result.scalar_one_or_none()
+
+        if user is None:
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail="Utilisateur introuvable",
+                headers={"WWW-Authenticate": "Bearer"},
+            )
+
+        if not user.is_active:
+            raise HTTPException(
+                status_code=status.HTTP_403_FORBIDDEN,
+                detail="Utilisateur inactif",
+            )
+
+        return user
+
+    except InvalidTokenError as e:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail=f"Token invalide: {str(e)}",
+            headers={"WWW-Authenticate": "Bearer"},
+        )
 
 
-def require_role(*allowed_roles: str):
-    async def role_checker(user: User = Depends(get_current_user)) -> User:
+def require_role_hybrid(*allowed_roles: str):
+    async def role_checker(user: User = Depends(get_current_user_hybrid)) -> User:
         if user.role not in allowed_roles:
             raise HTTPException(
                 status_code=status.HTTP_403_FORBIDDEN,
-                detail=f"Accès refusé. Rôles requis: {', '.join(allowed_roles)}",
+                detail=f"Accès interdit. Rôles autorisés: {', '.join(allowed_roles)}",
             )
         return user
 
     return role_checker
+
+
+def is_api_key_user(user: User) -> bool:
+    """Vérifie si l'utilisateur est authentifié via API Key"""
+    return getattr(user, "_is_api_key_user", False)
+
+
+def get_api_key_from_user(user: User):
+    """Récupère l'objet ApiKey depuis un utilisateur (si applicable)"""
+    return getattr(user, "_api_key_obj", None)
+
+
+get_current_user = get_current_user_hybrid
+require_role = require_role_hybrid
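A usage sketch, not part of the diff, showing how the re-exported `get_current_user` and `require_role` aliases are typically consumed from a router; the route paths are illustrative only.

```python
# Sketch only: consuming the hybrid dependencies defined above.
from fastapi import APIRouter, Depends

from core.dependencies import get_current_user, require_role, is_api_key_user
from database import User

router = APIRouter()


@router.get("/me")
async def read_me(user: User = Depends(get_current_user)):
    # user is either a JWT-authenticated User or the virtual API-key user built above
    return {"email": user.email, "via_api_key": is_api_key_user(user)}


@router.get("/admin/stats", dependencies=[Depends(require_role("admin", "super_admin"))])
async def admin_stats():
    return {"ok": True}
```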
@@ -19,7 +19,6 @@ async def create_admin():
     print(" Création d'un compte administrateur")
     print("=" * 60 + "\n")
 
-    # Saisie des informations
     email = input("Email de l'admin: ").strip().lower()
     if not email or "@" not in email:
         print(" Email invalide")
@@ -32,7 +31,6 @@ async def create_admin():
         print(" Prénom et nom requis")
         return False
 
-    # Mot de passe avec validation
     while True:
         password = input(
             "Mot de passe (min 8 car., 1 maj, 1 min, 1 chiffre, 1 spécial): "
@@ -58,7 +56,6 @@ async def create_admin():
         print(f"\n Un utilisateur avec l'email {email} existe déjà")
         return False
 
-    # Créer l'admin
     admin = User(
         id=str(uuid.uuid4()),
         email=email,
@@ -1,14 +1,14 @@
-import os
 from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
 from sqlalchemy.pool import NullPool
 from sqlalchemy import event, text
 import logging
 
+from config.config import settings
 from database.models.generic_model import Base
 
 logger = logging.getLogger(__name__)
 
-DATABASE_URL = os.getenv("DATABASE_URL")
+DATABASE_URL = settings.database_url
 
 
 def _configure_sqlite_connection(dbapi_connection, connection_record):
database/models/api_key.py (new file, 56 lines)

@@ -0,0 +1,56 @@
from sqlalchemy import Column, String, Boolean, DateTime, Integer, Text
from datetime import datetime
import uuid

from database.models.generic_model import Base


class ApiKey(Base):
    """Modèle pour les clés API publiques"""

    __tablename__ = "api_keys"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    key_hash = Column(String(64), unique=True, nullable=False, index=True)
    key_prefix = Column(String(10), nullable=False)

    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)

    user_id = Column(String(36), nullable=True)
    created_by = Column(String(255), nullable=False)

    is_active = Column(Boolean, default=True, nullable=False)
    rate_limit_per_minute = Column(Integer, default=60, nullable=False)
    allowed_endpoints = Column(Text, nullable=True)

    total_requests = Column(Integer, default=0, nullable=False)
    last_used_at = Column(DateTime, nullable=True)

    created_at = Column(DateTime, default=datetime.now, nullable=False)
    expires_at = Column(DateTime, nullable=True)
    revoked_at = Column(DateTime, nullable=True)

    def __repr__(self):
        return f"<ApiKey(name='{self.name}', prefix='{self.key_prefix}', active={self.is_active})>"


class SwaggerUser(Base):
    """Modèle pour les utilisateurs autorisés à accéder au Swagger"""

    __tablename__ = "swagger_users"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    username = Column(String(100), unique=True, nullable=False, index=True)
    hashed_password = Column(String(255), nullable=False)

    full_name = Column(String(255), nullable=True)
    email = Column(String(255), nullable=True)

    is_active = Column(Boolean, default=True, nullable=False)

    created_at = Column(DateTime, default=datetime.now, nullable=False)
    last_login = Column(DateTime, nullable=True)

    def __repr__(self):
        return f"<SwaggerUser(username='{self.username}', active={self.is_active})>"
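The service that actually creates and verifies keys (`ApiKeyService`) is not part of this diff, so the following is only an assumption about how a key could be generated to fit the columns above: `key_hash` is `String(64)`, which matches a SHA-256 hex digest, and the middleware later in this changeset expects keys starting with `sdk_live_`.

```python
# Illustrative assumption only; the real ApiKeyService is not shown in this diff.
import hashlib
import secrets


def generate_api_key() -> tuple[str, str, str]:
    plain = f"sdk_live_{secrets.token_urlsafe(32)}"        # prefix expected by the middleware
    key_hash = hashlib.sha256(plain.encode()).hexdigest()  # 64 hex chars, fits String(64)
    key_prefix = plain[:10]                                 # fits key_prefix String(10)
    return plain, key_hash, key_prefix
```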
middleware/security.py (new file, 271 lines)

@@ -0,0 +1,271 @@
from fastapi import Request, status
from fastapi.responses import JSONResponse
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.types import ASGIApp
from sqlalchemy import select
from typing import Callable
from datetime import datetime
import logging
import base64

logger = logging.getLogger(__name__)

security = HTTPBasic()


class SwaggerAuthMiddleware:
    PROTECTED_PATHS = ["/docs", "/redoc", "/openapi.json"]

    def __init__(self, app: ASGIApp):
        self.app = app

    async def __call__(self, scope, receive, send):
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return

        request = Request(scope, receive=receive)
        path = request.url.path

        if not any(path.startswith(p) for p in self.PROTECTED_PATHS):
            await self.app(scope, receive, send)
            return

        auth_header = request.headers.get("Authorization")

        if not auth_header or not auth_header.startswith("Basic "):
            response = JSONResponse(
                status_code=status.HTTP_401_UNAUTHORIZED,
                content={"detail": "Authentification requise pour la documentation"},
                headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
            )
            await response(scope, receive, send)
            return

        try:
            encoded_credentials = auth_header.split(" ")[1]
            decoded_credentials = base64.b64decode(encoded_credentials).decode("utf-8")
            username, password = decoded_credentials.split(":", 1)

            credentials = HTTPBasicCredentials(username=username, password=password)

            if not await self._verify_credentials(credentials):
                response = JSONResponse(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    content={"detail": "Identifiants invalides"},
                    headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
                )
                await response(scope, receive, send)
                return

        except Exception as e:
            logger.error(f"Erreur parsing auth header: {e}")
            response = JSONResponse(
                status_code=status.HTTP_401_UNAUTHORIZED,
                content={"detail": "Format d'authentification invalide"},
                headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
            )
            await response(scope, receive, send)
            return

        await self.app(scope, receive, send)

    async def _verify_credentials(self, credentials: HTTPBasicCredentials) -> bool:
        """Vérifie les identifiants dans la base de données"""
        from database.db_config import async_session_factory
        from database.models.api_key import SwaggerUser
        from security.auth import verify_password

        try:
            async with async_session_factory() as session:
                result = await session.execute(
                    select(SwaggerUser).where(
                        SwaggerUser.username == credentials.username
                    )
                )
                swagger_user = result.scalar_one_or_none()

                if swagger_user and swagger_user.is_active:
                    if verify_password(
                        credentials.password, swagger_user.hashed_password
                    ):
                        swagger_user.last_login = datetime.now()
                        await session.commit()
                        logger.info(f"✓ Accès Swagger autorisé: {credentials.username}")
                        return True

                logger.warning(f"✗ Accès Swagger refusé: {credentials.username}")
                return False

        except Exception as e:
            logger.error(f"Erreur vérification credentials: {e}")
            return False


class ApiKeyMiddlewareHTTP(BaseHTTPMiddleware):
    EXCLUDED_PATHS = [
        "/docs",
        "/redoc",
        "/openapi.json",
        "/",
        "/health",
        "/auth",
        "/api-keys/verify",
    ]

    def _is_excluded_path(self, path: str) -> bool:
        """Vérifie si le chemin est exclu de l'authentification"""
        if path == "/":
            return True

        for excluded in self.EXCLUDED_PATHS:
            if excluded == "/":
                continue
            if path == excluded or path.startswith(excluded + "/"):
                return True

        return False

    async def dispatch(self, request: Request, call_next: Callable):
        path = request.url.path
        method = request.method

        if self._is_excluded_path(path):
            return await call_next(request)

        auth_header = request.headers.get("Authorization")
        api_key_header = request.headers.get("X-API-Key")

        if auth_header and auth_header.startswith("Bearer "):
            token = auth_header.split(" ")[1]

            if token.startswith("sdk_live_"):
                logger.warning(
                    " API Key envoyée dans Authorization au lieu de X-API-Key"
                )
                api_key_header = token
            else:
                logger.debug(f" JWT détecté pour {method} {path}")
                return await call_next(request)

        if api_key_header:
            logger.debug(f" API Key détectée pour {method} {path}")
            return await self._handle_api_key_auth(
                request, api_key_header, path, method, call_next
            )

        logger.warning(f" Aucune authentification: {method} {path}")
        return JSONResponse(
            status_code=status.HTTP_401_UNAUTHORIZED,
            content={
                "detail": "Authentification requise",
                "hint": "Utilisez 'X-API-Key: sdk_live_xxx' ou 'Authorization: Bearer <jwt>'",
                "path": path,
            },
            headers={"WWW-Authenticate": 'Bearer realm="API", charset="UTF-8"'},
        )

    async def _handle_api_key_auth(
        self,
        request: Request,
        api_key: str,
        path: str,
        method: str,
        call_next: Callable,
    ):
        """Gère l'authentification par API Key avec vérification STRICTE"""
        try:
            from database.db_config import async_session_factory
            from services.api_key import ApiKeyService

            async with async_session_factory() as session:
                service = ApiKeyService(session)

                api_key_obj = await service.verify_api_key(api_key)

                if not api_key_obj:
                    logger.warning(f" Clé API invalide: {method} {path}")
                    return JSONResponse(
                        status_code=status.HTTP_401_UNAUTHORIZED,
                        content={
                            "detail": "Clé API invalide ou expirée",
                            "hint": "Vérifiez votre clé X-API-Key",
                        },
                    )

                is_allowed, rate_info = await service.check_rate_limit(api_key_obj)
                if not is_allowed:
                    logger.warning(f"⚠️ Rate limit: {api_key_obj.name}")
                    return JSONResponse(
                        status_code=status.HTTP_429_TOO_MANY_REQUESTS,
                        content={"detail": "Rate limit dépassé"},
                        headers={
                            "X-RateLimit-Limit": str(rate_info["limit"]),
                            "X-RateLimit-Remaining": "0",
                        },
                    )

                has_access = await service.check_endpoint_access(api_key_obj, path)

                if not has_access:
                    import json

                    allowed = (
                        json.loads(api_key_obj.allowed_endpoints)
                        if api_key_obj.allowed_endpoints
                        else ["Tous"]
                    )

                    logger.warning(
                        f" ACCÈS REFUSÉ: {api_key_obj.name}\n"
                        f" Endpoint demandé: {path}\n"
                        f" Endpoints autorisés: {allowed}"
                    )

                    return JSONResponse(
                        status_code=status.HTTP_403_FORBIDDEN,
                        content={
                            "detail": "Accès non autorisé à cet endpoint",
                            "endpoint_requested": path,
                            "api_key_name": api_key_obj.name,
                            "allowed_endpoints": allowed,
                            "hint": "Cette clé API n'a pas accès à cet endpoint. Contactez l'administrateur.",
                        },
                    )

                request.state.api_key = api_key_obj
                request.state.authenticated_via = "api_key"

                logger.info(f" ACCÈS AUTORISÉ: {api_key_obj.name} → {method} {path}")

                return await call_next(request)

        except Exception as e:
            logger.error(f" Erreur validation API Key: {e}", exc_info=True)
            return JSONResponse(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                content={"detail": f"Erreur interne: {str(e)}"},
            )


ApiKeyMiddleware = ApiKeyMiddlewareHTTP


def get_api_key_from_request(request: Request):
    """Récupère l'objet ApiKey depuis la requête si présent"""
    return getattr(request.state, "api_key", None)


def get_auth_method(request: Request) -> str:
    """Retourne la méthode d'authentification utilisée"""
    return getattr(request.state, "authenticated_via", "none")


__all__ = [
    "SwaggerAuthMiddleware",
    "ApiKeyMiddlewareHTTP",
    "ApiKeyMiddleware",
    "get_api_key_from_request",
    "get_auth_method",
]
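A sketch of how these middlewares could be registered; the application entry point is not included in this diff, so the module and registration order are assumptions.

```python
# Sketch only: wiring the Swagger Basic-auth gate and the API-key gate.
from fastapi import FastAPI

from middleware.security import ApiKeyMiddlewareHTTP, SwaggerAuthMiddleware

app = FastAPI()
app.add_middleware(ApiKeyMiddlewareHTTP)   # API key / JWT check on business routes
app.add_middleware(SwaggerAuthMiddleware)  # HTTP Basic auth in front of /docs, /redoc, /openapi.json
```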
routes/api_keys.py (new file, 154 lines)

@@ -0,0 +1,154 @@
from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.ext.asyncio import AsyncSession
import logging

from database import get_session, User
from core.dependencies import get_current_user, require_role
from services.api_key import ApiKeyService, api_key_to_response
from schemas.api_key import (
    ApiKeyCreate,
    ApiKeyCreatedResponse,
    ApiKeyResponse,
    ApiKeyList,
)

logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api-keys", tags=["API Keys Management"])


@router.post(
    "",
    response_model=ApiKeyCreatedResponse,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(require_role("admin", "super_admin"))],
)
async def create_api_key(
    data: ApiKeyCreate,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    service = ApiKeyService(session)

    api_key_obj, api_key_plain = await service.create_api_key(
        name=data.name,
        description=data.description,
        created_by=user.email,
        user_id=user.id,
        expires_in_days=data.expires_in_days,
        rate_limit_per_minute=data.rate_limit_per_minute,
        allowed_endpoints=data.allowed_endpoints,
    )

    logger.info(f" Clé API créée par {user.email}: {data.name}")

    response_data = api_key_to_response(api_key_obj)
    response_data["api_key"] = api_key_plain

    return ApiKeyCreatedResponse(**response_data)


@router.get("", response_model=ApiKeyList)
async def list_api_keys(
    include_revoked: bool = Query(False, description="Inclure les clés révoquées"),
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    service = ApiKeyService(session)

    user_id = None if user.role in ["admin", "super_admin"] else user.id

    keys = await service.list_api_keys(include_revoked=include_revoked, user_id=user_id)

    items = [ApiKeyResponse(**api_key_to_response(k)) for k in keys]

    return ApiKeyList(total=len(items), items=items)


@router.get("/{key_id}", response_model=ApiKeyResponse)
async def get_api_key(
    key_id: str,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """Récupérer une clé API par son ID"""
    service = ApiKeyService(session)

    api_key_obj = await service.get_by_id(key_id)

    if not api_key_obj:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Clé API {key_id} introuvable",
        )

    if user.role not in ["admin", "super_admin"]:
        if api_key_obj.user_id != user.id:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Accès refusé à cette clé",
            )

    return ApiKeyResponse(**api_key_to_response(api_key_obj))


@router.delete("/{key_id}", status_code=status.HTTP_200_OK)
async def revoke_api_key(
    key_id: str,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    service = ApiKeyService(session)

    api_key_obj = await service.get_by_id(key_id)

    if not api_key_obj:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Clé API {key_id} introuvable",
        )

    if user.role not in ["admin", "super_admin"]:
        if api_key_obj.user_id != user.id:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Accès refusé à cette clé",
            )

    success = await service.revoke_api_key(key_id)

    if not success:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Erreur lors de la révocation",
        )

    logger.info(f" Clé API révoquée par {user.email}: {api_key_obj.name}")

    return {
        "success": True,
        "message": f"Clé API '{api_key_obj.name}' révoquée avec succès",
    }


@router.post("/verify", status_code=status.HTTP_200_OK)
async def verify_api_key_endpoint(
    api_key: str = Query(..., description="Clé API à vérifier"),
    session: AsyncSession = Depends(get_session),
):
    service = ApiKeyService(session)

    api_key_obj = await service.verify_api_key(api_key)

    if not api_key_obj:
        return {
            "valid": False,
            "message": "Clé API invalide, expirée ou révoquée",
        }

    return {
        "valid": True,
        "message": "Clé API valide",
        "key_name": api_key_obj.name,
        "rate_limit": api_key_obj.rate_limit_per_minute,
        "expires_at": api_key_obj.expires_at,
    }
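A client-side sketch, not part of the diff, exercising the router above; the host, port and admin JWT are placeholders.

```python
# Sketch only: creating then verifying a key through the /api-keys router.
import requests

BASE = "http://localhost:8000"                     # placeholder host
headers = {"Authorization": "Bearer <admin-jwt>"}  # admin or super_admin token required

created = requests.post(
    f"{BASE}/api-keys",
    json={"name": "reporting-client", "rate_limit_per_minute": 120,
          "allowed_endpoints": ["/clients*"]},
    headers=headers,
).json()
print(created["api_key"])  # returned in clear only at creation time

# /api-keys/verify is excluded from the API-key middleware, so no auth header is needed
print(requests.post(f"{BASE}/api-keys/verify",
                    params={"api_key": created["api_key"]}).json())
```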
@@ -7,6 +7,7 @@ from typing import Optional
 import uuid
 
 from database import get_session, User, RefreshToken, LoginAttempt
+from core.dependencies import get_current_user
 from security.auth import (
     hash_password,
     verify_password,
@@ -19,7 +20,6 @@ from security.auth import (
     hash_token,
 )
 from services.email_service import AuthEmailService
-from core.dependencies import get_current_user
 from config.config import settings
 import logging
 
@@ -101,7 +101,7 @@ async def check_rate_limit(
     )
     failed_attempts = result.scalars().all()
 
-    if len(failed_attempts) >= 5:
+    if len(failed_attempts) >= 15:
         return False, "Trop de tentatives échouées. Réessayez dans 15 minutes."
 
     return True, ""
@@ -286,7 +286,7 @@ async def login(
         if user:
             user.failed_login_attempts += 1
 
-            if user.failed_login_attempts >= 5:
+            if user.failed_login_attempts >= 15:
                 user.locked_until = datetime.now() + timedelta(minutes=15)
                 await session.commit()
                 raise HTTPException(
@@ -510,7 +510,7 @@ async def logout(
         token_record.revoked_at = datetime.now()
         await session.commit()
 
-    logger.info(f"👋 Déconnexion: {user.email}")
+    logger.info(f" Déconnexion: {user.email}")
 
     return {"success": True, "message": "Déconnexion réussie"}
 
@@ -22,7 +22,6 @@ async def rechercher_entreprise(
     try:
         logger.info(f" Recherche entreprise: '{q}'")
 
-        # Appel API
         api_response = await rechercher_entreprise_api(q, per_page)
 
         resultats_api = api_response.get("results", [])
@@ -1,11 +1,11 @@
 from fastapi import APIRouter, Depends, HTTPException, Query, Request
-from fastapi.responses import FileResponse
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy import select, func, and_
+from sqlalchemy import select, func
 from sqlalchemy.orm import selectinload
 from typing import List, Optional
-from datetime import datetime, timedelta
+from datetime import datetime
 import logging
+from core.dependencies import get_current_user
 from data.data import templates_signature_email
 from email_queue import email_queue
 from database import UniversignSignerStatus, UniversignTransactionStatus, get_session
@@ -32,7 +32,10 @@ from schemas import (
 
 
 logger = logging.getLogger(__name__)
-router = APIRouter(prefix="/universign", tags=["Universign"])
+router = APIRouter(
+    prefix="/universign",
+    tags=["Universign"],
+)
 
 sync_service = UniversignSyncService(
     api_url=settings.universign_api_url, api_key=settings.universign_api_key
@@ -494,14 +497,11 @@ async def sync_all_transactions(
     return {"success": True, "stats": stats, "timestamp": datetime.now().isoformat()}
 
 
-@router.post("/webhook")
-@router.post("/webhook/")
+@router.post("/webhook", dependencies=[])
+@router.post("/webhook/", dependencies=[])
 async def webhook_universign(
     request: Request, session: AsyncSession = Depends(get_session)
 ):
-    """
-    CORRECTION : Extraction correcte du transaction_id selon la structure réelle d'Universign
-    """
     try:
         payload = await request.json()
 
@@ -511,7 +511,6 @@ async def webhook_universign(
         transaction_id = None
 
         if payload.get("type", "").startswith("transaction.") and "payload" in payload:
-            # Le transaction_id est dans payload.object.id
             nested_object = payload.get("payload", {}).get("object", {})
             if nested_object.get("object") == "transaction":
                 transaction_id = nested_object.get("id")
@@ -1082,159 +1081,6 @@ async def trouver_transactions_inconsistantes(
         raise HTTPException(500, str(e))
 
 
-@router.post("/admin/nettoyer-transactions-erreur", tags=["Admin"])
-async def nettoyer_transactions_erreur(
-    age_jours: int = Query(
-        7, description="Supprimer les transactions en erreur de plus de X jours"
-    ),
-    session: AsyncSession = Depends(get_session),
-):
-    try:
-        date_limite = datetime.now() - timedelta(days=age_jours)
-
-        query = select(UniversignTransaction).where(
-            and_(
-                UniversignTransaction.local_status == LocalDocumentStatus.ERROR,
-                UniversignTransaction.created_at < date_limite,
-            )
-        )
-
-        result = await session.execute(query)
-        transactions = result.scalars().all()
-
-        supprimees = []
-        for tx in transactions:
-            supprimees.append(
-                {
-                    "transaction_id": tx.transaction_id,
-                    "document_id": tx.sage_document_id,
-                    "date_creation": tx.created_at.isoformat(),
-                    "erreur": tx.sync_error,
-                }
-            )
-            await session.delete(tx)
-
-        await session.commit()
-
-        return {
-            "success": True,
-            "transactions_supprimees": len(supprimees),
-            "age_limite_jours": age_jours,
-            "details": supprimees,
-        }
-
-    except Exception as e:
-        logger.error(f"Erreur nettoyage: {e}")
-        raise HTTPException(500, str(e))
-
-
-@router.get("/debug/webhook-payload/{transaction_id}", tags=["Debug"])
-async def voir_dernier_webhook(
-    transaction_id: str, session: AsyncSession = Depends(get_session)
-):
-    try:
-        query = select(UniversignTransaction).where(
-            UniversignTransaction.transaction_id == transaction_id
-        )
-        result = await session.execute(query)
-        tx = result.scalar_one_or_none()
-
-        if not tx:
-            raise HTTPException(404, "Transaction introuvable")
-
-        logs_query = (
-            select(UniversignSyncLog)
-            .where(
-                and_(
-                    UniversignSyncLog.transaction_id == tx.id,
-                    UniversignSyncLog.sync_type.like("webhook:%"),
-                )
-            )
-            .order_by(UniversignSyncLog.sync_timestamp.desc())
-            .limit(1)
-        )
-
-        logs_result = await session.execute(logs_query)
-        last_webhook_log = logs_result.scalar_one_or_none()
-
-        if not last_webhook_log:
-            return {
-                "transaction_id": transaction_id,
-                "webhook_recu": tx.webhook_received,
-                "dernier_payload": None,
-                "message": "Aucun webhook reçu pour cette transaction",
-            }
-
-        return {
-            "transaction_id": transaction_id,
-            "webhook_recu": tx.webhook_received,
-            "dernier_webhook": {
-                "timestamp": last_webhook_log.sync_timestamp.isoformat(),
-                "type": last_webhook_log.sync_type,
-                "success": last_webhook_log.success,
-                "payload": json.loads(last_webhook_log.changes_detected)
-                if last_webhook_log.changes_detected
-                else None,
-            },
-        }
-
-    except HTTPException:
-        raise
-    except Exception as e:
-        logger.error(f"Erreur debug webhook: {e}")
-        raise HTTPException(500, str(e))
-
-
-@router.get(
-    "/transactions/{transaction_id}/document/download", tags=["Documents Signés"]
-)
-async def telecharger_document_signe(
-    transaction_id: str, session: AsyncSession = Depends(get_session)
-):
-    try:
-        query = select(UniversignTransaction).where(
-            UniversignTransaction.transaction_id == transaction_id
-        )
-        result = await session.execute(query)
-        transaction = result.scalar_one_or_none()
-
-        if not transaction:
-            raise HTTPException(404, f"Transaction {transaction_id} introuvable")
-
-        if not transaction.signed_document_path:
-            raise HTTPException(
-                404,
-                "Document signé non disponible localement. "
-                "Utilisez POST /admin/download-missing-documents pour le récupérer.",
-            )
-
-        file_path = Path(transaction.signed_document_path)
-
-        if not file_path.exists():
-            logger.warning(f"Fichier perdu : {file_path}")
-            raise HTTPException(
-                404,
-                "Fichier introuvable sur le serveur. "
-                "Utilisez POST /admin/download-missing-documents pour le récupérer.",
-            )
-
-        download_name = (
-            f"{transaction.sage_document_id}_"
-            f"{transaction.sage_document_type.name}_"
-            f"signe.pdf"
-        )
-
-        return FileResponse(
-            path=str(file_path), media_type="application/pdf", filename=download_name
-        )
-
-    except HTTPException:
-        raise
-    except Exception as e:
-        logger.error(f"Erreur téléchargement document : {e}", exc_info=True)
-        raise HTTPException(500, str(e))
-
-
 @router.get("/transactions/{transaction_id}/document/info", tags=["Documents Signés"])
 async def info_document_signe(
     transaction_id: str, session: AsyncSession = Depends(get_session)
@@ -1,4 +1,3 @@
-# sage_client.py
 import requests
 from typing import Dict, List, Optional
 from config.config import settings
@@ -468,7 +467,6 @@ class SageGatewayClient:
             "tva_encaissement": tva_encaissement,
         }
 
-        # Champs optionnels
         if date_reglement:
             payload["date_reglement"] = date_reglement
         if code_journal:
schemas/api_key.py (new file, 77 lines)

@@ -0,0 +1,77 @@
from pydantic import BaseModel, Field
from typing import Optional, List
from datetime import datetime


class ApiKeyCreate(BaseModel):
    """Schema pour créer une clé API"""

    name: str = Field(..., min_length=3, max_length=255, description="Nom de la clé")
    description: Optional[str] = Field(None, description="Description de l'usage")
    expires_in_days: Optional[int] = Field(
        None, ge=1, le=3650, description="Expiration en jours (max 10 ans)"
    )
    rate_limit_per_minute: int = Field(
        60, ge=1, le=1000, description="Limite de requêtes par minute"
    )
    allowed_endpoints: Optional[List[str]] = Field(
        None, description="Endpoints autorisés ([] = tous, ['/clients*'] = wildcard)"
    )


class ApiKeyResponse(BaseModel):
    """Schema de réponse pour une clé API"""

    id: str
    name: str
    description: Optional[str]
    key_prefix: str
    is_active: bool
    is_expired: bool
    rate_limit_per_minute: int
    allowed_endpoints: Optional[List[str]]
    total_requests: int
    last_used_at: Optional[datetime]
    created_at: datetime
    expires_at: Optional[datetime]
    revoked_at: Optional[datetime]
    created_by: str


class ApiKeyCreatedResponse(ApiKeyResponse):
    """Schema de réponse après création (inclut la clé en clair)"""

    api_key: str = Field(
        ..., description=" Clé API en clair - à sauvegarder immédiatement"
    )


class ApiKeyList(BaseModel):
    """Liste de clés API"""

    total: int
    items: List[ApiKeyResponse]


class SwaggerUserCreate(BaseModel):
    """Schema pour créer un utilisateur Swagger"""

    username: str = Field(..., min_length=3, max_length=100)
    password: str = Field(..., min_length=8)
    full_name: Optional[str] = None
    email: Optional[str] = None


class SwaggerUserResponse(BaseModel):
    """Schema de réponse pour un utilisateur Swagger"""

    id: str
    username: str
    full_name: Optional[str]
    email: Optional[str]
    is_active: bool
    created_at: datetime
    last_login: Optional[datetime]

    class Config:
        from_attributes = True
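For illustration, and assuming Pydantic validation as declared above, a request body for `POST /api-keys` would be built like this; the names and values are placeholders.

```python
# Illustration only: the payload shape accepted by ApiKeyCreate.
from schemas.api_key import ApiKeyCreate

payload = ApiKeyCreate(
    name="partner-integration",
    description="Read-only access for the partner portal",
    expires_in_days=365,
    rate_limit_per_minute=60,
    allowed_endpoints=["/clients*", "/articles*"],
)
print(payload)
```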
@@ -76,7 +76,6 @@ class Article(BaseModel):
     )
     nb_emplacements: int = Field(0, description="Nombre d'emplacements")
 
-    # Champs énumérés normalisés
     suivi_stock: Optional[int] = Field(
         None,
         description="Type de suivi de stock (AR_SuiviStock): 0=Aucun, 1=CMUP, 2=FIFO/LIFO, 3=Sérialisé",
@@ -10,12 +10,10 @@ logger = logging.getLogger(__name__)
 class ReglementFactureCreate(BaseModel):
     """Requête de règlement d'une facture côté VPS"""
 
-    # Montant et devise
     montant: Decimal = Field(..., gt=0, description="Montant à régler")
     devise_code: Optional[int] = Field(0, description="Code devise (0=EUR par défaut)")
     cours_devise: Optional[Decimal] = Field(1.0, description="Cours de la devise")
 
-    # Mode et journal
     mode_reglement: int = Field(
         ..., ge=0, description="Code mode règlement depuis /reglements/modes"
     )
@@ -23,13 +21,11 @@ class ReglementFactureCreate(BaseModel):
         ..., min_length=1, description="Code journal depuis /journaux/tresorerie"
     )
 
-    # Dates
     date_reglement: Optional[date] = Field(
         None, description="Date du règlement (défaut: aujourd'hui)"
     )
     date_echeance: Optional[date] = Field(None, description="Date d'échéance")
 
-    # Références
     reference: Optional[str] = Field(
         "", max_length=17, description="Référence pièce règlement"
     )
@@ -37,7 +33,6 @@ class ReglementFactureCreate(BaseModel):
         "", max_length=35, description="Libellé du règlement"
     )
 
-    # TVA sur encaissement
     tva_encaissement: Optional[bool] = Field(
         False, description="Appliquer TVA sur encaissement"
     )
@@ -81,7 +76,6 @@ class ReglementMultipleCreate(BaseModel):
     libelle: Optional[str] = Field("")
     tva_encaissement: Optional[bool] = Field(False)
 
-    # Factures spécifiques (optionnel)
     numeros_factures: Optional[List[str]] = Field(
         None, description="Si vide, règle les plus anciennes en premier"
     )
@@ -10,7 +10,6 @@ class GatewayHealthStatus(str, Enum):
     UNKNOWN = "unknown"
 
 
-# === CREATE ===
 class SageGatewayCreate(BaseModel):
 
     name: str = Field(
@@ -71,7 +70,6 @@ class SageGatewayUpdate(BaseModel):
         return v.rstrip("/") if v else v
 
 
-# === RESPONSE ===
 class SageGatewayResponse(BaseModel):
 
     id: str
@@ -9,7 +9,6 @@ class CollaborateurBase(BaseModel):
     prenom: Optional[str] = Field(None, max_length=50)
     fonction: Optional[str] = Field(None, max_length=50)
 
-    # Adresse
     adresse: Optional[str] = Field(None, max_length=100)
     complement: Optional[str] = Field(None, max_length=100)
     code_postal: Optional[str] = Field(None, max_length=10)
@@ -17,7 +16,6 @@ class CollaborateurBase(BaseModel):
     code_region: Optional[str] = Field(None, max_length=50)
     pays: Optional[str] = Field(None, max_length=50)
 
-    # Services
     service: Optional[str] = Field(None, max_length=50)
     vendeur: bool = Field(default=False)
     caissier: bool = Field(default=False)
@@ -25,18 +23,15 @@ class CollaborateurBase(BaseModel):
     chef_ventes: bool = Field(default=False)
     numero_chef_ventes: Optional[int] = None
 
-    # Contact
     telephone: Optional[str] = Field(None, max_length=20)
     telecopie: Optional[str] = Field(None, max_length=20)
     email: Optional[EmailStr] = None
     tel_portable: Optional[str] = Field(None, max_length=20)
 
-    # Réseaux sociaux
     facebook: Optional[str] = Field(None, max_length=100)
     linkedin: Optional[str] = Field(None, max_length=100)
     skype: Optional[str] = Field(None, max_length=100)
 
-    # Autres
     matricule: Optional[str] = Field(None, max_length=20)
     sommeil: bool = Field(default=False)
 
@ -14,7 +14,6 @@ class TypeTiersInt(IntEnum):
|
||||||
|
|
||||||
|
|
||||||
class TiersDetails(BaseModel):
|
class TiersDetails(BaseModel):
|
||||||
# IDENTIFICATION
|
|
||||||
numero: Optional[str] = Field(None, description="Code tiers (CT_Num)")
|
numero: Optional[str] = Field(None, description="Code tiers (CT_Num)")
|
||||||
intitule: Optional[str] = Field(
|
intitule: Optional[str] = Field(
|
||||||
None, description="Raison sociale ou Nom complet (CT_Intitule)"
|
None, description="Raison sociale ou Nom complet (CT_Intitule)"
|
||||||
|
|
@ -37,7 +36,6 @@ class TiersDetails(BaseModel):
|
||||||
)
|
)
|
||||||
code_naf: Optional[str] = Field(None, description="Code NAF/APE (CT_Ape)")
|
code_naf: Optional[str] = Field(None, description="Code NAF/APE (CT_Ape)")
|
||||||
|
|
||||||
# ADRESSE
|
|
||||||
contact: Optional[str] = Field(
|
contact: Optional[str] = Field(
|
||||||
None, description="Nom du contact principal (CT_Contact)"
|
None, description="Nom du contact principal (CT_Contact)"
|
||||||
)
|
)
|
||||||
|
|
@ -50,7 +48,6 @@ class TiersDetails(BaseModel):
|
||||||
region: Optional[str] = Field(None, description="Région/État (CT_CodeRegion)")
|
region: Optional[str] = Field(None, description="Région/État (CT_CodeRegion)")
|
||||||
pays: Optional[str] = Field(None, description="Pays (CT_Pays)")
|
pays: Optional[str] = Field(None, description="Pays (CT_Pays)")
|
||||||
|
|
||||||
# TELECOM
|
|
||||||
telephone: Optional[str] = Field(None, description="Téléphone fixe (CT_Telephone)")
|
telephone: Optional[str] = Field(None, description="Téléphone fixe (CT_Telephone)")
|
||||||
telecopie: Optional[str] = Field(None, description="Fax (CT_Telecopie)")
|
telecopie: Optional[str] = Field(None, description="Fax (CT_Telecopie)")
|
||||||
email: Optional[str] = Field(None, description="Email principal (CT_EMail)")
|
email: Optional[str] = Field(None, description="Email principal (CT_EMail)")
|
||||||
|
|
@ -58,13 +55,11 @@ class TiersDetails(BaseModel):
|
||||||
facebook: Optional[str] = Field(None, description="Profil Facebook (CT_Facebook)")
|
facebook: Optional[str] = Field(None, description="Profil Facebook (CT_Facebook)")
|
||||||
linkedin: Optional[str] = Field(None, description="Profil LinkedIn (CT_LinkedIn)")
|
linkedin: Optional[str] = Field(None, description="Profil LinkedIn (CT_LinkedIn)")
|
||||||
|
|
||||||
# TAUX
|
|
||||||
taux01: Optional[float] = Field(None, description="Taux personnalisé 1 (CT_Taux01)")
|
taux01: Optional[float] = Field(None, description="Taux personnalisé 1 (CT_Taux01)")
|
||||||
taux02: Optional[float] = Field(None, description="Taux personnalisé 2 (CT_Taux02)")
|
taux02: Optional[float] = Field(None, description="Taux personnalisé 2 (CT_Taux02)")
|
||||||
taux03: Optional[float] = Field(None, description="Taux personnalisé 3 (CT_Taux03)")
|
taux03: Optional[float] = Field(None, description="Taux personnalisé 3 (CT_Taux03)")
|
||||||
taux04: Optional[float] = Field(None, description="Taux personnalisé 4 (CT_Taux04)")
|
taux04: Optional[float] = Field(None, description="Taux personnalisé 4 (CT_Taux04)")
|
||||||
|
|
||||||
# STATISTIQUES
|
|
||||||
statistique01: Optional[str] = Field(
|
statistique01: Optional[str] = Field(
|
||||||
None, description="Statistique 1 (CT_Statistique01)"
|
None, description="Statistique 1 (CT_Statistique01)"
|
||||||
)
|
)
|
||||||
|
|
@ -96,7 +91,6 @@ class TiersDetails(BaseModel):
|
||||||
None, description="Statistique 10 (CT_Statistique10)"
|
None, description="Statistique 10 (CT_Statistique10)"
|
||||||
)
|
)
|
||||||
|
|
||||||
# COMMERCIAL
|
|
||||||
encours_autorise: Optional[float] = Field(
|
encours_autorise: Optional[float] = Field(
|
||||||
None, description="Encours maximum autorisé (CT_Encours)"
|
None, description="Encours maximum autorisé (CT_Encours)"
|
||||||
)
|
)
|
||||||
|
|
@ -113,7 +107,6 @@ class TiersDetails(BaseModel):
|
||||||
None, description="Détails du commercial/collaborateur"
|
None, description="Détails du commercial/collaborateur"
|
||||||
)
|
)
|
||||||
|
|
||||||
# FACTURATION
|
|
||||||
lettrage_auto: Optional[bool] = Field(
|
lettrage_auto: Optional[bool] = Field(
|
||||||
None, description="Lettrage automatique (CT_Lettrage)"
|
None, description="Lettrage automatique (CT_Lettrage)"
|
||||||
)
|
)
|
||||||
|
|
@ -146,7 +139,6 @@ class TiersDetails(BaseModel):
|
||||||
None, description="Bon à payer obligatoire (CT_BonAPayer)"
|
None, description="Bon à payer obligatoire (CT_BonAPayer)"
|
||||||
)
|
)
|
||||||
|
|
||||||
# LOGISTIQUE
|
|
||||||
priorite_livraison: Optional[int] = Field(
|
priorite_livraison: Optional[int] = Field(
|
||||||
None, description="Priorité livraison (CT_PrioriteLivr)"
|
None, description="Priorité livraison (CT_PrioriteLivr)"
|
||||||
)
|
)
|
||||||
|
|
@ -160,17 +152,14 @@ class TiersDetails(BaseModel):
|
||||||
None, description="Délai appro jours (CT_DelaiAppro)"
|
None, description="Délai appro jours (CT_DelaiAppro)"
|
||||||
)
|
)
|
||||||
|
|
||||||
# COMMENTAIRE
|
|
||||||
commentaire: Optional[str] = Field(
|
commentaire: Optional[str] = Field(
|
||||||
None, description="Commentaire libre (CT_Commentaire)"
|
None, description="Commentaire libre (CT_Commentaire)"
|
||||||
)
|
)
|
||||||
|
|
||||||
# ANALYTIQUE
|
|
||||||
section_analytique: Optional[str] = Field(
|
section_analytique: Optional[str] = Field(
|
||||||
None, description="Section analytique (CA_Num)"
|
None, description="Section analytique (CA_Num)"
|
||||||
)
|
)
|
||||||
|
|
||||||
# ORGANISATION / SURVEILLANCE
|
|
||||||
mode_reglement_code: Optional[int] = Field(
|
mode_reglement_code: Optional[int] = Field(
|
||||||
None, description="Code mode règlement (MR_No)"
|
None, description="Code mode règlement (MR_No)"
|
||||||
)
|
)
|
||||||
|
|
@ -200,7 +189,6 @@ class TiersDetails(BaseModel):
|
||||||
None, description="Résultat financier (CT_SvResultat)"
|
None, description="Résultat financier (CT_SvResultat)"
|
||||||
)
|
)
|
||||||
|
|
||||||
# COMPTE GENERAL ET CATEGORIES
|
|
||||||
compte_general: Optional[str] = Field(
|
compte_general: Optional[str] = Field(
|
||||||
None, description="Compte général principal (CG_NumPrinc)"
|
None, description="Compte général principal (CG_NumPrinc)"
|
||||||
)
|
)
|
||||||
|
|
@ -211,7 +199,6 @@ class TiersDetails(BaseModel):
|
||||||
None, description="Catégorie comptable (N_CatCompta)"
|
None, description="Catégorie comptable (N_CatCompta)"
|
||||||
)
|
)
|
||||||
|
|
||||||
# CONTACTS
|
|
||||||
contacts: Optional[List[Contact]] = Field(
|
contacts: Optional[List[Contact]] = Field(
|
||||||
default_factory=list, description="Liste des contacts du tiers"
|
default_factory=list, description="Liste des contacts du tiers"
|
||||||
)
|
)
|
||||||
|
|
|
||||||
353 scripts/manage_security.py (Normal file)
@@ -0,0 +1,353 @@
import sys
import os
from pathlib import Path

_current_file = Path(__file__).resolve()
_script_dir = _current_file.parent
_app_dir = _script_dir.parent

print(f"DEBUG: Script path: {_current_file}")
print(f"DEBUG: App dir: {_app_dir}")
print(f"DEBUG: Current working dir: {os.getcwd()}")

if str(_app_dir) in sys.path:
    sys.path.remove(str(_app_dir))
sys.path.insert(0, str(_app_dir))

os.chdir(str(_app_dir))

print(f"DEBUG: sys.path[0]: {sys.path[0]}")
print(f"DEBUG: New working dir: {os.getcwd()}")

_test_imports = [
    "database",
    "database.db_config",
    "database.models",
    "services",
    "security",
]

print("\nDEBUG: Vérification des imports...")
for module in _test_imports:
    try:
        __import__(module)
        print(f" {module}")
    except ImportError as e:
        print(f" {module}: {e}")

import asyncio
import argparse
import logging
from datetime import datetime

from sqlalchemy import select

try:
    from database.db_config import async_session_factory
    from database.models.user import User
    from database.models.api_key import SwaggerUser, ApiKey
    from services.api_key import ApiKeyService
    from security.auth import hash_password
except ImportError as e:
    print(f"\n ERREUR D'IMPORT: {e}")
    print(f" Vérifiez que vous êtes dans /app")
    print(f" Commande correcte: cd /app && python scripts/manage_security.py ...")
    sys.exit(1)

logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
logger = logging.getLogger(__name__)


async def add_swagger_user(username: str, password: str, full_name: str = None):
    """Ajouter un utilisateur Swagger"""
    async with async_session_factory() as session:
        result = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        existing = result.scalar_one_or_none()

        if existing:
            logger.error(f" L'utilisateur '{username}' existe déjà")
            return

        swagger_user = SwaggerUser(
            username=username,
            hashed_password=hash_password(password),
            full_name=full_name or username,
            is_active=True,
        )

        session.add(swagger_user)
        await session.commit()

        logger.info(f" Utilisateur Swagger créé: {username}")
        logger.info(f" Nom complet: {swagger_user.full_name}")


async def list_swagger_users():
    """Lister tous les utilisateurs Swagger"""
    async with async_session_factory() as session:
        result = await session.execute(select(SwaggerUser))
        users = result.scalars().all()

        if not users:
            logger.info("🔭 Aucun utilisateur Swagger")
            return

        logger.info(f"👥 {len(users)} utilisateur(s) Swagger:\n")
        for user in users:
            status = "" if user.is_active else ""
            logger.info(f" {status} {user.username}")
            logger.info(f" Nom: {user.full_name}")
            logger.info(f" Créé: {user.created_at}")
            logger.info(f" Dernière connexion: {user.last_login or 'Jamais'}\n")


async def delete_swagger_user(username: str):
    """Supprimer un utilisateur Swagger"""
    async with async_session_factory() as session:
        result = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        user = result.scalar_one_or_none()

        if not user:
            logger.error(f" Utilisateur '{username}' introuvable")
            return

        await session.delete(user)
        await session.commit()
        logger.info(f"🗑️ Utilisateur Swagger supprimé: {username}")


async def create_api_key(
    name: str,
    description: str = None,
    expires_in_days: int = 365,
    rate_limit: int = 60,
    endpoints: list = None,
):
    """Créer une clé API"""
    async with async_session_factory() as session:
        service = ApiKeyService(session)

        api_key_obj, api_key_plain = await service.create_api_key(
            name=name,
            description=description,
            created_by="cli",
            expires_in_days=expires_in_days,
            rate_limit_per_minute=rate_limit,
            allowed_endpoints=endpoints,
        )

        logger.info("=" * 70)
        logger.info("🔑 Clé API créée avec succès")
        logger.info("=" * 70)
        logger.info(f" ID: {api_key_obj.id}")
        logger.info(f" Nom: {api_key_obj.name}")
        logger.info(f" Clé: {api_key_plain}")
        logger.info(f" Préfixe: {api_key_obj.key_prefix}")
        logger.info(f" Rate limit: {api_key_obj.rate_limit_per_minute} req/min")
        logger.info(f" Expire le: {api_key_obj.expires_at}")

        if api_key_obj.allowed_endpoints:
            import json

            try:
                endpoints_list = json.loads(api_key_obj.allowed_endpoints)
                logger.info(f" Endpoints: {', '.join(endpoints_list)}")
            except:
                logger.info(f" Endpoints: {api_key_obj.allowed_endpoints}")
        else:
            logger.info(" Endpoints: Tous (aucune restriction)")

        logger.info("=" * 70)
        logger.info("⚠️ SAUVEGARDEZ CETTE CLÉ - Elle ne sera plus affichée !")
        logger.info("=" * 70)


async def list_api_keys():
    """Lister toutes les clés API"""
    async with async_session_factory() as session:
        service = ApiKeyService(session)
        keys = await service.list_api_keys()

        if not keys:
            logger.info("🔭 Aucune clé API")
            return

        logger.info(f"🔑 {len(keys)} clé(s) API:\n")

        for key in keys:
            is_valid = key.is_active and (
                not key.expires_at or key.expires_at > datetime.now()
            )
            status = "" if is_valid else ""

            logger.info(f" {status} {key.name:<30} ({key.key_prefix}...)")
            logger.info(f" ID: {key.id}")
            logger.info(f" Rate limit: {key.rate_limit_per_minute} req/min")
            logger.info(f" Requêtes: {key.total_requests}")
            logger.info(f" Expire: {key.expires_at or 'Jamais'}")
            logger.info(f" Dernière utilisation: {key.last_used_at or 'Jamais'}")

            if key.allowed_endpoints:
                import json

                try:
                    endpoints = json.loads(key.allowed_endpoints)
                    display = ", ".join(endpoints[:4])
                    if len(endpoints) > 4:
                        display += f"... (+{len(endpoints) - 4})"
                    logger.info(f" Endpoints: {display}")
                except:
                    pass
            else:
                logger.info(" Endpoints: Tous")
            logger.info("")


async def revoke_api_key(key_id: str):
    """Révoquer une clé API"""
    async with async_session_factory() as session:
        result = await session.execute(select(ApiKey).where(ApiKey.id == key_id))
        key = result.scalar_one_or_none()

        if not key:
            logger.error(f" Clé API '{key_id}' introuvable")
            return

        key.is_active = False
        key.revoked_at = datetime.now()
        await session.commit()

        logger.info(f"🗑️ Clé API révoquée: {key.name}")
        logger.info(f" ID: {key.id}")


async def verify_api_key(api_key: str):
    """Vérifier une clé API"""
    async with async_session_factory() as session:
        service = ApiKeyService(session)
        key = await service.verify_api_key(api_key)

        if not key:
            logger.error(" Clé API invalide ou expirée")
            return

        logger.info("=" * 60)
        logger.info(" Clé API valide")
        logger.info("=" * 60)
        logger.info(f" Nom: {key.name}")
        logger.info(f" ID: {key.id}")
        logger.info(f" Rate limit: {key.rate_limit_per_minute} req/min")
        logger.info(f" Requêtes totales: {key.total_requests}")
        logger.info(f" Expire: {key.expires_at or 'Jamais'}")

        if key.allowed_endpoints:
            import json

            try:
                endpoints = json.loads(key.allowed_endpoints)
                logger.info(f" Endpoints autorisés: {endpoints}")
            except:
                pass
        else:
            logger.info(" Endpoints autorisés: Tous")
        logger.info("=" * 60)


async def main():
    parser = argparse.ArgumentParser(
        description="Gestion des utilisateurs Swagger et clés API",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Exemples:
python scripts/manage_security.py swagger add admin MyP@ssw0rd
python scripts/manage_security.py swagger list
python scripts/manage_security.py apikey create "Mon App" --days 365 --rate-limit 100
python scripts/manage_security.py apikey create "SDK-ReadOnly" --endpoints "/clients" "/clients/*" "/devis" "/devis/*"
python scripts/manage_security.py apikey list
python scripts/manage_security.py apikey verify sdk_live_xxxxx
""",
    )
    subparsers = parser.add_subparsers(dest="command", help="Commandes")

    swagger_parser = subparsers.add_parser("swagger", help="Gestion Swagger")
    swagger_sub = swagger_parser.add_subparsers(dest="swagger_command")

    add_p = swagger_sub.add_parser("add", help="Ajouter utilisateur")
    add_p.add_argument("username", help="Nom d'utilisateur")
    add_p.add_argument("password", help="Mot de passe")
    add_p.add_argument("--full-name", help="Nom complet")

    swagger_sub.add_parser("list", help="Lister utilisateurs")

    del_p = swagger_sub.add_parser("delete", help="Supprimer utilisateur")
    del_p.add_argument("username", help="Nom d'utilisateur")

    apikey_parser = subparsers.add_parser("apikey", help="Gestion clés API")
    apikey_sub = apikey_parser.add_subparsers(dest="apikey_command")

    create_p = apikey_sub.add_parser("create", help="Créer clé API")
    create_p.add_argument("name", help="Nom de la clé")
    create_p.add_argument("--description", help="Description")
    create_p.add_argument("--days", type=int, default=365, help="Expiration (jours)")
    create_p.add_argument("--rate-limit", type=int, default=60, help="Req/min")
    create_p.add_argument("--endpoints", nargs="+", help="Endpoints autorisés")

    apikey_sub.add_parser("list", help="Lister clés")

    rev_p = apikey_sub.add_parser("revoke", help="Révoquer clé")
    rev_p.add_argument("key_id", help="ID de la clé")

    ver_p = apikey_sub.add_parser("verify", help="Vérifier clé")
    ver_p.add_argument("api_key", help="Clé API complète")

    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        return

    if args.command == "swagger":
        if args.swagger_command == "add":
            await add_swagger_user(args.username, args.password, args.full_name)
        elif args.swagger_command == "list":
            await list_swagger_users()
        elif args.swagger_command == "delete":
            await delete_swagger_user(args.username)
        else:
            swagger_parser.print_help()

    elif args.command == "apikey":
        if args.apikey_command == "create":
            await create_api_key(
                name=args.name,
                description=args.description,
                expires_in_days=args.days,
                rate_limit=args.rate_limit,
                endpoints=args.endpoints,
            )
        elif args.apikey_command == "list":
            await list_api_keys()
        elif args.apikey_command == "revoke":
            await revoke_api_key(args.key_id)
        elif args.apikey_command == "verify":
            await verify_api_key(args.api_key)
        else:
            apikey_parser.print_help()


if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\nℹ️ Interrupted")
        sys.exit(0)
    except Exception as e:
        logger.error(f" Erreur: {e}")
        import traceback

        traceback.print_exc()
        sys.exit(1)
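For context on how a key created with `apikey create` is then used by a client: the API expects it in the `X-API-Key` request header, which is the same header exercised by `scripts/test_security.py` below. A minimal sketch, assuming the API listens on http://localhost:8000 and the key is allowed on `/api/v1/clients`; the key value and URL are placeholders, not values from this changeset:

import requests

# Placeholder: the real key is printed exactly once by `apikey create` (prefix "sdk_live_")
API_KEY = "sdk_live_xxxxx"

# Call a protected endpoint with the key in the X-API-Key header
response = requests.get(
    "http://localhost:8000/api/v1/clients",  # assumed base URL for illustration
    headers={"X-API-Key": API_KEY},
    timeout=5,
)
print(response.status_code)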
354 scripts/test_security.py (Normal file)
@@ -0,0 +1,354 @@
import requests
import argparse
import sys
from typing import Tuple


class SecurityTester:
    def __init__(self, base_url: str):
        self.base_url = base_url.rstrip("/")
        self.results = {"passed": 0, "failed": 0, "tests": []}

    def log_test(self, name: str, passed: bool, details: str = ""):
        """Enregistrer le résultat d'un test"""
        status = " PASS" if passed else " FAIL"
        print(f"{status} - {name}")
        if details:
            print(f" {details}")

        self.results["tests"].append(
            {"name": name, "passed": passed, "details": details}
        )

        if passed:
            self.results["passed"] += 1
        else:
            self.results["failed"] += 1

    def test_swagger_without_auth(self) -> bool:
        """Test 1: Swagger UI devrait demander une authentification"""
        print("\n Test 1: Protection Swagger UI")

        try:
            response = requests.get(f"{self.base_url}/docs", timeout=5)

            if response.status_code == 401:
                self.log_test(
                    "Swagger protégé",
                    True,
                    "Code 401 retourné sans authentification",
                )
                return True
            else:
                self.log_test(
                    "Swagger protégé",
                    False,
                    f"Code {response.status_code} au lieu de 401",
                )
                return False

        except Exception as e:
            self.log_test("Swagger protégé", False, f"Erreur: {str(e)}")
            return False

    def test_swagger_with_auth(self, username: str, password: str) -> bool:
        """Test 2: Swagger UI accessible avec credentials valides"""
        print("\n Test 2: Accès Swagger avec authentification")

        try:
            response = requests.get(
                f"{self.base_url}/docs", auth=(username, password), timeout=5
            )

            if response.status_code == 200:
                self.log_test(
                    "Accès Swagger avec auth",
                    True,
                    f"Authentifié comme {username}",
                )
                return True
            else:
                self.log_test(
                    "Accès Swagger avec auth",
                    False,
                    f"Code {response.status_code}, credentials invalides?",
                )
                return False

        except Exception as e:
            self.log_test("Accès Swagger avec auth", False, f"Erreur: {str(e)}")
            return False

    def test_api_without_auth(self) -> bool:
        """Test 3: Endpoints API devraient demander une authentification"""
        print("\n Test 3: Protection des endpoints API")

        test_endpoints = ["/api/v1/clients", "/api/v1/documents"]

        all_protected = True
        for endpoint in test_endpoints:
            try:
                response = requests.get(f"{self.base_url}{endpoint}", timeout=5)

                if response.status_code == 401:
                    print(f" {endpoint} protégé (401)")
                else:
                    print(
                        f" {endpoint} accessible sans auth (code {response.status_code})"
                    )
                    all_protected = False

            except Exception as e:
                print(f" {endpoint} erreur: {str(e)}")
                all_protected = False

        self.log_test("Endpoints API protégés", all_protected)
        return all_protected

    def test_health_endpoint_public(self) -> bool:
        """Test 4: Endpoint /health devrait être accessible sans auth"""
        print("\n Test 4: Endpoint /health public")

        try:
            response = requests.get(f"{self.base_url}/health", timeout=5)

            if response.status_code == 200:
                self.log_test("/health accessible", True, "Endpoint public fonctionne")
                return True
            else:
                self.log_test(
                    "/health accessible",
                    False,
                    f"Code {response.status_code} inattendu",
                )
                return False

        except Exception as e:
            self.log_test("/health accessible", False, f"Erreur: {str(e)}")
            return False

    def test_api_key_creation(self, username: str, password: str) -> Tuple[bool, str]:
        """Test 5: Créer une clé API via l'endpoint"""
        print("\n Test 5: Création d'une clé API")

        try:
            login_response = requests.post(
                f"{self.base_url}/api/v1/auth/login",
                json={"email": username, "password": password},
                timeout=5,
            )

            if login_response.status_code != 200:
                self.log_test(
                    "Création clé API",
                    False,
                    "Impossible de se connecter pour obtenir un JWT",
                )
                return False, ""

            jwt_token = login_response.json().get("access_token")

            create_response = requests.post(
                f"{self.base_url}/api/v1/api-keys",
                headers={"Authorization": f"Bearer {jwt_token}"},
                json={
                    "name": "Test API Key",
                    "description": "Clé de test automatisé",
                    "rate_limit_per_minute": 60,
                    "expires_in_days": 30,
                },
                timeout=5,
            )

            if create_response.status_code == 201:
                api_key = create_response.json().get("api_key")
                self.log_test("Création clé API", True, f"Clé créée: {api_key[:20]}...")
                return True, api_key
            else:
                self.log_test(
                    "Création clé API",
                    False,
                    f"Code {create_response.status_code}",
                )
                return False, ""

        except Exception as e:
            self.log_test("Création clé API", False, f"Erreur: {str(e)}")
            return False, ""

    def test_api_key_usage(self, api_key: str) -> bool:
        """Test 6: Utiliser une clé API pour accéder à un endpoint"""
        print("\n Test 6: Utilisation d'une clé API")

        if not api_key:
            self.log_test("Utilisation clé API", False, "Pas de clé disponible")
            return False

        try:
            response = requests.get(
                f"{self.base_url}/api/v1/clients",
                headers={"X-API-Key": api_key},
                timeout=5,
            )

            if response.status_code == 200:
                self.log_test("Utilisation clé API", True, "Clé acceptée")
                return True
            else:
                self.log_test(
                    "Utilisation clé API",
                    False,
                    f"Code {response.status_code}, clé refusée?",
                )
                return False

        except Exception as e:
            self.log_test("Utilisation clé API", False, f"Erreur: {str(e)}")
            return False

    def test_invalid_api_key(self) -> bool:
        """Test 7: Une clé invalide devrait être refusée"""
        print("\n Test 7: Rejet de clé API invalide")

        invalid_key = "sdk_live_invalid_key_12345"

        try:
            response = requests.get(
                f"{self.base_url}/api/v1/clients",
                headers={"X-API-Key": invalid_key},
                timeout=5,
            )

            if response.status_code == 401:
                self.log_test("Clé invalide rejetée", True, "Code 401 comme attendu")
                return True
            else:
                self.log_test(
                    "Clé invalide rejetée",
                    False,
                    f"Code {response.status_code} au lieu de 401",
                )
                return False

        except Exception as e:
            self.log_test("Clé invalide rejetée", False, f"Erreur: {str(e)}")
            return False

    def test_rate_limiting(self, api_key: str) -> bool:
        """Test 8: Rate limiting (optionnel, peut prendre du temps)"""
        print("\n Test 8: Rate limiting (test simple)")

        if not api_key:
            self.log_test("Rate limiting", False, "Pas de clé disponible")
            return False

        print(" Envoi de 70 requêtes rapides...")

        rate_limited = False
        for i in range(70):
            try:
                response = requests.get(
                    f"{self.base_url}/health",
                    headers={"X-API-Key": api_key},
                    timeout=1,
                )

                if response.status_code == 429:
                    rate_limited = True
                    print(f" Rate limit atteint à la requête {i + 1}")
                    break

            except Exception:
                pass

        if rate_limited:
            self.log_test("Rate limiting", True, "Rate limit détecté")
            return True
        else:
            self.log_test(
                "Rate limiting",
                True,
                "Aucun rate limit détecté (peut être normal si pas implémenté)",
            )
            return True

    def print_summary(self):
        """Afficher le résumé des tests"""
        print("\n" + "=" * 60)
        print(" RÉSUMÉ DES TESTS")
        print("=" * 60)

        total = self.results["passed"] + self.results["failed"]
        success_rate = (self.results["passed"] / total * 100) if total > 0 else 0

        print(f"\nTotal: {total} tests")
        print(f" Réussis: {self.results['passed']}")
        print(f" Échoués: {self.results['failed']}")
        print(f"Taux de réussite: {success_rate:.1f}%\n")

        if self.results["failed"] == 0:
            print("🎉 Tous les tests sont passés ! Sécurité OK.")
            return 0
        else:
            print(" Certains tests ont échoué. Vérifiez la configuration.")
            return 1


def main():
    parser = argparse.ArgumentParser(
        description="Test automatisé de la sécurité de l'API"
    )

    parser.add_argument(
        "--url",
        required=True,
        help="URL de base de l'API (ex: http://localhost:8000)",
    )

    parser.add_argument(
        "--swagger-user", required=True, help="Utilisateur Swagger pour les tests"
    )

    parser.add_argument(
        "--swagger-pass", required=True, help="Mot de passe Swagger pour les tests"
    )

    parser.add_argument(
        "--skip-rate-limit",
        action="store_true",
        help="Sauter le test de rate limiting (long)",
    )

    args = parser.parse_args()

    print(" Démarrage des tests de sécurité")
    print(f" URL cible: {args.url}\n")

    tester = SecurityTester(args.url)

    tester.test_swagger_without_auth()
    tester.test_swagger_with_auth(args.swagger_user, args.swagger_pass)
    tester.test_api_without_auth()
    tester.test_health_endpoint_public()

    success, api_key = tester.test_api_key_creation(
        args.swagger_user, args.swagger_pass
    )

    if success and api_key:
        tester.test_api_key_usage(api_key)
        tester.test_invalid_api_key()

        if not args.skip_rate_limit:
            tester.test_rate_limiting(api_key)
        else:
            print("\n Test de rate limiting sauté")
    else:
        print("\n Tests avec clé API sautés (création échouée)")

    exit_code = tester.print_summary()
    sys.exit(exit_code)


if __name__ == "__main__":
    main()
|
||||||
import secrets
|
import secrets
|
||||||
import hashlib
|
import hashlib
|
||||||
|
|
||||||
SECRET_KEY = "VOTRE_SECRET_KEY_A_METTRE_EN_.ENV"
|
from config.config import settings
|
||||||
ALGORITHM = "HS256"
|
|
||||||
ACCESS_TOKEN_EXPIRE_MINUTES = 10080
|
SECRET_KEY = settings.jwt_secret
|
||||||
REFRESH_TOKEN_EXPIRE_DAYS = 7
|
ALGORITHM = settings.jwt_algorithm
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES = settings.access_token_expire_minutes
|
||||||
|
REFRESH_TOKEN_EXPIRE_DAYS = settings.refresh_token_expire_days
|
||||||
|
|
||||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
|
||||||
|
|
@ -67,9 +69,13 @@ def decode_token(token: str) -> Optional[Dict]:
|
||||||
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
||||||
return payload
|
return payload
|
||||||
except jwt.ExpiredSignatureError:
|
except jwt.ExpiredSignatureError:
|
||||||
return None
|
raise jwt.InvalidTokenError("Token expiré")
|
||||||
except jwt.JWTError:
|
except jwt.DecodeError:
|
||||||
return None
|
raise jwt.InvalidTokenError("Token invalide (format incorrect)")
|
||||||
|
except jwt.InvalidTokenError as e:
|
||||||
|
raise jwt.InvalidTokenError(f"Token invalide: {str(e)}")
|
||||||
|
except Exception as e:
|
||||||
|
raise jwt.InvalidTokenError(f"Erreur lors du décodage du token: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
def validate_password_strength(password: str) -> tuple[bool, str]:
|
def validate_password_strength(password: str) -> tuple[bool, str]:
|
||||||
|
|
|
||||||
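Note the behavioural change above: decode_token no longer returns None on failure, it raises jwt.InvalidTokenError with a specific message. A minimal caller-side sketch; the mapping to an HTTP 401 is an assumption for illustration, not part of this diff:

import jwt
from fastapi import HTTPException, status

from security.auth import decode_token


def get_payload_or_401(token: str) -> dict:
    # decode_token now raises jwt.InvalidTokenError for expired or malformed tokens
    try:
        return decode_token(token)
    except jwt.InvalidTokenError as exc:
        # Translating the error into a 401 response is an illustrative choice
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(exc))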
223 services/api_key.py (Normal file)
@@ -0,0 +1,223 @@
import secrets
import hashlib
import json
from datetime import datetime, timedelta
from typing import Optional, List, Dict
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, or_
import logging

from database.models.api_key import ApiKey

logger = logging.getLogger(__name__)


class ApiKeyService:
    """Service de gestion des clés API"""

    def __init__(self, session: AsyncSession):
        self.session = session

    @staticmethod
    def generate_api_key() -> str:
        """Génère une clé API unique et sécurisée"""
        random_part = secrets.token_urlsafe(32)
        return f"sdk_live_{random_part}"

    @staticmethod
    def hash_api_key(api_key: str) -> str:
        """Hash la clé API pour stockage sécurisé"""
        return hashlib.sha256(api_key.encode()).hexdigest()

    @staticmethod
    def get_key_prefix(api_key: str) -> str:
        """Extrait le préfixe de la clé pour identification"""
        return api_key[:12] if len(api_key) >= 12 else api_key

    async def create_api_key(
        self,
        name: str,
        description: Optional[str] = None,
        created_by: str = "system",
        user_id: Optional[str] = None,
        expires_in_days: Optional[int] = None,
        rate_limit_per_minute: int = 60,
        allowed_endpoints: Optional[List[str]] = None,
    ) -> tuple[ApiKey, str]:
        api_key_plain = self.generate_api_key()
        key_hash = self.hash_api_key(api_key_plain)
        key_prefix = self.get_key_prefix(api_key_plain)

        expires_at = None
        if expires_in_days:
            expires_at = datetime.now() + timedelta(days=expires_in_days)

        api_key_obj = ApiKey(
            key_hash=key_hash,
            key_prefix=key_prefix,
            name=name,
            description=description,
            created_by=created_by,
            user_id=user_id,
            expires_at=expires_at,
            rate_limit_per_minute=rate_limit_per_minute,
            allowed_endpoints=json.dumps(allowed_endpoints)
            if allowed_endpoints
            else None,
        )

        self.session.add(api_key_obj)
        await self.session.commit()
        await self.session.refresh(api_key_obj)

        logger.info(f" Clé API créée: {name} (prefix: {key_prefix})")

        return api_key_obj, api_key_plain

    async def verify_api_key(self, api_key_plain: str) -> Optional[ApiKey]:
        key_hash = self.hash_api_key(api_key_plain)

        result = await self.session.execute(
            select(ApiKey).where(
                and_(
                    ApiKey.key_hash == key_hash,
                    ApiKey.is_active,
                    ApiKey.revoked_at.is_(None),
                    or_(
                        ApiKey.expires_at.is_(None), ApiKey.expires_at > datetime.now()
                    ),
                )
            )
        )

        api_key_obj = result.scalar_one_or_none()

        if api_key_obj:
            api_key_obj.total_requests += 1
            api_key_obj.last_used_at = datetime.now()
            await self.session.commit()

            logger.debug(f" Clé API validée: {api_key_obj.name}")
        else:
            logger.warning(" Clé API invalide ou expirée")

        return api_key_obj

    async def list_api_keys(
        self,
        include_revoked: bool = False,
        user_id: Optional[str] = None,
    ) -> List[ApiKey]:
        """Liste les clés API"""
        query = select(ApiKey)

        if not include_revoked:
            query = query.where(ApiKey.revoked_at.is_(None))

        if user_id:
            query = query.where(ApiKey.user_id == user_id)

        query = query.order_by(ApiKey.created_at.desc())

        result = await self.session.execute(query)
        return list(result.scalars().all())

    async def revoke_api_key(self, key_id: str) -> bool:
        """Révoque une clé API"""
        result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
        api_key_obj = result.scalar_one_or_none()

        if not api_key_obj:
            return False

        api_key_obj.is_active = False
        api_key_obj.revoked_at = datetime.now()
        await self.session.commit()

        logger.info(f"🗑️ Clé API révoquée: {api_key_obj.name}")
        return True

    async def get_by_id(self, key_id: str) -> Optional[ApiKey]:
        """Récupère une clé API par son ID"""
        result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
        return result.scalar_one_or_none()

    async def check_rate_limit(self, api_key_obj: ApiKey) -> tuple[bool, Dict]:
        return True, {
            "allowed": True,
            "limit": api_key_obj.rate_limit_per_minute,
            "remaining": api_key_obj.rate_limit_per_minute,
        }

    async def check_endpoint_access(self, api_key_obj: ApiKey, endpoint: str) -> bool:
        if not api_key_obj.allowed_endpoints:
            logger.debug(
                f"🔓 API Key {api_key_obj.name}: Aucune restriction d'endpoint"
            )
            return True

        try:
            allowed = json.loads(api_key_obj.allowed_endpoints)

            if "*" in allowed or "/*" in allowed:
                logger.debug(f"🔓 API Key {api_key_obj.name}: Accès global autorisé")
                return True

            for pattern in allowed:
                if pattern == endpoint:
                    logger.debug(f" Match exact: {pattern} == {endpoint}")
                    return True

                if pattern.endswith("/*"):
                    base = pattern[:-2]  # "/clients/*" → "/clients"
                    if endpoint == base or endpoint.startswith(base + "/"):
                        logger.debug(f" Match wildcard: {pattern} ↔ {endpoint}")
                        return True

                elif pattern.endswith("*"):
                    base = pattern[:-1]  # "/clients*" → "/clients"
                    if endpoint.startswith(base):
                        logger.debug(f" Match prefix: {pattern} ↔ {endpoint}")
                        return True

            logger.warning(
                f" API Key {api_key_obj.name}: Accès refusé à {endpoint}\n"
                f" Endpoints autorisés: {allowed}"
            )
            return False

        except json.JSONDecodeError:
            logger.error(f" Erreur parsing allowed_endpoints pour {api_key_obj.id}")
            return False


def api_key_to_response(api_key_obj: ApiKey, show_key: bool = False) -> Dict:
    """Convertit un objet ApiKey en réponse API"""

    allowed_endpoints = None
    if api_key_obj.allowed_endpoints:
        try:
            allowed_endpoints = json.loads(api_key_obj.allowed_endpoints)
        except json.JSONDecodeError:
            pass

    is_expired = False
    if api_key_obj.expires_at:
        is_expired = api_key_obj.expires_at < datetime.now()

    return {
        "id": api_key_obj.id,
        "name": api_key_obj.name,
        "description": api_key_obj.description,
        "key_prefix": api_key_obj.key_prefix,
        "is_active": api_key_obj.is_active,
        "is_expired": is_expired,
        "rate_limit_per_minute": api_key_obj.rate_limit_per_minute,
        "allowed_endpoints": allowed_endpoints,
        "total_requests": api_key_obj.total_requests,
        "last_used_at": api_key_obj.last_used_at,
        "created_at": api_key_obj.created_at,
        "expires_at": api_key_obj.expires_at,
        "revoked_at": api_key_obj.revoked_at,
        "created_by": api_key_obj.created_by,
    }
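A minimal usage sketch of ApiKeyService as defined above, reusing the async_session_factory that scripts/manage_security.py imports; it assumes the ApiKey model defaults is_active to True for freshly created keys, which is not shown in this changeset:

import asyncio

from database.db_config import async_session_factory
from services.api_key import ApiKeyService


async def demo() -> None:
    async with async_session_factory() as session:
        service = ApiKeyService(session)

        # create_api_key returns the stored row plus the plain key (shown only once)
        key_obj, plain_key = await service.create_api_key(
            name="Demo key",
            expires_in_days=30,
            rate_limit_per_minute=60,
            allowed_endpoints=["/clients", "/clients/*"],
        )

        # verify_api_key matches on the SHA-256 hash, activity flag and expiry
        verified = await service.verify_api_key(plain_key)
        if verified:
            # "/clients/*" matches "/clients/123" via the wildcard branch
            allowed = await service.check_endpoint_access(verified, "/clients/123")
            print(f"{verified.name}: access to /clients/123 = {allowed}")


if __name__ == "__main__":
    asyncio.run(demo())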
@ -6,7 +6,7 @@ import httpx
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Optional, Tuple, List
|
from typing import Optional, Tuple, List
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
from sqlalchemy import false, select, true, update, and_
|
from sqlalchemy import false, select, update, and_
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from config.config import settings
|
from config.config import settings
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,7 @@ class UniversignDocumentService:
|
||||||
|
|
||||||
def fetch_transaction_documents(self, transaction_id: str) -> Optional[List[Dict]]:
|
def fetch_transaction_documents(self, transaction_id: str) -> Optional[List[Dict]]:
|
||||||
try:
|
try:
|
||||||
logger.info(f"📋 Récupération documents pour transaction: {transaction_id}")
|
logger.info(f" Récupération documents pour transaction: {transaction_id}")
|
||||||
|
|
||||||
response = requests.get(
|
response = requests.get(
|
||||||
f"{self.api_url}/transactions/{transaction_id}",
|
f"{self.api_url}/transactions/{transaction_id}",
|
||||||
|
|
@ -38,7 +38,6 @@ class UniversignDocumentService:
|
||||||
|
|
||||||
logger.info(f"{len(documents)} document(s) trouvé(s)")
|
logger.info(f"{len(documents)} document(s) trouvé(s)")
|
||||||
|
|
||||||
# Log détaillé de chaque document
|
|
||||||
for idx, doc in enumerate(documents):
|
for idx, doc in enumerate(documents):
|
||||||
logger.debug(
|
logger.debug(
|
||||||
f" Document {idx}: id={doc.get('id')}, "
|
f" Document {idx}: id={doc.get('id')}, "
|
||||||
|
|
@ -64,7 +63,7 @@ class UniversignDocumentService:
|
||||||
logger.error(f"⏱️ Timeout récupération transaction {transaction_id}")
|
logger.error(f"⏱️ Timeout récupération transaction {transaction_id}")
|
||||||
return None
|
return None
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"❌ Erreur fetch documents: {e}", exc_info=True)
|
logger.error(f" Erreur fetch documents: {e}", exc_info=True)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def download_signed_document(
|
def download_signed_document(
|
||||||
|
|
@ -94,7 +93,6 @@ class UniversignDocumentService:
|
||||||
f"Content-Type={content_type}, Size={content_length}"
|
f"Content-Type={content_type}, Size={content_length}"
|
||||||
)
|
)
|
||||||
|
|
||||||
# Vérification du type de contenu
|
|
||||||
if (
|
if (
|
||||||
"pdf" not in content_type.lower()
|
"pdf" not in content_type.lower()
|
||||||
and "octet-stream" not in content_type.lower()
|
and "octet-stream" not in content_type.lower()
|
||||||
|
|
@ -104,31 +102,30 @@ class UniversignDocumentService:
|
||||||
f"Tentative de lecture quand même..."
|
f"Tentative de lecture quand même..."
|
||||||
)
|
)
|
||||||
|
|
||||||
# Lecture du contenu
|
|
||||||
content = response.content
|
content = response.content
|
||||||
|
|
||||||
if len(content) < 1024:
|
if len(content) < 1024:
|
||||||
logger.error(f"❌ Document trop petit: {len(content)} octets")
|
logger.error(f" Document trop petit: {len(content)} octets")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
return content
|
return content
|
||||||
|
|
||||||
elif response.status_code == 404:
|
elif response.status_code == 404:
|
||||||
logger.error(
|
logger.error(
|
||||||
f"❌ Document {document_id} introuvable pour transaction {transaction_id}"
|
f" Document {document_id} introuvable pour transaction {transaction_id}"
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
elif response.status_code == 403:
|
elif response.status_code == 403:
|
||||||
logger.error(
|
logger.error(
|
||||||
f"❌ Accès refusé au document {document_id}. "
|
f" Accès refusé au document {document_id}. "
|
||||||
f"Vérifiez que la transaction est bien signée."
|
f"Vérifiez que la transaction est bien signée."
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
else:
|
else:
|
||||||
logger.error(
|
logger.error(
|
||||||
f"❌ Erreur HTTP {response.status_code}: {response.text[:500]}"
|
f" Erreur HTTP {response.status_code}: {response.text[:500]}"
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
@ -136,13 +133,12 @@ class UniversignDocumentService:
|
||||||
logger.error(f"⏱️ Timeout téléchargement document {document_id}")
|
logger.error(f"⏱️ Timeout téléchargement document {document_id}")
|
||||||
return None
|
return None
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"❌ Erreur téléchargement: {e}", exc_info=True)
|
logger.error(f" Erreur téléchargement: {e}", exc_info=True)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
async def download_and_store_signed_document(
|
async def download_and_store_signed_document(
|
||||||
self, session: AsyncSession, transaction, force: bool = False
|
self, session: AsyncSession, transaction, force: bool = False
|
||||||
) -> Tuple[bool, Optional[str]]:
|
) -> Tuple[bool, Optional[str]]:
|
||||||
# Vérification si déjà téléchargé
|
|
||||||
if not force and transaction.signed_document_path:
|
if not force and transaction.signed_document_path:
|
||||||
if os.path.exists(transaction.signed_document_path):
|
if os.path.exists(transaction.signed_document_path):
|
||||||
logger.debug(
|
logger.debug(
|
||||||
|
|
@ -153,7 +149,6 @@ class UniversignDocumentService:
|
||||||
transaction.download_attempts += 1
|
transaction.download_attempts += 1
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# ÉTAPE 1: Récupérer les documents de la transaction
|
|
||||||
logger.info(
|
logger.info(
|
||||||
f"Récupération document signé pour: {transaction.transaction_id}"
|
f"Récupération document signé pour: {transaction.transaction_id}"
|
||||||
)
|
)
|
||||||
|
|
@ -167,13 +162,11 @@ class UniversignDocumentService:
|
||||||
await session.commit()
|
await session.commit()
|
||||||
return False, error
|
return False, error
|
||||||
|
|
||||||
# ÉTAPE 2: Récupérer le premier document (ou chercher celui qui est signé)
|
|
||||||
document_id = None
|
document_id = None
|
||||||
for doc in documents:
|
for doc in documents:
|
||||||
doc_id = doc.get("id")
|
doc_id = doc.get("id")
|
||||||
doc_status = doc.get("status", "").lower()
|
doc_status = doc.get("status", "").lower()
|
||||||
|
|
||||||
# Priorité aux documents marqués comme signés/complétés
|
|
||||||
if doc_status in ["signed", "completed", "closed"]:
|
if doc_status in ["signed", "completed", "closed"]:
|
||||||
document_id = doc_id
|
document_id = doc_id
|
||||||
logger.info(
|
logger.info(
|
||||||
|
|
@ -181,34 +174,30 @@ class UniversignDocumentService:
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
|
|
||||||
# Fallback sur le premier document si aucun n'est explicitement signé
|
|
||||||
if document_id is None:
|
if document_id is None:
|
||||||
document_id = doc_id
|
document_id = doc_id
|
||||||
|
|
||||||
if not document_id:
|
if not document_id:
|
||||||
error = "Impossible de déterminer l'ID du document à télécharger"
|
error = "Impossible de déterminer l'ID du document à télécharger"
|
||||||
logger.error(f"❌ {error}")
|
logger.error(f" {error}")
|
||||||
transaction.download_error = error
|
transaction.download_error = error
|
||||||
await session.commit()
|
await session.commit()
|
||||||
return False, error
|
return False, error
|
||||||
|
|
||||||
# Stocker le document_id pour référence future
|
|
||||||
if hasattr(transaction, "universign_document_id"):
|
if hasattr(transaction, "universign_document_id"):
|
||||||
transaction.universign_document_id = document_id
|
transaction.universign_document_id = document_id
|
||||||
|
|
||||||
# ÉTAPE 3: Télécharger le document signé
|
|
||||||
pdf_content = self.download_signed_document(
|
pdf_content = self.download_signed_document(
|
||||||
transaction_id=transaction.transaction_id, document_id=document_id
|
transaction_id=transaction.transaction_id, document_id=document_id
|
||||||
)
|
)
|
||||||
|
|
||||||
if not pdf_content:
|
if not pdf_content:
|
||||||
error = f"Échec téléchargement document {document_id}"
|
error = f"Échec téléchargement document {document_id}"
|
||||||
logger.error(f"❌ {error}")
|
logger.error(f" {error}")
|
||||||
transaction.download_error = error
|
transaction.download_error = error
|
||||||
await session.commit()
|
await session.commit()
|
||||||
return False, error
|
return False, error
|
||||||
|
|
||||||
# ÉTAPE 4: Stocker le fichier localement
|
|
||||||
filename = self._generate_filename(transaction)
|
filename = self._generate_filename(transaction)
|
||||||
file_path = SIGNED_DOCS_DIR / filename
|
file_path = SIGNED_DOCS_DIR / filename
|
||||||
|
|
||||||
|
|
@ -217,13 +206,11 @@ class UniversignDocumentService:
|
||||||
|
|
||||||
file_size = os.path.getsize(file_path)
|
file_size = os.path.getsize(file_path)
|
||||||
|
|
||||||
# Mise à jour de la transaction
|
|
||||||
transaction.signed_document_path = str(file_path)
|
transaction.signed_document_path = str(file_path)
|
||||||
transaction.signed_document_downloaded_at = datetime.now()
|
transaction.signed_document_downloaded_at = datetime.now()
|
||||||
transaction.signed_document_size_bytes = file_size
|
transaction.signed_document_size_bytes = file_size
|
||||||
transaction.download_error = None
|
transaction.download_error = None
|
||||||
|
|
||||||
# Stocker aussi l'URL de téléchargement pour référence
|
|
||||||
transaction.document_url = (
|
transaction.document_url = (
|
||||||
f"{self.api_url}/transactions/{transaction.transaction_id}"
|
f"{self.api_url}/transactions/{transaction.transaction_id}"
|
||||||
f"/documents/{document_id}/download"
|
f"/documents/{document_id}/download"
|
||||||
|
|
@ -239,14 +226,14 @@ class UniversignDocumentService:
|
||||||
|
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
error = f"Erreur filesystem: {str(e)}"
|
error = f"Erreur filesystem: {str(e)}"
|
||||||
logger.error(f"❌ {error}")
|
logger.error(f" {error}")
|
||||||
transaction.download_error = error
|
transaction.download_error = error
|
||||||
await session.commit()
|
await session.commit()
|
||||||
return False, error
|
return False, error
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
error = f"Erreur inattendue: {str(e)}"
|
error = f"Erreur inattendue: {str(e)}"
|
||||||
logger.error(f"❌ {error}", exc_info=True)
|
logger.error(f" {error}", exc_info=True)
|
||||||
transaction.download_error = error
|
transaction.download_error = error
|
||||||
await session.commit()
|
await session.commit()
|
||||||
return False, error
|
return False, error
|
||||||
|
|
@ -294,7 +281,6 @@ class UniversignDocumentService:
|
||||||
|
|
||||||
return deleted, int(size_freed_mb)
|
return deleted, int(size_freed_mb)
|
||||||
|
|
||||||
# === MÉTHODES DE DIAGNOSTIC ===
|
|
||||||
|
|
||||||
def diagnose_transaction(self, transaction_id: str) -> Dict:
|
def diagnose_transaction(self, transaction_id: str) -> Dict:
|
||||||
"""
|
"""
|
||||||
|
|
@ -308,7 +294,6 @@ class UniversignDocumentService:
|
||||||
}
|
}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Test 1: Récupération de la transaction
|
|
||||||
logger.info(f"Diagnostic transaction: {transaction_id}")
|
logger.info(f"Diagnostic transaction: {transaction_id}")
|
||||||
|
|
||||||
response = requests.get(
|
response = requests.get(
|
||||||
|
|
@ -334,7 +319,6 @@ class UniversignDocumentService:
|
||||||
"participants_count": len(data.get("participants", [])),
|
"participants_count": len(data.get("participants", [])),
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test 2: Documents disponibles
|
|
||||||
documents = data.get("documents", [])
|
documents = data.get("documents", [])
|
||||||
result["checks"]["documents"] = []
|
result["checks"]["documents"] = []
|
||||||
|
|
||||||
|
|
@ -345,7 +329,6 @@ class UniversignDocumentService:
|
||||||
"status": doc.get("status"),
|
"status": doc.get("status"),
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test téléchargement
|
|
||||||
if doc.get("id"):
|
if doc.get("id"):
|
||||||
download_url = (
|
download_url = (
|
||||||
f"{self.api_url}/transactions/{transaction_id}"
|
f"{self.api_url}/transactions/{transaction_id}"
|
||||||
|
|
|
||||||
|
|
@@ -159,7 +159,6 @@ class UniversignSyncService:
 
         return stats
 
-    # CORRECTION 1 : process_webhook dans universign_sync.py
     async def process_webhook(
         self, session: AsyncSession, payload: Dict, transaction_id: str = None
     ) -> Tuple[bool, Optional[str]]:

@@ -167,9 +166,7 @@ class UniversignSyncService:
         Traite un webhook Universign - CORRECTION : meilleure gestion des payloads
         """
         try:
-            # Si transaction_id n'est pas fourni, essayer de l'extraire
             if not transaction_id:
-                # Même logique que dans universign.py
                 if (
                     payload.get("type", "").startswith("transaction.")
                     and "payload" in payload

@@ -195,7 +192,6 @@ class UniversignSyncService:
                 f"📨 Traitement webhook: transaction={transaction_id}, event={event_type}"
             )
 
-            # Récupérer la transaction locale
             query = (
                 select(UniversignTransaction)
                 .options(selectinload(UniversignTransaction.signers))

@@ -208,25 +204,20 @@ class UniversignSyncService:
                 logger.warning(f"Transaction {transaction_id} inconnue localement")
                 return False, "Transaction inconnue"
 
-            # Marquer comme webhook reçu
             transaction.webhook_received = True
 
-            # Stocker l'ancien statut pour comparaison
             old_status = transaction.local_status.value
 
-            # Force la synchronisation complète
             success, error = await self.sync_transaction(
                 session, transaction, force=True
             )
 
-            # Log du changement de statut
             if success and transaction.local_status.value != old_status:
                 logger.info(
                     f"Webhook traité: {transaction_id} | "
                     f"{old_status} → {transaction.local_status.value}"
                 )
 
-            # Enregistrer le log du webhook
             await self._log_sync_attempt(
                 session=session,
                 transaction=transaction,

@@ -248,7 +239,6 @@ class UniversignSyncService:
             logger.error(f"💥 Erreur traitement webhook: {e}", exc_info=True)
             return False, str(e)
 
-    # CORRECTION 2 : _sync_signers - Ne pas écraser les signers existants
     async def _sync_signers(
         self,
         session: AsyncSession,

@@ -271,7 +261,6 @@ class UniversignSyncService:
                 logger.warning(f"Signataire sans email à l'index {idx}, ignoré")
                 continue
 
-            # PROTECTION : gérer les statuts inconnus
             raw_status = signer_data.get("status") or signer_data.get(
                 "state", "waiting"
             )

@@ -302,7 +291,6 @@ class UniversignSyncService:
                 if signer_data.get("name") and not signer.name:
                     signer.name = signer_data.get("name")
             else:
-                # Nouveau signer avec gestion d'erreur intégrée
                 try:
                     signer = UniversignSigner(
                         id=f"{transaction.id}_signer_{idx}_{int(datetime.now().timestamp())}",

@@ -330,7 +318,6 @@ class UniversignSyncService:
     ):
         import json
 
-        # Si statut final et pas de force, skip
         if is_final_status(transaction.local_status.value) and not force:
             logger.debug(
                 f"⏭️ Skip {transaction.transaction_id}: statut final "

@@ -340,14 +327,13 @@ class UniversignSyncService:
             await session.commit()
             return True, None
 
-        # Récupération du statut distant
         logger.info(f"Synchronisation: {transaction.transaction_id}")
 
         result = self.fetch_transaction_status(transaction.transaction_id)
 
         if not result:
             error = "Échec récupération données Universign"
-            logger.error(f"❌ {error}: {transaction.transaction_id}")
+            logger.error(f" {error}: {transaction.transaction_id}")
             transaction.sync_attempts += 1
             transaction.sync_error = error
             await self._log_sync_attempt(session, transaction, "polling", False, error)

@@ -358,9 +344,8 @@ class UniversignSyncService:
         universign_data = result["transaction"]
         universign_status_raw = universign_data.get("state", "draft")
 
-        logger.info(f"📊 Statut Universign brut: {universign_status_raw}")
+        logger.info(f" Statut Universign brut: {universign_status_raw}")
 
-        # Convertir le statut
         new_local_status = map_universign_to_local(universign_status_raw)
         previous_local_status = transaction.local_status.value
 

@@ -369,7 +354,6 @@ class UniversignSyncService:
             f"{new_local_status} (Local) | Actuel: {previous_local_status}"
         )
 
-        # Vérifier la transition
         if not is_transition_allowed(previous_local_status, new_local_status):
             logger.warning(
                 f"Transition refusée: {previous_local_status} → {new_local_status}"

@@ -383,10 +367,9 @@ class UniversignSyncService:
 
         if status_changed:
             logger.info(
-                f"🔔 CHANGEMENT DÉTECTÉ: {previous_local_status} → {new_local_status}"
+                f"CHANGEMENT DÉTECTÉ: {previous_local_status} → {new_local_status}"
             )
 
-            # Mise à jour du statut Universign brut
             try:
                 transaction.universign_status = UniversignTransactionStatus(
                     universign_status_raw

@@ -404,14 +387,12 @@ class UniversignSyncService:
             else:
                 transaction.universign_status = UniversignTransactionStatus.STARTED
 
-            # Mise à jour du statut local
             transaction.local_status = LocalDocumentStatus(new_local_status)
             transaction.universign_status_updated_at = datetime.now()
 
-            # Mise à jour des dates
             if new_local_status == "EN_COURS" and not transaction.sent_at:
                 transaction.sent_at = datetime.now()
-                logger.info("📅 Date d'envoi mise à jour")
+                logger.info("Date d'envoi mise à jour")
 
             if new_local_status == "SIGNE" and not transaction.signed_at:
                 transaction.signed_at = datetime.now()

@@ -419,15 +400,11 @@ class UniversignSyncService:
 
             if new_local_status == "REFUSE" and not transaction.refused_at:
                 transaction.refused_at = datetime.now()
-                logger.info("❌ Date de refus mise à jour")
+                logger.info(" Date de refus mise à jour")
 
             if new_local_status == "EXPIRE" and not transaction.expired_at:
                 transaction.expired_at = datetime.now()
-                logger.info("⏰ Date d'expiration mise à jour")
+                logger.info("Date d'expiration mise à jour")
 
-        # === SECTION CORRIGÉE: Gestion des documents ===
-        # Ne plus chercher document_url dans la réponse (elle n'existe pas!)
-        # Le téléchargement se fait via le service document qui utilise le bon endpoint
 
         documents = universign_data.get("documents", [])
         if documents:

@@ -437,7 +414,6 @@ class UniversignSyncService:
                 f"status={first_doc.get('status')}"
             )
 
-        # Téléchargement automatique du document signé
         if new_local_status == "SIGNE" and not transaction.signed_document_path:
             logger.info("Déclenchement téléchargement document signé...")
 

@@ -455,21 +431,15 @@ class UniversignSyncService:
                     logger.warning(f"Échec téléchargement: {download_error}")
 
             except Exception as e:
-                logger.error(
-                    f"❌ Erreur téléchargement document: {e}", exc_info=True
-                )
+                logger.error(f" Erreur téléchargement document: {e}", exc_info=True)
-        # === FIN SECTION CORRIGÉE ===
 
-        # Synchroniser les signataires
         await self._sync_signers(session, transaction, universign_data)
 
-        # Mise à jour des métadonnées de sync
         transaction.last_synced_at = datetime.now()
         transaction.sync_attempts += 1
         transaction.needs_sync = not is_final_status(new_local_status)
         transaction.sync_error = None
 
-        # Log de la tentative
         await self._log_sync_attempt(
             session=session,
             transaction=transaction,

@@ -491,7 +461,6 @@ class UniversignSyncService:
 
         await session.commit()
 
-        # Exécuter les actions post-changement
         if status_changed:
             logger.info(f"🎬 Exécution actions pour statut: {new_local_status}")
             await self._execute_status_actions(

@@ -507,7 +476,7 @@ class UniversignSyncService:
 
         except Exception as e:
             error_msg = f"Erreur lors de la synchronisation: {str(e)}"
-            logger.error(f"❌ {error_msg}", exc_info=True)
+            logger.error(f" {error_msg}", exc_info=True)
 
             transaction.sync_error = error_msg[:1000]
             transaction.sync_attempts += 1

@@ -519,20 +488,16 @@ class UniversignSyncService:
 
             return False, error_msg
 
-    # CORRECTION 3 : Amélioration du logging dans sync_transaction
     async def _sync_transaction_documents_corrected(
         self, session, transaction, universign_data: dict, new_local_status: str
     ):
-        # Récupérer et stocker les infos documents
         documents = universign_data.get("documents", [])
 
         if documents:
-            # Stocker le premier document_id pour référence
             first_doc = documents[0]
             first_doc_id = first_doc.get("id")
 
             if first_doc_id:
-                # Stocker l'ID du document (si le champ existe dans le modèle)
                 if hasattr(transaction, "universign_document_id"):
                     transaction.universign_document_id = first_doc_id
 

@@ -543,7 +508,6 @@ class UniversignSyncService:
         else:
             logger.debug("Aucun document dans la réponse Universign")
 
-        # Téléchargement automatique si signé
         if new_local_status == "SIGNE":
             if not transaction.signed_document_path:
                 logger.info("Déclenchement téléchargement document signé...")

@@ -562,9 +526,7 @@ class UniversignSyncService:
                         logger.warning(f"Échec téléchargement: {download_error}")
 
                 except Exception as e:
-                    logger.error(
-                        f"❌ Erreur téléchargement document: {e}", exc_info=True
-                    )
+                    logger.error(f" Erreur téléchargement document: {e}", exc_info=True)
             else:
                 logger.debug(
                     f"Document déjà téléchargé: {transaction.signed_document_path}"
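Note: process_webhook above falls back to extracting the transaction id from the webhook body when it is not supplied by the route, accepting an envelope of the form {"type": "transaction.*", "payload": {...}}. A minimal sketch of such an extraction helper follows; the inner key names ("id", "transaction_id") are assumptions based on the visible handler, not a documented Universign schema.

from typing import Dict, Optional

def extract_transaction_id(payload: Dict) -> Optional[str]:
    """Best-effort extraction of the transaction id from a webhook body (sketch)."""
    # Envelope shape assumed from the handler above: {"type": "transaction.*", "payload": {...}}
    if payload.get("type", "").startswith("transaction.") and "payload" in payload:
        inner = payload.get("payload") or {}
        return inner.get("id") or inner.get("transaction_id")
    # Fall back to a flat body, e.g. {"id": "..."}
    return payload.get("id") or payload.get("transaction_id")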
15 tools/cleaner.py Normal file

@@ -0,0 +1,15 @@
+from pathlib import Path
+
+
+def supprimer_commentaires_ligne(fichier):
+    path = Path(fichier)
+    lignes = path.read_text(encoding="utf-8").splitlines()
+    lignes_sans_commentaires = [line for line in lignes if not line.lstrip().startswith("#")]
+    path.write_text("\n".join(lignes_sans_commentaires), encoding="utf-8")
+
+
+if __name__ == "__main__":
+    base_dir = Path(__file__).resolve().parent.parent
+    fichier_api = base_dir / "data/data.py"
+
+    supprimer_commentaires_ligne(fichier_api)
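Note: this tool drops every line whose stripped form starts with "#", which is what removes the comments in the hunks above; it would also drop shebangs and directive comments such as "# noqa" or "# type:". A hypothetical, more conservative variant (not part of this repository) could whitelist such prefixes:

from pathlib import Path

def strip_comment_lines(path: Path, keep_prefixes=("#!", "# -*-", "# type:", "# noqa")) -> None:
    """Hypothetical variant: drop full-line comments but keep shebang/directive lines."""
    lines = path.read_text(encoding="utf-8").splitlines()
    kept = [
        line for line in lines
        if not line.lstrip().startswith("#") or line.lstrip().startswith(keep_prefixes)
    ]
    path.write_text("\n".join(kept) + "\n", encoding="utf-8")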
52 tools/extract_pydantic_models.py Normal file

@@ -0,0 +1,52 @@
+import ast
+import os
+import textwrap
+
+SOURCE_FILE = "main.py"
+MODELS_DIR = "../models"
+
+os.makedirs(MODELS_DIR, exist_ok=True)
+
+with open(SOURCE_FILE, "r", encoding="utf-8") as f:
+    source_code = f.read()
+
+tree = ast.parse(source_code)
+
+pydantic_classes = []
+other_nodes = []
+
+for node in tree.body:
+    if isinstance(node, ast.ClassDef):
+        if any(
+            isinstance(base, ast.Name) and base.id == "BaseModel" for base in node.bases
+        ):
+            pydantic_classes.append(node)
+            continue
+    other_nodes.append(node)
+
+imports = """
+from pydantic import BaseModel, Field
+from typing import Optional, List
+"""
+
+for cls in pydantic_classes:
+    class_name = cls.name
+    file_name = f"{class_name.lower()}.py"
+    file_path = os.path.join(MODELS_DIR, file_name)
+
+    class_code = ast.get_source_segment(source_code, cls)
+    class_code = textwrap.dedent(class_code)
+
+    with open(file_path, "w", encoding="utf-8") as f:
+        f.write(imports.strip() + "\n\n")
+        f.write(class_code)
+
+    print(f"✅ Modèle extrait : {class_name} → {file_path}")
+
+new_tree = ast.Module(body=other_nodes, type_ignores=[])
+new_source = ast.unparse(new_tree)
+
+with open(SOURCE_FILE, "w", encoding="utf-8") as f:
+    f.write(new_source)
+
+print("\n🎉 Extraction terminée")
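Note: the extractor only matches classes whose base is written literally as BaseModel (an ast.Name); a class declared as pydantic.BaseModel (an ast.Attribute) would stay in main.py. A small hypothetical check covering both spellings, offered as a sketch rather than a change to the script:

import ast

def looks_like_pydantic_model(node: ast.ClassDef) -> bool:
    """Hypothetical broader check: also match bases written as pydantic.BaseModel."""
    for base in node.bases:
        if isinstance(base, ast.Name) and base.id == "BaseModel":
            return True
        if isinstance(base, ast.Attribute) and base.attr == "BaseModel":
            return True
    return False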
@@ -290,15 +290,11 @@ def _preparer_lignes_document(lignes: List) -> List[Dict]:
 
 
 UNIVERSIGN_TO_LOCAL: Dict[str, str] = {
-    # États initiaux
     "draft": "EN_ATTENTE",
     "ready": "EN_ATTENTE",
-    # En cours
     "started": "EN_COURS",
-    # États finaux (succès)
     "completed": "SIGNE",
     "closed": "SIGNE",
-    # États finaux (échec)
     "refused": "REFUSE",
     "expired": "EXPIRE",
     "canceled": "REFUSE",
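Note: this table maps Universign states onto the local lifecycle (EN_ATTENTE, EN_COURS, SIGNE, REFUSE, EXPIRE), and the sync code above consumes it through map_universign_to_local. The actual helper is not shown in this diff; a minimal sketch, with the default fallback being an assumption, could be:

def map_universign_to_local(universign_state: str, default: str = "EN_ATTENTE") -> str:
    """Sketch of the mapping helper used by sync_transaction; the fallback value is assumed."""
    return UNIVERSIGN_TO_LOCAL.get((universign_state or "").lower(), default)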
@@ -429,7 +425,7 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
     "REFUSE": {
         "fr": "Signature refusée",
         "en": "Signature refused",
-        "icon": "❌",
+        "icon": "",
         "color": "red",
     },
     "EXPIRE": {

@@ -96,7 +96,7 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
     "REFUSE": {
         "fr": "Signature refusée",
         "en": "Signature refused",
-        "icon": "❌",
+        "icon": "",
         "color": "red",
     },
     "EXPIRE": {
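Note: STATUS_MESSAGES carries a localized label, icon, and color per local status. A hedged sketch of a lookup over it is shown below; the default entry and the returned shape are assumptions, since no accessor appears in this diff:

from typing import Dict

def get_status_message(status: str, lang: str = "fr") -> Dict[str, str]:
    """Sketch of a STATUS_MESSAGES lookup; the fallback entry is an assumption."""
    entry = STATUS_MESSAGES.get(status, {"fr": status, "en": status, "icon": "", "color": "grey"})
    return {
        "label": entry.get(lang, entry.get("fr", status)),
        "icon": entry.get("icon", ""),
        "color": entry.get("color", ""),
    }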