Compare commits

...

109 commits

Author SHA1 Message Date
Fanilo-Nantenaina
3b5c183b47 refactor(security): remove emojis from logs and improve script debugging 2026-01-21 16:53:38 +03:00
Fanilo-Nantenaina
d25c2cffa9 refactor(security): clean up authentication middleware and api docs 2026-01-21 16:23:34 +03:00
Fanilo-Nantenaina
1c6c45465f fix(security): improve api key and jwt validation handling 2026-01-21 14:00:57 +03:00
Fanilo-Nantenaina
574d82f3c4 fix(security): improve auth handling and openapi schema generation 2026-01-21 13:50:44 +03:00
Fanilo-Nantenaina
a6a623d1ab refactor(api): simplify auth scheme handling and improve schema filtering 2026-01-21 13:41:37 +03:00
Fanilo-Nantenaina
437ecd0ed3 Merge branch 'feat/controlled_swagger_access' into main_2 2026-01-21 13:26:49 +03:00
Fanilo-Nantenaina
c057a085ed refactor(security): improve swagger user management and logging 2026-01-21 13:21:16 +03:00
Fanilo-Nantenaina
23a94f5558 Merge branch 'feat/controlled_swagger_access' into main_2 2026-01-21 13:01:38 +03:00
Fanilo-Nantenaina
797aed0240 feat(security): enhance swagger auth with user context and filtered docs 2026-01-21 12:56:02 +03:00
Fanilo-Nantenaina
5f40c677a8 refactor(manage_security): simplify delete_swagger_user function 2026-01-21 12:21:10 +03:00
Fanilo-Nantenaina
8a22e285df refactor(security): improve logging format and argument handling 2026-01-21 12:12:35 +03:00
Fanilo-Nantenaina
92597a1143 feat(api): add tag-based OpenAPI schema filtering for Swagger users 2026-01-21 12:05:06 +03:00
Fanilo-Nantenaina
c1f4c66e8c refactor(api): remove user dependency from all endpoints 2026-01-20 19:28:27 +03:00
Fanilo-Nantenaina
43da1b09ed Merge branch 'develop' into develop_like 2026-01-20 19:26:42 +03:00
Fanilo-Nantenaina
6d5f8594d0 chore: ignore python clean scripts in gitignore 2026-01-20 19:26:28 +03:00
Fanilo-Nantenaina
a7457c3979 fix(security): improve auth handling and logging in middleware 2026-01-20 19:14:00 +03:00
Fanilo-Nantenaina
5eec115d1d Merge branch 'main' into develop 2026-01-20 16:29:35 +03:00
Fanilo-Nantenaina
d89c9fd35b chore: ignore clean scripts in gitignore 2026-01-20 16:29:10 +03:00
Fanilo-Nantenaina
211dd4fd23 fix(security): improve api key authentication and error handling 2026-01-20 16:18:04 +03:00
Fanilo-Nantenaina
67ef83c4e3 refactor(security): improve authentication logging and endpoint checks 2026-01-20 16:01:54 +03:00
Fanilo-Nantenaina
82d1d92e58 refactor(scripts): improve import handling and path management 2026-01-20 15:35:06 +03:00
Fanilo-Nantenaina
28c8fb3008 refactor(security): improve user management and session handling 2026-01-20 15:27:26 +03:00
Fanilo-Nantenaina
f8cec7ebc5 refactor(security): improve security scripts and api documentation 2026-01-20 15:13:19 +03:00
Fanilo-Nantenaina
1a08894b47 refactor(scripts): improve logging format and endpoint handling in security management 2026-01-20 15:01:57 +03:00
Fanilo-Nantenaina
3cdb490ee5 refactor(security): improve middleware structure and configuration handling 2026-01-20 14:47:07 +03:00
Fanilo-Nantenaina
c84e4ddc20 refactor(auth): simplify authentication logic and improve error handling 2026-01-20 14:25:06 +03:00
Fanilo-Nantenaina
41ca202d4b refactor(security): move security config to environment variables and improve error handling 2026-01-20 14:19:48 +03:00
Fanilo-Nantenaina
918f5d3f19 docs(api): fix incorrect comment syntax in openapi configuration 2026-01-20 14:06:10 +03:00
Fanilo-Nantenaina
fa95d0d117 refactor(api): replace get_current_user with get_sage_client_for_user in dependencies 2026-01-20 13:56:16 +03:00
Fanilo-Nantenaina
a1150390f4 Merge branch 'fix/security' into main_2 2026-01-20 13:54:42 +03:00
Fanilo-Nantenaina
0001dbe634 docs(api): add comments for security schemas and openapi setup 2026-01-20 13:54:36 +03:00
Fanilo-Nantenaina
5b584bf969 refactor(security): improve auth middleware and logging 2026-01-20 13:51:09 +03:00
Fanilo-Nantenaina
022149c237 refactor(api): replace get_sage_client_for_user with get_current_user for dependency injection 2026-01-20 13:46:27 +03:00
Fanilo-Nantenaina
72d1ac58d1 refactor(security): reorganize imports and improve logging message 2026-01-20 12:40:56 +03:00
Fanilo-Nantenaina
cce1cdf76a refactor(scripts): improve manage_security.py organization and error handling 2026-01-20 12:32:49 +03:00
Fanilo-Nantenaina
e51a5e0a0b refactor(database): update import to use direct get_session import 2026-01-20 12:19:51 +03:00
Fanilo-Nantenaina
dd65ae4d96 style: remove emoji from log messages 2026-01-20 12:17:52 +03:00
Fanilo-Nantenaina
cc0062b3bc refactor(security): improve security management script with better logging and structure 2026-01-20 12:11:20 +03:00
Fanilo-Nantenaina
9bd0f62459 feat(api): add authentication to all endpoints and update OpenAPI schema 2026-01-20 11:49:57 +03:00
Fanilo-Nantenaina
e0f08fd83a refactor(dependencies): rename require_role_hybrid to require_role for consistency 2026-01-20 11:29:12 +03:00
Fanilo-Nantenaina
f59e56490c feat(api): add api keys router to middleware stack 2026-01-20 11:24:42 +03:00
Fanilo-Nantenaina
2aafd525cd refactor(api): update middleware and cors configuration 2026-01-20 11:23:10 +03:00
Fanilo-Nantenaina
17a4251eea Merge branch 'feat/secureing_API' 2026-01-20 11:12:59 +03:00
Fanilo-Nantenaina
abc9ff820a feat(security): implement api key management and authentication system 2026-01-20 11:11:32 +03:00
Fanilo-Nantenaina
b85bd26dbe Merge branches 'develop' and 'main' of https://git.dataven.fr/fanilo/Sage100-vps 2026-01-20 11:02:28 +03:00
Fanilo-Nantenaina
1164c7975a refactor: remove emojis and clean up code comments 2026-01-20 06:31:17 +03:00
Fanilo-Nantenaina
a10fda072c feat(security): implement API key authentication system 2026-01-19 20:38:01 +03:00
Fanilo-Nantenaina
9f6c1de8ef feat(api-keys): implement api key management system 2026-01-19 20:35:03 +03:00
Fanilo-Nantenaina
09eae50952 refactor: clean up code by removing unnecessary comments 2026-01-19 20:32:40 +03:00
Fanilo-Nantenaina
4b686c4544 Merge branch 'feat/get_all_reglements' into develop_like 2026-01-17 12:53:01 +03:00
Fanilo-Nantenaina
89510537b3 fix(api): cast montant_total to float in regler_factures_multiple 2026-01-17 11:41:44 +03:00
Fanilo-Nantenaina
0e18129325 feat(reglements): add endpoint to get payment details by invoice number 2026-01-16 17:58:02 +03:00
Fanilo-Nantenaina
aa89ebdf9e feat(reglements): add endpoints to list and get payment details 2026-01-16 15:46:58 +03:00
Fanilo-Nantenaina
9f12727bd3 refactor(routes): remove authentication dependency from universign router 2026-01-16 13:26:24 +03:00
Fanilo-Nantenaina
18603ded6e refactor(auth): reorganize imports and remove unused dependencies - Added some missing auth 2026-01-16 13:22:13 +03:00
Fanilo-Nantenaina
18d72b3bf9 Secured all routes 2026-01-16 12:47:56 +03:00
Fanilo-Nantenaina
fdf359738b Merge branch 'develop' 2026-01-16 12:35:17 +03:00
Fanilo-Nantenaina
ba9e474109 Merge branch 'main' of https://git.dataven.fr/fanilo/backend_vps 2026-01-16 12:34:47 +03:00
Fanilo-Nantenaina
b291cbf65f feat(reglements): add compte_general field to ReglementFactureCreate 2026-01-15 17:13:31 +03:00
Fanilo-Nantenaina
6f2136c3ca feat(sage_client): add new payment parameters and make fields optional 2026-01-15 16:56:25 +03:00
Fanilo-Nantenaina
beabefa3f9 feat(payments): enhance payment processing with new endpoints and schema 2026-01-15 15:49:15 +03:00
Fanilo-Nantenaina
457c746706 Merge branch 'feat/validation_facture' into develop 2026-01-15 14:46:51 +03:00
Fanilo-Nantenaina
d719966339 Merge branch 'feat/settle_invoice' of https://git.dataven.fr/fanilo/backend_vps into feat/settle_invoice 2026-01-15 14:44:17 +03:00
Fanilo-Nantenaina
cc1609549f feat(api): add endpoint to fetch bank journals 2026-01-15 14:44:04 +03:00
Fanilo-Nantenaina
25be0bd569 feat(payments): add payment functionality for invoices 2026-01-15 14:44:04 +03:00
Fanilo-Nantenaina
19faec9b24 Merge branch 'feat/validation_facture' of https://git.dataven.fr/fanilo/backend_vps into feat/validation_facture 2026-01-15 14:39:56 +03:00
Fanilo-Nantenaina
2f06a083dc refactor: simplify invoice validation methods and error handling 2026-01-15 14:38:42 +03:00
Fanilo-Nantenaina
149d8fb2de feat(factures): add invoice validation endpoints and client methods 2026-01-15 14:38:42 +03:00
Fanilo-Nantenaina
eedc628a5f refactor(models): clean up model comments and whitespace 2026-01-15 14:38:26 +03:00
Fanilo-Nantenaina
f505dad8a7 refactor: simplify invoice validation methods and error handling 2026-01-15 11:08:52 +03:00
Fanilo-Nantenaina
a824592398 feat(factures): add invoice validation endpoints and client methods 2026-01-15 10:34:40 +03:00
Fanilo-Nantenaina
23575fa231 feat(api): add endpoint to fetch bank journals 2026-01-14 19:58:56 +03:00
Fanilo-Nantenaina
a9df408399 feat(payments): add payment functionality for invoices 2026-01-14 18:40:21 +03:00
Fanilo-Nantenaina
671d5bac15 feat(society): add logo support and preview endpoint 2026-01-14 15:20:16 +03:00
Fanilo-Nantenaina
d5273a0786 refactor(enterprise): change siren parameter from Query to Path and corrected deprecated "regex" 2026-01-13 18:24:10 +03:00
Fanilo-Nantenaina
e7003d4059 feat(enterprise): add enterprise search functionality with API integration 2026-01-13 18:20:30 +03:00
Fanilo-Nantenaina
30ffc7a493 refactor: improve societe info handling and add sqlite lock retry docs 2026-01-13 18:03:33 +03:00
Fanilo-Nantenaina
3f1dce918d feat(pdf): add dynamic company info to PDF generator 2026-01-13 17:37:23 +03:00
Fanilo-Nantenaina
08665f15dd Merge branch 'feat/get_society' into fix/update_pdf_structure 2026-01-13 17:19:19 +03:00
Fanilo-Nantenaina
3233630401 feat(database): add SQLite optimization and retry mechanisms 2026-01-13 17:14:55 +03:00
Fanilo-Nantenaina
d2f02e1555 refactor(api/schemas): simplify company info response and schema 2026-01-13 17:09:12 +03:00
Fanilo-Nantenaina
9ae447e2c7 feat(society): add company info schema and endpoint 2026-01-13 17:03:18 +03:00
Fanilo-Nantenaina
358b2e3639 refactor(email_queue): update font and logo fallback paths to use pdfs directory 2026-01-13 16:58:18 +03:00
Fanilo-Nantenaina
a2c85a211a feat(pdf): refactor PDF generation with new SagePDFGenerator class 2026-01-13 16:30:34 +03:00
Fanilo-Nantenaina
b17e4abf12 Merge branch 'fix/update_pdf_structure' of https://git.dataven.fr/fanilo/backend_vps into fix/update_pdf_structure 2026-01-13 11:57:07 +03:00
Fanilo-Nantenaina
23f9bba174 chore: add .trunk to gitignore 2026-01-13 11:52:24 +03:00
Fanilo-Nantenaina
983e960b9b feat(universign): add signed document download and storage functionality 2026-01-13 11:50:32 +03:00
Fanilo-Nantenaina
74c0d73294 chore: add .trunk to gitignore 2026-01-13 11:49:57 +03:00
Fanilo-Nantenaina
18f9a45ef6 refactor(universign): remove emojis from logs and clean up admin routes 2026-01-13 11:46:18 +03:00
Fanilo-Nantenaina
d6ed8792cc fix(universign): add missing api_url parameter to document service 2026-01-13 11:13:58 +03:00
Fanilo-Nantenaina
24d7a49a73 feat(universign): add transaction diagnosis endpoint and improve document handling 2026-01-13 11:05:34 +03:00
Fanilo-Nantenaina
c5c17fdd9b fix(auth): increase failed login attempt threshold from 5 to 15 2026-01-13 10:42:58 +03:00
Fanilo-Nantenaina
c389129ae7 Updated using the correct redirection structure 2026-01-12 19:04:09 +03:00
Fanilo-Nantenaina
6b6246b6e5 Testing multi-sage users 2026-01-12 18:56:37 +03:00
b3419eafaa Deleted cached dev database 2026-01-08 17:29:16 +00:00
Fanilo-Nantenaina
a9aff7b386 chore: add staging and production env files to gitignore 2026-01-08 20:07:07 +03:00
Fanilo-Nantenaina
4f0fe17ee9 chore: remove staging environment configuration file 2026-01-08 20:06:51 +03:00
d4d6cbc44f Added enum status caused by case sensitivity on Linux 2026-01-08 17:04:09 +00:00
Fanilo-Nantenaina
795b848dff chore: update gitignore and add status enums 2026-01-08 16:59:08 +03:00
Fanilo-Nantenaina
e990cbdc08 MERGING branch develop INTO MAIN 2026-01-08 16:58:43 +03:00
Fanilo-Nantenaina
1972f22b80 chore: update container name in dev docker-compose file 2026-01-08 16:37:14 +03:00
Fanilo-Nantenaina
ce3b234fee chore: update container name in docker-compose.dev.yml 2026-01-08 15:24:11 +03:00
Fanilo-Nantenaina
d78d189606 chore: update database name in docker-compose.dev.yml 2026-01-08 15:22:27 +03:00
Fanilo-Nantenaina
8a012fc162 refactor(docker): restructure docker setup for multiple environments 2026-01-08 15:15:21 +03:00
Fanilo-Nantenaina
f3957dddcf Merge branch 'fix/update_pdf_structure' into feat/update_pdf_structure 2026-01-08 10:48:26 +03:00
Fanilo-Nantenaina
268dfb3618 refactor(api): replace sage dependency with direct sage_client usage 2026-01-08 10:29:21 +03:00
Fanilo-Nantenaina
bcee1f277f refactor: replace is_() method with direct boolean comparison 2026-01-07 20:29:35 +03:00
Fanilo-Nantenaina
d8ec61802d feat(universign): add signed document storage and download functionality 2026-01-07 20:01:55 +03:00
Fanilo-Nantenaina
4a1960745a feat(pdf-generation): redesign document layout with modern styling 2026-01-07 19:42:29 +03:00
53 changed files with 5815 additions and 1193 deletions

9
.gitignore vendored
View file

@ -39,3 +39,12 @@ data/*.db.bak
*.db
tools/
.trunk
.env.staging
.env.production
.trunk
*clean*.py

9
.trunk/.gitignore vendored Normal file
View file

@ -0,0 +1,9 @@
*out
*logs
*actions
*notifications
*tools
plugins
user_trunk.yaml
user.yaml
tmp

32
.trunk/trunk.yaml Normal file
View file

@ -0,0 +1,32 @@
# This file controls the behavior of Trunk: https://docs.trunk.io/cli
# To learn more about the format of this file, see https://docs.trunk.io/reference/trunk-yaml
version: 0.1
cli:
version: 1.25.0
# Trunk provides extensibility via plugins. (https://docs.trunk.io/plugins)
plugins:
sources:
- id: trunk
ref: v1.7.4
uri: https://github.com/trunk-io/plugins
# Many linters and tools depend on runtimes - configure them here. (https://docs.trunk.io/runtimes)
runtimes:
enabled:
- node@22.16.0
- python@3.10.8
# This is the section where you manage your linters. (https://docs.trunk.io/check/configuration)
lint:
enabled:
- git-diff-check
- hadolint@2.14.0
- markdownlint@0.47.0
- osv-scanner@2.3.1
- prettier@3.7.4
- trufflehog@3.92.4
actions:
disabled:
- trunk-announce
- trunk-check-pre-push
- trunk-fmt-pre-commit
enabled:
- trunk-upgrade-available

View file

@ -1,23 +1,78 @@
# Backend Dockerfile
FROM python:3.12-slim
# ================================
# Base
# ================================
FROM python:3.12-slim AS base
WORKDIR /app
# Copier et installer les dépendances
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip \
&& pip install --no-cache-dir -r requirements.txt
# Installer dépendances système si nécessaire
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
&& rm -rf /var/lib/apt/lists/*
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip
# ================================
# DEV
# ================================
FROM base AS dev
ENV PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
ENV=development
# Installer dépendances dev (si vous avez un requirements.dev.txt)
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Créer dossiers
RUN mkdir -p /app/data /app/logs && chmod -R 777 /app/data /app/logs
# Copier le reste du projet
COPY . .
# Créer dossier persistant pour SQLite avec bonnes permissions
RUN mkdir -p /app/data && chmod 777 /app/data
# Exposer le port
EXPOSE 8000
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
# Lancer l'API et initialiser la DB au démarrage
# CMD ["sh", "-c", "uvicorn api:app --host 0.0.0.0 --port 8000"]
# ================================
# STAGING
# ================================
FROM base AS staging
ENV PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
ENV=staging
CMD ["sh", "-c", "python init_db.py && uvicorn api:app --host 0.0.0.0 --port 8000"]
RUN pip install --no-cache-dir -r requirements.txt
RUN mkdir -p /app/data /app/logs && chmod -R 755 /app/data /app/logs
COPY . .
# Initialiser la DB au build
RUN python init_db.py || true
EXPOSE 8002
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8002", "--log-level", "info"]
# ================================
# PROD
# ================================
FROM base AS prod
ENV PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
ENV=production
RUN pip install --no-cache-dir -r requirements.txt
# Créer utilisateur non-root pour la sécurité
RUN useradd -m -u 1000 appuser && \
mkdir -p /app/data /app/logs && \
chown -R appuser:appuser /app
COPY --chown=appuser:appuser . .
# Initialiser la DB au build
RUN python init_db.py || true
USER appuser
EXPOSE 8004
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8004", "--workers", "4"]

1641
api.py

File diff suppressed because it is too large Load diff

View file

@ -7,7 +7,6 @@ class Settings(BaseSettings):
env_file=".env", env_file_encoding="utf-8", case_sensitive=False, extra="ignore"
)
# === JWT & Auth ===
jwt_secret: str
jwt_algorithm: str
access_token_expire_minutes: int
@ -21,15 +20,12 @@ class Settings(BaseSettings):
SAGE_TYPE_BON_AVOIR: int = 50
SAGE_TYPE_FACTURE: int = 60
# === Sage Gateway (Windows) ===
sage_gateway_url: str
sage_gateway_token: str
frontend_url: str
# === Base de données ===
database_url: str = "sqlite+aiosqlite:///./data/sage_dataven.db"
# === SMTP ===
smtp_host: str
smtp_port: int = 587
smtp_user: str
@ -37,21 +33,17 @@ class Settings(BaseSettings):
smtp_from: str
smtp_use_tls: bool = True
# === Universign ===
universign_api_key: str
universign_api_url: str
# === API ===
api_host: str
api_port: int
api_reload: bool = False
# === Email Queue ===
max_email_workers: int = 3
max_retry_attempts: int = 3
retry_delay_seconds: int = 3
# === CORS ===
cors_origins: List[str] = ["*"]

125
config/cors_config.py Normal file
View file

@ -0,0 +1,125 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from typing import List
import os
import logging
logger = logging.getLogger(__name__)
def configure_cors_open(app: FastAPI):
    """Open CORS mode: every origin is accepted.

    Security is delegated entirely to the API-key layer, so credentials
    are disabled and all origins/headers are allowed.
    """
    open_policy = dict(
        allow_origins=["*"],
        allow_credentials=False,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["*"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )
    app.add_middleware(CORSMiddleware, **open_policy)
    for message in (
        " CORS configuré: Mode OUVERT (sécurisé par API Keys)",
        " - Origins: * (toutes)",
        " - Headers: * (dont X-API-Key)",
        " - Credentials: False",
    ):
        logger.info(message)
def configure_cors_whitelist(app: FastAPI):
    """Whitelist CORS mode.

    The allowed origins come from the CORS_ALLOWED_ORIGINS environment
    variable (comma-separated). When the variable is unset or empty, the
    whitelist degrades to ["*"].
    """
    raw_origins = os.getenv("CORS_ALLOWED_ORIGINS", "")
    if raw_origins:
        allowed_origins = [
            item.strip() for item in raw_origins.split(",") if item.strip()
        ]
    else:
        # NOTE(review): "*" combined with allow_credentials=True is rejected by
        # the CORS spec for literal wildcards — confirm this fallback is intended.
        allowed_origins = ["*"]
    app.add_middleware(
        CORSMiddleware,
        allow_origins=allowed_origins,
        allow_credentials=True,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )
    logger.info(" CORS configuré: Mode WHITELIST")
    logger.info(f" - Origins autorisées: {len(allowed_origins)}")
    for origin in allowed_origins:
        logger.info(f"{origin}")
def configure_cors_regex(app: FastAPI):
    """Regex CORS mode: origins are matched against a regular expression.

    Bug fix: the previous pattern ``r"*"`` is not a valid regular expression —
    ``re.compile("*")`` raises ``re.error`` ("nothing to repeat"), so Starlette's
    CORSMiddleware would crash when compiling ``allow_origin_regex``. The
    evident intent (match every origin) is ``r".*"``.
    """
    # Match any origin. NOTE(review): ".*" together with allow_credentials=True
    # effectively reflects every Origin with credentials allowed — tighten this
    # pattern (e.g. r"https://.*\.example\.com") before production use.
    origin_regex = r".*"
    app.add_middleware(
        CORSMiddleware,
        allow_origin_regex=origin_regex,
        allow_credentials=True,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )
    logger.info(" CORS configuré: Mode REGEX")
    logger.info(f" - Pattern: {origin_regex}")
def configure_cors_hybrid(app: FastAPI):
    """Hybrid CORS mode.

    Known origins get credentialed CORS headers reflected back; any other
    origin falls back to an open, credential-less "*" policy.
    """
    from starlette.middleware.base import BaseHTTPMiddleware

    methods_header = "GET, POST, PUT, DELETE, PATCH, OPTIONS"

    class HybridCORSMiddleware(BaseHTTPMiddleware):
        def __init__(self, app, known_origins: List[str]):
            super().__init__(app)
            self.known_origins = set(known_origins)

        async def dispatch(self, request, call_next):
            # call_next runs exactly once, before any header is written.
            response = await call_next(request)
            origin = request.headers.get("origin")
            if origin in self.known_origins:
                # Trusted origin: reflect it and allow credentials.
                response.headers["Access-Control-Allow-Origin"] = origin
                response.headers["Access-Control-Allow-Credentials"] = "true"
                response.headers["Access-Control-Allow-Methods"] = methods_header
                response.headers["Access-Control-Allow-Headers"] = (
                    "Content-Type, Authorization, X-API-Key"
                )
            else:
                # Unknown origin: open fallback, no credentials.
                response.headers["Access-Control-Allow-Origin"] = "*"
                response.headers["Access-Control-Allow-Methods"] = methods_header
                response.headers["Access-Control-Allow-Headers"] = "*"
            return response

    known_origins = ["*"]
    app.add_middleware(HybridCORSMiddleware, known_origins=known_origins)
    logger.info(" CORS configuré: Mode HYBRIDE")
    logger.info(f" - Whitelist: {len(known_origins)} domaines")
    logger.info(" - Fallback: * (ouvert)")
def setup_cors(app: FastAPI, mode: str = "open"):
    """Apply one of the CORS configurations to *app*.

    *mode* selects among "open", "whitelist", "regex" and "hybrid"; any
    unknown value logs a warning and falls back to the open mode.
    """
    configurators = {
        "open": configure_cors_open,
        "whitelist": configure_cors_whitelist,
        "regex": configure_cors_regex,
        "hybrid": configure_cors_hybrid,
    }
    configurator = configurators.get(mode)
    if configurator is None:
        logger.warning(
            f" Mode CORS inconnu: {mode}. Utilisation de 'open' par défaut."
        )
        configurator = configure_cors_open
    configurator(app)

View file

@ -1,48 +1,75 @@
from fastapi import Depends, HTTPException, status
from fastapi import Depends, HTTPException, status, Request
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import Optional
from jwt.exceptions import InvalidTokenError
from database import get_session, User
from security.auth import decode_token
from typing import Optional
from datetime import datetime
security = HTTPBearer()
security = HTTPBearer(auto_error=False)
async def get_current_user(
credentials: HTTPAuthorizationCredentials = Depends(security),
async def get_current_user_hybrid(
request: Request,
credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
session: AsyncSession = Depends(get_session),
) -> User:
api_key_obj = getattr(request.state, "api_key", None)
if api_key_obj:
if api_key_obj.user_id:
result = await session.execute(
select(User).where(User.id == api_key_obj.user_id)
)
user = result.scalar_one_or_none()
if user:
user._is_api_key_user = True
user._api_key_obj = api_key_obj
return user
virtual_user = User(
id=f"api_key_{api_key_obj.id}",
email=f"api_key_{api_key_obj.id}@virtual.local",
nom=api_key_obj.name,
prenom="API",
hashed_password="",
role="api_client",
is_active=True,
is_verified=True,
)
virtual_user._is_api_key_user = True
virtual_user._api_key_obj = api_key_obj
return virtual_user
if not credentials:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Authentification requise (JWT ou API Key)",
headers={"WWW-Authenticate": "Bearer"},
)
token = credentials.credentials
try:
payload = decode_token(token)
if not payload:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Token invalide ou expiré",
headers={"WWW-Authenticate": "Bearer"},
)
if payload.get("type") != "access":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Type de token incorrect",
headers={"WWW-Authenticate": "Bearer"},
)
user_id: str = payload.get("sub")
if not user_id:
if user_id is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Token malformé",
detail="Token invalide: user_id manquant",
headers={"WWW-Authenticate": "Bearer"},
)
result = await session.execute(select(User).where(User.id == user_id))
user = result.scalar_one_or_none()
if not user:
if user is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Utilisateur introuvable",
@ -50,45 +77,42 @@ async def get_current_user(
)
if not user.is_active:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN, detail="Compte désactivé"
)
if not user.is_verified:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Email non vérifié. Consultez votre boîte de réception.",
)
if user.locked_until and user.locked_until > datetime.now():
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Compte temporairement verrouillé suite à trop de tentatives échouées",
detail="Utilisateur inactif",
)
return user
async def get_current_user_optional(
credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
session: AsyncSession = Depends(get_session),
) -> Optional[User]:
if not credentials:
return None
try:
return await get_current_user(credentials, session)
except HTTPException:
return None
except InvalidTokenError as e:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=f"Token invalide: {str(e)}",
headers={"WWW-Authenticate": "Bearer"},
)
def require_role(*allowed_roles: str):
async def role_checker(user: User = Depends(get_current_user)) -> User:
def require_role_hybrid(*allowed_roles: str):
async def role_checker(user: User = Depends(get_current_user_hybrid)) -> User:
if user.role not in allowed_roles:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=f"Accès refusé. Rôles requis: {', '.join(allowed_roles)}",
detail=f"Accès interdit. Rôles autorisés: {', '.join(allowed_roles)}",
)
return user
return role_checker
def is_api_key_user(user: User) -> bool:
"""Vérifie si l'utilisateur est authentifié via API Key"""
return getattr(user, "_is_api_key_user", False)
def get_api_key_from_user(user: User):
"""Récupère l'objet ApiKey depuis un utilisateur (si applicable)"""
return getattr(user, "_api_key_obj", None)
get_current_user = get_current_user_hybrid
require_role = require_role_hybrid

View file

@ -19,7 +19,6 @@ async def create_admin():
print(" Création d'un compte administrateur")
print("=" * 60 + "\n")
# Saisie des informations
email = input("Email de l'admin: ").strip().lower()
if not email or "@" not in email:
print(" Email invalide")
@ -32,7 +31,6 @@ async def create_admin():
print(" Prénom et nom requis")
return False
# Mot de passe avec validation
while True:
password = input(
"Mot de passe (min 8 car., 1 maj, 1 min, 1 chiffre, 1 spécial): "
@ -58,7 +56,6 @@ async def create_admin():
print(f"\n Un utilisateur avec l'email {email} existe déjà")
return False
# Créer l'admin
admin = User(
id=str(uuid.uuid4()),
email=email,

View file

@ -152,7 +152,7 @@ templates_signature_email = {
</table>
<p style="color: #718096; font-size: 13px; line-height: 1.5; margin: 0;">
<strong>🔒 Signature électronique sécurisée</strong><br>
<strong> Signature électronique sécurisée</strong><br>
Votre signature est protégée par notre partenaire de confiance <strong>Universign</strong>,
certifié eIDAS et conforme au RGPD. Votre identité sera vérifiée et le document sera
horodaté de manière infalsifiable.

View file

@ -1,20 +1,49 @@
import os
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.pool import NullPool
from sqlalchemy import event, text
import logging
from config.config import settings
from database.models.generic_model import Base
logger = logging.getLogger(__name__)
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite+aiosqlite:///./data/sage_dataven.db")
DATABASE_URL = settings.database_url
def _configure_sqlite_connection(dbapi_connection, connection_record):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA journal_mode=WAL")
cursor.execute("PRAGMA busy_timeout=30000")
cursor.execute("PRAGMA synchronous=NORMAL")
cursor.execute("PRAGMA cache_size=-64000") # 64MB
cursor.execute("PRAGMA foreign_keys=ON")
cursor.execute("PRAGMA locking_mode=NORMAL")
cursor.close()
logger.debug("SQLite configuré avec WAL mode et busy_timeout=30s")
engine_kwargs = {
"echo": False,
"future": True,
"poolclass": NullPool,
}
if DATABASE_URL and "sqlite" in DATABASE_URL:
engine_kwargs["connect_args"] = {
"check_same_thread": False,
"timeout": 30,
}
engine = create_async_engine(DATABASE_URL, **engine_kwargs)
if DATABASE_URL and "sqlite" in DATABASE_URL:
@event.listens_for(engine.sync_engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
_configure_sqlite_connection(dbapi_connection, connection_record)
engine = create_async_engine(
DATABASE_URL,
echo=False,
future=True,
poolclass=NullPool,
)
async_session_factory = async_sessionmaker(
engine,
@ -30,6 +59,12 @@ async def init_db():
logger.info("Tentative de connexion")
async with engine.begin() as conn:
logger.info("Connexion etablie")
if DATABASE_URL and "sqlite" in DATABASE_URL:
result = await conn.execute(text("PRAGMA journal_mode"))
journal_mode = result.scalar()
logger.info(f"SQLite journal_mode: {journal_mode}")
await conn.run_sync(Base.metadata.create_all)
logger.info("create_all execute")
@ -49,3 +84,57 @@ async def get_session() -> AsyncSession:
async def close_db():
await engine.dispose()
logger.info("Connexions DB fermées")
async def execute_with_sqlite_retry(
session: AsyncSession, statement, max_retries: int = 5, base_delay: float = 0.1
):
import asyncio
from sqlalchemy.exc import OperationalError
last_error = None
for attempt in range(max_retries):
try:
result = await session.execute(statement)
return result
except OperationalError as e:
last_error = e
if "database is locked" in str(e).lower():
delay = base_delay * (2**attempt)
logger.warning(
f"SQLite locked, tentative {attempt + 1}/{max_retries}, "
f"retry dans {delay:.2f}s"
)
await asyncio.sleep(delay)
else:
raise
raise last_error
async def commit_with_retry(
session: AsyncSession, max_retries: int = 5, base_delay: float = 0.1
):
import asyncio
from sqlalchemy.exc import OperationalError
last_error = None
for attempt in range(max_retries):
try:
await session.commit()
return
except OperationalError as e:
last_error = e
if "database is locked" in str(e).lower():
delay = base_delay * (2**attempt)
logger.warning(
f"SQLite locked lors du commit, tentative {attempt + 1}/{max_retries}, "
f"retry dans {delay:.2f}s"
)
await asyncio.sleep(delay)
else:
raise
raise last_error

18
database/enum/status.py Normal file
View file

@ -0,0 +1,18 @@
import enum
# Lifecycle states of an outbound email. Built with the Enum functional API;
# the str mixin (type=str) keeps members JSON/DB friendly, exactly like the
# equivalent `class StatutEmail(str, enum.Enum)` form.
StatutEmail = enum.Enum(
    "StatutEmail",
    [
        (name, name)
        for name in ("EN_ATTENTE", "EN_COURS", "ENVOYE", "OUVERT", "ERREUR", "BOUNCE")
    ],
    type=str,
)
# Lifecycle states of an electronic-signature request. Functional-API
# equivalent of `class StatutSignature(str, enum.Enum)`: same members,
# same str-valued behavior.
StatutSignature = enum.Enum(
    "StatutSignature",
    [
        (name, name)
        for name in ("EN_ATTENTE", "ENVOYE", "SIGNE", "REFUSE", "EXPIRE")
    ],
    type=str,
)

View file

@ -0,0 +1,73 @@
from sqlalchemy import Column, String, Boolean, DateTime, Integer, Text
from typing import Optional, List
import json
from datetime import datetime
import uuid
from database.models.generic_model import Base
class ApiKey(Base):
    """ORM model for public API keys.

    Only a hash of the key is persisted (``key_hash``); ``key_prefix`` keeps
    the first characters so a key can be identified without exposing it.
    """

    __tablename__ = "api_keys"

    # Primary key: random UUID4 rendered as a 36-char string.
    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    # Hash of the full key (64 chars — presumably SHA-256 hex; TODO confirm
    # against the hashing code). Unique + indexed for lookup on each request.
    key_hash = Column(String(64), unique=True, nullable=False, index=True)
    # Displayable first characters of the key.
    key_prefix = Column(String(10), nullable=False)
    # Human-readable label for the key.
    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)
    # Optional link to an application user; NULL means a standalone key.
    user_id = Column(String(36), nullable=True)
    # Identifier of whoever created the key (free-form string).
    created_by = Column(String(255), nullable=False)
    is_active = Column(Boolean, default=True, nullable=False)
    # Per-key rate limit (requests per minute).
    rate_limit_per_minute = Column(Integer, default=60, nullable=False)
    # Optional endpoint restriction; NULL means all endpoints allowed.
    # Stored as text — presumably a JSON/CSV list; verify against callers.
    allowed_endpoints = Column(Text, nullable=True)
    # Usage accounting.
    total_requests = Column(Integer, default=0, nullable=False)
    last_used_at = Column(DateTime, nullable=True)
    # NOTE(review): datetime.now is naive local time — confirm whether UTC
    # was intended here and for the other timestamps.
    created_at = Column(DateTime, default=datetime.now, nullable=False)
    # NULL expires_at means the key never expires; revoked_at is set on revocation.
    expires_at = Column(DateTime, nullable=True)
    revoked_at = Column(DateTime, nullable=True)

    def __repr__(self):
        return f"<ApiKey(name='{self.name}', prefix='{self.key_prefix}', active={self.is_active})>"
class SwaggerUser(Base):
    """ORM model for users allowed to access the Swagger documentation."""

    __tablename__ = "swagger_users"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    username = Column(String(100), unique=True, nullable=False, index=True)
    hashed_password = Column(String(255), nullable=False)
    full_name = Column(String(255), nullable=True)
    email = Column(String(255), nullable=True)
    is_active = Column(Boolean, default=True, nullable=False)
    # JSON-encoded list of OpenAPI tags this user may see; NULL = unrestricted.
    allowed_tags = Column(Text, nullable=True)
    created_at = Column(DateTime, default=datetime.now, nullable=False)
    last_login = Column(DateTime, nullable=True)

    @property
    def allowed_tags_list(self) -> Optional[List[str]]:
        """Decode ``allowed_tags`` from JSON; None when empty or unparseable."""
        if not self.allowed_tags:
            return None
        try:
            return json.loads(self.allowed_tags)
        except json.JSONDecodeError:
            return None

    @allowed_tags_list.setter
    def allowed_tags_list(self, tags: Optional[List[str]]):
        """Encode *tags* as JSON text; passing None clears the column."""
        self.allowed_tags = None if tags is None else json.dumps(tags)

    def __repr__(self):
        return f"<SwaggerUser(username='{self.username}', active={self.is_active})>"

View file

@ -61,8 +61,7 @@ class SageDocumentType(int, Enum):
class UniversignTransaction(Base):
__tablename__ = "universign_transactions"
# === IDENTIFIANTS ===
id = Column(String(36), primary_key=True) # UUID local
id = Column(String(36), primary_key=True)
transaction_id = Column(
String(255),
unique=True,
@ -71,7 +70,6 @@ class UniversignTransaction(Base):
comment="ID Universign (ex: tr_abc123)",
)
# === LIEN AVEC LE DOCUMENT SAGE ===
sage_document_id = Column(
String(50),
nullable=False,
@ -82,7 +80,6 @@ class UniversignTransaction(Base):
SQLEnum(SageDocumentType), nullable=False, comment="Type de document Sage"
)
# === STATUTS UNIVERSIGN (SOURCE DE VÉRITÉ) ===
universign_status = Column(
SQLEnum(UniversignTransactionStatus),
nullable=False,
@ -94,7 +91,6 @@ class UniversignTransaction(Base):
DateTime, nullable=True, comment="Dernière MAJ du statut Universign"
)
# === STATUT LOCAL (DÉDUIT) ===
local_status = Column(
SQLEnum(LocalDocumentStatus),
nullable=False,
@ -103,22 +99,35 @@ class UniversignTransaction(Base):
comment="Statut métier simplifié pour l'UI",
)
# === URLS ET MÉTADONNÉES UNIVERSIGN ===
signer_url = Column(Text, nullable=True, comment="URL de signature")
document_url = Column(Text, nullable=True, comment="URL du document signé")
signed_document_path = Column(
Text, nullable=True, comment="Chemin local du PDF signé"
)
signed_document_downloaded_at = Column(
DateTime, nullable=True, comment="Date de téléchargement du document"
)
signed_document_size_bytes = Column(
Integer, nullable=True, comment="Taille du fichier en octets"
)
download_attempts = Column(
Integer, default=0, comment="Nombre de tentatives de téléchargement"
)
download_error = Column(
Text, nullable=True, comment="Dernière erreur de téléchargement"
)
certificate_url = Column(Text, nullable=True, comment="URL du certificat")
# === SIGNATAIRES ===
signers_data = Column(
Text, nullable=True, comment="JSON des signataires (snapshot)"
)
# === INFORMATIONS MÉTIER ===
requester_email = Column(String(255), nullable=True)
requester_name = Column(String(255), nullable=True)
document_name = Column(String(500), nullable=True)
# === DATES CLÉS ===
created_at = Column(
DateTime,
default=datetime.now,
@ -133,14 +142,12 @@ class UniversignTransaction(Base):
expired_at = Column(DateTime, nullable=True)
canceled_at = Column(DateTime, nullable=True)
# === SYNCHRONISATION ===
last_synced_at = Column(
DateTime, nullable=True, comment="Dernière sync réussie avec Universign"
)
sync_attempts = Column(Integer, default=0, comment="Nombre de tentatives de sync")
sync_error = Column(Text, nullable=True)
# === FLAGS ===
is_test = Column(
Boolean, default=False, comment="Transaction en environnement .alpha"
)
@ -149,7 +156,6 @@ class UniversignTransaction(Base):
)
webhook_received = Column(Boolean, default=False, comment="Webhook Universign reçu")
# === RELATION ===
signers = relationship(
"UniversignSigner", back_populates="transaction", cascade="all, delete-orphan"
)
@ -157,7 +163,6 @@ class UniversignTransaction(Base):
"UniversignSyncLog", back_populates="transaction", cascade="all, delete-orphan"
)
# === INDEXES COMPOSITES ===
__table_args__ = (
Index("idx_sage_doc", "sage_document_id", "sage_document_type"),
Index("idx_sync_status", "needs_sync", "universign_status"),
@ -173,10 +178,6 @@ class UniversignTransaction(Base):
class UniversignSigner(Base):
"""
Détail de chaque signataire d'une transaction
"""
__tablename__ = "universign_signers"
id = Column(String(36), primary_key=True)
@ -187,33 +188,27 @@ class UniversignSigner(Base):
index=True,
)
# === DONNÉES SIGNATAIRE ===
email = Column(String(255), nullable=False, index=True)
name = Column(String(255), nullable=True)
phone = Column(String(50), nullable=True)
# === STATUT ===
status = Column(
SQLEnum(UniversignSignerStatus),
default=UniversignSignerStatus.WAITING,
nullable=False,
)
# === ACTIONS ===
viewed_at = Column(DateTime, nullable=True)
signed_at = Column(DateTime, nullable=True)
refused_at = Column(DateTime, nullable=True)
refusal_reason = Column(Text, nullable=True)
# === MÉTADONNÉES ===
ip_address = Column(String(45), nullable=True)
user_agent = Column(Text, nullable=True)
signature_method = Column(String(50), nullable=True)
# === ORDRE ===
order_index = Column(Integer, default=0)
# === RELATION ===
transaction = relationship("UniversignTransaction", back_populates="signers")
def __repr__(self):
@ -221,10 +216,6 @@ class UniversignSigner(Base):
class UniversignSyncLog(Base):
"""
Journal de toutes les synchronisations (audit trail)
"""
__tablename__ = "universign_sync_logs"
id = Column(Integer, primary_key=True, autoincrement=True)
@ -235,22 +226,18 @@ class UniversignSyncLog(Base):
index=True,
)
# === SYNC INFO ===
sync_type = Column(String(50), nullable=False, comment="webhook, polling, manual")
sync_timestamp = Column(DateTime, default=datetime.now, nullable=False, index=True)
# === CHANGEMENTS DÉTECTÉS ===
previous_status = Column(String(50), nullable=True)
new_status = Column(String(50), nullable=True)
changes_detected = Column(Text, nullable=True, comment="JSON des changements")
# === RÉSULTAT ===
success = Column(Boolean, default=True)
error_message = Column(Text, nullable=True)
http_status_code = Column(Integer, nullable=True)
response_time_ms = Column(Integer, nullable=True)
# === RELATION ===
transaction = relationship("UniversignTransaction", back_populates="sync_logs")
def __repr__(self):
@ -268,9 +255,8 @@ class UniversignConfig(Base):
)
api_url = Column(String(500), nullable=False)
api_key = Column(String(500), nullable=False, comment="⚠️ À chiffrer")
api_key = Column(String(500), nullable=False, comment="À chiffrer")
# === OPTIONS ===
webhook_url = Column(String(500), nullable=True)
webhook_secret = Column(String(255), nullable=True)

24
docker-compose.dev.yml Normal file
View file

@ -0,0 +1,24 @@
# Development stack: source tree is bind-mounted for live reload.
services:
  backend:
    container_name: dev-sage-api
    build:
      context: .
      target: dev          # Dockerfile stage named "dev"
    env_file: .env
    volumes:
      - .:/app             # mount sources into the container
      - /app/__pycache__   # keep bytecode cache out of the host mount
      - ./data:/app/data   # persist SQLite database on the host
      - ./logs:/app/logs
    ports:
      - "8000:8000"
    environment:
      ENV: development
      DEBUG: "true"
      DATABASE_URL: "sqlite+aiosqlite:///./data/sage_dataven.db"
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/"]
      interval: 30s
      timeout: 10s
      retries: 3

23
docker-compose.prod.yml Normal file
View file

@ -0,0 +1,23 @@
# Production stack: no source mount, always-restart policy, startup grace period.
services:
  backend:
    container_name: prod_sage_api
    build:
      context: .
      target: prod               # Dockerfile stage named "prod"
    env_file: .env.production
    volumes:
      - ./data:/app/data         # persist SQLite database on the host
      - ./logs:/app/logs
    ports:
      - "8004:8004"
    environment:
      ENV: production
      DEBUG: "false"
      DATABASE_URL: "sqlite+aiosqlite:///./data/sage_prod.db"
    restart: always
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8004/"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 40s          # grace period before failures count

View file

@ -0,0 +1,22 @@
# Staging stack: mirrors production settings on a separate port and database.
services:
  backend:
    container_name: staging_sage_api
    build:
      context: .
      target: staging            # Dockerfile stage named "staging"
    env_file: .env.staging
    volumes:
      - ./data:/app/data         # persist SQLite database on the host
      - ./logs:/app/logs
    ports:
      - "8002:8002"
    environment:
      ENV: staging
      DEBUG: "false"
      DATABASE_URL: "sqlite+aiosqlite:///./data/sage_staging.db"
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8002/"]
      interval: 30s
      timeout: 10s
      retries: 3

View file

@ -1,11 +1,4 @@
services:
vps-sage-api:
build: .
container_name: vps-sage-api
env_file: .env
volumes:
- ./data:/app/data
- ./logs:/app/logs
ports:
- "8000:8000"
restart: unless-stopped
backend:
build:
context: .

File diff suppressed because it is too large Load diff

View file

@ -14,33 +14,14 @@ logger = logging.getLogger(__name__)
async def main():
print("\n" + "=" * 60)
print("Initialisation de la base de données délocalisée")
print("=" * 60 + "\n")
try:
logger.info("Debut de l'initialisation")
await init_db()
logger.info("Initialisation terminee")
print("\nInitialisation terminee")
print("\nBase de données créée avec succès !")
print("Fichier: sage_dataven.db")
print("\nTables créées:")
print(" |- email_logs (Journalisation emails)")
print(" |- signature_logs (Suivi signatures Universign)")
print(" |- workflow_logs (Transformations documents)")
print(" |- cache_metadata (Métadonnées cache)")
print(" |- audit_logs (Journal d'audit)")
print("\nProchaines étapes:")
print(" 1. Configurer le fichier .env avec les credentials")
print(" 2. Lancer la gateway Windows sur la machine Sage")
print(" 3. Lancer l'API VPS: uvicorn api:app --host 0.0.0.0 --port 8000")
print(" 4. Ou avec Docker : docker-compose up -d")
print(" 5. Tester: http://IP_DU_VPS:8000/docs")
print("\n" + "=" * 60 + "\n")
return True
except Exception as e:

295
middleware/security.py Normal file
View file

@ -0,0 +1,295 @@
from fastapi import Request, status
from fastapi.responses import JSONResponse
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.types import ASGIApp, Receive, Send
from sqlalchemy import select
from typing import Callable, Optional
from datetime import datetime
import logging
import base64
import json
logger = logging.getLogger(__name__)
security = HTTPBasic()
class SwaggerAuthMiddleware:
    """Pure-ASGI middleware enforcing HTTP Basic auth on documentation routes.

    Requests whose path starts with /docs, /redoc or /openapi.json must carry
    valid credentials of an active SwaggerUser. On success, a plain-dict
    snapshot of the user is stored in scope["state"]["swagger_user"] so that
    downstream handlers (e.g. the filtered OpenAPI schema) can read it.
    """

    PROTECTED_PATHS = ["/docs", "/redoc", "/openapi.json"]

    def __init__(self, app: ASGIApp):
        self.app = app

    async def __call__(self, scope, receive: Receive, send: Send):
        # Non-HTTP events (websocket, lifespan) pass straight through.
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return

        request = Request(scope, receive=receive)
        path = request.url.path
        if not any(path.startswith(p) for p in self.PROTECTED_PATHS):
            await self.app(scope, receive, send)
            return

        auth_header = request.headers.get("Authorization")
        if not auth_header or not auth_header.startswith("Basic "):
            await self._unauthorized(
                scope, receive, send,
                "Authentification requise pour la documentation",
            )
            return

        try:
            encoded_credentials = auth_header.split(" ")[1]
            decoded_credentials = base64.b64decode(encoded_credentials).decode("utf-8")
            # Split on the first ':' only — passwords may contain colons.
            username, password = decoded_credentials.split(":", 1)
            credentials = HTTPBasicCredentials(username=username, password=password)

            swagger_user = await self._verify_credentials(credentials)
            if not swagger_user:
                await self._unauthorized(scope, receive, send, "Identifiants invalides")
                return

            # Expose the authenticated Swagger user to downstream handlers.
            if "state" not in scope:
                scope["state"] = {}
            scope["state"]["swagger_user"] = swagger_user
            logger.info(
                f"Swagger auth: {swagger_user['username']} - tags: {swagger_user.get('allowed_tags', 'ALL')}"
            )
        except Exception as e:
            # Malformed base64 / missing ':' / unexpected decode errors.
            logger.error(f"Erreur parsing auth header: {e}")
            await self._unauthorized(
                scope, receive, send, "Format d'authentification invalide"
            )
            return

        await self.app(scope, receive, send)

    async def _unauthorized(self, scope, receive, send, detail: str):
        """Send a 401 JSON response carrying the Basic-auth challenge header."""
        response = JSONResponse(
            status_code=status.HTTP_401_UNAUTHORIZED,
            content={"detail": detail},
            headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
        )
        await response(scope, receive, send)

    async def _verify_credentials(
        self, credentials: HTTPBasicCredentials
    ) -> Optional[dict]:
        """Check the credentials against active SwaggerUser rows.

        Returns a dict snapshot of the user on success (also recording
        last_login), or None on any failure. Imports are local to avoid
        circular imports at module load time.
        """
        from database.db_config import async_session_factory
        from database.models.api_key import SwaggerUser
        from security.auth import verify_password

        try:
            async with async_session_factory() as session:
                result = await session.execute(
                    select(SwaggerUser).where(
                        SwaggerUser.username == credentials.username
                    )
                )
                swagger_user = result.scalar_one_or_none()
                if swagger_user and swagger_user.is_active:
                    if verify_password(
                        credentials.password, swagger_user.hashed_password
                    ):
                        swagger_user.last_login = datetime.now()
                        await session.commit()
                        logger.info(f"Accès Swagger autorisé: {credentials.username}")
                        return {
                            "id": swagger_user.id,
                            "username": swagger_user.username,
                            "allowed_tags": swagger_user.allowed_tags_list,
                            "is_active": swagger_user.is_active,
                        }
                logger.warning(f"Accès Swagger refusé: {credentials.username}")
                return None
        except Exception as e:
            logger.error(f"Erreur vérification credentials: {e}", exc_info=True)
            return None
class ApiKeyMiddlewareHTTP(BaseHTTPMiddleware):
    """HTTP middleware validating X-API-Key credentials.

    JWT Bearer tokens are passed through for FastAPI's own auth dependencies
    to handle; API keys (including keys mistakenly sent as a Bearer token
    with the 'sdk_live_' prefix) are validated here against the database,
    with rate limiting and per-endpoint access control.
    """

    # Paths that never require an API key (docs, health, auth flows, webhooks).
    EXCLUDED_PATHS = [
        "/docs",
        "/redoc",
        "/openapi.json",
        "/",
        "/health",
        "/auth",
        "/api-keys/verify",
        "/universign/webhook",
    ]

    def _is_excluded_path(self, path: str) -> bool:
        """True if the path is exempt from API-key authentication."""
        if path == "/":
            return True
        for excluded in self.EXCLUDED_PATHS:
            if excluded == "/":
                # "/" would prefix-match everything; only exact match above.
                continue
            if path == excluded or path.startswith(excluded + "/"):
                return True
        return False

    async def dispatch(self, request: Request, call_next: Callable):
        """Route each request to JWT pass-through, API-key validation, or neither."""
        path = request.url.path
        method = request.method
        if self._is_excluded_path(path):
            return await call_next(request)

        auth_header = request.headers.get("Authorization")
        api_key_header = request.headers.get("X-API-Key")
        if api_key_header is not None:
            # Normalize: a blank or whitespace-only header counts as absent.
            api_key_header = api_key_header.strip() or None

        if auth_header and auth_header.startswith("Bearer "):
            token = auth_header.split(" ", 1)[1].strip()
            if token.startswith("sdk_live_"):
                # Tolerate clients that put the API key in the wrong header.
                logger.warning(
                    "API Key envoyée dans Authorization au lieu de X-API-Key"
                )
                return await self._handle_api_key_auth(
                    request, token, path, method, call_next
                )
            logger.debug(f"JWT détecté pour {method} {path} → délégation à FastAPI")
            request.state.authenticated_via = "jwt"
            return await call_next(request)

        if api_key_header:
            logger.debug(f"API Key détectée pour {method} {path}")
            return await self._handle_api_key_auth(
                request, api_key_header, path, method, call_next
            )

        # No credentials: let FastAPI's per-route dependencies decide.
        logger.debug(f"Aucune auth pour {method} {path} → délégation à FastAPI")
        return await call_next(request)

    async def _handle_api_key_auth(
        self,
        request: Request,
        api_key: str,
        path: str,
        method: str,
        call_next: Callable,
    ):
        """Validate an API key: existence, rate limit, endpoint whitelist.

        On success, attaches the ApiKey object to request.state and forwards
        the request; otherwise returns a 401/403/429 JSON response.
        """
        try:
            # Local imports avoid circular imports at module load time.
            from database.db_config import async_session_factory
            from services.api_key import ApiKeyService

            async with async_session_factory() as session:
                service = ApiKeyService(session)
                api_key_obj = await service.verify_api_key(api_key)
                if not api_key_obj:
                    logger.warning(f"Clé API invalide: {method} {path}")
                    return JSONResponse(
                        status_code=status.HTTP_401_UNAUTHORIZED,
                        content={
                            "detail": "Clé API invalide ou expirée",
                            "hint": "Vérifiez votre clé X-API-Key",
                        },
                    )

                is_allowed, rate_info = await service.check_rate_limit(api_key_obj)
                if not is_allowed:
                    logger.warning(f"Rate limit: {api_key_obj.name}")
                    return JSONResponse(
                        status_code=status.HTTP_429_TOO_MANY_REQUESTS,
                        content={"detail": "Rate limit dépassé"},
                        headers={
                            "X-RateLimit-Limit": str(rate_info["limit"]),
                            "X-RateLimit-Remaining": "0",
                        },
                    )

                has_access = await service.check_endpoint_access(api_key_obj, path)
                if not has_access:
                    allowed = (
                        json.loads(api_key_obj.allowed_endpoints)
                        if api_key_obj.allowed_endpoints
                        else ["Tous"]
                    )
                    logger.warning(
                        f"ACCÈS REFUSÉ: {api_key_obj.name}\n"
                        f" Endpoint demandé: {path}\n"
                        f" Endpoints autorisés: {allowed}"
                    )
                    return JSONResponse(
                        status_code=status.HTTP_403_FORBIDDEN,
                        content={
                            "detail": "Accès non autorisé à cet endpoint",
                            "endpoint_requested": path,
                            "api_key_name": api_key_obj.name,
                            "allowed_endpoints": allowed,
                            "hint": "Cette clé API n'a pas accès à cet endpoint.",
                        },
                    )

                request.state.api_key = api_key_obj
                request.state.authenticated_via = "api_key"
                logger.info(f"ACCÈS AUTORISÉ: {api_key_obj.name} → {method} {path}")
                return await call_next(request)
        except Exception as e:
            logger.error(f"Erreur validation API Key: {e}", exc_info=True)
            # Do not leak internal exception details to the client.
            return JSONResponse(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                content={"detail": "Erreur interne du serveur"},
            )
# Backward-compatible alias kept for existing imports of the old class name.
ApiKeyMiddleware = ApiKeyMiddlewareHTTP
def get_api_key_from_request(request: "Request") -> Optional["ApiKey"]:
    """Return the ApiKey ORM object attached by the middleware, or None.

    Annotations are string forward references: the ApiKey model is only
    imported lazily inside the middleware (to avoid circular imports), so
    it is not available at module level here.
    """
    return getattr(request.state, "api_key", None)
def get_auth_method(request: Request) -> str:
    """Name of the scheme that authenticated this request ('jwt', 'api_key'
    or 'none' when no middleware set anything)."""
    try:
        return request.state.authenticated_via
    except AttributeError:
        # Middleware never ran or did not authenticate this request.
        return "none"
def get_swagger_user_from_request(request: Request) -> Optional[dict]:
    """Swagger-user dict stored by SwaggerAuthMiddleware, or None."""
    try:
        return request.state.swagger_user
    except AttributeError:
        return None
# Public surface of this module.
__all__ = [
    "SwaggerAuthMiddleware",
    "ApiKeyMiddlewareHTTP",
    "ApiKeyMiddleware",
    "get_api_key_from_request",
    "get_auth_method",
    "get_swagger_user_from_request",
]

154
routes/api_keys.py Normal file
View file

@ -0,0 +1,154 @@
from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.ext.asyncio import AsyncSession
import logging
from database import get_session, User
from core.dependencies import get_current_user, require_role
from services.api_key import ApiKeyService, api_key_to_response
from schemas.api_key import (
ApiKeyCreate,
ApiKeyCreatedResponse,
ApiKeyResponse,
ApiKeyList,
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api-keys", tags=["API Keys Management"])
@router.post(
    "",
    response_model=ApiKeyCreatedResponse,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(require_role("admin", "super_admin"))],
)
async def create_api_key(
    data: ApiKeyCreate,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """Create a new API key (admin / super_admin only).

    The plaintext key is included in this response only; subsequent reads
    of the key expose metadata alone.
    """
    service = ApiKeyService(session)
    api_key_obj, api_key_plain = await service.create_api_key(
        name=data.name,
        description=data.description,
        created_by=user.email,
        user_id=user.id,
        expires_in_days=data.expires_in_days,
        rate_limit_per_minute=data.rate_limit_per_minute,
        allowed_endpoints=data.allowed_endpoints,
    )
    logger.info(f"Clé API créée par {user.email}: {data.name}")

    # Inject the one-time plaintext key into the response payload.
    response_data = api_key_to_response(api_key_obj)
    response_data["api_key"] = api_key_plain
    return ApiKeyCreatedResponse(**response_data)
@router.get("", response_model=ApiKeyList)
async def list_api_keys(
    include_revoked: bool = Query(False, description="Inclure les clés révoquées"),
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """List API keys: admins see every key, other users only their own."""
    service = ApiKeyService(session)
    # Admins get an unfiltered listing (user_id=None); others are scoped.
    is_admin = user.role in ["admin", "super_admin"]
    owner_filter = None if is_admin else user.id
    keys = await service.list_api_keys(
        include_revoked=include_revoked, user_id=owner_filter
    )
    items = [ApiKeyResponse(**api_key_to_response(k)) for k in keys]
    return ApiKeyList(total=len(items), items=items)
@router.get("/{key_id}", response_model=ApiKeyResponse)
async def get_api_key(
    key_id: str,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """Fetch one API key by id; non-admins may only read keys they own."""
    service = ApiKeyService(session)
    api_key_obj = await service.get_by_id(key_id)

    # Guard clauses: unknown id, then ownership check for non-admins.
    if not api_key_obj:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Clé API {key_id} introuvable",
        )
    is_admin = user.role in ["admin", "super_admin"]
    if not is_admin and api_key_obj.user_id != user.id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Accès refusé à cette clé",
        )

    return ApiKeyResponse(**api_key_to_response(api_key_obj))
@router.delete("/{key_id}", status_code=status.HTTP_200_OK)
async def revoke_api_key(
    key_id: str,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """Revoke an API key.

    Admins may revoke any key; other users only keys they own.
    Raises 404 for an unknown id, 403 on ownership mismatch, and 500 when
    the service fails to revoke.
    """
    service = ApiKeyService(session)
    api_key_obj = await service.get_by_id(key_id)
    if not api_key_obj:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Clé API {key_id} introuvable",
        )
    # Same ownership rule as GET /{key_id}.
    if user.role not in ["admin", "super_admin"] and api_key_obj.user_id != user.id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Accès refusé à cette clé",
        )
    success = await service.revoke_api_key(key_id)
    if not success:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Erreur lors de la révocation",
        )
    logger.info(f"Clé API révoquée par {user.email}: {api_key_obj.name}")
    return {
        "success": True,
        "message": f"Clé API '{api_key_obj.name}' révoquée avec succès",
    }
@router.post("/verify", status_code=status.HTTP_200_OK)
async def verify_api_key_endpoint(
    api_key: str = Query(..., description="Clé API à vérifier"),
    session: AsyncSession = Depends(get_session),
):
    """Check whether an API key is currently valid (no auth required)."""
    api_key_obj = await ApiKeyService(session).verify_api_key(api_key)
    if not api_key_obj:
        return {
            "valid": False,
            "message": "Clé API invalide, expirée ou révoquée",
        }
    return {
        "valid": True,
        "message": "Clé API valide",
        "key_name": api_key_obj.name,
        "rate_limit": api_key_obj.rate_limit_per_minute,
        "expires_at": api_key_obj.expires_at,
    }

View file

@ -7,6 +7,7 @@ from typing import Optional
import uuid
from database import get_session, User, RefreshToken, LoginAttempt
from core.dependencies import get_current_user
from security.auth import (
hash_password,
verify_password,
@ -19,7 +20,6 @@ from security.auth import (
hash_token,
)
from services.email_service import AuthEmailService
from core.dependencies import get_current_user
from config.config import settings
import logging
@ -101,7 +101,7 @@ async def check_rate_limit(
)
failed_attempts = result.scalars().all()
if len(failed_attempts) >= 5:
if len(failed_attempts) >= 15:
return False, "Trop de tentatives échouées. Réessayez dans 15 minutes."
return True, ""
@ -286,7 +286,7 @@ async def login(
if user:
user.failed_login_attempts += 1
if user.failed_login_attempts >= 5:
if user.failed_login_attempts >= 15:
user.locked_until = datetime.now() + timedelta(minutes=15)
await session.commit()
raise HTTPException(
@ -510,7 +510,7 @@ async def logout(
token_record.revoked_at = datetime.now()
await session.commit()
logger.info(f"👋 Déconnexion: {user.email}")
logger.info(f" Déconnexion: {user.email}")
return {"success": True, "message": "Déconnexion réussie"}

158
routes/enterprise.py Normal file
View file

@ -0,0 +1,158 @@
from fastapi import APIRouter, HTTPException, Query, Path
import httpx
import logging
from datetime import datetime
from schemas import EntrepriseSearch, EntrepriseSearchResponse
from utils.enterprise import (
calculer_tva_intracommunautaire,
mapper_resultat_api,
rechercher_entreprise_api,
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/entreprises", tags=["Entreprises"])
@router.get("/search", response_model=EntrepriseSearchResponse)
async def rechercher_entreprise(
    q: str = Query(..., min_length=2, description="Nom d'entreprise, SIREN ou SIRET"),
    per_page: int = Query(5, ge=1, le=25, description="Nombre de résultats (max 25)"),
):
    """Search companies through the public Sirene API.

    Returns a (possibly empty) list of mapped results; raw entries the
    mapper rejects are silently dropped. Raises 500 on unexpected failures.
    """
    try:
        logger.info(f"Recherche entreprise: '{q}'")
        api_response = await rechercher_entreprise_api(q, per_page)
        resultats_api = api_response.get("results", [])
        if not resultats_api:
            logger.info(f"Aucun résultat pour: {q}")
            return EntrepriseSearchResponse(total_results=0, results=[], query=q)

        # Map raw API payloads to the response schema, skipping invalid rows.
        entreprises = []
        for data in resultats_api:
            entreprise = mapper_resultat_api(data)
            if entreprise:
                entreprises.append(entreprise)

        logger.info(f"{len(entreprises)} résultat(s) trouvé(s)")
        return EntrepriseSearchResponse(
            total_results=len(entreprises), results=entreprises, query=q
        )
    except HTTPException:
        # Re-raise intentional HTTP errors untouched.
        raise
    except Exception as e:
        logger.error(f"Erreur recherche entreprise: {e}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Erreur lors de la recherche: {str(e)}"
        )
@router.get("/siren/{siren}", response_model=EntrepriseSearch)
async def lire_entreprise_par_siren(
    siren: str = Path(
        ...,
        min_length=9,
        max_length=9,
        pattern=r"^\d{9}$",
        description="Numéro SIREN (9 chiffres)",
    ),
):
    """Fetch a single company by its exact SIREN.

    Raises 404 when no company matches (or the top result's SIREN differs),
    500 when the mapper rejects the payload or on unexpected failures.
    """
    try:
        logger.info(f"Lecture entreprise SIREN: {siren}")
        api_response = await rechercher_entreprise_api(siren, per_page=1)
        resultats = api_response.get("results", [])
        if not resultats:
            raise HTTPException(
                status_code=404,
                detail=f"Aucune entreprise trouvée pour le SIREN {siren}",
            )
        entreprise_data = resultats[0]
        # The search API may return fuzzy matches; require an exact SIREN.
        if entreprise_data.get("siren") != siren:
            raise HTTPException(status_code=404, detail=f"SIREN {siren} introuvable")
        entreprise = mapper_resultat_api(entreprise_data)
        if not entreprise:
            raise HTTPException(
                status_code=500,
                detail="Erreur lors du traitement des données entreprise",
            )
        if not entreprise.is_active:
            # Ceased companies are still returned, but flagged in the logs.
            logger.warning(f"Entreprise CESSÉE: {siren}")
        return entreprise
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Erreur lecture SIREN {siren}: {e}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Erreur lors de la récupération: {str(e)}"
        )
@router.get("/tva/{siren}")
async def calculer_tva(
    siren: str = Path(
        ...,
        min_length=9,
        max_length=9,
        pattern=r"^\d{9}$",
        description="Numéro SIREN (9 chiffres)",
    ),
):
    """Compute the French intra-community VAT number for a SIREN."""
    numero_tva = calculer_tva_intracommunautaire(siren)
    if not numero_tva:
        raise HTTPException(status_code=400, detail=f"SIREN invalide: {siren}")
    return {
        "siren": siren,
        "vat_number": numero_tva,
        "format": "FR + Clé (2 chiffres) + SIREN (9 chiffres)",
    }
@router.get("/health")
async def health_check_api_sirene():
    """Probe the public Sirene API once and report its availability."""
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(
                "https://recherche-entreprises.api.gouv.fr/search",
                params={"q": "test", "per_page": 1},
            )
            # Any non-200 answer means the upstream is reachable but degraded.
            if response.status_code != 200:
                return {
                    "status": "degraded",
                    "api_sirene": f"statut {response.status_code}",
                    "timestamp": datetime.now().isoformat(),
                }
            return {
                "status": "healthy",
                "api_sirene": "disponible",
                "response_time_ms": response.elapsed.total_seconds() * 1000,
                "timestamp": datetime.now().isoformat(),
            }
    except Exception as e:
        # Network failure, timeout, DNS error, etc.
        logger.error(f"Health check failed: {e}")
        return {
            "status": "unhealthy",
            "api_sirene": "indisponible",
            "error": str(e),
            "timestamp": datetime.now().isoformat(),
        }

View file

@ -1,11 +1,11 @@
from fastapi import APIRouter, Depends, HTTPException, Query, Request
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, or_, and_
from sqlalchemy import select, func
from sqlalchemy.orm import selectinload
from typing import List, Optional
from datetime import datetime, timedelta
from pydantic import BaseModel, EmailStr
from datetime import datetime
import logging
from core.dependencies import get_current_user
from data.data import templates_signature_email
from email_queue import email_queue
from database import UniversignSignerStatus, UniversignTransactionStatus, get_session
@ -14,76 +14,41 @@ from database import (
UniversignSigner,
UniversignSyncLog,
LocalDocumentStatus,
SageDocumentType,
)
import os
from pathlib import Path
import json
from services.universign_sync import UniversignSyncService
from config.config import settings
from utils.generic_functions import normaliser_type_doc
from utils.universign_status_mapping import get_status_message, map_universign_to_local
from database.models.email import EmailLog
from database.enum.status import StatutEmail
from schemas import (
SyncStatsResponse,
CreateSignatureRequest,
TransactionResponse,
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/universign", tags=["Universign"])
router = APIRouter(
prefix="/universign",
tags=["Universign"],
)
sync_service = UniversignSyncService(
api_url=settings.universign_api_url, api_key=settings.universign_api_key
)
class CreateSignatureRequest(BaseModel):
"""Demande de création d'une signature"""
sage_document_id: str
sage_document_type: SageDocumentType
signer_email: EmailStr
signer_name: str
document_name: Optional[str] = None
class TransactionResponse(BaseModel):
"""Réponse détaillée d'une transaction"""
id: str
transaction_id: str
sage_document_id: str
sage_document_type: str
universign_status: str
local_status: str
local_status_label: str
signer_url: Optional[str]
document_url: Optional[str]
created_at: datetime
sent_at: Optional[datetime]
signed_at: Optional[datetime]
last_synced_at: Optional[datetime]
needs_sync: bool
signers: List[dict]
class SyncStatsResponse(BaseModel):
"""Statistiques de synchronisation"""
total_transactions: int
pending_sync: int
signed: int
in_progress: int
refused: int
expired: int
last_sync_at: Optional[datetime]
@router.post("/signatures/create", response_model=TransactionResponse)
async def create_signature(
request: CreateSignatureRequest, session: AsyncSession = Depends(get_session)
):
try:
# === VÉRIFICATION DOUBLON RENFORCÉE ===
logger.info(
f"🔍 Vérification doublon pour: {request.sage_document_id} "
f"Vérification doublon pour: {request.sage_document_id} "
f"(type: {request.sage_document_type.name})"
)
@ -96,10 +61,9 @@ async def create_signature(
if all_existing:
logger.warning(
f"⚠️ {len(all_existing)} transaction(s) existante(s) trouvée(s)"
f"{len(all_existing)} transaction(s) existante(s) trouvée(s)"
)
# Filtrer les transactions non-finales
active_txs = [
tx
for tx in all_existing
@ -115,7 +79,7 @@ async def create_signature(
if active_txs:
active_tx = active_txs[0]
logger.error(
f"Transaction active existante: {active_tx.transaction_id} "
f"Transaction active existante: {active_tx.transaction_id} "
f"(statut: {active_tx.local_status.value})"
)
raise HTTPException(
@ -126,11 +90,10 @@ async def create_signature(
)
logger.info(
"Toutes les transactions existantes sont finales, création autorisée"
"Toutes les transactions existantes sont finales, création autorisée"
)
# Génération PDF
logger.info(f"📄 Génération PDF: {request.sage_document_id}")
logger.info(f"Génération PDF: {request.sage_document_id}")
pdf_bytes = email_queue._generate_pdf(
request.sage_document_id, normaliser_type_doc(request.sage_document_type)
)
@ -138,15 +101,14 @@ async def create_signature(
if not pdf_bytes:
raise HTTPException(400, "Échec génération PDF")
logger.info(f"PDF généré: {len(pdf_bytes)} octets")
logger.info(f"PDF généré: {len(pdf_bytes)} octets")
# === CRÉATION TRANSACTION UNIVERSIGN ===
import requests
import uuid
auth = (settings.universign_api_key, "")
logger.info("🔄 Création transaction Universign...")
logger.info("Création transaction Universign...")
resp = requests.post(
f"{settings.universign_api_url}/transactions",
@ -160,14 +122,13 @@ async def create_signature(
)
if resp.status_code != 200:
logger.error(f"Erreur Universign (création): {resp.text}")
logger.error(f"Erreur Universign (création): {resp.text}")
raise HTTPException(500, f"Erreur Universign: {resp.status_code}")
universign_tx_id = resp.json().get("id")
logger.info(f"Transaction Universign créée: {universign_tx_id}")
logger.info(f"Transaction Universign créée: {universign_tx_id}")
# Upload PDF
logger.info("📤 Upload PDF...")
logger.info("Upload PDF...")
files = {
"file": (f"{request.sage_document_id}.pdf", pdf_bytes, "application/pdf")
}
@ -176,14 +137,13 @@ async def create_signature(
)
if resp.status_code not in [200, 201]:
logger.error(f"Erreur upload: {resp.text}")
logger.error(f"Erreur upload: {resp.text}")
raise HTTPException(500, "Erreur upload PDF")
file_id = resp.json().get("id")
logger.info(f"PDF uploadé: {file_id}")
logger.info(f"PDF uploadé: {file_id}")
# Attachement document
logger.info("🔗 Attachement document...")
logger.info("Attachement document...")
resp = requests.post(
f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents",
auth=auth,
@ -196,8 +156,7 @@ async def create_signature(
document_id = resp.json().get("id")
# Création champ signature
logger.info("✍️ Création champ signature...")
logger.info("Création champ signature...")
resp = requests.post(
f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents/{document_id}/fields",
auth=auth,
@ -210,8 +169,7 @@ async def create_signature(
field_id = resp.json().get("id")
# Liaison signataire
logger.info(f"👤 Liaison signataire: {request.signer_email}")
logger.info(f"Liaison signataire: {request.signer_email}")
resp = requests.post(
f"{settings.universign_api_url}/transactions/{universign_tx_id}/signatures",
auth=auth,
@ -222,8 +180,7 @@ async def create_signature(
if resp.status_code not in [200, 201]:
raise HTTPException(500, "Erreur liaison signataire")
# Démarrage transaction
logger.info("🚀 Démarrage transaction...")
logger.info("Démarrage transaction...")
resp = requests.post(
f"{settings.universign_api_url}/transactions/{universign_tx_id}/start",
auth=auth,
@ -235,7 +192,6 @@ async def create_signature(
final_data = resp.json()
# Extraction URL de signature
signer_url = ""
if final_data.get("actions"):
for action in final_data["actions"]:
@ -246,14 +202,13 @@ async def create_signature(
if not signer_url:
raise HTTPException(500, "URL de signature non retournée")
logger.info("URL de signature obtenue")
logger.info("URL de signature obtenue")
# === ENREGISTREMENT LOCAL ===
local_id = str(uuid.uuid4())
transaction = UniversignTransaction(
id=local_id,
transaction_id=universign_tx_id, # ⚠️ Utiliser l'ID Universign, ne jamais le changer
transaction_id=universign_tx_id,
sage_document_id=request.sage_document_id,
sage_document_type=request.sage_document_type,
universign_status=UniversignTransactionStatus.STARTED,
@ -283,10 +238,9 @@ async def create_signature(
await session.commit()
logger.info(
f"💾 Transaction sauvegardée: {local_id} (Universign: {universign_tx_id})"
f"Transaction sauvegardée: {local_id} (Universign: {universign_tx_id})"
)
# === ENVOI EMAIL AVEC TEMPLATE ===
template = templates_signature_email["demande_signature"]
type_labels = {
@ -341,7 +295,6 @@ async def create_signature(
email_queue.enqueue(email_log.id)
# === MISE À JOUR STATUT SAGE (Confirmé = 1) ===
try:
from sage_client import sage_client
@ -356,7 +309,6 @@ async def create_signature(
except Exception as e:
logger.warning(f"Impossible de mettre à jour le statut Sage: {e}")
# === RÉPONSE ===
return TransactionResponse(
id=transaction.id,
transaction_id=transaction.transaction_id,
@ -436,6 +388,15 @@ async def list_transactions(
}
for s in tx.signers
],
signed_document_available=bool(
tx.signed_document_path and Path(tx.signed_document_path).exists()
),
signed_document_downloaded_at=tx.signed_document_downloaded_at,
signed_document_size_kb=(
tx.signed_document_size_bytes / 1024
if tx.signed_document_size_bytes
else None
),
)
for tx in transactions
]
@ -482,6 +443,15 @@ async def get_transaction(
}
for s in tx.signers
],
signed_document_available=bool(
tx.signed_document_path and Path(tx.signed_document_path).exists()
),
signed_document_downloaded_at=tx.signed_document_downloaded_at,
signed_document_size_kb=(
tx.signed_document_size_bytes / 1024
if tx.signed_document_size_bytes
else None
),
)
@ -527,63 +497,46 @@ async def sync_all_transactions(
return {"success": True, "stats": stats, "timestamp": datetime.now().isoformat()}
@router.post("/webhook")
@router.post("/webhook/")
@router.post("/webhook", dependencies=[])
@router.post("/webhook/", dependencies=[])
async def webhook_universign(
request: Request, session: AsyncSession = Depends(get_session)
):
"""
CORRECTION : Extraction correcte du transaction_id selon la structure réelle d'Universign
"""
try:
payload = await request.json()
# 📋 LOG COMPLET du payload pour débogage
logger.info(
f"📥 Webhook Universign reçu - Type: {payload.get('type', 'unknown')}"
)
logger.info(f"Webhook Universign reçu - Type: {payload.get('type', 'unknown')}")
logger.debug(f"Payload complet: {json.dumps(payload, indent=2)}")
# ✅ EXTRACTION CORRECTE DU TRANSACTION_ID
transaction_id = None
# 🔍 Structure 1 : Événements avec payload imbriqué (la plus courante)
# Exemple : transaction.lifecycle.created, transaction.lifecycle.started, etc.
if payload.get("type", "").startswith("transaction.") and "payload" in payload:
# Le transaction_id est dans payload.object.id
nested_object = payload.get("payload", {}).get("object", {})
if nested_object.get("object") == "transaction":
transaction_id = nested_object.get("id")
logger.info(
f"Transaction ID extrait de payload.object.id: {transaction_id}"
f"Transaction ID extrait de payload.object.id: {transaction_id}"
)
# 🔍 Structure 2 : Action événements (action.opened, action.completed)
elif payload.get("type", "").startswith("action."):
# Le transaction_id est directement dans payload.object.transaction_id
transaction_id = (
payload.get("payload", {}).get("object", {}).get("transaction_id")
)
logger.info(
f"Transaction ID extrait de payload.object.transaction_id: {transaction_id}"
f"Transaction ID extrait de payload.object.transaction_id: {transaction_id}"
)
# 🔍 Structure 3 : Transaction directe (fallback)
elif payload.get("object") == "transaction":
transaction_id = payload.get("id")
logger.info(f"Transaction ID extrait direct: {transaction_id}")
logger.info(f"Transaction ID extrait direct: {transaction_id}")
# 🔍 Structure 4 : Ancien format (pour rétro-compatibilité)
elif "transaction" in payload:
transaction_id = payload.get("transaction", {}).get("id")
logger.info(
f"✅ Transaction ID extrait de transaction.id: {transaction_id}"
)
logger.info(f"Transaction ID extrait de transaction.id: {transaction_id}")
# ❌ Échec d'extraction
if not transaction_id:
logger.error(
f"Transaction ID introuvable dans webhook\n"
f"Transaction ID introuvable dans webhook\n"
f"Type d'événement: {payload.get('type', 'unknown')}\n"
f"Clés racine: {list(payload.keys())}\n"
f"Payload simplifié: {json.dumps({k: v if k != 'payload' else '...' for k, v in payload.items()})}"
@ -595,9 +548,8 @@ async def webhook_universign(
"event_id": payload.get("id"),
}, 400
logger.info(f"🎯 Transaction ID identifié: {transaction_id}")
logger.info(f"Transaction ID identifié: {transaction_id}")
# Vérifier si la transaction existe localement
query = select(UniversignTransaction).where(
UniversignTransaction.transaction_id == transaction_id
)
@ -606,7 +558,7 @@ async def webhook_universign(
if not tx:
logger.warning(
f"⚠️ Transaction {transaction_id} inconnue en local\n"
f"Transaction {transaction_id} inconnue en local\n"
f"Type d'événement: {payload.get('type')}\n"
f"Elle sera synchronisée au prochain polling"
)
@ -617,22 +569,20 @@ async def webhook_universign(
"event_type": payload.get("type"),
}
# Traiter le webhook
success, error = await sync_service.process_webhook(
session, payload, transaction_id
)
if not success:
logger.error(f"Erreur traitement webhook: {error}")
logger.error(f"Erreur traitement webhook: {error}")
return {
"status": "error",
"message": error,
"transaction_id": transaction_id,
}, 500
# ✅ Succès
logger.info(
f"Webhook traité avec succès\n"
f"Webhook traité avec succès\n"
f"Transaction: {transaction_id}\n"
f"Nouveau statut: {tx.local_status.value if tx else 'unknown'}\n"
f"Type d'événement: {payload.get('type')}"
@ -647,7 +597,7 @@ async def webhook_universign(
}
except Exception as e:
logger.error(f"💥 Erreur critique webhook: {e}", exc_info=True)
logger.error(f"Erreur critique webhook: {e}", exc_info=True)
return {"status": "error", "message": str(e)}, 500
@ -655,17 +605,14 @@ async def webhook_universign(
async def get_sync_stats(session: AsyncSession = Depends(get_session)):
"""Statistiques globales de synchronisation"""
# Total
total_query = select(func.count(UniversignTransaction.id))
total = (await session.execute(total_query)).scalar()
# En attente de sync
pending_query = select(func.count(UniversignTransaction.id)).where(
UniversignTransaction.needs_sync
)
pending = (await session.execute(pending_query)).scalar()
# Par statut
signed_query = select(func.count(UniversignTransaction.id)).where(
UniversignTransaction.local_status == LocalDocumentStatus.SIGNED
)
@ -686,7 +633,6 @@ async def get_sync_stats(session: AsyncSession = Depends(get_session)):
)
expired = (await session.execute(expired_query)).scalar()
# Dernière sync
last_sync_query = select(func.max(UniversignTransaction.last_synced_at))
last_sync = (await session.execute(last_sync_query)).scalar()
@ -707,7 +653,6 @@ async def get_transaction_logs(
limit: int = Query(50, le=500),
session: AsyncSession = Depends(get_session),
):
# Trouver la transaction
tx_query = select(UniversignTransaction).where(
UniversignTransaction.transaction_id == transaction_id
)
@ -717,7 +662,6 @@ async def get_transaction_logs(
if not tx:
raise HTTPException(404, "Transaction introuvable")
# Logs
logs_query = (
select(UniversignSyncLog)
.where(UniversignSyncLog.transaction_id == tx.id)
@ -746,9 +690,6 @@ async def get_transaction_logs(
}
# Ajouter ces routes dans universign.py
@router.get("/documents/{sage_document_id}/signatures")
async def get_signatures_for_document(
sage_document_id: str,
@ -790,10 +731,6 @@ async def cleanup_duplicate_signatures(
),
session: AsyncSession = Depends(get_session),
):
"""
Supprime les doublons de signatures pour un document.
Garde une seule transaction (la plus récente ou ancienne selon le paramètre).
"""
query = (
select(UniversignTransaction)
.where(UniversignTransaction.sage_document_id == sage_document_id)
@ -815,7 +752,6 @@ async def cleanup_duplicate_signatures(
"deleted_count": 0,
}
# Garder la première (selon l'ordre), supprimer les autres
to_keep = transactions[0]
to_delete = transactions[1:]
@ -875,13 +811,8 @@ async def delete_transaction(
async def cleanup_all_duplicates(
session: AsyncSession = Depends(get_session),
):
"""
Nettoie tous les doublons dans la base.
Pour chaque document avec plusieurs transactions, garde la plus récente non-erreur ou la plus récente.
"""
from sqlalchemy import func
# Trouver les documents avec plusieurs transactions
subquery = (
select(
UniversignTransaction.sage_document_id,
@ -899,7 +830,6 @@ async def cleanup_all_duplicates(
cleanup_details = []
for doc_id in duplicate_docs:
# Récupérer toutes les transactions pour ce document
tx_query = (
select(UniversignTransaction)
.where(UniversignTransaction.sage_document_id == doc_id)
@ -908,7 +838,6 @@ async def cleanup_all_duplicates(
tx_result = await session.execute(tx_query)
transactions = tx_result.scalars().all()
# Priorité: SIGNE > EN_COURS > EN_ATTENTE > autres
priority = {"SIGNE": 0, "EN_COURS": 1, "EN_ATTENTE": 2}
def sort_key(tx):
@ -946,115 +875,11 @@ async def cleanup_all_duplicates(
}
@router.get("/admin/diagnostic", tags=["Admin"])
async def diagnostic_complet(session: AsyncSession = Depends(get_session)):
"""
Diagnostic complet de l'état des transactions Universign
"""
try:
# Statistiques générales
total_query = select(func.count(UniversignTransaction.id))
total = (await session.execute(total_query)).scalar()
# Par statut local
statuts_query = select(
UniversignTransaction.local_status, func.count(UniversignTransaction.id)
).group_by(UniversignTransaction.local_status)
statuts_result = await session.execute(statuts_query)
statuts = {status.value: count for status, count in statuts_result.all()}
# Transactions sans sync récente
date_limite = datetime.now() - timedelta(hours=1)
sans_sync_query = select(func.count(UniversignTransaction.id)).where(
and_(
UniversignTransaction.needs_sync == True,
or_(
UniversignTransaction.last_synced_at < date_limite,
UniversignTransaction.last_synced_at.is_(None),
),
)
)
sans_sync = (await session.execute(sans_sync_query)).scalar()
# Doublons potentiels
doublons_query = (
select(
UniversignTransaction.sage_document_id,
func.count(UniversignTransaction.id).label("count"),
)
.group_by(UniversignTransaction.sage_document_id)
.having(func.count(UniversignTransaction.id) > 1)
)
doublons_result = await session.execute(doublons_query)
doublons = doublons_result.fetchall()
# Transactions avec erreurs de sync
erreurs_query = select(func.count(UniversignTransaction.id)).where(
UniversignTransaction.sync_error.isnot(None)
)
erreurs = (await session.execute(erreurs_query)).scalar()
# Transactions sans webhook reçu
sans_webhook_query = select(func.count(UniversignTransaction.id)).where(
and_(
UniversignTransaction.webhook_received == False,
UniversignTransaction.local_status != LocalDocumentStatus.PENDING,
)
)
sans_webhook = (await session.execute(sans_webhook_query)).scalar()
diagnostic = {
"timestamp": datetime.now().isoformat(),
"total_transactions": total,
"repartition_statuts": statuts,
"problemes_detectes": {
"sans_sync_recente": sans_sync,
"doublons_possibles": len(doublons),
"erreurs_sync": erreurs,
"sans_webhook": sans_webhook,
},
"documents_avec_doublons": [
{"document_id": doc_id, "nombre_transactions": count}
for doc_id, count in doublons
],
"recommandations": [],
}
# Recommandations
if sans_sync > 0:
diagnostic["recommandations"].append(
f"🔄 {sans_sync} transaction(s) à synchroniser. "
f"Utilisez POST /universign/sync/all"
)
if len(doublons) > 0:
diagnostic["recommandations"].append(
f"⚠️ {len(doublons)} document(s) avec doublons. "
f"Utilisez POST /universign/cleanup/all-duplicates"
)
if erreurs > 0:
diagnostic["recommandations"].append(
f"{erreurs} transaction(s) en erreur. "
f"Vérifiez les logs avec GET /universign/transactions?status=ERREUR"
)
return diagnostic
except Exception as e:
logger.error(f"Erreur diagnostic: {e}")
raise HTTPException(500, str(e))
@router.post("/admin/force-sync-all", tags=["Admin"])
async def forcer_sync_toutes_transactions(
max_transactions: int = Query(200, le=500),
session: AsyncSession = Depends(get_session),
):
"""
Force la synchronisation de TOUTES les transactions (même finales)
À utiliser pour réparer les incohérences
"""
try:
query = (
select(UniversignTransaction)
@ -1079,7 +904,7 @@ async def forcer_sync_toutes_transactions(
previous_status = transaction.local_status.value
logger.info(
f"🔄 Force sync: {transaction.transaction_id} (statut: {previous_status})"
f"Force sync: {transaction.transaction_id} (statut: {previous_status})"
)
success, error = await sync_service.sync_transaction(
@ -1128,9 +953,6 @@ async def forcer_sync_toutes_transactions(
async def reparer_transaction(
transaction_id: str, session: AsyncSession = Depends(get_session)
):
"""
Répare une transaction spécifique en la re-synchronisant depuis Universign
"""
try:
query = select(UniversignTransaction).where(
UniversignTransaction.transaction_id == transaction_id
@ -1148,7 +970,6 @@ async def reparer_transaction(
else None
)
# Force sync
success, error = await sync_service.sync_transaction(
session, transaction, force=True
)
@ -1185,11 +1006,7 @@ async def reparer_transaction(
async def trouver_transactions_inconsistantes(
session: AsyncSession = Depends(get_session),
):
"""
Trouve les transactions dont le statut local ne correspond pas au statut Universign
"""
try:
# Toutes les transactions non-finales
query = select(UniversignTransaction).where(
UniversignTransaction.local_status.in_(
[LocalDocumentStatus.PENDING, LocalDocumentStatus.IN_PROGRESS]
@ -1203,7 +1020,6 @@ async def trouver_transactions_inconsistantes(
for tx in transactions:
try:
# Récupérer le statut depuis Universign
universign_data = sync_service.fetch_transaction_status(
tx.transaction_id
)
@ -1265,111 +1081,50 @@ async def trouver_transactions_inconsistantes(
raise HTTPException(500, str(e))
@router.post("/admin/nettoyer-transactions-erreur", tags=["Admin"])
async def nettoyer_transactions_erreur(
age_jours: int = Query(
7, description="Supprimer les transactions en erreur de plus de X jours"
),
session: AsyncSession = Depends(get_session),
):
"""
Nettoie les transactions en erreur anciennes
"""
try:
date_limite = datetime.now() - timedelta(days=age_jours)
query = select(UniversignTransaction).where(
and_(
UniversignTransaction.local_status == LocalDocumentStatus.ERROR,
UniversignTransaction.created_at < date_limite,
)
)
result = await session.execute(query)
transactions = result.scalars().all()
supprimees = []
for tx in transactions:
supprimees.append(
{
"transaction_id": tx.transaction_id,
"document_id": tx.sage_document_id,
"date_creation": tx.created_at.isoformat(),
"erreur": tx.sync_error,
}
)
await session.delete(tx)
await session.commit()
return {
"success": True,
"transactions_supprimees": len(supprimees),
"age_limite_jours": age_jours,
"details": supprimees,
}
except Exception as e:
logger.error(f"Erreur nettoyage: {e}")
raise HTTPException(500, str(e))
@router.get("/debug/webhook-payload/{transaction_id}", tags=["Debug"])
async def voir_dernier_webhook(
@router.get("/transactions/{transaction_id}/document/info", tags=["Documents Signés"])
async def info_document_signe(
transaction_id: str, session: AsyncSession = Depends(get_session)
):
"""
Affiche le dernier payload webhook reçu pour une transaction
Informations sur le document signé
"""
try:
query = select(UniversignTransaction).where(
UniversignTransaction.transaction_id == transaction_id
)
result = await session.execute(query)
tx = result.scalar_one_or_none()
transaction = result.scalar_one_or_none()
if not tx:
raise HTTPException(404, "Transaction introuvable")
if not transaction:
raise HTTPException(404, f"Transaction {transaction_id} introuvable")
# Récupérer le dernier log de type webhook
logs_query = (
select(UniversignSyncLog)
.where(
and_(
UniversignSyncLog.transaction_id == tx.id,
UniversignSyncLog.sync_type.like("webhook:%"),
)
)
.order_by(UniversignSyncLog.sync_timestamp.desc())
.limit(1)
)
file_exists = False
file_size_mb = None
logs_result = await session.execute(logs_query)
last_webhook_log = logs_result.scalar_one_or_none()
if transaction.signed_document_path:
file_path = Path(transaction.signed_document_path)
file_exists = file_path.exists()
if not last_webhook_log:
return {
"transaction_id": transaction_id,
"webhook_recu": tx.webhook_received,
"dernier_payload": None,
"message": "Aucun webhook reçu pour cette transaction",
}
if file_exists:
file_size_mb = os.path.getsize(file_path) / (1024 * 1024)
return {
"transaction_id": transaction_id,
"webhook_recu": tx.webhook_received,
"dernier_webhook": {
"timestamp": last_webhook_log.sync_timestamp.isoformat(),
"type": last_webhook_log.sync_type,
"success": last_webhook_log.success,
"payload": json.loads(last_webhook_log.changes_detected)
if last_webhook_log.changes_detected
else None,
},
"document_available_locally": file_exists,
"document_url_universign": transaction.document_url,
"downloaded_at": (
transaction.signed_document_downloaded_at.isoformat()
if transaction.signed_document_downloaded_at
else None
),
"file_size_mb": round(file_size_mb, 2) if file_size_mb else None,
"download_attempts": transaction.download_attempts,
"last_download_error": transaction.download_error,
"local_path": transaction.signed_document_path if file_exists else None,
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Erreur debug webhook: {e}")
logger.error(f"Erreur info document : {e}")
raise HTTPException(500, str(e))

Binary file not shown.

Binary file not shown.

BIN
sage/pdfs/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

View file

@ -1,4 +1,3 @@
# sage_client.py
import requests
from typing import Dict, List, Optional
from config.config import settings
@ -408,7 +407,7 @@ class SageGatewayClient:
return self._post(
"/sage/collaborateurs/list",
{
"filtre": filtre or "", # ⚠️ Convertir None en ""
"filtre": filtre or "",
"actifs_seulement": actifs_seulement,
},
).get("data", [])
@ -427,6 +426,155 @@ class SageGatewayClient:
"/sage/collaborateurs/update", {"numero": numero, **data}
).get("data")
def lire_informations_societe(self) -> Optional[Dict]:
"""Lit les informations de la société depuis P_DOSSIER"""
return self._get("/sage/societe/info").get("data")
def valider_facture(self, numero_facture: str) -> dict:
response = self._post(f"/sage/factures/{numero_facture}/valider", {})
return response.get("data", {})
def devalider_facture(self, numero_facture: str) -> dict:
response = self._post(f"/sage/factures/{numero_facture}/devalider", {})
return response.get("data", {})
def get_statut_validation(self, numero_facture: str) -> dict:
response = self._get(f"/sage/factures/{numero_facture}/statut-validation")
return response.get("data", {})
    def regler_facture(
        self,
        numero_facture: str,
        montant: float,
        mode_reglement: int = 0,
        date_reglement: Optional[str] = None,
        reference: str = "",
        libelle: str = "",
        code_journal: Optional[str] = None,
        devise_code: int = 0,
        cours_devise: float = 1.0,
        tva_encaissement: bool = False,
        compte_general: Optional[str] = None,
    ) -> dict:
        """Settle (pay) a single invoice through the Sage gateway.

        Returns the ``data`` payload of the gateway response, or ``{}`` when
        the response carries no data.
        """
        payload = {
            "montant": montant,
            "mode_reglement": mode_reglement,
            "reference": reference,
            "libelle": libelle,
            "devise_code": devise_code,
            "cours_devise": cours_devise,
            "tva_encaissement": tva_encaissement,
        }
        # Optional fields are only forwarded when provided — presumably so the
        # gateway can apply its own defaults (TODO confirm gateway behaviour).
        if date_reglement:
            payload["date_reglement"] = date_reglement
        if code_journal:
            payload["code_journal"] = code_journal
        if compte_general:
            payload["compte_general"] = compte_general
        return self._post(f"/sage/factures/{numero_facture}/regler", payload).get(
            "data", {}
        )
    def regler_factures_client(
        self,
        client_code: str,
        montant_total: float,
        mode_reglement: int = 0,
        date_reglement: Optional[str] = None,
        reference: str = "",
        libelle: str = "",
        code_journal: Optional[str] = None,
        numeros_factures: Optional[list] = None,
        devise_code: int = 0,
        cours_devise: float = 1.0,
        tva_encaissement: bool = False,
    ) -> dict:
        """Settle several invoices of one client in a single gateway call.

        Returns the ``data`` payload of the gateway response, or ``{}``.
        """
        payload = {
            "client_code": client_code,
            "montant_total": montant_total,
            "mode_reglement": mode_reglement,
            "reference": reference,
            "libelle": libelle,
            "devise_code": devise_code,
            "cours_devise": cours_devise,
            "tva_encaissement": tva_encaissement,
        }
        # Optional fields are only forwarded when provided; when
        # numeros_factures is omitted the gateway chooses which invoices
        # to settle (TODO confirm selection order gateway-side).
        if date_reglement:
            payload["date_reglement"] = date_reglement
        if code_journal:
            payload["code_journal"] = code_journal
        if numeros_factures:
            payload["numeros_factures"] = numeros_factures
        return self._post("/sage/reglements/multiple", payload).get("data", {})
def get_reglements_facture(self, numero_facture: str) -> dict:
"""Récupère les règlements d'une facture"""
return self._get(f"/sage/factures/{numero_facture}/reglements").get("data", {})
def get_reglements_client(
self,
client_code: str,
date_debut: str = None,
date_fin: str = None,
inclure_soldees: bool = True,
) -> dict:
"""Récupère les règlements d'un client"""
params = {"inclure_soldees": inclure_soldees}
if date_debut:
params["date_debut"] = date_debut
if date_fin:
params["date_fin"] = date_fin
return self._get(f"/sage/clients/{client_code}/reglements", params=params).get(
"data", {}
)
def get_journaux_banque(self) -> dict:
return self._get("/sage/journaux/banque").get("data", {})
def get_modes_reglement(self) -> List[dict]:
"""Récupère les modes de règlement depuis Sage"""
return self._get("/sage/reglements/modes").get("data", {}).get("modes", [])
def get_devises(self) -> List[dict]:
"""Récupère les devises disponibles"""
return self._get("/sage/devises").get("data", {}).get("devises", [])
def get_journaux_tresorerie(self) -> List[dict]:
"""Récupère les journaux de trésorerie (banque + caisse)"""
return (
self._get("/sage/journaux/tresorerie").get("data", {}).get("journaux", [])
)
def get_comptes_generaux(
self, prefixe: str = None, type_compte: str = None
) -> List[dict]:
params = {}
if prefixe:
params["prefixe"] = prefixe
if type_compte:
params["type_compte"] = type_compte
return (
self._get("/sage/comptes-generaux", params=params)
.get("data", {})
.get("comptes", [])
)
def get_tva_taux(self) -> List[dict]:
"""Récupère les taux de TVA"""
return self._get("/sage/tva/taux").get("data", {}).get("taux", [])
def get_parametres_encaissement(self) -> dict:
"""Récupère les paramètres TVA sur encaissement"""
return self._get("/sage/parametres/encaissement").get("data", {})
def refresh_cache(self) -> Dict:
return self._post("/sage/cache/refresh")
@ -440,5 +588,14 @@ class SageGatewayClient:
except Exception:
return {"status": "down"}
    def get_tous_reglements(self, params=None):
        """List all settlements; *params* is forwarded as query parameters.

        NOTE(review): unlike the other helpers in this client, these three
        wrappers return the raw response envelope (no ``.get("data")``) —
        confirm callers expect that.
        """
        return self._get("/sage/reglements", params=params)

    def get_reglement_facture_detail(self, facture_no):
        """Settlement details for one invoice number (raw envelope)."""
        return self._get(f"/sage/reglements/facture/{facture_no}")

    def get_reglement_detail(self, rg_no):
        """Details of a single settlement by its RG number (raw envelope)."""
        return self._get(f"/sage/reglements/{rg_no}")
sage_client = SageGatewayClient()

View file

@ -26,7 +26,13 @@ from schemas.documents.documents import TypeDocument, TypeDocumentSQL
from schemas.documents.email import StatutEmail, EmailEnvoi
from schemas.documents.factures import FactureCreate, FactureUpdate
from schemas.documents.livraisons import LivraisonCreate, LivraisonUpdate
from schemas.documents.universign import Signature, StatutSignature
from schemas.documents.universign import (
Signature,
StatutSignature,
SyncStatsResponse,
CreateSignatureRequest,
TransactionResponse,
)
from schemas.articles.articles import (
ArticleCreate,
Article,
@ -53,6 +59,10 @@ from schemas.sage.sage_gateway import (
CurrentGatewayInfo,
)
from schemas.society.societe import SocieteInfo
from schemas.society.enterprise import EntrepriseSearch, EntrepriseSearchResponse
__all__ = [
"TiersDetails",
"TypeTiers",
@ -105,4 +115,10 @@ __all__ = [
"SageGatewayTest",
"SageGatewayStatsResponse",
"CurrentGatewayInfo",
"SyncStatsResponse",
"CreateSignatureRequest",
"TransactionResponse",
"SocieteInfo",
"EntrepriseSearch",
"EntrepriseSearchResponse",
]

77
schemas/api_key.py Normal file
View file

@ -0,0 +1,77 @@
from pydantic import BaseModel, Field
from typing import Optional, List
from datetime import datetime
class ApiKeyCreate(BaseModel):
    """Request schema for creating an API key."""

    # Human-readable key name (3-255 chars).
    name: str = Field(..., min_length=3, max_length=255, description="Nom de la clé")
    description: Optional[str] = Field(None, description="Description de l'usage")
    # None means the key never expires; otherwise capped at 10 years.
    expires_in_days: Optional[int] = Field(
        None, ge=1, le=3650, description="Expiration en jours (max 10 ans)"
    )
    rate_limit_per_minute: int = Field(
        60, ge=1, le=1000, description="Limite de requêtes par minute"
    )
    # None/[] grants access to every endpoint; entries may use wildcards.
    allowed_endpoints: Optional[List[str]] = Field(
        None, description="Endpoints autorisés ([] = tous, ['/clients*'] = wildcard)"
    )
class ApiKeyResponse(BaseModel):
    """Response schema for an API key (never exposes the secret itself).

    NOTE(review): unlike SwaggerUserResponse this model does not enable
    ``from_attributes`` — presumably instances are built field-by-field
    rather than from ORM objects; confirm against the router code.
    """

    id: str
    name: str
    description: Optional[str]
    # Only the key prefix is returned so the key can be identified safely.
    key_prefix: str
    is_active: bool
    is_expired: bool
    rate_limit_per_minute: int
    allowed_endpoints: Optional[List[str]]
    # Usage statistics.
    total_requests: int
    last_used_at: Optional[datetime]
    created_at: datetime
    expires_at: Optional[datetime]
    revoked_at: Optional[datetime]
    created_by: str
class ApiKeyCreatedResponse(ApiKeyResponse):
    """Response returned once at creation time (includes the plaintext key).

    The plaintext key is shown only here; the caller must save it
    immediately.
    """

    # Fix: drop the stray leading space in the description, a leftover from
    # the emoji-removal pass (it leaked into the generated OpenAPI docs).
    api_key: str = Field(
        ..., description="Clé API en clair - à sauvegarder immédiatement"
    )
class ApiKeyList(BaseModel):
    """Paginated-style wrapper: total count plus the list of API keys."""

    total: int
    items: List[ApiKeyResponse]
class SwaggerUserCreate(BaseModel):
    """Request schema for creating a Swagger (docs-access) user."""

    username: str = Field(..., min_length=3, max_length=100)
    # Plaintext at creation time; minimum 8 characters.
    password: str = Field(..., min_length=8)
    full_name: Optional[str] = None
    email: Optional[str] = None
class SwaggerUserResponse(BaseModel):
    """Response schema for a Swagger user (no password field)."""

    id: str
    username: str
    full_name: Optional[str]
    email: Optional[str]
    is_active: bool
    created_at: datetime
    last_login: Optional[datetime]

    class Config:
        # Allow building this model directly from ORM objects.
        from_attributes = True

View file

@ -76,7 +76,6 @@ class Article(BaseModel):
)
nb_emplacements: int = Field(0, description="Nombre d'emplacements")
# Champs énumérés normalisés
suivi_stock: Optional[int] = Field(
None,
description="Type de suivi de stock (AR_SuiviStock): 0=Aucun, 1=CMUP, 2=FIFO/LIFO, 3=Sérialisé",

View file

@ -4,6 +4,7 @@ from datetime import datetime
from schemas.documents.ligne_document import LigneDocument
class FactureCreate(BaseModel):
client_id: str
date_facture: Optional[datetime] = None

View file

@ -0,0 +1,109 @@
from pydantic import BaseModel, Field, field_validator
from typing import List, Optional
import logging
from decimal import Decimal
from datetime import date
logger = logging.getLogger(__name__)
class ReglementFactureCreate(BaseModel):
    """Single-invoice settlement request (VPS side)."""

    montant: Decimal = Field(..., gt=0, description="Montant à régler")
    devise_code: Optional[int] = Field(0, description="Code devise (0=EUR par défaut)")
    cours_devise: Optional[Decimal] = Field(1.0, description="Cours de la devise")
    mode_reglement: int = Field(
        ..., ge=0, description="Code mode règlement depuis /reglements/modes"
    )
    code_journal: str = Field(
        ..., min_length=1, description="Code journal depuis /journaux/tresorerie"
    )
    date_reglement: Optional[date] = Field(
        None, description="Date du règlement (défaut: aujourd'hui)"
    )
    date_echeance: Optional[date] = Field(None, description="Date d'échéance")
    # NOTE(review): the 17/35 max lengths look like Sage column widths — confirm.
    reference: Optional[str] = Field(
        "", max_length=17, description="Référence pièce règlement"
    )
    libelle: Optional[str] = Field(
        "", max_length=35, description="Libellé du règlement"
    )
    tva_encaissement: Optional[bool] = Field(
        False, description="Appliquer TVA sur encaissement"
    )
    compte_general: Optional[str] = Field(None)

    @field_validator("montant")
    def validate_montant(cls, v):
        """Reject non-positive amounts and normalise to 2 decimal places."""
        # The positivity check duplicates gt=0 on the Field; the validator's
        # real job is the rounding.
        if v <= 0:
            raise ValueError("Le montant doit être positif")
        return round(v, 2)

    class Config:
        json_schema_extra = {
            "example": {
                "montant": 375.12,
                "mode_reglement": 2,
                "reference": "CHQ-001",
                "code_journal": "BEU",
                "date_reglement": "2024-01-01",
                "libelle": "Règlement multiple",
                "tva_encaissement": False,
                "devise_code": 0,
                "cours_devise": 1.0,
                "date_echeance": "2024-01-31",
            }
        }
class ReglementMultipleCreate(BaseModel):
    """Multi-invoice settlement request (VPS side).

    When ``numeros_factures`` is omitted, the oldest open invoices are
    settled first (per the field description; the selection is done
    gateway-side).
    """

    client_id: str = Field(..., description="Code client")
    montant_total: Decimal = Field(..., gt=0)
    devise_code: Optional[int] = Field(0)
    cours_devise: Optional[Decimal] = Field(1.0)
    mode_reglement: int = Field(...)
    code_journal: str = Field(...)
    date_reglement: Optional[date] = None
    reference: Optional[str] = Field("")
    libelle: Optional[str] = Field("")
    tva_encaissement: Optional[bool] = Field(False)
    numeros_factures: Optional[List[str]] = Field(
        None, description="Si vide, règle les plus anciennes en premier"
    )

    @field_validator("client_id", mode="before")
    def strip_client_id(cls, v):
        """Strip whitespace and non-breaking spaces from pasted Sage codes."""
        return v.replace("\xa0", "").strip() if v else v

    @field_validator("montant_total")
    def validate_montant(cls, v):
        """Reject non-positive totals and normalise to 2 decimal places."""
        if v <= 0:
            raise ValueError("Le montant doit être positif")
        return round(v, 2)

    class Config:
        # Fix: the example previously included "date_echeance", which is NOT
        # a field of this model (it only exists on ReglementFactureCreate),
        # so the published OpenAPI example was invalid for this schema.
        json_schema_extra = {
            "example": {
                "client_id": "CLI000001",
                "montant_total": 1000.00,
                "mode_reglement": 2,
                "numeros_factures": ["FA00081", "FA00082"],
                "reference": "CHQ-001",
                "code_journal": "BEU",
                "date_reglement": "2024-01-01",
                "libelle": "Règlement multiple",
                "tva_encaissement": False,
                "devise_code": 0,
                "cours_devise": 1.0,
            }
        }

View file

@ -1,6 +1,12 @@
from pydantic import BaseModel, EmailStr
from enum import Enum
from schemas.documents.documents import TypeDocument
from database import (
SageDocumentType,
)
from typing import List, Optional
from datetime import datetime
class StatutSignature(str, Enum):
@ -16,3 +22,49 @@ class Signature(BaseModel):
type_doc: TypeDocument
email_signataire: EmailStr
nom_signataire: str
class CreateSignatureRequest(BaseModel):
    """Request to create a signature transaction for a Sage document."""

    sage_document_id: str
    sage_document_type: SageDocumentType
    signer_email: EmailStr
    signer_name: str
    # Optional display name for the document; a default is presumably
    # derived from the Sage document when omitted — confirm in the router.
    document_name: Optional[str] = None
class TransactionResponse(BaseModel):
    """Detailed view of a Universign signature transaction."""

    # Local DB id vs the Universign-side transaction id.
    id: str
    transaction_id: str
    sage_document_id: str
    sage_document_type: str
    universign_status: str
    local_status: str
    local_status_label: str
    signer_url: Optional[str]
    document_url: Optional[str]
    created_at: datetime
    sent_at: Optional[datetime]
    signed_at: Optional[datetime]
    last_synced_at: Optional[datetime]
    needs_sync: bool
    signers: List[dict]
    # True when the signed PDF exists on local disk.
    signed_document_available: bool = False
    signed_document_downloaded_at: Optional[datetime] = None
    # Size of the stored signed document, converted from bytes to KiB.
    signed_document_size_kb: Optional[float] = None
class SyncStatsResponse(BaseModel):
    """Global synchronisation statistics for Universign transactions."""

    total_transactions: int
    # Transactions still flagged as needing a sync pass.
    pending_sync: int
    # Per-status counts.
    signed: int
    in_progress: int
    refused: int
    expired: int
    last_sync_at: Optional[datetime]

View file

@ -10,7 +10,6 @@ class GatewayHealthStatus(str, Enum):
UNKNOWN = "unknown"
# === CREATE ===
class SageGatewayCreate(BaseModel):
name: str = Field(
@ -71,7 +70,6 @@ class SageGatewayUpdate(BaseModel):
return v.rstrip("/") if v else v
# === RESPONSE ===
class SageGatewayResponse(BaseModel):
id: str

View file

@ -0,0 +1,24 @@
from pydantic import BaseModel, Field
from typing import Optional, List
class EntrepriseSearch(BaseModel):
    """One company hit returned by the enterprise search."""

    company_name: str = Field(..., description="Raison sociale complète")
    siren: str = Field(..., description="Numéro SIREN (9 chiffres)")
    vat_number: str = Field(..., description="Numéro de TVA intracommunautaire")
    address: str = Field(..., description="Adresse complète du siège")
    naf_code: str = Field(..., description="Code NAF/APE")
    is_active: bool = Field(..., description="True si entreprise active")
    siret_siege: Optional[str] = Field(None, description="SIRET du siège")
    code_postal: Optional[str] = None
    ville: Optional[str] = None
class EntrepriseSearchResponse(BaseModel):
    """Top-level search response: match count, matches, and the original query."""
    total_results: int
    results: List[EntrepriseSearch]
    query: str

View file

@ -0,0 +1,46 @@
from pydantic import BaseModel
from typing import Optional, List
class ExerciceComptable(BaseModel):
    """One accounting period (exercise): sequence number and date bounds."""
    numero: int
    # Start/end as strings; `fin` is None, presumably for an open (current)
    # period — TODO confirm the date format with the Sage gateway.
    debut: str
    fin: Optional[str] = None
class SocieteInfo(BaseModel):
    """Company-level information for a Sage dossier."""
    # Identity
    raison_sociale: str
    numero_dossier: str
    siret: Optional[str] = None
    code_ape: Optional[str] = None
    numero_tva: Optional[str] = None
    # Address
    adresse: Optional[str] = None
    complement_adresse: Optional[str] = None
    code_postal: Optional[str] = None
    ville: Optional[str] = None
    code_region: Optional[str] = None
    pays: Optional[str] = None
    # Contact
    telephone: Optional[str] = None
    telecopie: Optional[str] = None
    email: Optional[str] = None
    email_societe: Optional[str] = None
    site_web: Optional[str] = None
    # Legal / financial
    capital: float = 0.0
    forme_juridique: Optional[str] = None
    # Accounting periods. A mutable [] default is safe on a pydantic model
    # (pydantic copies field defaults per instance).
    exercices: List[ExerciceComptable] = []
    # Accounting settings (currency codes, account lengths, FEC regime).
    devise_compte: int = 0
    devise_equivalent: int = 0
    longueur_compte_general: int = 0
    longueur_compte_analytique: int = 0
    regime_fec: int = 0
    base_modele: Optional[str] = None
    marqueur: int = 0
    # Company logo as base64 plus its MIME type, when present.
    logo_base64: Optional[str] = None
    logo_content_type: Optional[str] = None

View file

@ -9,7 +9,6 @@ class CollaborateurBase(BaseModel):
prenom: Optional[str] = Field(None, max_length=50)
fonction: Optional[str] = Field(None, max_length=50)
# Adresse
adresse: Optional[str] = Field(None, max_length=100)
complement: Optional[str] = Field(None, max_length=100)
code_postal: Optional[str] = Field(None, max_length=10)
@ -17,7 +16,6 @@ class CollaborateurBase(BaseModel):
code_region: Optional[str] = Field(None, max_length=50)
pays: Optional[str] = Field(None, max_length=50)
# Services
service: Optional[str] = Field(None, max_length=50)
vendeur: bool = Field(default=False)
caissier: bool = Field(default=False)
@ -25,18 +23,15 @@ class CollaborateurBase(BaseModel):
chef_ventes: bool = Field(default=False)
numero_chef_ventes: Optional[int] = None
# Contact
telephone: Optional[str] = Field(None, max_length=20)
telecopie: Optional[str] = Field(None, max_length=20)
email: Optional[EmailStr] = None
tel_portable: Optional[str] = Field(None, max_length=20)
# Réseaux sociaux
facebook: Optional[str] = Field(None, max_length=100)
linkedin: Optional[str] = Field(None, max_length=100)
skype: Optional[str] = Field(None, max_length=100)
# Autres
matricule: Optional[str] = Field(None, max_length=20)
sommeil: bool = Field(default=False)

View file

@ -14,7 +14,6 @@ class TypeTiersInt(IntEnum):
class TiersDetails(BaseModel):
# IDENTIFICATION
numero: Optional[str] = Field(None, description="Code tiers (CT_Num)")
intitule: Optional[str] = Field(
None, description="Raison sociale ou Nom complet (CT_Intitule)"
@ -37,7 +36,6 @@ class TiersDetails(BaseModel):
)
code_naf: Optional[str] = Field(None, description="Code NAF/APE (CT_Ape)")
# ADRESSE
contact: Optional[str] = Field(
None, description="Nom du contact principal (CT_Contact)"
)
@ -50,7 +48,6 @@ class TiersDetails(BaseModel):
region: Optional[str] = Field(None, description="Région/État (CT_CodeRegion)")
pays: Optional[str] = Field(None, description="Pays (CT_Pays)")
# TELECOM
telephone: Optional[str] = Field(None, description="Téléphone fixe (CT_Telephone)")
telecopie: Optional[str] = Field(None, description="Fax (CT_Telecopie)")
email: Optional[str] = Field(None, description="Email principal (CT_EMail)")
@ -58,13 +55,11 @@ class TiersDetails(BaseModel):
facebook: Optional[str] = Field(None, description="Profil Facebook (CT_Facebook)")
linkedin: Optional[str] = Field(None, description="Profil LinkedIn (CT_LinkedIn)")
# TAUX
taux01: Optional[float] = Field(None, description="Taux personnalisé 1 (CT_Taux01)")
taux02: Optional[float] = Field(None, description="Taux personnalisé 2 (CT_Taux02)")
taux03: Optional[float] = Field(None, description="Taux personnalisé 3 (CT_Taux03)")
taux04: Optional[float] = Field(None, description="Taux personnalisé 4 (CT_Taux04)")
# STATISTIQUES
statistique01: Optional[str] = Field(
None, description="Statistique 1 (CT_Statistique01)"
)
@ -96,7 +91,6 @@ class TiersDetails(BaseModel):
None, description="Statistique 10 (CT_Statistique10)"
)
# COMMERCIAL
encours_autorise: Optional[float] = Field(
None, description="Encours maximum autorisé (CT_Encours)"
)
@ -113,7 +107,6 @@ class TiersDetails(BaseModel):
None, description="Détails du commercial/collaborateur"
)
# FACTURATION
lettrage_auto: Optional[bool] = Field(
None, description="Lettrage automatique (CT_Lettrage)"
)
@ -146,7 +139,6 @@ class TiersDetails(BaseModel):
None, description="Bon à payer obligatoire (CT_BonAPayer)"
)
# LOGISTIQUE
priorite_livraison: Optional[int] = Field(
None, description="Priorité livraison (CT_PrioriteLivr)"
)
@ -160,17 +152,14 @@ class TiersDetails(BaseModel):
None, description="Délai appro jours (CT_DelaiAppro)"
)
# COMMENTAIRE
commentaire: Optional[str] = Field(
None, description="Commentaire libre (CT_Commentaire)"
)
# ANALYTIQUE
section_analytique: Optional[str] = Field(
None, description="Section analytique (CA_Num)"
)
# ORGANISATION / SURVEILLANCE
mode_reglement_code: Optional[int] = Field(
None, description="Code mode règlement (MR_No)"
)
@ -200,7 +189,6 @@ class TiersDetails(BaseModel):
None, description="Résultat financier (CT_SvResultat)"
)
# COMPTE GENERAL ET CATEGORIES
compte_general: Optional[str] = Field(
None, description="Compte général principal (CG_NumPrinc)"
)
@ -211,7 +199,6 @@ class TiersDetails(BaseModel):
None, description="Catégorie comptable (N_CatCompta)"
)
# CONTACTS
contacts: Optional[List[Contact]] = Field(
default_factory=list, description="Liste des contacts du tiers"
)

651
scripts/manage_security.py Normal file
View file

@ -0,0 +1,651 @@
import sys
import os
from pathlib import Path
import asyncio
import argparse
import logging
from datetime import datetime
from typing import Optional, List
import json
from sqlalchemy import select
# --- Bootstrap: make the app package importable no matter where the script is
# --- launched from, and fail fast with a helpful message otherwise.
_current_file = Path(__file__).resolve()
_script_dir = _current_file.parent
_app_dir = _script_dir.parent
print(f"DEBUG: Script path: {_current_file}")
print(f"DEBUG: App dir: {_app_dir}")
print(f"DEBUG: Current working dir: {os.getcwd()}")
# Force the app dir to the FRONT of sys.path (remove first to avoid a stale
# duplicate entry shadowing it further down the list).
if str(_app_dir) in sys.path:
    sys.path.remove(str(_app_dir))
sys.path.insert(0, str(_app_dir))
# chdir so relative paths (config files, sqlite URLs, ...) resolve from the
# app root regardless of the caller's working directory.
os.chdir(str(_app_dir))
print(f"DEBUG: sys.path[0]: {sys.path[0]}")
print(f"DEBUG: New working dir: {os.getcwd()}")
# Probe each expected package individually so an import failure pinpoints
# exactly which package is missing or broken.
_test_imports = [
    "database",
    "database.db_config",
    "database.models",
    "services",
    "security",
]
print("\nDEBUG: Vérification des imports...")
for module in _test_imports:
    try:
        __import__(module)
        print(f"{module}")
    except ImportError as e:
        print(f"{module}: {e}")
try:
    from database.db_config import async_session_factory
    from database.models.api_key import SwaggerUser, ApiKey
    from services.api_key import ApiKeyService
    from security.auth import hash_password
except ImportError as e:
    print(f"\n ERREUR D'IMPORT: {e}")
    print(" Vérifiez que vous êtes dans /app")
    print(" Commande correcte: cd /app && python scripts/manage_security.py ...")
    sys.exit(1)
logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
logger = logging.getLogger(__name__)
# OpenAPI tags that can be granted to Swagger users, mapped to display labels.
# NOTE(review): a few labels start with a bare space (e.g. "Authentication",
# "Workflows", "Validation") — looks like a leftover from an emoji-removal
# pass; confirm whether the leading space is intentional.
AVAILABLE_TAGS = {
    "Authentication": " Authentification et gestion des comptes",
    "API Keys Management": "🔑 Gestion des clés API",
    "Clients": "👥 Gestion des clients",
    "Fournisseurs": "🏭 Gestion des fournisseurs",
    "Prospects": "🎯 Gestion des prospects",
    "Tiers": "📋 Gestion générale des tiers",
    "Contacts": "📞 Contacts des tiers",
    "Articles": "📦 Catalogue articles",
    "Familles": "🏷️ Familles d'articles",
    "Stock": "📊 Mouvements de stock",
    "Devis": "📄 Devis",
    "Commandes": "🛒 Commandes",
    "Livraisons": "🚚 Bons de livraison",
    "Factures": "💰 Factures",
    "Avoirs": "↩️ Avoirs",
    "Règlements": "💳 Règlements et encaissements",
    "Workflows": " Transformations de documents",
    "Documents": "📑 Gestion documents (PDF)",
    "Emails": "📧 Envoi d'emails",
    "Validation": " Validations métier",
    "Collaborateurs": "👔 Collaborateurs internes",
    "Société": "🏢 Informations société",
    "Référentiels": "📚 Données de référence",
    "System": "⚙️ Système et santé",
    "Admin": "🛠️ Administration",
    "Debug": "🐛 Debug et diagnostics",
}
# Ready-made tag bundles for common user roles; referenced by the --preset
# CLI option. Each value must only contain keys from AVAILABLE_TAGS.
PRESET_PROFILES = {
    "commercial": [
        "Clients",
        "Contacts",
        "Devis",
        "Commandes",
        "Factures",
        "Articles",
        "Documents",
        "Emails",
    ],
    "comptable": [
        "Clients",
        "Fournisseurs",
        "Factures",
        "Avoirs",
        "Règlements",
        "Documents",
        "Emails",
    ],
    "logistique": [
        "Articles",
        "Stock",
        "Commandes",
        "Livraisons",
        "Fournisseurs",
        "Documents",
    ],
    "readonly": ["Clients", "Articles", "Devis", "Commandes", "Factures", "Documents"],
    "developer": [
        "Authentication",
        "API Keys Management",
        "System",
        "Clients",
        "Articles",
        "Devis",
        "Commandes",
        "Factures",
    ],
}
async def add_swagger_user(
    username: str,
    password: str,
    full_name: Optional[str] = None,
    tags: Optional[List[str]] = None,
    preset: Optional[str] = None,
):
    """Create a Swagger UI user with optional tag-based access restrictions.

    Args:
        username: Unique login name; creation aborts if it already exists.
        password: Plain-text password, stored hashed via hash_password().
        full_name: Display name; falls back to the username when omitted.
        tags: Allowed OpenAPI tags; None/empty means FULL admin access.
        preset: PRESET_PROFILES key; when given, it overrides `tags`.
    """
    async with async_session_factory() as session:
        # Refuse duplicates — usernames are the lookup key for Swagger auth.
        result = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        existing = result.scalar_one_or_none()
        if existing:
            logger.error(f" L'utilisateur '{username}' existe déjà")
            return
        if preset:
            if preset not in PRESET_PROFILES:
                logger.error(
                    f" Preset '{preset}' inconnu. Disponibles: {list(PRESET_PROFILES.keys())}"
                )
                return
            # A preset replaces any explicitly supplied tags.
            tags = PRESET_PROFILES[preset]
            logger.info(f"📋 Application du preset '{preset}': {len(tags)} tags")
        swagger_user = SwaggerUser(
            username=username,
            hashed_password=hash_password(password),
            full_name=full_name or username,
            is_active=True,
            # None (no tags stored) is interpreted downstream as unrestricted
            # admin access to the docs.
            allowed_tags=json.dumps(tags) if tags else None,
        )
        session.add(swagger_user)
        await session.commit()
        logger.info(f" Utilisateur Swagger créé: {username}")
        logger.info(f" Nom complet: {swagger_user.full_name}")
        if tags:
            logger.info(f" 🏷️ Tags autorisés ({len(tags)}):")
            for tag in tags:
                desc = AVAILABLE_TAGS.get(tag, "")
                logger.info(f"{tag} {desc}")
        else:
            logger.info(" 👑 Accès ADMIN COMPLET (tous les tags)")
async def list_swagger_users():
    """List every Swagger user with identity, status, tags and auth schemes.

    Output is purely informational (logged); the auth-scheme lines describe
    which security schemes the filtered docs will expose for that user.
    """
    async with async_session_factory() as session:
        result = await session.execute(select(SwaggerUser))
        users = result.scalars().all()
        if not users:
            logger.info("🔭 Aucun utilisateur Swagger")
            return
        logger.info(f"\n👥 {len(users)} utilisateur(s) Swagger:\n")
        logger.info("=" * 80)
        for user in users:
            status = " ACTIF" if user.is_active else " NON ACTIF"
            logger.info(f"\n{status} {user.username}")
            logger.info(f"📛 Nom: {user.full_name}")
            logger.info(f"🆔 ID: {user.id}")
            logger.info(f"📅 Créé: {user.created_at}")
            logger.info(f"🕐 Dernière connexion: {user.last_login or 'Jamais'}")
            if user.allowed_tags:
                try:
                    tags = json.loads(user.allowed_tags)
                    if tags:
                        logger.info(f"🏷️ Tags autorisés ({len(tags)}):")
                        for tag in tags:
                            desc = AVAILABLE_TAGS.get(tag, "")
                            logger.info(f"{tag} {desc}")
                        # Derive the auth schemes shown in the filtered docs.
                        # NOTE(review): the `len(tags) > 3` heuristic presumably
                        # mirrors the docs middleware — confirm they stay in sync.
                        auth_schemes = []
                        if "Authentication" in tags:
                            auth_schemes.append("JWT (Bearer)")
                        if "API Keys Management" in tags or len(tags) > 3:
                            auth_schemes.append("X-API-Key")
                        if not auth_schemes:
                            auth_schemes.append("JWT (Bearer)")
                        logger.info(
                            f" Authentification autorisée: {', '.join(auth_schemes)}"
                        )
                    else:
                        # An empty stored list is treated like no restriction.
                        logger.info("👑 Tags autorisés: ADMIN COMPLET (tous)")
                        logger.info(" Authentification: JWT + X-API-Key (tout)")
                except json.JSONDecodeError:
                    logger.info(" Tags: Erreur format")
            else:
                logger.info("👑 Tags autorisés: ADMIN COMPLET (tous)")
                logger.info(" Authentification: JWT + X-API-Key (tout)")
        logger.info("\n" + "=" * 80)
async def update_swagger_user(
    username: str,
    add_tags: Optional[List[str]] = None,
    remove_tags: Optional[List[str]] = None,
    set_tags: Optional[List[str]] = None,
    preset: Optional[str] = None,
    active: Optional[bool] = None,
):
    """Update a Swagger user's allowed tags and/or active status.

    Tag-change precedence: preset > set_tags > add_tags/remove_tags — only the
    highest-priority option supplied is applied. `active` is independent.
    """
    async with async_session_factory() as session:
        result = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        user = result.scalar_one_or_none()
        if not user:
            logger.error(f" Utilisateur '{username}' introuvable")
            return
        modified = False
        if preset:
            if preset not in PRESET_PROFILES:
                logger.error(f" Preset '{preset}' inconnu")
                return
            user.allowed_tags = json.dumps(PRESET_PROFILES[preset])
            logger.info(f"📋 Preset '{preset}' appliqué")
            modified = True
        elif set_tags is not None:
            # NOTE(review): an empty --set-tags list stores None, which the
            # rest of the system treats as FULL admin access — confirm this
            # privilege escalation is intended.
            user.allowed_tags = json.dumps(set_tags) if set_tags else None
            logger.info(f" Tags remplacés: {len(set_tags) if set_tags else 0}")
            modified = True
        elif add_tags or remove_tags:
            # Start from the currently stored list, tolerating malformed JSON.
            current_tags = []
            if user.allowed_tags:
                try:
                    current_tags = json.loads(user.allowed_tags)
                except json.JSONDecodeError:
                    current_tags = []
            if add_tags:
                for tag in add_tags:
                    if tag not in current_tags:
                        current_tags.append(tag)
                        logger.info(f" Tag ajouté: {tag}")
                        modified = True
            if remove_tags:
                for tag in remove_tags:
                    if tag in current_tags:
                        current_tags.remove(tag)
                        logger.info(f" Tag retiré: {tag}")
                        modified = True
            user.allowed_tags = json.dumps(current_tags) if current_tags else None
        if active is not None:
            user.is_active = active
            logger.info(f" Statut: {'ACTIF' if active else 'INACTIF'}")
            modified = True
        if modified:
            await session.commit()
            logger.info(f" Utilisateur '{username}' mis à jour")
        else:
            logger.info(" Aucune modification effectuée")
async def delete_swagger_user(username: str):
    """Delete the Swagger user with the given username, logging the outcome."""
    async with async_session_factory() as session:
        query = select(SwaggerUser).where(SwaggerUser.username == username)
        target = (await session.execute(query)).scalar_one_or_none()
        if target is None:
            logger.error(f" Utilisateur '{username}' introuvable")
            return
        await session.delete(target)
        await session.commit()
        logger.info(f"🗑️ Utilisateur Swagger supprimé: {username}")
async def list_available_tags():
    """Log every known tag (with its label) and every preset profile."""
    separator = "=" * 80
    logger.info("\n🏷️ TAGS DISPONIBLES:\n")
    logger.info(separator)
    for name, description in AVAILABLE_TAGS.items():
        logger.info(f" {description}")
        logger.info(f" Nom: {name}\n")
    logger.info(separator)
    logger.info("\n📦 PRESETS DISPONIBLES:\n")
    for profile, profile_tags in PRESET_PROFILES.items():
        logger.info(f" {profile}:")
        logger.info(f" {', '.join(profile_tags)}\n")
    logger.info(separator)
async def create_api_key(
    name: str,
    description: Optional[str] = None,
    expires_in_days: int = 365,
    rate_limit: int = 60,
    endpoints: Optional[list] = None,
):
    """Create an API key via ApiKeyService and print the plain key ONCE.

    Args:
        name: Human-readable key name.
        description: Optional free-text description.
        expires_in_days: Validity window (default: one year).
        rate_limit: Allowed requests per minute.
        endpoints: Optional whitelist of endpoint paths; None = unrestricted.
    """
    async with async_session_factory() as session:
        service = ApiKeyService(session)
        # The service returns both the persisted row and the plain key —
        # the plain value is only available here, never stored.
        api_key_obj, api_key_plain = await service.create_api_key(
            name=name,
            description=description,
            created_by="cli",
            expires_in_days=expires_in_days,
            rate_limit_per_minute=rate_limit,
            allowed_endpoints=endpoints,
        )
        logger.info("=" * 70)
        logger.info("🔑 Clé API créée avec succès")
        logger.info("=" * 70)
        logger.info(f" ID: {api_key_obj.id}")
        logger.info(f" Nom: {api_key_obj.name}")
        logger.info(f" Clé: {api_key_plain}")
        logger.info(f" Préfixe: {api_key_obj.key_prefix}")
        logger.info(f" Rate limit: {api_key_obj.rate_limit_per_minute} req/min")
        logger.info(f" Expire le: {api_key_obj.expires_at}")
        if api_key_obj.allowed_endpoints:
            try:
                endpoints_list = json.loads(api_key_obj.allowed_endpoints)
                logger.info(f" Endpoints: {', '.join(endpoints_list)}")
            except Exception:
                # Not valid JSON: show the stored value raw.
                logger.info(f" Endpoints: {api_key_obj.allowed_endpoints}")
        else:
            logger.info(" Endpoints: Tous (aucune restriction)")
        logger.info("=" * 70)
        logger.info(" SAUVEGARDEZ CETTE CLÉ - Elle ne sera plus affichée !")
        logger.info("=" * 70)
async def list_api_keys():
    """List all API keys with validity, usage stats and endpoint restrictions."""
    async with async_session_factory() as session:
        service = ApiKeyService(session)
        keys = await service.list_api_keys()
        if not keys:
            logger.info("🔭 Aucune clé API")
            return
        logger.info(f"🔑 {len(keys)} clé(s) API:\n")
        for key in keys:
            # NOTE(review): naive datetime.now() comparison — assumes
            # expires_at is stored as a naive local timestamp; confirm.
            is_valid = key.is_active and (
                not key.expires_at or key.expires_at > datetime.now()
            )
            # NOTE(review): both status markers are empty strings (emoji
            # removal leftover?) so valid/invalid is currently not visible.
            status = "" if is_valid else ""
            logger.info(f" {status} {key.name:<30} ({key.key_prefix}...)")
            logger.info(f" ID: {key.id}")
            logger.info(f" Rate limit: {key.rate_limit_per_minute} req/min")
            logger.info(f" Requêtes: {key.total_requests}")
            logger.info(f" Expire: {key.expires_at or 'Jamais'}")
            logger.info(f" Dernière utilisation: {key.last_used_at or 'Jamais'}")
            if key.allowed_endpoints:
                try:
                    endpoints = json.loads(key.allowed_endpoints)
                    # Show at most 4 endpoints, then a "+N" overflow suffix.
                    display = ", ".join(endpoints[:4])
                    if len(endpoints) > 4:
                        display += f"... (+{len(endpoints) - 4})"
                    logger.info(f" Endpoints: {display}")
                except Exception:
                    # Malformed restriction JSON: skip the detail line.
                    pass
            else:
                logger.info(" Endpoints: Tous")
            logger.info("")
async def revoke_api_key(key_id: str):
    """Deactivate the API key with the given id and stamp its revocation time."""
    async with async_session_factory() as session:
        lookup = await session.execute(select(ApiKey).where(ApiKey.id == key_id))
        api_key = lookup.scalar_one_or_none()
        if api_key is None:
            logger.error(f" Clé API '{key_id}' introuvable")
            return
        api_key.is_active = False
        api_key.revoked_at = datetime.now()
        await session.commit()
        logger.info(f"🗑️ Clé API révoquée: {api_key.name}")
        logger.info(f" ID: {api_key.id}")
async def verify_api_key(api_key: str):
    """Validate a plain API key against the store and print its metadata."""
    async with async_session_factory() as session:
        service = ApiKeyService(session)
        key = await service.verify_api_key(api_key)
        if not key:
            logger.error(" Clé API invalide ou expirée")
            return
        logger.info("=" * 60)
        logger.info(" Clé API valide")
        logger.info("=" * 60)
        logger.info(f" Nom: {key.name}")
        logger.info(f" ID: {key.id}")
        logger.info(f" Rate limit: {key.rate_limit_per_minute} req/min")
        logger.info(f" Requêtes totales: {key.total_requests}")
        logger.info(f" Expire: {key.expires_at or 'Jamais'}")
        if key.allowed_endpoints:
            try:
                endpoints = json.loads(key.allowed_endpoints)
                logger.info(f" Endpoints autorisés: {endpoints}")
            except Exception:
                # Malformed restriction JSON: skip the detail line.
                pass
        else:
            logger.info(" Endpoints autorisés: Tous")
        logger.info("=" * 60)
async def main():
    """Parse CLI arguments and dispatch to the Swagger / API-key commands."""
    parser = argparse.ArgumentParser(
        description="Gestion avancée des utilisateurs Swagger et clés API",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
EXEMPLES D'UTILISATION:
=== UTILISATEURS SWAGGER ===
1. Créer un utilisateur avec preset:
python scripts/manage_security.py swagger add commercial Pass123! --preset commercial
2. Créer un admin complet:
python scripts/manage_security.py swagger add admin AdminPass
3. Créer avec tags spécifiques:
python scripts/manage_security.py swagger add client Pass123! --tags Clients Devis Factures
4. Mettre à jour un utilisateur (ajouter des tags):
python scripts/manage_security.py swagger update client --add-tags Commandes Livraisons
5. Changer complètement les tags:
python scripts/manage_security.py swagger update client --set-tags Clients Articles
6. Appliquer un preset:
python scripts/manage_security.py swagger update client --preset comptable
7. Lister les tags disponibles:
python scripts/manage_security.py swagger tags
8. Désactiver temporairement:
python scripts/manage_security.py swagger update client --inactive
=== CLÉS API ===
9. Créer une clé API:
python scripts/manage_security.py apikey create "Mon App" --days 365 --rate-limit 100
10. Créer avec endpoints restreints:
python scripts/manage_security.py apikey create "SDK-ReadOnly" --endpoints "/clients" "/clients/*" "/devis" "/devis/*"
11. Lister les clés:
python scripts/manage_security.py apikey list
12. Vérifier une clé:
python scripts/manage_security.py apikey verify sdk_live_xxxxx
13. Révoquer une clé:
python scripts/manage_security.py apikey revoke <key_id>
""",
    )
    subparsers = parser.add_subparsers(dest="command", help="Commandes")
    # --- "swagger" command group ---
    swagger_parser = subparsers.add_parser("swagger", help="Gestion Swagger")
    swagger_sub = swagger_parser.add_subparsers(dest="swagger_command")
    add_p = swagger_sub.add_parser("add", help="Ajouter utilisateur")
    add_p.add_argument("username", help="Nom d'utilisateur")
    add_p.add_argument("password", help="Mot de passe")
    add_p.add_argument("--full-name", help="Nom complet", default=None)
    add_p.add_argument(
        "--tags",
        nargs="*",
        help="Tags autorisés. Vide = admin complet",
        default=None,
    )
    add_p.add_argument(
        "--preset",
        choices=list(PRESET_PROFILES.keys()),
        help="Appliquer un preset de tags",
    )
    update_p = swagger_sub.add_parser("update", help="Mettre à jour utilisateur")
    update_p.add_argument("username", help="Nom d'utilisateur")
    update_p.add_argument("--add-tags", nargs="+", help="Ajouter des tags")
    update_p.add_argument("--remove-tags", nargs="+", help="Retirer des tags")
    update_p.add_argument("--set-tags", nargs="*", help="Définir les tags (remplace)")
    update_p.add_argument(
        "--preset", choices=list(PRESET_PROFILES.keys()), help="Appliquer preset"
    )
    update_p.add_argument("--active", action="store_true", help="Activer l'utilisateur")
    update_p.add_argument(
        "--inactive", action="store_true", help="Désactiver l'utilisateur"
    )
    swagger_sub.add_parser("list", help="Lister utilisateurs")
    del_p = swagger_sub.add_parser("delete", help="Supprimer utilisateur")
    del_p.add_argument("username", help="Nom d'utilisateur")
    swagger_sub.add_parser("tags", help="Lister les tags disponibles")
    # --- "apikey" command group ---
    apikey_parser = subparsers.add_parser("apikey", help="Gestion clés API")
    apikey_sub = apikey_parser.add_subparsers(dest="apikey_command")
    create_p = apikey_sub.add_parser("create", help="Créer clé API")
    create_p.add_argument("name", help="Nom de la clé")
    create_p.add_argument("--description", help="Description")
    create_p.add_argument("--days", type=int, default=365, help="Expiration (jours)")
    create_p.add_argument("--rate-limit", type=int, default=60, help="Req/min")
    create_p.add_argument("--endpoints", nargs="+", help="Endpoints autorisés")
    apikey_sub.add_parser("list", help="Lister clés")
    rev_p = apikey_sub.add_parser("revoke", help="Révoquer clé")
    rev_p.add_argument("key_id", help="ID de la clé")
    ver_p = apikey_sub.add_parser("verify", help="Vérifier clé")
    ver_p.add_argument("api_key", help="Clé API complète")
    args = parser.parse_args()
    if not args.command:
        parser.print_help()
        return
    # --- dispatch ---
    if args.command == "swagger":
        if args.swagger_command == "add":
            await add_swagger_user(
                args.username,
                args.password,
                args.full_name,
                args.tags,
                args.preset,
            )
        elif args.swagger_command == "update":
            # --active wins over --inactive when both flags are passed.
            active = None
            if args.active:
                active = True
            elif args.inactive:
                active = False
            await update_swagger_user(
                args.username,
                add_tags=args.add_tags,
                remove_tags=args.remove_tags,
                set_tags=args.set_tags,
                preset=args.preset,
                active=active,
            )
        elif args.swagger_command == "list":
            await list_swagger_users()
        elif args.swagger_command == "delete":
            await delete_swagger_user(args.username)
        elif args.swagger_command == "tags":
            await list_available_tags()
        else:
            swagger_parser.print_help()
    elif args.command == "apikey":
        if args.apikey_command == "create":
            await create_api_key(
                name=args.name,
                description=args.description,
                expires_in_days=args.days,
                rate_limit=args.rate_limit,
                endpoints=args.endpoints,
            )
        elif args.apikey_command == "list":
            await list_api_keys()
        elif args.apikey_command == "revoke":
            await revoke_api_key(args.key_id)
        elif args.apikey_command == "verify":
            await verify_api_key(args.api_key)
        else:
            apikey_parser.print_help()
if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        # Ctrl-C is a normal way to leave the CLI: exit cleanly with 0.
        print("\n Interrupted")
        sys.exit(0)
    except Exception as e:
        # Any other failure: log, print the traceback, and exit non-zero.
        logger.error(f" Erreur: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)

354
scripts/test_security.py Normal file
View file

@ -0,0 +1,354 @@
import requests
import argparse
import sys
from typing import Tuple
class SecurityTester:
    """Black-box security test harness for a running API instance.

    Each ``test_*`` method issues real HTTP requests against ``base_url``,
    records the outcome in ``self.results`` via ``log_test`` and returns a
    boolean. ``print_summary`` produces the final report and an exit code.
    """

    def __init__(self, base_url: str):
        # Normalize so endpoint paths can always be appended with a leading "/".
        self.base_url = base_url.rstrip("/")
        self.results = {"passed": 0, "failed": 0, "tests": []}

    def log_test(self, name: str, passed: bool, details: str = ""):
        """Record and print the result of a single test."""
        status = " PASS" if passed else " FAIL"
        print(f"{status} - {name}")
        if details:
            print(f" {details}")
        self.results["tests"].append(
            {"name": name, "passed": passed, "details": details}
        )
        if passed:
            self.results["passed"] += 1
        else:
            self.results["failed"] += 1

    def test_swagger_without_auth(self) -> bool:
        """Test 1: Swagger UI must require authentication (expect 401)."""
        print("\n Test 1: Protection Swagger UI")
        try:
            response = requests.get(f"{self.base_url}/docs", timeout=5)
            if response.status_code == 401:
                self.log_test(
                    "Swagger protégé",
                    True,
                    "Code 401 retourné sans authentification",
                )
                return True
            else:
                self.log_test(
                    "Swagger protégé",
                    False,
                    f"Code {response.status_code} au lieu de 401",
                )
                return False
        except Exception as e:
            self.log_test("Swagger protégé", False, f"Erreur: {str(e)}")
            return False

    def test_swagger_with_auth(self, username: str, password: str) -> bool:
        """Test 2: Swagger UI must be reachable with valid Basic credentials."""
        print("\n Test 2: Accès Swagger avec authentification")
        try:
            response = requests.get(
                f"{self.base_url}/docs", auth=(username, password), timeout=5
            )
            if response.status_code == 200:
                self.log_test(
                    "Accès Swagger avec auth",
                    True,
                    f"Authentifié comme {username}",
                )
                return True
            else:
                self.log_test(
                    "Accès Swagger avec auth",
                    False,
                    f"Code {response.status_code}, credentials invalides?",
                )
                return False
        except Exception as e:
            self.log_test("Accès Swagger avec auth", False, f"Erreur: {str(e)}")
            return False

    def test_api_without_auth(self) -> bool:
        """Test 3: every sampled API endpoint must return 401 without auth."""
        print("\n Test 3: Protection des endpoints API")
        test_endpoints = ["/api/v1/clients", "/api/v1/documents"]
        all_protected = True
        for endpoint in test_endpoints:
            try:
                response = requests.get(f"{self.base_url}{endpoint}", timeout=5)
                if response.status_code == 401:
                    print(f" {endpoint} protégé (401)")
                else:
                    print(
                        f" {endpoint} accessible sans auth (code {response.status_code})"
                    )
                    all_protected = False
            except Exception as e:
                # A network error counts as a failure: we could not prove protection.
                print(f" {endpoint} erreur: {str(e)}")
                all_protected = False
        self.log_test("Endpoints API protégés", all_protected)
        return all_protected

    def test_health_endpoint_public(self) -> bool:
        """Test 4: /health must stay publicly accessible (expect 200)."""
        print("\n Test 4: Endpoint /health public")
        try:
            response = requests.get(f"{self.base_url}/health", timeout=5)
            if response.status_code == 200:
                self.log_test("/health accessible", True, "Endpoint public fonctionne")
                return True
            else:
                self.log_test(
                    "/health accessible",
                    False,
                    f"Code {response.status_code} inattendu",
                )
                return False
        except Exception as e:
            self.log_test("/health accessible", False, f"Erreur: {str(e)}")
            return False

    def test_api_key_creation(self, username: str, password: str) -> Tuple[bool, str]:
        """Test 5: create an API key through the endpoint.

        Logs in to obtain a JWT, then POSTs a key-creation request.
        Returns (success, plain_api_key); the key is "" on failure.
        """
        print("\n Test 5: Création d'une clé API")
        try:
            login_response = requests.post(
                f"{self.base_url}/api/v1/auth/login",
                json={"email": username, "password": password},
                timeout=5,
            )
            if login_response.status_code != 200:
                self.log_test(
                    "Création clé API",
                    False,
                    "Impossible de se connecter pour obtenir un JWT",
                )
                return False, ""
            jwt_token = login_response.json().get("access_token")
            create_response = requests.post(
                f"{self.base_url}/api/v1/api-keys",
                headers={"Authorization": f"Bearer {jwt_token}"},
                json={
                    "name": "Test API Key",
                    "description": "Clé de test automatisé",
                    "rate_limit_per_minute": 60,
                    "expires_in_days": 30,
                },
                timeout=5,
            )
            if create_response.status_code == 201:
                api_key = create_response.json().get("api_key")
                # FIX: a 201 response without an "api_key" field used to crash
                # on api_key[:20] (TypeError on None); treat it as a failure.
                if not api_key:
                    self.log_test(
                        "Création clé API",
                        False,
                        "Réponse 201 sans champ 'api_key'",
                    )
                    return False, ""
                self.log_test("Création clé API", True, f"Clé créée: {api_key[:20]}...")
                return True, api_key
            else:
                self.log_test(
                    "Création clé API",
                    False,
                    f"Code {create_response.status_code}",
                )
                return False, ""
        except Exception as e:
            self.log_test("Création clé API", False, f"Erreur: {str(e)}")
            return False, ""

    def test_api_key_usage(self, api_key: str) -> bool:
        """Test 6: a freshly created key must be accepted via X-API-Key."""
        print("\n Test 6: Utilisation d'une clé API")
        if not api_key:
            self.log_test("Utilisation clé API", False, "Pas de clé disponible")
            return False
        try:
            response = requests.get(
                f"{self.base_url}/api/v1/clients",
                headers={"X-API-Key": api_key},
                timeout=5,
            )
            if response.status_code == 200:
                self.log_test("Utilisation clé API", True, "Clé acceptée")
                return True
            else:
                self.log_test(
                    "Utilisation clé API",
                    False,
                    f"Code {response.status_code}, clé refusée?",
                )
                return False
        except Exception as e:
            self.log_test("Utilisation clé API", False, f"Erreur: {str(e)}")
            return False

    def test_invalid_api_key(self) -> bool:
        """Test 7: a bogus key must be rejected with 401."""
        print("\n Test 7: Rejet de clé API invalide")
        invalid_key = "sdk_live_invalid_key_12345"
        try:
            response = requests.get(
                f"{self.base_url}/api/v1/clients",
                headers={"X-API-Key": invalid_key},
                timeout=5,
            )
            if response.status_code == 401:
                self.log_test("Clé invalide rejetée", True, "Code 401 comme attendu")
                return True
            else:
                self.log_test(
                    "Clé invalide rejetée",
                    False,
                    f"Code {response.status_code} au lieu de 401",
                )
                return False
        except Exception as e:
            self.log_test("Clé invalide rejetée", False, f"Erreur: {str(e)}")
            return False

    def test_rate_limiting(self, api_key: str) -> bool:
        """Test 8: fire 70 quick requests and watch for a 429.

        Passes either way when a key is available: absence of a 429 may simply
        mean rate limiting is not enforced on /health.
        """
        print("\n Test 8: Rate limiting (test simple)")
        if not api_key:
            self.log_test("Rate limiting", False, "Pas de clé disponible")
            return False
        print(" Envoi de 70 requêtes rapides...")
        rate_limited = False
        for i in range(70):
            try:
                response = requests.get(
                    f"{self.base_url}/health",
                    headers={"X-API-Key": api_key},
                    timeout=1,
                )
                if response.status_code == 429:
                    rate_limited = True
                    print(f" Rate limit atteint à la requête {i + 1}")
                    break
            except Exception:
                # Transient network errors are ignored; we only probe for 429.
                pass
        if rate_limited:
            self.log_test("Rate limiting", True, "Rate limit détecté")
            return True
        else:
            self.log_test(
                "Rate limiting",
                True,
                "Aucun rate limit détecté (peut être normal si pas implémenté)",
            )
            return True

    def print_summary(self):
        """Print the aggregate results; return 0 if all passed, else 1 (exit code)."""
        print("\n" + "=" * 60)
        print(" RÉSUMÉ DES TESTS")
        print("=" * 60)
        total = self.results["passed"] + self.results["failed"]
        success_rate = (self.results["passed"] / total * 100) if total > 0 else 0
        print(f"\nTotal: {total} tests")
        print(f" Réussis: {self.results['passed']}")
        print(f" Échoués: {self.results['failed']}")
        print(f"Taux de réussite: {success_rate:.1f}%\n")
        if self.results["failed"] == 0:
            print("🎉 Tous les tests sont passés ! Sécurité OK.")
            return 0
        else:
            print(" Certains tests ont échoué. Vérifiez la configuration.")
            return 1
def main():
    """CLI entry point: run the full security test suite against a live API."""
    parser = argparse.ArgumentParser(
        description="Test automatisé de la sécurité de l'API"
    )
    parser.add_argument(
        "--url",
        required=True,
        help="URL de base de l'API (ex: http://localhost:8000)",
    )
    parser.add_argument(
        "--swagger-user", required=True, help="Utilisateur Swagger pour les tests"
    )
    parser.add_argument(
        "--swagger-pass", required=True, help="Mot de passe Swagger pour les tests"
    )
    parser.add_argument(
        "--skip-rate-limit",
        action="store_true",
        help="Sauter le test de rate limiting (long)",
    )
    args = parser.parse_args()
    print(" Démarrage des tests de sécurité")
    print(f" URL cible: {args.url}\n")
    tester = SecurityTester(args.url)
    # Unauthenticated checks first, then authenticated / key-based flows.
    tester.test_swagger_without_auth()
    tester.test_swagger_with_auth(args.swagger_user, args.swagger_pass)
    tester.test_api_without_auth()
    tester.test_health_endpoint_public()
    success, api_key = tester.test_api_key_creation(
        args.swagger_user, args.swagger_pass
    )
    # Key-dependent tests only run if a key was successfully created.
    if success and api_key:
        tester.test_api_key_usage(api_key)
        tester.test_invalid_api_key()
        if not args.skip_rate_limit:
            tester.test_rate_limiting(api_key)
        else:
            print("\n Test de rate limiting sauté")
    else:
        print("\n Tests avec clé API sautés (création échouée)")
    # Process exit code mirrors the summary (0 = all green, 1 = failures).
    exit_code = tester.print_summary()
    sys.exit(exit_code)
# Script entry point.
if __name__ == "__main__":
    main()

View file

@ -5,10 +5,12 @@ import jwt
import secrets
import hashlib
SECRET_KEY = "VOTRE_SECRET_KEY_A_METTRE_EN_.ENV"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 10080
REFRESH_TOKEN_EXPIRE_DAYS = 7
from config.config import settings
SECRET_KEY = settings.jwt_secret
ALGORITHM = settings.jwt_algorithm
ACCESS_TOKEN_EXPIRE_MINUTES = settings.access_token_expire_minutes
REFRESH_TOKEN_EXPIRE_DAYS = settings.refresh_token_expire_days
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
@ -67,9 +69,13 @@ def decode_token(token: str) -> Optional[Dict]:
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
return payload
except jwt.ExpiredSignatureError:
return None
except jwt.JWTError:
return None
raise jwt.InvalidTokenError("Token expiré")
except jwt.DecodeError:
raise jwt.InvalidTokenError("Token invalide (format incorrect)")
except jwt.InvalidTokenError as e:
raise jwt.InvalidTokenError(f"Token invalide: {str(e)}")
except Exception as e:
raise jwt.InvalidTokenError(f"Erreur lors du décodage du token: {str(e)}")
def validate_password_strength(password: str) -> tuple[bool, str]:

223
services/api_key.py Normal file
View file

@ -0,0 +1,223 @@
import secrets
import hashlib
import json
from datetime import datetime, timedelta
from typing import Optional, List, Dict
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, or_
import logging
from database.models.api_key import ApiKey
logger = logging.getLogger(__name__)
class ApiKeyService:
    """API key management service: create, verify, list and revoke keys.

    Only the SHA-256 hash of a key is persisted; the plaintext key is
    returned exactly once, at creation time.
    """
    def __init__(self, session: AsyncSession):
        # Async SQLAlchemy session; mutating methods commit on it directly.
        self.session = session
    @staticmethod
    def generate_api_key() -> str:
        """Generate a unique, cryptographically secure API key."""
        # token_urlsafe(32) -> ~43 URL-safe characters of CSPRNG entropy.
        random_part = secrets.token_urlsafe(32)
        return f"sdk_live_{random_part}"
    @staticmethod
    def hash_api_key(api_key: str) -> str:
        """Hash the API key for secure storage (plaintext is never stored)."""
        return hashlib.sha256(api_key.encode()).hexdigest()
    @staticmethod
    def get_key_prefix(api_key: str) -> str:
        """Return the first 12 chars of the key, used for human identification."""
        return api_key[:12] if len(api_key) >= 12 else api_key
    async def create_api_key(
        self,
        name: str,
        description: Optional[str] = None,
        created_by: str = "system",
        user_id: Optional[str] = None,
        expires_in_days: Optional[int] = None,
        rate_limit_per_minute: int = 60,
        allowed_endpoints: Optional[List[str]] = None,
    ) -> tuple[ApiKey, str]:
        """Create and persist a new API key.

        Returns (ORM object, plaintext key). The plaintext is not stored,
        so the caller must surface it to the user immediately.
        """
        api_key_plain = self.generate_api_key()
        key_hash = self.hash_api_key(api_key_plain)
        key_prefix = self.get_key_prefix(api_key_plain)
        expires_at = None
        if expires_in_days:
            # NOTE(review): naive local datetime — confirm the DB column and
            # all comparisons use the same (naive local) convention.
            expires_at = datetime.now() + timedelta(days=expires_in_days)
        api_key_obj = ApiKey(
            key_hash=key_hash,
            key_prefix=key_prefix,
            name=name,
            description=description,
            created_by=created_by,
            user_id=user_id,
            expires_at=expires_at,
            # Endpoint restrictions are stored as a JSON-encoded list (or NULL
            # for "no restriction") — see check_endpoint_access.
            allowed_endpoints=json.dumps(allowed_endpoints)
            if allowed_endpoints
            else None,
            rate_limit_per_minute=rate_limit_per_minute,
        )
        self.session.add(api_key_obj)
        await self.session.commit()
        # refresh() loads DB-generated fields (id, created_at, ...).
        await self.session.refresh(api_key_obj)
        logger.info(f" Clé API créée: {name} (prefix: {key_prefix})")
        return api_key_obj, api_key_plain
    async def verify_api_key(self, api_key_plain: str) -> Optional[ApiKey]:
        """Validate a plaintext key; return its ORM row or None.

        A key is valid when its hash matches an active, non-revoked row that
        is either non-expiring or not yet expired. Side effect: on success the
        usage counter and last_used_at are updated and committed.
        """
        key_hash = self.hash_api_key(api_key_plain)
        result = await self.session.execute(
            select(ApiKey).where(
                and_(
                    ApiKey.key_hash == key_hash,
                    ApiKey.is_active,
                    ApiKey.revoked_at.is_(None),
                    or_(
                        ApiKey.expires_at.is_(None), ApiKey.expires_at > datetime.now()
                    ),
                )
            )
        )
        api_key_obj = result.scalar_one_or_none()
        if api_key_obj:
            api_key_obj.total_requests += 1
            api_key_obj.last_used_at = datetime.now()
            await self.session.commit()
            logger.debug(f" Clé API validée: {api_key_obj.name}")
        else:
            logger.warning(" Clé API invalide ou expirée")
        return api_key_obj
    async def list_api_keys(
        self,
        include_revoked: bool = False,
        user_id: Optional[str] = None,
    ) -> List[ApiKey]:
        """List API keys, newest first, optionally filtered by owner."""
        query = select(ApiKey)
        if not include_revoked:
            query = query.where(ApiKey.revoked_at.is_(None))
        if user_id:
            query = query.where(ApiKey.user_id == user_id)
        query = query.order_by(ApiKey.created_at.desc())
        result = await self.session.execute(query)
        return list(result.scalars().all())
    async def revoke_api_key(self, key_id: str) -> bool:
        """Soft-revoke a key (deactivate + timestamp); False if id unknown."""
        result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
        api_key_obj = result.scalar_one_or_none()
        if not api_key_obj:
            return False
        api_key_obj.is_active = False
        api_key_obj.revoked_at = datetime.now()
        await self.session.commit()
        logger.info(f"🗑️ Clé API révoquée: {api_key_obj.name}")
        return True
    async def get_by_id(self, key_id: str) -> Optional[ApiKey]:
        """Fetch an API key row by primary key, or None."""
        result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
        return result.scalar_one_or_none()
    async def check_rate_limit(self, api_key_obj: ApiKey) -> tuple[bool, Dict]:
        """Rate-limit check — currently a STUB that always allows.

        NOTE(review): no per-minute counting is implemented here; the
        returned "remaining" always equals the configured limit. Confirm
        whether enforcement happens elsewhere (e.g. middleware) or is TODO.
        """
        return True, {
            "allowed": True,
            "limit": api_key_obj.rate_limit_per_minute,
            "remaining": api_key_obj.rate_limit_per_minute,
        }
    async def check_endpoint_access(self, api_key_obj: ApiKey, endpoint: str) -> bool:
        """Return True when `endpoint` is permitted for this key.

        Semantics: NULL restriction list => everything allowed; "*" or "/*"
        => global access; otherwise exact match, "prefix/*" (prefix itself or
        any sub-path), or "prefix*" (plain string prefix). Corrupt JSON in
        the stored list denies access.
        """
        if not api_key_obj.allowed_endpoints:
            logger.debug(
                f"🔓 API Key {api_key_obj.name}: Aucune restriction d'endpoint"
            )
            return True
        try:
            allowed = json.loads(api_key_obj.allowed_endpoints)
            if "*" in allowed or "/*" in allowed:
                logger.debug(f"🔓 API Key {api_key_obj.name}: Accès global autorisé")
                return True
            for pattern in allowed:
                if pattern == endpoint:
                    logger.debug(f" Match exact: {pattern} == {endpoint}")
                    return True
                if pattern.endswith("/*"):
                    base = pattern[:-2]  # "/clients/*" → "/clients"
                    # Matches the base itself or any deeper path under it.
                    if endpoint == base or endpoint.startswith(base + "/"):
                        logger.debug(f" Match wildcard: {pattern}{endpoint}")
                        return True
                elif pattern.endswith("*"):
                    base = pattern[:-1]  # "/clients*" → "/clients"
                    if endpoint.startswith(base):
                        logger.debug(f" Match prefix: {pattern}{endpoint}")
                        return True
            logger.warning(
                f" API Key {api_key_obj.name}: Accès refusé à {endpoint}\n"
                f" Endpoints autorisés: {allowed}"
            )
            return False
        except json.JSONDecodeError:
            # Fail closed: a key with an unparseable restriction list gets no access.
            logger.error(f" Erreur parsing allowed_endpoints pour {api_key_obj.id}")
            return False
def api_key_to_response(api_key_obj: ApiKey, show_key: bool = False) -> Dict:
    """Serialize an ApiKey ORM object into a plain dict for API responses.

    The stored ``allowed_endpoints`` JSON string is decoded back into a list
    (``None`` when absent or unparseable), and ``is_expired`` is derived by
    comparing ``expires_at`` against the current local time.
    """
    decoded_endpoints = None
    raw_endpoints = api_key_obj.allowed_endpoints
    if raw_endpoints:
        try:
            decoded_endpoints = json.loads(raw_endpoints)
        except json.JSONDecodeError:
            # Corrupt stored JSON: expose None rather than failing the response.
            decoded_endpoints = None
    expiry = api_key_obj.expires_at
    expired = bool(expiry) and expiry < datetime.now()
    return {
        "id": api_key_obj.id,
        "name": api_key_obj.name,
        "description": api_key_obj.description,
        "key_prefix": api_key_obj.key_prefix,
        "is_active": api_key_obj.is_active,
        "is_expired": expired,
        "rate_limit_per_minute": api_key_obj.rate_limit_per_minute,
        "allowed_endpoints": decoded_endpoints,
        "total_requests": api_key_obj.total_requests,
        "last_used_at": api_key_obj.last_used_at,
        "created_at": api_key_obj.created_at,
        "expires_at": api_key_obj.expires_at,
        "revoked_at": api_key_obj.revoked_at,
        "created_by": api_key_obj.created_by,
    }

View file

@ -6,7 +6,7 @@ import httpx
from datetime import datetime
from typing import Optional, Tuple, List
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import false, select, true, update, and_
from sqlalchemy import false, select, update, and_
import logging
from config.config import settings
@ -55,7 +55,7 @@ class SageGatewayService:
and_(
SageGatewayConfig.id == gateway_id,
SageGatewayConfig.user_id == user_id,
# SageGatewayConfig.is_deleted.is_(false()),
SageGatewayConfig.is_deleted == false(),
)
)
)
@ -67,7 +67,7 @@ class SageGatewayService:
query = select(SageGatewayConfig).where(SageGatewayConfig.user_id == user_id)
if not include_deleted:
query = query.where(SageGatewayConfig.is_deleted.is_(false()))
query = query.where(SageGatewayConfig.is_deleted == false())
query = query.order_by(
SageGatewayConfig.is_active.desc(),
@ -167,7 +167,7 @@ class SageGatewayService:
and_(
SageGatewayConfig.user_id == user_id,
SageGatewayConfig.is_active,
SageGatewayConfig.is_deleted.is_(true()),
SageGatewayConfig.is_deleted == false(),
)
)
)

View file

@ -0,0 +1,361 @@
import os
import logging
import requests
from pathlib import Path
from datetime import datetime
from typing import Optional, Tuple, Dict, List
from sqlalchemy.ext.asyncio import AsyncSession
logger = logging.getLogger(__name__)
SIGNED_DOCS_DIR = Path(os.getenv("SIGNED_DOCS_PATH", "/app/data/signed_documents"))
SIGNED_DOCS_DIR.mkdir(parents=True, exist_ok=True)
class UniversignDocumentService:
    """Fetch, download and store signed documents from the Universign REST API.

    Uses HTTP basic auth with the API key as username and an empty password.
    Downloaded PDFs are written under SIGNED_DOCS_DIR (module-level constant).
    """
    def __init__(self, api_url: str, api_key: str, timeout: int = 60):
        # Normalize the base URL so path joins below never double the slash.
        self.api_url = api_url.rstrip("/")
        self.api_key = api_key
        self.timeout = timeout
        # Universign basic auth: api key as user, empty password.
        self.auth = (api_key, "")
    def fetch_transaction_documents(self, transaction_id: str) -> Optional[List[Dict]]:
        """Return the transaction's document descriptors, or None on any failure."""
        try:
            logger.info(f" Récupération documents pour transaction: {transaction_id}")
            response = requests.get(
                f"{self.api_url}/transactions/{transaction_id}",
                auth=self.auth,
                timeout=self.timeout,
                headers={"Accept": "application/json"},
            )
            if response.status_code == 200:
                data = response.json()
                documents = data.get("documents", [])
                logger.info(f"{len(documents)} document(s) trouvé(s)")
                for idx, doc in enumerate(documents):
                    logger.debug(
                        f" Document {idx}: id={doc.get('id')}, "
                        f"name={doc.get('name')}, status={doc.get('status')}"
                    )
                return documents
            elif response.status_code == 404:
                logger.warning(
                    f"Transaction {transaction_id} introuvable sur Universign"
                )
                return None
            else:
                # Truncate the body to keep the log line bounded.
                logger.error(
                    f"Erreur HTTP {response.status_code} pour {transaction_id}: "
                    f"{response.text[:500]}"
                )
                return None
        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout récupération transaction {transaction_id}")
            return None
        except Exception as e:
            logger.error(f" Erreur fetch documents: {e}", exc_info=True)
            return None
    def download_signed_document(
        self, transaction_id: str, document_id: str
    ) -> Optional[bytes]:
        """Download one document's bytes; None on HTTP error, timeout or tiny payload."""
        try:
            download_url = (
                f"{self.api_url}/transactions/{transaction_id}"
                f"/documents/{document_id}/download"
            )
            logger.info(f"Téléchargement depuis: {download_url}")
            response = requests.get(
                download_url,
                auth=self.auth,
                timeout=self.timeout,
                stream=True,
            )
            if response.status_code == 200:
                content_type = response.headers.get("Content-Type", "")
                content_length = response.headers.get("Content-Length", "unknown")
                logger.info(
                    f"Téléchargement réussi: "
                    f"Content-Type={content_type}, Size={content_length}"
                )
                if (
                    "pdf" not in content_type.lower()
                    and "octet-stream" not in content_type.lower()
                ):
                    # Unexpected content type: warn but still try to read the body.
                    logger.warning(
                        f"Type de contenu inattendu: {content_type}. "
                        f"Tentative de lecture quand même..."
                    )
                content = response.content
                if len(content) < 1024:
                    # A real signed PDF is never this small — treat as failure.
                    logger.error(f" Document trop petit: {len(content)} octets")
                    return None
                return content
            elif response.status_code == 404:
                logger.error(
                    f" Document {document_id} introuvable pour transaction {transaction_id}"
                )
                return None
            elif response.status_code == 403:
                logger.error(
                    f" Accès refusé au document {document_id}. "
                    f"Vérifiez que la transaction est bien signée."
                )
                return None
            else:
                logger.error(
                    f" Erreur HTTP {response.status_code}: {response.text[:500]}"
                )
                return None
        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout téléchargement document {document_id}")
            return None
        except Exception as e:
            logger.error(f" Erreur téléchargement: {e}", exc_info=True)
            return None
    async def download_and_store_signed_document(
        self, session: AsyncSession, transaction, force: bool = False
    ) -> Tuple[bool, Optional[str]]:
        """Download the transaction's signed PDF to disk and record it on the row.

        Returns (success, error_message). Idempotent unless ``force``: a
        transaction whose stored path still exists on disk is skipped.
        Failures are persisted on the transaction (download_error) and committed.
        """
        if not force and transaction.signed_document_path:
            if os.path.exists(transaction.signed_document_path):
                logger.debug(
                    f"Document déjà téléchargé: {transaction.transaction_id}"
                )
                return True, None
        # Count the attempt up-front so even exceptions below are accounted for.
        transaction.download_attempts += 1
        try:
            logger.info(
                f"Récupération document signé pour: {transaction.transaction_id}"
            )
            documents = self.fetch_transaction_documents(transaction.transaction_id)
            if not documents:
                error = "Aucun document trouvé dans la transaction Universign"
                logger.warning(f"{error}")
                transaction.download_error = error
                await session.commit()
                return False, error
            # Prefer the first document in a signed/terminal state; otherwise
            # fall back to the first document seen.
            document_id = None
            for doc in documents:
                doc_id = doc.get("id")
                doc_status = doc.get("status", "").lower()
                if doc_status in ["signed", "completed", "closed"]:
                    document_id = doc_id
                    logger.info(
                        f"Document signé trouvé: {doc_id} (status: {doc_status})"
                    )
                    break
                if document_id is None:
                    document_id = doc_id
            if not document_id:
                error = "Impossible de déterminer l'ID du document à télécharger"
                logger.error(f" {error}")
                transaction.download_error = error
                await session.commit()
                return False, error
            # Older transaction rows may not have this column — guard with hasattr.
            if hasattr(transaction, "universign_document_id"):
                transaction.universign_document_id = document_id
            pdf_content = self.download_signed_document(
                transaction_id=transaction.transaction_id, document_id=document_id
            )
            if not pdf_content:
                error = f"Échec téléchargement document {document_id}"
                logger.error(f" {error}")
                transaction.download_error = error
                await session.commit()
                return False, error
            filename = self._generate_filename(transaction)
            file_path = SIGNED_DOCS_DIR / filename
            with open(file_path, "wb") as f:
                f.write(pdf_content)
            file_size = os.path.getsize(file_path)
            # Record storage metadata and clear any previous error.
            transaction.signed_document_path = str(file_path)
            transaction.signed_document_downloaded_at = datetime.now()
            transaction.signed_document_size_bytes = file_size
            transaction.download_error = None
            transaction.document_url = (
                f"{self.api_url}/transactions/{transaction.transaction_id}"
                f"/documents/{document_id}/download"
            )
            await session.commit()
            logger.info(
                f"Document signé téléchargé: (unknown) ({file_size / 1024:.1f} KB)"
            )
            return True, None
        except OSError as e:
            # Disk-level failure (permissions, no space, ...).
            error = f"Erreur filesystem: {str(e)}"
            logger.error(f" {error}")
            transaction.download_error = error
            await session.commit()
            return False, error
        except Exception as e:
            error = f"Erreur inattendue: {str(e)}"
            logger.error(f" {error}", exc_info=True)
            transaction.download_error = error
            await session.commit()
            return False, error
    def _generate_filename(self, transaction) -> str:
        """Build a unique filename for the signed PDF.

        Shape: <sage_document_id>_<tx id without "tr_">_<timestamp>_signed.pdf
        """
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        tx_id = transaction.transaction_id.replace("tr_", "")
        filename = f"{transaction.sage_document_id}_{tx_id}_{timestamp}_signed.pdf"
        return filename
    def get_document_path(self, transaction) -> Optional[Path]:
        """Return the signed document's Path if recorded AND still on disk."""
        if not transaction.signed_document_path:
            return None
        path = Path(transaction.signed_document_path)
        if path.exists():
            return path
        return None
    async def cleanup_old_documents(self, days_to_keep: int = 90) -> Tuple[int, int]:
        """Delete signed PDFs older than ``days_to_keep`` days (by file mtime).

        Returns (files_deleted, megabytes_freed). Per-file errors are logged
        and skipped so one bad file cannot abort the sweep.
        """
        from datetime import timedelta
        cutoff_date = datetime.now() - timedelta(days=days_to_keep)
        deleted = 0
        size_freed = 0
        for file_path in SIGNED_DOCS_DIR.glob("*.pdf"):
            try:
                file_time = datetime.fromtimestamp(os.path.getmtime(file_path))
                if file_time < cutoff_date:
                    # Capture the size before deleting the file.
                    size_freed += os.path.getsize(file_path)
                    os.remove(file_path)
                    deleted += 1
                    logger.info(f"🗑️ Supprimé: {file_path.name}")
            except Exception as e:
                logger.error(f"Erreur suppression {file_path}: {e}")
        size_freed_mb = size_freed / (1024 * 1024)
        logger.info(
            f"Nettoyage terminé: {deleted} fichiers supprimés "
            f"({size_freed_mb:.2f} MB libérés)"
        )
        return deleted, int(size_freed_mb)
    def diagnose_transaction(self, transaction_id: str) -> Dict:
        """
        Full diagnostic of a transaction, for debugging.

        Fetches the transaction, then HEAD-probes each document's download
        URL; results are accumulated in a nested dict rather than raised.
        """
        result = {
            "transaction_id": transaction_id,
            "api_url": self.api_url,
            "timestamp": datetime.now().isoformat(),
            "checks": {},
        }
        try:
            logger.info(f"Diagnostic transaction: {transaction_id}")
            response = requests.get(
                f"{self.api_url}/transactions/{transaction_id}",
                auth=self.auth,
                timeout=self.timeout,
            )
            result["checks"]["transaction_fetch"] = {
                "status_code": response.status_code,
                "success": response.status_code == 200,
            }
            if response.status_code != 200:
                result["checks"]["transaction_fetch"]["error"] = response.text[:500]
                return result
            data = response.json()
            result["checks"]["transaction_data"] = {
                "state": data.get("state"),
                "documents_count": len(data.get("documents", [])),
                "participants_count": len(data.get("participants", [])),
            }
            documents = data.get("documents", [])
            result["checks"]["documents"] = []
            for doc in documents:
                doc_info = {
                    "id": doc.get("id"),
                    "name": doc.get("name"),
                    "status": doc.get("status"),
                }
                if doc.get("id"):
                    download_url = (
                        f"{self.api_url}/transactions/{transaction_id}"
                        f"/documents/{doc['id']}/download"
                    )
                    try:
                        # HEAD request: checks accessibility without downloading.
                        dl_response = requests.head(
                            download_url,
                            auth=self.auth,
                            timeout=10,
                        )
                        doc_info["download_check"] = {
                            "url": download_url,
                            "status_code": dl_response.status_code,
                            # 302 counts as accessible (redirect to storage).
                            "accessible": dl_response.status_code in [200, 302],
                            "content_type": dl_response.headers.get("Content-Type"),
                        }
                    except Exception as e:
                        doc_info["download_check"] = {"error": str(e)}
                result["checks"]["documents"].append(doc_info)
            result["success"] = True
        except Exception as e:
            result["success"] = False
            result["error"] = str(e)
        return result

View file

@ -19,6 +19,7 @@ from database import (
StatutEmail,
)
from data.data import templates_signature_email
from services.universign_document import UniversignDocumentService
from utils.universign_status_mapping import (
map_universign_to_local,
is_transition_allowed,
@ -39,6 +40,9 @@ class UniversignSyncService:
self.sage_client = None
self.email_queue = None
self.settings = None
self.document_service = UniversignDocumentService(
api_url=api_url, api_key=api_key, timeout=60
)
def configure(self, sage_client, email_queue, settings):
self.sage_client = sage_client
@ -155,7 +159,6 @@ class UniversignSyncService:
return stats
# CORRECTION 1 : process_webhook dans universign_sync.py
async def process_webhook(
self, session: AsyncSession, payload: Dict, transaction_id: str = None
) -> Tuple[bool, Optional[str]]:
@ -163,9 +166,7 @@ class UniversignSyncService:
Traite un webhook Universign - CORRECTION : meilleure gestion des payloads
"""
try:
# Si transaction_id n'est pas fourni, essayer de l'extraire
if not transaction_id:
# Même logique que dans universign.py
if (
payload.get("type", "").startswith("transaction.")
and "payload" in payload
@ -191,7 +192,6 @@ class UniversignSyncService:
f"📨 Traitement webhook: transaction={transaction_id}, event={event_type}"
)
# Récupérer la transaction locale
query = (
select(UniversignTransaction)
.options(selectinload(UniversignTransaction.signers))
@ -201,28 +201,23 @@ class UniversignSyncService:
transaction = result.scalar_one_or_none()
if not transaction:
logger.warning(f"⚠️ Transaction {transaction_id} inconnue localement")
logger.warning(f"Transaction {transaction_id} inconnue localement")
return False, "Transaction inconnue"
# Marquer comme webhook reçu
transaction.webhook_received = True
# Stocker l'ancien statut pour comparaison
old_status = transaction.local_status.value
# Force la synchronisation complète
success, error = await self.sync_transaction(
session, transaction, force=True
)
# Log du changement de statut
if success and transaction.local_status.value != old_status:
logger.info(
f"Webhook traité: {transaction_id} | "
f"Webhook traité: {transaction_id} | "
f"{old_status}{transaction.local_status.value}"
)
# Enregistrer le log du webhook
await self._log_sync_attempt(
session=session,
transaction=transaction,
@ -233,7 +228,7 @@ class UniversignSyncService:
new_status=transaction.local_status.value,
changes=json.dumps(
payload, default=str
), # Ajout default=str pour éviter les erreurs JSON
), # Ajout default=str pour éviter les erreurs JSON
)
await session.commit()
@ -244,7 +239,6 @@ class UniversignSyncService:
logger.error(f"💥 Erreur traitement webhook: {e}", exc_info=True)
return False, str(e)
# CORRECTION 2 : _sync_signers - Ne pas écraser les signers existants
async def _sync_signers(
self,
session: AsyncSession,
@ -267,7 +261,6 @@ class UniversignSyncService:
logger.warning(f"Signataire sans email à l'index {idx}, ignoré")
continue
# ✅ PROTECTION : gérer les statuts inconnus
raw_status = signer_data.get("status") or signer_data.get(
"state", "waiting"
)
@ -298,7 +291,6 @@ class UniversignSyncService:
if signer_data.get("name") and not signer.name:
signer.name = signer_data.get("name")
else:
# ✅ Nouveau signer avec gestion d'erreur intégrée
try:
signer = UniversignSigner(
id=f"{transaction.id}_signer_{idx}_{int(datetime.now().timestamp())}",
@ -318,39 +310,32 @@ class UniversignSyncService:
except Exception as e:
logger.error(f"Erreur création signer {email}: {e}")
# CORRECTION 3 : Amélioration du logging dans sync_transaction
async def sync_transaction(
self,
session: AsyncSession,
transaction: UniversignTransaction,
session,
transaction,
force: bool = False,
) -> Tuple[bool, Optional[str]]:
"""
CORRECTION : Meilleur logging et gestion d'erreurs
"""
):
import json
# Si statut final et pas de force, skip
if is_final_status(transaction.local_status.value) and not force:
logger.debug(
f"⏭️ Skip {transaction.transaction_id}: statut final {transaction.local_status.value}"
f"⏭️ Skip {transaction.transaction_id}: statut final "
f"{transaction.local_status.value}"
)
transaction.needs_sync = False
await session.commit()
return True, None
# Récupération du statut distant
logger.info(f"🔄 Synchronisation: {transaction.transaction_id}")
logger.info(f"Synchronisation: {transaction.transaction_id}")
result = self.fetch_transaction_status(transaction.transaction_id)
if not result:
error = "Échec récupération données Universign"
logger.error(f"{error}: {transaction.transaction_id}")
# ✅ CORRECTION : Incrémenter les tentatives MÊME en cas d'échec
logger.error(f" {error}: {transaction.transaction_id}")
transaction.sync_attempts += 1
transaction.sync_error = error
await self._log_sync_attempt(session, transaction, "polling", False, error)
await session.commit()
return False, error
@ -359,44 +344,38 @@ class UniversignSyncService:
universign_data = result["transaction"]
universign_status_raw = universign_data.get("state", "draft")
logger.info(f"📊 Statut Universign brut: {universign_status_raw}")
logger.info(f" Statut Universign brut: {universign_status_raw}")
# Convertir le statut
new_local_status = map_universign_to_local(universign_status_raw)
previous_local_status = transaction.local_status.value
logger.info(
f"🔄 Mapping: {universign_status_raw} (Universign) → "
f"Mapping: {universign_status_raw} (Universign) → "
f"{new_local_status} (Local) | Actuel: {previous_local_status}"
)
# Vérifier la transition
if not is_transition_allowed(previous_local_status, new_local_status):
logger.warning(
f"⚠️ Transition refusée: {previous_local_status}{new_local_status}"
f"Transition refusée: {previous_local_status}{new_local_status}"
)
new_local_status = resolve_status_conflict(
previous_local_status, new_local_status
)
logger.info(
f"✅ Résolution conflit: statut résolu = {new_local_status}"
)
logger.info(f"Résolution conflit: statut résolu = {new_local_status}")
status_changed = previous_local_status != new_local_status
if status_changed:
logger.info(
f"🔔 CHANGEMENT DÉTECTÉ: {previous_local_status}{new_local_status}"
f"CHANGEMENT DÉTECTÉ: {previous_local_status}{new_local_status}"
)
# Mise à jour du statut Universign brut
try:
transaction.universign_status = UniversignTransactionStatus(
universign_status_raw
)
except ValueError:
logger.warning(f"⚠️ Statut Universign inconnu: {universign_status_raw}")
# Fallback intelligent
logger.warning(f"Statut Universign inconnu: {universign_status_raw}")
if new_local_status == "SIGNE":
transaction.universign_status = (
UniversignTransactionStatus.COMPLETED
@ -408,46 +387,59 @@ class UniversignSyncService:
else:
transaction.universign_status = UniversignTransactionStatus.STARTED
# ✅ Mise à jour du statut local
transaction.local_status = LocalDocumentStatus(new_local_status)
transaction.universign_status_updated_at = datetime.now()
# Mise à jour des dates
if new_local_status == "EN_COURS" and not transaction.sent_at:
transaction.sent_at = datetime.now()
logger.info("📅 Date d'envoi mise à jour")
logger.info("Date d'envoi mise à jour")
if new_local_status == "SIGNE" and not transaction.signed_at:
transaction.signed_at = datetime.now()
logger.info("Date de signature mise à jour")
logger.info("Date de signature mise à jour")
if new_local_status == "REFUSE" and not transaction.refused_at:
transaction.refused_at = datetime.now()
logger.info(" Date de refus mise à jour")
logger.info(" Date de refus mise à jour")
if new_local_status == "EXPIRE" and not transaction.expired_at:
transaction.expired_at = datetime.now()
logger.info("Date d'expiration mise à jour")
logger.info("Date d'expiration mise à jour")
# Mise à jour des URLs
if (
universign_data.get("documents")
and len(universign_data["documents"]) > 0
):
first_doc = universign_data["documents"][0]
if first_doc.get("url"):
transaction.document_url = first_doc["url"]
documents = universign_data.get("documents", [])
if documents:
first_doc = documents[0]
logger.info(
f"Document Universign trouvé: id={first_doc.get('id')}, "
f"status={first_doc.get('status')}"
)
if new_local_status == "SIGNE" and not transaction.signed_document_path:
logger.info("Déclenchement téléchargement document signé...")
try:
(
download_success,
download_error,
) = await self.document_service.download_and_store_signed_document(
session=session, transaction=transaction, force=False
)
if download_success:
logger.info("Document signé téléchargé et stocké")
else:
logger.warning(f"Échec téléchargement: {download_error}")
except Exception as e:
logger.error(f" Erreur téléchargement document: {e}", exc_info=True)
# Synchroniser les signataires
await self._sync_signers(session, transaction, universign_data)
# Mise à jour des métadonnées de sync
transaction.last_synced_at = datetime.now()
transaction.sync_attempts += 1
transaction.needs_sync = not is_final_status(new_local_status)
transaction.sync_error = None # ✅ Effacer l'erreur précédente
transaction.sync_error = None
# Log de la tentative
await self._log_sync_attempt(
session=session,
transaction=transaction,
@ -460,15 +452,15 @@ class UniversignSyncService:
{
"status_changed": status_changed,
"universign_raw": universign_status_raw,
"documents_count": len(documents),
"response_time_ms": result.get("response_time_ms"),
},
default=str, # ✅ Éviter les erreurs de sérialisation
default=str,
),
)
await session.commit()
# Exécuter les actions post-changement
if status_changed:
logger.info(f"🎬 Exécution actions pour statut: {new_local_status}")
await self._execute_status_actions(
@ -476,7 +468,7 @@ class UniversignSyncService:
)
logger.info(
f"Sync terminée: {transaction.transaction_id} | "
f"Sync terminée: {transaction.transaction_id} | "
f"{previous_local_status}{new_local_status}"
)
@ -484,9 +476,9 @@ class UniversignSyncService:
except Exception as e:
error_msg = f"Erreur lors de la synchronisation: {str(e)}"
logger.error(f" {error_msg}", exc_info=True)
logger.error(f" {error_msg}", exc_info=True)
transaction.sync_error = error_msg[:1000] # Tronquer si trop long
transaction.sync_error = error_msg[:1000]
transaction.sync_attempts += 1
await self._log_sync_attempt(
@ -496,6 +488,50 @@ class UniversignSyncService:
return False, error_msg
async def _sync_transaction_documents_corrected(
self, session, transaction, universign_data: dict, new_local_status: str
):
documents = universign_data.get("documents", [])
if documents:
first_doc = documents[0]
first_doc_id = first_doc.get("id")
if first_doc_id:
if hasattr(transaction, "universign_document_id"):
transaction.universign_document_id = first_doc_id
logger.info(
f"Document Universign: id={first_doc_id}, "
f"name={first_doc.get('name')}, status={first_doc.get('status')}"
)
else:
logger.debug("Aucun document dans la réponse Universign")
if new_local_status == "SIGNE":
if not transaction.signed_document_path:
logger.info("Déclenchement téléchargement document signé...")
try:
(
download_success,
download_error,
) = await self.document_service.download_and_store_signed_document(
session=session, transaction=transaction, force=False
)
if download_success:
logger.info("Document signé téléchargé avec succès")
else:
logger.warning(f"Échec téléchargement: {download_error}")
except Exception as e:
logger.error(f" Erreur téléchargement document: {e}", exc_info=True)
else:
logger.debug(
f"Document déjà téléchargé: {transaction.signed_document_path}"
)
async def _log_sync_attempt(
self,
session: AsyncSession,

15
tools/cleaner.py Normal file
View file

@ -0,0 +1,15 @@
from pathlib import Path
def supprimer_commentaires_ligne(fichier):
    """Remove full-line ``#`` comments from *fichier*, rewriting it in place.

    Lines whose first non-whitespace character is ``#`` are dropped, with two
    exceptions kept intact when they appear in the first two lines: a shebang
    (``#!...``) and a PEP 263 encoding cookie (a comment containing "coding").
    The file's trailing newline, if present, is preserved (the original
    implementation silently dropped it).

    NOTE(review): this is a purely line-based pass — a line starting with
    ``#`` inside a triple-quoted string would also be removed; only run it
    on files where that cannot happen.
    """
    path = Path(fichier)
    original = path.read_text(encoding="utf-8")
    kept = []
    for index, line in enumerate(original.splitlines()):
        stripped = line.lstrip()
        if stripped.startswith("#"):
            # Preserve shebang / encoding declarations; they are only
            # meaningful in the first two lines of a Python file.
            is_special = index < 2 and (
                stripped.startswith("#!") or "coding" in stripped
            )
            if not is_special:
                continue
        kept.append(line)
    text = "\n".join(kept)
    if original.endswith("\n") and text:
        text += "\n"
    path.write_text(text, encoding="utf-8")
if __name__ == "__main__":
    # Strip full-line comments from data/data.py at the repository root
    # (this script lives in tools/, hence parent.parent).
    racine_projet = Path(__file__).resolve().parent.parent
    supprimer_commentaires_ligne(racine_projet / "data/data.py")

View file

@ -0,0 +1,52 @@
# One-shot refactoring script: extract every Pydantic BaseModel subclass
# defined at the top level of main.py into its own file under ../models,
# then rewrite main.py without those classes.
#
# NOTE(review): the rewrite goes through ast.unparse, which discards all
# comments and original formatting in main.py — destructive; keep a backup.
import ast
import os
import textwrap
SOURCE_FILE = "main.py"
MODELS_DIR = "../models"
os.makedirs(MODELS_DIR, exist_ok=True)
with open(SOURCE_FILE, "r", encoding="utf-8") as f:
    source_code = f.read()
tree = ast.parse(source_code)
pydantic_classes = []
other_nodes = []
# Partition top-level nodes: classes directly subclassing BaseModel (by the
# bare name only — "pydantic.BaseModel" attribute bases are NOT detected)
# go to pydantic_classes, everything else stays in main.py.
for node in tree.body:
    if isinstance(node, ast.ClassDef):
        if any(
            isinstance(base, ast.Name) and base.id == "BaseModel" for base in node.bases
        ):
            pydantic_classes.append(node)
            continue
    other_nodes.append(node)
# Header written into every generated model file; Field/Optional/List are
# imported unconditionally even if the class does not use them.
imports = """
from pydantic import BaseModel, Field
from typing import Optional, List
"""
for cls in pydantic_classes:
    class_name = cls.name
    file_name = f"{class_name.lower()}.py"
    file_path = os.path.join(MODELS_DIR, file_name)
    # get_source_segment preserves the class's original source text verbatim.
    class_code = ast.get_source_segment(source_code, cls)
    class_code = textwrap.dedent(class_code)
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(imports.strip() + "\n\n")
        f.write(class_code)
    print(f"✅ Modèle extrait : {class_name}{file_path}")
# Re-emit main.py from the remaining AST nodes (comments are lost here).
new_tree = ast.Module(body=other_nodes, type_ignores=[])
new_source = ast.unparse(new_tree)
with open(SOURCE_FILE, "w", encoding="utf-8") as f:
    f.write(new_source)
print("\n🎉 Extraction terminée")

136
utils/enterprise.py Normal file
View file

@ -0,0 +1,136 @@
from fastapi import HTTPException
from typing import Optional
import httpx
import logging
from schemas import EntrepriseSearch
logger = logging.getLogger(__name__)
def calculer_tva_intracommunautaire(siren: str) -> Optional[str]:
    """Derive the French intra-community VAT number from a 9-digit SIREN.

    Returns ``"FR" + two-digit control key + SIREN``, or ``None`` when the
    input is not a valid 9-digit SIREN (spaces inside the string are tolerated).
    """
    try:
        digits = siren.replace(" ", "").strip()
        if len(digits) != 9 or not digits.isdigit():
            logger.warning(f"SIREN invalide: {siren}")
            return None
        # Official control-key formula: (12 + 3 * (SIREN mod 97)) mod 97.
        controle = (12 + 3 * (int(digits) % 97)) % 97
        return f"FR{controle:02d}{digits}"
    except Exception as e:
        logger.error(f"Erreur calcul TVA pour SIREN {siren}: {e}")
        return None
def formater_adresse(siege_data: dict) -> str:
    """Build a one-line postal address from a Sirene ``siege`` record.

    Joins street number, street type, street name, postal code and the
    upper-cased city with single spaces, skipping absent/empty fields.
    Returns "" on any unexpected error.
    """
    try:
        champs_voie = ("numero_voie", "type_voie", "libelle_voie", "code_postal")
        morceaux = [siege_data[champ] for champ in champs_voie if siege_data.get(champ)]
        commune = siege_data.get("libelle_commune")
        if commune:
            # City name is normalized to upper case, matching postal convention.
            morceaux.append(commune.upper())
        return " ".join(morceaux)
    except Exception as e:
        logger.error(f"Erreur formatage adresse: {e}")
        return ""
async def rechercher_entreprise_api(query: str, per_page: int = 5) -> dict:
    """Query the public recherche-entreprises.api.gouv.fr search endpoint.

    Returns the decoded JSON payload. Upstream failures are translated into
    HTTPException: 429 (rate limit), 503 (outage), 504 (timeout) and 500
    (any other transport/HTTP error).
    """
    parametres = {
        "q": query,
        "per_page": per_page,
        "limite_etablissements": 5,
    }
    try:
        async with httpx.AsyncClient(timeout=10.0) as http:
            reponse = await http.get(
                "https://recherche-entreprises.api.gouv.fr/search",
                params=parametres,
            )
            if reponse.status_code == 429:
                # The public API enforces roughly 7 requests/second per IP.
                logger.warning("Rate limit atteint (7 req/s)")
                raise HTTPException(
                    status_code=429,
                    detail="Trop de requêtes. Veuillez réessayer dans 1 seconde.",
                )
            if reponse.status_code == 503:
                logger.error("API Sirene indisponible (503)")
                raise HTTPException(
                    status_code=503,
                    detail="Service de recherche momentanément indisponible.",
                )
            reponse.raise_for_status()
            return reponse.json()
    except httpx.TimeoutException:
        logger.error(f"Timeout lors de la recherche: {query}")
        raise HTTPException(
            status_code=504, detail="Délai d'attente dépassé pour l'API de recherche."
        )
    except httpx.HTTPError as e:
        logger.error(f"Erreur HTTP API Sirene: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Erreur lors de la communication avec l'API: {str(e)}",
        )
def mapper_resultat_api(entreprise_data: dict) -> Optional[EntrepriseSearch]:
    """Map one raw API search hit onto an EntrepriseSearch schema.

    Returns None when the record lacks a SIREN, when the VAT number cannot
    be derived from it, or when any unexpected error occurs while mapping.
    """
    try:
        numero_siren = entreprise_data.get("siren")
        if not numero_siren:
            logger.warning("Entreprise sans SIREN, ignorée")
            return None
        tva = calculer_tva_intracommunautaire(numero_siren)
        if not tva:
            logger.warning(f"Impossible de calculer TVA pour SIREN: {numero_siren}")
            return None
        siege = entreprise_data.get("siege", {})
        # "A" (actif) is treated as the default when the field is missing.
        actif = entreprise_data.get("etat_administratif", "A") == "A"
        return EntrepriseSearch(
            company_name=entreprise_data.get("nom_complet", ""),
            siren=numero_siren,
            vat_number=tva,
            address=formater_adresse(siege),
            naf_code=entreprise_data.get("activite_principale", ""),
            is_active=actif,
            siret_siege=siege.get("siret"),
            code_postal=siege.get("code_postal"),
            ville=siege.get("libelle_commune"),
        )
    except Exception as e:
        logger.error(f"Erreur mapping entreprise: {e}", exc_info=True)
        return None

View file

@ -290,15 +290,11 @@ def _preparer_lignes_document(lignes: List) -> List[Dict]:
UNIVERSIGN_TO_LOCAL: Dict[str, str] = {
# États initiaux
"draft": "EN_ATTENTE",
"ready": "EN_ATTENTE",
# En cours
"started": "EN_COURS",
# États finaux (succès)
"completed": "SIGNE",
"closed": "SIGNE",
# États finaux (échec)
"refused": "REFUSE",
"expired": "EXPIRE",
"canceled": "REFUSE",
@ -423,13 +419,13 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
"SIGNE": {
"fr": "Signé avec succès",
"en": "Successfully signed",
"icon": "",
"icon": "",
"color": "green",
},
"REFUSE": {
"fr": "Signature refusée",
"en": "Signature refused",
"icon": "",
"icon": "",
"color": "red",
},
"EXPIRE": {
@ -441,7 +437,7 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
"ERREUR": {
"fr": "Erreur technique",
"en": "Technical error",
"icon": "⚠️",
"icon": "",
"color": "red",
},
}

View file

@ -90,13 +90,13 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
"SIGNE": {
"fr": "Signé avec succès",
"en": "Successfully signed",
"icon": "",
"icon": "",
"color": "green",
},
"REFUSE": {
"fr": "Signature refusée",
"en": "Signature refused",
"icon": "",
"icon": "",
"color": "red",
},
"EXPIRE": {
@ -108,7 +108,7 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
"ERREUR": {
"fr": "Erreur technique",
"en": "Technical error",
"icon": "⚠️",
"icon": "",
"color": "red",
},
}