Compare commits

...

116 commits

Author SHA1 Message Date
Fanilo-Nantenaina
3b5c183b47 refactor(security): remove emojis from logs and improve script debugging 2026-01-21 16:53:38 +03:00
Fanilo-Nantenaina
d25c2cffa9 refactor(security): clean up authentication middleware and api docs 2026-01-21 16:23:34 +03:00
Fanilo-Nantenaina
1c6c45465f fix(security): improve api key and jwt validation handling 2026-01-21 14:00:57 +03:00
Fanilo-Nantenaina
574d82f3c4 fix(security): improve auth handling and openapi schema generation 2026-01-21 13:50:44 +03:00
Fanilo-Nantenaina
a6a623d1ab refactor(api): simplify auth scheme handling and improve schema filtering 2026-01-21 13:41:37 +03:00
Fanilo-Nantenaina
437ecd0ed3 Merge branch 'feat/controlled_swagger_access' into main_2 2026-01-21 13:26:49 +03:00
Fanilo-Nantenaina
c057a085ed refactor(security): improve swagger user management and logging 2026-01-21 13:21:16 +03:00
Fanilo-Nantenaina
23a94f5558 Merge branch 'feat/controlled_swagger_access' into main_2 2026-01-21 13:01:38 +03:00
Fanilo-Nantenaina
797aed0240 feat(security): enhance swagger auth with user context and filtered docs 2026-01-21 12:56:02 +03:00
Fanilo-Nantenaina
5f40c677a8 refactor(manage_security): simplify delete_swagger_user function 2026-01-21 12:21:10 +03:00
Fanilo-Nantenaina
8a22e285df refactor(security): improve logging format and argument handling 2026-01-21 12:12:35 +03:00
Fanilo-Nantenaina
92597a1143 feat(api): add tag-based OpenAPI schema filtering for Swagger users 2026-01-21 12:05:06 +03:00
Fanilo-Nantenaina
c1f4c66e8c refactor(api): remove user dependency from all endpoints 2026-01-20 19:28:27 +03:00
Fanilo-Nantenaina
43da1b09ed Merge branch 'develop' into develop_like 2026-01-20 19:26:42 +03:00
Fanilo-Nantenaina
6d5f8594d0 chore: ignore python clean scripts in gitignore 2026-01-20 19:26:28 +03:00
Fanilo-Nantenaina
a7457c3979 fix(security): improve auth handling and logging in middleware 2026-01-20 19:14:00 +03:00
Fanilo-Nantenaina
5eec115d1d Merge branch 'main' into develop 2026-01-20 16:29:35 +03:00
Fanilo-Nantenaina
d89c9fd35b chore: ignore clean scripts in gitignore 2026-01-20 16:29:10 +03:00
Fanilo-Nantenaina
211dd4fd23 fix(security): improve api key authentication and error handling 2026-01-20 16:18:04 +03:00
Fanilo-Nantenaina
67ef83c4e3 refactor(security): improve authentication logging and endpoint checks 2026-01-20 16:01:54 +03:00
Fanilo-Nantenaina
82d1d92e58 refactor(scripts): improve import handling and path management 2026-01-20 15:35:06 +03:00
Fanilo-Nantenaina
28c8fb3008 refactor(security): improve user management and session handling 2026-01-20 15:27:26 +03:00
Fanilo-Nantenaina
f8cec7ebc5 refactor(security): improve security scripts and api documentation 2026-01-20 15:13:19 +03:00
Fanilo-Nantenaina
1a08894b47 refactor(scripts): improve logging format and endpoint handling in security management 2026-01-20 15:01:57 +03:00
Fanilo-Nantenaina
3cdb490ee5 refactor(security): improve middleware structure and configuration handling 2026-01-20 14:47:07 +03:00
Fanilo-Nantenaina
c84e4ddc20 refactor(auth): simplify authentication logic and improve error handling 2026-01-20 14:25:06 +03:00
Fanilo-Nantenaina
41ca202d4b refactor(security): move security config to environment variables and improve error handling 2026-01-20 14:19:48 +03:00
Fanilo-Nantenaina
918f5d3f19 docs(api): fix incorrect comment syntax in openapi configuration 2026-01-20 14:06:10 +03:00
Fanilo-Nantenaina
fa95d0d117 refactor(api): replace get_current_user with get_sage_client_for_user in dependencies 2026-01-20 13:56:16 +03:00
Fanilo-Nantenaina
a1150390f4 Merge branch 'fix/security' into main_2 2026-01-20 13:54:42 +03:00
Fanilo-Nantenaina
0001dbe634 docs(api): add comments for security schemas and openapi setup 2026-01-20 13:54:36 +03:00
Fanilo-Nantenaina
5b584bf969 refactor(security): improve auth middleware and logging 2026-01-20 13:51:09 +03:00
Fanilo-Nantenaina
022149c237 refactor(api): replace get_sage_client_for_user with get_current_user for dependency injection 2026-01-20 13:46:27 +03:00
Fanilo-Nantenaina
72d1ac58d1 refactor(security): reorganize imports and improve logging message 2026-01-20 12:40:56 +03:00
Fanilo-Nantenaina
cce1cdf76a refactor(scripts): improve manage_security.py organization and error handling 2026-01-20 12:32:49 +03:00
Fanilo-Nantenaina
e51a5e0a0b refactor(database): update import to use direct get_session import 2026-01-20 12:19:51 +03:00
Fanilo-Nantenaina
dd65ae4d96 style: remove emoji from log messages 2026-01-20 12:17:52 +03:00
Fanilo-Nantenaina
cc0062b3bc refactor(security): improve security management script with better logging and structure 2026-01-20 12:11:20 +03:00
Fanilo-Nantenaina
9bd0f62459 feat(api): add authentication to all endpoints and update OpenAPI schema 2026-01-20 11:49:57 +03:00
Fanilo-Nantenaina
e0f08fd83a refactor(dependencies): rename require_role_hybrid to require_role for consistency 2026-01-20 11:29:12 +03:00
Fanilo-Nantenaina
f59e56490c feat(api): add api keys router to middleware stack 2026-01-20 11:24:42 +03:00
Fanilo-Nantenaina
2aafd525cd refactor(api): update middleware and cors configuration 2026-01-20 11:23:10 +03:00
Fanilo-Nantenaina
17a4251eea Merge branch 'feat/secureing_API' 2026-01-20 11:12:59 +03:00
Fanilo-Nantenaina
abc9ff820a feat(security): implement api key management and authentication system 2026-01-20 11:11:32 +03:00
Fanilo-Nantenaina
b85bd26dbe Merge branches 'develop' and 'main' of https://git.dataven.fr/fanilo/Sage100-vps 2026-01-20 11:02:28 +03:00
Fanilo-Nantenaina
1164c7975a refactor: remove emojis and clean up code comments 2026-01-20 06:31:17 +03:00
Fanilo-Nantenaina
a10fda072c feat(security): implement API key authentication system 2026-01-19 20:38:01 +03:00
Fanilo-Nantenaina
9f6c1de8ef feat(api-keys): implement api key management system 2026-01-19 20:35:03 +03:00
Fanilo-Nantenaina
09eae50952 refactor: clean up code by removing unnecessary comments 2026-01-19 20:32:40 +03:00
Fanilo-Nantenaina
4b686c4544 Merge branch 'feat/get_all_reglements' into develop_like 2026-01-17 12:53:01 +03:00
Fanilo-Nantenaina
89510537b3 fix(api): cast montant_total to float in regler_factures_multiple 2026-01-17 11:41:44 +03:00
Fanilo-Nantenaina
0e18129325 feat(reglements): add endpoint to get payment details by invoice number 2026-01-16 17:58:02 +03:00
Fanilo-Nantenaina
aa89ebdf9e feat(reglements): add endpoints to list and get payment details 2026-01-16 15:46:58 +03:00
Fanilo-Nantenaina
9f12727bd3 refactor(routes): remove authentication dependency from universign router 2026-01-16 13:26:24 +03:00
Fanilo-Nantenaina
18603ded6e refactor(auth): reorganize imports and remove unused dependencies - Added some missing auth 2026-01-16 13:22:13 +03:00
Fanilo-Nantenaina
18d72b3bf9 Secured all routes 2026-01-16 12:47:56 +03:00
Fanilo-Nantenaina
fdf359738b Merge branch 'develop' 2026-01-16 12:35:17 +03:00
Fanilo-Nantenaina
ba9e474109 Merge branch 'main' of https://git.dataven.fr/fanilo/backend_vps 2026-01-16 12:34:47 +03:00
Fanilo-Nantenaina
b291cbf65f feat(reglements): add compte_general field to ReglementFactureCreate 2026-01-15 17:13:31 +03:00
Fanilo-Nantenaina
6f2136c3ca feat(sage_client): add new payment parameters and make fields optional 2026-01-15 16:56:25 +03:00
Fanilo-Nantenaina
beabefa3f9 feat(payments): enhance payment processing with new endpoints and schema 2026-01-15 15:49:15 +03:00
Fanilo-Nantenaina
457c746706 Merge branch 'feat/validation_facture' into develop 2026-01-15 14:46:51 +03:00
Fanilo-Nantenaina
d719966339 Merge branch 'feat/settle_invoice' of https://git.dataven.fr/fanilo/backend_vps into feat/settle_invoice 2026-01-15 14:44:17 +03:00
Fanilo-Nantenaina
cc1609549f feat(api): add endpoint to fetch bank journals 2026-01-15 14:44:04 +03:00
Fanilo-Nantenaina
25be0bd569 feat(payments): add payment functionality for invoices 2026-01-15 14:44:04 +03:00
Fanilo-Nantenaina
19faec9b24 Merge branch 'feat/validation_facture' of https://git.dataven.fr/fanilo/backend_vps into feat/validation_facture 2026-01-15 14:39:56 +03:00
Fanilo-Nantenaina
2f06a083dc refactor: simplify invoice validation methods and error handling 2026-01-15 14:38:42 +03:00
Fanilo-Nantenaina
149d8fb2de feat(factures): add invoice validation endpoints and client methods 2026-01-15 14:38:42 +03:00
Fanilo-Nantenaina
eedc628a5f refactor(models): clean up model comments and whitespace 2026-01-15 14:38:26 +03:00
Fanilo-Nantenaina
f505dad8a7 refactor: simplify invoice validation methods and error handling 2026-01-15 11:08:52 +03:00
Fanilo-Nantenaina
a824592398 feat(factures): add invoice validation endpoints and client methods 2026-01-15 10:34:40 +03:00
Fanilo-Nantenaina
23575fa231 feat(api): add endpoint to fetch bank journals 2026-01-14 19:58:56 +03:00
Fanilo-Nantenaina
a9df408399 feat(payments): add payment functionality for invoices 2026-01-14 18:40:21 +03:00
Fanilo-Nantenaina
671d5bac15 feat(society): add logo support and preview endpoint 2026-01-14 15:20:16 +03:00
Fanilo-Nantenaina
d5273a0786 refactor(enterprise): change siren parameter from Query to Path and corrected deprecated "regex" 2026-01-13 18:24:10 +03:00
Fanilo-Nantenaina
e7003d4059 feat(enterprise): add enterprise search functionality with API integration 2026-01-13 18:20:30 +03:00
Fanilo-Nantenaina
30ffc7a493 refactor: improve societe info handling and add sqlite lock retry docs 2026-01-13 18:03:33 +03:00
Fanilo-Nantenaina
3f1dce918d feat(pdf): add dynamic company info to PDF generator 2026-01-13 17:37:23 +03:00
Fanilo-Nantenaina
08665f15dd Merge branch 'feat/get_society' into fix/update_pdf_structure 2026-01-13 17:19:19 +03:00
Fanilo-Nantenaina
3233630401 feat(database): add SQLite optimization and retry mechanisms 2026-01-13 17:14:55 +03:00
Fanilo-Nantenaina
d2f02e1555 refactor(api/schemas): simplify company info response and schema 2026-01-13 17:09:12 +03:00
Fanilo-Nantenaina
9ae447e2c7 feat(society): add company info schema and endpoint 2026-01-13 17:03:18 +03:00
Fanilo-Nantenaina
358b2e3639 refactor(email_queue): update font and logo fallback paths to use pdfs directory 2026-01-13 16:58:18 +03:00
Fanilo-Nantenaina
a2c85a211a feat(pdf): refactor PDF generation with new SagePDFGenerator class 2026-01-13 16:30:34 +03:00
Fanilo-Nantenaina
b17e4abf12 Merge branch 'fix/update_pdf_structure' of https://git.dataven.fr/fanilo/backend_vps into fix/update_pdf_structure 2026-01-13 11:57:07 +03:00
Fanilo-Nantenaina
23f9bba174 chore: add .trunk to gitignore 2026-01-13 11:52:24 +03:00
Fanilo-Nantenaina
983e960b9b feat(universign): add signed document download and storage functionality 2026-01-13 11:50:32 +03:00
Fanilo-Nantenaina
74c0d73294 chore: add .trunk to gitignore 2026-01-13 11:49:57 +03:00
Fanilo-Nantenaina
18f9a45ef6 refactor(universign): remove emojis from logs and clean up admin routes 2026-01-13 11:46:18 +03:00
Fanilo-Nantenaina
d6ed8792cc fix(universign): add missing api_url parameter to document service 2026-01-13 11:13:58 +03:00
Fanilo-Nantenaina
24d7a49a73 feat(universign): add transaction diagnosis endpoint and improve document handling 2026-01-13 11:05:34 +03:00
Fanilo-Nantenaina
c5c17fdd9b fix(auth): increase failed login attempt threshold from 5 to 15 2026-01-13 10:42:58 +03:00
Fanilo-Nantenaina
c389129ae7 Updated using the correct redirection structure 2026-01-12 19:04:09 +03:00
Fanilo-Nantenaina
6b6246b6e5 Testing multi-sage users 2026-01-12 18:56:37 +03:00
b3419eafaa Deleted cached dev database 2026-01-08 17:29:16 +00:00
Fanilo-Nantenaina
a9aff7b386 chore: add staging and production env files to gitignore 2026-01-08 20:07:07 +03:00
Fanilo-Nantenaina
4f0fe17ee9 chore: remove staging environment configuration file 2026-01-08 20:06:51 +03:00
d4d6cbc44f Added enum status caused by case sensibility on Linux 2026-01-08 17:04:09 +00:00
Fanilo-Nantenaina
795b848dff chore: update gitignore and add status enums 2026-01-08 16:59:08 +03:00
Fanilo-Nantenaina
e990cbdc08 MERGING branch develop INTO MAIN 2026-01-08 16:58:43 +03:00
Fanilo-Nantenaina
1972f22b80 chore: update container name in dev docker-compose file 2026-01-08 16:37:14 +03:00
Fanilo-Nantenaina
ce3b234fee chore: update container name in docker-compose.dev.yml 2026-01-08 15:24:11 +03:00
Fanilo-Nantenaina
d78d189606 chore: update database name in docker-compose.dev.yml 2026-01-08 15:22:27 +03:00
Fanilo-Nantenaina
8a012fc162 refactor(docker): restructure docker setup for multiple environments 2026-01-08 15:15:21 +03:00
Fanilo-Nantenaina
f3957dddcf Merge branch 'fix/update_pdf_structure' into feat/update_pdf_structure 2026-01-08 10:48:26 +03:00
Fanilo-Nantenaina
268dfb3618 refactor(api): replace sage dependency with direct sage_client usage 2026-01-08 10:29:21 +03:00
Fanilo-Nantenaina
bcee1f277f refactor: replace is_() method with direct boolean comparison 2026-01-07 20:29:35 +03:00
Fanilo-Nantenaina
d8ec61802d feat(universign): add signed document storage and download functionality 2026-01-07 20:01:55 +03:00
Fanilo-Nantenaina
4a1960745a feat(pdf-generation): redesign document layout with modern styling 2026-01-07 19:42:29 +03:00
0be28f6744 Merge pull request 'debug/universign_3' (#3) from debug/universign_3 into develop
Reviewed-on: fanilo/backend_vps#3
2026-01-07 03:13:26 +00:00
Fanilo-Nantenaina
e4024168b2 fix: update total label in PDF generation to "Total HT NET" 2026-01-07 06:12:25 +03:00
Fanilo-Nantenaina
cd9dd9348d fix: update invoice pdf to use total_ht_net instead of total_ht 2026-01-07 04:26:56 +03:00
Fanilo-Nantenaina
bcaa621432 feat(signer-status): add new signer statuses and improve status handling 2026-01-06 20:24:29 +03:00
Fanilo-Nantenaina
fbaa43e3fd fix(universign): improve webhook payload handling and transaction sync 2026-01-06 19:56:39 +03:00
Fanilo-Nantenaina
1ce85517be feat(universign): improve transaction sync and webhook handling 2026-01-06 19:43:42 +03:00
Fanilo-Nantenaina
a3f02cbd91 feat(universign): add transaction management and status synchronization 2026-01-06 19:15:35 +03:00
53 changed files with 6731 additions and 1081 deletions

9
.gitignore vendored
View file

@ -39,3 +39,12 @@ data/*.db.bak
*.db *.db
tools/ tools/
.trunk
.env.staging
.env.production
.trunk
*clean*.py

9
.trunk/.gitignore vendored Normal file
View file

@ -0,0 +1,9 @@
*out
*logs
*actions
*notifications
*tools
plugins
user_trunk.yaml
user.yaml
tmp

32
.trunk/trunk.yaml Normal file
View file

@ -0,0 +1,32 @@
# This file controls the behavior of Trunk: https://docs.trunk.io/cli
# To learn more about the format of this file, see https://docs.trunk.io/reference/trunk-yaml
version: 0.1
cli:
version: 1.25.0
# Trunk provides extensibility via plugins. (https://docs.trunk.io/plugins)
plugins:
sources:
- id: trunk
ref: v1.7.4
uri: https://github.com/trunk-io/plugins
# Many linters and tools depend on runtimes - configure them here. (https://docs.trunk.io/runtimes)
runtimes:
enabled:
- node@22.16.0
- python@3.10.8
# This is the section where you manage your linters. (https://docs.trunk.io/check/configuration)
lint:
enabled:
- git-diff-check
- hadolint@2.14.0
- markdownlint@0.47.0
- osv-scanner@2.3.1
- prettier@3.7.4
- trufflehog@3.92.4
actions:
disabled:
- trunk-announce
- trunk-check-pre-push
- trunk-fmt-pre-commit
enabled:
- trunk-upgrade-available

View file

@ -1,23 +1,78 @@
# Backend Dockerfile # ================================
FROM python:3.12-slim # Base
# ================================
FROM python:3.12-slim AS base
WORKDIR /app WORKDIR /app
# Copier et installer les dépendances # Installer dépendances système si nécessaire
COPY requirements.txt . RUN apt-get update && apt-get install -y --no-install-recommends \
RUN pip install --no-cache-dir --upgrade pip \ curl \
&& pip install --no-cache-dir -r requirements.txt && rm -rf /var/lib/apt/lists/*
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip
# ================================
# DEV
# ================================
FROM base AS dev
ENV PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
ENV=development
# Installer dépendances dev (si vous avez un requirements.dev.txt)
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Créer dossiers
RUN mkdir -p /app/data /app/logs && chmod -R 777 /app/data /app/logs
# Copier le reste du projet
COPY . . COPY . .
# Créer dossier persistant pour SQLite avec bonnes permissions
RUN mkdir -p /app/data && chmod 777 /app/data
# Exposer le port
EXPOSE 8000 EXPOSE 8000
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
# Lancer l'API et initialiser la DB au démarrage # ================================
# CMD ["sh", "-c", "uvicorn api:app --host 0.0.0.0 --port 8000"] # STAGING
# ================================
FROM base AS staging
ENV PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
ENV=staging
CMD ["sh", "-c", "python init_db.py && uvicorn api:app --host 0.0.0.0 --port 8000"] RUN pip install --no-cache-dir -r requirements.txt
RUN mkdir -p /app/data /app/logs && chmod -R 755 /app/data /app/logs
COPY . .
# Initialiser la DB au build
RUN python init_db.py || true
EXPOSE 8002
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8002", "--log-level", "info"]
# ================================
# PROD
# ================================
FROM base AS prod
ENV PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
ENV=production
RUN pip install --no-cache-dir -r requirements.txt
# Créer utilisateur non-root pour la sécurité
RUN useradd -m -u 1000 appuser && \
mkdir -p /app/data /app/logs && \
chown -R appuser:appuser /app
COPY --chown=appuser:appuser . .
# Initialiser la DB au build
RUN python init_db.py || true
USER appuser
EXPOSE 8004
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8004", "--workers", "4"]

1642
api.py

File diff suppressed because it is too large Load diff

View file

@ -7,7 +7,6 @@ class Settings(BaseSettings):
env_file=".env", env_file_encoding="utf-8", case_sensitive=False, extra="ignore" env_file=".env", env_file_encoding="utf-8", case_sensitive=False, extra="ignore"
) )
# === JWT & Auth ===
jwt_secret: str jwt_secret: str
jwt_algorithm: str jwt_algorithm: str
access_token_expire_minutes: int access_token_expire_minutes: int
@ -21,15 +20,12 @@ class Settings(BaseSettings):
SAGE_TYPE_BON_AVOIR: int = 50 SAGE_TYPE_BON_AVOIR: int = 50
SAGE_TYPE_FACTURE: int = 60 SAGE_TYPE_FACTURE: int = 60
# === Sage Gateway (Windows) ===
sage_gateway_url: str sage_gateway_url: str
sage_gateway_token: str sage_gateway_token: str
frontend_url: str frontend_url: str
# === Base de données ===
database_url: str = "sqlite+aiosqlite:///./data/sage_dataven.db" database_url: str = "sqlite+aiosqlite:///./data/sage_dataven.db"
# === SMTP ===
smtp_host: str smtp_host: str
smtp_port: int = 587 smtp_port: int = 587
smtp_user: str smtp_user: str
@ -37,21 +33,17 @@ class Settings(BaseSettings):
smtp_from: str smtp_from: str
smtp_use_tls: bool = True smtp_use_tls: bool = True
# === Universign ===
universign_api_key: str universign_api_key: str
universign_api_url: str universign_api_url: str
# === API ===
api_host: str api_host: str
api_port: int api_port: int
api_reload: bool = False api_reload: bool = False
# === Email Queue ===
max_email_workers: int = 3 max_email_workers: int = 3
max_retry_attempts: int = 3 max_retry_attempts: int = 3
retry_delay_seconds: int = 3 retry_delay_seconds: int = 3
# === CORS ===
cors_origins: List[str] = ["*"] cors_origins: List[str] = ["*"]

125
config/cors_config.py Normal file
View file

@@ -0,0 +1,125 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from typing import List
import os
import logging
logger = logging.getLogger(__name__)
def configure_cors_open(app: FastAPI):
    """Install a fully-open CORS policy on *app*.

    Every origin is accepted and credentials are disabled; actual access
    control is delegated to the API-key layer, not to CORS.
    """
    open_policy = dict(
        allow_origins=["*"],
        allow_credentials=False,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["*"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )
    app.add_middleware(CORSMiddleware, **open_policy)
    logger.info(" CORS configuré: Mode OUVERT (sécurisé par API Keys)")
    logger.info(" - Origins: * (toutes)")
    logger.info(" - Headers: * (dont X-API-Key)")
    logger.info(" - Credentials: False")
def configure_cors_whitelist(app: FastAPI):
    """Install a whitelist CORS policy on *app*.

    Allowed origins are read from the ``CORS_ALLOWED_ORIGINS`` environment
    variable as a comma-separated list (surrounding whitespace trimmed,
    empty entries dropped).
    """
    raw = os.getenv("CORS_ALLOWED_ORIGINS", "")
    if raw:
        allowed_origins = [part.strip() for part in raw.split(",") if part.strip()]
    else:
        # NOTE(review): with the variable unset this falls back to "*" while
        # allow_credentials stays True — confirm the open fallback is intended.
        allowed_origins = ["*"]
    app.add_middleware(
        CORSMiddleware,
        allow_origins=allowed_origins,
        allow_credentials=True,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )
    logger.info(" CORS configuré: Mode WHITELIST")
    logger.info(f" - Origins autorisées: {len(allowed_origins)}")
    for entry in allowed_origins:
        logger.info(f"{entry}")
def configure_cors_regex(app: FastAPI):
    """Install a regex-based CORS policy on *app*.

    Origins are matched against ``origin_regex`` via Starlette's
    ``allow_origin_regex`` option.

    Fix: the previous pattern ``r"*"`` is not a valid regular expression
    (``*`` has nothing to repeat), so Starlette's internal ``re.compile``
    raised at middleware setup. ``r".*"`` is the valid match-anything
    pattern the original clearly intended.
    """
    origin_regex = r".*"
    app.add_middleware(
        CORSMiddleware,
        allow_origin_regex=origin_regex,
        allow_credentials=True,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )
    logger.info(" CORS configuré: Mode REGEX")
    logger.info(f" - Pattern: {origin_regex}")
def configure_cors_hybrid(app: FastAPI):
    """Install a two-tier CORS policy on *app*.

    Origins found in ``known_origins`` get their origin echoed back with
    credentials enabled; any other request gets a wildcard, credential-less
    fallback.

    NOTE(review): ``known_origins`` currently contains only the literal
    string "*", which never equals a real ``Origin`` header value, so every
    request takes the fallback branch — confirm intended.
    NOTE(review): preflight OPTIONS requests are not short-circuited; they
    are forwarded to the app via ``call_next`` — verify routes answer them.
    """
    from starlette.middleware.base import BaseHTTPMiddleware

    class HybridCORSMiddleware(BaseHTTPMiddleware):
        def __init__(self, app, known_origins: List[str]):
            super().__init__(app)
            # Set for O(1) membership tests per request.
            self.known_origins = set(known_origins)

        async def dispatch(self, request, call_next):
            origin = request.headers.get("origin")
            if origin in self.known_origins:
                # Trusted origin: echo it back and allow credentials.
                response = await call_next(request)
                response.headers["Access-Control-Allow-Origin"] = origin
                response.headers["Access-Control-Allow-Credentials"] = "true"
                response.headers["Access-Control-Allow-Methods"] = (
                    "GET, POST, PUT, DELETE, PATCH, OPTIONS"
                )
                response.headers["Access-Control-Allow-Headers"] = (
                    "Content-Type, Authorization, X-API-Key"
                )
                return response
            # Unknown origin: open wildcard fallback, no credentials header.
            response = await call_next(request)
            response.headers["Access-Control-Allow-Origin"] = "*"
            response.headers["Access-Control-Allow-Methods"] = (
                "GET, POST, PUT, DELETE, PATCH, OPTIONS"
            )
            response.headers["Access-Control-Allow-Headers"] = "*"
            return response

    known_origins = ["*"]
    app.add_middleware(HybridCORSMiddleware, known_origins=known_origins)
    logger.info(" CORS configuré: Mode HYBRIDE")
    logger.info(f" - Whitelist: {len(known_origins)} domaines")
    logger.info(" - Fallback: * (ouvert)")
def setup_cors(app: FastAPI, mode: str = "open"):
    """Apply the CORS strategy named by *mode*.

    Supported modes: ``open``, ``whitelist``, ``regex``, ``hybrid``.
    Any unknown mode logs a warning and falls back to ``open``.
    """
    strategies = {
        "open": configure_cors_open,
        "whitelist": configure_cors_whitelist,
        "regex": configure_cors_regex,
        "hybrid": configure_cors_hybrid,
    }
    handler = strategies.get(mode)
    if handler is None:
        logger.warning(
            f" Mode CORS inconnu: {mode}. Utilisation de 'open' par défaut."
        )
        handler = configure_cors_open
    handler(app)

View file

@ -1,94 +1,118 @@
from fastapi import Depends, HTTPException, status from fastapi import Depends, HTTPException, status, Request
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select from sqlalchemy import select
from typing import Optional
from jwt.exceptions import InvalidTokenError
from database import get_session, User from database import get_session, User
from security.auth import decode_token from security.auth import decode_token
from typing import Optional
from datetime import datetime
security = HTTPBearer() security = HTTPBearer(auto_error=False)
async def get_current_user( async def get_current_user_hybrid(
credentials: HTTPAuthorizationCredentials = Depends(security), request: Request,
session: AsyncSession = Depends(get_session),
) -> User:
token = credentials.credentials
payload = decode_token(token)
if not payload:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Token invalide ou expiré",
headers={"WWW-Authenticate": "Bearer"},
)
if payload.get("type") != "access":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Type de token incorrect",
headers={"WWW-Authenticate": "Bearer"},
)
user_id: str = payload.get("sub")
if not user_id:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Token malformé",
headers={"WWW-Authenticate": "Bearer"},
)
result = await session.execute(select(User).where(User.id == user_id))
user = result.scalar_one_or_none()
if not user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Utilisateur introuvable",
headers={"WWW-Authenticate": "Bearer"},
)
if not user.is_active:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN, detail="Compte désactivé"
)
if not user.is_verified:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Email non vérifié. Consultez votre boîte de réception.",
)
if user.locked_until and user.locked_until > datetime.now():
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Compte temporairement verrouillé suite à trop de tentatives échouées",
)
return user
async def get_current_user_optional(
credentials: Optional[HTTPAuthorizationCredentials] = Depends(security), credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
session: AsyncSession = Depends(get_session), session: AsyncSession = Depends(get_session),
) -> Optional[User]: ) -> User:
api_key_obj = getattr(request.state, "api_key", None)
if api_key_obj:
if api_key_obj.user_id:
result = await session.execute(
select(User).where(User.id == api_key_obj.user_id)
)
user = result.scalar_one_or_none()
if user:
user._is_api_key_user = True
user._api_key_obj = api_key_obj
return user
virtual_user = User(
id=f"api_key_{api_key_obj.id}",
email=f"api_key_{api_key_obj.id}@virtual.local",
nom=api_key_obj.name,
prenom="API",
hashed_password="",
role="api_client",
is_active=True,
is_verified=True,
)
virtual_user._is_api_key_user = True
virtual_user._api_key_obj = api_key_obj
return virtual_user
if not credentials: if not credentials:
return None raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Authentification requise (JWT ou API Key)",
headers={"WWW-Authenticate": "Bearer"},
)
token = credentials.credentials
try: try:
return await get_current_user(credentials, session) payload = decode_token(token)
except HTTPException: user_id: str = payload.get("sub")
return None
if user_id is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Token invalide: user_id manquant",
headers={"WWW-Authenticate": "Bearer"},
)
result = await session.execute(select(User).where(User.id == user_id))
user = result.scalar_one_or_none()
if user is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Utilisateur introuvable",
headers={"WWW-Authenticate": "Bearer"},
)
if not user.is_active:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Utilisateur inactif",
)
return user
except InvalidTokenError as e:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=f"Token invalide: {str(e)}",
headers={"WWW-Authenticate": "Bearer"},
)
def require_role(*allowed_roles: str): def require_role_hybrid(*allowed_roles: str):
async def role_checker(user: User = Depends(get_current_user)) -> User: async def role_checker(user: User = Depends(get_current_user_hybrid)) -> User:
if user.role not in allowed_roles: if user.role not in allowed_roles:
raise HTTPException( raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN, status_code=status.HTTP_403_FORBIDDEN,
detail=f"Accès refusé. Rôles requis: {', '.join(allowed_roles)}", detail=f"Accès interdit. Rôles autorisés: {', '.join(allowed_roles)}",
) )
return user return user
return role_checker return role_checker
def is_api_key_user(user: User) -> bool:
"""Vérifie si l'utilisateur est authentifié via API Key"""
return getattr(user, "_is_api_key_user", False)
def get_api_key_from_user(user: User):
"""Récupère l'objet ApiKey depuis un utilisateur (si applicable)"""
return getattr(user, "_api_key_obj", None)
get_current_user = get_current_user_hybrid
require_role = require_role_hybrid

View file

@ -1,12 +1,3 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Script de création du premier utilisateur administrateur
Usage:
python create_admin.py
"""
import asyncio import asyncio
import sys import sys
from pathlib import Path from pathlib import Path
@ -28,7 +19,6 @@ async def create_admin():
print(" Création d'un compte administrateur") print(" Création d'un compte administrateur")
print("=" * 60 + "\n") print("=" * 60 + "\n")
# Saisie des informations
email = input("Email de l'admin: ").strip().lower() email = input("Email de l'admin: ").strip().lower()
if not email or "@" not in email: if not email or "@" not in email:
print(" Email invalide") print(" Email invalide")
@ -41,7 +31,6 @@ async def create_admin():
print(" Prénom et nom requis") print(" Prénom et nom requis")
return False return False
# Mot de passe avec validation
while True: while True:
password = input( password = input(
"Mot de passe (min 8 car., 1 maj, 1 min, 1 chiffre, 1 spécial): " "Mot de passe (min 8 car., 1 maj, 1 min, 1 chiffre, 1 spécial): "
@ -67,7 +56,6 @@ async def create_admin():
print(f"\n Un utilisateur avec l'email {email} existe déjà") print(f"\n Un utilisateur avec l'email {email} existe déjà")
return False return False
# Créer l'admin
admin = User( admin = User(
id=str(uuid.uuid4()), id=str(uuid.uuid4()),
email=email, email=email,

View file

@ -152,7 +152,7 @@ templates_signature_email = {
</table> </table>
<p style="color: #718096; font-size: 13px; line-height: 1.5; margin: 0;"> <p style="color: #718096; font-size: 13px; line-height: 1.5; margin: 0;">
<strong>🔒 Signature électronique sécurisée</strong><br> <strong> Signature électronique sécurisée</strong><br>
Votre signature est protégée par notre partenaire de confiance <strong>Universign</strong>, Votre signature est protégée par notre partenaire de confiance <strong>Universign</strong>,
certifié eIDAS et conforme au RGPD. Votre identité sera vérifiée et le document sera certifié eIDAS et conforme au RGPD. Votre identité sera vérifiée et le document sera
horodaté de manière infalsifiable. horodaté de manière infalsifiable.

View file

@ -1,20 +1,49 @@
import os
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.pool import NullPool from sqlalchemy.pool import NullPool
from sqlalchemy import event, text
import logging import logging
from config.config import settings
from database.models.generic_model import Base from database.models.generic_model import Base
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite+aiosqlite:///./data/sage_dataven.db") DATABASE_URL = settings.database_url
def _configure_sqlite_connection(dbapi_connection, connection_record):
    """Apply per-connection SQLite PRAGMAs.

    Enables WAL journaling, a 30 s busy timeout, NORMAL synchronous mode,
    a larger page cache, and foreign-key enforcement on every new
    connection handed out by the pool.
    """
    pragma_statements = (
        "PRAGMA journal_mode=WAL",
        "PRAGMA busy_timeout=30000",
        "PRAGMA synchronous=NORMAL",
        "PRAGMA cache_size=-64000",  # 64MB
        "PRAGMA foreign_keys=ON",
        "PRAGMA locking_mode=NORMAL",
    )
    cursor = dbapi_connection.cursor()
    for statement in pragma_statements:
        cursor.execute(statement)
    cursor.close()
    logger.debug("SQLite configuré avec WAL mode et busy_timeout=30s")
engine_kwargs = {
"echo": False,
"future": True,
"poolclass": NullPool,
}
if DATABASE_URL and "sqlite" in DATABASE_URL:
engine_kwargs["connect_args"] = {
"check_same_thread": False,
"timeout": 30,
}
engine = create_async_engine(DATABASE_URL, **engine_kwargs)
if DATABASE_URL and "sqlite" in DATABASE_URL:
@event.listens_for(engine.sync_engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
_configure_sqlite_connection(dbapi_connection, connection_record)
engine = create_async_engine(
DATABASE_URL,
echo=False,
future=True,
poolclass=NullPool,
)
async_session_factory = async_sessionmaker( async_session_factory = async_sessionmaker(
engine, engine,
@ -30,6 +59,12 @@ async def init_db():
logger.info("Tentative de connexion") logger.info("Tentative de connexion")
async with engine.begin() as conn: async with engine.begin() as conn:
logger.info("Connexion etablie") logger.info("Connexion etablie")
if DATABASE_URL and "sqlite" in DATABASE_URL:
result = await conn.execute(text("PRAGMA journal_mode"))
journal_mode = result.scalar()
logger.info(f"SQLite journal_mode: {journal_mode}")
await conn.run_sync(Base.metadata.create_all) await conn.run_sync(Base.metadata.create_all)
logger.info("create_all execute") logger.info("create_all execute")
@ -49,3 +84,57 @@ async def get_session() -> AsyncSession:
async def close_db(): async def close_db():
await engine.dispose() await engine.dispose()
logger.info("Connexions DB fermées") logger.info("Connexions DB fermées")
async def execute_with_sqlite_retry(
    session: AsyncSession, statement, max_retries: int = 5, base_delay: float = 0.1
):
    """Execute *statement* on *session*, retrying on SQLite lock contention.

    Retries only when the OperationalError message contains "database is
    locked"; any other OperationalError is re-raised immediately. The delay
    grows exponentially (base_delay * 2**attempt).

    Args:
        session: active AsyncSession to execute on.
        statement: SQLAlchemy statement to execute.
        max_retries: total number of attempts before giving up.
        base_delay: initial backoff delay in seconds.

    Returns:
        The result of ``session.execute(statement)``.

    Raises:
        OperationalError: last lock error once retries are exhausted, or any
            non-lock OperationalError at once.
    """
    # Local imports keep module import time free of these dependencies.
    import asyncio

    from sqlalchemy.exc import OperationalError

    last_error = None
    for attempt in range(max_retries):
        try:
            return await session.execute(statement)
        except OperationalError as e:
            last_error = e
            if "database is locked" not in str(e).lower():
                raise
            # Fix: the original slept even after the final attempt, adding a
            # useless delay before raising. Only back off when a retry follows.
            if attempt == max_retries - 1:
                break
            delay = base_delay * (2**attempt)
            logger.warning(
                f"SQLite locked, tentative {attempt + 1}/{max_retries}, "
                f"retry dans {delay:.2f}s"
            )
            await asyncio.sleep(delay)
    raise last_error
async def commit_with_retry(
    session: AsyncSession, max_retries: int = 5, base_delay: float = 0.1
):
    """Commit *session*, retrying with exponential backoff on SQLite locks.

    Mirrors execute_with_sqlite_retry: only "database is locked" errors are
    retried; any other OperationalError propagates immediately.

    Args:
        session: active AsyncSession to commit.
        max_retries: total number of attempts before giving up.
        base_delay: initial backoff delay in seconds.

    Raises:
        OperationalError: last lock error once retries are exhausted, or any
            non-lock OperationalError at once.
    """
    # Local imports keep module import time free of these dependencies.
    import asyncio

    from sqlalchemy.exc import OperationalError

    last_error = None
    for attempt in range(max_retries):
        try:
            await session.commit()
            return
        except OperationalError as e:
            last_error = e
            if "database is locked" not in str(e).lower():
                raise
            # Fix: don't sleep after the final attempt — raise right away.
            if attempt == max_retries - 1:
                break
            delay = base_delay * (2**attempt)
            logger.warning(
                f"SQLite locked lors du commit, tentative {attempt + 1}/{max_retries}, "
                f"retry dans {delay:.2f}s"
            )
            await asyncio.sleep(delay)
    raise last_error

18
database/enum/status.py Normal file
View file

@ -0,0 +1,18 @@
import enum
class StatutEmail(str, enum.Enum):
    """Delivery status of an outbound e-mail.

    str-valued so members serialize directly to JSON and DB columns.
    """

    EN_ATTENTE = "EN_ATTENTE"  # pending
    EN_COURS = "EN_COURS"  # in progress
    ENVOYE = "ENVOYE"  # sent
    OUVERT = "OUVERT"  # opened
    ERREUR = "ERREUR"  # error
    BOUNCE = "BOUNCE"  # bounced
class StatutSignature(str, enum.Enum):
    """Status of an electronic-signature request.

    str-valued so members serialize directly to JSON and DB columns.
    """

    EN_ATTENTE = "EN_ATTENTE"  # pending
    ENVOYE = "ENVOYE"  # sent to signer
    SIGNE = "SIGNE"  # signed
    REFUSE = "REFUSE"  # refused
    EXPIRE = "EXPIRE"  # expired

View file

@ -0,0 +1,73 @@
from sqlalchemy import Column, String, Boolean, DateTime, Integer, Text
from typing import Optional, List
import json
from datetime import datetime
import uuid
from database.models.generic_model import Base
class ApiKey(Base):
    """Public API key record.

    The key itself appears here only as key_hash plus a short display
    prefix; the full plaintext key is returned to the caller once at
    creation time (see routes/api_keys.py).
    """

    __tablename__ = "api_keys"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    # 64-char hash of the full key — length suggests SHA-256 hex; confirm in ApiKeyService.
    key_hash = Column(String(64), unique=True, nullable=False, index=True)
    # Leading characters of the key, kept so users can recognize it in listings.
    key_prefix = Column(String(10), nullable=False)
    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)
    user_id = Column(String(36), nullable=True)  # owner; NULL for unowned keys
    created_by = Column(String(255), nullable=False)  # creator's e-mail (set by routes)
    is_active = Column(Boolean, default=True, nullable=False)
    rate_limit_per_minute = Column(Integer, default=60, nullable=False)
    # JSON-encoded list of endpoint paths this key may call; NULL = all endpoints
    # (middleware substitutes ["Tous"] when unset).
    allowed_endpoints = Column(Text, nullable=True)
    total_requests = Column(Integer, default=0, nullable=False)
    last_used_at = Column(DateTime, nullable=True)
    created_at = Column(DateTime, default=datetime.now, nullable=False)
    expires_at = Column(DateTime, nullable=True)  # NULL = never expires
    revoked_at = Column(DateTime, nullable=True)  # set when the key is revoked

    def __repr__(self):
        return f"<ApiKey(name='{self.name}', prefix='{self.key_prefix}', active={self.is_active})>"
class SwaggerUser(Base):
    """User account allowed to access the Swagger/OpenAPI documentation."""

    __tablename__ = "swagger_users"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    username = Column(String(100), unique=True, nullable=False, index=True)
    hashed_password = Column(String(255), nullable=False)
    full_name = Column(String(255), nullable=True)
    email = Column(String(255), nullable=True)
    is_active = Column(Boolean, default=True, nullable=False)
    # JSON-encoded list of OpenAPI tags this user may see; NULL appears to
    # mean "unrestricted" (SwaggerAuthMiddleware logs it as 'ALL') — confirm.
    allowed_tags = Column(Text, nullable=True)
    created_at = Column(DateTime, default=datetime.now, nullable=False)
    last_login = Column(DateTime, nullable=True)

    @property
    def allowed_tags_list(self) -> Optional[List[str]]:
        """Decode allowed_tags; returns None when unset or not valid JSON."""
        if self.allowed_tags:
            try:
                return json.loads(self.allowed_tags)
            except json.JSONDecodeError:
                # Malformed JSON is treated the same as "no restriction".
                return None
        return None

    @allowed_tags_list.setter
    def allowed_tags_list(self, tags: Optional[List[str]]):
        # Persist as JSON text; None clears the stored value entirely.
        self.allowed_tags = json.dumps(tags) if tags is not None else None

    def __repr__(self):
        return f"<SwaggerUser(username='{self.username}', active={self.is_active})>"

View file

@ -29,10 +29,14 @@ class UniversignTransactionStatus(str, Enum):
class UniversignSignerStatus(str, Enum): class UniversignSignerStatus(str, Enum):
WAITING = "waiting" WAITING = "waiting"
OPEN = "open"
VIEWED = "viewed" VIEWED = "viewed"
SIGNED = "signed" SIGNED = "signed"
COMPLETED = "completed"
REFUSED = "refused" REFUSED = "refused"
EXPIRED = "expired" EXPIRED = "expired"
STALLED = "stalled"
UNKNOWN = "unknown"
class LocalDocumentStatus(str, Enum): class LocalDocumentStatus(str, Enum):
@ -57,8 +61,7 @@ class SageDocumentType(int, Enum):
class UniversignTransaction(Base): class UniversignTransaction(Base):
__tablename__ = "universign_transactions" __tablename__ = "universign_transactions"
# === IDENTIFIANTS === id = Column(String(36), primary_key=True)
id = Column(String(36), primary_key=True) # UUID local
transaction_id = Column( transaction_id = Column(
String(255), String(255),
unique=True, unique=True,
@ -67,7 +70,6 @@ class UniversignTransaction(Base):
comment="ID Universign (ex: tr_abc123)", comment="ID Universign (ex: tr_abc123)",
) )
# === LIEN AVEC LE DOCUMENT SAGE ===
sage_document_id = Column( sage_document_id = Column(
String(50), String(50),
nullable=False, nullable=False,
@ -78,7 +80,6 @@ class UniversignTransaction(Base):
SQLEnum(SageDocumentType), nullable=False, comment="Type de document Sage" SQLEnum(SageDocumentType), nullable=False, comment="Type de document Sage"
) )
# === STATUTS UNIVERSIGN (SOURCE DE VÉRITÉ) ===
universign_status = Column( universign_status = Column(
SQLEnum(UniversignTransactionStatus), SQLEnum(UniversignTransactionStatus),
nullable=False, nullable=False,
@ -90,7 +91,6 @@ class UniversignTransaction(Base):
DateTime, nullable=True, comment="Dernière MAJ du statut Universign" DateTime, nullable=True, comment="Dernière MAJ du statut Universign"
) )
# === STATUT LOCAL (DÉDUIT) ===
local_status = Column( local_status = Column(
SQLEnum(LocalDocumentStatus), SQLEnum(LocalDocumentStatus),
nullable=False, nullable=False,
@ -99,22 +99,35 @@ class UniversignTransaction(Base):
comment="Statut métier simplifié pour l'UI", comment="Statut métier simplifié pour l'UI",
) )
# === URLS ET MÉTADONNÉES UNIVERSIGN ===
signer_url = Column(Text, nullable=True, comment="URL de signature") signer_url = Column(Text, nullable=True, comment="URL de signature")
document_url = Column(Text, nullable=True, comment="URL du document signé") document_url = Column(Text, nullable=True, comment="URL du document signé")
signed_document_path = Column(
Text, nullable=True, comment="Chemin local du PDF signé"
)
signed_document_downloaded_at = Column(
DateTime, nullable=True, comment="Date de téléchargement du document"
)
signed_document_size_bytes = Column(
Integer, nullable=True, comment="Taille du fichier en octets"
)
download_attempts = Column(
Integer, default=0, comment="Nombre de tentatives de téléchargement"
)
download_error = Column(
Text, nullable=True, comment="Dernière erreur de téléchargement"
)
certificate_url = Column(Text, nullable=True, comment="URL du certificat") certificate_url = Column(Text, nullable=True, comment="URL du certificat")
# === SIGNATAIRES ===
signers_data = Column( signers_data = Column(
Text, nullable=True, comment="JSON des signataires (snapshot)" Text, nullable=True, comment="JSON des signataires (snapshot)"
) )
# === INFORMATIONS MÉTIER ===
requester_email = Column(String(255), nullable=True) requester_email = Column(String(255), nullable=True)
requester_name = Column(String(255), nullable=True) requester_name = Column(String(255), nullable=True)
document_name = Column(String(500), nullable=True) document_name = Column(String(500), nullable=True)
# === DATES CLÉS ===
created_at = Column( created_at = Column(
DateTime, DateTime,
default=datetime.now, default=datetime.now,
@ -129,14 +142,12 @@ class UniversignTransaction(Base):
expired_at = Column(DateTime, nullable=True) expired_at = Column(DateTime, nullable=True)
canceled_at = Column(DateTime, nullable=True) canceled_at = Column(DateTime, nullable=True)
# === SYNCHRONISATION ===
last_synced_at = Column( last_synced_at = Column(
DateTime, nullable=True, comment="Dernière sync réussie avec Universign" DateTime, nullable=True, comment="Dernière sync réussie avec Universign"
) )
sync_attempts = Column(Integer, default=0, comment="Nombre de tentatives de sync") sync_attempts = Column(Integer, default=0, comment="Nombre de tentatives de sync")
sync_error = Column(Text, nullable=True) sync_error = Column(Text, nullable=True)
# === FLAGS ===
is_test = Column( is_test = Column(
Boolean, default=False, comment="Transaction en environnement .alpha" Boolean, default=False, comment="Transaction en environnement .alpha"
) )
@ -145,7 +156,6 @@ class UniversignTransaction(Base):
) )
webhook_received = Column(Boolean, default=False, comment="Webhook Universign reçu") webhook_received = Column(Boolean, default=False, comment="Webhook Universign reçu")
# === RELATION ===
signers = relationship( signers = relationship(
"UniversignSigner", back_populates="transaction", cascade="all, delete-orphan" "UniversignSigner", back_populates="transaction", cascade="all, delete-orphan"
) )
@ -153,7 +163,6 @@ class UniversignTransaction(Base):
"UniversignSyncLog", back_populates="transaction", cascade="all, delete-orphan" "UniversignSyncLog", back_populates="transaction", cascade="all, delete-orphan"
) )
# === INDEXES COMPOSITES ===
__table_args__ = ( __table_args__ = (
Index("idx_sage_doc", "sage_document_id", "sage_document_type"), Index("idx_sage_doc", "sage_document_id", "sage_document_type"),
Index("idx_sync_status", "needs_sync", "universign_status"), Index("idx_sync_status", "needs_sync", "universign_status"),
@ -169,10 +178,6 @@ class UniversignTransaction(Base):
class UniversignSigner(Base): class UniversignSigner(Base):
"""
Détail de chaque signataire d'une transaction
"""
__tablename__ = "universign_signers" __tablename__ = "universign_signers"
id = Column(String(36), primary_key=True) id = Column(String(36), primary_key=True)
@ -183,33 +188,27 @@ class UniversignSigner(Base):
index=True, index=True,
) )
# === DONNÉES SIGNATAIRE ===
email = Column(String(255), nullable=False, index=True) email = Column(String(255), nullable=False, index=True)
name = Column(String(255), nullable=True) name = Column(String(255), nullable=True)
phone = Column(String(50), nullable=True) phone = Column(String(50), nullable=True)
# === STATUT ===
status = Column( status = Column(
SQLEnum(UniversignSignerStatus), SQLEnum(UniversignSignerStatus),
default=UniversignSignerStatus.WAITING, default=UniversignSignerStatus.WAITING,
nullable=False, nullable=False,
) )
# === ACTIONS ===
viewed_at = Column(DateTime, nullable=True) viewed_at = Column(DateTime, nullable=True)
signed_at = Column(DateTime, nullable=True) signed_at = Column(DateTime, nullable=True)
refused_at = Column(DateTime, nullable=True) refused_at = Column(DateTime, nullable=True)
refusal_reason = Column(Text, nullable=True) refusal_reason = Column(Text, nullable=True)
# === MÉTADONNÉES ===
ip_address = Column(String(45), nullable=True) ip_address = Column(String(45), nullable=True)
user_agent = Column(Text, nullable=True) user_agent = Column(Text, nullable=True)
signature_method = Column(String(50), nullable=True) signature_method = Column(String(50), nullable=True)
# === ORDRE ===
order_index = Column(Integer, default=0) order_index = Column(Integer, default=0)
# === RELATION ===
transaction = relationship("UniversignTransaction", back_populates="signers") transaction = relationship("UniversignTransaction", back_populates="signers")
def __repr__(self): def __repr__(self):
@ -217,10 +216,6 @@ class UniversignSigner(Base):
class UniversignSyncLog(Base): class UniversignSyncLog(Base):
"""
Journal de toutes les synchronisations (audit trail)
"""
__tablename__ = "universign_sync_logs" __tablename__ = "universign_sync_logs"
id = Column(Integer, primary_key=True, autoincrement=True) id = Column(Integer, primary_key=True, autoincrement=True)
@ -231,22 +226,18 @@ class UniversignSyncLog(Base):
index=True, index=True,
) )
# === SYNC INFO ===
sync_type = Column(String(50), nullable=False, comment="webhook, polling, manual") sync_type = Column(String(50), nullable=False, comment="webhook, polling, manual")
sync_timestamp = Column(DateTime, default=datetime.now, nullable=False, index=True) sync_timestamp = Column(DateTime, default=datetime.now, nullable=False, index=True)
# === CHANGEMENTS DÉTECTÉS ===
previous_status = Column(String(50), nullable=True) previous_status = Column(String(50), nullable=True)
new_status = Column(String(50), nullable=True) new_status = Column(String(50), nullable=True)
changes_detected = Column(Text, nullable=True, comment="JSON des changements") changes_detected = Column(Text, nullable=True, comment="JSON des changements")
# === RÉSULTAT ===
success = Column(Boolean, default=True) success = Column(Boolean, default=True)
error_message = Column(Text, nullable=True) error_message = Column(Text, nullable=True)
http_status_code = Column(Integer, nullable=True) http_status_code = Column(Integer, nullable=True)
response_time_ms = Column(Integer, nullable=True) response_time_ms = Column(Integer, nullable=True)
# === RELATION ===
transaction = relationship("UniversignTransaction", back_populates="sync_logs") transaction = relationship("UniversignTransaction", back_populates="sync_logs")
def __repr__(self): def __repr__(self):
@ -264,9 +255,8 @@ class UniversignConfig(Base):
) )
api_url = Column(String(500), nullable=False) api_url = Column(String(500), nullable=False)
api_key = Column(String(500), nullable=False, comment="⚠️ À chiffrer") api_key = Column(String(500), nullable=False, comment="À chiffrer")
# === OPTIONS ===
webhook_url = Column(String(500), nullable=True) webhook_url = Column(String(500), nullable=True)
webhook_secret = Column(String(255), nullable=True) webhook_secret = Column(String(255), nullable=True)

24
docker-compose.dev.yml Normal file
View file

@ -0,0 +1,24 @@
services:
backend:
container_name: dev-sage-api
build:
context: .
target: dev
env_file: .env
volumes:
- .:/app
- /app/__pycache__
- ./data:/app/data
- ./logs:/app/logs
ports:
- "8000:8000"
environment:
ENV: development
DEBUG: "true"
DATABASE_URL: "sqlite+aiosqlite:///./data/sage_dataven.db"
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/"]
interval: 30s
timeout: 10s
retries: 3

23
docker-compose.prod.yml Normal file
View file

@ -0,0 +1,23 @@
services:
backend:
container_name: prod_sage_api
build:
context: .
target: prod
env_file: .env.production
volumes:
- ./data:/app/data
- ./logs:/app/logs
ports:
- "8004:8004"
environment:
ENV: production
DEBUG: "false"
DATABASE_URL: "sqlite+aiosqlite:///./data/sage_prod.db"
restart: always
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8004/"]
interval: 30s
timeout: 10s
retries: 5
start_period: 40s

View file

@ -0,0 +1,22 @@
services:
backend:
container_name: staging_sage_api
build:
context: .
target: staging
env_file: .env.staging
volumes:
- ./data:/app/data
- ./logs:/app/logs
ports:
- "8002:8002"
environment:
ENV: staging
DEBUG: "false"
DATABASE_URL: "sqlite+aiosqlite:///./data/sage_staging.db"
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8002/"]
interval: 30s
timeout: 10s
retries: 3

View file

@ -1,11 +1,4 @@
services: services:
vps-sage-api: backend:
build: . build:
container_name: vps-sage-api context: .
env_file: .env
volumes:
- ./data:/app/data
- ./logs:/app/logs
ports:
- "8000:8000"
restart: unless-stopped

File diff suppressed because it is too large Load diff

View file

@ -14,33 +14,14 @@ logger = logging.getLogger(__name__)
async def main(): async def main():
print("\n" + "=" * 60)
print("Initialisation de la base de données délocalisée")
print("=" * 60 + "\n")
try: try:
logger.info("Debut de l'initialisation") logger.info("Debut de l'initialisation")
await init_db() await init_db()
logger.info("Initialisation terminee") logger.info("Initialisation terminee")
print("\nInitialisation terminee")
print("\nBase de données créée avec succès !") print("\nBase de données créée avec succès !")
print("Fichier: sage_dataven.db")
print("\nTables créées:")
print(" |- email_logs (Journalisation emails)")
print(" |- signature_logs (Suivi signatures Universign)")
print(" |- workflow_logs (Transformations documents)")
print(" |- cache_metadata (Métadonnées cache)")
print(" |- audit_logs (Journal d'audit)")
print("\nProchaines étapes:")
print(" 1. Configurer le fichier .env avec les credentials")
print(" 2. Lancer la gateway Windows sur la machine Sage")
print(" 3. Lancer l'API VPS: uvicorn api:app --host 0.0.0.0 --port 8000")
print(" 4. Ou avec Docker : docker-compose up -d")
print(" 5. Tester: http://IP_DU_VPS:8000/docs")
print("\n" + "=" * 60 + "\n")
return True return True
except Exception as e: except Exception as e:

295
middleware/security.py Normal file
View file

@ -0,0 +1,295 @@
from fastapi import Request, status
from fastapi.responses import JSONResponse
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.types import ASGIApp, Receive, Send
from sqlalchemy import select
from typing import Callable, Optional
from datetime import datetime
import logging
import base64
import json
logger = logging.getLogger(__name__)
security = HTTPBasic()
class SwaggerAuthMiddleware:
    """Pure-ASGI middleware protecting the API documentation endpoints.

    Requests to /docs, /redoc and /openapi.json must carry HTTP Basic
    credentials matching an active row of the swagger_users table. On
    success, a plain dict describing the user (id, username, allowed_tags)
    is stored in scope["state"]["swagger_user"] so downstream code (e.g.
    the OpenAPI schema filter) can tailor the docs per user.

    Fix over the original: residual emoji and stray leading spaces removed
    from log messages, per the project's "remove emojis from logs" cleanup.
    """

    # Prefix-matched paths that require Basic auth.
    PROTECTED_PATHS = ["/docs", "/redoc", "/openapi.json"]

    def __init__(self, app: ASGIApp):
        self.app = app

    async def __call__(self, scope, receive: Receive, send: Send):
        # Non-HTTP traffic (websocket, lifespan) cannot carry Basic auth.
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return

        request = Request(scope, receive=receive)
        path = request.url.path
        if not any(path.startswith(p) for p in self.PROTECTED_PATHS):
            await self.app(scope, receive, send)
            return

        auth_header = request.headers.get("Authorization")
        if not auth_header or not auth_header.startswith("Basic "):
            response = JSONResponse(
                status_code=status.HTTP_401_UNAUTHORIZED,
                content={"detail": "Authentification requise pour la documentation"},
                headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
            )
            await response(scope, receive, send)
            return

        try:
            # Header shape: "Basic <base64(user:password)>".
            # split(":", 1) keeps passwords that themselves contain ':'.
            encoded_credentials = auth_header.split(" ")[1]
            decoded_credentials = base64.b64decode(encoded_credentials).decode("utf-8")
            username, password = decoded_credentials.split(":", 1)
            credentials = HTTPBasicCredentials(username=username, password=password)

            swagger_user = await self._verify_credentials(credentials)
            if not swagger_user:
                response = JSONResponse(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    content={"detail": "Identifiants invalides"},
                    headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
                )
                await response(scope, receive, send)
                return

            # Expose the authenticated Swagger user to downstream handlers.
            if "state" not in scope:
                scope["state"] = {}
            scope["state"]["swagger_user"] = swagger_user
            logger.info(
                f"Swagger auth: {swagger_user['username']} - tags: {swagger_user.get('allowed_tags', 'ALL')}"
            )
        except Exception as e:
            logger.error(f"Erreur parsing auth header: {e}")
            response = JSONResponse(
                status_code=status.HTTP_401_UNAUTHORIZED,
                content={"detail": "Format d'authentification invalide"},
                headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
            )
            await response(scope, receive, send)
            return

        await self.app(scope, receive, send)

    async def _verify_credentials(
        self, credentials: HTTPBasicCredentials
    ) -> Optional[dict]:
        """Check username/password against swagger_users.

        Returns a plain dict (never the ORM object, which would outlive its
        session) on success; None on unknown user, inactive account, wrong
        password, or any database error. Updates last_login on success.
        """
        # Imported lazily to avoid circular imports at module load time.
        from database.db_config import async_session_factory
        from database.models.api_key import SwaggerUser
        from security.auth import verify_password

        try:
            async with async_session_factory() as session:
                result = await session.execute(
                    select(SwaggerUser).where(
                        SwaggerUser.username == credentials.username
                    )
                )
                swagger_user = result.scalar_one_or_none()
                if swagger_user and swagger_user.is_active:
                    if verify_password(
                        credentials.password, swagger_user.hashed_password
                    ):
                        swagger_user.last_login = datetime.now()
                        await session.commit()
                        logger.info(f"Accès Swagger autorisé: {credentials.username}")
                        return {
                            "id": swagger_user.id,
                            "username": swagger_user.username,
                            "allowed_tags": swagger_user.allowed_tags_list,
                            "is_active": swagger_user.is_active,
                        }
                logger.warning(f"Accès Swagger refusé: {credentials.username}")
                return None
        except Exception as e:
            # Fail closed: any infrastructure error denies access.
            logger.error(f"Erreur vérification credentials: {e}", exc_info=True)
            return None
class ApiKeyMiddlewareHTTP(BaseHTTPMiddleware):
    """HTTP middleware enforcing X-API-Key authentication on business routes.

    Resolution order per request:
      1. excluded paths pass through untouched;
      2. "Bearer sdk_live_..." tokens are treated as misplaced API keys;
      3. other Bearer tokens are delegated to FastAPI's JWT dependencies;
      4. an X-API-Key header is validated (key, rate limit, endpoint ACL);
      5. no credentials at all → delegated to FastAPI route dependencies.

    Fix over the original: emoji and stray leading spaces removed from log
    messages (per the project's "remove emojis from logs" cleanup), and a
    separator restored in the "ACCÈS AUTORISÉ" message where name and
    method were concatenated without one.
    """

    # Paths (and their sub-paths) that never require an API key.
    EXCLUDED_PATHS = [
        "/docs",
        "/redoc",
        "/openapi.json",
        "/",
        "/health",
        "/auth",
        "/api-keys/verify",
        "/universign/webhook",
    ]

    def _is_excluded_path(self, path: str) -> bool:
        """Vérifie si le chemin est exclu de l'authentification API Key"""
        if path == "/":
            return True
        for excluded in self.EXCLUDED_PATHS:
            if excluded == "/":
                # "/" would otherwise prefix-match every path.
                continue
            if path == excluded or path.startswith(excluded + "/"):
                return True
        return False

    async def dispatch(self, request: Request, call_next: Callable):
        path = request.url.path
        method = request.method
        if self._is_excluded_path(path):
            return await call_next(request)

        auth_header = request.headers.get("Authorization")
        api_key_header = request.headers.get("X-API-Key")
        # Normalize an empty or whitespace-only X-API-Key header to "absent".
        if api_key_header:
            api_key_header = api_key_header.strip()
            if not api_key_header or api_key_header == "":
                api_key_header = None

        if auth_header and auth_header.startswith("Bearer "):
            token = auth_header.split(" ", 1)[1].strip()
            if token.startswith("sdk_live_"):
                # Client put an API key in the wrong header — accept it anyway.
                logger.warning(
                    "API Key envoyée dans Authorization au lieu de X-API-Key"
                )
                return await self._handle_api_key_auth(
                    request, token, path, method, call_next
                )
            logger.debug(f"JWT détecté pour {method} {path} → délégation à FastAPI")
            request.state.authenticated_via = "jwt"
            return await call_next(request)

        if api_key_header:
            logger.debug(f"API Key détectée pour {method} {path}")
            return await self._handle_api_key_auth(
                request, api_key_header, path, method, call_next
            )

        # No credentials: FastAPI's own route dependencies decide (401 etc.).
        logger.debug(f"Aucune auth pour {method} {path} → délégation à FastAPI")
        return await call_next(request)

    async def _handle_api_key_auth(
        self,
        request: Request,
        api_key: str,
        path: str,
        method: str,
        call_next: Callable,
    ):
        """Validate the key, enforce rate limit and endpoint ACL, then forward.

        On success, attaches the ApiKey object and auth method to
        request.state; on failure returns a 401/403/429 JSON response.
        """
        try:
            # Imported lazily to avoid circular imports at module load time.
            from database.db_config import async_session_factory
            from services.api_key import ApiKeyService

            async with async_session_factory() as session:
                service = ApiKeyService(session)
                api_key_obj = await service.verify_api_key(api_key)
                if not api_key_obj:
                    logger.warning(f"Clé API invalide: {method} {path}")
                    return JSONResponse(
                        status_code=status.HTTP_401_UNAUTHORIZED,
                        content={
                            "detail": "Clé API invalide ou expirée",
                            "hint": "Vérifiez votre clé X-API-Key",
                        },
                    )

                is_allowed, rate_info = await service.check_rate_limit(api_key_obj)
                if not is_allowed:
                    logger.warning(f"Rate limit: {api_key_obj.name}")
                    return JSONResponse(
                        status_code=status.HTTP_429_TOO_MANY_REQUESTS,
                        content={"detail": "Rate limit dépassé"},
                        headers={
                            "X-RateLimit-Limit": str(rate_info["limit"]),
                            "X-RateLimit-Remaining": "0",
                        },
                    )

                has_access = await service.check_endpoint_access(api_key_obj, path)
                if not has_access:
                    # NULL allowed_endpoints means unrestricted ("Tous").
                    allowed = (
                        json.loads(api_key_obj.allowed_endpoints)
                        if api_key_obj.allowed_endpoints
                        else ["Tous"]
                    )
                    logger.warning(
                        f"ACCÈS REFUSÉ: {api_key_obj.name}\n"
                        f" Endpoint demandé: {path}\n"
                        f" Endpoints autorisés: {allowed}"
                    )
                    return JSONResponse(
                        status_code=status.HTTP_403_FORBIDDEN,
                        content={
                            "detail": "Accès non autorisé à cet endpoint",
                            "endpoint_requested": path,
                            "api_key_name": api_key_obj.name,
                            "allowed_endpoints": allowed,
                            "hint": "Cette clé API n'a pas accès à cet endpoint.",
                        },
                    )

                request.state.api_key = api_key_obj
                request.state.authenticated_via = "api_key"
                logger.info(f"ACCÈS AUTORISÉ: {api_key_obj.name} - {method} {path}")
                return await call_next(request)
        except Exception as e:
            logger.error(f"Erreur validation API Key: {e}", exc_info=True)
            return JSONResponse(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                content={"detail": f"Erreur interne: {str(e)}"},
            )
# Backward-compatible alias kept for callers importing the old name.
ApiKeyMiddleware = ApiKeyMiddlewareHTTP


def get_api_key_from_request(request: Request) -> Optional["ApiKey"]:
    """Return the ApiKey ORM object the middleware attached, or None.

    Fix: the original annotated the return as bare ``Optional`` (no type
    argument); a string forward reference is used because the ApiKey model
    is only imported lazily inside the middleware.
    """
    return getattr(request.state, "api_key", None)


def get_auth_method(request: Request) -> str:
    """Return how the request authenticated: "api_key", "jwt" or "none"."""
    return getattr(request.state, "authenticated_via", "none")


def get_swagger_user_from_request(request: Request) -> Optional[dict]:
    """Return the Swagger user dict set by SwaggerAuthMiddleware, or None."""
    return getattr(request.state, "swagger_user", None)


__all__ = [
    "SwaggerAuthMiddleware",
    "ApiKeyMiddlewareHTTP",
    "ApiKeyMiddleware",
    "get_api_key_from_request",
    "get_auth_method",
    "get_swagger_user_from_request",
]

154
routes/api_keys.py Normal file
View file

@ -0,0 +1,154 @@
from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.ext.asyncio import AsyncSession
import logging
from database import get_session, User
from core.dependencies import get_current_user, require_role
from services.api_key import ApiKeyService, api_key_to_response
from schemas.api_key import (
ApiKeyCreate,
ApiKeyCreatedResponse,
ApiKeyResponse,
ApiKeyList,
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api-keys", tags=["API Keys Management"])
@router.post(
    "",
    response_model=ApiKeyCreatedResponse,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(require_role("admin", "super_admin"))],
)
async def create_api_key(
    data: ApiKeyCreate,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """Create a new API key (admin / super_admin only, enforced by the
    route-level require_role dependency).

    Returns the stored metadata plus the plaintext key. The plaintext is
    injected into the response here and is presumably never retrievable
    again (the model stores only key_hash) — confirm in ApiKeyService.
    """
    service = ApiKeyService(session)
    api_key_obj, api_key_plain = await service.create_api_key(
        name=data.name,
        description=data.description,
        created_by=user.email,
        user_id=user.id,
        expires_in_days=data.expires_in_days,
        rate_limit_per_minute=data.rate_limit_per_minute,
        allowed_endpoints=data.allowed_endpoints,
    )
    logger.info(f" Clé API créée par {user.email}: {data.name}")
    response_data = api_key_to_response(api_key_obj)
    # One-time disclosure of the plaintext key in the creation response.
    response_data["api_key"] = api_key_plain
    return ApiKeyCreatedResponse(**response_data)
@router.get("", response_model=ApiKeyList)
async def list_api_keys(
    include_revoked: bool = Query(False, description="Inclure les clés révoquées"),
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """List API keys: admins see every key, other users only their own."""
    is_admin = user.role in ["admin", "super_admin"]
    # A None owner filter tells the service to return all keys.
    owner_filter = user.id if not is_admin else None
    service = ApiKeyService(session)
    keys = await service.list_api_keys(
        include_revoked=include_revoked, user_id=owner_filter
    )
    responses = []
    for key in keys:
        responses.append(ApiKeyResponse(**api_key_to_response(key)))
    return ApiKeyList(total=len(responses), items=responses)
@router.get("/{key_id}", response_model=ApiKeyResponse)
async def get_api_key(
    key_id: str,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """Fetch a single API key by id (404 unknown, 403 not owner)."""
    found = await ApiKeyService(session).get_by_id(key_id)
    if not found:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Clé API {key_id} introuvable",
        )
    # Non-admins may only read keys they own.
    is_admin = user.role in ["admin", "super_admin"]
    if not is_admin and found.user_id != user.id:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Accès refusé à cette clé",
        )
    return ApiKeyResponse(**api_key_to_response(found))
@router.delete("/{key_id}", status_code=status.HTTP_200_OK)
async def revoke_api_key(
    key_id: str,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """Revoke an API key.

    Admins may revoke any key; other users only keys they own.

    Raises:
        HTTPException: 404 if the key is unknown, 403 if not owned by the
            caller, 500 if the service reports the revocation failed.
    """
    service = ApiKeyService(session)
    api_key_obj = await service.get_by_id(key_id)
    if not api_key_obj:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Clé API {key_id} introuvable",
        )
    # Ownership check: non-admins can only revoke their own keys.
    if user.role not in ["admin", "super_admin"]:
        if api_key_obj.user_id != user.id:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Accès refusé à cette clé",
            )
    success = await service.revoke_api_key(key_id)
    if not success:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Erreur lors de la révocation",
        )
    logger.info(f" Clé API révoquée par {user.email}: {api_key_obj.name}")
    return {
        "success": True,
        "message": f"Clé API '{api_key_obj.name}' révoquée avec succès",
    }
@router.post("/verify", status_code=status.HTTP_200_OK)
async def verify_api_key_endpoint(
    api_key: str = Query(..., description="Clé API à vérifier"),
    session: AsyncSession = Depends(get_session),
):
    """Check whether an API key is currently valid.

    Always answers 200; validity is carried in the "valid" field so the
    probe itself never needs authentication.
    """
    verified = await ApiKeyService(session).verify_api_key(api_key)
    if not verified:
        return {
            "valid": False,
            "message": "Clé API invalide, expirée ou révoquée",
        }
    return {
        "valid": True,
        "message": "Clé API valide",
        "key_name": verified.name,
        "rate_limit": verified.rate_limit_per_minute,
        "expires_at": verified.expires_at,
    }

View file

@ -7,6 +7,7 @@ from typing import Optional
import uuid import uuid
from database import get_session, User, RefreshToken, LoginAttempt from database import get_session, User, RefreshToken, LoginAttempt
from core.dependencies import get_current_user
from security.auth import ( from security.auth import (
hash_password, hash_password,
verify_password, verify_password,
@ -19,7 +20,6 @@ from security.auth import (
hash_token, hash_token,
) )
from services.email_service import AuthEmailService from services.email_service import AuthEmailService
from core.dependencies import get_current_user
from config.config import settings from config.config import settings
import logging import logging
@ -101,7 +101,7 @@ async def check_rate_limit(
) )
failed_attempts = result.scalars().all() failed_attempts = result.scalars().all()
if len(failed_attempts) >= 5: if len(failed_attempts) >= 15:
return False, "Trop de tentatives échouées. Réessayez dans 15 minutes." return False, "Trop de tentatives échouées. Réessayez dans 15 minutes."
return True, "" return True, ""
@ -286,7 +286,7 @@ async def login(
if user: if user:
user.failed_login_attempts += 1 user.failed_login_attempts += 1
if user.failed_login_attempts >= 5: if user.failed_login_attempts >= 15:
user.locked_until = datetime.now() + timedelta(minutes=15) user.locked_until = datetime.now() + timedelta(minutes=15)
await session.commit() await session.commit()
raise HTTPException( raise HTTPException(
@ -510,7 +510,7 @@ async def logout(
token_record.revoked_at = datetime.now() token_record.revoked_at = datetime.now()
await session.commit() await session.commit()
logger.info(f"👋 Déconnexion: {user.email}") logger.info(f" Déconnexion: {user.email}")
return {"success": True, "message": "Déconnexion réussie"} return {"success": True, "message": "Déconnexion réussie"}

158
routes/enterprise.py Normal file
View file

@ -0,0 +1,158 @@
from fastapi import APIRouter, HTTPException, Query, Path
import httpx
import logging
from datetime import datetime
from schemas import EntrepriseSearch, EntrepriseSearchResponse
from utils.enterprise import (
calculer_tva_intracommunautaire,
mapper_resultat_api,
rechercher_entreprise_api,
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/entreprises", tags=["Entreprises"])
@router.get("/search", response_model=EntrepriseSearchResponse)
async def rechercher_entreprise(
    q: str = Query(..., min_length=2, description="Nom d'entreprise, SIREN ou SIRET"),
    per_page: int = Query(5, ge=1, le=25, description="Nombre de résultats (max 25)"),
):
    """Search companies by name, SIREN or SIRET via the external registry API.

    Returns an EntrepriseSearchResponse; an empty result set yields
    total_results=0 rather than a 404.

    Raises:
        HTTPException: 500 wrapping any unexpected error; HTTPExceptions
            raised by the helpers are re-raised untouched.
    """
    try:
        logger.info(f" Recherche entreprise: '{q}'")
        api_response = await rechercher_entreprise_api(q, per_page)
        resultats_api = api_response.get("results", [])
        if not resultats_api:
            logger.info(f"Aucun résultat pour: {q}")
            return EntrepriseSearchResponse(total_results=0, results=[], query=q)
        entreprises = []
        for data in resultats_api:
            # mapper_resultat_api may return a falsy value for unusable rows;
            # those are silently dropped from the response.
            entreprise = mapper_resultat_api(data)
            if entreprise:
                entreprises.append(entreprise)
        logger.info(f" {len(entreprises)} résultat(s) trouvé(s)")
        return EntrepriseSearchResponse(
            total_results=len(entreprises), results=entreprises, query=q
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Erreur recherche entreprise: {e}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Erreur lors de la recherche: {str(e)}"
        )
@router.get("/siren/{siren}", response_model=EntrepriseSearch)
async def lire_entreprise_par_siren(
    siren: str = Path(
        ...,
        min_length=9,
        max_length=9,
        pattern=r"^\d{9}$",
        description="Numéro SIREN (9 chiffres)",
    ),
):
    """Look up a single company by its 9-digit SIREN.

    Raises:
        HTTPException: 404 when no company matches the SIREN, 500 when the
            mapping fails or an unexpected error occurs.
    """
    try:
        logger.info(f"Lecture entreprise SIREN: {siren}")
        api_response = await rechercher_entreprise_api(siren, per_page=1)
        resultats = api_response.get("results", [])
        if not resultats:
            raise HTTPException(
                status_code=404,
                detail=f"Aucune entreprise trouvée pour le SIREN {siren}",
            )
        entreprise_data = resultats[0]
        # The search endpoint is fuzzy: reject a best-match whose SIREN differs.
        if entreprise_data.get("siren") != siren:
            raise HTTPException(status_code=404, detail=f"SIREN {siren} introuvable")
        entreprise = mapper_resultat_api(entreprise_data)
        if not entreprise:
            raise HTTPException(
                status_code=500,
                detail="Erreur lors du traitement des données entreprise",
            )
        if not entreprise.is_active:
            # Stray leading space (emoji-removal leftover) dropped from the log.
            logger.warning(f"Entreprise CESSÉE: {siren}")
        return entreprise
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Erreur lecture SIREN {siren}: {e}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Erreur lors de la récupération: {str(e)}"
        )
@router.get("/tva/{siren}")
async def calculer_tva(
    siren: str = Path(
        ...,
        min_length=9,
        max_length=9,
        pattern=r"^\d{9}$",
        description="Numéro SIREN (9 chiffres)",
    ),
):
    """Compute the French intra-community VAT number for a SIREN.

    Raises:
        HTTPException: 400 when the SIREN fails the VAT-key computation.
    """
    numero_tva = calculer_tva_intracommunautaire(siren)
    if numero_tva:
        return {
            "siren": siren,
            "vat_number": numero_tva,
            "format": "FR + Clé (2 chiffres) + SIREN (9 chiffres)",
        }
    raise HTTPException(status_code=400, detail=f"SIREN invalide: {siren}")
@router.get("/health")
async def health_check_api_sirene():
    """Probe the public Recherche Entreprises API and report its availability.

    Returns a small status document: ``healthy`` on HTTP 200, ``degraded``
    on any other HTTP status, ``unhealthy`` when the request itself fails.
    Never raises.
    """
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(
                "https://recherche-entreprises.api.gouv.fr/search",
                params={"q": "test", "per_page": 1},
            )
            if response.status_code != 200:
                return {
                    "status": "degraded",
                    "api_sirene": f"statut {response.status_code}",
                    "timestamp": datetime.now().isoformat(),
                }
            return {
                "status": "healthy",
                "api_sirene": "disponible",
                "response_time_ms": response.elapsed.total_seconds() * 1000,
                "timestamp": datetime.now().isoformat(),
            }
    except Exception as e:
        logger.error(f"Health check failed: {e}")
        return {
            "status": "unhealthy",
            "api_sirene": "indisponible",
            "error": str(e),
            "timestamp": datetime.now().isoformat(),
        }

View file

@ -4,8 +4,8 @@ from sqlalchemy import select, func
from sqlalchemy.orm import selectinload from sqlalchemy.orm import selectinload
from typing import List, Optional from typing import List, Optional
from datetime import datetime from datetime import datetime
from pydantic import BaseModel, EmailStr
import logging import logging
from core.dependencies import get_current_user
from data.data import templates_signature_email from data.data import templates_signature_email
from email_queue import email_queue from email_queue import email_queue
from database import UniversignSignerStatus, UniversignTransactionStatus, get_session from database import UniversignSignerStatus, UniversignTransactionStatus, get_session
@ -14,72 +14,86 @@ from database import (
UniversignSigner, UniversignSigner,
UniversignSyncLog, UniversignSyncLog,
LocalDocumentStatus, LocalDocumentStatus,
SageDocumentType,
) )
import os
from pathlib import Path
import json
from services.universign_sync import UniversignSyncService from services.universign_sync import UniversignSyncService
from config.config import settings from config.config import settings
from utils.generic_functions import normaliser_type_doc from utils.generic_functions import normaliser_type_doc
from utils.universign_status_mapping import get_status_message from utils.universign_status_mapping import get_status_message, map_universign_to_local
from database.models.email import EmailLog from database.models.email import EmailLog
from database.enum.status import StatutEmail from database.enum.status import StatutEmail
from schemas import (
SyncStatsResponse,
CreateSignatureRequest,
TransactionResponse,
)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
router = APIRouter(prefix="/universign", tags=["Universign"]) router = APIRouter(
prefix="/universign",
tags=["Universign"],
)
sync_service = UniversignSyncService( sync_service = UniversignSyncService(
api_url=settings.universign_api_url, api_key=settings.universign_api_key api_url=settings.universign_api_url, api_key=settings.universign_api_key
) )
class CreateSignatureRequest(BaseModel):
"""Demande de création d'une signature"""
sage_document_id: str
sage_document_type: SageDocumentType
signer_email: EmailStr
signer_name: str
document_name: Optional[str] = None
class TransactionResponse(BaseModel):
"""Réponse détaillée d'une transaction"""
id: str
transaction_id: str
sage_document_id: str
sage_document_type: str
universign_status: str
local_status: str
local_status_label: str
signer_url: Optional[str]
document_url: Optional[str]
created_at: datetime
sent_at: Optional[datetime]
signed_at: Optional[datetime]
last_synced_at: Optional[datetime]
needs_sync: bool
signers: List[dict]
class SyncStatsResponse(BaseModel):
"""Statistiques de synchronisation"""
total_transactions: int
pending_sync: int
signed: int
in_progress: int
refused: int
expired: int
last_sync_at: Optional[datetime]
@router.post("/signatures/create", response_model=TransactionResponse) @router.post("/signatures/create", response_model=TransactionResponse)
async def create_signature( async def create_signature(
request: CreateSignatureRequest, session: AsyncSession = Depends(get_session) request: CreateSignatureRequest, session: AsyncSession = Depends(get_session)
): ):
try: try:
logger.info(
f"Vérification doublon pour: {request.sage_document_id} "
f"(type: {request.sage_document_type.name})"
)
existing_query = select(UniversignTransaction).where(
UniversignTransaction.sage_document_id == request.sage_document_id,
UniversignTransaction.sage_document_type == request.sage_document_type,
)
existing_result = await session.execute(existing_query)
all_existing = existing_result.scalars().all()
if all_existing:
logger.warning(
f"{len(all_existing)} transaction(s) existante(s) trouvée(s)"
)
active_txs = [
tx
for tx in all_existing
if tx.local_status
not in [
LocalDocumentStatus.SIGNED,
LocalDocumentStatus.REJECTED,
LocalDocumentStatus.EXPIRED,
LocalDocumentStatus.ERROR,
]
]
if active_txs:
active_tx = active_txs[0]
logger.error(
f"Transaction active existante: {active_tx.transaction_id} "
f"(statut: {active_tx.local_status.value})"
)
raise HTTPException(
400,
f"Une demande de signature est déjà en cours pour {request.sage_document_id} "
f"(transaction: {active_tx.transaction_id}, statut: {active_tx.local_status.value}). "
f"Utilisez GET /universign/documents/{request.sage_document_id}/signatures pour voir toutes les transactions.",
)
logger.info(
"Toutes les transactions existantes sont finales, création autorisée"
)
logger.info(f"Génération PDF: {request.sage_document_id}")
pdf_bytes = email_queue._generate_pdf( pdf_bytes = email_queue._generate_pdf(
request.sage_document_id, normaliser_type_doc(request.sage_document_type) request.sage_document_id, normaliser_type_doc(request.sage_document_type)
) )
@ -87,12 +101,15 @@ async def create_signature(
if not pdf_bytes: if not pdf_bytes:
raise HTTPException(400, "Échec génération PDF") raise HTTPException(400, "Échec génération PDF")
# === CRÉATION TRANSACTION UNIVERSIGN === logger.info(f"PDF généré: {len(pdf_bytes)} octets")
import requests import requests
import uuid import uuid
auth = (settings.universign_api_key, "") auth = (settings.universign_api_key, "")
logger.info("Création transaction Universign...")
resp = requests.post( resp = requests.post(
f"{settings.universign_api_url}/transactions", f"{settings.universign_api_url}/transactions",
auth=auth, auth=auth,
@ -105,10 +122,13 @@ async def create_signature(
) )
if resp.status_code != 200: if resp.status_code != 200:
logger.error(f"Erreur Universign (création): {resp.text}")
raise HTTPException(500, f"Erreur Universign: {resp.status_code}") raise HTTPException(500, f"Erreur Universign: {resp.status_code}")
universign_tx_id = resp.json().get("id") universign_tx_id = resp.json().get("id")
logger.info(f"Transaction Universign créée: {universign_tx_id}")
logger.info("Upload PDF...")
files = { files = {
"file": (f"{request.sage_document_id}.pdf", pdf_bytes, "application/pdf") "file": (f"{request.sage_document_id}.pdf", pdf_bytes, "application/pdf")
} }
@ -117,10 +137,13 @@ async def create_signature(
) )
if resp.status_code not in [200, 201]: if resp.status_code not in [200, 201]:
logger.error(f"Erreur upload: {resp.text}")
raise HTTPException(500, "Erreur upload PDF") raise HTTPException(500, "Erreur upload PDF")
file_id = resp.json().get("id") file_id = resp.json().get("id")
logger.info(f"PDF uploadé: {file_id}")
logger.info("Attachement document...")
resp = requests.post( resp = requests.post(
f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents", f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents",
auth=auth, auth=auth,
@ -133,6 +156,7 @@ async def create_signature(
document_id = resp.json().get("id") document_id = resp.json().get("id")
logger.info("Création champ signature...")
resp = requests.post( resp = requests.post(
f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents/{document_id}/fields", f"{settings.universign_api_url}/transactions/{universign_tx_id}/documents/{document_id}/fields",
auth=auth, auth=auth,
@ -145,6 +169,7 @@ async def create_signature(
field_id = resp.json().get("id") field_id = resp.json().get("id")
logger.info(f"Liaison signataire: {request.signer_email}")
resp = requests.post( resp = requests.post(
f"{settings.universign_api_url}/transactions/{universign_tx_id}/signatures", f"{settings.universign_api_url}/transactions/{universign_tx_id}/signatures",
auth=auth, auth=auth,
@ -155,6 +180,7 @@ async def create_signature(
if resp.status_code not in [200, 201]: if resp.status_code not in [200, 201]:
raise HTTPException(500, "Erreur liaison signataire") raise HTTPException(500, "Erreur liaison signataire")
logger.info("Démarrage transaction...")
resp = requests.post( resp = requests.post(
f"{settings.universign_api_url}/transactions/{universign_tx_id}/start", f"{settings.universign_api_url}/transactions/{universign_tx_id}/start",
auth=auth, auth=auth,
@ -176,7 +202,8 @@ async def create_signature(
if not signer_url: if not signer_url:
raise HTTPException(500, "URL de signature non retournée") raise HTTPException(500, "URL de signature non retournée")
# === ENREGISTREMENT LOCAL === logger.info("URL de signature obtenue")
local_id = str(uuid.uuid4()) local_id = str(uuid.uuid4())
transaction = UniversignTransaction( transaction = UniversignTransaction(
@ -210,7 +237,10 @@ async def create_signature(
session.add(signer) session.add(signer)
await session.commit() await session.commit()
# === ENVOI EMAIL AVEC TEMPLATE === logger.info(
f"Transaction sauvegardée: {local_id} (Universign: {universign_tx_id})"
)
template = templates_signature_email["demande_signature"] template = templates_signature_email["demande_signature"]
type_labels = { type_labels = {
@ -265,7 +295,20 @@ async def create_signature(
email_queue.enqueue(email_log.id) email_queue.enqueue(email_log.id)
# === RÉPONSE === try:
from sage_client import sage_client
sage_client.changer_statut_document(
document_type_code=request.sage_document_type.value,
numero=request.sage_document_id,
nouveau_statut=1,
)
logger.info(
f"Statut Sage mis à jour: {request.sage_document_id} → Confirmé (1)"
)
except Exception as e:
logger.warning(f"Impossible de mettre à jour le statut Sage: {e}")
return TransactionResponse( return TransactionResponse(
id=transaction.id, id=transaction.id,
transaction_id=transaction.transaction_id, transaction_id=transaction.transaction_id,
@ -345,6 +388,15 @@ async def list_transactions(
} }
for s in tx.signers for s in tx.signers
], ],
signed_document_available=bool(
tx.signed_document_path and Path(tx.signed_document_path).exists()
),
signed_document_downloaded_at=tx.signed_document_downloaded_at,
signed_document_size_kb=(
tx.signed_document_size_bytes / 1024
if tx.signed_document_size_bytes
else None
),
) )
for tx in transactions for tx in transactions
] ]
@ -391,6 +443,15 @@ async def get_transaction(
} }
for s in tx.signers for s in tx.signers
], ],
signed_document_available=bool(
tx.signed_document_path and Path(tx.signed_document_path).exists()
),
signed_document_downloaded_at=tx.signed_document_downloaded_at,
signed_document_size_kb=(
tx.signed_document_size_bytes / 1024
if tx.signed_document_size_bytes
else None
),
) )
@ -436,32 +497,107 @@ async def sync_all_transactions(
return {"success": True, "stats": stats, "timestamp": datetime.now().isoformat()} return {"success": True, "stats": stats, "timestamp": datetime.now().isoformat()}
@router.post("/webhook") @router.post("/webhook", dependencies=[])
@router.post("/webhook/") @router.post("/webhook/", dependencies=[])
async def webhook_universign( async def webhook_universign(
request: Request, session: AsyncSession = Depends(get_session) request: Request, session: AsyncSession = Depends(get_session)
): ):
try: try:
payload = await request.json() payload = await request.json()
logger.info( logger.info(f"Webhook Universign reçu - Type: {payload.get('type', 'unknown')}")
f"Webhook reçu: {payload.get('event')} - {payload.get('transaction_id')}" logger.debug(f"Payload complet: {json.dumps(payload, indent=2)}")
)
success, error = await sync_service.process_webhook(session, payload) transaction_id = None
if payload.get("type", "").startswith("transaction.") and "payload" in payload:
nested_object = payload.get("payload", {}).get("object", {})
if nested_object.get("object") == "transaction":
transaction_id = nested_object.get("id")
logger.info(
f"Transaction ID extrait de payload.object.id: {transaction_id}"
)
elif payload.get("type", "").startswith("action."):
transaction_id = (
payload.get("payload", {}).get("object", {}).get("transaction_id")
)
logger.info(
f"Transaction ID extrait de payload.object.transaction_id: {transaction_id}"
)
elif payload.get("object") == "transaction":
transaction_id = payload.get("id")
logger.info(f"Transaction ID extrait direct: {transaction_id}")
elif "transaction" in payload:
transaction_id = payload.get("transaction", {}).get("id")
logger.info(f"Transaction ID extrait de transaction.id: {transaction_id}")
if not transaction_id:
logger.error(
f"Transaction ID introuvable dans webhook\n"
f"Type d'événement: {payload.get('type', 'unknown')}\n"
f"Clés racine: {list(payload.keys())}\n"
f"Payload simplifié: {json.dumps({k: v if k != 'payload' else '...' for k, v in payload.items()})}"
)
return {
"status": "error",
"message": "Transaction ID manquant dans webhook",
"event_type": payload.get("type"),
"event_id": payload.get("id"),
}, 400
logger.info(f"Transaction ID identifié: {transaction_id}")
query = select(UniversignTransaction).where(
UniversignTransaction.transaction_id == transaction_id
)
result = await session.execute(query)
tx = result.scalar_one_or_none()
if not tx:
logger.warning(
f"Transaction {transaction_id} inconnue en local\n"
f"Type d'événement: {payload.get('type')}\n"
f"Elle sera synchronisée au prochain polling"
)
return {
"status": "accepted",
"message": f"Transaction {transaction_id} non trouvée localement, sera synchronisée au prochain polling",
"transaction_id": transaction_id,
"event_type": payload.get("type"),
}
success, error = await sync_service.process_webhook(
session, payload, transaction_id
)
if not success: if not success:
logger.error(f"Erreur traitement webhook: {error}") logger.error(f"Erreur traitement webhook: {error}")
return {"status": "error", "message": error}, 500 return {
"status": "error",
"message": error,
"transaction_id": transaction_id,
}, 500
logger.info(
f"Webhook traité avec succès\n"
f"Transaction: {transaction_id}\n"
f"Nouveau statut: {tx.local_status.value if tx else 'unknown'}\n"
f"Type d'événement: {payload.get('type')}"
)
return { return {
"status": "processed", "status": "processed",
"event": payload.get("event"), "transaction_id": transaction_id,
"transaction_id": payload.get("transaction_id"), "local_status": tx.local_status.value if tx else None,
"event_type": payload.get("type"),
"event_id": payload.get("id"),
} }
except Exception as e: except Exception as e:
logger.error(f"Erreur webhook: {e}", exc_info=True) logger.error(f"Erreur critique webhook: {e}", exc_info=True)
return {"status": "error", "message": str(e)}, 500 return {"status": "error", "message": str(e)}, 500
@ -469,17 +605,14 @@ async def webhook_universign(
async def get_sync_stats(session: AsyncSession = Depends(get_session)): async def get_sync_stats(session: AsyncSession = Depends(get_session)):
"""Statistiques globales de synchronisation""" """Statistiques globales de synchronisation"""
# Total
total_query = select(func.count(UniversignTransaction.id)) total_query = select(func.count(UniversignTransaction.id))
total = (await session.execute(total_query)).scalar() total = (await session.execute(total_query)).scalar()
# En attente de sync
pending_query = select(func.count(UniversignTransaction.id)).where( pending_query = select(func.count(UniversignTransaction.id)).where(
UniversignTransaction.needs_sync UniversignTransaction.needs_sync
) )
pending = (await session.execute(pending_query)).scalar() pending = (await session.execute(pending_query)).scalar()
# Par statut
signed_query = select(func.count(UniversignTransaction.id)).where( signed_query = select(func.count(UniversignTransaction.id)).where(
UniversignTransaction.local_status == LocalDocumentStatus.SIGNED UniversignTransaction.local_status == LocalDocumentStatus.SIGNED
) )
@ -500,7 +633,6 @@ async def get_sync_stats(session: AsyncSession = Depends(get_session)):
) )
expired = (await session.execute(expired_query)).scalar() expired = (await session.execute(expired_query)).scalar()
# Dernière sync
last_sync_query = select(func.max(UniversignTransaction.last_synced_at)) last_sync_query = select(func.max(UniversignTransaction.last_synced_at))
last_sync = (await session.execute(last_sync_query)).scalar() last_sync = (await session.execute(last_sync_query)).scalar()
@ -521,7 +653,6 @@ async def get_transaction_logs(
limit: int = Query(50, le=500), limit: int = Query(50, le=500),
session: AsyncSession = Depends(get_session), session: AsyncSession = Depends(get_session),
): ):
# Trouver la transaction
tx_query = select(UniversignTransaction).where( tx_query = select(UniversignTransaction).where(
UniversignTransaction.transaction_id == transaction_id UniversignTransaction.transaction_id == transaction_id
) )
@ -531,7 +662,6 @@ async def get_transaction_logs(
if not tx: if not tx:
raise HTTPException(404, "Transaction introuvable") raise HTTPException(404, "Transaction introuvable")
# Logs
logs_query = ( logs_query = (
select(UniversignSyncLog) select(UniversignSyncLog)
.where(UniversignSyncLog.transaction_id == tx.id) .where(UniversignSyncLog.transaction_id == tx.id)
@ -558,3 +688,443 @@ async def get_transaction_logs(
for log in logs for log in logs
], ],
} }
@router.get("/documents/{sage_document_id}/signatures")
async def get_signatures_for_document(
    sage_document_id: str,
    session: AsyncSession = Depends(get_session),
):
    """List every signature transaction recorded for a Sage document,
    newest first."""
    stmt = (
        select(UniversignTransaction)
        .options(selectinload(UniversignTransaction.signers))
        .where(UniversignTransaction.sage_document_id == sage_document_id)
        .order_by(UniversignTransaction.created_at.desc())
    )
    rows = (await session.execute(stmt)).scalars().all()
    payload = []
    for tx in rows:
        payload.append(
            {
                "id": tx.id,
                "transaction_id": tx.transaction_id,
                "local_status": tx.local_status.value,
                "universign_status": (
                    tx.universign_status.value if tx.universign_status else None
                ),
                "created_at": tx.created_at.isoformat(),
                "signed_at": tx.signed_at.isoformat() if tx.signed_at else None,
                "signer_url": tx.signer_url,
                "signers_count": len(tx.signers),
            }
        )
    return payload
@router.delete("/documents/{sage_document_id}/duplicates")
async def cleanup_duplicate_signatures(
    sage_document_id: str,
    keep_latest: bool = Query(
        True, description="Garder la plus récente (True) ou la plus ancienne (False)"
    ),
    session: AsyncSession = Depends(get_session),
):
    """Delete all but one signature transaction for a Sage document.

    The kept transaction is the most recent one by default, or the oldest
    when ``keep_latest`` is False. Returns a summary of what was kept and
    what was deleted.
    """
    ordering = (
        UniversignTransaction.created_at.desc()
        if keep_latest
        else UniversignTransaction.created_at.asc()
    )
    stmt = (
        select(UniversignTransaction)
        .where(UniversignTransaction.sage_document_id == sage_document_id)
        .order_by(ordering)
    )
    found = (await session.execute(stmt)).scalars().all()
    if len(found) <= 1:
        return {
            "success": True,
            "message": "Aucun doublon trouvé",
            "kept": found[0].transaction_id if found else None,
            "deleted_count": 0,
        }
    kept = found[0]
    removed_ids = []
    for duplicate in found[1:]:
        removed_ids.append(duplicate.transaction_id)
        await session.delete(duplicate)
    await session.commit()
    logger.info(
        f"Nettoyage doublons {sage_document_id}: gardé {kept.transaction_id}, supprimé {removed_ids}"
    )
    return {
        "success": True,
        "document_id": sage_document_id,
        "kept": {
            "id": kept.id,
            "transaction_id": kept.transaction_id,
            "status": kept.local_status.value,
            "created_at": kept.created_at.isoformat(),
        },
        "deleted_count": len(removed_ids),
        "deleted_transaction_ids": removed_ids,
    }
@router.delete("/transactions/{transaction_id}")
async def delete_transaction(
    transaction_id: str,
    session: AsyncSession = Depends(get_session),
):
    """Delete a specific transaction identified by its Universign ID.

    Raises:
        HTTPException: 404 when no transaction matches ``transaction_id``.
    """
    query = select(UniversignTransaction).where(
        UniversignTransaction.transaction_id == transaction_id
    )
    result = await session.execute(query)
    tx = result.scalar_one_or_none()
    if not tx:
        raise HTTPException(404, f"Transaction {transaction_id} introuvable")
    # Capture the document id BEFORE delete/commit: after commit the ORM
    # instance is expired, and refreshing a deleted row fails (and implicit
    # attribute loads are not allowed on an async session anyway).
    document_id = tx.sage_document_id
    await session.delete(tx)
    await session.commit()
    logger.info(f"Transaction {transaction_id} supprimée")
    return {
        "success": True,
        "deleted_transaction_id": transaction_id,
        "document_id": document_id,
    }
@router.post("/cleanup/all-duplicates")
async def cleanup_all_duplicates(
    session: AsyncSession = Depends(get_session),
):
    """Remove duplicate signature transactions across every Sage document.

    For each document holding more than one transaction, a single one is
    kept: signed first, then in-progress, then pending; ties are broken by
    the most recent creation date. Every other transaction is deleted in a
    single commit.
    """
    from sqlalchemy import func

    # Documents that own more than one transaction.
    subquery = (
        select(
            UniversignTransaction.sage_document_id,
            func.count(UniversignTransaction.id).label("count"),
        )
        .group_by(UniversignTransaction.sage_document_id)
        .having(func.count(UniversignTransaction.id) > 1)
    ).subquery()
    duplicates_query = select(subquery.c.sage_document_id)
    duplicates_result = await session.execute(duplicates_query)
    duplicate_docs = [row[0] for row in duplicates_result.fetchall()]

    # Hoisted out of the loop: the ranking is invariant across documents.
    # Lower value = higher priority to keep.
    priority = {"SIGNE": 0, "EN_COURS": 1, "EN_ATTENTE": 2}

    def sort_key(tx):
        # Unknown statuses sort last; within a status, newest first.
        return (priority.get(tx.local_status.value, 99), -tx.created_at.timestamp())

    total_deleted = 0
    cleanup_details = []
    for doc_id in duplicate_docs:
        tx_query = (
            select(UniversignTransaction)
            .where(UniversignTransaction.sage_document_id == doc_id)
            .order_by(UniversignTransaction.created_at.desc())
        )
        tx_result = await session.execute(tx_query)
        transactions = tx_result.scalars().all()

        sorted_txs = sorted(transactions, key=sort_key)
        to_keep = sorted_txs[0]
        to_delete = sorted_txs[1:]
        for tx in to_delete:
            await session.delete(tx)
        total_deleted += len(to_delete)
        cleanup_details.append(
            {
                "document_id": doc_id,
                "kept": to_keep.transaction_id,
                "kept_status": to_keep.local_status.value,
                "deleted_count": len(to_delete),
            }
        )
    await session.commit()
    logger.info(
        f"Nettoyage global: {total_deleted} doublons supprimés sur {len(duplicate_docs)} documents"
    )
    return {
        "success": True,
        "documents_processed": len(duplicate_docs),
        "total_deleted": total_deleted,
        "details": cleanup_details,
    }
@router.post("/admin/force-sync-all", tags=["Admin"])
async def forcer_sync_toutes_transactions(
    max_transactions: int = Query(200, le=500),
    session: AsyncSession = Depends(get_session),
):
    """Force-resync the most recent transactions against Universign.

    Iterates over up to ``max_transactions`` transactions (newest first) and
    calls the sync service with ``force=True`` on each. Per-transaction
    failures are counted but never abort the loop.

    Raises:
        HTTPException: 500 when the overall query/loop setup fails.
    """
    try:
        query = (
            select(UniversignTransaction)
            .options(selectinload(UniversignTransaction.signers))
            .order_by(UniversignTransaction.created_at.desc())
            .limit(max_transactions)
        )
        result = await session.execute(query)
        transactions = result.scalars().all()
        # Aggregate counters returned to the caller; "details" only records
        # status changes and failures, not unchanged successes.
        stats = {
            "total_verifie": len(transactions),
            "success": 0,
            "failed": 0,
            "status_changes": 0,
            "details": [],
        }
        for transaction in transactions:
            try:
                # Status snapshot before the sync, to detect a change after.
                previous_status = transaction.local_status.value
                logger.info(
                    f"Force sync: {transaction.transaction_id} (statut: {previous_status})"
                )
                success, error = await sync_service.sync_transaction(
                    session, transaction, force=True
                )
                new_status = transaction.local_status.value
                if success:
                    stats["success"] += 1
                    if new_status != previous_status:
                        stats["status_changes"] += 1
                        stats["details"].append(
                            {
                                "transaction_id": transaction.transaction_id,
                                "document_id": transaction.sage_document_id,
                                # NOTE(review): statuses are concatenated with
                                # no separator — likely a missing " → "; confirm.
                                "changement": f"{previous_status}{new_status}",
                            }
                        )
                else:
                    stats["failed"] += 1
                    stats["details"].append(
                        {
                            "transaction_id": transaction.transaction_id,
                            "document_id": transaction.sage_document_id,
                            "erreur": error,
                        }
                    )
            except Exception as e:
                # Best-effort loop: log and keep going with the next one.
                logger.error(f"Erreur sync {transaction.transaction_id}: {e}")
                stats["failed"] += 1
        return {
            "success": True,
            "stats": stats,
            "timestamp": datetime.now().isoformat(),
        }
    except Exception as e:
        logger.error(f"Erreur force sync: {e}")
        raise HTTPException(500, str(e))
@router.post("/admin/repair-transaction/{transaction_id}", tags=["Admin"])
async def reparer_transaction(
    transaction_id: str, session: AsyncSession = Depends(get_session)
):
    """Force a single transaction back in sync with Universign.

    Returns a before/after view of the local and Universign statuses.

    Raises:
        HTTPException: 404 when the transaction is unknown, 500 on any
            unexpected failure.
    """
    try:
        query = select(UniversignTransaction).where(
            UniversignTransaction.transaction_id == transaction_id
        )
        result = await session.execute(query)
        transaction = result.scalar_one_or_none()
        if not transaction:
            raise HTTPException(404, f"Transaction {transaction_id} introuvable")
        # Snapshot the pre-sync state so the response can report the change.
        old_status = transaction.local_status.value
        old_universign_status = (
            transaction.universign_status.value
            if transaction.universign_status
            else None
        )
        success, error = await sync_service.sync_transaction(
            session, transaction, force=True
        )
        if not success:
            return {
                "success": False,
                "transaction_id": transaction_id,
                "erreur": error,
                "ancien_statut": old_status,
            }
        return {
            "success": True,
            "transaction_id": transaction_id,
            "reparation": {
                "ancien_statut_local": old_status,
                "nouveau_statut_local": transaction.local_status.value,
                "ancien_statut_universign": old_universign_status,
                # Guard: universign_status may still be None after a sync;
                # unguarded .value raised AttributeError.
                "nouveau_statut_universign": (
                    transaction.universign_status.value
                    if transaction.universign_status
                    else None
                ),
                "statut_change": old_status != transaction.local_status.value,
            },
            # Guard: last_synced_at may be unset; avoid .isoformat() on None.
            "derniere_sync": (
                transaction.last_synced_at.isoformat()
                if transaction.last_synced_at
                else None
            ),
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Erreur réparation: {e}")
        raise HTTPException(500, str(e))
@router.get("/admin/transactions-inconsistantes", tags=["Admin"])
async def trouver_transactions_inconsistantes(
    session: AsyncSession = Depends(get_session),
):
    """Report local transactions whose status disagrees with Universign.

    Only PENDING / IN_PROGRESS transactions are checked (final statuses are
    assumed settled). For each one, the live Universign state is fetched and
    compared with what the local status should map to. Per-transaction
    errors are recorded as inconsistencies rather than aborting the scan.

    Raises:
        HTTPException: 500 when the overall scan fails.
    """
    try:
        query = select(UniversignTransaction).where(
            UniversignTransaction.local_status.in_(
                [LocalDocumentStatus.PENDING, LocalDocumentStatus.IN_PROGRESS]
            )
        )
        result = await session.execute(query)
        transactions = result.scalars().all()
        inconsistantes = []
        for tx in transactions:
            try:
                # NOTE(review): fetch_transaction_status is called without
                # await inside an async def — confirm it is synchronous,
                # otherwise this compares against an un-awaited coroutine.
                universign_data = sync_service.fetch_transaction_status(
                    tx.transaction_id
                )
                if not universign_data:
                    inconsistantes.append(
                        {
                            "transaction_id": tx.transaction_id,
                            "document_id": tx.sage_document_id,
                            "probleme": "Impossible de récupérer depuis Universign",
                            "statut_local": tx.local_status.value,
                            "statut_universign": None,
                        }
                    )
                    continue
                # Map the remote state to the local status it should imply.
                universign_status = universign_data["transaction"].get("state")
                expected_local_status = map_universign_to_local(universign_status)
                if expected_local_status != tx.local_status.value:
                    inconsistantes.append(
                        {
                            "transaction_id": tx.transaction_id,
                            "document_id": tx.sage_document_id,
                            "probleme": "Statut incohérent",
                            "statut_local": tx.local_status.value,
                            "statut_universign": universign_status,
                            "statut_attendu": expected_local_status,
                            "derniere_sync": tx.last_synced_at.isoformat()
                            if tx.last_synced_at
                            else None,
                        }
                    )
            except Exception as e:
                # A per-transaction failure is itself reported as an anomaly.
                logger.error(f"Erreur vérification {tx.transaction_id}: {e}")
                inconsistantes.append(
                    {
                        "transaction_id": tx.transaction_id,
                        "document_id": tx.sage_document_id,
                        "probleme": f"Erreur: {str(e)}",
                        "statut_local": tx.local_status.value,
                    }
                )
        return {
            "total_verifie": len(transactions),
            "inconsistantes": len(inconsistantes),
            "details": inconsistantes,
            "recommandation": (
                "Utilisez POST /universign/admin/force-sync-all pour corriger"
                if inconsistantes
                else "Aucune incohérence détectée"
            ),
        }
    except Exception as e:
        logger.error(f"Erreur recherche incohérences: {e}")
        raise HTTPException(500, str(e))
@router.get("/transactions/{transaction_id}/document/info", tags=["Documents Signés"])
async def info_document_signe(
    transaction_id: str, session: AsyncSession = Depends(get_session)
):
    """Return metadata about the locally downloaded signed document.

    Raises:
        HTTPException: 404 when the transaction is unknown, 500 on any
            unexpected failure.
    """
    try:
        query = select(UniversignTransaction).where(
            UniversignTransaction.transaction_id == transaction_id
        )
        result = await session.execute(query)
        transaction = result.scalar_one_or_none()
        if not transaction:
            raise HTTPException(404, f"Transaction {transaction_id} introuvable")
        file_exists = False
        file_size_mb = None
        if transaction.signed_document_path:
            file_path = Path(transaction.signed_document_path)
            file_exists = file_path.exists()
            if file_exists:
                # pathlib stat() instead of os.path.getsize on a Path object.
                file_size_mb = file_path.stat().st_size / (1024 * 1024)
        return {
            "transaction_id": transaction_id,
            "document_available_locally": file_exists,
            "document_url_universign": transaction.document_url,
            "downloaded_at": (
                transaction.signed_document_downloaded_at.isoformat()
                if transaction.signed_document_downloaded_at
                else None
            ),
            # `is not None` so a zero-byte file reports 0.0 instead of None.
            "file_size_mb": (
                round(file_size_mb, 2) if file_size_mb is not None else None
            ),
            "download_attempts": transaction.download_attempts,
            "last_download_error": transaction.download_error,
            "local_path": transaction.signed_document_path if file_exists else None,
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Erreur info document : {e}")
        raise HTTPException(500, str(e))

Binary file not shown.

Binary file not shown.

BIN
sage/pdfs/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

View file

@ -1,4 +1,3 @@
# sage_client.py
import requests import requests
from typing import Dict, List, Optional from typing import Dict, List, Optional
from config.config import settings from config.config import settings
@ -408,7 +407,7 @@ class SageGatewayClient:
return self._post( return self._post(
"/sage/collaborateurs/list", "/sage/collaborateurs/list",
{ {
"filtre": filtre or "", # ⚠️ Convertir None en "" "filtre": filtre or "",
"actifs_seulement": actifs_seulement, "actifs_seulement": actifs_seulement,
}, },
).get("data", []) ).get("data", [])
@ -427,6 +426,155 @@ class SageGatewayClient:
"/sage/collaborateurs/update", {"numero": numero, **data} "/sage/collaborateurs/update", {"numero": numero, **data}
).get("data") ).get("data")
def lire_informations_societe(self) -> Optional[Dict]:
"""Lit les informations de la société depuis P_DOSSIER"""
return self._get("/sage/societe/info").get("data")
def valider_facture(self, numero_facture: str) -> dict:
    """Mark the given invoice as validated in Sage."""
    return self._post(f"/sage/factures/{numero_facture}/valider", {}).get("data", {})
def devalider_facture(self, numero_facture: str) -> dict:
    """Undo the validation of the given invoice in Sage."""
    return self._post(f"/sage/factures/{numero_facture}/devalider", {}).get("data", {})
def get_statut_validation(self, numero_facture: str) -> dict:
    """Return the validation status of an invoice."""
    return self._get(f"/sage/factures/{numero_facture}/statut-validation").get(
        "data", {}
    )
def regler_facture(
    self,
    numero_facture: str,
    montant: float,
    mode_reglement: int = 0,
    date_reglement: Optional[str] = None,
    reference: str = "",
    libelle: str = "",
    code_journal: Optional[str] = None,
    devise_code: int = 0,
    cours_devise: float = 1.0,
    tva_encaissement: bool = False,
    compte_general: Optional[str] = None,
) -> dict:
    """Settle (pay) a single invoice through the gateway.

    Args:
        numero_facture: Invoice number to settle.
        montant: Amount to settle.
        mode_reglement: Payment-mode code (see /sage/reglements/modes).
        date_reglement: Settlement date; omitted from the payload when None
            so the gateway applies its own default.
        reference: Free-form settlement reference.
        libelle: Settlement label.
        code_journal: Journal code; omitted from the payload when None.
        devise_code: Currency code (0 = default currency).
        cours_devise: Exchange rate for the currency.
        tva_encaissement: Whether to apply VAT on collection.
        compte_general: General-ledger account; omitted when None.

    Returns:
        The gateway's "data" payload, or {} when absent.
    """
    # Fix: optional parameters were annotated `str = None` / no annotation;
    # they are now properly Optional. Behavior is unchanged.
    payload = {
        "montant": montant,
        "mode_reglement": mode_reglement,
        "reference": reference,
        "libelle": libelle,
        "devise_code": devise_code,
        "cours_devise": cours_devise,
        "tva_encaissement": tva_encaissement,
    }
    # Optional fields are only sent when provided, so the gateway's own
    # defaults apply otherwise.
    if date_reglement:
        payload["date_reglement"] = date_reglement
    if code_journal:
        payload["code_journal"] = code_journal
    if compte_general:
        payload["compte_general"] = compte_general
    return self._post(f"/sage/factures/{numero_facture}/regler", payload).get(
        "data", {}
    )
def regler_factures_client(
    self,
    client_code: str,
    montant_total: float,
    mode_reglement: int = 0,
    date_reglement: Optional[str] = None,
    reference: str = "",
    libelle: str = "",
    code_journal: Optional[str] = None,
    numeros_factures: Optional[List[str]] = None,
    devise_code: int = 0,
    cours_devise: float = 1.0,
    tva_encaissement: bool = False,
) -> dict:
    """Settle several invoices of one client in a single operation.

    Args:
        client_code: Sage client code.
        montant_total: Total amount distributed over the invoices.
        mode_reglement: Payment-mode code (see /sage/reglements/modes).
        date_reglement: Settlement date; omitted from the payload when None.
        reference: Free-form settlement reference.
        libelle: Settlement label.
        code_journal: Journal code; omitted when None.
        numeros_factures: Explicit invoice numbers to settle; omitted when
            None (gateway chooses which invoices to settle).
        devise_code: Currency code (0 = default currency).
        cours_devise: Exchange rate for the currency.
        tva_encaissement: Whether to apply VAT on collection.

    Returns:
        The gateway's "data" payload, or {} when absent.
    """
    # Fix: `date_reglement: str = None`, `code_journal: str = None` and
    # `numeros_factures: list = None` were mis-annotated; now Optional.
    payload = {
        "client_code": client_code,
        "montant_total": montant_total,
        "mode_reglement": mode_reglement,
        "reference": reference,
        "libelle": libelle,
        "devise_code": devise_code,
        "cours_devise": cours_devise,
        "tva_encaissement": tva_encaissement,
    }
    # Only forward optional values that were actually supplied.
    if date_reglement:
        payload["date_reglement"] = date_reglement
    if code_journal:
        payload["code_journal"] = code_journal
    if numeros_factures:
        payload["numeros_factures"] = numeros_factures
    return self._post("/sage/reglements/multiple", payload).get("data", {})
def get_reglements_facture(self, numero_facture: str) -> dict:
    """Fetch the settlements recorded against one invoice."""
    endpoint = f"/sage/factures/{numero_facture}/reglements"
    return self._get(endpoint).get("data", {})
def get_reglements_client(
    self,
    client_code: str,
    date_debut: Optional[str] = None,
    date_fin: Optional[str] = None,
    inclure_soldees: bool = True,
) -> dict:
    """Fetch a client's settlements, optionally bounded by a date range.

    Date bounds are only sent as query parameters when provided;
    `inclure_soldees` controls whether fully settled invoices are included.
    """
    params = {"inclure_soldees": inclure_soldees}
    if date_debut:
        params["date_debut"] = date_debut
    if date_fin:
        params["date_fin"] = date_fin
    return self._get(f"/sage/clients/{client_code}/reglements", params=params).get(
        "data", {}
    )
def get_journaux_banque(self) -> dict:
    """Return the bank journals exposed by the gateway."""
    response = self._get("/sage/journaux/banque")
    return response.get("data", {})
def get_modes_reglement(self) -> List[dict]:
    """Fetch the payment modes configured in Sage."""
    data = self._get("/sage/reglements/modes").get("data", {})
    return data.get("modes", [])
def get_devises(self) -> List[dict]:
    """Fetch the currencies available in Sage."""
    data = self._get("/sage/devises").get("data", {})
    return data.get("devises", [])
def get_journaux_tresorerie(self) -> List[dict]:
    """Fetch the treasury journals (bank + cash)."""
    data = self._get("/sage/journaux/tresorerie").get("data", {})
    return data.get("journaux", [])
def get_comptes_generaux(
    self, prefixe: Optional[str] = None, type_compte: Optional[str] = None
) -> List[dict]:
    """Fetch general-ledger accounts, optionally filtered by prefix and type.

    Filters are only added to the query string when provided.
    """
    params = {}
    if prefixe:
        params["prefixe"] = prefixe
    if type_compte:
        params["type_compte"] = type_compte
    return (
        self._get("/sage/comptes-generaux", params=params)
        .get("data", {})
        .get("comptes", [])
    )
def get_tva_taux(self) -> List[dict]:
    """Fetch the VAT rates."""
    payload = self._get("/sage/tva/taux")
    return payload.get("data", {}).get("taux", [])
def get_parametres_encaissement(self) -> dict:
    """Fetch the VAT-on-collection settings."""
    response = self._get("/sage/parametres/encaissement")
    return response.get("data", {})
def refresh_cache(self) -> Dict: def refresh_cache(self) -> Dict:
return self._post("/sage/cache/refresh") return self._post("/sage/cache/refresh")
@ -440,5 +588,14 @@ class SageGatewayClient:
except Exception: except Exception:
return {"status": "down"} return {"status": "down"}
def get_tous_reglements(self, params=None):
    """List settlements, forwarding optional query parameters as-is."""
    endpoint = "/sage/reglements"
    return self._get(endpoint, params=params)
def get_reglement_facture_detail(self, facture_no):
    """Fetch the settlement detail associated with one invoice number."""
    endpoint = f"/sage/reglements/facture/{facture_no}"
    return self._get(endpoint)
def get_reglement_detail(self, rg_no):
    """Fetch the detail of a single settlement by its number."""
    endpoint = f"/sage/reglements/{rg_no}"
    return self._get(endpoint)
sage_client = SageGatewayClient() sage_client = SageGatewayClient()

View file

@ -26,7 +26,13 @@ from schemas.documents.documents import TypeDocument, TypeDocumentSQL
from schemas.documents.email import StatutEmail, EmailEnvoi from schemas.documents.email import StatutEmail, EmailEnvoi
from schemas.documents.factures import FactureCreate, FactureUpdate from schemas.documents.factures import FactureCreate, FactureUpdate
from schemas.documents.livraisons import LivraisonCreate, LivraisonUpdate from schemas.documents.livraisons import LivraisonCreate, LivraisonUpdate
from schemas.documents.universign import Signature, StatutSignature from schemas.documents.universign import (
Signature,
StatutSignature,
SyncStatsResponse,
CreateSignatureRequest,
TransactionResponse,
)
from schemas.articles.articles import ( from schemas.articles.articles import (
ArticleCreate, ArticleCreate,
Article, Article,
@ -53,6 +59,10 @@ from schemas.sage.sage_gateway import (
CurrentGatewayInfo, CurrentGatewayInfo,
) )
from schemas.society.societe import SocieteInfo
from schemas.society.enterprise import EntrepriseSearch, EntrepriseSearchResponse
__all__ = [ __all__ = [
"TiersDetails", "TiersDetails",
"TypeTiers", "TypeTiers",
@ -105,4 +115,10 @@ __all__ = [
"SageGatewayTest", "SageGatewayTest",
"SageGatewayStatsResponse", "SageGatewayStatsResponse",
"CurrentGatewayInfo", "CurrentGatewayInfo",
"SyncStatsResponse",
"CreateSignatureRequest",
"TransactionResponse",
"SocieteInfo",
"EntrepriseSearch",
"EntrepriseSearchResponse",
] ]

77
schemas/api_key.py Normal file
View file

@ -0,0 +1,77 @@
from pydantic import BaseModel, Field
from typing import Optional, List
from datetime import datetime
class ApiKeyCreate(BaseModel):
    """Payload for creating a new API key."""

    name: str = Field(..., min_length=3, max_length=255, description="Nom de la clé")
    description: Optional[str] = Field(None, description="Description de l'usage")
    # None means the key never expires; otherwise 1..3650 days.
    expires_in_days: Optional[int] = Field(
        None, ge=1, le=3650, description="Expiration en jours (max 10 ans)"
    )
    rate_limit_per_minute: int = Field(
        60, ge=1, le=1000, description="Limite de requêtes par minute"
    )
    # None means no endpoint restriction; entries may end with '*' as wildcard.
    allowed_endpoints: Optional[List[str]] = Field(
        None, description="Endpoints autorisés ([] = tous, ['/clients*'] = wildcard)"
    )
class ApiKeyResponse(BaseModel):
    """Public representation of an API key (does not expose the secret)."""

    id: str
    name: str
    description: Optional[str]
    # Leading characters of the key, used to identify it in listings.
    key_prefix: str
    is_active: bool
    is_expired: bool
    rate_limit_per_minute: int
    allowed_endpoints: Optional[List[str]]
    # Usage statistics.
    total_requests: int
    last_used_at: Optional[datetime]
    created_at: datetime
    expires_at: Optional[datetime]
    revoked_at: Optional[datetime]
    created_by: str
class ApiKeyCreatedResponse(ApiKeyResponse):
    """Response returned once at creation time; includes the plaintext key."""

    api_key: str = Field(
        ..., description=" Clé API en clair - à sauvegarder immédiatement"
    )
class ApiKeyList(BaseModel):
    """List of API keys together with the total count."""

    total: int
    items: List[ApiKeyResponse]
class SwaggerUserCreate(BaseModel):
    """Payload for creating a Swagger (docs) user."""

    username: str = Field(..., min_length=3, max_length=100)
    # Plaintext at submission; stored hashed server-side.
    password: str = Field(..., min_length=8)
    full_name: Optional[str] = None
    email: Optional[str] = None
class SwaggerUserResponse(BaseModel):
    """Public representation of a Swagger user."""

    id: str
    username: str
    full_name: Optional[str]
    email: Optional[str]
    is_active: bool
    created_at: datetime
    last_login: Optional[datetime]

    class Config:
        # Allow building the model directly from ORM objects.
        from_attributes = True

View file

@ -76,7 +76,6 @@ class Article(BaseModel):
) )
nb_emplacements: int = Field(0, description="Nombre d'emplacements") nb_emplacements: int = Field(0, description="Nombre d'emplacements")
# Champs énumérés normalisés
suivi_stock: Optional[int] = Field( suivi_stock: Optional[int] = Field(
None, None,
description="Type de suivi de stock (AR_SuiviStock): 0=Aucun, 1=CMUP, 2=FIFO/LIFO, 3=Sérialisé", description="Type de suivi de stock (AR_SuiviStock): 0=Aucun, 1=CMUP, 2=FIFO/LIFO, 3=Sérialisé",

View file

@ -4,6 +4,7 @@ from datetime import datetime
from schemas.documents.ligne_document import LigneDocument from schemas.documents.ligne_document import LigneDocument
class FactureCreate(BaseModel): class FactureCreate(BaseModel):
client_id: str client_id: str
date_facture: Optional[datetime] = None date_facture: Optional[datetime] = None

View file

@ -0,0 +1,109 @@
from pydantic import BaseModel, Field, field_validator
from typing import List, Optional
import logging
from decimal import Decimal
from datetime import date
logger = logging.getLogger(__name__)
class ReglementFactureCreate(BaseModel):
    """VPS-side request to settle (pay) a single invoice."""

    montant: Decimal = Field(..., gt=0, description="Montant à régler")
    devise_code: Optional[int] = Field(0, description="Code devise (0=EUR par défaut)")
    cours_devise: Optional[Decimal] = Field(1.0, description="Cours de la devise")
    mode_reglement: int = Field(
        ..., ge=0, description="Code mode règlement depuis /reglements/modes"
    )
    code_journal: str = Field(
        ..., min_length=1, description="Code journal depuis /journaux/tresorerie"
    )
    date_reglement: Optional[date] = Field(
        None, description="Date du règlement (défaut: aujourd'hui)"
    )
    date_echeance: Optional[date] = Field(None, description="Date d'échéance")
    # Sage field-length limits: 17 chars for the reference, 35 for the label.
    reference: Optional[str] = Field(
        "", max_length=17, description="Référence pièce règlement"
    )
    libelle: Optional[str] = Field(
        "", max_length=35, description="Libellé du règlement"
    )
    tva_encaissement: Optional[bool] = Field(
        False, description="Appliquer TVA sur encaissement"
    )
    compte_general: Optional[str] = Field(None)

    @field_validator("montant")
    def validate_montant(cls, v):
        # Redundant with gt=0 but kept as a second guard; also normalizes
        # the amount to 2 decimal places.
        if v <= 0:
            raise ValueError("Le montant doit être positif")
        return round(v, 2)

    class Config:
        # Example rendered in the OpenAPI docs.
        json_schema_extra = {
            "example": {
                "montant": 375.12,
                "mode_reglement": 2,
                "reference": "CHQ-001",
                "code_journal": "BEU",
                "date_reglement": "2024-01-01",
                "libelle": "Règlement multiple",
                "tva_encaissement": False,
                "devise_code": 0,
                "cours_devise": 1.0,
                "date_echeance": "2024-01-31",
            }
        }
class ReglementMultipleCreate(BaseModel):
    """VPS-side request to settle several of a client's invoices at once."""

    client_id: str = Field(..., description="Code client")
    montant_total: Decimal = Field(..., gt=0)
    devise_code: Optional[int] = Field(0)
    cours_devise: Optional[Decimal] = Field(1.0)
    mode_reglement: int = Field(...)
    code_journal: str = Field(...)
    date_reglement: Optional[date] = None
    reference: Optional[str] = Field("")
    libelle: Optional[str] = Field("")
    tva_encaissement: Optional[bool] = Field(False)
    numeros_factures: Optional[List[str]] = Field(
        None, description="Si vide, règle les plus anciennes en premier"
    )

    @field_validator("client_id", mode="before")
    def strip_client_id(cls, v):
        # Client codes coming from Sage may carry non-breaking spaces
        # (\xa0) and padding; normalize them before validation.
        return v.replace("\xa0", "").strip() if v else v

    @field_validator("montant_total")
    def validate_montant(cls, v):
        # Redundant with gt=0; also normalizes to 2 decimal places.
        if v <= 0:
            raise ValueError("Le montant doit être positif")
        return round(v, 2)

    class Config:
        # Example rendered in the OpenAPI docs.
        json_schema_extra = {
            "example": {
                "client_id": "CLI000001",
                "montant_total": 1000.00,
                "mode_reglement": 2,
                "numeros_factures": ["FA00081", "FA00082"],
                "reference": "CHQ-001",
                "code_journal": "BEU",
                "date_reglement": "2024-01-01",
                "libelle": "Règlement multiple",
                "tva_encaissement": False,
                "devise_code": 0,
                "cours_devise": 1.0,
                "date_echeance": "2024-01-31",
            }
        }

View file

@ -1,6 +1,12 @@
from pydantic import BaseModel, EmailStr from pydantic import BaseModel, EmailStr
from enum import Enum from enum import Enum
from schemas.documents.documents import TypeDocument from schemas.documents.documents import TypeDocument
from database import (
SageDocumentType,
)
from typing import List, Optional
from datetime import datetime
class StatutSignature(str, Enum): class StatutSignature(str, Enum):
@ -16,3 +22,49 @@ class Signature(BaseModel):
type_doc: TypeDocument type_doc: TypeDocument
email_signataire: EmailStr email_signataire: EmailStr
nom_signataire: str nom_signataire: str
class CreateSignatureRequest(BaseModel):
    """Request to start a signature transaction for a Sage document."""

    sage_document_id: str
    sage_document_type: SageDocumentType
    signer_email: EmailStr
    signer_name: str
    # Optional display name for the document — presumably a server-side
    # default is used when omitted; TODO confirm against the service.
    document_name: Optional[str] = None
class TransactionResponse(BaseModel):
    """Detailed view of a signature transaction."""

    id: str
    transaction_id: str
    sage_document_id: str
    sage_document_type: str
    # Status as reported by Universign vs. the locally tracked status.
    universign_status: str
    local_status: str
    local_status_label: str
    signer_url: Optional[str]
    document_url: Optional[str]
    created_at: datetime
    sent_at: Optional[datetime]
    signed_at: Optional[datetime]
    last_synced_at: Optional[datetime]
    needs_sync: bool
    signers: List[dict]
    # Local copy of the signed document, when it has been downloaded.
    signed_document_available: bool = False
    signed_document_downloaded_at: Optional[datetime] = None
    signed_document_size_kb: Optional[float] = None
class SyncStatsResponse(BaseModel):
    """Aggregate synchronization statistics for signature transactions."""

    total_transactions: int
    pending_sync: int
    # Counts per transaction outcome.
    signed: int
    in_progress: int
    refused: int
    expired: int
    last_sync_at: Optional[datetime]

View file

@ -10,7 +10,6 @@ class GatewayHealthStatus(str, Enum):
UNKNOWN = "unknown" UNKNOWN = "unknown"
# === CREATE ===
class SageGatewayCreate(BaseModel): class SageGatewayCreate(BaseModel):
name: str = Field( name: str = Field(
@ -71,7 +70,6 @@ class SageGatewayUpdate(BaseModel):
return v.rstrip("/") if v else v return v.rstrip("/") if v else v
# === RESPONSE ===
class SageGatewayResponse(BaseModel): class SageGatewayResponse(BaseModel):
id: str id: str

View file

@ -0,0 +1,24 @@
from pydantic import BaseModel, Field
from typing import Optional, List
class EntrepriseSearch(BaseModel):
    """Response model for one matched company."""

    company_name: str = Field(..., description="Raison sociale complète")
    siren: str = Field(..., description="Numéro SIREN (9 chiffres)")
    vat_number: str = Field(..., description="Numéro de TVA intracommunautaire")
    address: str = Field(..., description="Adresse complète du siège")
    naf_code: str = Field(..., description="Code NAF/APE")
    is_active: bool = Field(..., description="True si entreprise active")
    siret_siege: Optional[str] = Field(None, description="SIRET du siège")
    code_postal: Optional[str] = None
    ville: Optional[str] = None
class EntrepriseSearchResponse(BaseModel):
    """Overall response of a company search: matches plus the original query."""

    total_results: int
    results: List[EntrepriseSearch]
    query: str

View file

@ -0,0 +1,46 @@
from pydantic import BaseModel
from typing import Optional, List
class ExerciceComptable(BaseModel):
    """One accounting period (fiscal year)."""

    numero: int
    # Start/end dates as strings; `fin` may be absent for the open period.
    debut: str
    fin: Optional[str] = None
class SocieteInfo(BaseModel):
    """Company information as read from Sage (P_DOSSIER)."""

    raison_sociale: str
    numero_dossier: str
    siret: Optional[str] = None
    code_ape: Optional[str] = None
    numero_tva: Optional[str] = None
    # Postal address of the company.
    adresse: Optional[str] = None
    complement_adresse: Optional[str] = None
    code_postal: Optional[str] = None
    ville: Optional[str] = None
    code_region: Optional[str] = None
    pays: Optional[str] = None
    # Contact details.
    telephone: Optional[str] = None
    telecopie: Optional[str] = None
    email: Optional[str] = None
    email_societe: Optional[str] = None
    site_web: Optional[str] = None
    capital: float = 0.0
    forme_juridique: Optional[str] = None
    # Accounting configuration.
    exercices: List[ExerciceComptable] = []
    devise_compte: int = 0
    devise_equivalent: int = 0
    longueur_compte_general: int = 0
    longueur_compte_analytique: int = 0
    regime_fec: int = 0
    base_modele: Optional[str] = None
    marqueur: int = 0
    # Company logo, base64-encoded, with its MIME type.
    logo_base64: Optional[str] = None
    logo_content_type: Optional[str] = None

View file

@ -9,7 +9,6 @@ class CollaborateurBase(BaseModel):
prenom: Optional[str] = Field(None, max_length=50) prenom: Optional[str] = Field(None, max_length=50)
fonction: Optional[str] = Field(None, max_length=50) fonction: Optional[str] = Field(None, max_length=50)
# Adresse
adresse: Optional[str] = Field(None, max_length=100) adresse: Optional[str] = Field(None, max_length=100)
complement: Optional[str] = Field(None, max_length=100) complement: Optional[str] = Field(None, max_length=100)
code_postal: Optional[str] = Field(None, max_length=10) code_postal: Optional[str] = Field(None, max_length=10)
@ -17,7 +16,6 @@ class CollaborateurBase(BaseModel):
code_region: Optional[str] = Field(None, max_length=50) code_region: Optional[str] = Field(None, max_length=50)
pays: Optional[str] = Field(None, max_length=50) pays: Optional[str] = Field(None, max_length=50)
# Services
service: Optional[str] = Field(None, max_length=50) service: Optional[str] = Field(None, max_length=50)
vendeur: bool = Field(default=False) vendeur: bool = Field(default=False)
caissier: bool = Field(default=False) caissier: bool = Field(default=False)
@ -25,18 +23,15 @@ class CollaborateurBase(BaseModel):
chef_ventes: bool = Field(default=False) chef_ventes: bool = Field(default=False)
numero_chef_ventes: Optional[int] = None numero_chef_ventes: Optional[int] = None
# Contact
telephone: Optional[str] = Field(None, max_length=20) telephone: Optional[str] = Field(None, max_length=20)
telecopie: Optional[str] = Field(None, max_length=20) telecopie: Optional[str] = Field(None, max_length=20)
email: Optional[EmailStr] = None email: Optional[EmailStr] = None
tel_portable: Optional[str] = Field(None, max_length=20) tel_portable: Optional[str] = Field(None, max_length=20)
# Réseaux sociaux
facebook: Optional[str] = Field(None, max_length=100) facebook: Optional[str] = Field(None, max_length=100)
linkedin: Optional[str] = Field(None, max_length=100) linkedin: Optional[str] = Field(None, max_length=100)
skype: Optional[str] = Field(None, max_length=100) skype: Optional[str] = Field(None, max_length=100)
# Autres
matricule: Optional[str] = Field(None, max_length=20) matricule: Optional[str] = Field(None, max_length=20)
sommeil: bool = Field(default=False) sommeil: bool = Field(default=False)

View file

@ -14,7 +14,6 @@ class TypeTiersInt(IntEnum):
class TiersDetails(BaseModel): class TiersDetails(BaseModel):
# IDENTIFICATION
numero: Optional[str] = Field(None, description="Code tiers (CT_Num)") numero: Optional[str] = Field(None, description="Code tiers (CT_Num)")
intitule: Optional[str] = Field( intitule: Optional[str] = Field(
None, description="Raison sociale ou Nom complet (CT_Intitule)" None, description="Raison sociale ou Nom complet (CT_Intitule)"
@ -37,7 +36,6 @@ class TiersDetails(BaseModel):
) )
code_naf: Optional[str] = Field(None, description="Code NAF/APE (CT_Ape)") code_naf: Optional[str] = Field(None, description="Code NAF/APE (CT_Ape)")
# ADRESSE
contact: Optional[str] = Field( contact: Optional[str] = Field(
None, description="Nom du contact principal (CT_Contact)" None, description="Nom du contact principal (CT_Contact)"
) )
@ -50,7 +48,6 @@ class TiersDetails(BaseModel):
region: Optional[str] = Field(None, description="Région/État (CT_CodeRegion)") region: Optional[str] = Field(None, description="Région/État (CT_CodeRegion)")
pays: Optional[str] = Field(None, description="Pays (CT_Pays)") pays: Optional[str] = Field(None, description="Pays (CT_Pays)")
# TELECOM
telephone: Optional[str] = Field(None, description="Téléphone fixe (CT_Telephone)") telephone: Optional[str] = Field(None, description="Téléphone fixe (CT_Telephone)")
telecopie: Optional[str] = Field(None, description="Fax (CT_Telecopie)") telecopie: Optional[str] = Field(None, description="Fax (CT_Telecopie)")
email: Optional[str] = Field(None, description="Email principal (CT_EMail)") email: Optional[str] = Field(None, description="Email principal (CT_EMail)")
@ -58,13 +55,11 @@ class TiersDetails(BaseModel):
facebook: Optional[str] = Field(None, description="Profil Facebook (CT_Facebook)") facebook: Optional[str] = Field(None, description="Profil Facebook (CT_Facebook)")
linkedin: Optional[str] = Field(None, description="Profil LinkedIn (CT_LinkedIn)") linkedin: Optional[str] = Field(None, description="Profil LinkedIn (CT_LinkedIn)")
# TAUX
taux01: Optional[float] = Field(None, description="Taux personnalisé 1 (CT_Taux01)") taux01: Optional[float] = Field(None, description="Taux personnalisé 1 (CT_Taux01)")
taux02: Optional[float] = Field(None, description="Taux personnalisé 2 (CT_Taux02)") taux02: Optional[float] = Field(None, description="Taux personnalisé 2 (CT_Taux02)")
taux03: Optional[float] = Field(None, description="Taux personnalisé 3 (CT_Taux03)") taux03: Optional[float] = Field(None, description="Taux personnalisé 3 (CT_Taux03)")
taux04: Optional[float] = Field(None, description="Taux personnalisé 4 (CT_Taux04)") taux04: Optional[float] = Field(None, description="Taux personnalisé 4 (CT_Taux04)")
# STATISTIQUES
statistique01: Optional[str] = Field( statistique01: Optional[str] = Field(
None, description="Statistique 1 (CT_Statistique01)" None, description="Statistique 1 (CT_Statistique01)"
) )
@ -96,7 +91,6 @@ class TiersDetails(BaseModel):
None, description="Statistique 10 (CT_Statistique10)" None, description="Statistique 10 (CT_Statistique10)"
) )
# COMMERCIAL
encours_autorise: Optional[float] = Field( encours_autorise: Optional[float] = Field(
None, description="Encours maximum autorisé (CT_Encours)" None, description="Encours maximum autorisé (CT_Encours)"
) )
@ -113,7 +107,6 @@ class TiersDetails(BaseModel):
None, description="Détails du commercial/collaborateur" None, description="Détails du commercial/collaborateur"
) )
# FACTURATION
lettrage_auto: Optional[bool] = Field( lettrage_auto: Optional[bool] = Field(
None, description="Lettrage automatique (CT_Lettrage)" None, description="Lettrage automatique (CT_Lettrage)"
) )
@ -146,7 +139,6 @@ class TiersDetails(BaseModel):
None, description="Bon à payer obligatoire (CT_BonAPayer)" None, description="Bon à payer obligatoire (CT_BonAPayer)"
) )
# LOGISTIQUE
priorite_livraison: Optional[int] = Field( priorite_livraison: Optional[int] = Field(
None, description="Priorité livraison (CT_PrioriteLivr)" None, description="Priorité livraison (CT_PrioriteLivr)"
) )
@ -160,17 +152,14 @@ class TiersDetails(BaseModel):
None, description="Délai appro jours (CT_DelaiAppro)" None, description="Délai appro jours (CT_DelaiAppro)"
) )
# COMMENTAIRE
commentaire: Optional[str] = Field( commentaire: Optional[str] = Field(
None, description="Commentaire libre (CT_Commentaire)" None, description="Commentaire libre (CT_Commentaire)"
) )
# ANALYTIQUE
section_analytique: Optional[str] = Field( section_analytique: Optional[str] = Field(
None, description="Section analytique (CA_Num)" None, description="Section analytique (CA_Num)"
) )
# ORGANISATION / SURVEILLANCE
mode_reglement_code: Optional[int] = Field( mode_reglement_code: Optional[int] = Field(
None, description="Code mode règlement (MR_No)" None, description="Code mode règlement (MR_No)"
) )
@ -200,7 +189,6 @@ class TiersDetails(BaseModel):
None, description="Résultat financier (CT_SvResultat)" None, description="Résultat financier (CT_SvResultat)"
) )
# COMPTE GENERAL ET CATEGORIES
compte_general: Optional[str] = Field( compte_general: Optional[str] = Field(
None, description="Compte général principal (CG_NumPrinc)" None, description="Compte général principal (CG_NumPrinc)"
) )
@ -211,7 +199,6 @@ class TiersDetails(BaseModel):
None, description="Catégorie comptable (N_CatCompta)" None, description="Catégorie comptable (N_CatCompta)"
) )
# CONTACTS
contacts: Optional[List[Contact]] = Field( contacts: Optional[List[Contact]] = Field(
default_factory=list, description="Liste des contacts du tiers" default_factory=list, description="Liste des contacts du tiers"
) )

651
scripts/manage_security.py Normal file
View file

@ -0,0 +1,651 @@
import sys
import os
from pathlib import Path
import asyncio
import argparse
import logging
from datetime import datetime
from typing import Optional, List
import json
from sqlalchemy import select
# --- Path bootstrap -------------------------------------------------------
# The script lives in <app>/scripts/; make <app> the import root and the
# working directory so `database`, `services`, `security`, ... resolve the
# same way as when the application itself runs.
_current_file = Path(__file__).resolve()
_script_dir = _current_file.parent
_app_dir = _script_dir.parent
print(f"DEBUG: Script path: {_current_file}")
print(f"DEBUG: App dir: {_app_dir}")
print(f"DEBUG: Current working dir: {os.getcwd()}")
# Re-insert the app dir at the very front of sys.path so it wins over any
# previously cached position.
if str(_app_dir) in sys.path:
    sys.path.remove(str(_app_dir))
sys.path.insert(0, str(_app_dir))
os.chdir(str(_app_dir))
print(f"DEBUG: sys.path[0]: {sys.path[0]}")
print(f"DEBUG: New working dir: {os.getcwd()}")
# Probe each expected package and report which ones import cleanly before
# attempting the real imports below.
_test_imports = [
    "database",
    "database.db_config",
    "database.models",
    "services",
    "security",
]
print("\nDEBUG: Vérification des imports...")
for module in _test_imports:
    try:
        __import__(module)
        print(f"{module}")
    except ImportError as e:
        print(f"{module}: {e}")
try:
    from database.db_config import async_session_factory
    from database.models.api_key import SwaggerUser, ApiKey
    from services.api_key import ApiKeyService
    from security.auth import hash_password
except ImportError as e:
    # Abort with a hint: the script must be launched from the app root.
    print(f"\n ERREUR D'IMPORT: {e}")
    print(" Vérifiez que vous êtes dans /app")
    print(" Commande correcte: cd /app && python scripts/manage_security.py ...")
    sys.exit(1)
logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
logger = logging.getLogger(__name__)
# Swagger/OpenAPI tags a user may be granted access to, with their display
# descriptions (shown when listing users and tags).
AVAILABLE_TAGS = {
    "Authentication": " Authentification et gestion des comptes",
    "API Keys Management": "🔑 Gestion des clés API",
    "Clients": "👥 Gestion des clients",
    "Fournisseurs": "🏭 Gestion des fournisseurs",
    "Prospects": "🎯 Gestion des prospects",
    "Tiers": "📋 Gestion générale des tiers",
    "Contacts": "📞 Contacts des tiers",
    "Articles": "📦 Catalogue articles",
    "Familles": "🏷️ Familles d'articles",
    "Stock": "📊 Mouvements de stock",
    "Devis": "📄 Devis",
    "Commandes": "🛒 Commandes",
    "Livraisons": "🚚 Bons de livraison",
    "Factures": "💰 Factures",
    "Avoirs": "↩️ Avoirs",
    "Règlements": "💳 Règlements et encaissements",
    "Workflows": " Transformations de documents",
    "Documents": "📑 Gestion documents (PDF)",
    "Emails": "📧 Envoi d'emails",
    "Validation": " Validations métier",
    "Collaborateurs": "👔 Collaborateurs internes",
    "Société": "🏢 Informations société",
    "Référentiels": "📚 Données de référence",
    "System": "⚙️ Système et santé",
    "Admin": "🛠️ Administration",
    "Debug": "🐛 Debug et diagnostics",
}

# Ready-made tag bundles for common roles; selected via --preset on the CLI.
PRESET_PROFILES = {
    "commercial": [
        "Clients",
        "Contacts",
        "Devis",
        "Commandes",
        "Factures",
        "Articles",
        "Documents",
        "Emails",
    ],
    "comptable": [
        "Clients",
        "Fournisseurs",
        "Factures",
        "Avoirs",
        "Règlements",
        "Documents",
        "Emails",
    ],
    "logistique": [
        "Articles",
        "Stock",
        "Commandes",
        "Livraisons",
        "Fournisseurs",
        "Documents",
    ],
    "readonly": ["Clients", "Articles", "Devis", "Commandes", "Factures", "Documents"],
    "developer": [
        "Authentication",
        "API Keys Management",
        "System",
        "Clients",
        "Articles",
        "Devis",
        "Commandes",
        "Factures",
    ],
}
async def add_swagger_user(
    username: str,
    password: str,
    full_name: Optional[str] = None,
    tags: Optional[List[str]] = None,
    preset: Optional[str] = None,
):
    """Create a Swagger user with optional tag restrictions.

    Args:
        username: Unique login; creation aborts if it already exists.
        password: Plaintext password; stored hashed.
        full_name: Display name (defaults to the username).
        tags: Explicit list of allowed Swagger tags; None = full admin access.
        preset: Name of a PRESET_PROFILES entry; overrides `tags` when given.
    """
    async with async_session_factory() as session:
        result = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        existing = result.scalar_one_or_none()
        if existing:
            logger.error(f" L'utilisateur '{username}' existe déjà")
            return
        if preset:
            if preset not in PRESET_PROFILES:
                logger.error(
                    f" Preset '{preset}' inconnu. Disponibles: {list(PRESET_PROFILES.keys())}"
                )
                return
            tags = PRESET_PROFILES[preset]
            logger.info(f"📋 Application du preset '{preset}': {len(tags)} tags")
        # allowed_tags is persisted as a JSON array; NULL means unrestricted.
        swagger_user = SwaggerUser(
            username=username,
            hashed_password=hash_password(password),
            full_name=full_name or username,
            is_active=True,
            allowed_tags=json.dumps(tags) if tags else None,
        )
        session.add(swagger_user)
        await session.commit()
        logger.info(f" Utilisateur Swagger créé: {username}")
        logger.info(f" Nom complet: {swagger_user.full_name}")
        if tags:
            logger.info(f" 🏷️ Tags autorisés ({len(tags)}):")
            for tag in tags:
                desc = AVAILABLE_TAGS.get(tag, "")
                logger.info(f"{tag} {desc}")
        else:
            logger.info(" 👑 Accès ADMIN COMPLET (tous les tags)")
async def list_swagger_users():
    """List every Swagger user with status, allowed tags, and auth schemes."""
    async with async_session_factory() as session:
        result = await session.execute(select(SwaggerUser))
        users = result.scalars().all()
        if not users:
            logger.info("🔭 Aucun utilisateur Swagger")
            return
        logger.info(f"\n👥 {len(users)} utilisateur(s) Swagger:\n")
        logger.info("=" * 80)
        for user in users:
            status = " ACTIF" if user.is_active else " NON ACTIF"
            logger.info(f"\n{status} {user.username}")
            logger.info(f"📛 Nom: {user.full_name}")
            logger.info(f"🆔 ID: {user.id}")
            logger.info(f"📅 Créé: {user.created_at}")
            logger.info(f"🕐 Dernière connexion: {user.last_login or 'Jamais'}")
            if user.allowed_tags:
                try:
                    # allowed_tags is stored as a JSON array.
                    tags = json.loads(user.allowed_tags)
                    if tags:
                        logger.info(f"🏷️ Tags autorisés ({len(tags)}):")
                        for tag in tags:
                            desc = AVAILABLE_TAGS.get(tag, "")
                            logger.info(f"{tag} {desc}")
                        # Derive which auth schemes this tag set implies.
                        auth_schemes = []
                        if "Authentication" in tags:
                            auth_schemes.append("JWT (Bearer)")
                        if "API Keys Management" in tags or len(tags) > 3:
                            auth_schemes.append("X-API-Key")
                        if not auth_schemes:
                            auth_schemes.append("JWT (Bearer)")
                        logger.info(
                            f" Authentification autorisée: {', '.join(auth_schemes)}"
                        )
                    else:
                        logger.info("👑 Tags autorisés: ADMIN COMPLET (tous)")
                        logger.info(" Authentification: JWT + X-API-Key (tout)")
                except json.JSONDecodeError:
                    logger.info(" Tags: Erreur format")
            else:
                # NULL allowed_tags means unrestricted admin access.
                logger.info("👑 Tags autorisés: ADMIN COMPLET (tous)")
                logger.info(" Authentification: JWT + X-API-Key (tout)")
        logger.info("\n" + "=" * 80)
async def update_swagger_user(
    username: str,
    add_tags: Optional[List[str]] = None,
    remove_tags: Optional[List[str]] = None,
    set_tags: Optional[List[str]] = None,
    preset: Optional[str] = None,
    active: Optional[bool] = None,
):
    """Update a Swagger user's allowed tags and/or active status.

    Precedence for tag changes: `preset` wins over `set_tags`, which wins
    over `add_tags`/`remove_tags`. `active` is applied independently.
    Nothing is committed when no change was made.
    """
    async with async_session_factory() as session:
        result = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        user = result.scalar_one_or_none()
        if not user:
            logger.error(f" Utilisateur '{username}' introuvable")
            return
        modified = False
        if preset:
            if preset not in PRESET_PROFILES:
                logger.error(f" Preset '{preset}' inconnu")
                return
            user.allowed_tags = json.dumps(PRESET_PROFILES[preset])
            logger.info(f"📋 Preset '{preset}' appliqué")
            modified = True
        elif set_tags is not None:
            # An empty list clears restrictions back to full-admin (NULL).
            user.allowed_tags = json.dumps(set_tags) if set_tags else None
            logger.info(f" Tags remplacés: {len(set_tags) if set_tags else 0}")
            modified = True
        elif add_tags or remove_tags:
            current_tags = []
            if user.allowed_tags:
                try:
                    current_tags = json.loads(user.allowed_tags)
                except json.JSONDecodeError:
                    # Unparseable stored value: start from an empty tag set.
                    current_tags = []
            if add_tags:
                for tag in add_tags:
                    if tag not in current_tags:
                        current_tags.append(tag)
                        logger.info(f" Tag ajouté: {tag}")
                        modified = True
            if remove_tags:
                for tag in remove_tags:
                    if tag in current_tags:
                        current_tags.remove(tag)
                        logger.info(f" Tag retiré: {tag}")
                        modified = True
            user.allowed_tags = json.dumps(current_tags) if current_tags else None
        if active is not None:
            user.is_active = active
            logger.info(f" Statut: {'ACTIF' if active else 'INACTIF'}")
            modified = True
        if modified:
            await session.commit()
            logger.info(f" Utilisateur '{username}' mis à jour")
        else:
            logger.info(" Aucune modification effectuée")
async def delete_swagger_user(username: str):
    """Delete a Swagger user by username, if it exists."""
    async with async_session_factory() as session:
        lookup = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        target = lookup.scalar_one_or_none()
        if target is None:
            logger.error(f" Utilisateur '{username}' introuvable")
            return
        await session.delete(target)
        await session.commit()
        logger.info(f"🗑️ Utilisateur Swagger supprimé: {username}")
async def list_available_tags():
    """Print every known Swagger tag, then the preset profiles."""
    separator = "=" * 80
    logger.info("\n🏷️ TAGS DISPONIBLES:\n")
    logger.info(separator)
    for tag_name, tag_desc in AVAILABLE_TAGS.items():
        logger.info(f" {tag_desc}")
        logger.info(f" Nom: {tag_name}\n")
    logger.info(separator)
    logger.info("\n📦 PRESETS DISPONIBLES:\n")
    for profile_name, profile_tags in PRESET_PROFILES.items():
        logger.info(f" {profile_name}:")
        logger.info(f" {', '.join(profile_tags)}\n")
    logger.info(separator)
async def create_api_key(
    name: str,
    description: str = None,  # NOTE(review): effectively Optional[str]
    expires_in_days: int = 365,
    rate_limit: int = 60,
    endpoints: list = None,  # NOTE(review): effectively Optional[list[str]]
):
    """Create an API key through ApiKeyService and print it once to the log.

    Args:
        name: Human-readable name for the key.
        description: Optional free-text description.
        expires_in_days: Validity period forwarded to the service.
        rate_limit: Requests-per-minute budget stored with the key.
        endpoints: Optional allow-list of endpoint patterns; None = unrestricted.

    The plaintext key is logged exactly once here — the final warning tells
    the operator it will not be shown again.
    """
    async with async_session_factory() as session:
        service = ApiKeyService(session)
        # The service returns both the ORM object and the plaintext key.
        api_key_obj, api_key_plain = await service.create_api_key(
            name=name,
            description=description,
            created_by="cli",
            expires_in_days=expires_in_days,
            rate_limit_per_minute=rate_limit,
            allowed_endpoints=endpoints,
        )
        logger.info("=" * 70)
        logger.info("🔑 Clé API créée avec succès")
        logger.info("=" * 70)
        logger.info(f" ID: {api_key_obj.id}")
        logger.info(f" Nom: {api_key_obj.name}")
        logger.info(f" Clé: {api_key_plain}")
        logger.info(f" Préfixe: {api_key_obj.key_prefix}")
        logger.info(f" Rate limit: {api_key_obj.rate_limit_per_minute} req/min")
        logger.info(f" Expire le: {api_key_obj.expires_at}")
        if api_key_obj.allowed_endpoints:
            try:
                # Column stores a JSON-encoded list of endpoint patterns.
                endpoints_list = json.loads(api_key_obj.allowed_endpoints)
                logger.info(f" Endpoints: {', '.join(endpoints_list)}")
            except Exception:
                # Fall back to the raw column value if it isn't valid JSON.
                logger.info(f" Endpoints: {api_key_obj.allowed_endpoints}")
        else:
            logger.info(" Endpoints: Tous (aucune restriction)")
        logger.info("=" * 70)
        logger.info(" SAUVEGARDEZ CETTE CLÉ - Elle ne sera plus affichée !")
        logger.info("=" * 70)
async def list_api_keys():
    """List all non-revoked API keys with their usage/expiry details."""
    async with async_session_factory() as session:
        service = ApiKeyService(session)
        keys = await service.list_api_keys()
        if not keys:
            logger.info("🔭 Aucune clé API")
            return
        logger.info(f"🔑 {len(keys)} clé(s) API:\n")
        for key in keys:
            # NOTE(review): naive datetime comparison — assumes expires_at is
            # stored naive/local; confirm against how the service sets it.
            is_valid = key.is_active and (
                not key.expires_at or key.expires_at > datetime.now()
            )
            # NOTE(review): both branches are empty strings (status emojis were
            # stripped in a cleanup), so this marker currently has no effect.
            status = "" if is_valid else ""
            logger.info(f" {status} {key.name:<30} ({key.key_prefix}...)")
            logger.info(f" ID: {key.id}")
            logger.info(f" Rate limit: {key.rate_limit_per_minute} req/min")
            logger.info(f" Requêtes: {key.total_requests}")
            logger.info(f" Expire: {key.expires_at or 'Jamais'}")
            logger.info(f" Dernière utilisation: {key.last_used_at or 'Jamais'}")
            if key.allowed_endpoints:
                try:
                    endpoints = json.loads(key.allowed_endpoints)
                    # Show at most 4 patterns, then a "+N" overflow marker.
                    display = ", ".join(endpoints[:4])
                    if len(endpoints) > 4:
                        display += f"... (+{len(endpoints) - 4})"
                    logger.info(f" Endpoints: {display}")
                except Exception:
                    # Malformed JSON in the column: silently skip the detail line.
                    pass
            else:
                logger.info(" Endpoints: Tous")
            logger.info("")
async def revoke_api_key(key_id: str):
    """Deactivate the API key with the given id and stamp its revocation time."""
    async with async_session_factory() as session:
        row = await session.execute(select(ApiKey).where(ApiKey.id == key_id))
        api_key = row.scalar_one_or_none()
        if api_key is None:
            logger.error(f" Clé API '{key_id}' introuvable")
            return
        # Soft revocation: flag inactive, record when, then persist.
        api_key.is_active = False
        api_key.revoked_at = datetime.now()
        await session.commit()
        logger.info(f"🗑️ Clé API révoquée: {api_key.name}")
        logger.info(f" ID: {api_key.id}")
async def verify_api_key(api_key: str):
    """Validate a plaintext API key and log its details when it is valid."""
    async with async_session_factory() as session:
        checker = ApiKeyService(session)
        record = await checker.verify_api_key(api_key)
        if record is None:
            logger.error(" Clé API invalide ou expirée")
            return
        banner = "=" * 60
        logger.info(banner)
        logger.info(" Clé API valide")
        logger.info(banner)
        logger.info(f" Nom: {record.name}")
        logger.info(f" ID: {record.id}")
        logger.info(f" Rate limit: {record.rate_limit_per_minute} req/min")
        logger.info(f" Requêtes totales: {record.total_requests}")
        logger.info(f" Expire: {record.expires_at or 'Jamais'}")
        if record.allowed_endpoints:
            try:
                scopes = json.loads(record.allowed_endpoints)
                logger.info(f" Endpoints autorisés: {scopes}")
            except Exception:
                # Unparsable column content: skip the endpoints line entirely.
                pass
        else:
            logger.info(" Endpoints autorisés: Tous")
        logger.info(banner)
async def main():
    """Parse CLI arguments and dispatch to the matching Swagger/API-key action."""
    parser = argparse.ArgumentParser(
        description="Gestion avancée des utilisateurs Swagger et clés API",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
EXEMPLES D'UTILISATION:
=== UTILISATEURS SWAGGER ===
1. Créer un utilisateur avec preset:
python scripts/manage_security.py swagger add commercial Pass123! --preset commercial
2. Créer un admin complet:
python scripts/manage_security.py swagger add admin AdminPass
3. Créer avec tags spécifiques:
python scripts/manage_security.py swagger add client Pass123! --tags Clients Devis Factures
4. Mettre à jour un utilisateur (ajouter des tags):
python scripts/manage_security.py swagger update client --add-tags Commandes Livraisons
5. Changer complètement les tags:
python scripts/manage_security.py swagger update client --set-tags Clients Articles
6. Appliquer un preset:
python scripts/manage_security.py swagger update client --preset comptable
7. Lister les tags disponibles:
python scripts/manage_security.py swagger tags
8. Désactiver temporairement:
python scripts/manage_security.py swagger update client --inactive
=== CLÉS API ===
9. Créer une clé API:
python scripts/manage_security.py apikey create "Mon App" --days 365 --rate-limit 100
10. Créer avec endpoints restreints:
python scripts/manage_security.py apikey create "SDK-ReadOnly" --endpoints "/clients" "/clients/*" "/devis" "/devis/*"
11. Lister les clés:
python scripts/manage_security.py apikey list
12. Vérifier une clé:
python scripts/manage_security.py apikey verify sdk_live_xxxxx
13. Révoquer une clé:
python scripts/manage_security.py apikey revoke <key_id>
""",
    )
    subparsers = parser.add_subparsers(dest="command", help="Commandes")
    # --- "swagger" command group: manage Swagger UI users ---
    swagger_parser = subparsers.add_parser("swagger", help="Gestion Swagger")
    swagger_sub = swagger_parser.add_subparsers(dest="swagger_command")
    add_p = swagger_sub.add_parser("add", help="Ajouter utilisateur")
    add_p.add_argument("username", help="Nom d'utilisateur")
    add_p.add_argument("password", help="Mot de passe")
    add_p.add_argument("--full-name", help="Nom complet", default=None)
    add_p.add_argument(
        "--tags",
        nargs="*",
        help="Tags autorisés. Vide = admin complet",
        default=None,
    )
    add_p.add_argument(
        "--preset",
        choices=list(PRESET_PROFILES.keys()),
        help="Appliquer un preset de tags",
    )
    update_p = swagger_sub.add_parser("update", help="Mettre à jour utilisateur")
    update_p.add_argument("username", help="Nom d'utilisateur")
    update_p.add_argument("--add-tags", nargs="+", help="Ajouter des tags")
    update_p.add_argument("--remove-tags", nargs="+", help="Retirer des tags")
    update_p.add_argument("--set-tags", nargs="*", help="Définir les tags (remplace)")
    update_p.add_argument(
        "--preset", choices=list(PRESET_PROFILES.keys()), help="Appliquer preset"
    )
    update_p.add_argument("--active", action="store_true", help="Activer l'utilisateur")
    update_p.add_argument(
        "--inactive", action="store_true", help="Désactiver l'utilisateur"
    )
    swagger_sub.add_parser("list", help="Lister utilisateurs")
    del_p = swagger_sub.add_parser("delete", help="Supprimer utilisateur")
    del_p.add_argument("username", help="Nom d'utilisateur")
    swagger_sub.add_parser("tags", help="Lister les tags disponibles")
    # --- "apikey" command group: manage API keys ---
    apikey_parser = subparsers.add_parser("apikey", help="Gestion clés API")
    apikey_sub = apikey_parser.add_subparsers(dest="apikey_command")
    create_p = apikey_sub.add_parser("create", help="Créer clé API")
    create_p.add_argument("name", help="Nom de la clé")
    create_p.add_argument("--description", help="Description")
    create_p.add_argument("--days", type=int, default=365, help="Expiration (jours)")
    create_p.add_argument("--rate-limit", type=int, default=60, help="Req/min")
    create_p.add_argument("--endpoints", nargs="+", help="Endpoints autorisés")
    apikey_sub.add_parser("list", help="Lister clés")
    rev_p = apikey_sub.add_parser("revoke", help="Révoquer clé")
    rev_p.add_argument("key_id", help="ID de la clé")
    ver_p = apikey_sub.add_parser("verify", help="Vérifier clé")
    ver_p.add_argument("api_key", help="Clé API complète")
    args = parser.parse_args()
    # No top-level command at all: show help and exit cleanly.
    if not args.command:
        parser.print_help()
        return
    # Dispatch: each subcommand maps to one async helper defined above.
    if args.command == "swagger":
        if args.swagger_command == "add":
            await add_swagger_user(
                args.username,
                args.password,
                args.full_name,
                args.tags,
                args.preset,
            )
        elif args.swagger_command == "update":
            # Translate --active / --inactive flags into a tri-state value:
            # None means "leave the activation status unchanged".
            active = None
            if args.active:
                active = True
            elif args.inactive:
                active = False
            await update_swagger_user(
                args.username,
                add_tags=args.add_tags,
                remove_tags=args.remove_tags,
                set_tags=args.set_tags,
                preset=args.preset,
                active=active,
            )
        elif args.swagger_command == "list":
            await list_swagger_users()
        elif args.swagger_command == "delete":
            await delete_swagger_user(args.username)
        elif args.swagger_command == "tags":
            await list_available_tags()
        else:
            swagger_parser.print_help()
    elif args.command == "apikey":
        if args.apikey_command == "create":
            await create_api_key(
                name=args.name,
                description=args.description,
                expires_in_days=args.days,
                rate_limit=args.rate_limit,
                endpoints=args.endpoints,
            )
        elif args.apikey_command == "list":
            await list_api_keys()
        elif args.apikey_command == "revoke":
            await revoke_api_key(args.key_id)
        elif args.apikey_command == "verify":
            await verify_api_key(args.api_key)
        else:
            apikey_parser.print_help()
if __name__ == "__main__":
    # CLI entry point: run the async command dispatcher to completion.
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        # Ctrl-C is a normal way to leave the tool; exit with success.
        print("\n Interrupted")
        sys.exit(0)
    except Exception as e:
        # Any other failure: log it, dump the traceback, signal failure (1).
        logger.error(f" Erreur: {e}")
        import traceback

        traceback.print_exc()
        sys.exit(1)

354
scripts/test_security.py Normal file
View file

@ -0,0 +1,354 @@
import requests
import argparse
import sys
from typing import Tuple
class SecurityTester:
    """Black-box security checker for a running API instance.

    Each ``test_*`` method performs live HTTP requests against ``base_url``,
    records a pass/fail result via :meth:`log_test`, and returns a bool.
    ``print_summary`` reports aggregates and yields a process exit code.
    """

    def __init__(self, base_url: str):
        # Normalise the base URL so paths can always be appended with "/...".
        self.base_url = base_url.rstrip("/")
        # Aggregated outcome: counters plus a per-test detail list.
        self.results = {"passed": 0, "failed": 0, "tests": []}

    def log_test(self, name: str, passed: bool, details: str = ""):
        """Record one test outcome, print it, and bump the pass/fail counters."""
        status = " PASS" if passed else " FAIL"
        print(f"{status} - {name}")
        if details:
            print(f" {details}")
        self.results["tests"].append(
            {"name": name, "passed": passed, "details": details}
        )
        if passed:
            self.results["passed"] += 1
        else:
            self.results["failed"] += 1

    def test_swagger_without_auth(self) -> bool:
        """Test 1: Swagger UI must require authentication (expects HTTP 401)."""
        print("\n Test 1: Protection Swagger UI")
        try:
            response = requests.get(f"{self.base_url}/docs", timeout=5)
            if response.status_code == 401:
                self.log_test(
                    "Swagger protégé",
                    True,
                    "Code 401 retourné sans authentification",
                )
                return True
            else:
                self.log_test(
                    "Swagger protégé",
                    False,
                    f"Code {response.status_code} au lieu de 401",
                )
                return False
        except Exception as e:
            # Network errors (refused, DNS, timeout) count as a failure.
            self.log_test("Swagger protégé", False, f"Erreur: {str(e)}")
            return False

    def test_swagger_with_auth(self, username: str, password: str) -> bool:
        """Test 2: Swagger UI must accept valid HTTP Basic credentials."""
        print("\n Test 2: Accès Swagger avec authentification")
        try:
            response = requests.get(
                f"{self.base_url}/docs", auth=(username, password), timeout=5
            )
            if response.status_code == 200:
                self.log_test(
                    "Accès Swagger avec auth",
                    True,
                    f"Authentifié comme {username}",
                )
                return True
            else:
                self.log_test(
                    "Accès Swagger avec auth",
                    False,
                    f"Code {response.status_code}, credentials invalides?",
                )
                return False
        except Exception as e:
            self.log_test("Accès Swagger avec auth", False, f"Erreur: {str(e)}")
            return False

    def test_api_without_auth(self) -> bool:
        """Test 3: sample API endpoints must all reject unauthenticated calls."""
        print("\n Test 3: Protection des endpoints API")
        test_endpoints = ["/api/v1/clients", "/api/v1/documents"]
        all_protected = True
        for endpoint in test_endpoints:
            try:
                response = requests.get(f"{self.base_url}{endpoint}", timeout=5)
                if response.status_code == 401:
                    print(f" {endpoint} protégé (401)")
                else:
                    # Any non-401 means the endpoint is reachable without auth.
                    print(
                        f" {endpoint} accessible sans auth (code {response.status_code})"
                    )
                    all_protected = False
            except Exception as e:
                print(f" {endpoint} erreur: {str(e)}")
                all_protected = False
        self.log_test("Endpoints API protégés", all_protected)
        return all_protected

    def test_health_endpoint_public(self) -> bool:
        """Test 4: /health must stay publicly reachable (expects HTTP 200)."""
        print("\n Test 4: Endpoint /health public")
        try:
            response = requests.get(f"{self.base_url}/health", timeout=5)
            if response.status_code == 200:
                self.log_test("/health accessible", True, "Endpoint public fonctionne")
                return True
            else:
                self.log_test(
                    "/health accessible",
                    False,
                    f"Code {response.status_code} inattendu",
                )
                return False
        except Exception as e:
            self.log_test("/health accessible", False, f"Erreur: {str(e)}")
            return False

    def test_api_key_creation(self, username: str, password: str) -> Tuple[bool, str]:
        """Test 5: log in for a JWT, then create an API key through the API.

        Returns ``(success, plaintext_api_key)``; the key is "" on failure.
        """
        print("\n Test 5: Création d'une clé API")
        try:
            # Step 1: obtain a JWT via the login endpoint.
            login_response = requests.post(
                f"{self.base_url}/api/v1/auth/login",
                json={"email": username, "password": password},
                timeout=5,
            )
            if login_response.status_code != 200:
                self.log_test(
                    "Création clé API",
                    False,
                    "Impossible de se connecter pour obtenir un JWT",
                )
                return False, ""
            jwt_token = login_response.json().get("access_token")
            # Step 2: create the key using the JWT as a Bearer token.
            create_response = requests.post(
                f"{self.base_url}/api/v1/api-keys",
                headers={"Authorization": f"Bearer {jwt_token}"},
                json={
                    "name": "Test API Key",
                    "description": "Clé de test automatisé",
                    "rate_limit_per_minute": 60,
                    "expires_in_days": 30,
                },
                timeout=5,
            )
            if create_response.status_code == 201:
                api_key = create_response.json().get("api_key")
                # NOTE(review): assumes the 201 body always carries "api_key";
                # api_key[:20] would raise if it were missing.
                self.log_test("Création clé API", True, f"Clé créée: {api_key[:20]}...")
                return True, api_key
            else:
                self.log_test(
                    "Création clé API",
                    False,
                    f"Code {create_response.status_code}",
                )
                return False, ""
        except Exception as e:
            self.log_test("Création clé API", False, f"Erreur: {str(e)}")
            return False, ""

    def test_api_key_usage(self, api_key: str) -> bool:
        """Test 6: a freshly created key must grant access via X-API-Key."""
        print("\n Test 6: Utilisation d'une clé API")
        if not api_key:
            self.log_test("Utilisation clé API", False, "Pas de clé disponible")
            return False
        try:
            response = requests.get(
                f"{self.base_url}/api/v1/clients",
                headers={"X-API-Key": api_key},
                timeout=5,
            )
            if response.status_code == 200:
                self.log_test("Utilisation clé API", True, "Clé acceptée")
                return True
            else:
                self.log_test(
                    "Utilisation clé API",
                    False,
                    f"Code {response.status_code}, clé refusée?",
                )
                return False
        except Exception as e:
            self.log_test("Utilisation clé API", False, f"Erreur: {str(e)}")
            return False

    def test_invalid_api_key(self) -> bool:
        """Test 7: a bogus key must be rejected with HTTP 401."""
        print("\n Test 7: Rejet de clé API invalide")
        invalid_key = "sdk_live_invalid_key_12345"
        try:
            response = requests.get(
                f"{self.base_url}/api/v1/clients",
                headers={"X-API-Key": invalid_key},
                timeout=5,
            )
            if response.status_code == 401:
                self.log_test("Clé invalide rejetée", True, "Code 401 comme attendu")
                return True
            else:
                self.log_test(
                    "Clé invalide rejetée",
                    False,
                    f"Code {response.status_code} au lieu de 401",
                )
                return False
        except Exception as e:
            self.log_test("Clé invalide rejetée", False, f"Erreur: {str(e)}")
            return False

    def test_rate_limiting(self, api_key: str) -> bool:
        """Test 8: best-effort rate-limit probe (70 rapid requests).

        Deliberately non-failing: absence of a 429 is reported as a pass,
        since rate limiting may simply not be enabled on the target.
        """
        print("\n Test 8: Rate limiting (test simple)")
        if not api_key:
            self.log_test("Rate limiting", False, "Pas de clé disponible")
            return False
        print(" Envoi de 70 requêtes rapides...")
        rate_limited = False
        for i in range(70):
            try:
                response = requests.get(
                    f"{self.base_url}/health",
                    headers={"X-API-Key": api_key},
                    timeout=1,
                )
                if response.status_code == 429:
                    rate_limited = True
                    print(f" Rate limit atteint à la requête {i + 1}")
                    break
            except Exception:
                # Individual request errors are ignored; we only probe for 429.
                pass
        if rate_limited:
            self.log_test("Rate limiting", True, "Rate limit détecté")
            return True
        else:
            self.log_test(
                "Rate limiting",
                True,
                "Aucun rate limit détecté (peut être normal si pas implémenté)",
            )
            return True

    def print_summary(self):
        """Print the aggregated results; return 0 if all passed, else 1."""
        print("\n" + "=" * 60)
        print(" RÉSUMÉ DES TESTS")
        print("=" * 60)
        total = self.results["passed"] + self.results["failed"]
        # Guard against division by zero when no test was recorded.
        success_rate = (self.results["passed"] / total * 100) if total > 0 else 0
        print(f"\nTotal: {total} tests")
        print(f" Réussis: {self.results['passed']}")
        print(f" Échoués: {self.results['failed']}")
        print(f"Taux de réussite: {success_rate:.1f}%\n")
        if self.results["failed"] == 0:
            print("🎉 Tous les tests sont passés ! Sécurité OK.")
            return 0
        else:
            print(" Certains tests ont échoué. Vérifiez la configuration.")
            return 1
def main():
    """CLI entry point: run the full security suite against a live API."""
    parser = argparse.ArgumentParser(
        description="Test automatisé de la sécurité de l'API"
    )
    parser.add_argument(
        "--url",
        required=True,
        help="URL de base de l'API (ex: http://localhost:8000)",
    )
    parser.add_argument(
        "--swagger-user", required=True, help="Utilisateur Swagger pour les tests"
    )
    parser.add_argument(
        "--swagger-pass", required=True, help="Mot de passe Swagger pour les tests"
    )
    parser.add_argument(
        "--skip-rate-limit",
        action="store_true",
        help="Sauter le test de rate limiting (long)",
    )
    opts = parser.parse_args()

    print(" Démarrage des tests de sécurité")
    print(f" URL cible: {opts.url}\n")

    suite = SecurityTester(opts.url)
    # Unauthenticated / public-surface checks first.
    suite.test_swagger_without_auth()
    suite.test_swagger_with_auth(opts.swagger_user, opts.swagger_pass)
    suite.test_api_without_auth()
    suite.test_health_endpoint_public()

    # API-key flow: the remaining checks need a freshly created key.
    created, key_value = suite.test_api_key_creation(
        opts.swagger_user, opts.swagger_pass
    )
    if created and key_value:
        suite.test_api_key_usage(key_value)
        suite.test_invalid_api_key()
        if opts.skip_rate_limit:
            print("\n Test de rate limiting sauté")
        else:
            suite.test_rate_limiting(key_value)
    else:
        print("\n Tests avec clé API sautés (création échouée)")

    sys.exit(suite.print_summary())


if __name__ == "__main__":
    main()

View file

@ -5,10 +5,12 @@ import jwt
import secrets import secrets
import hashlib import hashlib
SECRET_KEY = "VOTRE_SECRET_KEY_A_METTRE_EN_.ENV" from config.config import settings
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 10080 SECRET_KEY = settings.jwt_secret
REFRESH_TOKEN_EXPIRE_DAYS = 7 ALGORITHM = settings.jwt_algorithm
ACCESS_TOKEN_EXPIRE_MINUTES = settings.access_token_expire_minutes
REFRESH_TOKEN_EXPIRE_DAYS = settings.refresh_token_expire_days
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
@ -67,9 +69,13 @@ def decode_token(token: str) -> Optional[Dict]:
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
return payload return payload
except jwt.ExpiredSignatureError: except jwt.ExpiredSignatureError:
return None raise jwt.InvalidTokenError("Token expiré")
except jwt.JWTError: except jwt.DecodeError:
return None raise jwt.InvalidTokenError("Token invalide (format incorrect)")
except jwt.InvalidTokenError as e:
raise jwt.InvalidTokenError(f"Token invalide: {str(e)}")
except Exception as e:
raise jwt.InvalidTokenError(f"Erreur lors du décodage du token: {str(e)}")
def validate_password_strength(password: str) -> tuple[bool, str]: def validate_password_strength(password: str) -> tuple[bool, str]:

223
services/api_key.py Normal file
View file

@ -0,0 +1,223 @@
import secrets
import hashlib
import json
from datetime import datetime, timedelta
from typing import Optional, List, Dict
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, or_
import logging
from database.models.api_key import ApiKey
logger = logging.getLogger(__name__)
class ApiKeyService:
    """Async service for creating, verifying, and managing API keys.

    Keys are stored hashed (SHA-256); only the ``sdk_live_`` prefix is kept
    in clear for identification. All DB access goes through the injected
    SQLAlchemy ``AsyncSession``.
    """

    def __init__(self, session: AsyncSession):
        # Session is owned by the caller; this service only uses/commits it.
        self.session = session

    @staticmethod
    def generate_api_key() -> str:
        """Generate a new random API key with the ``sdk_live_`` prefix."""
        # token_urlsafe(32) → ~43 URL-safe characters of CSPRNG entropy.
        random_part = secrets.token_urlsafe(32)
        return f"sdk_live_{random_part}"

    @staticmethod
    def hash_api_key(api_key: str) -> str:
        """Return the SHA-256 hex digest used to store/look up a key."""
        return hashlib.sha256(api_key.encode()).hexdigest()

    @staticmethod
    def get_key_prefix(api_key: str) -> str:
        """Return the first 12 characters of a key (for display/identification)."""
        return api_key[:12] if len(api_key) >= 12 else api_key

    async def create_api_key(
        self,
        name: str,
        description: Optional[str] = None,
        created_by: str = "system",
        user_id: Optional[str] = None,
        expires_in_days: Optional[int] = None,
        rate_limit_per_minute: int = 60,
        allowed_endpoints: Optional[List[str]] = None,
    ) -> tuple[ApiKey, str]:
        """Create and persist a new API key.

        Returns ``(orm_object, plaintext_key)`` — the plaintext is never
        stored, only its hash, so the caller must show/save it immediately.
        """
        api_key_plain = self.generate_api_key()
        key_hash = self.hash_api_key(api_key_plain)
        key_prefix = self.get_key_prefix(api_key_plain)
        expires_at = None
        if expires_in_days:
            # NOTE(review): naive local datetime — confirm timezone policy.
            expires_at = datetime.now() + timedelta(days=expires_in_days)
        api_key_obj = ApiKey(
            key_hash=key_hash,
            key_prefix=key_prefix,
            name=name,
            description=description,
            created_by=created_by,
            user_id=user_id,
            expires_at=expires_at,
            rate_limit_per_minute=rate_limit_per_minute,
            # Endpoint allow-list is serialized to JSON; None = unrestricted.
            allowed_endpoints=json.dumps(allowed_endpoints)
            if allowed_endpoints
            else None,
        )
        self.session.add(api_key_obj)
        await self.session.commit()
        await self.session.refresh(api_key_obj)
        logger.info(f" Clé API créée: {name} (prefix: {key_prefix})")
        return api_key_obj, api_key_plain

    async def verify_api_key(self, api_key_plain: str) -> Optional[ApiKey]:
        """Look up a plaintext key; return its row if active and unexpired.

        Side effect: on success, increments ``total_requests`` and updates
        ``last_used_at``, committing the session.
        """
        key_hash = self.hash_api_key(api_key_plain)
        result = await self.session.execute(
            select(ApiKey).where(
                and_(
                    ApiKey.key_hash == key_hash,
                    ApiKey.is_active,
                    ApiKey.revoked_at.is_(None),
                    # No expiry set, or expiry strictly in the future.
                    or_(
                        ApiKey.expires_at.is_(None), ApiKey.expires_at > datetime.now()
                    ),
                )
            )
        )
        api_key_obj = result.scalar_one_or_none()
        if api_key_obj:
            api_key_obj.total_requests += 1
            api_key_obj.last_used_at = datetime.now()
            await self.session.commit()
            logger.debug(f" Clé API validée: {api_key_obj.name}")
        else:
            logger.warning(" Clé API invalide ou expirée")
        return api_key_obj

    async def list_api_keys(
        self,
        include_revoked: bool = False,
        user_id: Optional[str] = None,
    ) -> List[ApiKey]:
        """List API keys, newest first, optionally filtered by owner/revocation."""
        query = select(ApiKey)
        if not include_revoked:
            query = query.where(ApiKey.revoked_at.is_(None))
        if user_id:
            query = query.where(ApiKey.user_id == user_id)
        query = query.order_by(ApiKey.created_at.desc())
        result = await self.session.execute(query)
        return list(result.scalars().all())

    async def revoke_api_key(self, key_id: str) -> bool:
        """Soft-revoke a key by id; return False if the id is unknown."""
        result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
        api_key_obj = result.scalar_one_or_none()
        if not api_key_obj:
            return False
        api_key_obj.is_active = False
        api_key_obj.revoked_at = datetime.now()
        await self.session.commit()
        logger.info(f"🗑️ Clé API révoquée: {api_key_obj.name}")
        return True

    async def get_by_id(self, key_id: str) -> Optional[ApiKey]:
        """Fetch a key row by primary id, or None."""
        result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
        return result.scalar_one_or_none()

    async def check_rate_limit(self, api_key_obj: ApiKey) -> tuple[bool, Dict]:
        """Placeholder: always allows the request (no counting implemented)."""
        return True, {
            "allowed": True,
            "limit": api_key_obj.rate_limit_per_minute,
            "remaining": api_key_obj.rate_limit_per_minute,
        }

    async def check_endpoint_access(self, api_key_obj: ApiKey, endpoint: str) -> bool:
        """Check whether a key may call *endpoint*.

        Patterns supported: exact match, trailing ``/*`` (subtree), trailing
        ``*`` (prefix). An empty/absent allow-list, or ``*``/``/*`` in the
        list, grants access to everything. Malformed JSON denies access.
        """
        if not api_key_obj.allowed_endpoints:
            logger.debug(
                f"🔓 API Key {api_key_obj.name}: Aucune restriction d'endpoint"
            )
            return True
        try:
            allowed = json.loads(api_key_obj.allowed_endpoints)
            if "*" in allowed or "/*" in allowed:
                logger.debug(f"🔓 API Key {api_key_obj.name}: Accès global autorisé")
                return True
            for pattern in allowed:
                if pattern == endpoint:
                    logger.debug(f" Match exact: {pattern} == {endpoint}")
                    return True
                if pattern.endswith("/*"):
                    base = pattern[:-2]  # "/clients/*" → "/clients"
                    # Subtree match: the base itself or anything below it.
                    if endpoint == base or endpoint.startswith(base + "/"):
                        logger.debug(f" Match wildcard: {pattern}{endpoint}")
                        return True
                elif pattern.endswith("*"):
                    base = pattern[:-1]  # "/clients*" → "/clients"
                    # Plain prefix match (also matches e.g. "/clients2").
                    if endpoint.startswith(base):
                        logger.debug(f" Match prefix: {pattern}{endpoint}")
                        return True
            logger.warning(
                f" API Key {api_key_obj.name}: Accès refusé à {endpoint}\n"
                f" Endpoints autorisés: {allowed}"
            )
            return False
        except json.JSONDecodeError:
            # Fail closed: corrupt allow-list data denies access.
            logger.error(f" Erreur parsing allowed_endpoints pour {api_key_obj.id}")
            return False
def api_key_to_response(api_key_obj: ApiKey, show_key: bool = False) -> Dict:
    """Serialize an ApiKey ORM object into a plain dict for API responses.

    ``show_key`` is kept for interface compatibility; the current
    implementation does not use it (the plaintext key is never stored).
    """
    endpoint_list = None
    raw_endpoints = api_key_obj.allowed_endpoints
    if raw_endpoints:
        try:
            endpoint_list = json.loads(raw_endpoints)
        except json.JSONDecodeError:
            # Malformed column content: expose no endpoint information.
            endpoint_list = None
    expiry = api_key_obj.expires_at
    expired = bool(expiry and expiry < datetime.now())
    return {
        "id": api_key_obj.id,
        "name": api_key_obj.name,
        "description": api_key_obj.description,
        "key_prefix": api_key_obj.key_prefix,
        "is_active": api_key_obj.is_active,
        "is_expired": expired,
        "rate_limit_per_minute": api_key_obj.rate_limit_per_minute,
        "allowed_endpoints": endpoint_list,
        "total_requests": api_key_obj.total_requests,
        "last_used_at": api_key_obj.last_used_at,
        "created_at": api_key_obj.created_at,
        "expires_at": api_key_obj.expires_at,
        "revoked_at": api_key_obj.revoked_at,
        "created_by": api_key_obj.created_by,
    }

View file

@ -6,7 +6,7 @@ import httpx
from datetime import datetime from datetime import datetime
from typing import Optional, Tuple, List from typing import Optional, Tuple, List
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import false, select, true, update, and_ from sqlalchemy import false, select, update, and_
import logging import logging
from config.config import settings from config.config import settings
@ -55,7 +55,7 @@ class SageGatewayService:
and_( and_(
SageGatewayConfig.id == gateway_id, SageGatewayConfig.id == gateway_id,
SageGatewayConfig.user_id == user_id, SageGatewayConfig.user_id == user_id,
# SageGatewayConfig.is_deleted.is_(false()), SageGatewayConfig.is_deleted == false(),
) )
) )
) )
@ -67,7 +67,7 @@ class SageGatewayService:
query = select(SageGatewayConfig).where(SageGatewayConfig.user_id == user_id) query = select(SageGatewayConfig).where(SageGatewayConfig.user_id == user_id)
if not include_deleted: if not include_deleted:
query = query.where(SageGatewayConfig.is_deleted.is_(false())) query = query.where(SageGatewayConfig.is_deleted == false())
query = query.order_by( query = query.order_by(
SageGatewayConfig.is_active.desc(), SageGatewayConfig.is_active.desc(),
@ -167,7 +167,7 @@ class SageGatewayService:
and_( and_(
SageGatewayConfig.user_id == user_id, SageGatewayConfig.user_id == user_id,
SageGatewayConfig.is_active, SageGatewayConfig.is_active,
SageGatewayConfig.is_deleted.is_(true()), SageGatewayConfig.is_deleted == false(),
) )
) )
) )

View file

@ -0,0 +1,361 @@
import os
import logging
import requests
from pathlib import Path
from datetime import datetime
from typing import Optional, Tuple, Dict, List
from sqlalchemy.ext.asyncio import AsyncSession
logger = logging.getLogger(__name__)
# Where signed PDFs are stored; override with the SIGNED_DOCS_PATH env var.
SIGNED_DOCS_DIR = Path(os.getenv("SIGNED_DOCS_PATH", "/app/data/signed_documents"))
# Import-time side effect: make sure the directory exists before any download.
SIGNED_DOCS_DIR.mkdir(parents=True, exist_ok=True)
class UniversignDocumentService:
"""Service de gestion des documents signés Universign - VERSION CORRIGÉE"""
def __init__(self, api_url: str, api_key: str, timeout: int = 60):
self.api_url = api_url.rstrip("/")
self.api_key = api_key
self.timeout = timeout
self.auth = (api_key, "")
    def fetch_transaction_documents(self, transaction_id: str) -> Optional[List[Dict]]:
        """Fetch the document list of a Universign transaction.

        Returns the (possibly empty) list of document dicts on HTTP 200,
        or None on 404, any other HTTP error, timeout, or exception.
        """
        try:
            logger.info(f" Récupération documents pour transaction: {transaction_id}")
            response = requests.get(
                f"{self.api_url}/transactions/{transaction_id}",
                auth=self.auth,
                timeout=self.timeout,
                headers={"Accept": "application/json"},
            )
            if response.status_code == 200:
                data = response.json()
                documents = data.get("documents", [])
                logger.info(f"{len(documents)} document(s) trouvé(s)")
                # Debug-level dump of each document's id/name/status.
                for idx, doc in enumerate(documents):
                    logger.debug(
                        f" Document {idx}: id={doc.get('id')}, "
                        f"name={doc.get('name')}, status={doc.get('status')}"
                    )
                return documents
            elif response.status_code == 404:
                logger.warning(
                    f"Transaction {transaction_id} introuvable sur Universign"
                )
                return None
            else:
                # Truncate the body to keep logs bounded.
                logger.error(
                    f"Erreur HTTP {response.status_code} pour {transaction_id}: "
                    f"{response.text[:500]}"
                )
                return None
        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout récupération transaction {transaction_id}")
            return None
        except Exception as e:
            logger.error(f" Erreur fetch documents: {e}", exc_info=True)
            return None
    def download_signed_document(
        self, transaction_id: str, document_id: str
    ) -> Optional[bytes]:
        """Download one document of a transaction as raw bytes.

        Returns the PDF bytes on success; None on 404/403, any other HTTP
        error, timeout, exception, or when the payload is implausibly small.
        """
        try:
            download_url = (
                f"{self.api_url}/transactions/{transaction_id}"
                f"/documents/{document_id}/download"
            )
            logger.info(f"Téléchargement depuis: {download_url}")
            response = requests.get(
                download_url,
                auth=self.auth,
                timeout=self.timeout,
                stream=True,
            )
            if response.status_code == 200:
                content_type = response.headers.get("Content-Type", "")
                content_length = response.headers.get("Content-Length", "unknown")
                logger.info(
                    f"Téléchargement réussi: "
                    f"Content-Type={content_type}, Size={content_length}"
                )
                # Warn (but continue) when the server announces a non-PDF type.
                if (
                    "pdf" not in content_type.lower()
                    and "octet-stream" not in content_type.lower()
                ):
                    logger.warning(
                        f"Type de contenu inattendu: {content_type}. "
                        f"Tentative de lecture quand même..."
                    )
                content = response.content
                # Sanity check: anything under 1 KiB is not a real signed PDF.
                if len(content) < 1024:
                    logger.error(f" Document trop petit: {len(content)} octets")
                    return None
                return content
            elif response.status_code == 404:
                logger.error(
                    f" Document {document_id} introuvable pour transaction {transaction_id}"
                )
                return None
            elif response.status_code == 403:
                logger.error(
                    f" Accès refusé au document {document_id}. "
                    f"Vérifiez que la transaction est bien signée."
                )
                return None
            else:
                logger.error(
                    f" Erreur HTTP {response.status_code}: {response.text[:500]}"
                )
                return None
        except requests.exceptions.Timeout:
            logger.error(f"⏱️ Timeout téléchargement document {document_id}")
            return None
        except Exception as e:
            logger.error(f" Erreur téléchargement: {e}", exc_info=True)
            return None
    async def download_and_store_signed_document(
        self, session: AsyncSession, transaction, force: bool = False
    ) -> Tuple[bool, Optional[str]]:
        """Download a transaction's signed PDF to disk and update the row.

        Returns ``(True, None)`` on success (or when already downloaded and
        ``force`` is False), else ``(False, error_message)``. Each failure
        path stores the error on ``transaction.download_error`` and commits.
        """
        # Short-circuit when the file is already present, unless forced.
        if not force and transaction.signed_document_path:
            if os.path.exists(transaction.signed_document_path):
                logger.debug(
                    f"Document déjà téléchargé: {transaction.transaction_id}"
                )
                return True, None
        transaction.download_attempts += 1
        try:
            logger.info(
                f"Récupération document signé pour: {transaction.transaction_id}"
            )
            documents = self.fetch_transaction_documents(transaction.transaction_id)
            if not documents:
                error = "Aucun document trouvé dans la transaction Universign"
                logger.warning(f"{error}")
                transaction.download_error = error
                await session.commit()
                return False, error
            # Pick the first document whose status marks it as signed;
            # otherwise fall back to the first document of the list.
            document_id = None
            for doc in documents:
                doc_id = doc.get("id")
                doc_status = doc.get("status", "").lower()
                if doc_status in ["signed", "completed", "closed"]:
                    document_id = doc_id
                    logger.info(
                        f"Document signé trouvé: {doc_id} (status: {doc_status})"
                    )
                    break
                if document_id is None:
                    document_id = doc_id
            if not document_id:
                error = "Impossible de déterminer l'ID du document à télécharger"
                logger.error(f" {error}")
                transaction.download_error = error
                await session.commit()
                return False, error
            # Optional column on some transaction models — set only if present.
            if hasattr(transaction, "universign_document_id"):
                transaction.universign_document_id = document_id
            pdf_content = self.download_signed_document(
                transaction_id=transaction.transaction_id, document_id=document_id
            )
            if not pdf_content:
                error = f"Échec téléchargement document {document_id}"
                logger.error(f" {error}")
                transaction.download_error = error
                await session.commit()
                return False, error
            # Persist the PDF to the shared signed-documents directory.
            filename = self._generate_filename(transaction)
            file_path = SIGNED_DOCS_DIR / filename
            with open(file_path, "wb") as f:
                f.write(pdf_content)
            file_size = os.path.getsize(file_path)
            # Record storage metadata and clear any previous error.
            transaction.signed_document_path = str(file_path)
            transaction.signed_document_downloaded_at = datetime.now()
            transaction.signed_document_size_bytes = file_size
            transaction.download_error = None
            transaction.document_url = (
                f"{self.api_url}/transactions/{transaction.transaction_id}"
                f"/documents/{document_id}/download"
            )
            await session.commit()
            # NOTE(review): "(unknown)" looks like a lost placeholder — this
            # probably meant to log the filename; confirm against history.
            logger.info(
                f"Document signé téléchargé: (unknown) ({file_size / 1024:.1f} KB)"
            )
            return True, None
        except OSError as e:
            # Disk-level problems (permissions, full volume, bad path).
            error = f"Erreur filesystem: {str(e)}"
            logger.error(f" {error}")
            transaction.download_error = error
            await session.commit()
            return False, error
        except Exception as e:
            error = f"Erreur inattendue: {str(e)}"
            logger.error(f" {error}", exc_info=True)
            transaction.download_error = error
            await session.commit()
            return False, error
def _generate_filename(self, transaction) -> str:
"""Génère un nom de fichier unique pour le document signé"""
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
tx_id = transaction.transaction_id.replace("tr_", "")
filename = f"{transaction.sage_document_id}_{tx_id}_{timestamp}_signed.pdf"
return filename
def get_document_path(self, transaction) -> Optional[Path]:
    """Return the on-disk path of the signed document, or ``None``.

    ``None`` is returned both when the transaction never recorded a path
    and when the recorded file has since disappeared from disk.
    """
    stored = transaction.signed_document_path
    if stored:
        candidate = Path(stored)
        return candidate if candidate.exists() else None
    return None
async def cleanup_old_documents(self, days_to_keep: int = 90) -> Tuple[int, int]:
    """Delete signed PDFs older than *days_to_keep* days.

    Scans ``SIGNED_DOCS_DIR`` for ``*.pdf`` files, removes those whose
    mtime predates the cutoff, and returns ``(deleted_count, mb_freed)``
    where the freed size is truncated to whole megabytes. Per-file
    failures are logged and skipped so one bad file cannot abort the run.
    """
    from datetime import timedelta

    limite = datetime.now() - timedelta(days=days_to_keep)
    nb_supprimes = 0
    octets_liberes = 0
    for pdf in SIGNED_DOCS_DIR.glob("*.pdf"):
        try:
            modifie_le = datetime.fromtimestamp(os.path.getmtime(pdf))
            if modifie_le >= limite:
                continue
            octets_liberes += os.path.getsize(pdf)
            os.remove(pdf)
            nb_supprimes += 1
            logger.info(f"🗑️ Supprimé: {pdf.name}")
        except Exception as e:
            logger.error(f"Erreur suppression {pdf}: {e}")
    mo_liberes = octets_liberes / (1024 * 1024)
    logger.info(
        f"Nettoyage terminé: {nb_supprimes} fichiers supprimés "
        f"({mo_liberes:.2f} MB libérés)"
    )
    return nb_supprimes, int(mo_liberes)
def diagnose_transaction(self, transaction_id: str) -> Dict:
    """Run a full diagnostic of a Universign transaction for debugging.

    Fetches the transaction, summarises its state / documents /
    participants, and probes each document's download URL with a HEAD
    request. Always returns the report dict; failures are recorded
    inside it instead of being raised.
    """
    report = {
        "transaction_id": transaction_id,
        "api_url": self.api_url,
        "timestamp": datetime.now().isoformat(),
        "checks": {},
    }
    checks = report["checks"]
    try:
        logger.info(f"Diagnostic transaction: {transaction_id}")
        resp = requests.get(
            f"{self.api_url}/transactions/{transaction_id}",
            auth=self.auth,
            timeout=self.timeout,
        )
        checks["transaction_fetch"] = {
            "status_code": resp.status_code,
            "success": resp.status_code == 200,
        }
        if resp.status_code != 200:
            # Keep only the first 500 chars of the error body.
            checks["transaction_fetch"]["error"] = resp.text[:500]
            return report
        payload = resp.json()
        checks["transaction_data"] = {
            "state": payload.get("state"),
            "documents_count": len(payload.get("documents", [])),
            "participants_count": len(payload.get("participants", [])),
        }
        checks["documents"] = []
        for doc_data in payload.get("documents", []):
            entry = {
                "id": doc_data.get("id"),
                "name": doc_data.get("name"),
                "status": doc_data.get("status"),
            }
            if doc_data.get("id"):
                probe_url = (
                    f"{self.api_url}/transactions/{transaction_id}"
                    f"/documents/{doc_data['id']}/download"
                )
                try:
                    head_resp = requests.head(
                        probe_url,
                        auth=self.auth,
                        timeout=10,
                    )
                    entry["download_check"] = {
                        "url": probe_url,
                        "status_code": head_resp.status_code,
                        "accessible": head_resp.status_code in [200, 302],
                        "content_type": head_resp.headers.get("Content-Type"),
                    }
                except Exception as e:
                    entry["download_check"] = {"error": str(e)}
            checks["documents"].append(entry)
        report["success"] = True
    except Exception as e:
        report["success"] = False
        report["error"] = str(e)
    return report

View file

@ -1,10 +1,12 @@
import requests import requests
import json import json
import logging import logging
import uuid
from typing import Dict, Optional, Tuple from typing import Dict, Optional, Tuple
from datetime import datetime, timedelta from datetime import datetime, timedelta
from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, or_ from sqlalchemy import select, and_, or_
from sqlalchemy.orm import selectinload
from database import ( from database import (
UniversignTransaction, UniversignTransaction,
@ -13,7 +15,11 @@ from database import (
UniversignTransactionStatus, UniversignTransactionStatus,
LocalDocumentStatus, LocalDocumentStatus,
UniversignSignerStatus, UniversignSignerStatus,
EmailLog,
StatutEmail,
) )
from data.data import templates_signature_email
from services.universign_document import UniversignDocumentService
from utils.universign_status_mapping import ( from utils.universign_status_mapping import (
map_universign_to_local, map_universign_to_local,
is_transition_allowed, is_transition_allowed,
@ -31,6 +37,17 @@ class UniversignSyncService:
self.api_key = api_key self.api_key = api_key
self.timeout = timeout self.timeout = timeout
self.auth = (api_key, "") self.auth = (api_key, "")
self.sage_client = None
self.email_queue = None
self.settings = None
self.document_service = UniversignDocumentService(
api_url=api_url, api_key=api_key, timeout=60
)
def configure(self, sage_client, email_queue, settings):
self.sage_client = sage_client
self.email_queue = email_queue
self.settings = settings
def fetch_transaction_status(self, transaction_id: str) -> Optional[Dict]: def fetch_transaction_status(self, transaction_id: str) -> Optional[Dict]:
start_time = datetime.now() start_time = datetime.now()
@ -48,9 +65,7 @@ class UniversignSyncService:
if response.status_code == 200: if response.status_code == 200:
data = response.json() data = response.json()
logger.info( logger.info(
f"✓ Fetch OK: {transaction_id} " f"Fetch OK: {transaction_id} status={data.get('state')} ({response_time_ms}ms)"
f"status={data.get('state')} "
f"({response_time_ms}ms)"
) )
return { return {
"transaction": data, "transaction": data,
@ -67,8 +82,7 @@ class UniversignSyncService:
else: else:
logger.error( logger.error(
f"Erreur HTTP {response.status_code} " f"Erreur HTTP {response.status_code} pour {transaction_id}: {response.text}"
f"pour {transaction_id}: {response.text}"
) )
return None return None
@ -80,151 +94,12 @@ class UniversignSyncService:
logger.error(f"Erreur fetch {transaction_id}: {e}", exc_info=True) logger.error(f"Erreur fetch {transaction_id}: {e}", exc_info=True)
return None return None
async def sync_transaction(
self,
session: AsyncSession,
transaction: UniversignTransaction,
force: bool = False,
) -> Tuple[bool, Optional[str]]:
if is_final_status(transaction.local_status.value) and not force:
logger.debug(
f"Skip {transaction.transaction_id}: "
f"statut final {transaction.local_status.value}"
)
transaction.needs_sync = False
await session.commit()
return True, None
# === FETCH UNIVERSIGN ===
result = self.fetch_transaction_status(transaction.transaction_id)
if not result:
error = "Échec récupération données Universign"
await self._log_sync_attempt(session, transaction, "polling", False, error)
return False, error
# === EXTRACTION DONNÉES ===
universign_data = result["transaction"]
universign_status_raw = universign_data.get("state", "draft")
# === MAPPING STATUT ===
new_local_status = map_universign_to_local(universign_status_raw)
previous_local_status = transaction.local_status.value
# === VALIDATION TRANSITION ===
if not is_transition_allowed(previous_local_status, new_local_status):
logger.warning(
f"Transition refusée: {previous_local_status}{new_local_status}"
)
# En cas de conflit, résoudre par priorité
new_local_status = resolve_status_conflict(
previous_local_status, new_local_status
)
# === DÉTECTION CHANGEMENT ===
status_changed = previous_local_status != new_local_status
if not status_changed and not force:
logger.debug(f"Pas de changement pour {transaction.transaction_id}")
transaction.last_synced_at = datetime.now()
transaction.needs_sync = False
await session.commit()
return True, None
# === MISE À JOUR TRANSACTION ===
transaction.universign_status = UniversignTransactionStatus(
universign_status_raw
)
transaction.local_status = LocalDocumentStatus(new_local_status)
transaction.universign_status_updated_at = datetime.now()
# === DATES SPÉCIFIQUES ===
if new_local_status == "EN_COURS" and not transaction.sent_at:
transaction.sent_at = datetime.now()
if new_local_status == "SIGNE" and not transaction.signed_at:
transaction.signed_at = datetime.now()
if new_local_status == "REFUSE" and not transaction.refused_at:
transaction.refused_at = datetime.now()
if new_local_status == "EXPIRE" and not transaction.expired_at:
transaction.expired_at = datetime.now()
# === URLS ===
if "signers" in universign_data and len(universign_data["signers"]) > 0:
first_signer = universign_data["signers"][0]
if "url" in first_signer:
transaction.signer_url = first_signer["url"]
if "documents" in universign_data and len(universign_data["documents"]) > 0:
first_doc = universign_data["documents"][0]
if "url" in first_doc:
transaction.document_url = first_doc["url"]
# === SIGNATAIRES ===
await self._sync_signers(session, transaction, universign_data)
# === FLAGS ===
transaction.last_synced_at = datetime.now()
transaction.sync_attempts += 1
transaction.needs_sync = not is_final_status(new_local_status)
transaction.sync_error = None
# === LOG ===
await self._log_sync_attempt(
session=session,
transaction=transaction,
sync_type="polling",
success=True,
error_message=None,
previous_status=previous_local_status,
new_status=new_local_status,
changes=json.dumps(
{
"status_changed": status_changed,
"universign_raw": universign_status_raw,
"response_time_ms": result.get("response_time_ms"),
}
),
)
await session.commit()
# === ACTIONS MÉTIER ===
if status_changed:
await self._execute_status_actions(session, transaction, new_local_status)
logger.info(
f"✓ Sync OK: {transaction.transaction_id} "
f"{previous_local_status}{new_local_status}"
)
return True, None
async def sync_all_pending( async def sync_all_pending(
self, session: AsyncSession, max_transactions: int = 50 self, session: AsyncSession, max_transactions: int = 50
) -> Dict[str, int]: ) -> Dict[str, int]:
"""
Synchronise toutes les transactions en attente
"""
from sqlalchemy.orm import selectinload # Si pas déjà importé en haut
query = ( query = (
select(UniversignTransaction) select(UniversignTransaction)
.options(selectinload(UniversignTransaction.signers)) # AJOUTER CETTE LIGNE .options(selectinload(UniversignTransaction.signers))
.where( .where(
and_( and_(
UniversignTransaction.needs_sync, UniversignTransaction.needs_sync,
@ -267,7 +142,6 @@ class UniversignSyncService:
if success: if success:
stats["success"] += 1 stats["success"] += 1
if transaction.local_status.value != previous_status: if transaction.local_status.value != previous_status:
stats["status_changes"] += 1 stats["status_changes"] += 1
else: else:
@ -280,60 +154,89 @@ class UniversignSyncService:
stats["failed"] += 1 stats["failed"] += 1
logger.info( logger.info(
f"Polling terminé: {stats['success']}/{stats['total_found']} OK, " f"Polling terminé: {stats['success']}/{stats['total_found']} OK, {stats['status_changes']} changements détectés"
f"{stats['status_changes']} changements détectés"
) )
return stats return stats
async def process_webhook( async def process_webhook(
self, session: AsyncSession, payload: Dict self, session: AsyncSession, payload: Dict, transaction_id: str = None
) -> Tuple[bool, Optional[str]]: ) -> Tuple[bool, Optional[str]]:
"""
Traite un webhook Universign - CORRECTION : meilleure gestion des payloads
"""
try: try:
event_type = payload.get("event") if not transaction_id:
transaction_id = payload.get("transaction_id") or payload.get("id") if (
payload.get("type", "").startswith("transaction.")
and "payload" in payload
):
nested_object = payload.get("payload", {}).get("object", {})
if nested_object.get("object") == "transaction":
transaction_id = nested_object.get("id")
elif payload.get("type", "").startswith("action."):
transaction_id = (
payload.get("payload", {})
.get("object", {})
.get("transaction_id")
)
elif payload.get("object") == "transaction":
transaction_id = payload.get("id")
if not transaction_id: if not transaction_id:
return False, "Pas de transaction_id dans le webhook" return False, "Transaction ID manquant"
query = select(UniversignTransaction).where( event_type = payload.get("type", "webhook")
UniversignTransaction.transaction_id == transaction_id
logger.info(
f"📨 Traitement webhook: transaction={transaction_id}, event={event_type}"
)
query = (
select(UniversignTransaction)
.options(selectinload(UniversignTransaction.signers))
.where(UniversignTransaction.transaction_id == transaction_id)
) )
result = await session.execute(query) result = await session.execute(query)
transaction = result.scalar_one_or_none() transaction = result.scalar_one_or_none()
if not transaction: if not transaction:
logger.warning( logger.warning(f"Transaction {transaction_id} inconnue localement")
f"Webhook reçu pour transaction inconnue: {transaction_id}"
)
return False, "Transaction inconnue" return False, "Transaction inconnue"
transaction.webhook_received = True transaction.webhook_received = True
old_status = transaction.local_status.value
success, error = await self.sync_transaction( success, error = await self.sync_transaction(
session, transaction, force=True session, transaction, force=True
) )
if success and transaction.local_status.value != old_status:
logger.info(
f"Webhook traité: {transaction_id} | "
f"{old_status}{transaction.local_status.value}"
)
await self._log_sync_attempt( await self._log_sync_attempt(
session=session, session=session,
transaction=transaction, transaction=transaction,
sync_type=f"webhook:{event_type}", sync_type=f"webhook:{event_type}",
success=success, success=success,
error_message=error, error_message=error,
changes=json.dumps(payload), previous_status=old_status,
new_status=transaction.local_status.value,
changes=json.dumps(
payload, default=str
), # Ajout default=str pour éviter les erreurs JSON
) )
await session.commit() await session.commit()
logger.info(
f"✓ Webhook traité: {transaction_id} "
f"event={event_type} success={success}"
)
return success, error return success, error
except Exception as e: except Exception as e:
logger.error(f"Erreur traitement webhook: {e}", exc_info=True) logger.error(f"💥 Erreur traitement webhook: {e}", exc_info=True)
return False, str(e) return False, str(e)
async def _sync_signers( async def _sync_signers(
@ -342,27 +245,292 @@ class UniversignSyncService:
transaction: UniversignTransaction, transaction: UniversignTransaction,
universign_data: Dict, universign_data: Dict,
): ):
"""Synchronise les signataires""" signers_data = universign_data.get("participants", [])
signers_data = universign_data.get("signers", []) if not signers_data:
signers_data = universign_data.get("signers", [])
# Supprimer les anciens signataires if not signers_data:
for signer in transaction.signers: logger.debug("Aucun signataire dans les données Universign")
await session.delete(signer) return
existing_signers = {s.email: s for s in transaction.signers}
# Créer les nouveaux
for idx, signer_data in enumerate(signers_data): for idx, signer_data in enumerate(signers_data):
signer = UniversignSigner( email = signer_data.get("email", "")
id=f"{transaction.id}_signer_{idx}", if not email:
transaction_id=transaction.id, logger.warning(f"Signataire sans email à l'index {idx}, ignoré")
email=signer_data.get("email", ""), continue
name=signer_data.get("name"),
status=UniversignSignerStatus(signer_data.get("status", "waiting")), raw_status = signer_data.get("status") or signer_data.get(
order_index=idx, "state", "waiting"
viewed_at=self._parse_date(signer_data.get("viewed_at")),
signed_at=self._parse_date(signer_data.get("signed_at")),
refused_at=self._parse_date(signer_data.get("refused_at")),
) )
session.add(signer) try:
status = UniversignSignerStatus(raw_status)
except ValueError:
logger.warning(
f"Statut inconnu pour signer {email}: {raw_status}, utilisation de 'unknown'"
)
status = UniversignSignerStatus.UNKNOWN
if email in existing_signers:
signer = existing_signers[email]
signer.status = status
viewed_at = self._parse_date(signer_data.get("viewed_at"))
if viewed_at and not signer.viewed_at:
signer.viewed_at = viewed_at
signed_at = self._parse_date(signer_data.get("signed_at"))
if signed_at and not signer.signed_at:
signer.signed_at = signed_at
refused_at = self._parse_date(signer_data.get("refused_at"))
if refused_at and not signer.refused_at:
signer.refused_at = refused_at
if signer_data.get("name") and not signer.name:
signer.name = signer_data.get("name")
else:
try:
signer = UniversignSigner(
id=f"{transaction.id}_signer_{idx}_{int(datetime.now().timestamp())}",
transaction_id=transaction.id,
email=email,
name=signer_data.get("name"),
status=status,
order_index=idx,
viewed_at=self._parse_date(signer_data.get("viewed_at")),
signed_at=self._parse_date(signer_data.get("signed_at")),
refused_at=self._parse_date(signer_data.get("refused_at")),
)
session.add(signer)
logger.info(
f" Nouveau signataire ajouté: {email} (statut: {status.value})"
)
except Exception as e:
logger.error(f"Erreur création signer {email}: {e}")
async def sync_transaction(
self,
session,
transaction,
force: bool = False,
):
import json
if is_final_status(transaction.local_status.value) and not force:
logger.debug(
f"⏭️ Skip {transaction.transaction_id}: statut final "
f"{transaction.local_status.value}"
)
transaction.needs_sync = False
await session.commit()
return True, None
logger.info(f"Synchronisation: {transaction.transaction_id}")
result = self.fetch_transaction_status(transaction.transaction_id)
if not result:
error = "Échec récupération données Universign"
logger.error(f" {error}: {transaction.transaction_id}")
transaction.sync_attempts += 1
transaction.sync_error = error
await self._log_sync_attempt(session, transaction, "polling", False, error)
await session.commit()
return False, error
try:
universign_data = result["transaction"]
universign_status_raw = universign_data.get("state", "draft")
logger.info(f" Statut Universign brut: {universign_status_raw}")
new_local_status = map_universign_to_local(universign_status_raw)
previous_local_status = transaction.local_status.value
logger.info(
f"Mapping: {universign_status_raw} (Universign) → "
f"{new_local_status} (Local) | Actuel: {previous_local_status}"
)
if not is_transition_allowed(previous_local_status, new_local_status):
logger.warning(
f"Transition refusée: {previous_local_status}{new_local_status}"
)
new_local_status = resolve_status_conflict(
previous_local_status, new_local_status
)
logger.info(f"Résolution conflit: statut résolu = {new_local_status}")
status_changed = previous_local_status != new_local_status
if status_changed:
logger.info(
f"CHANGEMENT DÉTECTÉ: {previous_local_status}{new_local_status}"
)
try:
transaction.universign_status = UniversignTransactionStatus(
universign_status_raw
)
except ValueError:
logger.warning(f"Statut Universign inconnu: {universign_status_raw}")
if new_local_status == "SIGNE":
transaction.universign_status = (
UniversignTransactionStatus.COMPLETED
)
elif new_local_status == "REFUSE":
transaction.universign_status = UniversignTransactionStatus.REFUSED
elif new_local_status == "EXPIRE":
transaction.universign_status = UniversignTransactionStatus.EXPIRED
else:
transaction.universign_status = UniversignTransactionStatus.STARTED
transaction.local_status = LocalDocumentStatus(new_local_status)
transaction.universign_status_updated_at = datetime.now()
if new_local_status == "EN_COURS" and not transaction.sent_at:
transaction.sent_at = datetime.now()
logger.info("Date d'envoi mise à jour")
if new_local_status == "SIGNE" and not transaction.signed_at:
transaction.signed_at = datetime.now()
logger.info("Date de signature mise à jour")
if new_local_status == "REFUSE" and not transaction.refused_at:
transaction.refused_at = datetime.now()
logger.info(" Date de refus mise à jour")
if new_local_status == "EXPIRE" and not transaction.expired_at:
transaction.expired_at = datetime.now()
logger.info("Date d'expiration mise à jour")
documents = universign_data.get("documents", [])
if documents:
first_doc = documents[0]
logger.info(
f"Document Universign trouvé: id={first_doc.get('id')}, "
f"status={first_doc.get('status')}"
)
if new_local_status == "SIGNE" and not transaction.signed_document_path:
logger.info("Déclenchement téléchargement document signé...")
try:
(
download_success,
download_error,
) = await self.document_service.download_and_store_signed_document(
session=session, transaction=transaction, force=False
)
if download_success:
logger.info("Document signé téléchargé et stocké")
else:
logger.warning(f"Échec téléchargement: {download_error}")
except Exception as e:
logger.error(f" Erreur téléchargement document: {e}", exc_info=True)
await self._sync_signers(session, transaction, universign_data)
transaction.last_synced_at = datetime.now()
transaction.sync_attempts += 1
transaction.needs_sync = not is_final_status(new_local_status)
transaction.sync_error = None
await self._log_sync_attempt(
session=session,
transaction=transaction,
sync_type="polling",
success=True,
error_message=None,
previous_status=previous_local_status,
new_status=new_local_status,
changes=json.dumps(
{
"status_changed": status_changed,
"universign_raw": universign_status_raw,
"documents_count": len(documents),
"response_time_ms": result.get("response_time_ms"),
},
default=str,
),
)
await session.commit()
if status_changed:
logger.info(f"🎬 Exécution actions pour statut: {new_local_status}")
await self._execute_status_actions(
session, transaction, new_local_status
)
logger.info(
f"Sync terminée: {transaction.transaction_id} | "
f"{previous_local_status}{new_local_status}"
)
return True, None
except Exception as e:
error_msg = f"Erreur lors de la synchronisation: {str(e)}"
logger.error(f" {error_msg}", exc_info=True)
transaction.sync_error = error_msg[:1000]
transaction.sync_attempts += 1
await self._log_sync_attempt(
session, transaction, "polling", False, error_msg
)
await session.commit()
return False, error_msg
async def _sync_transaction_documents_corrected(
self, session, transaction, universign_data: dict, new_local_status: str
):
documents = universign_data.get("documents", [])
if documents:
first_doc = documents[0]
first_doc_id = first_doc.get("id")
if first_doc_id:
if hasattr(transaction, "universign_document_id"):
transaction.universign_document_id = first_doc_id
logger.info(
f"Document Universign: id={first_doc_id}, "
f"name={first_doc.get('name')}, status={first_doc.get('status')}"
)
else:
logger.debug("Aucun document dans la réponse Universign")
if new_local_status == "SIGNE":
if not transaction.signed_document_path:
logger.info("Déclenchement téléchargement document signé...")
try:
(
download_success,
download_error,
) = await self.document_service.download_and_store_signed_document(
session=session, transaction=transaction, force=False
)
if download_success:
logger.info("Document signé téléchargé avec succès")
else:
logger.warning(f"Échec téléchargement: {download_error}")
except Exception as e:
logger.error(f" Erreur téléchargement document: {e}", exc_info=True)
else:
logger.debug(
f"Document déjà téléchargé: {transaction.signed_document_path}"
)
async def _log_sync_attempt( async def _log_sync_attempt(
self, self,
@ -375,7 +543,6 @@ class UniversignSyncService:
new_status: Optional[str] = None, new_status: Optional[str] = None,
changes: Optional[str] = None, changes: Optional[str] = None,
): ):
"""Enregistre une tentative de sync dans les logs"""
log = UniversignSyncLog( log = UniversignSyncLog(
transaction_id=transaction.id, transaction_id=transaction.id,
sync_type=sync_type, sync_type=sync_type,
@ -391,48 +558,119 @@ class UniversignSyncService:
async def _execute_status_actions( async def _execute_status_actions(
self, session: AsyncSession, transaction: UniversignTransaction, new_status: str self, session: AsyncSession, transaction: UniversignTransaction, new_status: str
): ):
"""Exécute les actions métier associées au statut"""
actions = get_status_actions(new_status) actions = get_status_actions(new_status)
if not actions: if not actions:
return return
# Mise à jour Sage if actions.get("update_sage_status") and self.sage_client:
if actions.get("update_sage_status"):
await self._update_sage_status(transaction, new_status) await self._update_sage_status(transaction, new_status)
elif actions.get("update_sage_status"):
logger.debug(
f"sage_client non configuré, skip MAJ Sage pour {transaction.sage_document_id}"
)
# Déclencher workflow if actions.get("send_notification") and self.email_queue and self.settings:
if actions.get("trigger_workflow"): await self._send_notification(session, transaction, new_status)
await self._trigger_workflow(transaction) elif actions.get("send_notification"):
logger.debug(
f"email_queue/settings non configuré, skip notification pour {transaction.transaction_id}"
)
# Notifications async def _update_sage_status(
if actions.get("send_notification"): self, transaction: UniversignTransaction, status: str
await self._send_notification(transaction, new_status) ):
if not self.sage_client:
logger.warning("sage_client non configuré pour mise à jour Sage")
return
# Archive try:
if actions.get("archive_document"): type_doc = transaction.sage_document_type.value
await self._archive_signed_document(transaction) doc_id = transaction.sage_document_id
async def _update_sage_status(self, transaction, status): if status == "SIGNE":
"""Met à jour le statut dans Sage""" self.sage_client.changer_statut_document(
# TODO: Appeler sage_client.mettre_a_jour_champ_libre() document_type_code=type_doc, numero=doc_id, nouveau_statut=2
logger.info(f"TODO: Mettre à jour Sage pour {transaction.sage_document_id}") )
logger.info(f"Statut Sage mis à jour: {doc_id} → Accepté (2)")
async def _trigger_workflow(self, transaction): elif status == "EN_COURS":
"""Déclenche un workflow (ex: devis→commande)""" self.sage_client.changer_statut_document(
logger.info(f"TODO: Workflow pour {transaction.sage_document_id}") document_type_code=type_doc, numero=doc_id, nouveau_statut=1
)
logger.info(f"Statut Sage mis à jour: {doc_id} → Confirmé (1)")
async def _send_notification(self, transaction, status): except Exception as e:
"""Envoie une notification email""" logger.error(
logger.info(f"TODO: Notif pour {transaction.sage_document_id}") f"Erreur mise à jour Sage pour {transaction.sage_document_id}: {e}"
)
async def _archive_signed_document(self, transaction): async def _send_notification(
"""Archive le document signé""" self, session: AsyncSession, transaction: UniversignTransaction, status: str
logger.info(f"TODO: Archivage pour {transaction.sage_document_id}") ):
if not self.email_queue or not self.settings:
logger.warning("email_queue ou settings non configuré")
return
try:
if status == "SIGNE":
template = templates_signature_email["signature_confirmee"]
type_labels = {
0: "Devis",
10: "Commande",
30: "Bon de Livraison",
60: "Facture",
50: "Avoir",
}
variables = {
"NOM_SIGNATAIRE": transaction.requester_name or "Client",
"TYPE_DOC": type_labels.get(
transaction.sage_document_type.value, "Document"
),
"NUMERO": transaction.sage_document_id,
"DATE_SIGNATURE": transaction.signed_at.strftime("%d/%m/%Y à %H:%M")
if transaction.signed_at
else datetime.now().strftime("%d/%m/%Y à %H:%M"),
"TRANSACTION_ID": transaction.transaction_id,
"CONTACT_EMAIL": self.settings.smtp_from,
}
sujet = template["sujet"]
corps = template["corps_html"]
for var, valeur in variables.items():
sujet = sujet.replace(f"{{{{{var}}}}}", str(valeur))
corps = corps.replace(f"{{{{{var}}}}}", str(valeur))
email_log = EmailLog(
id=str(uuid.uuid4()),
destinataire=transaction.requester_email,
sujet=sujet,
corps_html=corps,
document_ids=transaction.sage_document_id,
type_document=transaction.sage_document_type.value,
statut=StatutEmail.EN_ATTENTE,
date_creation=datetime.now(),
nb_tentatives=0,
)
session.add(email_log)
await session.flush()
self.email_queue.enqueue(email_log.id)
logger.info(
f"Email confirmation signature envoyé à {transaction.requester_email}"
)
except Exception as e:
logger.error(
f"Erreur envoi notification pour {transaction.transaction_id}: {e}"
)
@staticmethod @staticmethod
def _parse_date(date_str: Optional[str]) -> Optional[datetime]: def _parse_date(date_str: Optional[str]) -> Optional[datetime]:
"""Parse une date ISO 8601"""
if not date_str: if not date_str:
return None return None
try: try:
@ -448,7 +686,6 @@ class UniversignSyncScheduler:
self.is_running = False self.is_running = False
async def start(self, session_factory): async def start(self, session_factory):
"""Démarre le polling automatique"""
import asyncio import asyncio
self.is_running = True self.is_running = True
@ -470,10 +707,8 @@ class UniversignSyncScheduler:
except Exception as e: except Exception as e:
logger.error(f"Erreur polling: {e}", exc_info=True) logger.error(f"Erreur polling: {e}", exc_info=True)
# Attendre avant le prochain cycle
await asyncio.sleep(self.interval_minutes * 60) await asyncio.sleep(self.interval_minutes * 60)
def stop(self): def stop(self):
"""Arrête le polling"""
self.is_running = False self.is_running = False
logger.info("Arrêt polling Universign") logger.info("Arrêt polling Universign")

15
tools/cleaner.py Normal file
View file

@ -0,0 +1,15 @@
from pathlib import Path
def supprimer_commentaires_ligne(fichier):
    """Strip full-line ``#`` comments from a Python source file, in place.

    Lines whose first non-blank character is ``#`` are removed, except the
    shebang (``#!``) and the PEP 263 encoding declaration when they appear
    on the first two lines — deleting those changes how the file is
    executed/decoded. Inline comments after code are left untouched, and
    the rewritten file keeps a trailing newline (the original version
    dropped it along with the shebang/coding lines).

    :param fichier: path (str or Path) of the file to rewrite.
    """
    path = Path(fichier)
    lignes = path.read_text(encoding="utf-8").splitlines()
    conservees = []
    for idx, ligne in enumerate(lignes):
        depouillee = ligne.lstrip()
        if depouillee.startswith("#"):
            # Shebang and coding cookie are only meaningful on lines 1-2.
            if idx < 2 and (depouillee.startswith("#!") or "coding" in depouillee):
                conservees.append(ligne)
            continue
        conservees.append(ligne)
    contenu = "\n".join(conservees)
    # POSIX convention: non-empty text files end with a newline.
    path.write_text(contenu + "\n" if contenu else contenu, encoding="utf-8")
if __name__ == "__main__":
    # Resolve the repository root (two levels up from this script), then
    # clean the generated data module in place.
    racine_projet = Path(__file__).resolve().parent.parent
    supprimer_commentaires_ligne(racine_projet / "data/data.py")

View file

@ -0,0 +1,52 @@
"""One-shot refactoring script: move Pydantic models out of ``main.py``.

Every top-level class inheriting from ``BaseModel`` — written either as
``class X(BaseModel)`` or ``class X(pydantic.BaseModel)`` (the original
version only recognized the bare-name form) — is written to its own file
under ``MODELS_DIR``, then ``main.py`` is rewritten without those classes.

WARNING: ``ast.unparse`` drops all comments and original formatting from
the rewritten ``main.py``.
"""
import ast
import os
import textwrap

SOURCE_FILE = "main.py"
MODELS_DIR = "../models"

os.makedirs(MODELS_DIR, exist_ok=True)

with open(SOURCE_FILE, "r", encoding="utf-8") as f:
    source_code = f.read()

tree = ast.parse(source_code)


def _is_base_model(base: ast.expr) -> bool:
    """Return True for a ``BaseModel`` or ``<module>.BaseModel`` base."""
    if isinstance(base, ast.Name):
        return base.id == "BaseModel"
    if isinstance(base, ast.Attribute):
        return base.attr == "BaseModel"
    return False


pydantic_classes = []
other_nodes = []
for node in tree.body:
    # Partition top-level nodes: Pydantic models vs everything else.
    if isinstance(node, ast.ClassDef) and any(
        _is_base_model(base) for base in node.bases
    ):
        pydantic_classes.append(node)
    else:
        other_nodes.append(node)

# Header prepended to every generated model file.
imports = """
from pydantic import BaseModel, Field
from typing import Optional, List
"""

for cls in pydantic_classes:
    class_name = cls.name
    file_path = os.path.join(MODELS_DIR, f"{class_name.lower()}.py")
    # get_source_segment keeps the class exactly as written in main.py.
    class_code = textwrap.dedent(ast.get_source_segment(source_code, cls))
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(imports.strip() + "\n\n")
        f.write(class_code)
    print(f"✅ Modèle extrait : {class_name}{file_path}")

# Rewrite main.py without the extracted classes (loses comments/formatting).
new_tree = ast.Module(body=other_nodes, type_ignores=[])
new_source = ast.unparse(new_tree)
with open(SOURCE_FILE, "w", encoding="utf-8") as f:
    f.write(new_source)

print("\n🎉 Extraction terminée")

136
utils/enterprise.py Normal file
View file

@ -0,0 +1,136 @@
from fastapi import HTTPException
from typing import Optional
import httpx
import logging
from schemas import EntrepriseSearch
logger = logging.getLogger(__name__)
def calculer_tva_intracommunautaire(siren: str) -> Optional[str]:
    """Derive the French intra-community VAT number from a SIREN.

    The two-digit key follows the official formula
    ``(12 + 3 * (siren % 97)) % 97``. Spaces in the input are tolerated
    and stripped; returns ``None`` when the result is not exactly nine
    digits or on any unexpected error.
    """
    try:
        chiffres = siren.replace(" ", "").strip()
        if len(chiffres) != 9 or not chiffres.isdigit():
            logger.warning(f"SIREN invalide: {siren}")
            return None
        cle = (12 + 3 * (int(chiffres) % 97)) % 97
        return f"FR{cle:02d}{chiffres}"
    except Exception as e:
        logger.error(f"Erreur calcul TVA pour SIREN {siren}: {e}")
        return None
def formater_adresse(siege_data: dict) -> str:
    """Assemble a one-line postal address from Sirene head-office data.

    Joins street number, street type, street name, postal code and the
    upper-cased city with single spaces, skipping missing/empty fields.
    Returns an empty string on any unexpected error.
    """
    try:
        champs = ("numero_voie", "type_voie", "libelle_voie", "code_postal")
        morceaux = [siege_data[c] for c in champs if siege_data.get(c)]
        commune = siege_data.get("libelle_commune")
        if commune:
            morceaux.append(commune.upper())
        return " ".join(morceaux)
    except Exception as e:
        logger.error(f"Erreur formatage adresse: {e}")
        return ""
async def rechercher_entreprise_api(query: str, per_page: int = 5) -> dict:
    """Query the public recherche-entreprises.api.gouv.fr search API.

    Translates the API's throttling (429) and outage (503) responses into
    ``HTTPException``s carrying French user-facing messages; a transport
    timeout maps to 504 and any other HTTP-layer failure to 500.

    :param query: free-text search string.
    :param per_page: maximum number of results per page (default 5).
    :return: the decoded JSON payload of the search response.
    :raises HTTPException: on throttling, outage, timeout or HTTP error.
    """
    endpoint = "https://recherche-entreprises.api.gouv.fr/search"
    criteres = {
        "q": query,
        "per_page": per_page,
        "limite_etablissements": 5,
    }
    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            reponse = await client.get(endpoint, params=criteres)
            if reponse.status_code == 429:
                logger.warning("Rate limit atteint (7 req/s)")
                raise HTTPException(
                    status_code=429,
                    detail="Trop de requêtes. Veuillez réessayer dans 1 seconde.",
                )
            if reponse.status_code == 503:
                logger.error("API Sirene indisponible (503)")
                raise HTTPException(
                    status_code=503,
                    detail="Service de recherche momentanément indisponible.",
                )
            reponse.raise_for_status()
            return reponse.json()
    except httpx.TimeoutException:
        logger.error(f"Timeout lors de la recherche: {query}")
        raise HTTPException(
            status_code=504, detail="Délai d'attente dépassé pour l'API de recherche."
        )
    except httpx.HTTPError as e:
        logger.error(f"Erreur HTTP API Sirene: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Erreur lors de la communication avec l'API: {str(e)}",
        )
def mapper_resultat_api(entreprise_data: dict) -> Optional[EntrepriseSearch]:
    """Map one raw API record onto an ``EntrepriseSearch`` schema object.

    Returns ``None`` (with a log entry explaining why) when the record has
    no SIREN, when the VAT number cannot be derived, or when the mapping
    itself raises.
    """
    try:
        siren = entreprise_data.get("siren")
        if not siren:
            logger.warning("Entreprise sans SIREN, ignorée")
            return None
        vat = calculer_tva_intracommunautaire(siren)
        if not vat:
            logger.warning(f"Impossible de calculer TVA pour SIREN: {siren}")
            return None
        siege = entreprise_data.get("siege", {})
        # "A" (actif) is assumed when the API omits the administrative state.
        active = entreprise_data.get("etat_administratif", "A") == "A"
        return EntrepriseSearch(
            company_name=entreprise_data.get("nom_complet", ""),
            siren=siren,
            vat_number=vat,
            address=formater_adresse(siege),
            naf_code=entreprise_data.get("activite_principale", ""),
            is_active=active,
            siret_siege=siege.get("siret"),
            code_postal=siege.get("code_postal"),
            ville=siege.get("libelle_commune"),
        )
    except Exception as e:
        logger.error(f"Erreur mapping entreprise: {e}", exc_info=True)
        return None

View file

@ -290,14 +290,11 @@ def _preparer_lignes_document(lignes: List) -> List[Dict]:
UNIVERSIGN_TO_LOCAL: Dict[str, str] = { UNIVERSIGN_TO_LOCAL: Dict[str, str] = {
# États initiaux
"draft": "EN_ATTENTE", "draft": "EN_ATTENTE",
"ready": "EN_ATTENTE", "ready": "EN_ATTENTE",
# En cours
"started": "EN_COURS", "started": "EN_COURS",
# États finaux (succès)
"completed": "SIGNE", "completed": "SIGNE",
# États finaux (échec) "closed": "SIGNE",
"refused": "REFUSE", "refused": "REFUSE",
"expired": "EXPIRE", "expired": "EXPIRE",
"canceled": "REFUSE", "canceled": "REFUSE",
@ -422,13 +419,13 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
"SIGNE": { "SIGNE": {
"fr": "Signé avec succès", "fr": "Signé avec succès",
"en": "Successfully signed", "en": "Successfully signed",
"icon": "", "icon": "",
"color": "green", "color": "green",
}, },
"REFUSE": { "REFUSE": {
"fr": "Signature refusée", "fr": "Signature refusée",
"en": "Signature refused", "en": "Signature refused",
"icon": "", "icon": "",
"color": "red", "color": "red",
}, },
"EXPIRE": { "EXPIRE": {
@ -440,7 +437,7 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
"ERREUR": { "ERREUR": {
"fr": "Erreur technique", "fr": "Erreur technique",
"en": "Technical error", "en": "Technical error",
"icon": "⚠️", "icon": "",
"color": "red", "color": "red",
}, },
} }

View file

@ -1,4 +1,8 @@
from typing import Dict, Any from typing import Dict, Any
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
UNIVERSIGN_TO_LOCAL: Dict[str, str] = { UNIVERSIGN_TO_LOCAL: Dict[str, str] = {
"draft": "EN_ATTENTE", "draft": "EN_ATTENTE",
@ -86,13 +90,13 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
"SIGNE": { "SIGNE": {
"fr": "Signé avec succès", "fr": "Signé avec succès",
"en": "Successfully signed", "en": "Successfully signed",
"icon": "", "icon": "",
"color": "green", "color": "green",
}, },
"REFUSE": { "REFUSE": {
"fr": "Signature refusée", "fr": "Signature refusée",
"en": "Signature refused", "en": "Signature refused",
"icon": "", "icon": "",
"color": "red", "color": "red",
}, },
"EXPIRE": { "EXPIRE": {
@ -104,15 +108,24 @@ STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
"ERREUR": { "ERREUR": {
"fr": "Erreur technique", "fr": "Erreur technique",
"en": "Technical error", "en": "Technical error",
"icon": "⚠️", "icon": "",
"color": "red", "color": "red",
}, },
} }
Before:
def map_universign_to_local(universign_status: str) -> str:
    """Convertit un statut Universign en statut local."""
    return UNIVERSIGN_TO_LOCAL.get(universign_status.lower(), "ERREUR")

After:
def map_universign_to_local(universign_status: str) -> str:
    """Convertit un statut Universign en statut local avec fallback robuste."""
    normalized = universign_status.lower().strip()
    mapped = UNIVERSIGN_TO_LOCAL.get(normalized)
    if not mapped:
        logger.warning(
            f"Statut Universign inconnu: '{universign_status}', mapping vers ERREUR"
        )
        return "ERREUR"
    return mapped
def get_sage_status_code(local_status: str) -> int: def get_sage_status_code(local_status: str) -> int: