Compare commits
147 commits (feat/enhan... vs. main)
Commits (SHA1):
3b5c183b47, d25c2cffa9, 1c6c45465f, 574d82f3c4, a6a623d1ab, 437ecd0ed3, c057a085ed, 23a94f5558,
797aed0240, 5f40c677a8, 8a22e285df, 92597a1143, c1f4c66e8c, 43da1b09ed, 6d5f8594d0, a7457c3979,
5eec115d1d, d89c9fd35b, 211dd4fd23, 67ef83c4e3, 82d1d92e58, 28c8fb3008, f8cec7ebc5, 1a08894b47,
3cdb490ee5, c84e4ddc20, 41ca202d4b, 918f5d3f19, fa95d0d117, a1150390f4, 0001dbe634, 5b584bf969,
022149c237, 72d1ac58d1, cce1cdf76a, e51a5e0a0b, dd65ae4d96, cc0062b3bc, 9bd0f62459, e0f08fd83a,
f59e56490c, 2aafd525cd, 17a4251eea, abc9ff820a, b85bd26dbe, 1164c7975a, a10fda072c, 9f6c1de8ef,
09eae50952, 4b686c4544, 89510537b3, 0e18129325, aa89ebdf9e, 9f12727bd3, 18603ded6e, 18d72b3bf9,
fdf359738b, ba9e474109, b291cbf65f, 6f2136c3ca, beabefa3f9, 457c746706, d719966339, cc1609549f,
25be0bd569, 19faec9b24, 2f06a083dc, 149d8fb2de, eedc628a5f, f505dad8a7, a824592398, 23575fa231,
a9df408399, 671d5bac15, d5273a0786, e7003d4059, 30ffc7a493, 3f1dce918d, 08665f15dd, 3233630401,
d2f02e1555, 9ae447e2c7, 358b2e3639, a2c85a211a, b17e4abf12, 23f9bba174, 983e960b9b, 74c0d73294,
18f9a45ef6, d6ed8792cc, 24d7a49a73, c5c17fdd9b, c389129ae7, 6b6246b6e5, b3419eafaa, a9aff7b386,
4f0fe17ee9, d4d6cbc44f, 795b848dff, e990cbdc08, 1972f22b80, ce3b234fee, d78d189606, 8a012fc162,
f3957dddcf, 268dfb3618, bcee1f277f, d8ec61802d, 4a1960745a, 0be28f6744, e4024168b2, cd9dd9348d,
bcaa621432, fbaa43e3fd, 1ce85517be, a3f02cbd91, 92a2b95cbb, 9a1e1d6726, c24f276ce4, 5ad1fccc5c,
0deb178bc6, bac8cc6017, f3fc32c89f, 677cd826d7, 410d4553d5, b40c998062, ab25443f99, e3f7090935,
bbaec0f0b8, a08fb12b56, 19811a2290, 9f5ccb8e7b, a68f5af72e, 50c654a74a, 4d6bb8f0f9, 59105dae88,
bdb8e4f799, c97db9b058, 8850c7c266, e7bdf2d6a2, 448227c80f, 17b17379ac, e6236558fb, 45dd517ecf,
306c71b43d, e3b0f7e44a, 512bb366dc

72 changed files with 8802 additions and 2627 deletions
.gitignore (vendored): 9 changed lines

@@ -39,3 +39,12 @@ data/*.db.bak
 *.db
 
 tools/
+
+
+.trunk
+.env.staging
+.env.production
+
+.trunk
+
+*clean*.py

.trunk/.gitignore (vendored): new file (9 lines)

*out
*logs
*actions
*notifications
*tools
plugins
user_trunk.yaml
user.yaml
tmp

.trunk/trunk.yaml: new file (32 lines)

# This file controls the behavior of Trunk: https://docs.trunk.io/cli
# To learn more about the format of this file, see https://docs.trunk.io/reference/trunk-yaml
version: 0.1
cli:
  version: 1.25.0
# Trunk provides extensibility via plugins. (https://docs.trunk.io/plugins)
plugins:
  sources:
    - id: trunk
      ref: v1.7.4
      uri: https://github.com/trunk-io/plugins
# Many linters and tools depend on runtimes - configure them here. (https://docs.trunk.io/runtimes)
runtimes:
  enabled:
    - node@22.16.0
    - python@3.10.8
# This is the section where you manage your linters. (https://docs.trunk.io/check/configuration)
lint:
  enabled:
    - git-diff-check
    - hadolint@2.14.0
    - markdownlint@0.47.0
    - osv-scanner@2.3.1
    - prettier@3.7.4
    - trufflehog@3.92.4
actions:
  disabled:
    - trunk-announce
    - trunk-check-pre-push
    - trunk-fmt-pre-commit
  enabled:
    - trunk-upgrade-available

Dockerfile: 85 changed lines

@@ -1,23 +1,78 @@
-# Backend Dockerfile
-FROM python:3.12-slim
+# ================================
+# Base
+# ================================
+FROM python:3.12-slim AS base
 WORKDIR /app
 
-# Copier et installer les dépendances
-COPY requirements.txt .
-RUN pip install --no-cache-dir --upgrade pip \
-    && pip install --no-cache-dir -r requirements.txt
+# Installer dépendances système si nécessaire
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    curl \
+    && rm -rf /var/lib/apt/lists/*
 
+COPY requirements.txt .
+RUN pip install --no-cache-dir --upgrade pip
+
+# ================================
+# DEV
+# ================================
+FROM base AS dev
+ENV PYTHONUNBUFFERED=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    ENV=development
+
+# Installer dépendances dev (si vous avez un requirements.dev.txt)
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Créer dossiers
+RUN mkdir -p /app/data /app/logs && chmod -R 777 /app/data /app/logs
+
-# Copier le reste du projet
 COPY . .
 
-# Créer dossier persistant pour SQLite avec bonnes permissions
-RUN mkdir -p /app/data && chmod 777 /app/data
-
-# Exposer le port
 EXPOSE 8000
+CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
 
-# Lancer l'API et initialiser la DB au démarrage
-# CMD ["sh", "-c", "uvicorn api:app --host 0.0.0.0 --port 8000"]
-CMD ["sh", "-c", "python init_db.py && uvicorn api:app --host 0.0.0.0 --port 8000"]
+# ================================
+# STAGING
+# ================================
+FROM base AS staging
+ENV PYTHONUNBUFFERED=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    ENV=staging
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+RUN mkdir -p /app/data /app/logs && chmod -R 755 /app/data /app/logs
+
+COPY . .
+
+# Initialiser la DB au build
+RUN python init_db.py || true
+
+EXPOSE 8002
+CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8002", "--log-level", "info"]
+
+# ================================
+# PROD
+# ================================
+FROM base AS prod
+ENV PYTHONUNBUFFERED=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    ENV=production
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Créer utilisateur non-root pour la sécurité
+RUN useradd -m -u 1000 appuser && \
+    mkdir -p /app/data /app/logs && \
+    chown -R appuser:appuser /app
+
+COPY --chown=appuser:appuser . .
+
+# Initialiser la DB au build
+RUN python init_db.py || true
+
+USER appuser
+
+EXPOSE 8004
+CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "8004", "--workers", "4"]

@@ -7,7 +7,6 @@ class Settings(BaseSettings):
         env_file=".env", env_file_encoding="utf-8", case_sensitive=False, extra="ignore"
     )
 
-    # === JWT & Auth ===
     jwt_secret: str
     jwt_algorithm: str
     access_token_expire_minutes: int
@@ -21,15 +20,12 @@ class Settings(BaseSettings):
     SAGE_TYPE_BON_AVOIR: int = 50
     SAGE_TYPE_FACTURE: int = 60
 
-    # === Sage Gateway (Windows) ===
     sage_gateway_url: str
     sage_gateway_token: str
     frontend_url: str
 
-    # === Base de données ===
     database_url: str = "sqlite+aiosqlite:///./data/sage_dataven.db"
 
-    # === SMTP ===
     smtp_host: str
     smtp_port: int = 587
     smtp_user: str
@@ -37,21 +33,17 @@ class Settings(BaseSettings):
     smtp_from: str
     smtp_use_tls: bool = True
 
-    # === Universign ===
     universign_api_key: str
     universign_api_url: str
 
-    # === API ===
     api_host: str
     api_port: int
     api_reload: bool = False
 
-    # === Email Queue ===
     max_email_workers: int = 3
     max_retry_attempts: int = 3
    retry_delay_seconds: int = 3
 
-    # === CORS ===
     cors_origins: List[str] = ["*"]
 

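The hunks above only strip section comments from the pydantic `Settings` class; the fields themselves are unchanged. A minimal sketch of how another module might read them, assuming only what the diff shows (a `config/config.py` module exposing a `settings` instance); the helper function is illustrative:

# Illustrative only: `settings` comes from config/config.py as imported elsewhere in this diff.
from config.config import settings

def database_is_sqlite() -> bool:
    # database_url defaults to "sqlite+aiosqlite:///./data/sage_dataven.db"
    return "sqlite" in settings.database_url

print(settings.api_host, settings.api_port, database_is_sqlite())
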
config/cors_config.py: new file (125 lines)

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from typing import List
import os
import logging

logger = logging.getLogger(__name__)


def configure_cors_open(app: FastAPI):
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=False,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["*"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )

    logger.info(" CORS configuré: Mode OUVERT (sécurisé par API Keys)")
    logger.info(" - Origins: * (toutes)")
    logger.info(" - Headers: * (dont X-API-Key)")
    logger.info(" - Credentials: False")


def configure_cors_whitelist(app: FastAPI):
    allowed_origins_str = os.getenv("CORS_ALLOWED_ORIGINS", "")

    if allowed_origins_str:
        allowed_origins = [
            origin.strip()
            for origin in allowed_origins_str.split(",")
            if origin.strip()
        ]
    else:
        allowed_origins = ["*"]

    app.add_middleware(
        CORSMiddleware,
        allow_origins=allowed_origins,
        allow_credentials=True,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )

    logger.info(" CORS configuré: Mode WHITELIST")
    logger.info(f" - Origins autorisées: {len(allowed_origins)}")
    for origin in allowed_origins:
        logger.info(f" • {origin}")


def configure_cors_regex(app: FastAPI):
    origin_regex = r"*"

    app.add_middleware(
        CORSMiddleware,
        allow_origin_regex=origin_regex,
        allow_credentials=True,
        allow_methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        allow_headers=["Content-Type", "Authorization", "X-API-Key"],
        expose_headers=["X-RateLimit-Limit", "X-RateLimit-Remaining"],
        max_age=3600,
    )

    logger.info(" CORS configuré: Mode REGEX")
    logger.info(f" - Pattern: {origin_regex}")


def configure_cors_hybrid(app: FastAPI):
    from starlette.middleware.base import BaseHTTPMiddleware

    class HybridCORSMiddleware(BaseHTTPMiddleware):
        def __init__(self, app, known_origins: List[str]):
            super().__init__(app)
            self.known_origins = set(known_origins)

        async def dispatch(self, request, call_next):
            origin = request.headers.get("origin")

            if origin in self.known_origins:
                response = await call_next(request)
                response.headers["Access-Control-Allow-Origin"] = origin
                response.headers["Access-Control-Allow-Credentials"] = "true"
                response.headers["Access-Control-Allow-Methods"] = (
                    "GET, POST, PUT, DELETE, PATCH, OPTIONS"
                )
                response.headers["Access-Control-Allow-Headers"] = (
                    "Content-Type, Authorization, X-API-Key"
                )
                return response

            response = await call_next(request)
            response.headers["Access-Control-Allow-Origin"] = "*"
            response.headers["Access-Control-Allow-Methods"] = (
                "GET, POST, PUT, DELETE, PATCH, OPTIONS"
            )
            response.headers["Access-Control-Allow-Headers"] = "*"
            return response

    known_origins = ["*"]

    app.add_middleware(HybridCORSMiddleware, known_origins=known_origins)

    logger.info(" CORS configuré: Mode HYBRIDE")
    logger.info(f" - Whitelist: {len(known_origins)} domaines")
    logger.info(" - Fallback: * (ouvert)")


def setup_cors(app: FastAPI, mode: str = "open"):
    if mode == "open":
        configure_cors_open(app)
    elif mode == "whitelist":
        configure_cors_whitelist(app)
    elif mode == "regex":
        configure_cors_regex(app)
    elif mode == "hybrid":
        configure_cors_hybrid(app)
    else:
        logger.warning(
            f" Mode CORS inconnu: {mode}. Utilisation de 'open' par défaut."
        )
        configure_cors_open(app)

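A short usage sketch for the `setup_cors` helper defined in this new file; the `CORS_MODE` environment variable is an assumption for illustration, not something the file reads itself. Note also that the `regex` mode's pattern `r"*"` is not a valid regular expression as written, so that mode would need a real pattern before use.

# Sketch: wiring config/cors_config.py into a FastAPI app. CORS_MODE is hypothetical.
import os
from fastapi import FastAPI
from config.cors_config import setup_cors

app = FastAPI()
setup_cors(app, mode=os.getenv("CORS_MODE", "open"))  # "open", "whitelist", "regex" or "hybrid"
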
@@ -1,48 +1,75 @@
-from fastapi import Depends, HTTPException, status
+from fastapi import Depends, HTTPException, status, Request
 from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import select
+from typing import Optional
+from jwt.exceptions import InvalidTokenError
 
 from database import get_session, User
 from security.auth import decode_token
-from typing import Optional
-from datetime import datetime
 
-security = HTTPBearer()
+security = HTTPBearer(auto_error=False)
 
 
-async def get_current_user(
-    credentials: HTTPAuthorizationCredentials = Depends(security),
+async def get_current_user_hybrid(
+    request: Request,
+    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
     session: AsyncSession = Depends(get_session),
 ) -> User:
+    api_key_obj = getattr(request.state, "api_key", None)
+
+    if api_key_obj:
+        if api_key_obj.user_id:
+            result = await session.execute(
+                select(User).where(User.id == api_key_obj.user_id)
+            )
+            user = result.scalar_one_or_none()
+
+            if user:
+                user._is_api_key_user = True
+                user._api_key_obj = api_key_obj
+                return user
+
+        virtual_user = User(
+            id=f"api_key_{api_key_obj.id}",
+            email=f"api_key_{api_key_obj.id}@virtual.local",
+            nom=api_key_obj.name,
+            prenom="API",
+            hashed_password="",
+            role="api_client",
+            is_active=True,
+            is_verified=True,
+        )
+
+        virtual_user._is_api_key_user = True
+        virtual_user._api_key_obj = api_key_obj
+
+        return virtual_user
+
+    if not credentials:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Authentification requise (JWT ou API Key)",
+            headers={"WWW-Authenticate": "Bearer"},
+        )
+
     token = credentials.credentials
 
+    try:
     payload = decode_token(token)
-    if not payload:
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Token invalide ou expiré",
-            headers={"WWW-Authenticate": "Bearer"},
-        )
-
-    if payload.get("type") != "access":
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Type de token incorrect",
-            headers={"WWW-Authenticate": "Bearer"},
-        )
 
     user_id: str = payload.get("sub")
-    if not user_id:
+    if user_id is None:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
-            detail="Token malformé",
+            detail="Token invalide: user_id manquant",
             headers={"WWW-Authenticate": "Bearer"},
         )
 
     result = await session.execute(select(User).where(User.id == user_id))
     user = result.scalar_one_or_none()
 
-    if not user:
+    if user is None:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,
             detail="Utilisateur introuvable",
@@ -50,45 +77,42 @@ async def get_current_user(
         )
 
     if not user.is_active:
-        raise HTTPException(
-            status_code=status.HTTP_403_FORBIDDEN, detail="Compte désactivé"
-        )
-
-    if not user.is_verified:
         raise HTTPException(
             status_code=status.HTTP_403_FORBIDDEN,
-            detail="Email non vérifié. Consultez votre boîte de réception.",
-        )
-
-    if user.locked_until and user.locked_until > datetime.now():
-        raise HTTPException(
-            status_code=status.HTTP_403_FORBIDDEN,
-            detail="Compte temporairement verrouillé suite à trop de tentatives échouées",
+            detail="Utilisateur inactif",
         )
 
     return user
 
-
-async def get_current_user_optional(
-    credentials: Optional[HTTPAuthorizationCredentials] = Depends(security),
-    session: AsyncSession = Depends(get_session),
-) -> Optional[User]:
-    if not credentials:
-        return None
-
-    try:
-        return await get_current_user(credentials, session)
-    except HTTPException:
-        return None
+    except InvalidTokenError as e:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail=f"Token invalide: {str(e)}",
+            headers={"WWW-Authenticate": "Bearer"},
+        )
 
 
-def require_role(*allowed_roles: str):
-    async def role_checker(user: User = Depends(get_current_user)) -> User:
+def require_role_hybrid(*allowed_roles: str):
+    async def role_checker(user: User = Depends(get_current_user_hybrid)) -> User:
         if user.role not in allowed_roles:
             raise HTTPException(
                 status_code=status.HTTP_403_FORBIDDEN,
-                detail=f"Accès refusé. Rôles requis: {', '.join(allowed_roles)}",
+                detail=f"Accès interdit. Rôles autorisés: {', '.join(allowed_roles)}",
             )
         return user
 
     return role_checker
+
+
+def is_api_key_user(user: User) -> bool:
+    """Vérifie si l'utilisateur est authentifié via API Key"""
+    return getattr(user, "_is_api_key_user", False)
+
+
+def get_api_key_from_user(user: User):
+    """Récupère l'objet ApiKey depuis un utilisateur (si applicable)"""
+    return getattr(user, "_api_key_obj", None)
+
+
+get_current_user = get_current_user_hybrid
+require_role = require_role_hybrid

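A sketch of how the hybrid dependencies above would be consumed in routes. The endpoints and the "admin" role are illustrative; `get_current_user`, `require_role` and `is_api_key_user` are the names exported at the end of the diff.

# Illustrative routes using the hybrid auth dependencies.
from fastapi import APIRouter, Depends
from core.dependencies import get_current_user, require_role, is_api_key_user
from database import User

router = APIRouter()

@router.get("/me")  # hypothetical endpoint
async def read_me(user: User = Depends(get_current_user)):
    # Works for both JWT users and API-key "virtual" users.
    return {"id": user.id, "role": user.role, "via_api_key": is_api_key_user(user)}

@router.delete("/admin/users/{user_id}")  # hypothetical endpoint, "admin" role assumed
async def delete_user(user_id: str, admin: User = Depends(require_role("admin"))):
    return {"deleted": user_id}
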
@@ -6,7 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
 from database import get_session, User
 from core.dependencies import get_current_user
 from sage_client import SageGatewayClient
-from config import settings
+from config.config import settings
 import logging
 
 logger = logging.getLogger(__name__)

@@ -1,12 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-"""
-Script de création du premier utilisateur administrateur
-
-Usage:
-    python create_admin.py
-"""
-
 import asyncio
 import sys
 from pathlib import Path
@@ -28,7 +19,6 @@ async def create_admin():
     print(" Création d'un compte administrateur")
     print("=" * 60 + "\n")
 
-    # Saisie des informations
     email = input("Email de l'admin: ").strip().lower()
     if not email or "@" not in email:
         print(" Email invalide")
@@ -41,7 +31,6 @@ async def create_admin():
         print(" Prénom et nom requis")
         return False
 
-    # Mot de passe avec validation
     while True:
         password = input(
             "Mot de passe (min 8 car., 1 maj, 1 min, 1 chiffre, 1 spécial): "
@@ -67,7 +56,6 @@ async def create_admin():
         print(f"\n Un utilisateur avec l'email {email} existe déjà")
         return False
 
-    # Créer l'admin
     admin = User(
         id=str(uuid.uuid4()),
         email=email,

@@ -152,7 +152,7 @@ templates_signature_email = {
 </table>
 
 <p style="color: #718096; font-size: 13px; line-height: 1.5; margin: 0;">
-    <strong>🔒 Signature électronique sécurisée</strong><br>
+    <strong> Signature électronique sécurisée</strong><br>
     Votre signature est protégée par notre partenaire de confiance <strong>Universign</strong>,
     certifié eIDAS et conforme au RGPD. Votre identité sera vérifiée et le document sera
     horodaté de manière infalsifiable.

@@ -15,11 +15,20 @@ from database.models.user import User
 from database.models.email import EmailLog
 from database.models.signature import SignatureLog
 from database.models.sage_config import SageGatewayConfig
-from database.Enum.status import (
+from database.enum.status import (
     StatutEmail,
     StatutSignature,
 )
 from database.models.workflow import WorkflowLog
+from database.models.universign import (
+    UniversignTransaction,
+    UniversignSigner,
+    UniversignSyncLog,
+    UniversignTransactionStatus,
+    LocalDocumentStatus,
+    UniversignSignerStatus,
+    SageDocumentType
+)
 
 __all__ = [
     "engine",
@@ -39,4 +48,11 @@ __all__ = [
     "RefreshToken",
     "LoginAttempt",
     "SageGatewayConfig",
+    "UniversignTransaction",
+    "UniversignSigner",
+    "UniversignSyncLog",
+    "UniversignTransactionStatus",
+    "LocalDocumentStatus",
+    "UniversignSignerStatus",
+    "SageDocumentType"
 ]

@@ -1,20 +1,49 @@
-import os
 from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
 from sqlalchemy.pool import NullPool
+from sqlalchemy import event, text
 import logging
 
+from config.config import settings
 from database.models.generic_model import Base
 
 logger = logging.getLogger(__name__)
 
-DATABASE_URL = os.getenv("DATABASE_URL", "sqlite+aiosqlite:///./data/sage_dataven.db")
+DATABASE_URL = settings.database_url
 
+
+def _configure_sqlite_connection(dbapi_connection, connection_record):
+    cursor = dbapi_connection.cursor()
+    cursor.execute("PRAGMA journal_mode=WAL")
+    cursor.execute("PRAGMA busy_timeout=30000")
+    cursor.execute("PRAGMA synchronous=NORMAL")
+    cursor.execute("PRAGMA cache_size=-64000")  # 64MB
+    cursor.execute("PRAGMA foreign_keys=ON")
+    cursor.execute("PRAGMA locking_mode=NORMAL")
+    cursor.close()
+
+    logger.debug("SQLite configuré avec WAL mode et busy_timeout=30s")
+
+
+engine_kwargs = {
+    "echo": False,
+    "future": True,
+    "poolclass": NullPool,
+}
+
+if DATABASE_URL and "sqlite" in DATABASE_URL:
+    engine_kwargs["connect_args"] = {
+        "check_same_thread": False,
+        "timeout": 30,
+    }
+
+engine = create_async_engine(DATABASE_URL, **engine_kwargs)
+
+if DATABASE_URL and "sqlite" in DATABASE_URL:
+
+    @event.listens_for(engine.sync_engine, "connect")
+    def set_sqlite_pragma(dbapi_connection, connection_record):
+        _configure_sqlite_connection(dbapi_connection, connection_record)
 
-engine = create_async_engine(
-    DATABASE_URL,
-    echo=False,
-    future=True,
-    poolclass=NullPool,
-)
 
 async_session_factory = async_sessionmaker(
     engine,
@@ -30,6 +59,12 @@ async def init_db():
     logger.info("Tentative de connexion")
     async with engine.begin() as conn:
         logger.info("Connexion etablie")
+
+        if DATABASE_URL and "sqlite" in DATABASE_URL:
+            result = await conn.execute(text("PRAGMA journal_mode"))
+            journal_mode = result.scalar()
+            logger.info(f"SQLite journal_mode: {journal_mode}")
+
         await conn.run_sync(Base.metadata.create_all)
         logger.info("create_all execute")
@@ -49,3 +84,57 @@ async def get_session() -> AsyncSession:
 async def close_db():
     await engine.dispose()
     logger.info("Connexions DB fermées")
+
+
+async def execute_with_sqlite_retry(
+    session: AsyncSession, statement, max_retries: int = 5, base_delay: float = 0.1
+):
+    import asyncio
+    from sqlalchemy.exc import OperationalError
+
+    last_error = None
+
+    for attempt in range(max_retries):
+        try:
+            result = await session.execute(statement)
+            return result
+        except OperationalError as e:
+            last_error = e
+            if "database is locked" in str(e).lower():
+                delay = base_delay * (2**attempt)
+                logger.warning(
+                    f"SQLite locked, tentative {attempt + 1}/{max_retries}, "
+                    f"retry dans {delay:.2f}s"
+                )
+                await asyncio.sleep(delay)
+            else:
+                raise
+
+    raise last_error
+
+
+async def commit_with_retry(
+    session: AsyncSession, max_retries: int = 5, base_delay: float = 0.1
+):
+    import asyncio
+    from sqlalchemy.exc import OperationalError
+
+    last_error = None
+
+    for attempt in range(max_retries):
+        try:
+            await session.commit()
+            return
+        except OperationalError as e:
+            last_error = e
+            if "database is locked" in str(e).lower():
+                delay = base_delay * (2**attempt)
+                logger.warning(
+                    f"SQLite locked lors du commit, tentative {attempt + 1}/{max_retries}, "
+                    f"retry dans {delay:.2f}s"
+                )
+                await asyncio.sleep(delay)
+            else:
+                raise
+
+    raise last_error

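The new helpers above retry `execute`/`commit` with exponential backoff when SQLite reports "database is locked". A minimal usage sketch; the model and field are placeholders taken from elsewhere in this diff:

# Sketch: using the retry helpers from database.db_config.
from sqlalchemy import select
from database import User  # model referenced elsewhere in this diff
from database.db_config import async_session_factory, execute_with_sqlite_retry, commit_with_retry

async def rename_user(user_id: str, new_name: str) -> None:
    async with async_session_factory() as session:
        result = await execute_with_sqlite_retry(
            session, select(User).where(User.id == user_id)
        )
        user = result.scalar_one_or_none()
        if user:
            user.nom = new_name
            await commit_with_retry(session)  # retried if the database is locked
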
database/enum/status.py: new file (18 lines)

import enum


class StatutEmail(str, enum.Enum):
    EN_ATTENTE = "EN_ATTENTE"
    EN_COURS = "EN_COURS"
    ENVOYE = "ENVOYE"
    OUVERT = "OUVERT"
    ERREUR = "ERREUR"
    BOUNCE = "BOUNCE"


class StatutSignature(str, enum.Enum):
    EN_ATTENTE = "EN_ATTENTE"
    ENVOYE = "ENVOYE"
    SIGNE = "SIGNE"
    REFUSE = "REFUSE"
    EXPIRE = "EXPIRE"

database/models/api_key.py: new file (73 lines)

from sqlalchemy import Column, String, Boolean, DateTime, Integer, Text
from typing import Optional, List
import json
from datetime import datetime
import uuid

from database.models.generic_model import Base


class ApiKey(Base):
    """Modèle pour les clés API publiques"""

    __tablename__ = "api_keys"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    key_hash = Column(String(64), unique=True, nullable=False, index=True)
    key_prefix = Column(String(10), nullable=False)

    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)

    user_id = Column(String(36), nullable=True)
    created_by = Column(String(255), nullable=False)

    is_active = Column(Boolean, default=True, nullable=False)
    rate_limit_per_minute = Column(Integer, default=60, nullable=False)
    allowed_endpoints = Column(Text, nullable=True)

    total_requests = Column(Integer, default=0, nullable=False)
    last_used_at = Column(DateTime, nullable=True)

    created_at = Column(DateTime, default=datetime.now, nullable=False)
    expires_at = Column(DateTime, nullable=True)
    revoked_at = Column(DateTime, nullable=True)

    def __repr__(self):
        return f"<ApiKey(name='{self.name}', prefix='{self.key_prefix}', active={self.is_active})>"


class SwaggerUser(Base):
    """Modèle pour les utilisateurs autorisés à accéder au Swagger"""

    __tablename__ = "swagger_users"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    username = Column(String(100), unique=True, nullable=False, index=True)
    hashed_password = Column(String(255), nullable=False)

    full_name = Column(String(255), nullable=True)
    email = Column(String(255), nullable=True)

    is_active = Column(Boolean, default=True, nullable=False)

    allowed_tags = Column(Text, nullable=True)

    created_at = Column(DateTime, default=datetime.now, nullable=False)
    last_login = Column(DateTime, nullable=True)

    @property
    def allowed_tags_list(self) -> Optional[List[str]]:
        if self.allowed_tags:
            try:
                return json.loads(self.allowed_tags)
            except json.JSONDecodeError:
                return None
        return None

    @allowed_tags_list.setter
    def allowed_tags_list(self, tags: Optional[List[str]]):
        self.allowed_tags = json.dumps(tags) if tags is not None else None

    def __repr__(self):
        return f"<SwaggerUser(username='{self.username}', active={self.is_active})>"

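The `ApiKey` model stores only a 64-character `key_hash` and a short `key_prefix`, never the raw key. A hedged sketch of how a key could be generated to fit those columns: SHA-256 and the `sdk_live_` prefix are assumptions inferred from the column width and from the prefix check in `middleware/security.py` further down, not something this file defines.

# Assumption-laden sketch: produce the values an ApiKey row expects.
import hashlib
import secrets

def generate_api_key() -> tuple[str, str, str]:
    raw = "sdk_live_" + secrets.token_urlsafe(32)        # prefix seen in the middleware check
    key_hash = hashlib.sha256(raw.encode()).hexdigest()  # 64 hex chars -> String(64) column
    return raw, key_hash, raw[:10]                       # raw key, stored hash, key_prefix

raw_key, stored_hash, prefix = generate_api_key()
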
@@ -8,7 +8,7 @@ from sqlalchemy import (
 )
 from datetime import datetime
 from database.models.generic_model import Base
-from database.Enum.status import StatutEmail
+from database.enum.status import StatutEmail
 
 
 class EmailLog(Base):
@@ -9,7 +9,7 @@ from sqlalchemy import (
 )
 from datetime import datetime
 from database.models.generic_model import Base
-from database.Enum.status import StatutSignature
+from database.enum.status import StatutSignature
 
 
 class SignatureLog(Base):

database/models/universign.py: new file (272 lines)

from sqlalchemy import (
    Column,
    String,
    DateTime,
    Boolean,
    Integer,
    Text,
    Enum as SQLEnum,
    ForeignKey,
    Index,
)
from sqlalchemy.orm import relationship
from datetime import datetime
from enum import Enum
from database.models.generic_model import Base


class UniversignTransactionStatus(str, Enum):
    DRAFT = "draft"
    READY = "ready"
    STARTED = "started"
    COMPLETED = "completed"
    CLOSED = "closed"
    REFUSED = "refused"
    EXPIRED = "expired"
    CANCELED = "canceled"
    FAILED = "failed"


class UniversignSignerStatus(str, Enum):
    WAITING = "waiting"
    OPEN = "open"
    VIEWED = "viewed"
    SIGNED = "signed"
    COMPLETED = "completed"
    REFUSED = "refused"
    EXPIRED = "expired"
    STALLED = "stalled"
    UNKNOWN = "unknown"


class LocalDocumentStatus(str, Enum):
    PENDING = "EN_ATTENTE"
    IN_PROGRESS = "EN_COURS"
    SIGNED = "SIGNE"
    REJECTED = "REFUSE"
    EXPIRED = "EXPIRE"
    ERROR = "ERREUR"


class SageDocumentType(int, Enum):
    DEVIS = 0
    BON_COMMANDE = 10
    PREPARATION = 20
    BON_LIVRAISON = 30
    BON_RETOUR = 40
    BON_AVOIR = 50
    FACTURE = 60


class UniversignTransaction(Base):
    __tablename__ = "universign_transactions"

    id = Column(String(36), primary_key=True)
    transaction_id = Column(
        String(255),
        unique=True,
        nullable=False,
        index=True,
        comment="ID Universign (ex: tr_abc123)",
    )

    sage_document_id = Column(
        String(50),
        nullable=False,
        index=True,
        comment="Numéro du document Sage (ex: DE00123)",
    )
    sage_document_type = Column(
        SQLEnum(SageDocumentType), nullable=False, comment="Type de document Sage"
    )

    universign_status = Column(
        SQLEnum(UniversignTransactionStatus),
        nullable=False,
        default=UniversignTransactionStatus.DRAFT,
        index=True,
        comment="Statut brut Universign",
    )
    universign_status_updated_at = Column(
        DateTime, nullable=True, comment="Dernière MAJ du statut Universign"
    )

    local_status = Column(
        SQLEnum(LocalDocumentStatus),
        nullable=False,
        default=LocalDocumentStatus.PENDING,
        index=True,
        comment="Statut métier simplifié pour l'UI",
    )

    signer_url = Column(Text, nullable=True, comment="URL de signature")
    document_url = Column(Text, nullable=True, comment="URL du document signé")

    signed_document_path = Column(
        Text, nullable=True, comment="Chemin local du PDF signé"
    )
    signed_document_downloaded_at = Column(
        DateTime, nullable=True, comment="Date de téléchargement du document"
    )
    signed_document_size_bytes = Column(
        Integer, nullable=True, comment="Taille du fichier en octets"
    )
    download_attempts = Column(
        Integer, default=0, comment="Nombre de tentatives de téléchargement"
    )
    download_error = Column(
        Text, nullable=True, comment="Dernière erreur de téléchargement"
    )

    certificate_url = Column(Text, nullable=True, comment="URL du certificat")

    signers_data = Column(
        Text, nullable=True, comment="JSON des signataires (snapshot)"
    )

    requester_email = Column(String(255), nullable=True)
    requester_name = Column(String(255), nullable=True)
    document_name = Column(String(500), nullable=True)

    created_at = Column(
        DateTime,
        default=datetime.now,
        nullable=False,
        comment="Date de création locale",
    )
    sent_at = Column(
        DateTime, nullable=True, comment="Date d'envoi Universign (started)"
    )
    signed_at = Column(DateTime, nullable=True, comment="Date de signature complète")
    refused_at = Column(DateTime, nullable=True)
    expired_at = Column(DateTime, nullable=True)
    canceled_at = Column(DateTime, nullable=True)

    last_synced_at = Column(
        DateTime, nullable=True, comment="Dernière sync réussie avec Universign"
    )
    sync_attempts = Column(Integer, default=0, comment="Nombre de tentatives de sync")
    sync_error = Column(Text, nullable=True)

    is_test = Column(
        Boolean, default=False, comment="Transaction en environnement .alpha"
    )
    needs_sync = Column(
        Boolean, default=True, index=True, comment="À synchroniser avec Universign"
    )
    webhook_received = Column(Boolean, default=False, comment="Webhook Universign reçu")

    signers = relationship(
        "UniversignSigner", back_populates="transaction", cascade="all, delete-orphan"
    )
    sync_logs = relationship(
        "UniversignSyncLog", back_populates="transaction", cascade="all, delete-orphan"
    )

    __table_args__ = (
        Index("idx_sage_doc", "sage_document_id", "sage_document_type"),
        Index("idx_sync_status", "needs_sync", "universign_status"),
        Index("idx_dates", "created_at", "signed_at"),
    )

    def __repr__(self):
        return (
            f"<UniversignTransaction {self.transaction_id} "
            f"sage={self.sage_document_id} "
            f"status={self.universign_status.value}>"
        )


class UniversignSigner(Base):
    __tablename__ = "universign_signers"

    id = Column(String(36), primary_key=True)
    transaction_id = Column(
        String(36),
        ForeignKey("universign_transactions.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )

    email = Column(String(255), nullable=False, index=True)
    name = Column(String(255), nullable=True)
    phone = Column(String(50), nullable=True)

    status = Column(
        SQLEnum(UniversignSignerStatus),
        default=UniversignSignerStatus.WAITING,
        nullable=False,
    )

    viewed_at = Column(DateTime, nullable=True)
    signed_at = Column(DateTime, nullable=True)
    refused_at = Column(DateTime, nullable=True)
    refusal_reason = Column(Text, nullable=True)

    ip_address = Column(String(45), nullable=True)
    user_agent = Column(Text, nullable=True)
    signature_method = Column(String(50), nullable=True)

    order_index = Column(Integer, default=0)

    transaction = relationship("UniversignTransaction", back_populates="signers")

    def __repr__(self):
        return f"<UniversignSigner {self.email} status={self.status.value}>"


class UniversignSyncLog(Base):
    __tablename__ = "universign_sync_logs"

    id = Column(Integer, primary_key=True, autoincrement=True)
    transaction_id = Column(
        String(36),
        ForeignKey("universign_transactions.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )

    sync_type = Column(String(50), nullable=False, comment="webhook, polling, manual")
    sync_timestamp = Column(DateTime, default=datetime.now, nullable=False, index=True)

    previous_status = Column(String(50), nullable=True)
    new_status = Column(String(50), nullable=True)
    changes_detected = Column(Text, nullable=True, comment="JSON des changements")

    success = Column(Boolean, default=True)
    error_message = Column(Text, nullable=True)
    http_status_code = Column(Integer, nullable=True)
    response_time_ms = Column(Integer, nullable=True)

    transaction = relationship("UniversignTransaction", back_populates="sync_logs")

    def __repr__(self):
        return f"<SyncLog {self.sync_type} at {self.sync_timestamp}>"


class UniversignConfig(Base):
    __tablename__ = "universign_configs"

    id = Column(String(36), primary_key=True)
    user_id = Column(String(36), nullable=True, index=True)

    environment = Column(
        String(50), nullable=False, default="alpha", comment="alpha, prod"
    )

    api_url = Column(String(500), nullable=False)
    api_key = Column(String(500), nullable=False, comment="À chiffrer")

    webhook_url = Column(String(500), nullable=True)
    webhook_secret = Column(String(255), nullable=True)

    auto_sync_enabled = Column(Boolean, default=True)
    sync_interval_minutes = Column(Integer, default=5)

    signature_expiry_days = Column(Integer, default=30)

    is_active = Column(Boolean, default=True)
    created_at = Column(DateTime, default=datetime.now)

    def __repr__(self):
        return f"<UniversignConfig {self.environment}>"

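A short sketch of persisting one of the models above; the IDs and document number are illustrative:

# Illustrative: record a new UniversignTransaction using the models from this file.
import uuid
from database.db_config import async_session_factory
from database.models.universign import (
    SageDocumentType,
    UniversignTransaction,
    UniversignTransactionStatus,
)

async def record_transaction(universign_id: str, devis_number: str) -> None:
    async with async_session_factory() as session:
        tx = UniversignTransaction(
            id=str(uuid.uuid4()),
            transaction_id=universign_id,      # e.g. "tr_abc123"
            sage_document_id=devis_number,     # e.g. "DE00123"
            sage_document_type=SageDocumentType.DEVIS,
            universign_status=UniversignTransactionStatus.DRAFT,
        )
        session.add(tx)
        await session.commit()
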
docker-compose.dev.yml: new file (24 lines)

services:
  backend:
    container_name: dev-sage-api
    build:
      context: .
      target: dev
    env_file: .env
    volumes:
      - .:/app
      - /app/__pycache__
      - ./data:/app/data
      - ./logs:/app/logs
    ports:
      - "8000:8000"
    environment:
      ENV: development
      DEBUG: "true"
      DATABASE_URL: "sqlite+aiosqlite:///./data/sage_dataven.db"
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/"]
      interval: 30s
      timeout: 10s
      retries: 3

docker-compose.prod.yml: new file (23 lines)

services:
  backend:
    container_name: prod_sage_api
    build:
      context: .
      target: prod
    env_file: .env.production
    volumes:
      - ./data:/app/data
      - ./logs:/app/logs
    ports:
      - "8004:8004"
    environment:
      ENV: production
      DEBUG: "false"
      DATABASE_URL: "sqlite+aiosqlite:///./data/sage_prod.db"
    restart: always
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8004/"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 40s

docker-compose.staging.yml: new file (22 lines)

services:
  backend:
    container_name: staging_sage_api
    build:
      context: .
      target: staging
    env_file: .env.staging
    volumes:
      - ./data:/app/data
      - ./logs:/app/logs
    ports:
      - "8002:8002"
    environment:
      ENV: staging
      DEBUG: "false"
      DATABASE_URL: "sqlite+aiosqlite:///./data/sage_staging.db"
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8002/"]
      interval: 30s
      timeout: 10s
      retries: 3

@@ -1,11 +1,4 @@
 services:
-  vps-sage-api:
-    build: .
-    container_name: vps-sage-api
-    env_file: .env
-    volumes:
-      - ./data:/app/data
-      - ./logs:/app/logs
-    ports:
-      - "8000:8000"
-    restart: unless-stopped
+  backend:
+    build:
+      context: .

email_queue.py: 1268 changed lines (file diff suppressed because it is too large)

init_db.py: 21 changed lines

@@ -14,33 +14,14 @@ logger = logging.getLogger(__name__)
 
 
 async def main():
-    print("\n" + "=" * 60)
-    print("Initialisation de la base de données délocalisée")
-    print("=" * 60 + "\n")
-
     try:
         logger.info("Debut de l'initialisation")
         await init_db()
         logger.info("Initialisation terminee")
+        print("\nInitialisation terminee")
 
         print("\nBase de données créée avec succès !")
-        print("Fichier: sage_dataven.db")
-
-        print("\nTables créées:")
-        print(" |- email_logs (Journalisation emails)")
-        print(" |- signature_logs (Suivi signatures Universign)")
-        print(" |- workflow_logs (Transformations documents)")
-        print(" |- cache_metadata (Métadonnées cache)")
-        print(" |- audit_logs (Journal d'audit)")
-
-        print("\nProchaines étapes:")
-        print(" 1. Configurer le fichier .env avec les credentials")
-        print(" 2. Lancer la gateway Windows sur la machine Sage")
-        print(" 3. Lancer l'API VPS: uvicorn api:app --host 0.0.0.0 --port 8000")
-        print(" 4. Ou avec Docker : docker-compose up -d")
-        print(" 5. Tester: http://IP_DU_VPS:8000/docs")
-
-        print("\n" + "=" * 60 + "\n")
         return True
 
     except Exception as e:

middleware/security.py: new file (295 lines, diff truncated in this view)

from fastapi import Request, status
from fastapi.responses import JSONResponse
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.types import ASGIApp, Receive, Send
from sqlalchemy import select
from typing import Callable, Optional
from datetime import datetime
import logging
import base64
import json

logger = logging.getLogger(__name__)

security = HTTPBasic()


class SwaggerAuthMiddleware:
    PROTECTED_PATHS = ["/docs", "/redoc", "/openapi.json"]

    def __init__(self, app: ASGIApp):
        self.app = app

    async def __call__(self, scope, receive: Receive, send: Send):
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return

        request = Request(scope, receive=receive)
        path = request.url.path

        if not any(path.startswith(p) for p in self.PROTECTED_PATHS):
            await self.app(scope, receive, send)
            return

        auth_header = request.headers.get("Authorization")

        if not auth_header or not auth_header.startswith("Basic "):
            response = JSONResponse(
                status_code=status.HTTP_401_UNAUTHORIZED,
                content={"detail": "Authentification requise pour la documentation"},
                headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
            )
            await response(scope, receive, send)
            return

        try:
            encoded_credentials = auth_header.split(" ")[1]
            decoded_credentials = base64.b64decode(encoded_credentials).decode("utf-8")
            username, password = decoded_credentials.split(":", 1)

            credentials = HTTPBasicCredentials(username=username, password=password)

            swagger_user = await self._verify_credentials(credentials)

            if not swagger_user:
                response = JSONResponse(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    content={"detail": "Identifiants invalides"},
                    headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
                )
                await response(scope, receive, send)
                return

            if "state" not in scope:
                scope["state"] = {}

            scope["state"]["swagger_user"] = swagger_user

            logger.info(
                f"✓ Swagger auth: {swagger_user['username']} - tags: {swagger_user.get('allowed_tags', 'ALL')}"
            )

        except Exception as e:
            logger.error(f" Erreur parsing auth header: {e}")
            response = JSONResponse(
                status_code=status.HTTP_401_UNAUTHORIZED,
                content={"detail": "Format d'authentification invalide"},
                headers={"WWW-Authenticate": 'Basic realm="Swagger UI"'},
            )
            await response(scope, receive, send)
            return

        await self.app(scope, receive, send)

    async def _verify_credentials(
        self, credentials: HTTPBasicCredentials
    ) -> Optional[dict]:
        from database.db_config import async_session_factory
        from database.models.api_key import SwaggerUser
        from security.auth import verify_password

        try:
            async with async_session_factory() as session:
                result = await session.execute(
                    select(SwaggerUser).where(
                        SwaggerUser.username == credentials.username
                    )
                )
                swagger_user = result.scalar_one_or_none()

                if swagger_user and swagger_user.is_active:
                    if verify_password(
                        credentials.password, swagger_user.hashed_password
                    ):
                        swagger_user.last_login = datetime.now()
                        await session.commit()

                        logger.info(f"✓ Accès Swagger autorisé: {credentials.username}")

                        return {
                            "id": swagger_user.id,
                            "username": swagger_user.username,
                            "allowed_tags": swagger_user.allowed_tags_list,
                            "is_active": swagger_user.is_active,
                        }

                logger.warning(f"✗ Accès Swagger refusé: {credentials.username}")
                return None

        except Exception as e:
            logger.error(f" Erreur vérification credentials: {e}", exc_info=True)
            return None


class ApiKeyMiddlewareHTTP(BaseHTTPMiddleware):
    EXCLUDED_PATHS = [
        "/docs",
        "/redoc",
        "/openapi.json",
        "/",
        "/health",
        "/auth",
        "/api-keys/verify",
        "/universign/webhook",
    ]

    def _is_excluded_path(self, path: str) -> bool:
        """Vérifie si le chemin est exclu de l'authentification API Key"""
        if path == "/":
            return True

        for excluded in self.EXCLUDED_PATHS:
            if excluded == "/":
                continue
            if path == excluded or path.startswith(excluded + "/"):
                return True

        return False

    async def dispatch(self, request: Request, call_next: Callable):
        path = request.url.path
        method = request.method

        if self._is_excluded_path(path):
            return await call_next(request)

        auth_header = request.headers.get("Authorization")
        api_key_header = request.headers.get("X-API-Key")

        if api_key_header:
            api_key_header = api_key_header.strip()
            if not api_key_header or api_key_header == "":
                api_key_header = None

        if auth_header and auth_header.startswith("Bearer "):
            token = auth_header.split(" ", 1)[1].strip()

            if token.startswith("sdk_live_"):
                logger.warning(
                    " API Key envoyée dans Authorization au lieu de X-API-Key"
                )
                return await self._handle_api_key_auth(
                    request, token, path, method, call_next
                )

            logger.debug(f"JWT détecté pour {method} {path} → délégation à FastAPI")
            request.state.authenticated_via = "jwt"
            return await call_next(request)

        if api_key_header:
            logger.debug(f" API Key détectée pour {method} {path}")
            return await self._handle_api_key_auth(
                request, api_key_header, path, method, call_next
            )

        logger.debug(f" Aucune auth pour {method} {path} → délégation à FastAPI")
        return await call_next(request)

    async def _handle_api_key_auth(
        self,
        request: Request,
        api_key: str,
        path: str,
        method: str,
        call_next: Callable,
    ):
        try:
            from database.db_config import async_session_factory
            from services.api_key import ApiKeyService

            async with async_session_factory() as session:
                service = ApiKeyService(session)

                api_key_obj = await service.verify_api_key(api_key)

                if not api_key_obj:
                    logger.warning(f"🔒 Clé API invalide: {method} {path}")
                    return JSONResponse(
                        status_code=status.HTTP_401_UNAUTHORIZED,
                        content={
                            "detail": "Clé API invalide ou expirée",
                            "hint": "Vérifiez votre clé X-API-Key",
                        },
                    )

                is_allowed, rate_info = await service.check_rate_limit(api_key_obj)
                if not is_allowed:
                    logger.warning(f"⏱️ Rate limit: {api_key_obj.name}")
                    return JSONResponse(
                        status_code=status.HTTP_429_TOO_MANY_REQUESTS,
                        content={"detail": "Rate limit dépassé"},
                        headers={
                            "X-RateLimit-Limit": str(rate_info["limit"]),
                            "X-RateLimit-Remaining": "0",
                        },
                    )

                has_access = await service.check_endpoint_access(api_key_obj, path)

                if not has_access:
                    allowed = (
                        json.loads(api_key_obj.allowed_endpoints)
                        if api_key_obj.allowed_endpoints
                        else ["Tous"]
                    )

                    logger.warning(
                        f"🚫 ACCÈS REFUSÉ: {api_key_obj.name}\n"
|
||||||
|
f" Endpoint demandé: {path}\n"
|
||||||
|
f" Endpoints autorisés: {allowed}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return JSONResponse(
|
||||||
|
status_code=status.HTTP_403_FORBIDDEN,
|
||||||
|
content={
|
||||||
|
"detail": "Accès non autorisé à cet endpoint",
|
||||||
|
"endpoint_requested": path,
|
||||||
|
"api_key_name": api_key_obj.name,
|
||||||
|
"allowed_endpoints": allowed,
|
||||||
|
"hint": "Cette clé API n'a pas accès à cet endpoint.",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
request.state.api_key = api_key_obj
|
||||||
|
request.state.authenticated_via = "api_key"
|
||||||
|
|
||||||
|
logger.info(f" ACCÈS AUTORISÉ: {api_key_obj.name} → {method} {path}")
|
||||||
|
|
||||||
|
return await call_next(request)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"💥 Erreur validation API Key: {e}", exc_info=True)
|
||||||
|
return JSONResponse(
|
||||||
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
content={"detail": f"Erreur interne: {str(e)}"},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
ApiKeyMiddleware = ApiKeyMiddlewareHTTP
|
||||||
|
|
||||||
|
|
||||||
|
def get_api_key_from_request(request: Request) -> Optional:
|
||||||
|
"""Récupère l'objet ApiKey depuis la requête si présent"""
|
||||||
|
return getattr(request.state, "api_key", None)
|
||||||
|
|
||||||
|
|
||||||
|
def get_auth_method(request: Request) -> str:
|
||||||
|
"""Retourne la méthode d'authentification utilisée"""
|
||||||
|
return getattr(request.state, "authenticated_via", "none")
|
||||||
|
|
||||||
|
|
||||||
|
def get_swagger_user_from_request(request: Request) -> Optional[dict]:
|
||||||
|
"""Récupère l'utilisateur Swagger depuis la requête"""
|
||||||
|
return getattr(request.state, "swagger_user", None)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"SwaggerAuthMiddleware",
|
||||||
|
"ApiKeyMiddlewareHTTP",
|
||||||
|
"ApiKeyMiddleware",
|
||||||
|
"get_api_key_from_request",
|
||||||
|
"get_auth_method",
|
||||||
|
"get_swagger_user_from_request",
|
||||||
|
]
|
||||||
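For reference, a minimal client-side sketch of how the two authentication paths above are meant to be exercised; the base URL, the example key value, and the /clients path are placeholders, not values defined in this diff:

import httpx

BASE_URL = "http://localhost:8000"  # hypothetical deployment URL

# API-key path: the middleware reads X-API-Key, checks rate limits and
# per-endpoint permissions, then forwards the request to the app.
r = httpx.get(
    f"{BASE_URL}/clients",
    headers={"X-API-Key": "sdk_live_xxxxxxxxxxxx"},  # placeholder key
)
print(r.status_code, r.json())

# JWT path: a Bearer token that does not start with "sdk_live_" is passed
# through untouched and handled by FastAPI's own dependency-based auth.
r = httpx.get(
    f"{BASE_URL}/clients",
    headers={"Authorization": "Bearer <access_token>"},  # placeholder token
)
print(r.status_code)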
154  routes/api_keys.py  Normal file
@@ -0,0 +1,154 @@
from fastapi import APIRouter, Depends, HTTPException, status, Query
from sqlalchemy.ext.asyncio import AsyncSession
import logging

from database import get_session, User
from core.dependencies import get_current_user, require_role
from services.api_key import ApiKeyService, api_key_to_response
from schemas.api_key import (
    ApiKeyCreate,
    ApiKeyCreatedResponse,
    ApiKeyResponse,
    ApiKeyList,
)

logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api-keys", tags=["API Keys Management"])


@router.post(
    "",
    response_model=ApiKeyCreatedResponse,
    status_code=status.HTTP_201_CREATED,
    dependencies=[Depends(require_role("admin", "super_admin"))],
)
async def create_api_key(
    data: ApiKeyCreate,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    service = ApiKeyService(session)

    api_key_obj, api_key_plain = await service.create_api_key(
        name=data.name,
        description=data.description,
        created_by=user.email,
        user_id=user.id,
        expires_in_days=data.expires_in_days,
        rate_limit_per_minute=data.rate_limit_per_minute,
        allowed_endpoints=data.allowed_endpoints,
    )

    logger.info(f"Clé API créée par {user.email}: {data.name}")

    response_data = api_key_to_response(api_key_obj)
    response_data["api_key"] = api_key_plain

    return ApiKeyCreatedResponse(**response_data)


@router.get("", response_model=ApiKeyList)
async def list_api_keys(
    include_revoked: bool = Query(False, description="Inclure les clés révoquées"),
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    service = ApiKeyService(session)

    user_id = None if user.role in ["admin", "super_admin"] else user.id

    keys = await service.list_api_keys(include_revoked=include_revoked, user_id=user_id)

    items = [ApiKeyResponse(**api_key_to_response(k)) for k in keys]

    return ApiKeyList(total=len(items), items=items)


@router.get("/{key_id}", response_model=ApiKeyResponse)
async def get_api_key(
    key_id: str,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    """Récupérer une clé API par son ID"""
    service = ApiKeyService(session)

    api_key_obj = await service.get_by_id(key_id)

    if not api_key_obj:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Clé API {key_id} introuvable",
        )

    if user.role not in ["admin", "super_admin"]:
        if api_key_obj.user_id != user.id:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Accès refusé à cette clé",
            )

    return ApiKeyResponse(**api_key_to_response(api_key_obj))


@router.delete("/{key_id}", status_code=status.HTTP_200_OK)
async def revoke_api_key(
    key_id: str,
    session: AsyncSession = Depends(get_session),
    user: User = Depends(get_current_user),
):
    service = ApiKeyService(session)

    api_key_obj = await service.get_by_id(key_id)

    if not api_key_obj:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Clé API {key_id} introuvable",
        )

    if user.role not in ["admin", "super_admin"]:
        if api_key_obj.user_id != user.id:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Accès refusé à cette clé",
            )

    success = await service.revoke_api_key(key_id)

    if not success:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Erreur lors de la révocation",
        )

    logger.info(f"Clé API révoquée par {user.email}: {api_key_obj.name}")

    return {
        "success": True,
        "message": f"Clé API '{api_key_obj.name}' révoquée avec succès",
    }


@router.post("/verify", status_code=status.HTTP_200_OK)
async def verify_api_key_endpoint(
    api_key: str = Query(..., description="Clé API à vérifier"),
    session: AsyncSession = Depends(get_session),
):
    service = ApiKeyService(session)

    api_key_obj = await service.verify_api_key(api_key)

    if not api_key_obj:
        return {
            "valid": False,
            "message": "Clé API invalide, expirée ou révoquée",
        }

    return {
        "valid": True,
        "message": "Clé API valide",
        "key_name": api_key_obj.name,
        "rate_limit": api_key_obj.rate_limit_per_minute,
        "expires_at": api_key_obj.expires_at,
    }
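A hedged usage sketch for the new router, assuming the API runs locally and a valid admin JWT is at hand (URL and tokens are placeholders, not values from this diff):

import httpx

BASE_URL = "http://localhost:8000"                        # assumption
admin_headers = {"Authorization": "Bearer <admin_jwt>"}   # placeholder token

# Create a key limited to the /entreprises endpoints (admin/super_admin only).
created = httpx.post(
    f"{BASE_URL}/api-keys",
    json={
        "name": "integration-test",
        "description": "clé de test",
        "expires_in_days": 30,
        "rate_limit_per_minute": 60,
        "allowed_endpoints": ["/entreprises*"],
    },
    headers=admin_headers,
).json()
plain_key = created["api_key"]  # only returned once, at creation time

# /api-keys/verify is in the middleware's EXCLUDED_PATHS, so it can be
# called with the plain key as a query parameter to check its status.
check = httpx.post(f"{BASE_URL}/api-keys/verify", params={"api_key": plain_key})
print(check.json())  # expected {"valid": true, ...} while the key is active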
@@ -7,6 +7,7 @@ from typing import Optional
 import uuid

 from database import get_session, User, RefreshToken, LoginAttempt
+from core.dependencies import get_current_user
 from security.auth import (
     hash_password,
     verify_password,
@@ -19,8 +20,7 @@ from security.auth import (
     hash_token,
 )
 from services.email_service import AuthEmailService
-from core.dependencies import get_current_user
-from config import settings
+from config.config import settings
 import logging

 logger = logging.getLogger(__name__)
@@ -34,7 +34,7 @@ class RegisterRequest(BaseModel):
     prenom: str = Field(..., min_length=2, max_length=100)


-class LoginRequest(BaseModel):
+class Login(BaseModel):
     email: EmailStr
     password: str

@@ -50,20 +50,20 @@ class RefreshTokenRequest(BaseModel):
     refresh_token: str


-class ForgotPasswordRequest(BaseModel):
+class ForgotPassword(BaseModel):
     email: EmailStr


-class ResetPasswordRequest(BaseModel):
+class ResetPassword(BaseModel):
     token: str
     new_password: str = Field(..., min_length=8)


-class VerifyEmailRequest(BaseModel):
+class VerifyEmail(BaseModel):
     token: str


-class ResendVerificationRequest(BaseModel):
+class ResendVerification(BaseModel):
     email: EmailStr


@@ -101,7 +101,7 @@ async def check_rate_limit(
     )
     failed_attempts = result.scalars().all()

-    if len(failed_attempts) >= 5:
+    if len(failed_attempts) >= 15:
         return False, "Trop de tentatives échouées. Réessayez dans 15 minutes."

     return True, ""
@@ -194,7 +194,7 @@ async def verify_email_get(token: str, session: AsyncSession = Depends(get_sessi

 @router.post("/verify-email")
 async def verify_email_post(
-    data: VerifyEmailRequest, session: AsyncSession = Depends(get_session)
+    data: VerifyEmail, session: AsyncSession = Depends(get_session)
 ):
     result = await session.execute(
         select(User).where(User.verification_token == data.token)
@@ -228,7 +228,7 @@ async def verify_email_post(

 @router.post("/resend-verification")
 async def resend_verification(
-    data: ResendVerificationRequest,
+    data: ResendVerification,
     request: Request,
     session: AsyncSession = Depends(get_session),
 ):
@@ -259,7 +259,7 @@ async def resend_verification(

 @router.post("/login", response_model=TokenResponse)
 async def login(
-    data: LoginRequest, request: Request, session: AsyncSession = Depends(get_session)
+    data: Login, request: Request, session: AsyncSession = Depends(get_session)
 ):
     ip = request.client.host if request.client else "unknown"
     user_agent = request.headers.get("user-agent", "unknown")
@@ -286,7 +286,7 @@ async def login(
     if user:
         user.failed_login_attempts += 1

-        if user.failed_login_attempts >= 5:
+        if user.failed_login_attempts >= 15:
             user.locked_until = datetime.now() + timedelta(minutes=15)
             await session.commit()
             raise HTTPException(
@@ -417,7 +417,7 @@ async def refresh_access_token(

 @router.post("/forgot-password")
 async def forgot_password(
-    data: ForgotPasswordRequest,
+    data: ForgotPassword,
     request: Request,
     session: AsyncSession = Depends(get_session),
 ):
@@ -452,7 +452,7 @@ async def forgot_password(

 @router.post("/reset-password")
 async def reset_password(
-    data: ResetPasswordRequest, session: AsyncSession = Depends(get_session)
+    data: ResetPassword, session: AsyncSession = Depends(get_session)
 ):
     result = await session.execute(select(User).where(User.reset_token == data.token))
     user = result.scalar_one_or_none()
@@ -510,7 +510,7 @@ async def logout(
     token_record.revoked_at = datetime.now()
     await session.commit()

-    logger.info(f"👋 Déconnexion: {user.email}")
+    logger.info(f"Déconnexion: {user.email}")

     return {"success": True, "message": "Déconnexion réussie"}
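Two functional notes on this hunk set: the request models lose their "Request" suffix (LoginRequest becomes Login, ForgotPasswordRequest becomes ForgotPassword, and so on), and the failed-login threshold is raised from 5 to 15 attempts before the 15-minute lockout applies. A hedged sketch of a login call using the renamed schema; the router prefix and credentials are assumptions, not taken from this diff:

import httpx

resp = httpx.post(
    "http://localhost:8000/auth/login",   # assumed prefix for this auth router
    json={"email": "user@example.com", "password": "s3cret-password"},  # Login schema fields
)
tokens = resp.json()  # expected to match TokenResponse (access and refresh tokens)
print(tokens)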
158  routes/enterprise.py  Normal file
@@ -0,0 +1,158 @@
from fastapi import APIRouter, HTTPException, Query, Path
import httpx
import logging
from datetime import datetime

from schemas import EntrepriseSearch, EntrepriseSearchResponse
from utils.enterprise import (
    calculer_tva_intracommunautaire,
    mapper_resultat_api,
    rechercher_entreprise_api,
)

logger = logging.getLogger(__name__)
router = APIRouter(prefix="/entreprises", tags=["Entreprises"])


@router.get("/search", response_model=EntrepriseSearchResponse)
async def rechercher_entreprise(
    q: str = Query(..., min_length=2, description="Nom d'entreprise, SIREN ou SIRET"),
    per_page: int = Query(5, ge=1, le=25, description="Nombre de résultats (max 25)"),
):
    try:
        logger.info(f"Recherche entreprise: '{q}'")

        api_response = await rechercher_entreprise_api(q, per_page)

        resultats_api = api_response.get("results", [])

        if not resultats_api:
            logger.info(f"Aucun résultat pour: {q}")
            return EntrepriseSearchResponse(total_results=0, results=[], query=q)

        entreprises = []
        for data in resultats_api:
            entreprise = mapper_resultat_api(data)
            if entreprise:
                entreprises.append(entreprise)

        logger.info(f"{len(entreprises)} résultat(s) trouvé(s)")

        return EntrepriseSearchResponse(
            total_results=len(entreprises), results=entreprises, query=q
        )

    except HTTPException:
        raise

    except Exception as e:
        logger.error(f"Erreur recherche entreprise: {e}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Erreur lors de la recherche: {str(e)}"
        )


@router.get("/siren/{siren}", response_model=EntrepriseSearch)
async def lire_entreprise_par_siren(
    siren: str = Path(
        ...,
        min_length=9,
        max_length=9,
        pattern=r"^\d{9}$",
        description="Numéro SIREN (9 chiffres)",
    ),
):
    try:
        logger.info(f"Lecture entreprise SIREN: {siren}")

        api_response = await rechercher_entreprise_api(siren, per_page=1)

        resultats = api_response.get("results", [])

        if not resultats:
            raise HTTPException(
                status_code=404,
                detail=f"Aucune entreprise trouvée pour le SIREN {siren}",
            )

        entreprise_data = resultats[0]

        if entreprise_data.get("siren") != siren:
            raise HTTPException(status_code=404, detail=f"SIREN {siren} introuvable")

        entreprise = mapper_resultat_api(entreprise_data)

        if not entreprise:
            raise HTTPException(
                status_code=500,
                detail="Erreur lors du traitement des données entreprise",
            )

        if not entreprise.is_active:
            logger.warning(f"Entreprise CESSÉE: {siren}")

        return entreprise

    except HTTPException:
        raise

    except Exception as e:
        logger.error(f"Erreur lecture SIREN {siren}: {e}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Erreur lors de la récupération: {str(e)}"
        )


@router.get("/tva/{siren}")
async def calculer_tva(
    siren: str = Path(
        ...,
        min_length=9,
        max_length=9,
        pattern=r"^\d{9}$",
        description="Numéro SIREN (9 chiffres)",
    ),
):
    tva_number = calculer_tva_intracommunautaire(siren)

    if not tva_number:
        raise HTTPException(status_code=400, detail=f"SIREN invalide: {siren}")

    return {
        "siren": siren,
        "vat_number": tva_number,
        "format": "FR + Clé (2 chiffres) + SIREN (9 chiffres)",
    }


@router.get("/health")
async def health_check_api_sirene():
    try:
        async with httpx.AsyncClient(timeout=5.0) as client:
            response = await client.get(
                "https://recherche-entreprises.api.gouv.fr/search",
                params={"q": "test", "per_page": 1},
            )

            if response.status_code == 200:
                return {
                    "status": "healthy",
                    "api_sirene": "disponible",
                    "response_time_ms": response.elapsed.total_seconds() * 1000,
                    "timestamp": datetime.now().isoformat(),
                }
            else:
                return {
                    "status": "degraded",
                    "api_sirene": f"statut {response.status_code}",
                    "timestamp": datetime.now().isoformat(),
                }

    except Exception as e:
        logger.error(f"Health check failed: {e}")
        return {
            "status": "unhealthy",
            "api_sirene": "indisponible",
            "error": str(e),
            "timestamp": datetime.now().isoformat(),
        }
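The /tva/{siren} route delegates to calculer_tva_intracommunautaire, whose implementation lives in utils.enterprise and is not part of this diff. For context, the usual French rule it is expected to follow derives a 2-digit key from the SIREN; the sketch below is a standalone illustration of that rule, not the project's own function:

from typing import Optional

def tva_intracommunautaire_fr(siren: str) -> Optional[str]:
    """Return the French intra-community VAT number for a 9-digit SIREN."""
    if len(siren) != 9 or not siren.isdigit():
        return None
    cle = (12 + 3 * (int(siren) % 97)) % 97  # standard French VAT-key formula
    return f"FR{cle:02d}{siren}"

# Example: tva_intracommunautaire_fr("123456789") -> "FR32123456789"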
@@ -12,13 +12,13 @@ from schemas (
     SageGatewayCreate,
     SageGatewayUpdate,
     SageGatewayResponse,
-    SageGatewayListResponse,
+    SageGatewayList,
     SageGatewayHealthCheck,
-    SageGatewayTestRequest,
+    SageGatewayTest,
     SageGatewayStatsResponse,
     CurrentGatewayInfo,
 )
-from config import settings
+from config.config import settings

 logger = logging.getLogger(__name__)
 router = APIRouter(prefix="/sage-gateways", tags=["Sage Gateways"])
@@ -41,7 +41,7 @@ async def create_gateway(
     return SageGatewayResponse(**gateway_response_from_model(gateway))


-@router.get("", response_model=SageGatewayListResponse)
+@router.get("", response_model=SageGatewayList)
 async def list_gateways(
     include_deleted: bool = Query(False, description="Inclure les gateways supprimées"),
     session: AsyncSession = Depends(get_session),
@@ -54,7 +54,7 @@ async def list_gateways(

     items = [SageGatewayResponse(**gateway_response_from_model(g)) for g in gateways]

-    return SageGatewayListResponse(
+    return SageGatewayList(
         items=items,
         total=len(items),
         active_gateway=SageGatewayResponse(**gateway_response_from_model(active))
@@ -268,7 +268,7 @@ async def check_gateway_health(

 @router.post("/test", response_model=dict)
 async def test_gateway_config(
-    data: SageGatewayTestRequest,
+    data: SageGatewayTest,
     user: User = Depends(get_current_user),
     session: AsyncSession = Depends(get_session),
 ):
1130  routes/universign.py  Normal file (file diff suppressed because it is too large)
BIN   sage/pdfs/Sage_Text-Bold.ttf  Normal file (binary file not shown)
BIN   sage/pdfs/Sage_Text-Medium.ttf  Normal file (binary file not shown)
BIN   sage/pdfs/logo.png  Normal file (binary file not shown; after: 27 KiB)
187  sage_client.py
@@ -1,7 +1,6 @@
-# sage_client.py
 import requests
 from typing import Dict, List, Optional
-from config import settings
+from config.config import settings
 import logging

 logger = logging.getLogger(__name__)
@@ -401,6 +400,181 @@ class SageGatewayClient:
         result = self._post("/sage/client/remise-max", {"code": code_client})
         return result.get("data", {}).get("remise_max", 10.0)

+    def lister_collaborateurs(
+        self, filtre: Optional[str] = None, actifs_seulement: bool = True
+    ) -> List[Dict]:
+        """Liste tous les collaborateurs"""
+        return self._post(
+            "/sage/collaborateurs/list",
+            {
+                "filtre": filtre or "",
+                "actifs_seulement": actifs_seulement,
+            },
+        ).get("data", [])
+
+    def lire_collaborateur(self, numero: int) -> Optional[Dict]:
+        """Lit un collaborateur par numéro"""
+        return self._post("/sage/collaborateurs/get", {"numero": numero}).get("data")
+
+    def creer_collaborateur(self, data: Dict) -> Optional[Dict]:
+        """Crée un nouveau collaborateur"""
+        return self._post("/sage/collaborateurs/create", data).get("data")
+
+    def modifier_collaborateur(self, numero: int, data: Dict) -> Optional[Dict]:
+        """Modifie un collaborateur existant"""
+        return self._post(
+            "/sage/collaborateurs/update", {"numero": numero, **data}
+        ).get("data")
+
+    def lire_informations_societe(self) -> Optional[Dict]:
+        """Lit les informations de la société depuis P_DOSSIER"""
+        return self._get("/sage/societe/info").get("data")
+
+    def valider_facture(self, numero_facture: str) -> dict:
+        response = self._post(f"/sage/factures/{numero_facture}/valider", {})
+        return response.get("data", {})
+
+    def devalider_facture(self, numero_facture: str) -> dict:
+        response = self._post(f"/sage/factures/{numero_facture}/devalider", {})
+        return response.get("data", {})
+
+    def get_statut_validation(self, numero_facture: str) -> dict:
+        response = self._get(f"/sage/factures/{numero_facture}/statut-validation")
+        return response.get("data", {})
+
+    def regler_facture(
+        self,
+        numero_facture: str,
+        montant: float,
+        mode_reglement: int = 0,
+        date_reglement: str = None,
+        reference: str = "",
+        libelle: str = "",
+        code_journal: str = None,
+        devise_code: int = 0,
+        cours_devise: float = 1.0,
+        tva_encaissement: bool = False,
+        compte_general: str = None,
+    ) -> dict:
+        """Règle une facture"""
+        payload = {
+            "montant": montant,
+            "mode_reglement": mode_reglement,
+            "reference": reference,
+            "libelle": libelle,
+            "devise_code": devise_code,
+            "cours_devise": cours_devise,
+            "tva_encaissement": tva_encaissement,
+        }
+
+        if date_reglement:
+            payload["date_reglement"] = date_reglement
+        if code_journal:
+            payload["code_journal"] = code_journal
+        if compte_general:
+            payload["compte_general"] = compte_general
+
+        return self._post(f"/sage/factures/{numero_facture}/regler", payload).get(
+            "data", {}
+        )
+
+    def regler_factures_client(
+        self,
+        client_code: str,
+        montant_total: float,
+        mode_reglement: int = 0,
+        date_reglement: str = None,
+        reference: str = "",
+        libelle: str = "",
+        code_journal: str = None,
+        numeros_factures: list = None,
+        devise_code: int = 0,
+        cours_devise: float = 1.0,
+        tva_encaissement: bool = False,
+    ) -> dict:
+        """Règle plusieurs factures d'un client"""
+        payload = {
+            "client_code": client_code,
+            "montant_total": montant_total,
+            "mode_reglement": mode_reglement,
+            "reference": reference,
+            "libelle": libelle,
+            "devise_code": devise_code,
+            "cours_devise": cours_devise,
+            "tva_encaissement": tva_encaissement,
+        }
+
+        if date_reglement:
+            payload["date_reglement"] = date_reglement
+        if code_journal:
+            payload["code_journal"] = code_journal
+        if numeros_factures:
+            payload["numeros_factures"] = numeros_factures
+
+        return self._post("/sage/reglements/multiple", payload).get("data", {})
+
+    def get_reglements_facture(self, numero_facture: str) -> dict:
+        """Récupère les règlements d'une facture"""
+        return self._get(f"/sage/factures/{numero_facture}/reglements").get("data", {})
+
+    def get_reglements_client(
+        self,
+        client_code: str,
+        date_debut: str = None,
+        date_fin: str = None,
+        inclure_soldees: bool = True,
+    ) -> dict:
+        """Récupère les règlements d'un client"""
+        params = {"inclure_soldees": inclure_soldees}
+        if date_debut:
+            params["date_debut"] = date_debut
+        if date_fin:
+            params["date_fin"] = date_fin
+
+        return self._get(f"/sage/clients/{client_code}/reglements", params=params).get(
+            "data", {}
+        )
+
+    def get_journaux_banque(self) -> dict:
+        return self._get("/sage/journaux/banque").get("data", {})
+
+    def get_modes_reglement(self) -> List[dict]:
+        """Récupère les modes de règlement depuis Sage"""
+        return self._get("/sage/reglements/modes").get("data", {}).get("modes", [])
+
+    def get_devises(self) -> List[dict]:
+        """Récupère les devises disponibles"""
+        return self._get("/sage/devises").get("data", {}).get("devises", [])
+
+    def get_journaux_tresorerie(self) -> List[dict]:
+        """Récupère les journaux de trésorerie (banque + caisse)"""
+        return (
+            self._get("/sage/journaux/tresorerie").get("data", {}).get("journaux", [])
+        )
+
+    def get_comptes_generaux(
+        self, prefixe: str = None, type_compte: str = None
+    ) -> List[dict]:
+        params = {}
+        if prefixe:
+            params["prefixe"] = prefixe
+        if type_compte:
+            params["type_compte"] = type_compte
+
+        return (
+            self._get("/sage/comptes-generaux", params=params)
+            .get("data", {})
+            .get("comptes", [])
+        )
+
+    def get_tva_taux(self) -> List[dict]:
+        """Récupère les taux de TVA"""
+        return self._get("/sage/tva/taux").get("data", {}).get("taux", [])
+
+    def get_parametres_encaissement(self) -> dict:
+        """Récupère les paramètres TVA sur encaissement"""
+        return self._get("/sage/parametres/encaissement").get("data", {})
+
     def refresh_cache(self) -> Dict:
         return self._post("/sage/cache/refresh")

@@ -414,5 +588,14 @@ class SageGatewayClient:
         except Exception:
             return {"status": "down"}

+    def get_tous_reglements(self, params=None):
+        return self._get("/sage/reglements", params=params)
+
+    def get_reglement_facture_detail(self, facture_no):
+        return self._get(f"/sage/reglements/facture/{facture_no}")
+
+    def get_reglement_detail(self, rg_no):
+        return self._get(f"/sage/reglements/{rg_no}")
+

 sage_client = SageGatewayClient()
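A hedged sketch of how the new settlement helpers might be called from application code; the invoice number, amounts, journal code and the assumed shape of the modes list are placeholders, and the Sage gateway behind sage_client must of course be reachable:

from sage_client import sage_client

# Look up the payment modes and treasury journals exposed by the gateway.
modes = sage_client.get_modes_reglement()
journaux = sage_client.get_journaux_tresorerie()

# Register a 120.50 payment against a (hypothetical) invoice.
reglement = sage_client.regler_facture(
    numero_facture="FA2024-0001",                       # placeholder invoice number
    montant=120.50,
    mode_reglement=modes[0]["code"] if modes else 0,    # assumed dict shape for modes
    code_journal="BNK",                                 # placeholder journal code
    reference="VIR-2024-42",
    libelle="Règlement client",
)
print(reglement)

# Then inspect the payments recorded on that invoice.
print(sage_client.get_reglements_facture("FA2024-0001"))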
@@ -1,108 +1,124 @@
 from schemas.tiers.tiers import TiersDetails, TypeTiersInt
 from schemas.tiers.type_tiers import TypeTiers
 from schemas.schema_mixte import BaremeRemiseResponse
-from schemas.user import UserResponse
+from schemas.user import Users
 from schemas.tiers.clients import (
-    ClientCreateRequest,
+    ClientCreate,
     ClientDetails,
     ClientResponse,
-    ClientUpdateRequest,
+    ClientUpdate,
 )
 from schemas.tiers.contact import Contact, ContactCreate, ContactUpdate
 from schemas.tiers.fournisseurs import (
-    FournisseurCreateAPIRequest,
+    FournisseurCreate,
     FournisseurDetails,
-    FournisseurUpdateRequest,
+    FournisseurUpdate,
 )
-from schemas.documents.avoirs import AvoirCreateRequest, AvoirUpdateRequest
+from schemas.documents.avoirs import AvoirCreate, AvoirUpdate
-from schemas.documents.commandes import CommandeCreateRequest, CommandeUpdateRequest
+from schemas.documents.commandes import CommandeCreate, CommandeUpdate
 from schemas.documents.devis import (
     DevisRequest,
-    DevisResponse,
+    Devis,
-    DevisUpdateRequest,
+    DevisUpdate,
-    RelanceDevisRequest,
+    RelanceDevis,
 )
 from schemas.documents.documents import TypeDocument, TypeDocumentSQL
-from schemas.documents.email import StatutEmail, EmailEnvoiRequest
+from schemas.documents.email import StatutEmail, EmailEnvoi
-from schemas.documents.factures import FactureCreateRequest, FactureUpdateRequest
+from schemas.documents.factures import FactureCreate, FactureUpdate
-from schemas.documents.livraisons import LivraisonCreateRequest, LivraisonUpdateRequest
+from schemas.documents.livraisons import LivraisonCreate, LivraisonUpdate
-from schemas.documents.universign import SignatureRequest, StatutSignature
+from schemas.documents.universign import (
+    Signature,
+    StatutSignature,
+    SyncStatsResponse,
+    CreateSignatureRequest,
+    TransactionResponse,
+)
 from schemas.articles.articles import (
-    ArticleCreateRequest,
+    ArticleCreate,
-    ArticleResponse,
+    Article,
-    ArticleUpdateRequest,
+    ArticleUpdate,
-    ArticleListResponse,
+    ArticleList,
-    EntreeStockRequest,
+    EntreeStock,
-    SortieStockRequest,
+    SortieStock,
-    MouvementStockResponse,
+    MouvementStock,
 )
 from schemas.articles.famille_article import (
-    FamilleResponse,
+    Familles,
-    FamilleCreateRequest,
+    FamilleCreate,
-    FamilleListResponse,
+    FamilleList,
 )

 from schemas.sage.sage_gateway import (
     SageGatewayCreate,
     SageGatewayUpdate,
     SageGatewayResponse,
-    SageGatewayListResponse,
+    SageGatewayList,
     SageGatewayHealthCheck,
-    SageGatewayTestRequest,
+    SageGatewayTest,
     SageGatewayStatsResponse,
     CurrentGatewayInfo,
 )

+from schemas.society.societe import SocieteInfo
+
+from schemas.society.enterprise import EntrepriseSearch, EntrepriseSearchResponse
+
 __all__ = [
     "TiersDetails",
     "TypeTiers",
     "BaremeRemiseResponse",
-    "UserResponse",
+    "Users",
-    "ClientCreateRequest",
+    "ClientCreate",
     "ClientDetails",
     "ClientResponse",
-    "ClientUpdateRequest",
+    "ClientUpdate",
-    "FournisseurCreateAPIRequest",
+    "FournisseurCreate",
     "FournisseurDetails",
-    "FournisseurUpdateRequest",
+    "FournisseurUpdate",
     "Contact",
-    "AvoirCreateRequest",
+    "AvoirCreate",
-    "AvoirUpdateRequest",
+    "AvoirUpdate",
-    "CommandeCreateRequest",
+    "CommandeCreate",
-    "CommandeUpdateRequest",
+    "CommandeUpdate",
     "DevisRequest",
-    "DevisResponse",
+    "Devis",
-    "DevisUpdateRequest",
+    "DevisUpdate",
     "TypeDocument",
     "TypeDocumentSQL",
     "StatutEmail",
-    "EmailEnvoiRequest",
+    "EmailEnvoi",
-    "FactureCreateRequest",
+    "FactureCreate",
-    "FactureUpdateRequest",
+    "FactureUpdate",
-    "LivraisonCreateRequest",
+    "LivraisonCreate",
-    "LivraisonUpdateRequest",
+    "LivraisonUpdate",
-    "SignatureRequest",
+    "Signature",
     "StatutSignature",
     "TypeTiersInt",
-    "ArticleCreateRequest",
+    "ArticleCreate",
-    "ArticleResponse",
+    "Article",
-    "ArticleUpdateRequest",
+    "ArticleUpdate",
-    "ArticleListResponse",
+    "ArticleList",
-    "EntreeStockRequest",
+    "EntreeStock",
-    "SortieStockRequest",
+    "SortieStock",
-    "MouvementStockResponse",
+    "MouvementStock",
-    "RelanceDevisRequest",
+    "RelanceDevis",
-    "FamilleResponse",
+    "Familles",
-    "FamilleCreateRequest",
+    "FamilleCreate",
-    "FamilleListResponse",
+    "FamilleList",
     "ContactCreate",
     "ContactUpdate",
     "SageGatewayCreate",
     "SageGatewayUpdate",
     "SageGatewayResponse",
-    "SageGatewayListResponse",
+    "SageGatewayList",
     "SageGatewayHealthCheck",
-    "SageGatewayTestRequest",
+    "SageGatewayTest",
     "SageGatewayStatsResponse",
     "CurrentGatewayInfo",
+    "SyncStatsResponse",
+    "CreateSignatureRequest",
+    "TransactionResponse",
+    "SocieteInfo",
+    "EntrepriseSearch",
+    "EntrepriseSearchResponse",
 ]
77  schemas/api_key.py  Normal file
@@ -0,0 +1,77 @@
from pydantic import BaseModel, Field
from typing import Optional, List
from datetime import datetime


class ApiKeyCreate(BaseModel):
    """Schema pour créer une clé API"""

    name: str = Field(..., min_length=3, max_length=255, description="Nom de la clé")
    description: Optional[str] = Field(None, description="Description de l'usage")
    expires_in_days: Optional[int] = Field(
        None, ge=1, le=3650, description="Expiration en jours (max 10 ans)"
    )
    rate_limit_per_minute: int = Field(
        60, ge=1, le=1000, description="Limite de requêtes par minute"
    )
    allowed_endpoints: Optional[List[str]] = Field(
        None, description="Endpoints autorisés ([] = tous, ['/clients*'] = wildcard)"
    )


class ApiKeyResponse(BaseModel):
    """Schema de réponse pour une clé API"""

    id: str
    name: str
    description: Optional[str]
    key_prefix: str
    is_active: bool
    is_expired: bool
    rate_limit_per_minute: int
    allowed_endpoints: Optional[List[str]]
    total_requests: int
    last_used_at: Optional[datetime]
    created_at: datetime
    expires_at: Optional[datetime]
    revoked_at: Optional[datetime]
    created_by: str


class ApiKeyCreatedResponse(ApiKeyResponse):
    """Schema de réponse après création (inclut la clé en clair)"""

    api_key: str = Field(
        ..., description="Clé API en clair - à sauvegarder immédiatement"
    )


class ApiKeyList(BaseModel):
    """Liste de clés API"""

    total: int
    items: List[ApiKeyResponse]


class SwaggerUserCreate(BaseModel):
    """Schema pour créer un utilisateur Swagger"""

    username: str = Field(..., min_length=3, max_length=100)
    password: str = Field(..., min_length=8)
    full_name: Optional[str] = None
    email: Optional[str] = None


class SwaggerUserResponse(BaseModel):
    """Schema de réponse pour un utilisateur Swagger"""

    id: str
    username: str
    full_name: Optional[str]
    email: Optional[str]
    is_active: bool
    created_at: datetime
    last_login: Optional[datetime]

    class Config:
        from_attributes = True
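A small sketch of the create-side schema in use, e.g. for building the POST /api-keys payload client-side; the field values are illustrative only:

from schemas.api_key import ApiKeyCreate

payload = ApiKeyCreate(
    name="reporting-batch",
    description="Clé pour l'export nocturne",
    expires_in_days=365,
    rate_limit_per_minute=120,
    allowed_endpoints=["/factures*", "/clients*"],  # wildcard-style patterns
)
print(payload.model_dump())  # assumes Pydantic v2, consistent with from_attributes above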
@@ -1,6 +1,6 @@
 from pydantic import BaseModel, Field, validator, field_validator
 from typing import List, Optional
-from datetime import date, datetime
+from datetime import date

 from utils import (
     NomenclatureType,
@@ -11,363 +11,7 @@ from utils import (
 )


-class EmplacementStockModel(BaseModel):
+class Article(BaseModel):
-    """Détail du stock dans un emplacement spécifique"""
-
-    depot: str = Field(..., description="Numéro du dépôt (DE_No)")
-    emplacement: str = Field(..., description="Code emplacement (DP_No)")
-
-    qte_stockee: float = Field(0.0, description="Quantité stockée (AE_QteSto)")
-    qte_preparee: float = Field(0.0, description="Quantité préparée (AE_QtePrepa)")
-    qte_a_controler: float = Field(
-        0.0, description="Quantité à contrôler (AE_QteAControler)"
-    )
-
-    date_creation: Optional[datetime] = Field(None, description="Date création")
-    date_modification: Optional[datetime] = Field(None, description="Date modification")
-
-    depot_num: Optional[str] = Field(None, description="Numéro dépôt")
-    depot_nom: Optional[str] = Field(None, description="Nom du dépôt (DE_Intitule)")
-    depot_code: Optional[str] = Field(None, description="Code dépôt (DE_Code)")
-    depot_adresse: Optional[str] = Field(None, description="Adresse (DE_Adresse)")
-    depot_complement: Optional[str] = Field(None, description="Complément adresse")
-    depot_code_postal: Optional[str] = Field(None, description="Code postal")
-    depot_ville: Optional[str] = Field(None, description="Ville")
-    depot_contact: Optional[str] = Field(None, description="Contact")
-    depot_est_principal: Optional[bool] = Field(
-        None, description="Dépôt principal (DE_Principal)"
-    )
-    depot_categorie_compta: Optional[int] = Field(
-        None, description="Catégorie comptable"
-    )
-    depot_region: Optional[str] = Field(None, description="Région")
-    depot_pays: Optional[str] = Field(None, description="Pays")
-    depot_email: Optional[str] = Field(None, description="Email")
-    depot_telephone: Optional[str] = Field(None, description="Téléphone")
-    depot_fax: Optional[str] = Field(None, description="Fax")
-    depot_emplacement_defaut: Optional[str] = Field(
-        None, description="Emplacement par défaut"
-    )
-    depot_exclu: Optional[bool] = Field(None, description="Dépôt exclu")
-
-    emplacement_code: Optional[str] = Field(
-        None, description="Code emplacement (DP_Code)"
-    )
-    emplacement_libelle: Optional[str] = Field(
-        None, description="Libellé emplacement (DP_Intitule)"
-    )
-    emplacement_zone: Optional[str] = Field(None, description="Zone (DP_Zone)")
-    emplacement_type: Optional[int] = Field(
-        None, description="Type emplacement (DP_Type)"
-    )
-
-    class Config:
-        json_schema_extra = {
-            "example": {
-                "depot": "01",
-                "emplacement": "A1-01",
-                "qte_stockee": 100.0,
-                "qte_preparee": 5.0,
-                "depot_nom": "Dépôt principal",
-                "depot_ville": "Paris",
-                "emplacement_libelle": "Allée A, Niveau 1, Case 01",
-                "emplacement_zone": "Zone A",
-            }
-        }
-
-
-class GammeArticleModel(BaseModel):
-    """Gamme d'un article (taille, couleur, etc.)"""
-
-    numero_gamme: int = Field(..., description="Numéro de gamme (AG_No)")
-    enumere: str = Field(..., description="Code énuméré (EG_Enumere)")
-    type_gamme: int = Field(0, description="Type de gamme (AG_Type)")
-
-    date_creation: Optional[datetime] = Field(None, description="Date création")
-    date_modification: Optional[datetime] = Field(None, description="Date modification")
-
-    ligne: Optional[int] = Field(None, description="Ligne énuméré (EG_Ligne)")
-    borne_sup: Optional[float] = Field(
-        None, description="Borne supérieure (EG_BorneSup)"
-    )
-    gamme_nom: Optional[str] = Field(
-        None, description="Nom de la gamme (P_GAMME.G_Intitule)"
-    )
-
-    class Config:
-        json_schema_extra = {
-            "example": {
-                "numero_gamme": 1,
-                "enumere": "001",
-                "type_gamme": 0,
-                "ligne": 1,
-                "gamme_nom": "Taille",
-            }
-        }
-
-
-class TarifClientModel(BaseModel):
-    """Tarif spécifique pour un client ou catégorie tarifaire"""
-
-    categorie: int = Field(..., description="Catégorie tarifaire (AC_Categorie)")
-    client_num: Optional[str] = Field(None, description="Numéro client (CT_Num)")
-
-    prix_vente: float = Field(0.0, description="Prix de vente HT (AC_PrixVen)")
-    coefficient: float = Field(0.0, description="Coefficient (AC_Coef)")
-    prix_ttc: float = Field(0.0, description="Prix TTC (AC_PrixTTC)")
-    arrondi: float = Field(0.0, description="Arrondi (AC_Arrondi)")
-    qte_montant: float = Field(0.0, description="Quantité montant (AC_QteMont)")
-
-    enumere_gamme: int = Field(0, description="Énuméré gamme (EG_Champ)")
-    prix_devise: float = Field(0.0, description="Prix en devise (AC_PrixDev)")
-    devise: int = Field(0, description="Code devise (AC_Devise)")
-
-    remise: float = Field(0.0, description="Remise (AC_Remise)")
-    mode_calcul: int = Field(0, description="Mode de calcul (AC_Calcul)")
-    type_remise: int = Field(0, description="Type de remise (AC_TypeRem)")
-    ref_client: Optional[str] = Field(
-        None, description="Référence client (AC_RefClient)"
-    )
-
-    coef_nouveau: float = Field(0.0, description="Nouveau coefficient (AC_CoefNouv)")
-    prix_vente_nouveau: float = Field(
-        0.0, description="Nouveau prix vente (AC_PrixVenNouv)"
-    )
-    prix_devise_nouveau: float = Field(
-        0.0, description="Nouveau prix devise (AC_PrixDevNouv)"
-    )
-    remise_nouvelle: float = Field(0.0, description="Nouvelle remise (AC_RemiseNouv)")
-    date_application: Optional[datetime] = Field(
-        None, description="Date application (AC_DateApplication)"
-    )
-
-    date_creation: Optional[datetime] = Field(None, description="Date création")
-    date_modification: Optional[datetime] = Field(None, description="Date modification")
-
-    class Config:
-        json_schema_extra = {
-            "example": {
-                "categorie": 1,
-                "client_num": "CLI001",
-                "prix_vente": 110.00,
-                "coefficient": 1.294,
-                "remise": 12.0,
-            }
-        }
-
-
-class ComposantModel(BaseModel):
-    """Composant/Opération de nomenclature"""
-
-    operation: str = Field(..., description="Code opération (AT_Operation)")
-    code_ressource: Optional[str] = Field(None, description="Code ressource (RP_Code)")
-
-    temps: float = Field(0.0, description="Temps nécessaire (AT_Temps)")
-    type: int = Field(0, description="Type composant (AT_Type)")
-    description: Optional[str] = Field(None, description="Description (AT_Description)")
-    ordre: int = Field(0, description="Ordre d'exécution (AT_Ordre)")
-
-    gamme_1_comp: int = Field(0, description="Gamme 1 composant (AG_No1Comp)")
-    gamme_2_comp: int = Field(0, description="Gamme 2 composant (AG_No2Comp)")
-
-    type_ressource: int = Field(0, description="Type ressource (AT_TypeRessource)")
-    chevauche: int = Field(0, description="Chevauchement (AT_Chevauche)")
-    demarre: int = Field(0, description="Démarrage (AT_Demarre)")
-    operation_chevauche: Optional[str] = Field(
-        None, description="Opération chevauchée (AT_OperationChevauche)"
-    )
-    valeur_chevauche: float = Field(
-        0.0, description="Valeur chevauchement (AT_ValeurChevauche)"
-    )
-    type_chevauche: int = Field(0, description="Type chevauchement (AT_TypeChevauche)")
-
-    date_creation: Optional[datetime] = Field(None, description="Date création")
-    date_modification: Optional[datetime] = Field(None, description="Date modification")
-
-    class Config:
-        json_schema_extra = {
-            "example": {
-                "operation": "OP010",
-                "code_ressource": "RES01",
-                "temps": 15.5,
-                "description": "Montage pièce A",
-                "ordre": 10,
-            }
-        }
-
-
-class ComptaArticleModel(BaseModel):
-    """Comptabilité spécifique d'un article"""
-
-    champ: int = Field(..., description="Champ (ACP_Champ)")
-    compte_general: Optional[str] = Field(
-        None, description="Compte général (ACP_ComptaCPT_CompteG)"
-    )
-    compte_auxiliaire: Optional[str] = Field(
-        None, description="Compte auxiliaire (ACP_ComptaCPT_CompteA)"
-    )
-
-    taxe_1: Optional[str] = Field(None, description="Taxe 1 (ACP_ComptaCPT_Taxe1)")
-    taxe_2: Optional[str] = Field(None, description="Taxe 2 (ACP_ComptaCPT_Taxe2)")
-    taxe_3: Optional[str] = Field(None, description="Taxe 3 (ACP_ComptaCPT_Taxe3)")
-
-    taxe_date_1: Optional[datetime] = Field(None, description="Date taxe 1")
-    taxe_date_2: Optional[datetime] = Field(None, description="Date taxe 2")
-    taxe_date_3: Optional[datetime] = Field(None, description="Date taxe 3")
-
-    taxe_anc_1: Optional[str] = Field(None, description="Ancienne taxe 1")
-    taxe_anc_2: Optional[str] = Field(None, description="Ancienne taxe 2")
-    taxe_anc_3: Optional[str] = Field(None, description="Ancienne taxe 3")
-
-    type_facture: int = Field(0, description="Type de facture (ACP_TypeFacture)")
-
-    date_creation: Optional[datetime] = Field(None, description="Date création")
-    date_modification: Optional[datetime] = Field(None, description="Date modification")
-
-    class Config:
-        json_schema_extra = {
-            "example": {
-                "champ": 1,
-                "compte_general": "707100",
-                "taxe_1": "TVA20",
-                "type_facture": 0,
-            }
-        }
-
-
-class FournisseurArticleModel(BaseModel):
-    """Fournisseur d'un article"""
-
-    fournisseur_num: str = Field(..., description="Numéro fournisseur (CT_Num)")
-    ref_fournisseur: Optional[str] = Field(
-        None, description="Référence fournisseur (AF_RefFourniss)"
-    )
-
-    prix_achat: float = Field(0.0, description="Prix d'achat (AF_PrixAch)")
-    unite: Optional[str] = Field(None, description="Unité (AF_Unite)")
-    conversion: float = Field(0.0, description="Conversion (AF_Conversion)")
-
-    delai_appro: int = Field(0, description="Délai approvisionnement (AF_DelaiAppro)")
-    garantie: int = Field(0, description="Garantie (AF_Garantie)")
-    colisage: int = Field(0, description="Colisage (AF_Colisage)")
-    qte_mini: float = Field(0.0, description="Quantité minimum (AF_QteMini)")
-    qte_montant: float = Field(0.0, description="Quantité montant (AF_QteMont)")
-
-    enumere_gamme: int = Field(0, description="Énuméré gamme (EG_Champ)")
-    est_principal: bool = Field(
-        False, description="Fournisseur principal (AF_Principal)"
-    )
-
-    prix_devise: float = Field(0.0, description="Prix devise (AF_PrixDev)")
-    devise: int = Field(0, description="Code devise (AF_Devise)")
-    remise: float = Field(0.0, description="Remise (AF_Remise)")
-    conversion_devise: float = Field(0.0, description="Conversion devise (AF_ConvDiv)")
-    type_remise: int = Field(0, description="Type remise (AF_TypeRem)")
-
-    code_barre_fournisseur: Optional[str] = Field(
-        None, description="Code-barres fournisseur (AF_CodeBarre)"
-    )
-
-    prix_achat_nouveau: float = Field(
-        0.0, description="Nouveau prix achat (AF_PrixAchNouv)"
-    )
-    prix_devise_nouveau: float = Field(
-        0.0, description="Nouveau prix devise (AF_PrixDevNouv)"
-    )
-    remise_nouvelle: float = Field(0.0, description="Nouvelle remise (AF_RemiseNouv)")
-    date_application: Optional[datetime] = Field(
-        None, description="Date application (AF_DateApplication)"
-    )
-
-    date_creation: Optional[datetime] = Field(None, description="Date création")
-    date_modification: Optional[datetime] = Field(None, description="Date modification")
-
-    class Config:
-        json_schema_extra = {
-            "example": {
-                "fournisseur_num": "F001",
-                "ref_fournisseur": "REF-FOURN-001",
-                "prix_achat": 85.00,
-                "delai_appro": 15,
-                "est_principal": True,
-            }
-        }
-
-
-class ReferenceEnumereeModel(BaseModel):
-    """Référence énumérée (article avec gammes)"""
-
-    gamme_1: int = Field(0, description="Gamme 1 (AG_No1)")
-    gamme_2: int = Field(0, description="Gamme 2 (AG_No2)")
-    reference_enumeree: str = Field(..., description="Référence énumérée (AE_Ref)")
-
-    prix_achat: float = Field(0.0, description="Prix achat (AE_PrixAch)")
-    code_barre: Optional[str] = Field(None, description="Code-barres (AE_CodeBarre)")
-    prix_achat_nouveau: float = Field(
-        0.0, description="Nouveau prix achat (AE_PrixAchNouv)"
-    )
-    edi_code: Optional[str] = Field(None, description="Code EDI (AE_EdiCode)")
-    en_sommeil: bool = Field(False, description="En sommeil (AE_Sommeil)")
-
-    date_creation: Optional[datetime] = Field(None, description="Date création")
-    date_modification: Optional[datetime] = Field(None, description="Date modification")
-
-    class Config:
-        json_schema_extra = {
-            "example": {
-                "gamme_1": 1,
-                "gamme_2": 3,
-                "reference_enumeree": "ART001-T1-C3",
-                "prix_achat": 85.00,
-            }
-        }
-
-
-class MediaArticleModel(BaseModel):
-    """Média attaché à un article (photo, document, etc.)"""
-
-    commentaire: Optional[str] = Field(None, description="Commentaire (ME_Commentaire)")
-    fichier: Optional[str] = Field(None, description="Nom fichier (ME_Fichier)")
-    type_mime: Optional[str] = Field(None, description="Type MIME (ME_TypeMIME)")
-    origine: int = Field(0, description="Origine (ME_Origine)")
-    ged_id: Optional[str] = Field(None, description="ID GED (ME_GedId)")
-
-    date_creation: Optional[datetime] = Field(None, description="Date création")
-    date_modification: Optional[datetime] = Field(None, description="Date modification")
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
json_schema_extra = {
|
|
||||||
"example": {
|
|
||||||
"commentaire": "Photo produit principale",
|
|
||||||
"fichier": "ART001_photo1.jpg",
|
|
||||||
"type_mime": "image/jpeg",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class PrixGammeModel(BaseModel):
|
|
||||||
"""Prix spécifique par combinaison de gammes"""
|
|
||||||
|
|
||||||
gamme_1: int = Field(0, description="Gamme 1 (AG_No1)")
|
|
||||||
gamme_2: int = Field(0, description="Gamme 2 (AG_No2)")
|
|
||||||
prix_net: float = Field(0.0, description="Prix net (AR_PUNet)")
|
|
||||||
cout_standard: float = Field(0.0, description="Coût standard (AR_CoutStd)")
|
|
||||||
|
|
||||||
date_creation: Optional[datetime] = Field(None, description="Date création")
|
|
||||||
date_modification: Optional[datetime] = Field(None, description="Date modification")
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
json_schema_extra = {
|
|
||||||
"example": {
|
|
||||||
"gamme_1": 1,
|
|
||||||
"gamme_2": 3,
|
|
||||||
"prix_net": 125.50,
|
|
||||||
"cout_standard": 82.30,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
- class ArticleResponse(BaseModel):
    """Article complet avec tous les enrichissements disponibles"""

    reference: str = Field(..., description="Référence article (AR_Ref)")

@@ -432,7 +76,6 @@ class ArticleResponse(BaseModel):
    )
    nb_emplacements: int = Field(0, description="Nombre d'emplacements")

-     # Champs énumérés normalisés
    suivi_stock: Optional[int] = Field(
        None,
        description="Type de suivi de stock (AR_SuiviStock): 0=Aucun, 1=CMUP, 2=FIFO/LIFO, 3=Sérialisé",

@@ -712,7 +355,20 @@ class ArticleResponse(BaseModel):
    )
    exclure: Optional[bool] = Field(None, description="Exclure de certains traitements")

-     # ===== VALIDATEURS =====
+     @field_validator("fournisseur_principal", mode="before")
+     @classmethod
+     def convert_fournisseur_principal(cls, v):
+         if v in (None, "", " ", " "):
+             return None
+         if isinstance(v, str):
+             v = v.strip()
+             if not v:
+                 return None
+             try:
+                 return int(v)
+             except (ValueError, TypeError):
+                 return None
+         return v

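The validator added above runs in mode="before", so it sees the raw value ahead of type coercion and can normalise empty or whitespace-only strings to None and numeric strings to int. A minimal sketch of the same pattern on a standalone model, assuming Pydantic v2 (illustrative, not part of the changeset; the model name is hypothetical):

from typing import Optional
from pydantic import BaseModel, field_validator

class FournisseurPrincipalDemo(BaseModel):
    # Illustrative stand-in for the fournisseur_principal field of ArticleResponse
    fournisseur_principal: Optional[int] = None

    @field_validator("fournisseur_principal", mode="before")
    @classmethod
    def convert_fournisseur_principal(cls, v):
        # Same coercion logic as the validator added in the diff above
        if v in (None, "", " "):
            return None
        if isinstance(v, str):
            v = v.strip()
            if not v:
                return None
            try:
                return int(v)
            except (ValueError, TypeError):
                return None
        return v

print(FournisseurPrincipalDemo(fournisseur_principal="  12 ").fournisseur_principal)  # 12
print(FournisseurPrincipalDemo(fournisseur_principal="   ").fournisseur_principal)    # None
print(FournisseurPrincipalDemo(fournisseur_principal="F001").fournisseur_principal)   # None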
    @field_validator(
        "unite_vente",

@@ -763,11 +419,11 @@ class ArticleResponse(BaseModel):
        }


- class ArticleListResponse(BaseModel):
+ class ArticleList(BaseModel):
    """Réponse pour une liste d'articles"""

    total: int = Field(..., description="Nombre total d'articles")
-     articles: List[ArticleResponse] = Field(..., description="Liste des articles")
+     articles: List[Article] = Field(..., description="Liste des articles")
    filtre_applique: Optional[str] = Field(
        None, description="Filtre de recherche appliqué"
    )

@@ -780,37 +436,85 @@ class ArticleListResponse(BaseModel):
    )


- class ArticleCreateRequest(BaseModel):
+ class ArticleCreate(BaseModel):
-     """Schéma pour création d'article"""

    reference: str = Field(..., max_length=18, description="Référence article")
    designation: str = Field(..., max_length=69, description="Désignation")

    famille: Optional[str] = Field(None, max_length=18, description="Code famille")

    prix_vente: Optional[float] = Field(None, ge=0, description="Prix vente HT")
    prix_achat: Optional[float] = Field(None, ge=0, description="Prix achat HT")
+     coef: Optional[float] = Field(None, ge=0, description="Coefficient")

    stock_reel: Optional[float] = Field(None, ge=0, description="Stock initial")
    stock_mini: Optional[float] = Field(None, ge=0, description="Stock minimum")
-     code_ean: Optional[str] = Field(None, max_length=13, description="Code-barres")
+     stock_maxi: Optional[float] = Field(None, ge=0, description="Stock maximum")
-     unite_vente: Optional[str] = Field("UN", max_length=4, description="Unité")
-     tva_code: Optional[str] = Field(None, max_length=5, description="Code TVA")
-     description: Optional[str] = Field(None, description="Description")

+     code_ean: Optional[str] = Field(None, max_length=13, description="Code-barres EAN")
+     unite_vente: Optional[str] = Field("UN", max_length=10, description="Unité vente")
+     tva_code: Optional[str] = Field(None, max_length=10, description="Code TVA")
+     code_fiscal: Optional[str] = Field(None, max_length=10, description="Code fiscal")

- class ArticleUpdateRequest(BaseModel):
+     description: Optional[str] = Field(
-     """Schéma pour modification d'article"""
+         None, max_length=255, description="Description/Commentaire"

-     designation: Optional[str] = Field(None, max_length=69)
-     prix_vente: Optional[float] = Field(None, ge=0)
-     prix_achat: Optional[float] = Field(None, ge=0)
-     stock_reel: Optional[float] = Field(
-         None, ge=0, description="Critique pour erreur 2881"
    )
-     stock_mini: Optional[float] = Field(None, ge=0)
-     code_ean: Optional[str] = Field(None, max_length=13)
+     pays: Optional[str] = Field(None, max_length=3, description="Pays d'origine")
-     description: Optional[str] = Field(None)
+     garantie: Optional[int] = Field(None, ge=0, description="Garantie en mois")
+     delai: Optional[int] = Field(None, ge=0, description="Délai livraison jours")

+     poids_net: Optional[float] = Field(None, ge=0, description="Poids net kg")
+     poids_brut: Optional[float] = Field(None, ge=0, description="Poids brut kg")

+     stat_01: Optional[str] = Field(None, max_length=20, description="Statistique 1")
+     stat_02: Optional[str] = Field(None, max_length=20, description="Statistique 2")
+     stat_03: Optional[str] = Field(None, max_length=20, description="Statistique 3")
+     stat_04: Optional[str] = Field(None, max_length=20, description="Statistique 4")
+     stat_05: Optional[str] = Field(None, max_length=20, description="Statistique 5")

+     soumis_escompte: Optional[bool] = Field(None, description="Soumis à escompte")
+     publie: Optional[bool] = Field(None, description="Publié web/catalogue")
+     en_sommeil: Optional[bool] = Field(None, description="Article en sommeil")


- class MouvementStockLigneRequest(BaseModel):
+ class ArticleUpdate(BaseModel):
+     designation: Optional[str] = Field(None, max_length=69, description="Désignation")

+     famille: Optional[str] = Field(None, max_length=18, description="Code famille")

+     prix_vente: Optional[float] = Field(None, ge=0, description="Prix vente HT")
+     prix_achat: Optional[float] = Field(None, ge=0, description="Prix achat HT")
+     coef: Optional[float] = Field(None, ge=0, description="Coefficient")

+     stock_reel: Optional[float] = Field(None, ge=0, description="Stock réel")
+     stock_mini: Optional[float] = Field(None, ge=0, description="Stock minimum")
+     stock_maxi: Optional[float] = Field(None, ge=0, description="Stock maximum")

+     code_ean: Optional[str] = Field(None, max_length=13, description="Code-barres EAN")
+     unite_vente: Optional[str] = Field(None, max_length=10, description="Unité vente")
+     code_fiscal: Optional[str] = Field(None, max_length=10, description="Code fiscal")

+     description: Optional[str] = Field(None, max_length=255, description="Description")

+     pays: Optional[str] = Field(None, max_length=3, description="Pays d'origine")
+     garantie: Optional[int] = Field(None, ge=0, description="Garantie en mois")
+     delai: Optional[int] = Field(None, ge=0, description="Délai livraison jours")

+     poids_net: Optional[float] = Field(None, ge=0, description="Poids net kg")
+     poids_brut: Optional[float] = Field(None, ge=0, description="Poids brut kg")

+     stat_01: Optional[str] = Field(None, max_length=20, description="Statistique 1")
+     stat_02: Optional[str] = Field(None, max_length=20, description="Statistique 2")
+     stat_03: Optional[str] = Field(None, max_length=20, description="Statistique 3")
+     stat_04: Optional[str] = Field(None, max_length=20, description="Statistique 4")
+     stat_05: Optional[str] = Field(None, max_length=20, description="Statistique 5")

+     soumis_escompte: Optional[bool] = Field(None, description="Soumis à escompte")
+     publie: Optional[bool] = Field(None, description="Publié web/catalogue")
+     en_sommeil: Optional[bool] = Field(None, description="Article en sommeil")

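Every field of the new ArticleUpdate is optional, which is the usual shape for a PATCH body: only the keys the caller actually sent should be forwarded to Sage. A hedged sketch of how such a payload is typically reduced to the changed fields, assuming Pydantic v2 and that ArticleUpdate as defined above is importable (illustrative, not part of the changeset):

# Sketch: the ArticleUpdate model defined in the diff above is assumed importable here.
payload = ArticleUpdate(designation="Nouvelle désignation", prix_vente=120.0)

# exclude_unset drops every field the client did not send,
# so stock_reel, code_ean, etc. are not overwritten with None.
changes = payload.model_dump(exclude_unset=True)
print(changes)  # {'designation': 'Nouvelle désignation', 'prix_vente': 120.0}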
+ class MouvementStockLigne(BaseModel):
    article_ref: str = Field(..., description="Référence de l'article")
    quantite: float = Field(..., gt=0, description="Quantité (>0)")
    depot_code: Optional[str] = Field(None, description="Code du dépôt (ex: '01')")

@@ -864,7 +568,7 @@ class MouvementStockLigneRequest(BaseModel):
        return v


- class EntreeStockRequest(BaseModel):
+ class EntreeStock(BaseModel):
    """Création d'un bon d'entrée en stock"""

    date_entree: Optional[date] = Field(

@@ -874,7 +578,7 @@ class EntreeStockRequest(BaseModel):
    depot_code: Optional[str] = Field(
        None, description="Dépôt principal (si applicable)"
    )
-     lignes: List[MouvementStockLigneRequest] = Field(
+     lignes: List[MouvementStockLigne] = Field(
        ..., min_items=1, description="Lignes du mouvement"
    )
    commentaire: Optional[str] = Field(None, description="Commentaire général")

@@ -899,7 +603,7 @@ class EntreeStockRequest(BaseModel):
        }

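An entry document is built from one MouvementStockLigne per article; quantite must be strictly positive (gt=0) and the dépôt can be given per line or once at document level. A minimal construction sketch using only the fields visible in this hunk (the remaining EntreeStock fields are outside the shown context, so this is an assumption-laden illustration, not part of the changeset):

from datetime import date

# Assumes the EntreeStock and MouvementStockLigne models above are importable.
entree = EntreeStock(
    date_entree=date(2024, 1, 15),
    depot_code="01",
    lignes=[
        MouvementStockLigne(article_ref="ART001", quantite=10),
        MouvementStockLigne(article_ref="ART002", quantite=2.5, depot_code="02"),
    ],
    commentaire="Réception fournisseur F001",
)
print(entree.model_dump(mode="json"))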
- class SortieStockRequest(BaseModel):
+ class SortieStock(BaseModel):
    """Création d'un bon de sortie de stock"""

    date_sortie: Optional[date] = Field(

@@ -909,7 +613,7 @@ class SortieStockRequest(BaseModel):
    depot_code: Optional[str] = Field(
        None, description="Dépôt principal (si applicable)"
    )
-     lignes: List[MouvementStockLigneRequest] = Field(
+     lignes: List[MouvementStockLigne] = Field(
        ..., min_items=1, description="Lignes du mouvement"
    )
    commentaire: Optional[str] = Field(None, description="Commentaire général")

@@ -933,7 +637,7 @@ class SortieStockRequest(BaseModel):
        }


- class MouvementStockResponse(BaseModel):
+ class MouvementStock(BaseModel):
    """Réponse pour un mouvement de stock"""

    article_ref: str = Field(..., description="Numéro d'article")

@@ -2,7 +2,7 @@ from pydantic import BaseModel, Field
from typing import Optional


- class FamilleCreateRequest(BaseModel):
+ class FamilleCreate(BaseModel):
    """Schéma pour création de famille d'articles"""

    code: str = Field(..., max_length=18, description="Code famille (max 18 car)")

@@ -27,7 +27,7 @@ class FamilleCreateRequest(BaseModel):
        }


- class FamilleResponse(BaseModel):
+ class Familles(BaseModel):
    """Modèle complet d'une famille avec données comptables et fournisseur"""

    code: str = Field(..., description="Code famille")

@@ -236,10 +236,10 @@ class FamilleResponse(BaseModel):
        }


- class FamilleListResponse(BaseModel):
+ class FamilleList(BaseModel):
    """Réponse pour la liste des familles"""

-     familles: list[FamilleResponse]
+     familles: list[Familles]
    total: int
    filtre: Optional[str] = None
    inclure_totaux: bool = True

@@ -1,30 +1,22 @@
- from pydantic import BaseModel, Field, field_validator
+ from pydantic import BaseModel, Field
from typing import List, Optional
- from datetime import date
+ from datetime import datetime

+ from schemas.documents.ligne_document import LigneDocument

- class LigneAvoir(BaseModel):
+ class AvoirCreate(BaseModel):
-     article_code: str
-     quantite: float
-     remise_pourcentage: Optional[float] = 0.0

-     @field_validator("article_code", mode="before")
-     def strip_insecables(cls, v):
-         return v.replace("\xa0", "").strip()


- class AvoirCreateRequest(BaseModel):
    client_id: str
-     date_avoir: Optional[date] = None
+     date_avoir: Optional[datetime] = None
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
-     lignes: List[LigneAvoir]
+     lignes: List[LigneDocument]
    reference: Optional[str] = None

    class Config:
        json_schema_extra = {
            "example": {
                "client_id": "CLI000001",
-                 "date_avoir": "2024-01-15",
+                 "date_avoir": "2024-01-15T10:00:00",
+                 "date_livraison": "2024-01-15T10:00:00",
                "reference": "AV-EXT-001",
                "lignes": [
                    {

@@ -38,18 +30,18 @@ class AvoirCreateRequest(BaseModel):
        }


- class AvoirUpdateRequest(BaseModel):
+ class AvoirUpdate(BaseModel):
-     date_avoir: Optional[date] = None
+     date_avoir: Optional[datetime] = None
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
-     lignes: Optional[List[LigneAvoir]] = None
+     lignes: Optional[List[LigneDocument]] = None
    statut: Optional[int] = Field(None, ge=0, le=6)
    reference: Optional[str] = None

    class Config:
        json_schema_extra = {
            "example": {
-                 "date_avoir": "2024-01-15",
+                 "date_avoir": "2024-01-15T10:00:00",
-                 "date_livraison": "2024-01-15",
+                 "date_livraison": "2024-01-15T10:00:00",
                "reference": "AV-EXT-001",
                "lignes": [
                    {

@@ -1,30 +1,21 @@
- from pydantic import BaseModel, Field, field_validator
+ from pydantic import BaseModel, Field
from typing import List, Optional
- from datetime import date
+ from datetime import datetime

+ from schemas.documents.ligne_document import LigneDocument

- class LigneCommande(BaseModel):
+ class CommandeCreate(BaseModel):
-     article_code: str
-     quantite: float
-     remise_pourcentage: Optional[float] = 0.0

-     @field_validator("article_code", mode="before")
-     def strip_insecables(cls, v):
-         return v.replace("\xa0", "").strip()


- class CommandeCreateRequest(BaseModel):
    client_id: str
-     date_commande: Optional[date] = None
+     date_commande: Optional[datetime] = None
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
-     lignes: List[LigneCommande]
+     lignes: List[LigneDocument]
    reference: Optional[str] = None

    class Config:
        json_schema_extra = {
            "example": {
                "client_id": "CLI000001",
-                 "date_commande": "2024-01-15",
+                 "date_commande": "2024-01-15T10:00:00",
                "reference": "CMD-EXT-001",
                "lignes": [
                    {

@@ -38,18 +29,18 @@ class CommandeCreateRequest(BaseModel):
        }


- class CommandeUpdateRequest(BaseModel):
+ class CommandeUpdate(BaseModel):
-     date_commande: Optional[date] = None
+     date_commande: Optional[datetime] = None
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
-     lignes: Optional[List[LigneCommande]] = None
+     lignes: Optional[List[LigneDocument]] = None
    statut: Optional[int] = Field(None, ge=0, le=6)
    reference: Optional[str] = None

    class Config:
        json_schema_extra = {
            "example": {
-                 "date_commande": "2024-01-15",
+                 "date_commande": "2024-01-15T10:00:00",
-                 "date_livraison": "2024-01-15",
+                 "date_livraison": "2024-01-15T10:00:00",
                "reference": "CMD-EXT-001",
                "lignes": [
                    {

@@ -1,27 +1,19 @@
- from pydantic import BaseModel, Field, field_validator
+ from pydantic import BaseModel, Field
from typing import List, Optional
- from datetime import date
+ from datetime import datetime

+ from schemas.documents.ligne_document import LigneDocument
- class LigneDevis(BaseModel):
-     article_code: str
-     quantite: float
-     remise_pourcentage: Optional[float] = 0.0

-     @field_validator("article_code", mode="before")
-     def strip_insecables(cls, v):
-         return v.replace("\xa0", "").strip()


class DevisRequest(BaseModel):
    client_id: str
-     date_devis: Optional[date] = None
+     date_devis: Optional[datetime] = None
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
    reference: Optional[str] = None
-     lignes: List[LigneDevis]
+     lignes: List[LigneDocument]


- class DevisResponse(BaseModel):
+ class Devis(BaseModel):
    id: str
    client_id: str
    date_devis: str

@@ -30,20 +22,20 @@ class DevisResponse(BaseModel):
    nb_lignes: int


- class DevisUpdateRequest(BaseModel):
+ class DevisUpdate(BaseModel):
    """Modèle pour modification d'un devis existant"""

-     date_devis: Optional[date] = None
+     date_devis: Optional[datetime] = None
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
+     lignes: Optional[List[LigneDocument]] = None
    reference: Optional[str] = None
-     lignes: Optional[List[LigneDevis]] = None
    statut: Optional[int] = Field(None, ge=0, le=6)

    class Config:
        json_schema_extra = {
            "example": {
-                 "date_devis": "2024-01-15",
+                 "date_devis": "2024-01-15T10:00:00",
-                 "date_livraison": "2024-01-15",
+                 "date_livraison": "2024-01-15T10:00:00",
                "reference": "DEV-001",
                "lignes": [
                    {

@@ -58,6 +50,6 @@ class DevisUpdateRequest(BaseModel):
        }


- class RelanceDevisRequest(BaseModel):
+ class RelanceDevis(BaseModel):
    doc_id: str
    message_personnalise: Optional[str] = None

@@ -1,4 +1,4 @@
- from config import settings
+ from config.config import settings
from enum import Enum


@@ -13,7 +13,7 @@ class StatutEmail(str, Enum):
    BOUNCE = "BOUNCE"


- class EmailEnvoiRequest(BaseModel):
+ class EmailEnvoi(BaseModel):
    destinataire: EmailStr
    cc: Optional[List[EmailStr]] = []
    cci: Optional[List[EmailStr]] = []

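EmailEnvoi types its recipient fields as EmailStr, so destinataire, cc and cci are syntax-checked at model construction (this relies on Pydantic's optional email-validator dependency). A self-contained sketch of that behaviour with a stand-in model, since the full EmailEnvoi field list is not shown in this hunk (illustrative, not part of the changeset):

from typing import List, Optional
from pydantic import BaseModel, EmailStr, ValidationError

class EmailDemo(BaseModel):
    # Stand-in carrying the same recipient field types as EmailEnvoi
    destinataire: EmailStr
    cc: Optional[List[EmailStr]] = []

print(EmailDemo(destinataire="client@example.com", cc=["compta@example.com"]))

try:
    EmailDemo(destinataire="pas-un-email")
except ValidationError as exc:
    print(exc.error_count(), "erreur de validation")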
@@ -1,30 +1,22 @@
- from pydantic import BaseModel, Field, field_validator
+ from pydantic import BaseModel, Field
from typing import List, Optional
- from datetime import date
+ from datetime import datetime

+ from schemas.documents.ligne_document import LigneDocument

- class LigneFacture(BaseModel):
+ class FactureCreate(BaseModel):
-     article_code: str
-     quantite: float
-     remise_pourcentage: Optional[float] = 0.0

-     @field_validator("article_code", mode="before")
-     def strip_insecables(cls, v):
-         return v.replace("\xa0", "").strip()


- class FactureCreateRequest(BaseModel):
    client_id: str
-     date_facture: Optional[date] = None
+     date_facture: Optional[datetime] = None
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
-     lignes: List[LigneFacture]
+     lignes: List[LigneDocument]
    reference: Optional[str] = None

    class Config:
        json_schema_extra = {
            "example": {
                "client_id": "CLI000001",
-                 "date_facture": "2024-01-15",
+                 "date_facture": "2024-01-15T10:00:00",
                "reference": "FA-EXT-001",
                "lignes": [
                    {

@@ -38,18 +30,18 @@ class FactureCreateRequest(BaseModel):
        }


- class FactureUpdateRequest(BaseModel):
+ class FactureUpdate(BaseModel):
-     date_facture: Optional[date] = None
+     date_facture: Optional[datetime] = None
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
-     lignes: Optional[List[LigneFacture]] = None
+     lignes: Optional[List[LigneDocument]] = None
    statut: Optional[int] = Field(None, ge=0, le=6)
    reference: Optional[str] = None

    class Config:
        json_schema_extra = {
            "example": {
-                 "date_facture": "2024-01-15",
+                 "date_facture": "2024-01-15T10:00:00",
-                 "date_livraison": "2024-01-15",
+                 "date_livraison": "2024-01-15T10:00:00",
                "lignes": [
                    {
                        "article_code": "ART001",

25  schemas/documents/ligne_document.py  Normal file

@@ -0,0 +1,25 @@
from pydantic import BaseModel, field_validator
from typing import Optional


class LigneDocument(BaseModel):
    article_code: str
    quantite: float
    prix_unitaire_ht: Optional[float] = None
    remise_pourcentage: Optional[float] = 0.0

    @field_validator("article_code", mode="before")
    def strip_insecables(cls, v):
        return v.replace("\xa0", "").strip()

    @field_validator("quantite")
    def validate_quantite(cls, v):
        if v <= 0:
            raise ValueError("La quantité doit être positive")
        return v

    @field_validator("remise_pourcentage")
    def validate_remise(cls, v):
        if v is not None and (v < 0 or v > 100):
            raise ValueError("La remise doit être entre 0 et 100")
        return v

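The new LigneDocument centralises the line-level rules that were previously duplicated in each document schema: non-breaking spaces are stripped from article_code, quantite must be strictly positive, and remise_pourcentage must stay within 0 and 100. A quick check of that behaviour, assuming Pydantic v2 and that the new module is importable (illustrative, not part of the changeset):

from pydantic import ValidationError
from schemas.documents.ligne_document import LigneDocument

ligne = LigneDocument(article_code="ART\xa0001 ", quantite=2, remise_pourcentage=10)
print(ligne.article_code)  # "ART001" (nbsp removed, ends trimmed)

try:
    LigneDocument(article_code="ART001", quantite=0)
except ValidationError:
    print("quantité <= 0 rejetée")

try:
    LigneDocument(article_code="ART001", quantite=1, remise_pourcentage=150)
except ValidationError:
    print("remise > 100 rejetée")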
@@ -1,30 +1,22 @@
- from pydantic import BaseModel, Field, field_validator
+ from pydantic import BaseModel, Field
from typing import List, Optional
- from datetime import date
+ from datetime import datetime

+ from schemas.documents.ligne_document import LigneDocument

- class LigneLivraison(BaseModel):
+ class LivraisonCreate(BaseModel):
-     article_code: str
-     quantite: float
-     remise_pourcentage: Optional[float] = 0.0

-     @field_validator("article_code", mode="before")
-     def strip_insecables(cls, v):
-         return v.replace("\xa0", "").strip()


- class LivraisonCreateRequest(BaseModel):
    client_id: str
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
-     date_livraison_prevue: Optional[date] = None
+     date_livraison_prevue: Optional[datetime] = None
-     lignes: List[LigneLivraison]
+     lignes: List[LigneDocument]
    reference: Optional[str] = None

    class Config:
        json_schema_extra = {
            "example": {
                "client_id": "CLI000001",
-                 "date_livraison": "2024-01-15",
+                 "date_livraison": "2024-01-15T10:00:00",
                "reference": "BL-EXT-001",
                "lignes": [
                    {

@@ -38,18 +30,18 @@ class LivraisonCreateRequest(BaseModel):
        }


- class LivraisonUpdateRequest(BaseModel):
+ class LivraisonUpdate(BaseModel):
-     date_livraison: Optional[date] = None
+     date_livraison: Optional[datetime] = None
-     date_livraison_prevue: Optional[date] = None
+     date_livraison_prevue: Optional[datetime] = None
-     lignes: Optional[List[LigneLivraison]] = None
+     lignes: Optional[List[LigneDocument]] = None
    statut: Optional[int] = Field(None, ge=0, le=6)
    reference: Optional[str] = None

    class Config:
        json_schema_extra = {
            "example": {
-                 "date_livraison": "2024-01-15",
+                 "date_livraison": "2024-01-15T10:00:00",
-                 "date_livraison_prevue": "2024-01-15",
+                 "date_livraison_prevue": "2024-01-15T10:00:00",
                "reference": "BL-EXT-001",
                "lignes": [
                    {

109  schemas/documents/reglements.py  Normal file

@@ -0,0 +1,109 @@
from pydantic import BaseModel, Field, field_validator
from typing import List, Optional
import logging
from decimal import Decimal
from datetime import date

logger = logging.getLogger(__name__)


class ReglementFactureCreate(BaseModel):
    """Requête de règlement d'une facture côté VPS"""

    montant: Decimal = Field(..., gt=0, description="Montant à régler")
    devise_code: Optional[int] = Field(0, description="Code devise (0=EUR par défaut)")
    cours_devise: Optional[Decimal] = Field(1.0, description="Cours de la devise")

    mode_reglement: int = Field(
        ..., ge=0, description="Code mode règlement depuis /reglements/modes"
    )
    code_journal: str = Field(
        ..., min_length=1, description="Code journal depuis /journaux/tresorerie"
    )

    date_reglement: Optional[date] = Field(
        None, description="Date du règlement (défaut: aujourd'hui)"
    )
    date_echeance: Optional[date] = Field(None, description="Date d'échéance")

    reference: Optional[str] = Field(
        "", max_length=17, description="Référence pièce règlement"
    )
    libelle: Optional[str] = Field(
        "", max_length=35, description="Libellé du règlement"
    )

    tva_encaissement: Optional[bool] = Field(
        False, description="Appliquer TVA sur encaissement"
    )
    compte_general: Optional[str] = Field(None)

    @field_validator("montant")
    def validate_montant(cls, v):
        if v <= 0:
            raise ValueError("Le montant doit être positif")
        return round(v, 2)

    class Config:
        json_schema_extra = {
            "example": {
                "montant": 375.12,
                "mode_reglement": 2,
                "reference": "CHQ-001",
                "code_journal": "BEU",
                "date_reglement": "2024-01-01",
                "libelle": "Règlement multiple",
                "tva_encaissement": False,
                "devise_code": 0,
                "cours_devise": 1.0,
                "date_echeance": "2024-01-31",
            }
        }


class ReglementMultipleCreate(BaseModel):
    """Requête de règlement multiple côté VPS"""

    client_id: str = Field(..., description="Code client")
    montant_total: Decimal = Field(..., gt=0)

    devise_code: Optional[int] = Field(0)
    cours_devise: Optional[Decimal] = Field(1.0)
    mode_reglement: int = Field(...)
    code_journal: str = Field(...)
    date_reglement: Optional[date] = None
    reference: Optional[str] = Field("")
    libelle: Optional[str] = Field("")
    tva_encaissement: Optional[bool] = Field(False)

    numeros_factures: Optional[List[str]] = Field(
        None, description="Si vide, règle les plus anciennes en premier"
    )

    @field_validator("client_id", mode="before")
    def strip_client_id(cls, v):
        return v.replace("\xa0", "").strip() if v else v

    @field_validator("montant_total")
    def validate_montant(cls, v):
        if v <= 0:
            raise ValueError("Le montant doit être positif")
        return round(v, 2)

    class Config:
        json_schema_extra = {
            "example": {
                "client_id": "CLI000001",
                "montant_total": 1000.00,
                "mode_reglement": 2,
                "numeros_factures": ["FA00081", "FA00082"],
                "reference": "CHQ-001",
                "code_journal": "BEU",
                "date_reglement": "2024-01-01",
                "libelle": "Règlement multiple",
                "tva_encaissement": False,
                "devise_code": 0,
                "cours_devise": 1.0,
                "date_echeance": "2024-01-31",
            }
        }

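ReglementFactureCreate.montant is a Decimal constrained with gt=0, and validate_montant additionally rounds it to two decimal places, so whatever the client sends is normalised to cents. A short sketch of that behaviour, assuming the new module is importable from the path added above and that only montant, mode_reglement and code_journal are required (illustrative, not part of the changeset):

from decimal import Decimal
from pydantic import ValidationError
from schemas.documents.reglements import ReglementFactureCreate

r = ReglementFactureCreate(
    montant=Decimal("375.1249"),
    mode_reglement=2,
    code_journal="BEU",
)
print(r.montant)  # Decimal('375.12'), rounded by validate_montant

try:
    ReglementFactureCreate(montant=Decimal("0"), mode_reglement=2, code_journal="BEU")
except ValidationError:
    print("montant <= 0 rejeté")  # gt=0 rejects it before the validator runs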
@@ -1,6 +1,12 @@
from pydantic import BaseModel, EmailStr
from enum import Enum
from schemas.documents.documents import TypeDocument
+ from database import (
+     SageDocumentType,
+ )

+ from typing import List, Optional
+ from datetime import datetime


class StatutSignature(str, Enum):

@@ -11,8 +17,54 @@ class StatutSignature(str, Enum):
    EXPIRE = "EXPIRE"


- class SignatureRequest(BaseModel):
+ class Signature(BaseModel):
    doc_id: str
    type_doc: TypeDocument
    email_signataire: EmailStr
    nom_signataire: str


+ class CreateSignatureRequest(BaseModel):
+     """Demande de création d'une signature"""

+     sage_document_id: str
+     sage_document_type: SageDocumentType
+     signer_email: EmailStr
+     signer_name: str
+     document_name: Optional[str] = None


+ class TransactionResponse(BaseModel):
+     """Réponse détaillée d'une transaction"""

+     id: str
+     transaction_id: str
+     sage_document_id: str
+     sage_document_type: str
+     universign_status: str
+     local_status: str
+     local_status_label: str
+     signer_url: Optional[str]
+     document_url: Optional[str]
+     created_at: datetime
+     sent_at: Optional[datetime]
+     signed_at: Optional[datetime]
+     last_synced_at: Optional[datetime]
+     needs_sync: bool
+     signers: List[dict]

+     signed_document_available: bool = False
+     signed_document_downloaded_at: Optional[datetime] = None
+     signed_document_size_kb: Optional[float] = None


+ class SyncStatsResponse(BaseModel):
+     """Statistiques de synchronisation"""

+     total_transactions: int
+     pending_sync: int
+     signed: int
+     in_progress: int
+     refused: int
+     expired: int
+     last_sync_at: Optional[datetime]

@@ -10,7 +10,6 @@ class GatewayHealthStatus(str, Enum):
    UNKNOWN = "unknown"


- # === CREATE ===
class SageGatewayCreate(BaseModel):

    name: str = Field(

@@ -71,7 +70,6 @@ class SageGatewayUpdate(BaseModel):
        return v.rstrip("/") if v else v


- # === RESPONSE ===
class SageGatewayResponse(BaseModel):

    id: str

@@ -110,7 +108,7 @@ class SageGatewayResponse(BaseModel):
        from_attributes = True


- class SageGatewayListResponse(BaseModel):
+ class SageGatewayList(BaseModel):

    items: List[SageGatewayResponse]
    total: int

@@ -132,7 +130,7 @@ class SageGatewayActivateRequest(BaseModel):
    gateway_id: str


- class SageGatewayTestRequest(BaseModel):
+ class SageGatewayTest(BaseModel):
    gateway_url: str
    gateway_token: str

24  schemas/society/enterprise.py  Normal file

@@ -0,0 +1,24 @@
from pydantic import BaseModel, Field
from typing import Optional, List


class EntrepriseSearch(BaseModel):
    """Modèle de réponse pour une entreprise trouvée"""

    company_name: str = Field(..., description="Raison sociale complète")
    siren: str = Field(..., description="Numéro SIREN (9 chiffres)")
    vat_number: str = Field(..., description="Numéro de TVA intracommunautaire")
    address: str = Field(..., description="Adresse complète du siège")
    naf_code: str = Field(..., description="Code NAF/APE")
    is_active: bool = Field(..., description="True si entreprise active")
    siret_siege: Optional[str] = Field(None, description="SIRET du siège")
    code_postal: Optional[str] = None
    ville: Optional[str] = None


class EntrepriseSearchResponse(BaseModel):
    """Réponse globale de la recherche"""

    total_results: int
    results: List[EntrepriseSearch]
    query: str

46  schemas/society/societe.py  Normal file

@@ -0,0 +1,46 @@
from pydantic import BaseModel
from typing import Optional, List


class ExerciceComptable(BaseModel):
    numero: int
    debut: str
    fin: Optional[str] = None


class SocieteInfo(BaseModel):
    raison_sociale: str
    numero_dossier: str
    siret: Optional[str] = None
    code_ape: Optional[str] = None
    numero_tva: Optional[str] = None

    adresse: Optional[str] = None
    complement_adresse: Optional[str] = None
    code_postal: Optional[str] = None
    ville: Optional[str] = None
    code_region: Optional[str] = None
    pays: Optional[str] = None

    telephone: Optional[str] = None
    telecopie: Optional[str] = None
    email: Optional[str] = None
    email_societe: Optional[str] = None
    site_web: Optional[str] = None

    capital: float = 0.0
    forme_juridique: Optional[str] = None

    exercices: List[ExerciceComptable] = []

    devise_compte: int = 0
    devise_equivalent: int = 0
    longueur_compte_general: int = 0
    longueur_compte_analytique: int = 0
    regime_fec: int = 0

    base_modele: Optional[str] = None
    marqueur: int = 0

    logo_base64: Optional[str] = None
    logo_content_type: Optional[str] = None

0  schemas/tiers/__init__.py  Normal file

@ -1,6 +1,6 @@
|
||||||
from pydantic import BaseModel, Field, field_validator
|
from pydantic import BaseModel, Field, field_validator
|
||||||
from typing import List, Optional
|
from typing import Optional
|
||||||
from schemas.tiers.contact import Contact
|
from schemas.tiers.tiers import TiersDetails
|
||||||
|
|
||||||
|
|
||||||
class ClientResponse(BaseModel):
|
class ClientResponse(BaseModel):
|
||||||
|
|
@ -13,271 +13,25 @@ class ClientResponse(BaseModel):
|
||||||
telephone: Optional[str] = None
|
telephone: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
class ClientDetails(BaseModel):
|
class ClientDetails(TiersDetails):
|
||||||
numero: Optional[str] = Field(None, description="Code client (CT_Num)")
|
|
||||||
intitule: Optional[str] = Field(
|
|
||||||
None, description="Raison sociale ou Nom complet (CT_Intitule)"
|
|
||||||
)
|
|
||||||
type_tiers: Optional[int] = Field(
|
|
||||||
None, description="Type : 0=Client, 1=Fournisseur (CT_Type)"
|
|
||||||
)
|
|
||||||
qualite: Optional[str] = Field(
|
|
||||||
None, description="Qualité Sage : CLI, FOU, PRO (CT_Qualite)"
|
|
||||||
)
|
|
||||||
classement: Optional[str] = Field(
|
|
||||||
None, description="Code de classement (CT_Classement)"
|
|
||||||
)
|
|
||||||
raccourci: Optional[str] = Field(
|
|
||||||
None, description="Code raccourci 7 car. (CT_Raccourci)"
|
|
||||||
)
|
|
||||||
siret: Optional[str] = Field(None, description="N° SIRET 14 chiffres (CT_Siret)")
|
|
||||||
tva_intra: Optional[str] = Field(
|
|
||||||
None, description="N° TVA intracommunautaire (CT_Identifiant)"
|
|
||||||
)
|
|
||||||
code_naf: Optional[str] = Field(None, description="Code NAF/APE (CT_Ape)")
|
|
||||||
|
|
||||||
contact: Optional[str] = Field(
|
|
||||||
None, description="Nom du contact principal (CT_Contact)"
|
|
||||||
)
|
|
||||||
adresse: Optional[str] = Field(None, description="Adresse ligne 1 (CT_Adresse)")
|
|
||||||
complement: Optional[str] = Field(
|
|
||||||
None, description="Complément d'adresse (CT_Complement)"
|
|
||||||
)
|
|
||||||
code_postal: Optional[str] = Field(None, description="Code postal (CT_CodePostal)")
|
|
||||||
ville: Optional[str] = Field(None, description="Ville (CT_Ville)")
|
|
||||||
region: Optional[str] = Field(None, description="Région/État (CT_CodeRegion)")
|
|
||||||
pays: Optional[str] = Field(None, description="Pays (CT_Pays)")
|
|
||||||
|
|
||||||
telephone: Optional[str] = Field(None, description="Téléphone fixe (CT_Telephone)")
|
|
||||||
telecopie: Optional[str] = Field(None, description="Fax (CT_Telecopie)")
|
|
||||||
email: Optional[str] = Field(None, description="Email principal (CT_EMail)")
|
|
||||||
site_web: Optional[str] = Field(None, description="Site web (CT_Site)")
|
|
||||||
facebook: Optional[str] = Field(None, description="Profil Facebook (CT_Facebook)")
|
|
||||||
linkedin: Optional[str] = Field(None, description="Profil LinkedIn (CT_LinkedIn)")
|
|
||||||
|
|
||||||
taux01: Optional[float] = Field(None, description="Taux personnalisé 1 (CT_Taux01)")
|
|
||||||
taux02: Optional[float] = Field(None, description="Taux personnalisé 2 (CT_Taux02)")
|
|
||||||
taux03: Optional[float] = Field(None, description="Taux personnalisé 3 (CT_Taux03)")
|
|
||||||
taux04: Optional[float] = Field(None, description="Taux personnalisé 4 (CT_Taux04)")
|
|
||||||
|
|
||||||
statistique01: Optional[str] = Field(
|
|
||||||
None, description="Statistique 1 (CT_Statistique01)"
|
|
||||||
)
|
|
||||||
statistique02: Optional[str] = Field(
|
|
||||||
None, description="Statistique 2 (CT_Statistique02)"
|
|
||||||
)
|
|
||||||
statistique03: Optional[str] = Field(
|
|
||||||
None, description="Statistique 3 (CT_Statistique03)"
|
|
||||||
)
|
|
||||||
statistique04: Optional[str] = Field(
|
|
||||||
None, description="Statistique 4 (CT_Statistique04)"
|
|
||||||
)
|
|
||||||
statistique05: Optional[str] = Field(
|
|
||||||
None, description="Statistique 5 (CT_Statistique05)"
|
|
||||||
)
|
|
||||||
statistique06: Optional[str] = Field(
|
|
||||||
None, description="Statistique 6 (CT_Statistique06)"
|
|
||||||
)
|
|
||||||
statistique07: Optional[str] = Field(
|
|
||||||
None, description="Statistique 7 (CT_Statistique07)"
|
|
||||||
)
|
|
||||||
statistique08: Optional[str] = Field(
|
|
||||||
None, description="Statistique 8 (CT_Statistique08)"
|
|
||||||
)
|
|
||||||
statistique09: Optional[str] = Field(
|
|
||||||
None, description="Statistique 9 (CT_Statistique09)"
|
|
||||||
)
|
|
||||||
statistique10: Optional[str] = Field(
|
|
||||||
None, description="Statistique 10 (CT_Statistique10)"
|
|
||||||
)
|
|
||||||
|
|
||||||
encours_autorise: Optional[float] = Field(
|
|
||||||
None, description="Encours maximum autorisé (CT_Encours)"
|
|
||||||
)
|
|
||||||
assurance_credit: Optional[float] = Field(
|
|
||||||
None, description="Montant assurance crédit (CT_Assurance)"
|
|
||||||
)
|
|
||||||
langue: Optional[int] = Field(
|
|
||||||
None, description="Code langue 0=FR, 1=EN (CT_Langue)"
|
|
||||||
)
|
|
||||||
commercial_code: Optional[int] = Field(
|
|
||||||
None, description="Code du commercial (CO_No)"
|
|
||||||
)
|
|
||||||
|
|
||||||
lettrage_auto: Optional[bool] = Field(
|
|
||||||
None, description="Lettrage automatique (CT_Lettrage)"
|
|
||||||
)
|
|
||||||
est_actif: Optional[bool] = Field(None, description="True si actif (CT_Sommeil=0)")
|
|
||||||
type_facture: Optional[int] = Field(
|
|
||||||
None, description="Type facture 0=Facture, 1=BL (CT_Facture)"
|
|
||||||
)
|
|
||||||
est_prospect: Optional[bool] = Field(
|
|
||||||
None, description="True si prospect (CT_Prospect=1)"
|
|
||||||
)
|
|
||||||
bl_en_facture: Optional[int] = Field(
|
|
||||||
None, description="Imprimer BL en facture (CT_BLFact)"
|
|
||||||
)
|
|
||||||
saut_page: Optional[int] = Field(
|
|
||||||
None, description="Saut de page sur documents (CT_Saut)"
|
|
||||||
)
|
|
||||||
validation_echeance: Optional[int] = Field(
|
|
||||||
None, description="Valider les échéances (CT_ValidEch)"
|
|
||||||
)
|
|
||||||
controle_encours: Optional[int] = Field(
|
|
||||||
None, description="Contrôler l'encours (CT_ControlEnc)"
|
|
||||||
)
|
|
||||||
exclure_relance: Optional[bool] = Field(
|
|
||||||
None, description="Exclure des relances (CT_NotRappel)"
|
|
||||||
)
|
|
||||||
exclure_penalites: Optional[bool] = Field(
|
|
||||||
None, description="Exclure des pénalités (CT_NotPenal)"
|
|
||||||
)
|
|
||||||
bon_a_payer: Optional[int] = Field(
|
|
||||||
None, description="Bon à payer obligatoire (CT_BonAPayer)"
|
|
||||||
)
|
|
||||||
|
|
||||||
priorite_livraison: Optional[int] = Field(
|
|
||||||
None, description="Priorité livraison (CT_PrioriteLivr)"
|
|
||||||
)
|
|
||||||
livraison_partielle: Optional[int] = Field(
|
|
||||||
None, description="Livraison partielle (CT_LivrPartielle)"
|
|
||||||
)
|
|
||||||
delai_transport: Optional[int] = Field(
|
|
||||||
None, description="Délai transport jours (CT_DelaiTransport)"
|
|
||||||
)
|
|
||||||
delai_appro: Optional[int] = Field(
|
|
||||||
None, description="Délai appro jours (CT_DelaiAppro)"
|
|
||||||
)
|
|
||||||
|
|
||||||
commentaire: Optional[str] = Field(
|
|
||||||
None, description="Commentaire libre (CT_Commentaire)"
|
|
||||||
)
|
|
||||||
|
|
||||||
section_analytique: Optional[str] = Field(
|
|
||||||
None, description="Section analytique (CA_Num)"
|
|
||||||
)
|
|
||||||
|
|
||||||
mode_reglement_code: Optional[int] = Field(
|
|
||||||
None, description="Code mode règlement (MR_No)"
|
|
||||||
)
|
|
||||||
surveillance_active: Optional[bool] = Field(
|
|
||||||
None, description="Surveillance financière (CT_Surveillance)"
|
|
||||||
)
|
|
||||||
coface: Optional[str] = Field(None, description="Code Coface 25 car. (CT_Coface)")
|
|
||||||
forme_juridique: Optional[str] = Field(
|
|
||||||
None, description="Forme juridique SA, SARL (CT_SvFormeJuri)"
|
|
||||||
)
|
|
||||||
effectif: Optional[str] = Field(
|
|
||||||
None, description="Nombre d'employés (CT_SvEffectif)"
|
|
||||||
)
|
|
||||||
sv_regularite: Optional[str] = Field(
|
|
||||||
None, description="Régularité paiements (CT_SvRegul)"
|
|
||||||
)
|
|
||||||
sv_cotation: Optional[str] = Field(
|
|
||||||
None, description="Cotation crédit (CT_SvCotation)"
|
|
||||||
)
|
|
||||||
sv_objet_maj: Optional[str] = Field(
|
|
||||||
None, description="Objet dernière MAJ (CT_SvObjetMaj)"
|
|
||||||
)
|
|
||||||
sv_chiffre_affaires: Optional[float] = Field(
|
|
||||||
None, description="Chiffre d'affaires (CT_SvCA)"
|
|
||||||
)
|
|
||||||
sv_resultat: Optional[float] = Field(
|
-        None, description="Résultat financier (CT_SvResultat)"
-    )
-    compte_general: Optional[str] = Field(
-        None, description="Compte général principal (CG_NumPrinc)"
-    )
-    categorie_tarif: Optional[int] = Field(
-        None, description="Catégorie tarifaire (N_CatTarif)"
-    )
-    categorie_compta: Optional[int] = Field(
-        None, description="Catégorie comptable (N_CatCompta)"
-    )
-
-    contacts: Optional[List[Contact]] = Field(
-        default_factory=list, description="Liste des contacts du client"
-    )

     class Config:
         json_schema_extra = {
             "example": {
                 "numero": "CLI000001",
                 "intitule": "SARL EXEMPLE",
                 "type_tiers": 0,
-                "qualite": "CLI",
-                "classement": "A",
-                "raccourci": "EXEMPL",
-                "siret": "12345678901234",
-                "tva_intra": "FR12345678901",
-                "code_naf": "6201Z",
-                "contact": "Jean Dupont",
-                "adresse": "123 Rue de la Paix",
-                "complement": "Bâtiment B",
-                "code_postal": "75001",
-                "ville": "Paris",
-                "region": "Île-de-France",
-                "pays": "France",
-                "telephone": "0123456789",
-                "telecopie": "0123456788",
-                "email": "contact@exemple.fr",
-                "site_web": "https://www.exemple.fr",
-                "facebook": "https://facebook.com/exemple",
-                "linkedin": "https://linkedin.com/company/exemple",
-                "taux01": 0.0,
-                "taux02": 0.0,
-                "taux03": 0.0,
-                "taux04": 0.0,
-                "statistique01": "Informatique",
-                "statistique02": "",
-                "statistique03": "",
-                "statistique04": "",
-                "statistique05": "",
-                "statistique06": "",
-                "statistique07": "",
-                "statistique08": "",
-                "statistique09": "",
-                "statistique10": "",
-                "encours_autorise": 50000.0,
-                "assurance_credit": 40000.0,
-                "langue": 0,
                 "commercial_code": 1,
-                "lettrage_auto": True,
-                "est_actif": True,
-                "type_facture": 1,
-                "est_prospect": False,
-                "bl_en_facture": 0,
-                "saut_page": 0,
-                "validation_echeance": 0,
-                "controle_encours": 1,
-                "exclure_relance": False,
-                "exclure_penalites": False,
-                "bon_a_payer": 0,
-                "priorite_livraison": 1,
-                "livraison_partielle": 1,
-                "delai_transport": 2,
-                "delai_appro": 0,
-                "commentaire": "Client important",
-                "section_analytique": "",
-                "mode_reglement_code": 1,
-                "surveillance_active": True,
-                "coface": "COF12345",
-                "forme_juridique": "SARL",
-                "effectif": "50-99",
-                "sv_regularite": "",
-                "sv_cotation": "",
-                "sv_objet_maj": "",
-                "sv_chiffre_affaires": 2500000.0,
-                "sv_resultat": 150000.0,
-                "compte_general": "4110000",
-                "categorie_tarif": 0,
-                "categorie_compta": 0,
+                "commercial": {
+                    "numero": 1,
+                    "nom": "DUPONT",
+                    "prenom": "Jean",
+                    "email": "j.dupont@entreprise.fr",
+                },
             }
         }


-class ClientCreateRequest(BaseModel):
+class ClientCreate(BaseModel):
     intitule: str = Field(
         ..., max_length=69, description="Nom du client (CT_Intitule) - OBLIGATOIRE"
     )

@@ -679,7 +433,7 @@ class ClientCreateRequest(BaseModel):
     }


-class ClientUpdateRequest(BaseModel):
+class ClientUpdate(BaseModel):
     intitule: Optional[str] = Field(None, max_length=69)
     qualite: Optional[str] = Field(None, max_length=17)
     classement: Optional[str] = Field(None, max_length=17)
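The rename from ClientCreateRequest/ClientUpdateRequest to ClientCreate/ClientUpdate does not change how callers validate payloads. The minimal sketch below uses local stand-in classes limited to the fields this diff actually shows, and assumes the Pydantic v2 API (model_dump); it is illustrative, not the project's own service code.

from typing import Optional
from pydantic import BaseModel, Field

class ClientCreate(BaseModel):      # local mirror of the renamed schema
    intitule: str = Field(..., max_length=69)

class ClientUpdate(BaseModel):      # local mirror; all fields optional
    intitule: Optional[str] = Field(None, max_length=69)
    qualite: Optional[str] = Field(None, max_length=17)

client = ClientCreate(intitule="SARL EXEMPLE")   # an invalid payload would raise ValidationError
patch = ClientUpdate(qualite="CLI")
print(patch.model_dump(exclude_unset=True))      # {'qualite': 'CLI'} -> only the fields actually sent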
schemas/tiers/commercial.py — new file (111 lines)
@@ -0,0 +1,111 @@
from pydantic import BaseModel, EmailStr, Field
from typing import Optional


class CollaborateurBase(BaseModel):
    """Champs communs collaborateur"""

    nom: str = Field(..., max_length=50)
    prenom: Optional[str] = Field(None, max_length=50)
    fonction: Optional[str] = Field(None, max_length=50)

    adresse: Optional[str] = Field(None, max_length=100)
    complement: Optional[str] = Field(None, max_length=100)
    code_postal: Optional[str] = Field(None, max_length=10)
    ville: Optional[str] = Field(None, max_length=50)
    code_region: Optional[str] = Field(None, max_length=50)
    pays: Optional[str] = Field(None, max_length=50)

    service: Optional[str] = Field(None, max_length=50)
    vendeur: bool = Field(default=False)
    caissier: bool = Field(default=False)
    acheteur: bool = Field(default=False)
    chef_ventes: bool = Field(default=False)
    numero_chef_ventes: Optional[int] = None

    telephone: Optional[str] = Field(None, max_length=20)
    telecopie: Optional[str] = Field(None, max_length=20)
    email: Optional[EmailStr] = None
    tel_portable: Optional[str] = Field(None, max_length=20)

    facebook: Optional[str] = Field(None, max_length=100)
    linkedin: Optional[str] = Field(None, max_length=100)
    skype: Optional[str] = Field(None, max_length=100)

    matricule: Optional[str] = Field(None, max_length=20)
    sommeil: bool = Field(default=False)


class CollaborateurCreate(CollaborateurBase):
    """Création d'un collaborateur"""

    pass


class CollaborateurUpdate(BaseModel):
    """Modification d'un collaborateur (tous champs optionnels)"""

    nom: Optional[str] = Field(None, max_length=50)
    prenom: Optional[str] = Field(None, max_length=50)
    fonction: Optional[str] = Field(None, max_length=50)

    adresse: Optional[str] = Field(None, max_length=100)
    complement: Optional[str] = Field(None, max_length=100)
    code_postal: Optional[str] = Field(None, max_length=10)
    ville: Optional[str] = Field(None, max_length=50)
    code_region: Optional[str] = Field(None, max_length=50)
    pays: Optional[str] = Field(None, max_length=50)

    service: Optional[str] = Field(None, max_length=50)
    vendeur: Optional[bool] = None
    caissier: Optional[bool] = None
    acheteur: Optional[bool] = None
    chef_ventes: Optional[bool] = None
    numero_chef_ventes: Optional[int] = None

    telephone: Optional[str] = Field(None, max_length=20)
    telecopie: Optional[str] = Field(None, max_length=20)
    email: Optional[EmailStr] = None
    tel_portable: Optional[str] = Field(None, max_length=20)

    facebook: Optional[str] = Field(None, max_length=100)
    linkedin: Optional[str] = Field(None, max_length=100)
    skype: Optional[str] = Field(None, max_length=100)

    matricule: Optional[str] = Field(None, max_length=20)
    sommeil: Optional[bool] = None


class CollaborateurListe(BaseModel):
    """Vue liste simplifiée"""

    numero: int
    nom: str
    prenom: Optional[str]
    fonction: Optional[str]
    service: Optional[str]
    email: Optional[str]
    telephone: Optional[str]
    vendeur: bool
    sommeil: bool


class CollaborateurDetails(CollaborateurBase):
    """Détails complets d'un collaborateur"""

    numero: int

    class Config:
        json_schema_extra = {
            "example": {
                "numero": 1,
                "nom": "DUPONT",
                "prenom": "Jean",
                "fonction": "Directeur Commercial",
                "service": "Commercial",
                "vendeur": True,
                "email": "j.dupont@entreprise.fr",
                "telephone": "0123456789",
                "sommeil": False,
            }
        }
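Because CollaborateurUpdate leaves every field optional, a handler can distinguish "not sent" from "sent as null" with exclude_unset. A short sketch, assuming the module is importable as schemas.tiers.commercial (the package layout implied by the file header) and the Pydantic v2 API; the merge step is illustrative, not the project's actual service logic.

from schemas.tiers.commercial import CollaborateurCreate, CollaborateurUpdate

nouveau = CollaborateurCreate(nom="DURAND", prenom="Alice", vendeur=True)

patch = CollaborateurUpdate(telephone="0155555555", sommeil=True)
changes = patch.model_dump(exclude_unset=True)   # only the fields the caller actually sent
merged = nouveau.model_dump()
merged.update(changes)                           # merged view to persist back to Sage
print(changes)                                   # {'telephone': '0155555555', 'sommeil': True}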
@@ -1,273 +1,27 @@
 from pydantic import BaseModel, Field, EmailStr
-from typing import List, Optional
+from typing import Optional
-from schemas.tiers.contact import Contact
+from schemas.tiers.tiers import TiersDetails


-class FournisseurDetails(BaseModel):
+class FournisseurDetails(TiersDetails):
-    numero: Optional[str] = Field(None, description="Code fournisseur (CT_Num)")
-    intitule: Optional[str] = Field(
-        None, description="Raison sociale ou Nom complet (CT_Intitule)"
-    )
-    type_tiers: Optional[int] = Field(
-        None, description="Type : 0=Client, 1=Fournisseur (CT_Type)"
-    )
-    qualite: Optional[str] = Field(
-        None, description="Qualité Sage : CLI, FOU, PRO (CT_Qualite)"
-    )
-    classement: Optional[str] = Field(
-        None, description="Code de classement (CT_Classement)"
-    )
-    raccourci: Optional[str] = Field(
-        None, description="Code raccourci 7 car. (CT_Raccourci)"
-    )
-    siret: Optional[str] = Field(None, description="N° SIRET 14 chiffres (CT_Siret)")
-    tva_intra: Optional[str] = Field(
-        None, description="N° TVA intracommunautaire (CT_Identifiant)"
-    )
-    code_naf: Optional[str] = Field(None, description="Code NAF/APE (CT_Ape)")
-
-    contact: Optional[str] = Field(
-        None, description="Nom du contact principal (CT_Contact)"
-    )
-    adresse: Optional[str] = Field(None, description="Adresse ligne 1 (CT_Adresse)")
-    complement: Optional[str] = Field(
-        None, description="Complément d'adresse (CT_Complement)"
-    )
-    code_postal: Optional[str] = Field(None, description="Code postal (CT_CodePostal)")
-    ville: Optional[str] = Field(None, description="Ville (CT_Ville)")
-    region: Optional[str] = Field(None, description="Région/État (CT_CodeRegion)")
-    pays: Optional[str] = Field(None, description="Pays (CT_Pays)")
-
-    telephone: Optional[str] = Field(None, description="Téléphone fixe (CT_Telephone)")
-    telecopie: Optional[str] = Field(None, description="Fax (CT_Telecopie)")
-    email: Optional[str] = Field(None, description="Email principal (CT_EMail)")
-    site_web: Optional[str] = Field(None, description="Site web (CT_Site)")
-    facebook: Optional[str] = Field(None, description="Profil Facebook (CT_Facebook)")
-    linkedin: Optional[str] = Field(None, description="Profil LinkedIn (CT_LinkedIn)")
-
-    taux01: Optional[float] = Field(None, description="Taux personnalisé 1 (CT_Taux01)")
-    taux02: Optional[float] = Field(None, description="Taux personnalisé 2 (CT_Taux02)")
-    taux03: Optional[float] = Field(None, description="Taux personnalisé 3 (CT_Taux03)")
-    taux04: Optional[float] = Field(None, description="Taux personnalisé 4 (CT_Taux04)")
-
-    statistique01: Optional[str] = Field(
-        None, description="Statistique 1 (CT_Statistique01)"
-    )
-    statistique02: Optional[str] = Field(
-        None, description="Statistique 2 (CT_Statistique02)"
-    )
-    statistique03: Optional[str] = Field(
-        None, description="Statistique 3 (CT_Statistique03)"
-    )
-    statistique04: Optional[str] = Field(
-        None, description="Statistique 4 (CT_Statistique04)"
-    )
-    statistique05: Optional[str] = Field(
-        None, description="Statistique 5 (CT_Statistique05)"
-    )
-    statistique06: Optional[str] = Field(
-        None, description="Statistique 6 (CT_Statistique06)"
-    )
-    statistique07: Optional[str] = Field(
-        None, description="Statistique 7 (CT_Statistique07)"
-    )
-    statistique08: Optional[str] = Field(
-        None, description="Statistique 8 (CT_Statistique08)"
-    )
-    statistique09: Optional[str] = Field(
-        None, description="Statistique 9 (CT_Statistique09)"
-    )
-    statistique10: Optional[str] = Field(
-        None, description="Statistique 10 (CT_Statistique10)"
-    )
-
-    encours_autorise: Optional[float] = Field(
-        None, description="Encours maximum autorisé (CT_Encours)"
-    )
-    assurance_credit: Optional[float] = Field(
-        None, description="Montant assurance crédit (CT_Assurance)"
-    )
-    langue: Optional[int] = Field(
-        None, description="Code langue 0=FR, 1=EN (CT_Langue)"
-    )
-    commercial_code: Optional[int] = Field(
-        None, description="Code du commercial (CO_No)"
-    )
-
-    lettrage_auto: Optional[bool] = Field(
-        None, description="Lettrage automatique (CT_Lettrage)"
-    )
-    est_actif: Optional[bool] = Field(None, description="True si actif (CT_Sommeil=0)")
-    type_facture: Optional[int] = Field(
-        None, description="Type facture 0=Facture, 1=BL (CT_Facture)"
-    )
-    est_prospect: Optional[bool] = Field(
-        None, description="True si prospect (CT_Prospect=1)"
-    )
-    bl_en_facture: Optional[int] = Field(
-        None, description="Imprimer BL en facture (CT_BLFact)"
-    )
-    saut_page: Optional[int] = Field(
-        None, description="Saut de page sur documents (CT_Saut)"
-    )
-    validation_echeance: Optional[int] = Field(
-        None, description="Valider les échéances (CT_ValidEch)"
-    )
-    controle_encours: Optional[int] = Field(
-        None, description="Contrôler l'encours (CT_ControlEnc)"
-    )
-    exclure_relance: Optional[bool] = Field(
-        None, description="Exclure des relances (CT_NotRappel)"
-    )
-    exclure_penalites: Optional[bool] = Field(
-        None, description="Exclure des pénalités (CT_NotPenal)"
-    )
-    bon_a_payer: Optional[int] = Field(
-        None, description="Bon à payer obligatoire (CT_BonAPayer)"
-    )
-
-    priorite_livraison: Optional[int] = Field(
-        None, description="Priorité livraison (CT_PrioriteLivr)"
-    )
-    livraison_partielle: Optional[int] = Field(
-        None, description="Livraison partielle (CT_LivrPartielle)"
-    )
-    delai_transport: Optional[int] = Field(
-        None, description="Délai transport jours (CT_DelaiTransport)"
-    )
-    delai_appro: Optional[int] = Field(
-        None, description="Délai appro jours (CT_DelaiAppro)"
-    )
-
-    commentaire: Optional[str] = Field(
-        None, description="Commentaire libre (CT_Commentaire)"
-    )
-
-    section_analytique: Optional[str] = Field(
-        None, description="Section analytique (CA_Num)"
-    )
-
-    mode_reglement_code: Optional[int] = Field(
-        None, description="Code mode règlement (MR_No)"
-    )
-    surveillance_active: Optional[bool] = Field(
-        None, description="Surveillance financière (CT_Surveillance)"
-    )
-    coface: Optional[str] = Field(None, description="Code Coface 25 car. (CT_Coface)")
-    forme_juridique: Optional[str] = Field(
-        None, description="Forme juridique SA, SARL (CT_SvFormeJuri)"
-    )
-    effectif: Optional[str] = Field(
-        None, description="Nombre d'employés (CT_SvEffectif)"
-    )
-    sv_regularite: Optional[str] = Field(
-        None, description="Régularité paiements (CT_SvRegul)"
-    )
-    sv_cotation: Optional[str] = Field(
-        None, description="Cotation crédit (CT_SvCotation)"
-    )
-    sv_objet_maj: Optional[str] = Field(
-        None, description="Objet dernière MAJ (CT_SvObjetMaj)"
-    )
-    sv_chiffre_affaires: Optional[float] = Field(
-        None, description="Chiffre d'affaires (CT_SvCA)"
-    )
-    sv_resultat: Optional[float] = Field(
-        None, description="Résultat financier (CT_SvResultat)"
-    )
-
-    compte_general: Optional[str] = Field(
-        None, description="Compte général principal (CG_NumPrinc)"
-    )
-    categorie_tarif: Optional[int] = Field(
-        None, description="Catégorie tarifaire (N_CatTarif)"
-    )
-    categorie_compta: Optional[int] = Field(
-        None, description="Catégorie comptable (N_CatCompta)"
-    )
-
-    contacts: Optional[List[Contact]] = Field(
-        default_factory=list, description="Liste des contacts du fournisseur"
-    )

     class Config:
         json_schema_extra = {
             "example": {
                 "numero": "FOU000001",
-                "intitule": "SARL FOURNISSEUR EXEMPLE",
+                "intitule": "SARL FOURNISSEUR",
                 "type_tiers": 1,
-                "qualite": "FOU",
-                "classement": "A",
-                "raccourci": "EXEMPL",
-                "siret": "12345678901234",
-                "tva_intra": "FR12345678901",
-                "code_naf": "6201Z",
-                "contact": "Jean Dupont",
-                "adresse": "123 Rue de la Paix",
-                "complement": "Bâtiment B",
-                "code_postal": "75001",
-                "ville": "Paris",
-                "region": "Île-de-France",
-                "pays": "France",
-                "telephone": "0123456789",
-                "telecopie": "0123456788",
-                "email": "contact@exemple.fr",
-                "site_web": "https://www.exemple.fr",
-                "facebook": "https://facebook.com/exemple",
-                "linkedin": "https://linkedin.com/company/exemple",
-                "taux01": 0.0,
-                "taux02": 0.0,
-                "taux03": 0.0,
-                "taux04": 0.0,
-                "statistique01": "Informatique",
-                "statistique02": "",
-                "statistique03": "",
-                "statistique04": "",
-                "statistique05": "",
-                "statistique06": "",
-                "statistique07": "",
-                "statistique08": "",
-                "statistique09": "",
-                "statistique10": "",
-                "encours_autorise": 50000.0,
-                "assurance_credit": 40000.0,
-                "langue": 0,
                 "commercial_code": 1,
-                "lettrage_auto": True,
-                "est_actif": True,
-                "type_facture": 1,
-                "est_prospect": False,
-                "bl_en_facture": 0,
-                "saut_page": 0,
-                "validation_echeance": 0,
-                "controle_encours": 1,
-                "exclure_relance": False,
-                "exclure_penalites": False,
-                "bon_a_payer": 0,
-                "priorite_livraison": 1,
-                "livraison_partielle": 1,
-                "delai_transport": 2,
-                "delai_appro": 0,
-                "commentaire": "Client important",
-                "section_analytique": "",
-                "mode_reglement_code": 1,
-                "surveillance_active": True,
-                "coface": "COF12345",
-                "forme_juridique": "SARL",
-                "effectif": "50-99",
-                "sv_regularite": "",
-                "sv_cotation": "",
-                "sv_objet_maj": "",
-                "sv_chiffre_affaires": 2500000.0,
-                "sv_resultat": 150000.0,
-                "compte_general": "4110000",
-                "categorie_tarif": 0,
-                "categorie_compta": 0,
+                "commercial": {
+                    "numero": 1,
+                    "nom": "MARTIN",
+                    "prenom": "Sophie",
+                    "email": "s.martin@entreprise.fr",
+                },
             }
         }


-class FournisseurCreateAPIRequest(BaseModel):
+class FournisseurCreate(BaseModel):
     intitule: str = Field(
         ..., min_length=1, max_length=69, description="Raison sociale du fournisseur"
     )

@@ -304,7 +58,7 @@ class FournisseurCreateAPIRequest(BaseModel):
     }


-class FournisseurUpdateRequest(BaseModel):
+class FournisseurUpdate(BaseModel):
     intitule: Optional[str] = Field(None, min_length=1, max_length=69)
     adresse: Optional[str] = Field(None, max_length=35)
     code_postal: Optional[str] = Field(None, max_length=9)
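With FournisseurDetails now deriving from TiersDetails, the shared CT_* fields are declared once and inherited rather than copy-pasted per tiers type. A small sketch of what that buys a caller; the import path schemas.tiers.fournisseurs is an assumption (this hunk does not show the file name), and model_dump assumes Pydantic v2.

from schemas.tiers.fournisseurs import FournisseurDetails

f = FournisseurDetails(
    numero="FOU000001",
    intitule="SARL FOURNISSEUR",
    type_tiers=1,
    encours_autorise=50000.0,    # declared on TiersDetails, simply inherited here
)
print(f.model_dump(exclude_none=True))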
@@ -3,6 +3,8 @@ from pydantic import BaseModel, Field
 from schemas.tiers.contact import Contact
 from enum import IntEnum

+from schemas.tiers.tiers_collab import Collaborateur
+

 class TypeTiersInt(IntEnum):
     CLIENT = 0

@@ -12,7 +14,6 @@ class TypeTiersInt(IntEnum):

 class TiersDetails(BaseModel):
-    # IDENTIFICATION
     numero: Optional[str] = Field(None, description="Code tiers (CT_Num)")
     intitule: Optional[str] = Field(
         None, description="Raison sociale ou Nom complet (CT_Intitule)"

@@ -35,7 +36,6 @@ class TiersDetails(BaseModel):
     )
     code_naf: Optional[str] = Field(None, description="Code NAF/APE (CT_Ape)")

-    # ADRESSE
     contact: Optional[str] = Field(
         None, description="Nom du contact principal (CT_Contact)"
     )

@@ -48,7 +48,6 @@ class TiersDetails(BaseModel):
     region: Optional[str] = Field(None, description="Région/État (CT_CodeRegion)")
     pays: Optional[str] = Field(None, description="Pays (CT_Pays)")

-    # TELECOM
     telephone: Optional[str] = Field(None, description="Téléphone fixe (CT_Telephone)")
     telecopie: Optional[str] = Field(None, description="Fax (CT_Telecopie)")
     email: Optional[str] = Field(None, description="Email principal (CT_EMail)")

@@ -56,13 +55,11 @@ class TiersDetails(BaseModel):
     facebook: Optional[str] = Field(None, description="Profil Facebook (CT_Facebook)")
     linkedin: Optional[str] = Field(None, description="Profil LinkedIn (CT_LinkedIn)")

-    # TAUX
     taux01: Optional[float] = Field(None, description="Taux personnalisé 1 (CT_Taux01)")
     taux02: Optional[float] = Field(None, description="Taux personnalisé 2 (CT_Taux02)")
     taux03: Optional[float] = Field(None, description="Taux personnalisé 3 (CT_Taux03)")
     taux04: Optional[float] = Field(None, description="Taux personnalisé 4 (CT_Taux04)")

-    # STATISTIQUES
     statistique01: Optional[str] = Field(
         None, description="Statistique 1 (CT_Statistique01)"
     )

@@ -94,7 +91,6 @@ class TiersDetails(BaseModel):
         None, description="Statistique 10 (CT_Statistique10)"
     )

-    # COMMERCIAL
     encours_autorise: Optional[float] = Field(
         None, description="Encours maximum autorisé (CT_Encours)"
     )

@@ -107,8 +103,10 @@ class TiersDetails(BaseModel):
     commercial_code: Optional[int] = Field(
         None, description="Code du commercial (CO_No)"
     )
+    commercial: Optional[Collaborateur] = Field(
+        None, description="Détails du commercial/collaborateur"
+    )

-    # FACTURATION
     lettrage_auto: Optional[bool] = Field(
         None, description="Lettrage automatique (CT_Lettrage)"
     )

@@ -141,7 +139,6 @@ class TiersDetails(BaseModel):
         None, description="Bon à payer obligatoire (CT_BonAPayer)"
     )

-    # LOGISTIQUE
     priorite_livraison: Optional[int] = Field(
         None, description="Priorité livraison (CT_PrioriteLivr)"
     )

@@ -155,17 +152,14 @@ class TiersDetails(BaseModel):
         None, description="Délai appro jours (CT_DelaiAppro)"
     )

-    # COMMENTAIRE
     commentaire: Optional[str] = Field(
         None, description="Commentaire libre (CT_Commentaire)"
     )

-    # ANALYTIQUE
     section_analytique: Optional[str] = Field(
         None, description="Section analytique (CA_Num)"
     )

-    # ORGANISATION / SURVEILLANCE
     mode_reglement_code: Optional[int] = Field(
         None, description="Code mode règlement (MR_No)"
     )

@@ -195,7 +189,6 @@ class TiersDetails(BaseModel):
         None, description="Résultat financier (CT_SvResultat)"
     )

-    # COMPTE GENERAL ET CATEGORIES
     compte_general: Optional[str] = Field(
         None, description="Compte général principal (CG_NumPrinc)"
     )

@@ -206,7 +199,6 @@ class TiersDetails(BaseModel):
         None, description="Catégorie comptable (N_CatCompta)"
     )

-    # CONTACTS
     contacts: Optional[List[Contact]] = Field(
         default_factory=list, description="Liste des contacts du tiers"
     )
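The descriptions above tie the new nested object to the CO_* columns of the Sage collaborator table. The sketch below is illustrative only: the row dict and the helper function are hypothetical, and only the column names and the Collaborateur field names come from this diff.

from schemas.tiers.tiers_collab import Collaborateur

def collaborateur_from_row(row: dict) -> Collaborateur:
    # Hypothetical mapping from raw CO_* columns to the Pydantic model.
    return Collaborateur(
        numero=row.get("CO_No"),
        nom=row.get("CO_Nom"),
        prenom=row.get("CO_Prenom"),
        fonction=row.get("CO_Fonction"),
        email=row.get("CO_EMail"),
        est_vendeur=bool(row.get("CO_Vendeur", 0)),
        est_actif=row.get("CO_Sommeil", 0) == 0,   # CO_Sommeil=0 means active
    )

row = {"CO_No": 1, "CO_Nom": "DUPONT", "CO_Prenom": "Jean", "CO_Sommeil": 0}
print(collaborateur_from_row(row).model_dump(exclude_none=True))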
schemas/tiers/tiers_collab.py — new file (54 lines)
@@ -0,0 +1,54 @@
from typing import Optional
from pydantic import BaseModel, Field


class Collaborateur(BaseModel):
    """Modèle pour un collaborateur/commercial"""

    numero: Optional[int] = Field(None, description="Numéro du collaborateur (CO_No)")
    nom: Optional[str] = Field(None, description="Nom (CO_Nom)")
    prenom: Optional[str] = Field(None, description="Prénom (CO_Prenom)")
    fonction: Optional[str] = Field(None, description="Fonction (CO_Fonction)")
    adresse: Optional[str] = Field(None, description="Adresse (CO_Adresse)")
    complement: Optional[str] = Field(
        None, description="Complément adresse (CO_Complement)"
    )
    code_postal: Optional[str] = Field(None, description="Code postal (CO_CodePostal)")
    ville: Optional[str] = Field(None, description="Ville (CO_Ville)")
    region: Optional[str] = Field(None, description="Région (CO_CodeRegion)")
    pays: Optional[str] = Field(None, description="Pays (CO_Pays)")
    service: Optional[str] = Field(None, description="Service (CO_Service)")
    est_vendeur: Optional[bool] = Field(None, description="Est vendeur (CO_Vendeur)")
    est_caissier: Optional[bool] = Field(None, description="Est caissier (CO_Caissier)")
    est_acheteur: Optional[bool] = Field(None, description="Est acheteur (CO_Acheteur)")
    telephone: Optional[str] = Field(None, description="Téléphone (CO_Telephone)")
    telecopie: Optional[str] = Field(None, description="Fax (CO_Telecopie)")
    email: Optional[str] = Field(None, description="Email (CO_EMail)")
    tel_portable: Optional[str] = Field(None, description="Portable (CO_TelPortable)")
    matricule: Optional[str] = Field(None, description="Matricule (CO_Matricule)")
    facebook: Optional[str] = Field(None, description="Facebook (CO_Facebook)")
    linkedin: Optional[str] = Field(None, description="LinkedIn (CO_LinkedIn)")
    skype: Optional[str] = Field(None, description="Skype (CO_Skype)")
    est_actif: Optional[bool] = Field(None, description="Est actif (CO_Sommeil=0)")
    est_chef_ventes: Optional[bool] = Field(
        None, description="Est chef des ventes (CO_ChefVentes)"
    )
    chef_ventes_numero: Optional[int] = Field(
        None, description="N° chef des ventes (CO_NoChefVentes)"
    )

    class Config:
        json_schema_extra = {
            "example": {
                "numero": 1,
                "nom": "DUPONT",
                "prenom": "Jean",
                "fonction": "Commercial",
                "service": "Ventes",
                "est_vendeur": True,
                "telephone": "0123456789",
                "email": "j.dupont@entreprise.fr",
                "tel_portable": "0612345678",
                "est_actif": True,
            }
        }
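Since TiersDetails now exposes commercial: Optional[Collaborateur], a plain dict passed for that field is validated into a Collaborateur instance by Pydantic. A minimal sketch using the import paths shown in this diff (all fields involved are optional, so the partial construction below is valid):

from schemas.tiers.tiers import TiersDetails
from schemas.tiers.tiers_collab import Collaborateur

tiers = TiersDetails(
    numero="CLI000001",
    intitule="SARL EXEMPLE",
    commercial_code=1,
    commercial={"numero": 1, "nom": "DUPONT", "prenom": "Jean"},  # dict coerced to Collaborateur
)
assert isinstance(tiers.commercial, Collaborateur)
print(tiers.commercial.nom)   # "DUPONT"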
@@ -2,7 +2,7 @@ from pydantic import BaseModel
 from typing import Optional


-class UserResponse(BaseModel):
+class Users(BaseModel):
     id: str
     email: str
     nom: str
scripts/manage_security.py — new file (651 lines)
@@ -0,0 +1,651 @@
import sys
import os
from pathlib import Path
import asyncio
import argparse
import logging
from datetime import datetime
from typing import Optional, List
import json
from sqlalchemy import select

_current_file = Path(__file__).resolve()
_script_dir = _current_file.parent
_app_dir = _script_dir.parent

print(f"DEBUG: Script path: {_current_file}")
print(f"DEBUG: App dir: {_app_dir}")
print(f"DEBUG: Current working dir: {os.getcwd()}")

if str(_app_dir) in sys.path:
    sys.path.remove(str(_app_dir))
sys.path.insert(0, str(_app_dir))

os.chdir(str(_app_dir))

print(f"DEBUG: sys.path[0]: {sys.path[0]}")
print(f"DEBUG: New working dir: {os.getcwd()}")

_test_imports = [
    "database",
    "database.db_config",
    "database.models",
    "services",
    "security",
]

print("\nDEBUG: Vérification des imports...")
for module in _test_imports:
    try:
        __import__(module)
        print(f"  ✓ {module}")
    except ImportError as e:
        print(f"  ✗ {module}: {e}")

try:
    from database.db_config import async_session_factory
    from database.models.api_key import SwaggerUser, ApiKey
    from services.api_key import ApiKeyService
    from security.auth import hash_password
except ImportError as e:
    print(f"\n ERREUR D'IMPORT: {e}")
    print(" Vérifiez que vous êtes dans /app")
    print(" Commande correcte: cd /app && python scripts/manage_security.py ...")
    sys.exit(1)

logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
logger = logging.getLogger(__name__)


AVAILABLE_TAGS = {
    "Authentication": " Authentification et gestion des comptes",
    "API Keys Management": "🔑 Gestion des clés API",
    "Clients": "👥 Gestion des clients",
    "Fournisseurs": "🏭 Gestion des fournisseurs",
    "Prospects": "🎯 Gestion des prospects",
    "Tiers": "📋 Gestion générale des tiers",
    "Contacts": "📞 Contacts des tiers",
    "Articles": "📦 Catalogue articles",
    "Familles": "🏷️ Familles d'articles",
    "Stock": "📊 Mouvements de stock",
    "Devis": "📄 Devis",
    "Commandes": "🛒 Commandes",
    "Livraisons": "🚚 Bons de livraison",
    "Factures": "💰 Factures",
    "Avoirs": "↩️ Avoirs",
    "Règlements": "💳 Règlements et encaissements",
    "Workflows": " Transformations de documents",
    "Documents": "📑 Gestion documents (PDF)",
    "Emails": "📧 Envoi d'emails",
    "Validation": " Validations métier",
    "Collaborateurs": "👔 Collaborateurs internes",
    "Société": "🏢 Informations société",
    "Référentiels": "📚 Données de référence",
    "System": "⚙️ Système et santé",
    "Admin": "🛠️ Administration",
    "Debug": "🐛 Debug et diagnostics",
}

PRESET_PROFILES = {
    "commercial": [
        "Clients",
        "Contacts",
        "Devis",
        "Commandes",
        "Factures",
        "Articles",
        "Documents",
        "Emails",
    ],
    "comptable": [
        "Clients",
        "Fournisseurs",
        "Factures",
        "Avoirs",
        "Règlements",
        "Documents",
        "Emails",
    ],
    "logistique": [
        "Articles",
        "Stock",
        "Commandes",
        "Livraisons",
        "Fournisseurs",
        "Documents",
    ],
    "readonly": ["Clients", "Articles", "Devis", "Commandes", "Factures", "Documents"],
    "developer": [
        "Authentication",
        "API Keys Management",
        "System",
        "Clients",
        "Articles",
        "Devis",
        "Commandes",
        "Factures",
    ],
}


async def add_swagger_user(
    username: str,
    password: str,
    full_name: str = None,
    tags: Optional[List[str]] = None,
    preset: Optional[str] = None,
):
    """Ajouter un utilisateur Swagger avec configuration avancée"""
    async with async_session_factory() as session:
        result = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        existing = result.scalar_one_or_none()

        if existing:
            logger.error(f" L'utilisateur '{username}' existe déjà")
            return

        if preset:
            if preset not in PRESET_PROFILES:
                logger.error(
                    f" Preset '{preset}' inconnu. Disponibles: {list(PRESET_PROFILES.keys())}"
                )
                return
            tags = PRESET_PROFILES[preset]
            logger.info(f"📋 Application du preset '{preset}': {len(tags)} tags")

        swagger_user = SwaggerUser(
            username=username,
            hashed_password=hash_password(password),
            full_name=full_name or username,
            is_active=True,
            allowed_tags=json.dumps(tags) if tags else None,
        )

        session.add(swagger_user)
        await session.commit()

        logger.info(f" Utilisateur Swagger créé: {username}")
        logger.info(f" Nom complet: {swagger_user.full_name}")

        if tags:
            logger.info(f" 🏷️ Tags autorisés ({len(tags)}):")
            for tag in tags:
                desc = AVAILABLE_TAGS.get(tag, "")
                logger.info(f"   • {tag} {desc}")
        else:
            logger.info(" 👑 Accès ADMIN COMPLET (tous les tags)")


async def list_swagger_users():
    """Lister tous les utilisateurs Swagger avec détails"""
    async with async_session_factory() as session:
        result = await session.execute(select(SwaggerUser))
        users = result.scalars().all()

        if not users:
            logger.info("🔭 Aucun utilisateur Swagger")
            return

        logger.info(f"\n👥 {len(users)} utilisateur(s) Swagger:\n")
        logger.info("=" * 80)

        for user in users:
            status = " ACTIF" if user.is_active else " NON ACTIF"
            logger.info(f"\n{status} {user.username}")
            logger.info(f"📛 Nom: {user.full_name}")
            logger.info(f"🆔 ID: {user.id}")
            logger.info(f"📅 Créé: {user.created_at}")
            logger.info(f"🕐 Dernière connexion: {user.last_login or 'Jamais'}")

            if user.allowed_tags:
                try:
                    tags = json.loads(user.allowed_tags)
                    if tags:
                        logger.info(f"🏷️ Tags autorisés ({len(tags)}):")
                        for tag in tags:
                            desc = AVAILABLE_TAGS.get(tag, "")
                            logger.info(f"   • {tag} {desc}")

                        auth_schemes = []
                        if "Authentication" in tags:
                            auth_schemes.append("JWT (Bearer)")
                        if "API Keys Management" in tags or len(tags) > 3:
                            auth_schemes.append("X-API-Key")
                        if not auth_schemes:
                            auth_schemes.append("JWT (Bearer)")

                        logger.info(
                            f" Authentification autorisée: {', '.join(auth_schemes)}"
                        )
                    else:
                        logger.info("👑 Tags autorisés: ADMIN COMPLET (tous)")
                        logger.info(" Authentification: JWT + X-API-Key (tout)")
                except json.JSONDecodeError:
                    logger.info(" Tags: Erreur format")
            else:
                logger.info("👑 Tags autorisés: ADMIN COMPLET (tous)")
                logger.info(" Authentification: JWT + X-API-Key (tout)")

        logger.info("\n" + "=" * 80)


async def update_swagger_user(
    username: str,
    add_tags: Optional[List[str]] = None,
    remove_tags: Optional[List[str]] = None,
    set_tags: Optional[List[str]] = None,
    preset: Optional[str] = None,
    active: Optional[bool] = None,
):
    """Mettre à jour un utilisateur Swagger"""
    async with async_session_factory() as session:
        result = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        user = result.scalar_one_or_none()

        if not user:
            logger.error(f" Utilisateur '{username}' introuvable")
            return

        modified = False

        if preset:
            if preset not in PRESET_PROFILES:
                logger.error(f" Preset '{preset}' inconnu")
                return
            user.allowed_tags = json.dumps(PRESET_PROFILES[preset])
            logger.info(f"📋 Preset '{preset}' appliqué")
            modified = True

        elif set_tags is not None:
            user.allowed_tags = json.dumps(set_tags) if set_tags else None
            logger.info(f" Tags remplacés: {len(set_tags) if set_tags else 0}")
            modified = True

        elif add_tags or remove_tags:
            current_tags = []
            if user.allowed_tags:
                try:
                    current_tags = json.loads(user.allowed_tags)
                except json.JSONDecodeError:
                    current_tags = []

            if add_tags:
                for tag in add_tags:
                    if tag not in current_tags:
                        current_tags.append(tag)
                        logger.info(f"➕ Tag ajouté: {tag}")
                        modified = True

            if remove_tags:
                for tag in remove_tags:
                    if tag in current_tags:
                        current_tags.remove(tag)
                        logger.info(f"➖ Tag retiré: {tag}")
                        modified = True

            user.allowed_tags = json.dumps(current_tags) if current_tags else None

        if active is not None:
            user.is_active = active
            logger.info(f" Statut: {'ACTIF' if active else 'INACTIF'}")
            modified = True

        if modified:
            await session.commit()
            logger.info(f" Utilisateur '{username}' mis à jour")
        else:
            logger.info(" Aucune modification effectuée")


async def delete_swagger_user(username: str):
    """Supprimer un utilisateur Swagger"""
    async with async_session_factory() as session:
        result = await session.execute(
            select(SwaggerUser).where(SwaggerUser.username == username)
        )
        user = result.scalar_one_or_none()

        if not user:
            logger.error(f" Utilisateur '{username}' introuvable")
            return

        await session.delete(user)
        await session.commit()
        logger.info(f"🗑️ Utilisateur Swagger supprimé: {username}")


async def list_available_tags():
    """Liste tous les tags disponibles avec description"""
    logger.info("\n🏷️ TAGS DISPONIBLES:\n")
    logger.info("=" * 80)

    for tag, desc in AVAILABLE_TAGS.items():
        logger.info(f" {desc}")
        logger.info(f"   Nom: {tag}\n")

    logger.info("=" * 80)
    logger.info("\n📦 PRESETS DISPONIBLES:\n")

    for preset_name, tags in PRESET_PROFILES.items():
        logger.info(f" {preset_name}:")
        logger.info(f"   {', '.join(tags)}\n")

    logger.info("=" * 80)


async def create_api_key(
    name: str,
    description: str = None,
    expires_in_days: int = 365,
    rate_limit: int = 60,
    endpoints: list = None,
):
    """Créer une clé API"""
    async with async_session_factory() as session:
        service = ApiKeyService(session)

        api_key_obj, api_key_plain = await service.create_api_key(
            name=name,
            description=description,
            created_by="cli",
            expires_in_days=expires_in_days,
            rate_limit_per_minute=rate_limit,
            allowed_endpoints=endpoints,
        )

        logger.info("=" * 70)
        logger.info("🔑 Clé API créée avec succès")
        logger.info("=" * 70)
        logger.info(f" ID: {api_key_obj.id}")
        logger.info(f" Nom: {api_key_obj.name}")
        logger.info(f" Clé: {api_key_plain}")
        logger.info(f" Préfixe: {api_key_obj.key_prefix}")
        logger.info(f" Rate limit: {api_key_obj.rate_limit_per_minute} req/min")
        logger.info(f" Expire le: {api_key_obj.expires_at}")

        if api_key_obj.allowed_endpoints:
            try:
                endpoints_list = json.loads(api_key_obj.allowed_endpoints)
                logger.info(f" Endpoints: {', '.join(endpoints_list)}")
            except Exception:
                logger.info(f" Endpoints: {api_key_obj.allowed_endpoints}")
        else:
            logger.info(" Endpoints: Tous (aucune restriction)")

        logger.info("=" * 70)
        logger.info(" SAUVEGARDEZ CETTE CLÉ - Elle ne sera plus affichée !")
        logger.info("=" * 70)


async def list_api_keys():
    """Lister toutes les clés API"""
    async with async_session_factory() as session:
        service = ApiKeyService(session)
        keys = await service.list_api_keys()

        if not keys:
            logger.info("🔭 Aucune clé API")
            return

        logger.info(f"🔑 {len(keys)} clé(s) API:\n")

        for key in keys:
            is_valid = key.is_active and (
                not key.expires_at or key.expires_at > datetime.now()
            )
            status = "" if is_valid else ""

            logger.info(f" {status} {key.name:<30} ({key.key_prefix}...)")
            logger.info(f"   ID: {key.id}")
            logger.info(f"   Rate limit: {key.rate_limit_per_minute} req/min")
            logger.info(f"   Requêtes: {key.total_requests}")
            logger.info(f"   Expire: {key.expires_at or 'Jamais'}")
            logger.info(f"   Dernière utilisation: {key.last_used_at or 'Jamais'}")

            if key.allowed_endpoints:
                try:
                    endpoints = json.loads(key.allowed_endpoints)
                    display = ", ".join(endpoints[:4])
                    if len(endpoints) > 4:
                        display += f"... (+{len(endpoints) - 4})"
                    logger.info(f"   Endpoints: {display}")
                except Exception:
                    pass
            else:
                logger.info("   Endpoints: Tous")
            logger.info("")


async def revoke_api_key(key_id: str):
    """Révoquer une clé API"""
    async with async_session_factory() as session:
        result = await session.execute(select(ApiKey).where(ApiKey.id == key_id))
        key = result.scalar_one_or_none()

        if not key:
            logger.error(f" Clé API '{key_id}' introuvable")
            return

        key.is_active = False
        key.revoked_at = datetime.now()
        await session.commit()

        logger.info(f"🗑️ Clé API révoquée: {key.name}")
        logger.info(f"   ID: {key.id}")


async def verify_api_key(api_key: str):
    """Vérifier une clé API"""
    async with async_session_factory() as session:
        service = ApiKeyService(session)
        key = await service.verify_api_key(api_key)

        if not key:
            logger.error(" Clé API invalide ou expirée")
            return

        logger.info("=" * 60)
        logger.info(" Clé API valide")
        logger.info("=" * 60)
        logger.info(f" Nom: {key.name}")
        logger.info(f" ID: {key.id}")
        logger.info(f" Rate limit: {key.rate_limit_per_minute} req/min")
        logger.info(f" Requêtes totales: {key.total_requests}")
        logger.info(f" Expire: {key.expires_at or 'Jamais'}")

        if key.allowed_endpoints:
            try:
                endpoints = json.loads(key.allowed_endpoints)
                logger.info(f" Endpoints autorisés: {endpoints}")
            except Exception:
                pass
        else:
            logger.info(" Endpoints autorisés: Tous")
        logger.info("=" * 60)


async def main():
    parser = argparse.ArgumentParser(
        description="Gestion avancée des utilisateurs Swagger et clés API",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
EXEMPLES D'UTILISATION:

=== UTILISATEURS SWAGGER ===

1. Créer un utilisateur avec preset:
   python scripts/manage_security.py swagger add commercial Pass123! --preset commercial

2. Créer un admin complet:
   python scripts/manage_security.py swagger add admin AdminPass

3. Créer avec tags spécifiques:
   python scripts/manage_security.py swagger add client Pass123! --tags Clients Devis Factures

4. Mettre à jour un utilisateur (ajouter des tags):
   python scripts/manage_security.py swagger update client --add-tags Commandes Livraisons

5. Changer complètement les tags:
   python scripts/manage_security.py swagger update client --set-tags Clients Articles

6. Appliquer un preset:
   python scripts/manage_security.py swagger update client --preset comptable

7. Lister les tags disponibles:
   python scripts/manage_security.py swagger tags

8. Désactiver temporairement:
   python scripts/manage_security.py swagger update client --inactive

=== CLÉS API ===

9. Créer une clé API:
   python scripts/manage_security.py apikey create "Mon App" --days 365 --rate-limit 100

10. Créer avec endpoints restreints:
    python scripts/manage_security.py apikey create "SDK-ReadOnly" --endpoints "/clients" "/clients/*" "/devis" "/devis/*"

11. Lister les clés:
    python scripts/manage_security.py apikey list

12. Vérifier une clé:
    python scripts/manage_security.py apikey verify sdk_live_xxxxx

13. Révoquer une clé:
    python scripts/manage_security.py apikey revoke <key_id>
""",
    )

    subparsers = parser.add_subparsers(dest="command", help="Commandes")

    swagger_parser = subparsers.add_parser("swagger", help="Gestion Swagger")
    swagger_sub = swagger_parser.add_subparsers(dest="swagger_command")

    add_p = swagger_sub.add_parser("add", help="Ajouter utilisateur")
    add_p.add_argument("username", help="Nom d'utilisateur")
    add_p.add_argument("password", help="Mot de passe")
    add_p.add_argument("--full-name", help="Nom complet", default=None)
    add_p.add_argument(
        "--tags",
        nargs="*",
        help="Tags autorisés. Vide = admin complet",
        default=None,
    )
    add_p.add_argument(
        "--preset",
        choices=list(PRESET_PROFILES.keys()),
        help="Appliquer un preset de tags",
    )

    update_p = swagger_sub.add_parser("update", help="Mettre à jour utilisateur")
    update_p.add_argument("username", help="Nom d'utilisateur")
    update_p.add_argument("--add-tags", nargs="+", help="Ajouter des tags")
    update_p.add_argument("--remove-tags", nargs="+", help="Retirer des tags")
    update_p.add_argument("--set-tags", nargs="*", help="Définir les tags (remplace)")
    update_p.add_argument(
        "--preset", choices=list(PRESET_PROFILES.keys()), help="Appliquer preset"
    )
    update_p.add_argument("--active", action="store_true", help="Activer l'utilisateur")
    update_p.add_argument(
        "--inactive", action="store_true", help="Désactiver l'utilisateur"
    )

    swagger_sub.add_parser("list", help="Lister utilisateurs")

    del_p = swagger_sub.add_parser("delete", help="Supprimer utilisateur")
    del_p.add_argument("username", help="Nom d'utilisateur")

    swagger_sub.add_parser("tags", help="Lister les tags disponibles")

    apikey_parser = subparsers.add_parser("apikey", help="Gestion clés API")
    apikey_sub = apikey_parser.add_subparsers(dest="apikey_command")

    create_p = apikey_sub.add_parser("create", help="Créer clé API")
    create_p.add_argument("name", help="Nom de la clé")
    create_p.add_argument("--description", help="Description")
    create_p.add_argument("--days", type=int, default=365, help="Expiration (jours)")
    create_p.add_argument("--rate-limit", type=int, default=60, help="Req/min")
    create_p.add_argument("--endpoints", nargs="+", help="Endpoints autorisés")

    apikey_sub.add_parser("list", help="Lister clés")

    rev_p = apikey_sub.add_parser("revoke", help="Révoquer clé")
    rev_p.add_argument("key_id", help="ID de la clé")

    ver_p = apikey_sub.add_parser("verify", help="Vérifier clé")
    ver_p.add_argument("api_key", help="Clé API complète")

    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        return

    if args.command == "swagger":
        if args.swagger_command == "add":
            await add_swagger_user(
                args.username,
                args.password,
                args.full_name,
                args.tags,
                args.preset,
            )
        elif args.swagger_command == "update":
            active = None
            if args.active:
                active = True
            elif args.inactive:
                active = False

            await update_swagger_user(
                args.username,
                add_tags=args.add_tags,
                remove_tags=args.remove_tags,
                set_tags=args.set_tags,
                preset=args.preset,
                active=active,
            )
        elif args.swagger_command == "list":
            await list_swagger_users()
        elif args.swagger_command == "delete":
            await delete_swagger_user(args.username)
        elif args.swagger_command == "tags":
            await list_available_tags()
        else:
            swagger_parser.print_help()

    elif args.command == "apikey":
        if args.apikey_command == "create":
            await create_api_key(
                name=args.name,
                description=args.description,
                expires_in_days=args.days,
                rate_limit=args.rate_limit,
                endpoints=args.endpoints,
            )
        elif args.apikey_command == "list":
            await list_api_keys()
        elif args.apikey_command == "revoke":
            await revoke_api_key(args.key_id)
        elif args.apikey_command == "verify":
            await verify_api_key(args.api_key)
        else:
            apikey_parser.print_help()


if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\n Interrupted")
        sys.exit(0)
    except Exception as e:
        logger.error(f" Erreur: {e}")
        import traceback

        traceback.print_exc()
        sys.exit(1)
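The convention this script establishes is that SwaggerUser.allowed_tags is either NULL (full admin access) or a JSON-encoded list of Swagger tags. A minimal sketch of the check the API side would perform against that column; the helper function is hypothetical, not code from this repository.

import json
from typing import Optional

def is_tag_allowed(allowed_tags: Optional[str], tag: str) -> bool:
    if not allowed_tags:              # NULL / empty -> ADMIN COMPLET
        return True
    try:
        tags = json.loads(allowed_tags)
    except json.JSONDecodeError:
        return False                  # malformed value: deny rather than expose
    return tag in tags

assert is_tag_allowed(None, "Clients")                   # admin user sees everything
assert is_tag_allowed('["Clients", "Devis"]', "Devis")
assert not is_tag_allowed('["Clients"]', "Factures")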
scripts/test_security.py — new file (354 lines, listing truncated below)
@@ -0,0 +1,354 @@
import requests
import argparse
import sys
from typing import Tuple


class SecurityTester:
    def __init__(self, base_url: str):
        self.base_url = base_url.rstrip("/")
        self.results = {"passed": 0, "failed": 0, "tests": []}

    def log_test(self, name: str, passed: bool, details: str = ""):
        """Enregistrer le résultat d'un test"""
        status = " PASS" if passed else " FAIL"
        print(f"{status} - {name}")
        if details:
            print(f"   {details}")

        self.results["tests"].append(
            {"name": name, "passed": passed, "details": details}
        )

        if passed:
            self.results["passed"] += 1
        else:
            self.results["failed"] += 1

    def test_swagger_without_auth(self) -> bool:
        """Test 1: Swagger UI devrait demander une authentification"""
        print("\n Test 1: Protection Swagger UI")

        try:
            response = requests.get(f"{self.base_url}/docs", timeout=5)

            if response.status_code == 401:
                self.log_test(
                    "Swagger protégé",
                    True,
                    "Code 401 retourné sans authentification",
                )
                return True
            else:
                self.log_test(
                    "Swagger protégé",
                    False,
                    f"Code {response.status_code} au lieu de 401",
                )
                return False

        except Exception as e:
            self.log_test("Swagger protégé", False, f"Erreur: {str(e)}")
            return False

    def test_swagger_with_auth(self, username: str, password: str) -> bool:
        """Test 2: Swagger UI accessible avec credentials valides"""
        print("\n Test 2: Accès Swagger avec authentification")

        try:
            response = requests.get(
                f"{self.base_url}/docs", auth=(username, password), timeout=5
            )

            if response.status_code == 200:
                self.log_test(
                    "Accès Swagger avec auth",
                    True,
                    f"Authentifié comme {username}",
                )
                return True
            else:
                self.log_test(
                    "Accès Swagger avec auth",
                    False,
                    f"Code {response.status_code}, credentials invalides?",
                )
                return False

        except Exception as e:
            self.log_test("Accès Swagger avec auth", False, f"Erreur: {str(e)}")
            return False

    def test_api_without_auth(self) -> bool:
        """Test 3: Endpoints API devraient demander une authentification"""
        print("\n Test 3: Protection des endpoints API")

        test_endpoints = ["/api/v1/clients", "/api/v1/documents"]

        all_protected = True
        for endpoint in test_endpoints:
            try:
                response = requests.get(f"{self.base_url}{endpoint}", timeout=5)

                if response.status_code == 401:
                    print(f"   {endpoint} protégé (401)")
                else:
                    print(
                        f"   {endpoint} accessible sans auth (code {response.status_code})"
                    )
                    all_protected = False

            except Exception as e:
                print(f"   {endpoint} erreur: {str(e)}")
                all_protected = False

        self.log_test("Endpoints API protégés", all_protected)
        return all_protected

    def test_health_endpoint_public(self) -> bool:
        """Test 4: Endpoint /health devrait être accessible sans auth"""
        print("\n Test 4: Endpoint /health public")

        try:
            response = requests.get(f"{self.base_url}/health", timeout=5)

            if response.status_code == 200:
                self.log_test("/health accessible", True, "Endpoint public fonctionne")
                return True
            else:
                self.log_test(
                    "/health accessible",
                    False,
                    f"Code {response.status_code} inattendu",
                )
                return False

        except Exception as e:
            self.log_test("/health accessible", False, f"Erreur: {str(e)}")
            return False

    def test_api_key_creation(self, username: str, password: str) -> Tuple[bool, str]:
        """Test 5: Créer une clé API via l'endpoint"""
        print("\n Test 5: Création d'une clé API")

        try:
            login_response = requests.post(
                f"{self.base_url}/api/v1/auth/login",
                json={"email": username, "password": password},
                timeout=5,
            )

            if login_response.status_code != 200:
                self.log_test(
                    "Création clé API",
                    False,
                    "Impossible de se connecter pour obtenir un JWT",
                )
                return False, ""

            jwt_token = login_response.json().get("access_token")

            create_response = requests.post(
                f"{self.base_url}/api/v1/api-keys",
                headers={"Authorization": f"Bearer {jwt_token}"},
                json={
                    "name": "Test API Key",
                    "description": "Clé de test automatisé",
                    "rate_limit_per_minute": 60,
                    "expires_in_days": 30,
                },
                timeout=5,
            )

            if create_response.status_code == 201:
                api_key = create_response.json().get("api_key")
                self.log_test("Création clé API", True, f"Clé créée: {api_key[:20]}...")
                return True, api_key
            else:
                self.log_test(
                    "Création clé API",
                    False,
                    f"Code {create_response.status_code}",
                )
                return False, ""

        except Exception as e:
            self.log_test("Création clé API", False, f"Erreur: {str(e)}")
            return False, ""

    def test_api_key_usage(self, api_key: str) -> bool:
        """Test 6: Utiliser une clé API pour accéder à un endpoint"""
        print("\n Test 6: Utilisation d'une clé API")

        if not api_key:
            self.log_test("Utilisation clé API", False, "Pas de clé disponible")
            return False

        try:
            response = requests.get(
                f"{self.base_url}/api/v1/clients",
                headers={"X-API-Key": api_key},
                timeout=5,
            )

            if response.status_code == 200:
                self.log_test("Utilisation clé API", True, "Clé acceptée")
                return True
            else:
                self.log_test(
                    "Utilisation clé API",
                    False,
                    f"Code {response.status_code}, clé refusée?",
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.log_test("Utilisation clé API", False, f"Erreur: {str(e)}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def test_invalid_api_key(self) -> bool:
|
||||||
|
"""Test 7: Une clé invalide devrait être refusée"""
|
||||||
|
print("\n Test 7: Rejet de clé API invalide")
|
||||||
|
|
||||||
|
invalid_key = "sdk_live_invalid_key_12345"
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = requests.get(
|
||||||
|
f"{self.base_url}/api/v1/clients",
|
||||||
|
headers={"X-API-Key": invalid_key},
|
||||||
|
timeout=5,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.status_code == 401:
|
||||||
|
self.log_test("Clé invalide rejetée", True, "Code 401 comme attendu")
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
self.log_test(
|
||||||
|
"Clé invalide rejetée",
|
||||||
|
False,
|
||||||
|
f"Code {response.status_code} au lieu de 401",
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.log_test("Clé invalide rejetée", False, f"Erreur: {str(e)}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def test_rate_limiting(self, api_key: str) -> bool:
|
||||||
|
"""Test 8: Rate limiting (optionnel, peut prendre du temps)"""
|
||||||
|
print("\n Test 8: Rate limiting (test simple)")
|
||||||
|
|
||||||
|
if not api_key:
|
||||||
|
self.log_test("Rate limiting", False, "Pas de clé disponible")
|
||||||
|
return False
|
||||||
|
|
||||||
|
print(" Envoi de 70 requêtes rapides...")
|
||||||
|
|
||||||
|
rate_limited = False
|
||||||
|
for i in range(70):
|
||||||
|
try:
|
||||||
|
response = requests.get(
|
||||||
|
f"{self.base_url}/health",
|
||||||
|
headers={"X-API-Key": api_key},
|
||||||
|
timeout=1,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.status_code == 429:
|
||||||
|
rate_limited = True
|
||||||
|
print(f" Rate limit atteint à la requête {i + 1}")
|
||||||
|
break
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if rate_limited:
|
||||||
|
self.log_test("Rate limiting", True, "Rate limit détecté")
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
self.log_test(
|
||||||
|
"Rate limiting",
|
||||||
|
True,
|
||||||
|
"Aucun rate limit détecté (peut être normal si pas implémenté)",
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def print_summary(self):
|
||||||
|
"""Afficher le résumé des tests"""
|
||||||
|
print("\n" + "=" * 60)
|
||||||
|
print(" RÉSUMÉ DES TESTS")
|
||||||
|
print("=" * 60)
|
||||||
|
|
||||||
|
total = self.results["passed"] + self.results["failed"]
|
||||||
|
success_rate = (self.results["passed"] / total * 100) if total > 0 else 0
|
||||||
|
|
||||||
|
print(f"\nTotal: {total} tests")
|
||||||
|
print(f" Réussis: {self.results['passed']}")
|
||||||
|
print(f" Échoués: {self.results['failed']}")
|
||||||
|
print(f"Taux de réussite: {success_rate:.1f}%\n")
|
||||||
|
|
||||||
|
if self.results["failed"] == 0:
|
||||||
|
print("🎉 Tous les tests sont passés ! Sécurité OK.")
|
||||||
|
return 0
|
||||||
|
else:
|
||||||
|
print(" Certains tests ont échoué. Vérifiez la configuration.")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Test automatisé de la sécurité de l'API"
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--url",
|
||||||
|
required=True,
|
||||||
|
help="URL de base de l'API (ex: http://localhost:8000)",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--swagger-user", required=True, help="Utilisateur Swagger pour les tests"
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--swagger-pass", required=True, help="Mot de passe Swagger pour les tests"
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--skip-rate-limit",
|
||||||
|
action="store_true",
|
||||||
|
help="Sauter le test de rate limiting (long)",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
print(" Démarrage des tests de sécurité")
|
||||||
|
print(f" URL cible: {args.url}\n")
|
||||||
|
|
||||||
|
tester = SecurityTester(args.url)
|
||||||
|
|
||||||
|
tester.test_swagger_without_auth()
|
||||||
|
tester.test_swagger_with_auth(args.swagger_user, args.swagger_pass)
|
||||||
|
tester.test_api_without_auth()
|
||||||
|
tester.test_health_endpoint_public()
|
||||||
|
|
||||||
|
success, api_key = tester.test_api_key_creation(
|
||||||
|
args.swagger_user, args.swagger_pass
|
||||||
|
)
|
||||||
|
|
||||||
|
if success and api_key:
|
||||||
|
tester.test_api_key_usage(api_key)
|
||||||
|
tester.test_invalid_api_key()
|
||||||
|
|
||||||
|
if not args.skip_rate_limit:
|
||||||
|
tester.test_rate_limiting(api_key)
|
||||||
|
else:
|
||||||
|
print("\n Test de rate limiting sauté")
|
||||||
|
else:
|
||||||
|
print("\n Tests avec clé API sautés (création échouée)")
|
||||||
|
|
||||||
|
exit_code = tester.print_summary()
|
||||||
|
sys.exit(exit_code)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
@ -5,10 +5,12 @@ import jwt
|
||||||
import secrets
|
import secrets
|
||||||
import hashlib
|
import hashlib
|
||||||
|
|
||||||
SECRET_KEY = "VOTRE_SECRET_KEY_A_METTRE_EN_.ENV"
|
from config.config import settings
|
||||||
ALGORITHM = "HS256"
|
|
||||||
ACCESS_TOKEN_EXPIRE_MINUTES = 30
|
SECRET_KEY = settings.jwt_secret
|
||||||
REFRESH_TOKEN_EXPIRE_DAYS = 7
|
ALGORITHM = settings.jwt_algorithm
|
||||||
|
ACCESS_TOKEN_EXPIRE_MINUTES = settings.access_token_expire_minutes
|
||||||
|
REFRESH_TOKEN_EXPIRE_DAYS = settings.refresh_token_expire_days
|
||||||
|
|
||||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
|
||||||
|
|
@ -67,9 +69,13 @@ def decode_token(token: str) -> Optional[Dict]:
|
||||||
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
||||||
return payload
|
return payload
|
||||||
except jwt.ExpiredSignatureError:
|
except jwt.ExpiredSignatureError:
|
||||||
return None
|
raise jwt.InvalidTokenError("Token expiré")
|
||||||
except jwt.JWTError:
|
except jwt.DecodeError:
|
||||||
return None
|
raise jwt.InvalidTokenError("Token invalide (format incorrect)")
|
||||||
|
except jwt.InvalidTokenError as e:
|
||||||
|
raise jwt.InvalidTokenError(f"Token invalide: {str(e)}")
|
||||||
|
except Exception as e:
|
||||||
|
raise jwt.InvalidTokenError(f"Erreur lors du décodage du token: {str(e)}")
|
||||||
|
|
||||||
|
|
||||||
def validate_password_strength(password: str) -> tuple[bool, str]:
|
def validate_password_strength(password: str) -> tuple[bool, str]:
|
||||||
|
|
|
||||||
223
services/api_key.py
Normal file
223
services/api_key.py
Normal file
|
|
@ -0,0 +1,223 @@
|
||||||
|
import secrets
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Optional, List, Dict
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy import select, and_, or_
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from database.models.api_key import ApiKey
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ApiKeyService:
|
||||||
|
"""Service de gestion des clés API"""
|
||||||
|
|
||||||
|
def __init__(self, session: AsyncSession):
|
||||||
|
self.session = session
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def generate_api_key() -> str:
|
||||||
|
"""Génère une clé API unique et sécurisée"""
|
||||||
|
random_part = secrets.token_urlsafe(32)
|
||||||
|
return f"sdk_live_{random_part}"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def hash_api_key(api_key: str) -> str:
|
||||||
|
"""Hash la clé API pour stockage sécurisé"""
|
||||||
|
return hashlib.sha256(api_key.encode()).hexdigest()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_key_prefix(api_key: str) -> str:
|
||||||
|
"""Extrait le préfixe de la clé pour identification"""
|
||||||
|
return api_key[:12] if len(api_key) >= 12 else api_key
|
||||||
|
|
||||||
|
async def create_api_key(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
description: Optional[str] = None,
|
||||||
|
created_by: str = "system",
|
||||||
|
user_id: Optional[str] = None,
|
||||||
|
expires_in_days: Optional[int] = None,
|
||||||
|
rate_limit_per_minute: int = 60,
|
||||||
|
allowed_endpoints: Optional[List[str]] = None,
|
||||||
|
) -> tuple[ApiKey, str]:
|
||||||
|
api_key_plain = self.generate_api_key()
|
||||||
|
key_hash = self.hash_api_key(api_key_plain)
|
||||||
|
key_prefix = self.get_key_prefix(api_key_plain)
|
||||||
|
|
||||||
|
expires_at = None
|
||||||
|
if expires_in_days:
|
||||||
|
expires_at = datetime.now() + timedelta(days=expires_in_days)
|
||||||
|
|
||||||
|
api_key_obj = ApiKey(
|
||||||
|
key_hash=key_hash,
|
||||||
|
key_prefix=key_prefix,
|
||||||
|
name=name,
|
||||||
|
description=description,
|
||||||
|
created_by=created_by,
|
||||||
|
user_id=user_id,
|
||||||
|
expires_at=expires_at,
|
||||||
|
rate_limit_per_minute=rate_limit_per_minute,
|
||||||
|
allowed_endpoints=json.dumps(allowed_endpoints)
|
||||||
|
if allowed_endpoints
|
||||||
|
else None,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.session.add(api_key_obj)
|
||||||
|
await self.session.commit()
|
||||||
|
await self.session.refresh(api_key_obj)
|
||||||
|
|
||||||
|
logger.info(f" Clé API créée: {name} (prefix: {key_prefix})")
|
||||||
|
|
||||||
|
return api_key_obj, api_key_plain
|
||||||
|
|
||||||
|
async def verify_api_key(self, api_key_plain: str) -> Optional[ApiKey]:
|
||||||
|
key_hash = self.hash_api_key(api_key_plain)
|
||||||
|
|
||||||
|
result = await self.session.execute(
|
||||||
|
select(ApiKey).where(
|
||||||
|
and_(
|
||||||
|
ApiKey.key_hash == key_hash,
|
||||||
|
ApiKey.is_active,
|
||||||
|
ApiKey.revoked_at.is_(None),
|
||||||
|
or_(
|
||||||
|
ApiKey.expires_at.is_(None), ApiKey.expires_at > datetime.now()
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
api_key_obj = result.scalar_one_or_none()
|
||||||
|
|
||||||
|
if api_key_obj:
|
||||||
|
api_key_obj.total_requests += 1
|
||||||
|
api_key_obj.last_used_at = datetime.now()
|
||||||
|
await self.session.commit()
|
||||||
|
|
||||||
|
logger.debug(f" Clé API validée: {api_key_obj.name}")
|
||||||
|
else:
|
||||||
|
logger.warning(" Clé API invalide ou expirée")
|
||||||
|
|
||||||
|
return api_key_obj
|
||||||
|
|
||||||
|
async def list_api_keys(
|
||||||
|
self,
|
||||||
|
include_revoked: bool = False,
|
||||||
|
user_id: Optional[str] = None,
|
||||||
|
) -> List[ApiKey]:
|
||||||
|
"""Liste les clés API"""
|
||||||
|
query = select(ApiKey)
|
||||||
|
|
||||||
|
if not include_revoked:
|
||||||
|
query = query.where(ApiKey.revoked_at.is_(None))
|
||||||
|
|
||||||
|
if user_id:
|
||||||
|
query = query.where(ApiKey.user_id == user_id)
|
||||||
|
|
||||||
|
query = query.order_by(ApiKey.created_at.desc())
|
||||||
|
|
||||||
|
result = await self.session.execute(query)
|
||||||
|
return list(result.scalars().all())
|
||||||
|
|
||||||
|
async def revoke_api_key(self, key_id: str) -> bool:
|
||||||
|
"""Révoque une clé API"""
|
||||||
|
result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
|
||||||
|
api_key_obj = result.scalar_one_or_none()
|
||||||
|
|
||||||
|
if not api_key_obj:
|
||||||
|
return False
|
||||||
|
|
||||||
|
api_key_obj.is_active = False
|
||||||
|
api_key_obj.revoked_at = datetime.now()
|
||||||
|
await self.session.commit()
|
||||||
|
|
||||||
|
logger.info(f"🗑️ Clé API révoquée: {api_key_obj.name}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
async def get_by_id(self, key_id: str) -> Optional[ApiKey]:
|
||||||
|
"""Récupère une clé API par son ID"""
|
||||||
|
result = await self.session.execute(select(ApiKey).where(ApiKey.id == key_id))
|
||||||
|
return result.scalar_one_or_none()
|
||||||
|
|
||||||
|
async def check_rate_limit(self, api_key_obj: ApiKey) -> tuple[bool, Dict]:
|
||||||
|
return True, {
|
||||||
|
"allowed": True,
|
||||||
|
"limit": api_key_obj.rate_limit_per_minute,
|
||||||
|
"remaining": api_key_obj.rate_limit_per_minute,
|
||||||
|
}
|
||||||
|
|
||||||
|
async def check_endpoint_access(self, api_key_obj: ApiKey, endpoint: str) -> bool:
|
||||||
|
if not api_key_obj.allowed_endpoints:
|
||||||
|
logger.debug(
|
||||||
|
f"🔓 API Key {api_key_obj.name}: Aucune restriction d'endpoint"
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
try:
|
||||||
|
allowed = json.loads(api_key_obj.allowed_endpoints)
|
||||||
|
|
||||||
|
if "*" in allowed or "/*" in allowed:
|
||||||
|
logger.debug(f"🔓 API Key {api_key_obj.name}: Accès global autorisé")
|
||||||
|
return True
|
||||||
|
|
||||||
|
for pattern in allowed:
|
||||||
|
if pattern == endpoint:
|
||||||
|
logger.debug(f" Match exact: {pattern} == {endpoint}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
if pattern.endswith("/*"):
|
||||||
|
base = pattern[:-2] # "/clients/*" → "/clients"
|
||||||
|
if endpoint == base or endpoint.startswith(base + "/"):
|
||||||
|
logger.debug(f" Match wildcard: {pattern} ↔ {endpoint}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
elif pattern.endswith("*"):
|
||||||
|
base = pattern[:-1] # "/clients*" → "/clients"
|
||||||
|
if endpoint.startswith(base):
|
||||||
|
logger.debug(f" Match prefix: {pattern} ↔ {endpoint}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
logger.warning(
|
||||||
|
f" API Key {api_key_obj.name}: Accès refusé à {endpoint}\n"
|
||||||
|
f" Endpoints autorisés: {allowed}"
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
logger.error(f" Erreur parsing allowed_endpoints pour {api_key_obj.id}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def api_key_to_response(api_key_obj: ApiKey, show_key: bool = False) -> Dict:
|
||||||
|
"""Convertit un objet ApiKey en réponse API"""
|
||||||
|
|
||||||
|
allowed_endpoints = None
|
||||||
|
if api_key_obj.allowed_endpoints:
|
||||||
|
try:
|
||||||
|
allowed_endpoints = json.loads(api_key_obj.allowed_endpoints)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
is_expired = False
|
||||||
|
if api_key_obj.expires_at:
|
||||||
|
is_expired = api_key_obj.expires_at < datetime.now()
|
||||||
|
|
||||||
|
return {
|
||||||
|
"id": api_key_obj.id,
|
||||||
|
"name": api_key_obj.name,
|
||||||
|
"description": api_key_obj.description,
|
||||||
|
"key_prefix": api_key_obj.key_prefix,
|
||||||
|
"is_active": api_key_obj.is_active,
|
||||||
|
"is_expired": is_expired,
|
||||||
|
"rate_limit_per_minute": api_key_obj.rate_limit_per_minute,
|
||||||
|
"allowed_endpoints": allowed_endpoints,
|
||||||
|
"total_requests": api_key_obj.total_requests,
|
||||||
|
"last_used_at": api_key_obj.last_used_at,
|
||||||
|
"created_at": api_key_obj.created_at,
|
||||||
|
"expires_at": api_key_obj.expires_at,
|
||||||
|
"revoked_at": api_key_obj.revoked_at,
|
||||||
|
"created_by": api_key_obj.created_by,
|
||||||
|
}
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
import smtplib
|
import smtplib
|
||||||
from email.mime.multipart import MIMEMultipart
|
from email.mime.multipart import MIMEMultipart
|
||||||
from email.mime.text import MIMEText
|
from email.mime.text import MIMEText
|
||||||
from config import settings
|
from config.config import settings
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
|
||||||
|
|
@ -6,10 +6,10 @@ import httpx
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Optional, Tuple, List
|
from typing import Optional, Tuple, List
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
from sqlalchemy import select, update, and_
|
from sqlalchemy import false, select, update, and_
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from config import settings
|
from config.config import settings
|
||||||
from database import SageGatewayConfig
|
from database import SageGatewayConfig
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
@ -55,7 +55,7 @@ class SageGatewayService:
|
||||||
and_(
|
and_(
|
||||||
SageGatewayConfig.id == gateway_id,
|
SageGatewayConfig.id == gateway_id,
|
||||||
SageGatewayConfig.user_id == user_id,
|
SageGatewayConfig.user_id == user_id,
|
||||||
SageGatewayConfig.is_deleted,
|
SageGatewayConfig.is_deleted == false(),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
@ -67,7 +67,7 @@ class SageGatewayService:
|
||||||
query = select(SageGatewayConfig).where(SageGatewayConfig.user_id == user_id)
|
query = select(SageGatewayConfig).where(SageGatewayConfig.user_id == user_id)
|
||||||
|
|
||||||
if not include_deleted:
|
if not include_deleted:
|
||||||
query = query.where(SageGatewayConfig.is_deleted)
|
query = query.where(SageGatewayConfig.is_deleted == false())
|
||||||
|
|
||||||
query = query.order_by(
|
query = query.order_by(
|
||||||
SageGatewayConfig.is_active.desc(),
|
SageGatewayConfig.is_active.desc(),
|
||||||
|
|
@ -167,7 +167,7 @@ class SageGatewayService:
|
||||||
and_(
|
and_(
|
||||||
SageGatewayConfig.user_id == user_id,
|
SageGatewayConfig.user_id == user_id,
|
||||||
SageGatewayConfig.is_active,
|
SageGatewayConfig.is_active,
|
||||||
SageGatewayConfig.is_deleted,
|
SageGatewayConfig.is_deleted == false(),
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
|
||||||
361
services/universign_document.py
Normal file
361
services/universign_document.py
Normal file
|
|
@ -0,0 +1,361 @@
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import requests
|
||||||
|
from pathlib import Path
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Tuple, Dict, List
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
SIGNED_DOCS_DIR = Path(os.getenv("SIGNED_DOCS_PATH", "/app/data/signed_documents"))
|
||||||
|
SIGNED_DOCS_DIR.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
|
class UniversignDocumentService:
|
||||||
|
"""Service de gestion des documents signés Universign - VERSION CORRIGÉE"""
|
||||||
|
|
||||||
|
def __init__(self, api_url: str, api_key: str, timeout: int = 60):
|
||||||
|
self.api_url = api_url.rstrip("/")
|
||||||
|
self.api_key = api_key
|
||||||
|
self.timeout = timeout
|
||||||
|
self.auth = (api_key, "")
|
||||||
|
|
||||||
|
def fetch_transaction_documents(self, transaction_id: str) -> Optional[List[Dict]]:
|
||||||
|
try:
|
||||||
|
logger.info(f" Récupération documents pour transaction: {transaction_id}")
|
||||||
|
|
||||||
|
response = requests.get(
|
||||||
|
f"{self.api_url}/transactions/{transaction_id}",
|
||||||
|
auth=self.auth,
|
||||||
|
timeout=self.timeout,
|
||||||
|
headers={"Accept": "application/json"},
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.status_code == 200:
|
||||||
|
data = response.json()
|
||||||
|
documents = data.get("documents", [])
|
||||||
|
|
||||||
|
logger.info(f"{len(documents)} document(s) trouvé(s)")
|
||||||
|
|
||||||
|
for idx, doc in enumerate(documents):
|
||||||
|
logger.debug(
|
||||||
|
f" Document {idx}: id={doc.get('id')}, "
|
||||||
|
f"name={doc.get('name')}, status={doc.get('status')}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return documents
|
||||||
|
|
||||||
|
elif response.status_code == 404:
|
||||||
|
logger.warning(
|
||||||
|
f"Transaction {transaction_id} introuvable sur Universign"
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
else:
|
||||||
|
logger.error(
|
||||||
|
f"Erreur HTTP {response.status_code} pour {transaction_id}: "
|
||||||
|
f"{response.text[:500]}"
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
except requests.exceptions.Timeout:
|
||||||
|
logger.error(f"⏱️ Timeout récupération transaction {transaction_id}")
|
||||||
|
return None
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f" Erreur fetch documents: {e}", exc_info=True)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def download_signed_document(
|
||||||
|
self, transaction_id: str, document_id: str
|
||||||
|
) -> Optional[bytes]:
|
||||||
|
try:
|
||||||
|
download_url = (
|
||||||
|
f"{self.api_url}/transactions/{transaction_id}"
|
||||||
|
f"/documents/{document_id}/download"
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(f"Téléchargement depuis: {download_url}")
|
||||||
|
|
||||||
|
response = requests.get(
|
||||||
|
download_url,
|
||||||
|
auth=self.auth,
|
||||||
|
timeout=self.timeout,
|
||||||
|
stream=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.status_code == 200:
|
||||||
|
content_type = response.headers.get("Content-Type", "")
|
||||||
|
content_length = response.headers.get("Content-Length", "unknown")
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Téléchargement réussi: "
|
||||||
|
f"Content-Type={content_type}, Size={content_length}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
"pdf" not in content_type.lower()
|
||||||
|
and "octet-stream" not in content_type.lower()
|
||||||
|
):
|
||||||
|
logger.warning(
|
||||||
|
f"Type de contenu inattendu: {content_type}. "
|
||||||
|
f"Tentative de lecture quand même..."
|
||||||
|
)
|
||||||
|
|
||||||
|
content = response.content
|
||||||
|
|
||||||
|
if len(content) < 1024:
|
||||||
|
logger.error(f" Document trop petit: {len(content)} octets")
|
||||||
|
return None
|
||||||
|
|
||||||
|
return content
|
||||||
|
|
||||||
|
elif response.status_code == 404:
|
||||||
|
logger.error(
|
||||||
|
f" Document {document_id} introuvable pour transaction {transaction_id}"
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
elif response.status_code == 403:
|
||||||
|
logger.error(
|
||||||
|
f" Accès refusé au document {document_id}. "
|
||||||
|
f"Vérifiez que la transaction est bien signée."
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
else:
|
||||||
|
logger.error(
|
||||||
|
f" Erreur HTTP {response.status_code}: {response.text[:500]}"
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
except requests.exceptions.Timeout:
|
||||||
|
logger.error(f"⏱️ Timeout téléchargement document {document_id}")
|
||||||
|
return None
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f" Erreur téléchargement: {e}", exc_info=True)
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def download_and_store_signed_document(
|
||||||
|
self, session: AsyncSession, transaction, force: bool = False
|
||||||
|
) -> Tuple[bool, Optional[str]]:
|
||||||
|
if not force and transaction.signed_document_path:
|
||||||
|
if os.path.exists(transaction.signed_document_path):
|
||||||
|
logger.debug(
|
||||||
|
f"Document déjà téléchargé: {transaction.transaction_id}"
|
||||||
|
)
|
||||||
|
return True, None
|
||||||
|
|
||||||
|
transaction.download_attempts += 1
|
||||||
|
|
||||||
|
try:
|
||||||
|
logger.info(
|
||||||
|
f"Récupération document signé pour: {transaction.transaction_id}"
|
||||||
|
)
|
||||||
|
|
||||||
|
documents = self.fetch_transaction_documents(transaction.transaction_id)
|
||||||
|
|
||||||
|
if not documents:
|
||||||
|
error = "Aucun document trouvé dans la transaction Universign"
|
||||||
|
logger.warning(f"{error}")
|
||||||
|
transaction.download_error = error
|
||||||
|
await session.commit()
|
||||||
|
return False, error
|
||||||
|
|
||||||
|
document_id = None
|
||||||
|
for doc in documents:
|
||||||
|
doc_id = doc.get("id")
|
||||||
|
doc_status = doc.get("status", "").lower()
|
||||||
|
|
||||||
|
if doc_status in ["signed", "completed", "closed"]:
|
||||||
|
document_id = doc_id
|
||||||
|
logger.info(
|
||||||
|
f"Document signé trouvé: {doc_id} (status: {doc_status})"
|
||||||
|
)
|
||||||
|
break
|
||||||
|
|
||||||
|
if document_id is None:
|
||||||
|
document_id = doc_id
|
||||||
|
|
||||||
|
if not document_id:
|
||||||
|
error = "Impossible de déterminer l'ID du document à télécharger"
|
||||||
|
logger.error(f" {error}")
|
||||||
|
transaction.download_error = error
|
||||||
|
await session.commit()
|
||||||
|
return False, error
|
||||||
|
|
||||||
|
if hasattr(transaction, "universign_document_id"):
|
||||||
|
transaction.universign_document_id = document_id
|
||||||
|
|
||||||
|
pdf_content = self.download_signed_document(
|
||||||
|
transaction_id=transaction.transaction_id, document_id=document_id
|
||||||
|
)
|
||||||
|
|
||||||
|
if not pdf_content:
|
||||||
|
error = f"Échec téléchargement document {document_id}"
|
||||||
|
logger.error(f" {error}")
|
||||||
|
transaction.download_error = error
|
||||||
|
await session.commit()
|
||||||
|
return False, error
|
||||||
|
|
||||||
|
filename = self._generate_filename(transaction)
|
||||||
|
file_path = SIGNED_DOCS_DIR / filename
|
||||||
|
|
||||||
|
with open(file_path, "wb") as f:
|
||||||
|
f.write(pdf_content)
|
||||||
|
|
||||||
|
file_size = os.path.getsize(file_path)
|
||||||
|
|
||||||
|
transaction.signed_document_path = str(file_path)
|
||||||
|
transaction.signed_document_downloaded_at = datetime.now()
|
||||||
|
transaction.signed_document_size_bytes = file_size
|
||||||
|
transaction.download_error = None
|
||||||
|
|
||||||
|
transaction.document_url = (
|
||||||
|
f"{self.api_url}/transactions/{transaction.transaction_id}"
|
||||||
|
f"/documents/{document_id}/download"
|
||||||
|
)
|
||||||
|
|
||||||
|
await session.commit()
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Document signé téléchargé: {filename} ({file_size / 1024:.1f} KB)"
|
||||||
|
)
|
||||||
|
|
||||||
|
return True, None
|
||||||
|
|
||||||
|
except OSError as e:
|
||||||
|
error = f"Erreur filesystem: {str(e)}"
|
||||||
|
logger.error(f" {error}")
|
||||||
|
transaction.download_error = error
|
||||||
|
await session.commit()
|
||||||
|
return False, error
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
error = f"Erreur inattendue: {str(e)}"
|
||||||
|
logger.error(f" {error}", exc_info=True)
|
||||||
|
transaction.download_error = error
|
||||||
|
await session.commit()
|
||||||
|
return False, error
|
||||||
|
|
||||||
|
def _generate_filename(self, transaction) -> str:
|
||||||
|
"""Génère un nom de fichier unique pour le document signé"""
|
||||||
|
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||||
|
tx_id = transaction.transaction_id.replace("tr_", "")
|
||||||
|
filename = f"{transaction.sage_document_id}_{tx_id}_{timestamp}_signed.pdf"
|
||||||
|
return filename
|
||||||
|
|
||||||
|
def get_document_path(self, transaction) -> Optional[Path]:
|
||||||
|
"""Retourne le chemin du document signé s'il existe"""
|
||||||
|
if not transaction.signed_document_path:
|
||||||
|
return None
|
||||||
|
path = Path(transaction.signed_document_path)
|
||||||
|
if path.exists():
|
||||||
|
return path
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def cleanup_old_documents(self, days_to_keep: int = 90) -> Tuple[int, int]:
|
||||||
|
"""Supprime les anciens documents signés"""
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
cutoff_date = datetime.now() - timedelta(days=days_to_keep)
|
||||||
|
deleted = 0
|
||||||
|
size_freed = 0
|
||||||
|
|
||||||
|
for file_path in SIGNED_DOCS_DIR.glob("*.pdf"):
|
||||||
|
try:
|
||||||
|
file_time = datetime.fromtimestamp(os.path.getmtime(file_path))
|
||||||
|
if file_time < cutoff_date:
|
||||||
|
size_freed += os.path.getsize(file_path)
|
||||||
|
os.remove(file_path)
|
||||||
|
deleted += 1
|
||||||
|
logger.info(f"🗑️ Supprimé: {file_path.name}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Erreur suppression {file_path}: {e}")
|
||||||
|
|
||||||
|
size_freed_mb = size_freed / (1024 * 1024)
|
||||||
|
logger.info(
|
||||||
|
f"Nettoyage terminé: {deleted} fichiers supprimés "
|
||||||
|
f"({size_freed_mb:.2f} MB libérés)"
|
||||||
|
)
|
||||||
|
|
||||||
|
return deleted, int(size_freed_mb)
|
||||||
|
|
||||||
|
|
||||||
|
def diagnose_transaction(self, transaction_id: str) -> Dict:
|
||||||
|
"""
|
||||||
|
Diagnostic complet d'une transaction pour debug
|
||||||
|
"""
|
||||||
|
result = {
|
||||||
|
"transaction_id": transaction_id,
|
||||||
|
"api_url": self.api_url,
|
||||||
|
"timestamp": datetime.now().isoformat(),
|
||||||
|
"checks": {},
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
logger.info(f"Diagnostic transaction: {transaction_id}")
|
||||||
|
|
||||||
|
response = requests.get(
|
||||||
|
f"{self.api_url}/transactions/{transaction_id}",
|
||||||
|
auth=self.auth,
|
||||||
|
timeout=self.timeout,
|
||||||
|
)
|
||||||
|
|
||||||
|
result["checks"]["transaction_fetch"] = {
|
||||||
|
"status_code": response.status_code,
|
||||||
|
"success": response.status_code == 200,
|
||||||
|
}
|
||||||
|
|
||||||
|
if response.status_code != 200:
|
||||||
|
result["checks"]["transaction_fetch"]["error"] = response.text[:500]
|
||||||
|
return result
|
||||||
|
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
result["checks"]["transaction_data"] = {
|
||||||
|
"state": data.get("state"),
|
||||||
|
"documents_count": len(data.get("documents", [])),
|
||||||
|
"participants_count": len(data.get("participants", [])),
|
||||||
|
}
|
||||||
|
|
||||||
|
documents = data.get("documents", [])
|
||||||
|
result["checks"]["documents"] = []
|
||||||
|
|
||||||
|
for doc in documents:
|
||||||
|
doc_info = {
|
||||||
|
"id": doc.get("id"),
|
||||||
|
"name": doc.get("name"),
|
||||||
|
"status": doc.get("status"),
|
||||||
|
}
|
||||||
|
|
||||||
|
if doc.get("id"):
|
||||||
|
download_url = (
|
||||||
|
f"{self.api_url}/transactions/{transaction_id}"
|
||||||
|
f"/documents/{doc['id']}/download"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
dl_response = requests.head(
|
||||||
|
download_url,
|
||||||
|
auth=self.auth,
|
||||||
|
timeout=10,
|
||||||
|
)
|
||||||
|
doc_info["download_check"] = {
|
||||||
|
"url": download_url,
|
||||||
|
"status_code": dl_response.status_code,
|
||||||
|
"accessible": dl_response.status_code in [200, 302],
|
||||||
|
"content_type": dl_response.headers.get("Content-Type"),
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
doc_info["download_check"] = {"error": str(e)}
|
||||||
|
|
||||||
|
result["checks"]["documents"].append(doc_info)
|
||||||
|
|
||||||
|
result["success"] = True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
result["success"] = False
|
||||||
|
result["error"] = str(e)
|
||||||
|
|
||||||
|
return result
|
||||||
714
services/universign_sync.py
Normal file
714
services/universign_sync.py
Normal file
|
|
@ -0,0 +1,714 @@
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from typing import Dict, Optional, Tuple
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy import select, and_, or_
|
||||||
|
from sqlalchemy.orm import selectinload
|
||||||
|
|
||||||
|
from database import (
|
||||||
|
UniversignTransaction,
|
||||||
|
UniversignSigner,
|
||||||
|
UniversignSyncLog,
|
||||||
|
UniversignTransactionStatus,
|
||||||
|
LocalDocumentStatus,
|
||||||
|
UniversignSignerStatus,
|
||||||
|
EmailLog,
|
||||||
|
StatutEmail,
|
||||||
|
)
|
||||||
|
from data.data import templates_signature_email
|
||||||
|
from services.universign_document import UniversignDocumentService
|
||||||
|
from utils.universign_status_mapping import (
|
||||||
|
map_universign_to_local,
|
||||||
|
is_transition_allowed,
|
||||||
|
get_status_actions,
|
||||||
|
is_final_status,
|
||||||
|
resolve_status_conflict,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class UniversignSyncService:
|
||||||
|
def __init__(self, api_url: str, api_key: str, timeout: int = 30):
|
||||||
|
self.api_url = api_url.rstrip("/")
|
||||||
|
self.api_key = api_key
|
||||||
|
self.timeout = timeout
|
||||||
|
self.auth = (api_key, "")
|
||||||
|
self.sage_client = None
|
||||||
|
self.email_queue = None
|
||||||
|
self.settings = None
|
||||||
|
self.document_service = UniversignDocumentService(
|
||||||
|
api_url=api_url, api_key=api_key, timeout=60
|
||||||
|
)
|
||||||
|
|
||||||
|
def configure(self, sage_client, email_queue, settings):
|
||||||
|
self.sage_client = sage_client
|
||||||
|
self.email_queue = email_queue
|
||||||
|
self.settings = settings
|
||||||
|
|
||||||
|
def fetch_transaction_status(self, transaction_id: str) -> Optional[Dict]:
|
||||||
|
start_time = datetime.now()
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = requests.get(
|
||||||
|
f"{self.api_url}/transactions/{transaction_id}",
|
||||||
|
auth=self.auth,
|
||||||
|
timeout=self.timeout,
|
||||||
|
headers={"Accept": "application/json"},
|
||||||
|
)
|
||||||
|
|
||||||
|
response_time_ms = int((datetime.now() - start_time).total_seconds() * 1000)
|
||||||
|
|
||||||
|
if response.status_code == 200:
|
||||||
|
data = response.json()
|
||||||
|
logger.info(
|
||||||
|
f"Fetch OK: {transaction_id} status={data.get('state')} ({response_time_ms}ms)"
|
||||||
|
)
|
||||||
|
return {
|
||||||
|
"transaction": data,
|
||||||
|
"http_status": 200,
|
||||||
|
"response_time_ms": response_time_ms,
|
||||||
|
"fetched_at": datetime.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
elif response.status_code == 404:
|
||||||
|
logger.warning(
|
||||||
|
f"Transaction {transaction_id} introuvable sur Universign"
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
else:
|
||||||
|
logger.error(
|
||||||
|
f"Erreur HTTP {response.status_code} pour {transaction_id}: {response.text}"
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
|
except requests.exceptions.Timeout:
|
||||||
|
logger.error(f"Timeout récupération {transaction_id} (>{self.timeout}s)")
|
||||||
|
return None
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Erreur fetch {transaction_id}: {e}", exc_info=True)
|
||||||
|
return None
|
||||||
|
|
||||||
|
async def sync_all_pending(
|
||||||
|
self, session: AsyncSession, max_transactions: int = 50
|
||||||
|
) -> Dict[str, int]:
|
||||||
|
query = (
|
||||||
|
select(UniversignTransaction)
|
||||||
|
.options(selectinload(UniversignTransaction.signers))
|
||||||
|
.where(
|
||||||
|
and_(
|
||||||
|
UniversignTransaction.needs_sync,
|
||||||
|
or_(
|
||||||
|
~UniversignTransaction.local_status.in_(
|
||||||
|
[
|
||||||
|
LocalDocumentStatus.SIGNED,
|
||||||
|
LocalDocumentStatus.REJECTED,
|
||||||
|
LocalDocumentStatus.EXPIRED,
|
||||||
|
]
|
||||||
|
),
|
||||||
|
UniversignTransaction.last_synced_at
|
||||||
|
< (datetime.now() - timedelta(hours=1)),
|
||||||
|
UniversignTransaction.last_synced_at.is_(None),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by(UniversignTransaction.created_at.asc())
|
||||||
|
.limit(max_transactions)
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await session.execute(query)
|
||||||
|
transactions = result.scalars().all()
|
||||||
|
|
||||||
|
stats = {
|
||||||
|
"total_found": len(transactions),
|
||||||
|
"success": 0,
|
||||||
|
"failed": 0,
|
||||||
|
"skipped": 0,
|
||||||
|
"status_changes": 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
for transaction in transactions:
|
||||||
|
try:
|
||||||
|
previous_status = transaction.local_status.value
|
||||||
|
|
||||||
|
success, error = await self.sync_transaction(
|
||||||
|
session, transaction, force=False
|
||||||
|
)
|
||||||
|
|
||||||
|
if success:
|
||||||
|
stats["success"] += 1
|
||||||
|
if transaction.local_status.value != previous_status:
|
||||||
|
stats["status_changes"] += 1
|
||||||
|
else:
|
||||||
|
stats["failed"] += 1
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Erreur sync {transaction.transaction_id}: {e}", exc_info=True
|
||||||
|
)
|
||||||
|
stats["failed"] += 1
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Polling terminé: {stats['success']}/{stats['total_found']} OK, {stats['status_changes']} changements détectés"
|
||||||
|
)
|
||||||
|
|
||||||
|
return stats
|
||||||
|
|
||||||
|
async def process_webhook(
|
||||||
|
self, session: AsyncSession, payload: Dict, transaction_id: str = None
|
||||||
|
) -> Tuple[bool, Optional[str]]:
|
||||||
|
"""
|
||||||
|
Traite un webhook Universign - CORRECTION : meilleure gestion des payloads
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
if not transaction_id:
|
||||||
|
if (
|
||||||
|
payload.get("type", "").startswith("transaction.")
|
||||||
|
and "payload" in payload
|
||||||
|
):
|
||||||
|
nested_object = payload.get("payload", {}).get("object", {})
|
||||||
|
if nested_object.get("object") == "transaction":
|
||||||
|
transaction_id = nested_object.get("id")
|
||||||
|
elif payload.get("type", "").startswith("action."):
|
||||||
|
transaction_id = (
|
||||||
|
payload.get("payload", {})
|
||||||
|
.get("object", {})
|
||||||
|
.get("transaction_id")
|
||||||
|
)
|
||||||
|
elif payload.get("object") == "transaction":
|
||||||
|
transaction_id = payload.get("id")
|
||||||
|
|
||||||
|
if not transaction_id:
|
||||||
|
return False, "Transaction ID manquant"
|
||||||
|
|
||||||
|
event_type = payload.get("type", "webhook")
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"📨 Traitement webhook: transaction={transaction_id}, event={event_type}"
|
||||||
|
)
|
||||||
|
|
||||||
|
query = (
|
||||||
|
select(UniversignTransaction)
|
||||||
|
.options(selectinload(UniversignTransaction.signers))
|
||||||
|
.where(UniversignTransaction.transaction_id == transaction_id)
|
||||||
|
)
|
||||||
|
result = await session.execute(query)
|
||||||
|
transaction = result.scalar_one_or_none()
|
||||||
|
|
||||||
|
if not transaction:
|
||||||
|
logger.warning(f"Transaction {transaction_id} inconnue localement")
|
||||||
|
return False, "Transaction inconnue"
|
||||||
|
|
||||||
|
transaction.webhook_received = True
|
||||||
|
|
||||||
|
old_status = transaction.local_status.value
|
||||||
|
|
||||||
|
success, error = await self.sync_transaction(
|
||||||
|
session, transaction, force=True
|
||||||
|
)
|
||||||
|
|
||||||
|
if success and transaction.local_status.value != old_status:
|
||||||
|
logger.info(
|
||||||
|
f"Webhook traité: {transaction_id} | "
|
||||||
|
f"{old_status} → {transaction.local_status.value}"
|
||||||
|
)
|
||||||
|
|
||||||
|
await self._log_sync_attempt(
|
||||||
|
session=session,
|
||||||
|
transaction=transaction,
|
||||||
|
sync_type=f"webhook:{event_type}",
|
||||||
|
success=success,
|
||||||
|
error_message=error,
|
||||||
|
previous_status=old_status,
|
||||||
|
new_status=transaction.local_status.value,
|
||||||
|
changes=json.dumps(
|
||||||
|
payload, default=str
|
||||||
|
), # Ajout default=str pour éviter les erreurs JSON
|
||||||
|
)
|
||||||
|
|
||||||
|
await session.commit()
|
||||||
|
|
||||||
|
return success, error
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"💥 Erreur traitement webhook: {e}", exc_info=True)
|
||||||
|
return False, str(e)
|
||||||
|
|
||||||
|
async def _sync_signers(
|
||||||
|
self,
|
||||||
|
session: AsyncSession,
|
||||||
|
transaction: UniversignTransaction,
|
||||||
|
universign_data: Dict,
|
||||||
|
):
|
||||||
|
signers_data = universign_data.get("participants", [])
|
||||||
|
if not signers_data:
|
||||||
|
signers_data = universign_data.get("signers", [])
|
||||||
|
|
||||||
|
if not signers_data:
|
||||||
|
logger.debug("Aucun signataire dans les données Universign")
|
||||||
|
return
|
||||||
|
|
||||||
|
existing_signers = {s.email: s for s in transaction.signers}
|
||||||
|
|
||||||
|
for idx, signer_data in enumerate(signers_data):
|
||||||
|
email = signer_data.get("email", "")
|
||||||
|
if not email:
|
||||||
|
logger.warning(f"Signataire sans email à l'index {idx}, ignoré")
|
||||||
|
continue
|
||||||
|
|
||||||
|
raw_status = signer_data.get("status") or signer_data.get(
|
||||||
|
"state", "waiting"
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
status = UniversignSignerStatus(raw_status)
|
||||||
|
except ValueError:
|
||||||
|
logger.warning(
|
||||||
|
f"Statut inconnu pour signer {email}: {raw_status}, utilisation de 'unknown'"
|
||||||
|
)
|
||||||
|
status = UniversignSignerStatus.UNKNOWN
|
||||||
|
|
||||||
|
if email in existing_signers:
|
||||||
|
signer = existing_signers[email]
|
||||||
|
signer.status = status
|
||||||
|
|
||||||
|
viewed_at = self._parse_date(signer_data.get("viewed_at"))
|
||||||
|
if viewed_at and not signer.viewed_at:
|
||||||
|
signer.viewed_at = viewed_at
|
||||||
|
|
||||||
|
signed_at = self._parse_date(signer_data.get("signed_at"))
|
||||||
|
if signed_at and not signer.signed_at:
|
||||||
|
signer.signed_at = signed_at
|
||||||
|
|
||||||
|
refused_at = self._parse_date(signer_data.get("refused_at"))
|
||||||
|
if refused_at and not signer.refused_at:
|
||||||
|
signer.refused_at = refused_at
|
||||||
|
|
||||||
|
if signer_data.get("name") and not signer.name:
|
||||||
|
signer.name = signer_data.get("name")
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
signer = UniversignSigner(
|
||||||
|
id=f"{transaction.id}_signer_{idx}_{int(datetime.now().timestamp())}",
|
||||||
|
transaction_id=transaction.id,
|
||||||
|
email=email,
|
||||||
|
name=signer_data.get("name"),
|
||||||
|
status=status,
|
||||||
|
order_index=idx,
|
||||||
|
viewed_at=self._parse_date(signer_data.get("viewed_at")),
|
||||||
|
signed_at=self._parse_date(signer_data.get("signed_at")),
|
||||||
|
refused_at=self._parse_date(signer_data.get("refused_at")),
|
||||||
|
)
|
||||||
|
session.add(signer)
|
||||||
|
logger.info(
|
||||||
|
f"➕ Nouveau signataire ajouté: {email} (statut: {status.value})"
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Erreur création signer {email}: {e}")
|
||||||
|
|
||||||
|
async def sync_transaction(
|
||||||
|
self,
|
||||||
|
session,
|
||||||
|
transaction,
|
||||||
|
force: bool = False,
|
||||||
|
):
|
||||||
|
import json
|
||||||
|
|
||||||
|
if is_final_status(transaction.local_status.value) and not force:
|
||||||
|
logger.debug(
|
||||||
|
f"⏭️ Skip {transaction.transaction_id}: statut final "
|
||||||
|
f"{transaction.local_status.value}"
|
||||||
|
)
|
||||||
|
transaction.needs_sync = False
|
||||||
|
await session.commit()
|
||||||
|
return True, None
|
||||||
|
|
||||||
|
logger.info(f"Synchronisation: {transaction.transaction_id}")
|
||||||
|
|
||||||
|
result = self.fetch_transaction_status(transaction.transaction_id)
|
||||||
|
|
||||||
|
if not result:
|
||||||
|
error = "Échec récupération données Universign"
|
||||||
|
logger.error(f" {error}: {transaction.transaction_id}")
|
||||||
|
transaction.sync_attempts += 1
|
||||||
|
transaction.sync_error = error
|
||||||
|
await self._log_sync_attempt(session, transaction, "polling", False, error)
|
||||||
|
await session.commit()
|
||||||
|
return False, error
|
||||||
|
|
||||||
|
try:
|
||||||
|
universign_data = result["transaction"]
|
||||||
|
universign_status_raw = universign_data.get("state", "draft")
|
||||||
|
|
||||||
|
logger.info(f" Statut Universign brut: {universign_status_raw}")
|
||||||
|
|
||||||
|
new_local_status = map_universign_to_local(universign_status_raw)
|
||||||
|
previous_local_status = transaction.local_status.value
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Mapping: {universign_status_raw} (Universign) → "
|
||||||
|
f"{new_local_status} (Local) | Actuel: {previous_local_status}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if not is_transition_allowed(previous_local_status, new_local_status):
|
||||||
|
logger.warning(
|
||||||
|
f"Transition refusée: {previous_local_status} → {new_local_status}"
|
||||||
|
)
|
||||||
|
new_local_status = resolve_status_conflict(
|
||||||
|
previous_local_status, new_local_status
|
||||||
|
)
|
||||||
|
logger.info(f"Résolution conflit: statut résolu = {new_local_status}")
|
||||||
|
|
||||||
|
status_changed = previous_local_status != new_local_status
|
||||||
|
|
||||||
|
if status_changed:
|
||||||
|
logger.info(
|
||||||
|
f"CHANGEMENT DÉTECTÉ: {previous_local_status} → {new_local_status}"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
transaction.universign_status = UniversignTransactionStatus(
|
||||||
|
universign_status_raw
|
||||||
|
)
|
||||||
|
except ValueError:
|
||||||
|
logger.warning(f"Statut Universign inconnu: {universign_status_raw}")
|
||||||
|
if new_local_status == "SIGNE":
|
||||||
|
transaction.universign_status = (
|
||||||
|
UniversignTransactionStatus.COMPLETED
|
||||||
|
)
|
||||||
|
elif new_local_status == "REFUSE":
|
||||||
|
transaction.universign_status = UniversignTransactionStatus.REFUSED
|
||||||
|
elif new_local_status == "EXPIRE":
|
||||||
|
transaction.universign_status = UniversignTransactionStatus.EXPIRED
|
||||||
|
else:
|
||||||
|
transaction.universign_status = UniversignTransactionStatus.STARTED
|
||||||
|
|
||||||
|
transaction.local_status = LocalDocumentStatus(new_local_status)
|
||||||
|
transaction.universign_status_updated_at = datetime.now()
|
||||||
|
|
||||||
|
if new_local_status == "EN_COURS" and not transaction.sent_at:
|
||||||
|
transaction.sent_at = datetime.now()
|
||||||
|
logger.info("Date d'envoi mise à jour")
|
||||||
|
|
||||||
|
if new_local_status == "SIGNE" and not transaction.signed_at:
|
||||||
|
transaction.signed_at = datetime.now()
|
||||||
|
logger.info("Date de signature mise à jour")
|
||||||
|
|
||||||
|
if new_local_status == "REFUSE" and not transaction.refused_at:
|
||||||
|
transaction.refused_at = datetime.now()
|
||||||
|
logger.info(" Date de refus mise à jour")
|
||||||
|
|
||||||
|
if new_local_status == "EXPIRE" and not transaction.expired_at:
|
||||||
|
transaction.expired_at = datetime.now()
|
||||||
|
logger.info("Date d'expiration mise à jour")
|
||||||
|
|
||||||
|
documents = universign_data.get("documents", [])
|
||||||
|
if documents:
|
||||||
|
first_doc = documents[0]
|
||||||
|
logger.info(
|
||||||
|
f"Document Universign trouvé: id={first_doc.get('id')}, "
|
||||||
|
f"status={first_doc.get('status')}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if new_local_status == "SIGNE" and not transaction.signed_document_path:
|
||||||
|
logger.info("Déclenchement téléchargement document signé...")
|
||||||
|
|
||||||
|
try:
|
||||||
|
(
|
||||||
|
download_success,
|
||||||
|
download_error,
|
||||||
|
) = await self.document_service.download_and_store_signed_document(
|
||||||
|
session=session, transaction=transaction, force=False
|
||||||
|
)
|
||||||
|
|
||||||
|
if download_success:
|
||||||
|
logger.info("Document signé téléchargé et stocké")
|
||||||
|
else:
|
||||||
|
logger.warning(f"Échec téléchargement: {download_error}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f" Erreur téléchargement document: {e}", exc_info=True)
|
||||||
|
|
||||||
|
await self._sync_signers(session, transaction, universign_data)
|
||||||
|
|
||||||
|
transaction.last_synced_at = datetime.now()
|
||||||
|
transaction.sync_attempts += 1
|
||||||
|
transaction.needs_sync = not is_final_status(new_local_status)
|
||||||
|
transaction.sync_error = None
|
||||||
|
|
||||||
|
await self._log_sync_attempt(
|
||||||
|
session=session,
|
||||||
|
transaction=transaction,
|
||||||
|
sync_type="polling",
|
||||||
|
success=True,
|
||||||
|
error_message=None,
|
||||||
|
previous_status=previous_local_status,
|
||||||
|
new_status=new_local_status,
|
||||||
|
changes=json.dumps(
|
||||||
|
{
|
||||||
|
"status_changed": status_changed,
|
||||||
|
"universign_raw": universign_status_raw,
|
||||||
|
"documents_count": len(documents),
|
||||||
|
"response_time_ms": result.get("response_time_ms"),
|
||||||
|
},
|
||||||
|
default=str,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
await session.commit()
|
||||||
|
|
||||||
|
if status_changed:
|
||||||
|
logger.info(f"🎬 Exécution actions pour statut: {new_local_status}")
|
||||||
|
await self._execute_status_actions(
|
||||||
|
session, transaction, new_local_status
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Sync terminée: {transaction.transaction_id} | "
|
||||||
|
f"{previous_local_status} → {new_local_status}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return True, None
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
error_msg = f"Erreur lors de la synchronisation: {str(e)}"
|
||||||
|
logger.error(f" {error_msg}", exc_info=True)
|
||||||
|
|
||||||
|
transaction.sync_error = error_msg[:1000]
|
||||||
|
transaction.sync_attempts += 1
|
||||||
|
|
||||||
|
await self._log_sync_attempt(
|
||||||
|
session, transaction, "polling", False, error_msg
|
||||||
|
)
|
||||||
|
await session.commit()
|
||||||
|
|
||||||
|
return False, error_msg
|
||||||
|
|
||||||
|
async def _sync_transaction_documents_corrected(
|
||||||
|
self, session, transaction, universign_data: dict, new_local_status: str
|
||||||
|
):
|
||||||
|
documents = universign_data.get("documents", [])
|
||||||
|
|
||||||
|
if documents:
|
||||||
|
first_doc = documents[0]
|
||||||
|
first_doc_id = first_doc.get("id")
|
||||||
|
|
||||||
|
if first_doc_id:
|
||||||
|
if hasattr(transaction, "universign_document_id"):
|
||||||
|
transaction.universign_document_id = first_doc_id
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Document Universign: id={first_doc_id}, "
|
||||||
|
f"name={first_doc.get('name')}, status={first_doc.get('status')}"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.debug("Aucun document dans la réponse Universign")
|
||||||
|
|
||||||
|
if new_local_status == "SIGNE":
|
||||||
|
if not transaction.signed_document_path:
|
||||||
|
logger.info("Déclenchement téléchargement document signé...")
|
||||||
|
|
||||||
|
try:
|
||||||
|
(
|
||||||
|
download_success,
|
||||||
|
                    download_error,
                ) = await self.document_service.download_and_store_signed_document(
                    session=session, transaction=transaction, force=False
                )

                if download_success:
                    logger.info("Document signé téléchargé avec succès")
                else:
                    logger.warning(f"Échec téléchargement: {download_error}")

            except Exception as e:
                logger.error(f"Erreur téléchargement document: {e}", exc_info=True)
        else:
            logger.debug(
                f"Document déjà téléchargé: {transaction.signed_document_path}"
            )

    async def _log_sync_attempt(
        self,
        session: AsyncSession,
        transaction: UniversignTransaction,
        sync_type: str,
        success: bool,
        error_message: Optional[str] = None,
        previous_status: Optional[str] = None,
        new_status: Optional[str] = None,
        changes: Optional[str] = None,
    ):
        log = UniversignSyncLog(
            transaction_id=transaction.id,
            sync_type=sync_type,
            sync_timestamp=datetime.now(),
            previous_status=previous_status,
            new_status=new_status,
            changes_detected=changes,
            success=success,
            error_message=error_message,
        )
        session.add(log)

    async def _execute_status_actions(
        self, session: AsyncSession, transaction: UniversignTransaction, new_status: str
    ):
        actions = get_status_actions(new_status)
        if not actions:
            return

        if actions.get("update_sage_status") and self.sage_client:
            await self._update_sage_status(transaction, new_status)
        elif actions.get("update_sage_status"):
            logger.debug(
                f"sage_client non configuré, skip MAJ Sage pour {transaction.sage_document_id}"
            )

        if actions.get("send_notification") and self.email_queue and self.settings:
            await self._send_notification(session, transaction, new_status)
        elif actions.get("send_notification"):
            logger.debug(
                f"email_queue/settings non configuré, skip notification pour {transaction.transaction_id}"
            )

    async def _update_sage_status(
        self, transaction: UniversignTransaction, status: str
    ):
        if not self.sage_client:
            logger.warning("sage_client non configuré pour mise à jour Sage")
            return

        try:
            type_doc = transaction.sage_document_type.value
            doc_id = transaction.sage_document_id

            if status == "SIGNE":
                self.sage_client.changer_statut_document(
                    document_type_code=type_doc, numero=doc_id, nouveau_statut=2
                )
                logger.info(f"Statut Sage mis à jour: {doc_id} → Accepté (2)")

            elif status == "EN_COURS":
                self.sage_client.changer_statut_document(
                    document_type_code=type_doc, numero=doc_id, nouveau_statut=1
                )
                logger.info(f"Statut Sage mis à jour: {doc_id} → Confirmé (1)")

        except Exception as e:
            logger.error(
                f"Erreur mise à jour Sage pour {transaction.sage_document_id}: {e}"
            )

    async def _send_notification(
        self, session: AsyncSession, transaction: UniversignTransaction, status: str
    ):
        if not self.email_queue or not self.settings:
            logger.warning("email_queue ou settings non configuré")
            return

        try:
            if status == "SIGNE":
                template = templates_signature_email["signature_confirmee"]

                type_labels = {
                    0: "Devis",
                    10: "Commande",
                    30: "Bon de Livraison",
                    60: "Facture",
                    50: "Avoir",
                }

                variables = {
                    "NOM_SIGNATAIRE": transaction.requester_name or "Client",
                    "TYPE_DOC": type_labels.get(
                        transaction.sage_document_type.value, "Document"
                    ),
                    "NUMERO": transaction.sage_document_id,
                    "DATE_SIGNATURE": transaction.signed_at.strftime("%d/%m/%Y à %H:%M")
                    if transaction.signed_at
                    else datetime.now().strftime("%d/%m/%Y à %H:%M"),
                    "TRANSACTION_ID": transaction.transaction_id,
                    "CONTACT_EMAIL": self.settings.smtp_from,
                }

                sujet = template["sujet"]
                corps = template["corps_html"]

                for var, valeur in variables.items():
                    sujet = sujet.replace(f"{{{{{var}}}}}", str(valeur))
                    corps = corps.replace(f"{{{{{var}}}}}", str(valeur))

                email_log = EmailLog(
                    id=str(uuid.uuid4()),
                    destinataire=transaction.requester_email,
                    sujet=sujet,
                    corps_html=corps,
                    document_ids=transaction.sage_document_id,
                    type_document=transaction.sage_document_type.value,
                    statut=StatutEmail.EN_ATTENTE,
                    date_creation=datetime.now(),
                    nb_tentatives=0,
                )

                session.add(email_log)
                await session.flush()

                self.email_queue.enqueue(email_log.id)

                logger.info(
                    f"Email confirmation signature envoyé à {transaction.requester_email}"
                )

        except Exception as e:
            logger.error(
                f"Erreur envoi notification pour {transaction.transaction_id}: {e}"
            )

    @staticmethod
    def _parse_date(date_str: Optional[str]) -> Optional[datetime]:
        if not date_str:
            return None
        try:
            return datetime.fromisoformat(date_str.replace("Z", "+00:00"))
        except Exception:
            return None


class UniversignSyncScheduler:
    def __init__(self, sync_service: UniversignSyncService, interval_minutes: int = 5):
        self.sync_service = sync_service
        self.interval_minutes = interval_minutes
        self.is_running = False

    async def start(self, session_factory):
        import asyncio

        self.is_running = True

        logger.info(
            f"Démarrage polling Universign (intervalle: {self.interval_minutes}min)"
        )

        while self.is_running:
            try:
                async with session_factory() as session:
                    stats = await self.sync_service.sync_all_pending(session)

                    logger.info(
                        f"Polling: {stats['success']} transactions synchronisées, "
                        f"{stats['status_changes']} changements"
                    )

            except Exception as e:
                logger.error(f"Erreur polling: {e}", exc_info=True)

            await asyncio.sleep(self.interval_minutes * 60)

    def stop(self):
        self.is_running = False
        logger.info("Arrêt polling Universign")
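For orientation, here is a minimal sketch of how UniversignSyncScheduler could be wired into a FastAPI lifespan so the polling loop runs for the lifetime of the application. Both sync_service and async_session_factory are placeholders for objects assumed to be built elsewhere in the application; they do not appear in this diff.

import asyncio
import contextlib

from fastapi import FastAPI

# Assumed to exist elsewhere in the application (not part of this diff):
#   sync_service          -> a configured UniversignSyncService
#   async_session_factory -> an async session factory yielding AsyncSession
scheduler = UniversignSyncScheduler(sync_service, interval_minutes=5)


@contextlib.asynccontextmanager
async def lifespan(app: FastAPI):
    # Run the polling loop in the background while the app is up.
    task = asyncio.create_task(scheduler.start(async_session_factory))
    try:
        yield
    finally:
        scheduler.stop()   # lets the while-loop exit after its current cycle
        task.cancel()      # avoid waiting up to interval_minutes on shutdown
        with contextlib.suppress(asyncio.CancelledError):
            await task


app = FastAPI(lifespan=lifespan)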
15  tools/cleaner.py  Normal file
@@ -0,0 +1,15 @@

from pathlib import Path


def supprimer_commentaires_ligne(fichier):
    path = Path(fichier)
    lignes = path.read_text(encoding="utf-8").splitlines()
    lignes_sans_commentaires = [line for line in lignes if not line.lstrip().startswith("#")]
    path.write_text("\n".join(lignes_sans_commentaires), encoding="utf-8")


if __name__ == "__main__":
    base_dir = Path(__file__).resolve().parent.parent
    fichier_api = base_dir / "data/data.py"

    supprimer_commentaires_ligne(fichier_api)
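Note that the filter above drops every line whose stripped text begins with "#", which includes shebang and encoding lines, while trailing comments on code lines are kept. A quick illustration of the same list comprehension on hypothetical input:

lignes = [
    "#!/usr/bin/env python",
    "# commentaire de module",
    "VALEUR = 42  # commentaire en fin de ligne conservé",
    "    # commentaire indenté",
]
filtre = [line for line in lignes if not line.lstrip().startswith("#")]
print(filtre)  # ['VALEUR = 42  # commentaire en fin de ligne conservé']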
52  tools/extract_pydantic_models.py  Normal file
@@ -0,0 +1,52 @@

import ast
import os
import textwrap

SOURCE_FILE = "main.py"
MODELS_DIR = "../models"

os.makedirs(MODELS_DIR, exist_ok=True)

with open(SOURCE_FILE, "r", encoding="utf-8") as f:
    source_code = f.read()

tree = ast.parse(source_code)

pydantic_classes = []
other_nodes = []

for node in tree.body:
    if isinstance(node, ast.ClassDef):
        if any(
            isinstance(base, ast.Name) and base.id == "BaseModel" for base in node.bases
        ):
            pydantic_classes.append(node)
            continue
    other_nodes.append(node)

imports = """
from pydantic import BaseModel, Field
from typing import Optional, List
"""

for cls in pydantic_classes:
    class_name = cls.name
    file_name = f"{class_name.lower()}.py"
    file_path = os.path.join(MODELS_DIR, file_name)

    class_code = ast.get_source_segment(source_code, cls)
    class_code = textwrap.dedent(class_code)

    with open(file_path, "w", encoding="utf-8") as f:
        f.write(imports.strip() + "\n\n")
        f.write(class_code)

    print(f"✅ Modèle extrait : {class_name} → {file_path}")

new_tree = ast.Module(body=other_nodes, type_ignores=[])
new_source = ast.unparse(new_tree)

with open(SOURCE_FILE, "w", encoding="utf-8") as f:
    f.write(new_source)

print("\n🎉 Extraction terminée")
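For illustration, if main.py contained a hypothetical model such as class ClientInfo(BaseModel), the script would write ../models/clientinfo.py with the fixed import header followed by the class source copied verbatim:

# ../models/clientinfo.py (hypothetical output for a ClientInfo model)
from pydantic import BaseModel, Field
from typing import Optional, List

class ClientInfo(BaseModel):
    nom: str
    email: Optional[str] = None

Two caveats follow from the code itself: only classes whose base is written literally as BaseModel are detected (the ast.Name check misses pydantic.BaseModel), and main.py is rewritten in place via ast.unparse, which discards its original comments and formatting.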
136  utils/enterprise.py  Normal file
@@ -0,0 +1,136 @@

from fastapi import HTTPException
from typing import Optional
import httpx
import logging

from schemas import EntrepriseSearch


logger = logging.getLogger(__name__)


def calculer_tva_intracommunautaire(siren: str) -> Optional[str]:
    try:
        siren_clean = siren.replace(" ", "").strip()

        if not siren_clean.isdigit() or len(siren_clean) != 9:
            logger.warning(f"SIREN invalide: {siren}")
            return None

        siren_int = int(siren_clean)

        cle = (12 + 3 * (siren_int % 97)) % 97

        cle_str = f"{cle:02d}"

        return f"FR{cle_str}{siren_clean}"

    except Exception as e:
        logger.error(f"Erreur calcul TVA pour SIREN {siren}: {e}")
        return None


def formater_adresse(siege_data: dict) -> str:
    try:
        adresse_parts = []

        if siege_data.get("numero_voie"):
            adresse_parts.append(siege_data["numero_voie"])

        if siege_data.get("type_voie"):
            adresse_parts.append(siege_data["type_voie"])

        if siege_data.get("libelle_voie"):
            adresse_parts.append(siege_data["libelle_voie"])

        if siege_data.get("code_postal"):
            adresse_parts.append(siege_data["code_postal"])

        if siege_data.get("libelle_commune"):
            adresse_parts.append(siege_data["libelle_commune"].upper())

        return " ".join(adresse_parts)

    except Exception as e:
        logger.error(f"Erreur formatage adresse: {e}")
        return ""


async def rechercher_entreprise_api(query: str, per_page: int = 5) -> dict:
    api_url = "https://recherche-entreprises.api.gouv.fr/search"

    params = {
        "q": query,
        "per_page": per_page,
        "limite_etablissements": 5,
    }

    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.get(api_url, params=params)

            if response.status_code == 429:
                logger.warning("Rate limit atteint (7 req/s)")
                raise HTTPException(
                    status_code=429,
                    detail="Trop de requêtes. Veuillez réessayer dans 1 seconde.",
                )

            if response.status_code == 503:
                logger.error("API Sirene indisponible (503)")
                raise HTTPException(
                    status_code=503,
                    detail="Service de recherche momentanément indisponible.",
                )

            response.raise_for_status()
            return response.json()

    except httpx.TimeoutException:
        logger.error(f"Timeout lors de la recherche: {query}")
        raise HTTPException(
            status_code=504, detail="Délai d'attente dépassé pour l'API de recherche."
        )

    except httpx.HTTPError as e:
        logger.error(f"Erreur HTTP API Sirene: {e}")
        raise HTTPException(
            status_code=500,
            detail=f"Erreur lors de la communication avec l'API: {str(e)}",
        )


def mapper_resultat_api(entreprise_data: dict) -> Optional[EntrepriseSearch]:
    try:
        siren = entreprise_data.get("siren")

        if not siren:
            logger.warning("Entreprise sans SIREN, ignorée")
            return None

        tva_number = calculer_tva_intracommunautaire(siren)

        if not tva_number:
            logger.warning(f"Impossible de calculer TVA pour SIREN: {siren}")
            return None

        siege = entreprise_data.get("siege", {})

        etat_admin = entreprise_data.get("etat_administratif", "A")
        is_active = etat_admin == "A"

        return EntrepriseSearch(
            company_name=entreprise_data.get("nom_complet", ""),
            siren=siren,
            vat_number=tva_number,
            address=formater_adresse(siege),
            naf_code=entreprise_data.get("activite_principale", ""),
            is_active=is_active,
            siret_siege=siege.get("siret"),
            code_postal=siege.get("code_postal"),
            ville=siege.get("libelle_commune"),
        )

    except Exception as e:
        logger.error(f"Erreur mapping entreprise: {e}", exc_info=True)
        return None
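As a quick sanity check of the VAT-key formula above, take the illustrative (not real) SIREN 123456789: 123456789 mod 97 = 39, so the key is (12 + 3 × 39) mod 97 = 32 and the function returns "FR32123456789". A small usage sketch of these helpers; the "results" key of the API response is an assumption about the public recherche-entreprises payload:

import asyncio

# Illustrative values only; 123 456 789 is not a real SIREN.
print(calculer_tva_intracommunautaire("123 456 789"))  # -> FR32123456789
print(formater_adresse({
    "numero_voie": "10",
    "type_voie": "RUE",
    "libelle_voie": "DE LA PAIX",
    "code_postal": "75002",
    "libelle_commune": "Paris",
}))
# -> 10 RUE DE LA PAIX 75002 PARIS


async def demo():
    data = await rechercher_entreprise_api("dupont", per_page=3)
    for entreprise in data.get("results", []):  # "results" assumed per the public API
        fiche = mapper_resultat_api(entreprise)
        if fiche:
            print(fiche.company_name, fiche.vat_number)

# asyncio.run(demo())  # requires network access to recherche-entreprises.api.gouv.fr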
@@ -1,5 +1,5 @@
-from typing import Dict
-from config import settings
+from typing import Dict, List
+from config.config import settings
 import logging

 from datetime import datetime

@@ -264,3 +264,201 @@ async def universign_statut(transaction_id: str) -> Dict:
    except Exception as e:
        logger.error(f"Erreur statut Universign: {e}")
        return {"statut": "ERREUR", "error": str(e)}


def normaliser_type_doc(type_doc: int) -> int:
    TYPES_AUTORISES = {0, 10, 30, 50, 60}

    if type_doc not in TYPES_AUTORISES:
        raise ValueError(
            f"type_doc invalide ({type_doc}). Valeurs autorisées : {sorted(TYPES_AUTORISES)}"
        )

    return type_doc if type_doc == 0 else type_doc // 10


def _preparer_lignes_document(lignes: List) -> List[Dict]:
    return [
        {
            "article_code": ligne.article_code,
            "quantite": ligne.quantite,
            "prix_unitaire_ht": ligne.prix_unitaire_ht,
            "remise_pourcentage": ligne.remise_pourcentage or 0.0,
        }
        for ligne in lignes
    ]


UNIVERSIGN_TO_LOCAL: Dict[str, str] = {
    "draft": "EN_ATTENTE",
    "ready": "EN_ATTENTE",
    "started": "EN_COURS",
    "completed": "SIGNE",
    "closed": "SIGNE",
    "refused": "REFUSE",
    "expired": "EXPIRE",
    "canceled": "REFUSE",
    "failed": "ERREUR",
}


LOCAL_TO_SAGE_STATUS: Dict[str, int] = {
    "EN_ATTENTE": 0,
    "EN_COURS": 1,
    "SIGNE": 2,
    "REFUSE": 3,
    "EXPIRE": 4,
    "ERREUR": 5,
}

# Actions automatiques à déclencher selon le statut
STATUS_ACTIONS: Dict[str, Dict[str, any]] = {
    "SIGNE": {
        "update_sage_status": True,  # Mettre à jour Sage
        "trigger_workflow": True,  # Déclencher transformation (devis→commande)
        "send_notification": True,  # Email de confirmation
        "archive_document": True,  # Archiver le PDF signé
        "update_sage_field": "CB_DateSignature",  # Champ libre Sage
    },
    "REFUSE": {
        "update_sage_status": True,
        "trigger_workflow": False,
        "send_notification": True,
        "archive_document": False,
        "alert_sales": True,  # Alerter commercial
    },
    "EXPIRE": {
        "update_sage_status": True,
        "trigger_workflow": False,
        "send_notification": True,
        "archive_document": False,
        "schedule_reminder": True,  # Programmer relance
    },
    "ERREUR": {
        "update_sage_status": False,
        "trigger_workflow": False,
        "send_notification": False,
        "log_error": True,
        "retry_sync": True,
    },
}


# Transitions de statuts autorisées (validation)
ALLOWED_TRANSITIONS: Dict[str, list] = {
    "EN_ATTENTE": ["EN_COURS", "ERREUR"],
    "EN_COURS": ["SIGNE", "REFUSE", "EXPIRE", "ERREUR"],
    "SIGNE": [],  # État final, pas de retour
    "REFUSE": [],  # État final
    "EXPIRE": [],  # État final
    "ERREUR": ["EN_ATTENTE", "EN_COURS"],  # Retry possible
}


def map_universign_to_local(universign_status: str) -> str:
    return UNIVERSIGN_TO_LOCAL.get(
        universign_status.lower(),
        "ERREUR",  # Fallback si statut inconnu
    )


def get_sage_status_code(local_status: str) -> int:
    return LOCAL_TO_SAGE_STATUS.get(local_status, 5)


def is_transition_allowed(from_status: str, to_status: str) -> bool:
    if from_status == to_status:
        return True  # Même statut = OK (idempotence)

    allowed = ALLOWED_TRANSITIONS.get(from_status, [])
    return to_status in allowed


def get_status_actions(local_status: str) -> Dict[str, any]:
    return STATUS_ACTIONS.get(local_status, {})


def is_final_status(local_status: str) -> bool:
    return local_status in ["SIGNE", "REFUSE", "EXPIRE"]


STATUS_PRIORITY: Dict[str, int] = {
    "ERREUR": 0,
    "EN_ATTENTE": 1,
    "EN_COURS": 2,
    "EXPIRE": 3,
    "REFUSE": 4,
    "SIGNE": 5,
}


def resolve_status_conflict(status_a: str, status_b: str) -> str:
    priority_a = STATUS_PRIORITY.get(status_a, 0)
    priority_b = STATUS_PRIORITY.get(status_b, 0)

    return status_a if priority_a >= priority_b else status_b


STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
    "EN_ATTENTE": {
        "fr": "Document en attente d'envoi",
        "en": "Document pending",
        "icon": "⏳",
        "color": "gray",
    },
    "EN_COURS": {
        "fr": "En attente de signature",
        "en": "Awaiting signature",
        "icon": "✍️",
        "color": "blue",
    },
    "SIGNE": {
        "fr": "Signé avec succès",
        "en": "Successfully signed",
        "icon": "",
        "color": "green",
    },
    "REFUSE": {
        "fr": "Signature refusée",
        "en": "Signature refused",
        "icon": "",
        "color": "red",
    },
    "EXPIRE": {
        "fr": "Délai de signature expiré",
        "en": "Signature expired",
        "icon": "⏰",
        "color": "orange",
    },
    "ERREUR": {
        "fr": "Erreur technique",
        "en": "Technical error",
        "icon": "",
        "color": "red",
    },
}


def get_status_message(local_status: str, lang: str = "fr") -> str:
    """
    Obtient le message utilisateur pour un statut

    Args:
        local_status: Statut local
        lang: Langue (fr, en)

    Returns:
        Message formaté
    """
    status_info = STATUS_MESSAGES.get(local_status, {})
    icon = status_info.get("icon", "")
    message = status_info.get(lang, local_status)

    return f"{icon} {message}"


__all__ = ["_preparer_lignes_document", "normaliser_type_doc"]
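A quick check of normaliser_type_doc above: Sage document type codes collapse to single digits (0 stays 0, the others are divided by 10), and any other value raises ValueError.

for code in (0, 10, 30, 50, 60):
    print(code, "->", normaliser_type_doc(code))
# 0 -> 0, 10 -> 1, 30 -> 3, 50 -> 5, 60 -> 6

try:
    normaliser_type_doc(40)
except ValueError as exc:
    print(exc)  # type_doc invalide (40). Valeurs autorisées : [0, 10, 30, 50, 60]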
165  utils/universign_status_mapping.py  Normal file
@@ -0,0 +1,165 @@

from typing import Dict, Any
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

UNIVERSIGN_TO_LOCAL: Dict[str, str] = {
    "draft": "EN_ATTENTE",
    "ready": "EN_ATTENTE",
    "started": "EN_COURS",
    "completed": "SIGNE",
    "closed": "SIGNE",
    "refused": "REFUSE",
    "expired": "EXPIRE",
    "canceled": "REFUSE",
    "failed": "ERREUR",
}

LOCAL_TO_SAGE_STATUS: Dict[str, int] = {
    "EN_ATTENTE": 0,
    "EN_COURS": 1,
    "SIGNE": 2,
    "REFUSE": 3,
    "EXPIRE": 4,
    "ERREUR": 5,
}

STATUS_ACTIONS: Dict[str, Dict[str, Any]] = {
    "SIGNE": {
        "update_sage_status": True,
        "trigger_workflow": True,
        "send_notification": True,
        "archive_document": True,
        "update_sage_field": "CB_DateSignature",
    },
    "REFUSE": {
        "update_sage_status": True,
        "trigger_workflow": False,
        "send_notification": True,
        "archive_document": False,
        "alert_sales": True,
    },
    "EXPIRE": {
        "update_sage_status": True,
        "trigger_workflow": False,
        "send_notification": True,
        "archive_document": False,
        "schedule_reminder": True,
    },
    "ERREUR": {
        "update_sage_status": False,
        "trigger_workflow": False,
        "send_notification": False,
        "log_error": True,
        "retry_sync": True,
    },
}

ALLOWED_TRANSITIONS: Dict[str, list] = {
    "EN_ATTENTE": ["EN_COURS", "ERREUR"],
    "EN_COURS": ["SIGNE", "REFUSE", "EXPIRE", "ERREUR"],
    "SIGNE": [],
    "REFUSE": [],
    "EXPIRE": [],
    "ERREUR": ["EN_ATTENTE", "EN_COURS"],
}

STATUS_PRIORITY: Dict[str, int] = {
    "ERREUR": 0,
    "EN_ATTENTE": 1,
    "EN_COURS": 2,
    "EXPIRE": 3,
    "REFUSE": 4,
    "SIGNE": 5,
}

STATUS_MESSAGES: Dict[str, Dict[str, str]] = {
    "EN_ATTENTE": {
        "fr": "Document en attente d'envoi",
        "en": "Document pending",
        "icon": "⏳",
        "color": "gray",
    },
    "EN_COURS": {
        "fr": "En attente de signature",
        "en": "Awaiting signature",
        "icon": "✍️",
        "color": "blue",
    },
    "SIGNE": {
        "fr": "Signé avec succès",
        "en": "Successfully signed",
        "icon": "",
        "color": "green",
    },
    "REFUSE": {
        "fr": "Signature refusée",
        "en": "Signature refused",
        "icon": "",
        "color": "red",
    },
    "EXPIRE": {
        "fr": "Délai de signature expiré",
        "en": "Signature expired",
        "icon": "⏰",
        "color": "orange",
    },
    "ERREUR": {
        "fr": "Erreur technique",
        "en": "Technical error",
        "icon": "",
        "color": "red",
    },
}


def map_universign_to_local(universign_status: str) -> str:
    """Convertit un statut Universign en statut local avec fallback robuste."""
    normalized = universign_status.lower().strip()
    mapped = UNIVERSIGN_TO_LOCAL.get(normalized)

    if not mapped:
        logger.warning(
            f"Statut Universign inconnu: '{universign_status}', mapping vers ERREUR"
        )
        return "ERREUR"

    return mapped


def get_sage_status_code(local_status: str) -> int:
    """Obtient le code numérique pour Sage."""
    return LOCAL_TO_SAGE_STATUS.get(local_status, 5)


def is_transition_allowed(from_status: str, to_status: str) -> bool:
    """Vérifie si une transition de statut est valide."""
    if from_status == to_status:
        return True
    return to_status in ALLOWED_TRANSITIONS.get(from_status, [])


def get_status_actions(local_status: str) -> Dict[str, Any]:
    """Obtient les actions à exécuter pour un statut."""
    return STATUS_ACTIONS.get(local_status, {})


def is_final_status(local_status: str) -> bool:
    """Détermine si le statut est final."""
    return local_status in ["SIGNE", "REFUSE", "EXPIRE"]


def resolve_status_conflict(status_a: str, status_b: str) -> str:
    """Résout un conflit entre deux statuts (prend le plus prioritaire)."""
    priority_a = STATUS_PRIORITY.get(status_a, 0)
    priority_b = STATUS_PRIORITY.get(status_b, 0)
    return status_a if priority_a >= priority_b else status_b


def get_status_message(local_status: str, lang: str = "fr") -> str:
    """Obtient le message utilisateur pour un statut."""
    status_info = STATUS_MESSAGES.get(local_status, {})
    icon = status_info.get("icon", "")
    message = status_info.get(lang, local_status)
    return f"{icon} {message}"
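A short usage sketch of the helpers in this module, with values taken from the tables above; the import path follows the file's location in this diff.

from utils.universign_status_mapping import (
    get_sage_status_code,
    get_status_actions,
    get_status_message,
    is_final_status,
    is_transition_allowed,
    map_universign_to_local,
    resolve_status_conflict,
)

# A polling/webhook payload reporting "completed" maps to the local "SIGNE".
new_status = map_universign_to_local("Completed ")  # lower().strip() -> "SIGNE"

current = "EN_COURS"
if is_transition_allowed(current, new_status):
    print(get_status_message(new_status, lang="fr"))  # icon + "Signé avec succès"
    print(get_sage_status_code(new_status))           # 2
    actions = get_status_actions(new_status)
    if actions.get("archive_document"):
        print("archivage du PDF signé")

# If two sources disagree, the higher-priority status wins.
print(resolve_status_conflict("EN_COURS", "SIGNE"))   # SIGNE
print(is_final_status("SIGNE"))                       # True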