diff --git a/cleaner.py b/cleaner.py
index 3476145..9b4946d 100644
--- a/cleaner.py
+++ b/cleaner.py
@@ -6,7 +6,7 @@ def supprimer_commentaires_ligne(fichier: str) -> None:
     lignes_filtrees = []
     with path.open("r", encoding="utf-8") as f:
         for ligne in f:
-            if ligne.lstrip().startswith("#"):
+            if ligne.lstrip().startswith("logger"):
                 continue
             lignes_filtrees.append(ligne)
diff --git a/config.py b/config.py
index cb59340..769d506 100644
--- a/config.py
+++ b/config.py
@@ -44,7 +44,7 @@ settings = Settings()
 def validate_settings():
     """Validation au démarrage"""
     if not settings.chemin_base or not settings.mot_de_passe:
-        raise ValueError("❌ CHEMIN_BASE et MOT_DE_PASSE requis dans .env")
+        raise ValueError(" CHEMIN_BASE et MOT_DE_PASSE requis dans .env")
     if not settings.sage_gateway_token:
-        raise ValueError("❌ SAGE_GATEWAY_TOKEN requis (doit être identique sur Linux)")
+        raise ValueError(" SAGE_GATEWAY_TOKEN requis (doit être identique sur Linux)")
     return True
diff --git a/main.py b/main.py
index 9a09486..6f56d67 100644
--- a/main.py
+++ b/main.py
@@ -13,6 +13,7 @@ from config import settings, validate_settings
 from sage_connector import SageConnector
 import pyodbc
 import os
+from utils.tiers import TiersListRequest
 
 logging.basicConfig(
     level=logging.INFO,
@@ -496,7 +497,7 @@ class ClientCreateRequest(BaseModel):
     def to_sage_dict(self) -> dict:
         """
         Convertit le modèle en dictionnaire compatible avec creer_client()
-        ✅ Mapping 1:1 avec les paramètres réels de la fonction
+        Mapping 1:1 avec les paramètres réels de la fonction
         """
         stat01 = self.statistique01 or self.secteur
@@ -1378,7 +1379,7 @@ def create_fournisseur_endpoint(req: FournisseurCreateRequest):
         return {"success": True, "data": resultat}
     except ValueError as e:
-        logger.warning(f"⚠️ Erreur métier création fournisseur: {e}")
+        logger.warning(f" Erreur métier création fournisseur: {e}")
         raise HTTPException(400, str(e))
     except Exception as e:
@@ -1450,7 +1451,7 @@ def avoir_get(req: CodeRequest):
             logger.info(f" Avoir {req.code} retourné depuis le cache")
             return {"success": True, "data": avoir, "source": "cache"}
-        logger.info(f"⚠️ Avoir {req.code} absent du cache, lecture depuis Sage...")
+        logger.info(f" Avoir {req.code} absent du cache, lecture depuis Sage...")
         avoir = sage.lire_avoir(req.code)
         if not avoir:
@@ -1497,7 +1498,7 @@ def livraison_get(req: CodeRequest):
             logger.info(f" Livraison {req.code} retournée depuis le cache")
             return {"success": True, "data": livraison, "source": "cache"}
-        logger.info(f"⚠️ Livraison {req.code} absente du cache, lecture depuis Sage...")
+        logger.info(f" Livraison {req.code} absente du cache, lecture depuis Sage...")
         livraison = sage.lire_livraison(req.code)
         if not livraison:
@@ -1922,7 +1923,7 @@ def lister_depots():
                 pass
             if not code:
-                logger.warning(f" ⚠️ Dépôt à l'index {index} sans code")
+                logger.warning(f" Dépôt à l'index {index} sans code")
                 index += 1
                 continue
@@ -2029,7 +2030,7 @@ def creer_entree_stock(req: EntreeStockRequest):
         return {"success": True, "data": resultat}
     except ValueError as e:
-        logger.warning(f"⚠️ Erreur métier entrée stock : {e}")
+        logger.warning(f" Erreur métier entrée stock : {e}")
         raise HTTPException(400, str(e))
     except Exception as e:
@@ -2059,7 +2060,7 @@ def creer_sortie_stock(req: SortieStockRequest):
         return {"success": True, "data": resultat}
     except ValueError as e:
-        logger.warning(f"⚠️ Erreur métier sortie stock : {e}")
+        logger.warning(f" Erreur métier sortie stock : {e}")
         raise HTTPException(400, str(e))
     except Exception as e:
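Both avoir_get and livraison_get above follow the same cache-first read: try the in-memory cache, fall back to a direct Sage read, and report which source answered. A minimal sketch of that flow, assuming hypothetical read_cache/read_sage callables (the real handlers call sage.lire_avoir_cache / sage.lire_avoir and their livraison equivalents):

from typing import Callable, Optional

def get_with_cache_fallback(
    code: str,
    read_cache: Callable[[str], Optional[dict]],
    read_sage: Callable[[str], Optional[dict]],
) -> Optional[dict]:
    """Return the document and the source that answered, or None if it exists nowhere."""
    # Cheap path: the in-memory cache refreshed in the background.
    document = read_cache(code)
    if document:
        return {"data": document, "source": "cache"}
    # Slow path: read directly from Sage, as the endpoints above do.
    document = read_sage(code)
    if document:
        return {"data": document, "source": "sage"}
    return None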
@@ -2160,6 +2161,35 @@ def contacts_set_default(req: ContactGetRequest):
         raise HTTPException(500, str(e))
 
+@app.post("/sage/tiers/list", dependencies=[Depends(verify_token)])
+def tiers_list(req: TiersListRequest):
+    """Liste des tiers avec filtres optionnels"""
+    try:
+        tiers = sage.lister_tous_tiers(
+            type_tiers=req.type_tiers,
+            filtre=req.filtre
+        )
+        return {"success": True, "data": tiers}
+    except Exception as e:
+        logger.error(f" Erreur liste tiers: {e}")
+        raise HTTPException(500, str(e))
+
+
+@app.post("/sage/tiers/get", dependencies=[Depends(verify_token)])
+def tiers_get(req: CodeRequest):
+    """Lecture d'un tiers par code"""
+    try:
+        tiers = sage.lire_tiers(req.code)
+        if not tiers:
+            raise HTTPException(404, f"Tiers {req.code} non trouvé")
+        return {"success": True, "data": tiers}
+    except HTTPException:
+        raise
+    except Exception as e:
+        logger.error(f" Erreur lecture tiers: {e}")
+        raise HTTPException(500, str(e))
+
+
 if __name__ == "__main__":
     uvicorn.run(
         "main:app",
diff --git a/main.py.bak b/main.py.bak
index 5aafae5..1ff993e 100644
--- a/main.py.bak
+++ b/main.py.bak
@@ -237,7 +237,7 @@ class ArticleUpdateGatewayRequest(BaseModel):
 def verify_token(x_sage_token: str = Header(...)):
     """Vérification du token d'authentification"""
     if x_sage_token != settings.sage_gateway_token:
-        logger.warning(f"❌ Token invalide reçu: {x_sage_token[:20]}...")
+        logger.warning(f" Token invalide reçu: {x_sage_token[:20]}...")
         raise HTTPException(401, "Token invalide")
     return True
@@ -274,9 +274,9 @@ def startup():
     # Validation config
     try:
         validate_settings()
-        logger.info("✅ Configuration validée")
+        logger.info(" Configuration validée")
     except ValueError as e:
-        logger.error(f"❌ Configuration invalide: {e}")
+        logger.error(f" Configuration invalide: {e}")
         raise
 
     # Connexion Sage
@@ -285,9 +285,9 @@
     )
     if not sage.connecter():
-        raise RuntimeError("❌ Impossible de se connecter à Sage 100c")
+        raise RuntimeError(" Impossible de se connecter à Sage 100c")
 
-    logger.info("✅ Sage Gateway démarré et connecté")
+    logger.info(" Sage Gateway démarré et connecté")
 
 
 @app.on_event("shutdown")
@@ -430,11 +430,11 @@ def lire_devis(req: CodeRequest):
     """
     📄 Lecture d'un devis AVEC ses lignes (lecture Sage directe)
-    ⚠️ Plus lent que /list car charge les lignes depuis Sage
+    Plus lent que /list car charge les lignes depuis Sage
     💡 Utiliser /list pour afficher une table rapide
     """
     try:
-        # ✅ Lecture complète depuis Sage (avec lignes)
+        # Lecture complète depuis Sage (avec lignes)
        devis = sage.lire_devis(req.code)
         if not devis:
             raise HTTPException(404, f"Devis {req.code} non trouvé")
@@ -459,7 +459,7 @@ def devis_list(
     💡 Pour les détails avec lignes, utiliser GET /sage/devis/get
     """
     try:
-        # ✅ Récupération depuis le cache (instantané)
+        # Récupération depuis le cache (instantané)
         devis_list = sage.lister_tous_devis_cache(filtre)
 
         # Filtrer par statut si demandé
@@ -469,12 +469,12 @@
         # Limiter le nombre de résultats
         devis_list = devis_list[:limit]
 
-        logger.info(f"✅ {len(devis_list)} devis retournés depuis le cache")
+        logger.info(f" {len(devis_list)} devis retournés depuis le cache")
 
         return {"success": True, "data": devis_list}
 
     except Exception as e:
-        logger.error(f"❌ Erreur liste devis: {e}", exc_info=True)
+        logger.error(f" Erreur liste devis: {e}", exc_info=True)
         raise HTTPException(500, str(e))
@@ -496,7 +496,7 @@ def changer_statut_devis_endpoint(numero: str, nouveau_statut: int):
         doc.DO_Statut = nouveau_statut
         doc.Write()
 
-        logger.info(f"✅ Statut devis {numero}: {statut_actuel} → {nouveau_statut}")
+
logger.info(f" Statut devis {numero}: {statut_actuel} → {nouveau_statut}") return { "success": True, @@ -540,7 +540,7 @@ def transformer_document( """ 🔧 Transformation de document - ✅ CORRECTION : Utilise les VRAIS types Sage Dataven + CORRECTION : Utilise les VRAIS types Sage Dataven Types valides : - 0: Devis @@ -563,7 +563,7 @@ def transformer_document( f"(type {type_source}) → type {type_cible}" ) - # ✅ Matrice des transformations valides pour VOTRE Sage + # Matrice des transformations valides pour VOTRE Sage transformations_valides = { (0, 10), # Devis → Commande (10, 30), # Commande → Bon de livraison @@ -574,7 +574,7 @@ def transformer_document( if (type_source, type_cible) not in transformations_valides: logger.error( - f"❌ Transformation non autorisée: {type_source} → {type_cible}" + f" Transformation non autorisée: {type_source} → {type_cible}" ) raise HTTPException( 400, @@ -586,7 +586,7 @@ def transformer_document( resultat = sage.transformer_document(numero_source, type_source, type_cible) logger.info( - f"✅ Transformation réussie: {numero_source} → " + f" Transformation réussie: {numero_source} → " f"{resultat.get('document_cible', '?')} " f"({resultat.get('nb_lignes', 0)} lignes)" ) @@ -596,10 +596,10 @@ def transformer_document( except HTTPException: raise except ValueError as e: - logger.error(f"❌ Erreur métier transformation: {e}") + logger.error(f" Erreur métier transformation: {e}") raise HTTPException(400, str(e)) except Exception as e: - logger.error(f"❌ Erreur technique transformation: {e}", exc_info=True) + logger.error(f" Erreur technique transformation: {e}", exc_info=True) raise HTTPException(500, f"Erreur transformation: {str(e)}") @@ -664,12 +664,12 @@ def commandes_list( commandes = commandes[:limit] - logger.info(f"✅ {len(commandes)} commandes retournées depuis le cache") + logger.info(f" {len(commandes)} commandes retournées depuis le cache") return {"success": True, "data": commandes} except Exception as e: - logger.error(f"❌ Erreur liste commandes: {e}", exc_info=True) + logger.error(f" Erreur liste commandes: {e}", exc_info=True) raise HTTPException(500, str(e)) @@ -693,12 +693,12 @@ def factures_list( factures = factures[:limit] - logger.info(f"✅ {len(factures)} factures retournées depuis le cache") + logger.info(f" {len(factures)} factures retournées depuis le cache") return {"success": True, "data": factures} except Exception as e: - logger.error(f"❌ Erreur liste factures: {e}", exc_info=True) + logger.error(f" Erreur liste factures: {e}", exc_info=True) raise HTTPException(500, str(e)) @@ -718,7 +718,7 @@ def lire_remise_max_client(code: str): except: pass - logger.info(f"✅ Remise max client {code}: {remise_max}%") + logger.info(f" Remise max client {code}: {remise_max}%") return { "success": True, @@ -1777,7 +1777,7 @@ def diagnostiquer_statuts_globaux(): matrice_complete[type_doc] = analyse_type logger.info( - f"[DIAG] ✅ Type {type_doc}: {analyse_type['nb_documents_total']} docs, " + f"[DIAG] Type {type_doc}: {analyse_type['nb_documents_total']} docs, " f"{len(analyse_type['statuts_observes'])} statuts différents" ) @@ -2045,7 +2045,7 @@ def diagnostiquer_statuts_permis(numero: str): resultat_test["autorise"] = True resultat_test["note"] = "Changement de statut réussi" - logger.info(f"[DIAG] ✅ Statut {statut_test} AUTORISÉ") + logger.info(f"[DIAG] Statut {statut_test} AUTORISÉ") # Restaurer le statut d'origine immédiatement doc.Read() @@ -2058,7 +2058,7 @@ def diagnostiquer_statuts_permis(numero: str): resultat_test["erreur"] = erreur_str 
logger.debug( - f"[DIAG] ❌ Statut {statut_test} REFUSÉ: {erreur_str[:100]}" + f"[DIAG] Statut {statut_test} REFUSÉ: {erreur_str[:100]}" ) # Restaurer en cas d'erreur @@ -2092,20 +2092,20 @@ def diagnostiquer_statuts_permis(numero: str): if 2 in statuts_autorises and statut_actuel == 0: recommendations.append( - "✅ Vous pouvez passer ce document de 'Brouillon' (0) à 'Accepté' (2)" + " Vous pouvez passer ce document de 'Brouillon' (0) à 'Accepté' (2)" ) if 5 in statuts_autorises: recommendations.append( - "✅ Le statut 'Transformé' (5) est disponible - utilisé après transformation" + " Le statut 'Transformé' (5) est disponible - utilisé après transformation" ) if 6 in statuts_autorises: - recommendations.append("✅ Vous pouvez annuler ce document (statut 6)") + recommendations.append(" Vous pouvez annuler ce document (statut 6)") if not any(s in statuts_autorises for s in [2, 3, 4]): recommendations.append( - "⚠️ Aucun statut de validation (2/3/4) n'est disponible - " + " Aucun statut de validation (2/3/4) n'est disponible - " "le document a peut-être déjà été traité" ) @@ -2365,11 +2365,11 @@ def diagnostiquer_erreur_transformation( if nb_bloquants == 0: diagnostic["suggestions"].append( - "✅ Aucun problème bloquant détecté. La transformation devrait fonctionner." + " Aucun problème bloquant détecté. La transformation devrait fonctionner." ) else: diagnostic["suggestions"].append( - f"❌ {nb_bloquants} problème(s) bloquant(s) doivent être résolus avant la transformation." + f" {nb_bloquants} problème(s) bloquant(s) doivent être résolus avant la transformation." ) return {"success": True, "diagnostic": diagnostic} @@ -2418,19 +2418,19 @@ def fournisseurs_list(req: FiltreRequest): """ ⚡ Liste rapide des fournisseurs depuis le CACHE - ✅ Utilise le cache mémoire pour une réponse instantanée + Utilise le cache mémoire pour une réponse instantanée 🔄 Cache actualisé automatiquement toutes les 15 minutes """ try: - # ✅ Utiliser le cache au lieu de la lecture directe + # Utiliser le cache au lieu de la lecture directe fournisseurs = sage.lister_tous_fournisseurs_cache(req.filtre) - logger.info(f"✅ {len(fournisseurs)} fournisseurs retournés depuis le cache") + logger.info(f" {len(fournisseurs)} fournisseurs retournés depuis le cache") return {"success": True, "data": fournisseurs} except Exception as e: - logger.error(f"❌ Erreur liste fournisseurs: {e}", exc_info=True) + logger.error(f" Erreur liste fournisseurs: {e}", exc_info=True) raise HTTPException(500, str(e)) @@ -2439,24 +2439,24 @@ def create_fournisseur_endpoint(req: FournisseurCreateRequest): """ ➕ Création d'un fournisseur dans Sage - ✅ Utilise FactoryFournisseur.Create() directement + Utilise FactoryFournisseur.Create() directement """ try: # Appel au connecteur Sage resultat = sage.creer_fournisseur(req.dict()) - logger.info(f"✅ Fournisseur créé: {resultat.get('numero')}") + logger.info(f" Fournisseur créé: {resultat.get('numero')}") return {"success": True, "data": resultat} except ValueError as e: # Erreur métier (ex: doublon) - logger.warning(f"⚠️ Erreur métier création fournisseur: {e}") + logger.warning(f" Erreur métier création fournisseur: {e}") raise HTTPException(400, str(e)) except Exception as e: # Erreur technique (ex: COM) - logger.error(f"❌ Erreur technique création fournisseur: {e}") + logger.error(f" Erreur technique création fournisseur: {e}") raise HTTPException(500, str(e)) @@ -2480,7 +2480,7 @@ def modifier_fournisseur_endpoint(req: FournisseurUpdateGatewayRequest): @app.post("/sage/fournisseurs/get", 
dependencies=[Depends(verify_token)]) def fournisseur_get(req: CodeRequest): """ - ✅ NOUVEAU : Lecture d'un fournisseur par code + NOUVEAU : Lecture d'un fournisseur par code """ try: fournisseur = sage.lire_fournisseur(req.code) @@ -2507,11 +2507,11 @@ def avoirs_list( 📋 Liste rapide des avoirs depuis le CACHE (avec lignes) ⚡ ULTRA-RAPIDE: Utilise le cache mémoire - ✅ LIGNES INCLUSES: Contrairement aux anciennes méthodes + LIGNES INCLUSES: Contrairement aux anciennes méthodes 💡 Pour forcer une relecture depuis Sage, utiliser /sage/avoirs/get """ try: - # ✅ Récupération depuis le cache (instantané) + # Récupération depuis le cache (instantané) avoirs = sage.lister_tous_avoirs_cache(filtre) # Filtrer par statut si demandé @@ -2521,12 +2521,12 @@ def avoirs_list( # Limiter le nombre de résultats avoirs = avoirs[:limit] - logger.info(f"✅ {len(avoirs)} avoirs retournés depuis le cache") + logger.info(f" {len(avoirs)} avoirs retournés depuis le cache") return {"success": True, "data": avoirs} except Exception as e: - logger.error(f"❌ Erreur liste avoirs: {e}", exc_info=True) + logger.error(f" Erreur liste avoirs: {e}", exc_info=True) raise HTTPException(500, str(e)) @@ -2539,15 +2539,15 @@ def avoir_get(req: CodeRequest): 🔄 Si introuvable, force une relecture depuis Sage """ try: - # ✅ Essayer le cache d'abord + # Essayer le cache d'abord avoir = sage.lire_avoir_cache(req.code) if avoir: - logger.info(f"✅ Avoir {req.code} retourné depuis le cache") + logger.info(f" Avoir {req.code} retourné depuis le cache") return {"success": True, "data": avoir, "source": "cache"} - # ❌ Pas dans le cache → Lecture directe depuis Sage - logger.info(f"⚠️ Avoir {req.code} absent du cache, lecture depuis Sage...") + # Pas dans le cache → Lecture directe depuis Sage + logger.info(f" Avoir {req.code} absent du cache, lecture depuis Sage...") avoir = sage.lire_avoir(req.code) if not avoir: @@ -2575,11 +2575,11 @@ def livraisons_list( 📋 Liste rapide des livraisons depuis le CACHE (avec lignes) ⚡ ULTRA-RAPIDE: Utilise le cache mémoire - ✅ LIGNES INCLUSES: Contrairement aux anciennes méthodes + LIGNES INCLUSES: Contrairement aux anciennes méthodes 💡 Pour forcer une relecture depuis Sage, utiliser /sage/livraisons/get """ try: - # ✅ Récupération depuis le cache (instantané) + # Récupération depuis le cache (instantané) livraisons = sage.lister_toutes_livraisons_cache(filtre) # Filtrer par statut si demandé @@ -2589,12 +2589,12 @@ def livraisons_list( # Limiter le nombre de résultats livraisons = livraisons[:limit] - logger.info(f"✅ {len(livraisons)} livraisons retournées depuis le cache") + logger.info(f" {len(livraisons)} livraisons retournées depuis le cache") return {"success": True, "data": livraisons} except Exception as e: - logger.error(f"❌ Erreur liste livraisons: {e}", exc_info=True) + logger.error(f" Erreur liste livraisons: {e}", exc_info=True) raise HTTPException(500, str(e)) @@ -2607,15 +2607,15 @@ def livraison_get(req: CodeRequest): 🔄 Si introuvable, force une relecture depuis Sage """ try: - # ✅ Essayer le cache d'abord + # Essayer le cache d'abord livraison = sage.lire_livraison_cache(req.code) if livraison: - logger.info(f"✅ Livraison {req.code} retournée depuis le cache") + logger.info(f" Livraison {req.code} retournée depuis le cache") return {"success": True, "data": livraison, "source": "cache"} - # ❌ Pas dans le cache → Lecture directe depuis Sage - logger.info(f"⚠️ Livraison {req.code} absente du cache, lecture depuis Sage...") + # Pas dans le cache → Lecture directe depuis Sage + logger.info(f" 
Livraison {req.code} absente du cache, lecture depuis Sage...") livraison = sage.lire_livraison(req.code) if not livraison: @@ -2804,7 +2804,7 @@ def creer_facture_endpoint(req: FactureCreateGatewayRequest): """ ➕ Création d'une facture dans Sage - ⚠️ NOTE: Les factures peuvent avoir des champs obligatoires supplémentaires + NOTE: Les factures peuvent avoir des champs obligatoires supplémentaires selon la configuration Sage (DO_CodeJournal, DO_Souche, etc.) """ try: @@ -2837,7 +2837,7 @@ def modifier_facture_endpoint(req: FactureUpdateGatewayRequest): """ ✏️ Modification d'une facture dans Sage - ⚠️ ATTENTION: Les factures comptabilisées peuvent être verrouillées + ATTENTION: Les factures comptabilisées peuvent être verrouillées """ try: resultat = sage.modifier_facture(req.numero, req.facture_data) @@ -2946,7 +2946,7 @@ def generer_pdf_document(req: PDFGenerationRequest): pdf_base64 = base64.b64encode(pdf_bytes).decode("utf-8") - logger.info(f"✅ PDF généré: {len(pdf_bytes)} octets") + logger.info(f" PDF généré: {len(pdf_bytes)} octets") return { "success": True, @@ -2961,7 +2961,7 @@ def generer_pdf_document(req: PDFGenerationRequest): except HTTPException: raise except Exception as e: - logger.error(f"❌ Erreur génération PDF: {e}", exc_info=True) + logger.error(f" Erreur génération PDF: {e}", exc_info=True) raise HTTPException(500, str(e)) @@ -2970,7 +2970,7 @@ def nettoyer_verrous_sage(): """ 🧹 Nettoyage des verrous Sage (cbRegFile) - ⚠️ À utiliser uniquement si l'API est bloquée + À utiliser uniquement si l'API est bloquée """ try: if not sage or not sage.cial: @@ -2981,7 +2981,7 @@ def nettoyer_verrous_sage(): for _ in range(10): try: sage.cial.CptaApplication.RollbackTrans() - logger.info("✅ Rollback effectué") + logger.info(" Rollback effectué") except: break @@ -4207,7 +4207,7 @@ def diagnostic_article_complet( "tous_champs_remplis": champs_remplis, "champs_vides": champs_vides, "conseil": ( - f"✅ {len(attributs_unite)} attribut(s) d'unité trouvé(s). " + f" {len(attributs_unite)} attribut(s) d'unité trouvé(s). " f"Utilisez mode='create' avec ce reference_modele pour tester la création." ) } @@ -4230,11 +4230,11 @@ def diagnostic_article_complet( logs_copie = [] article_modele_obj = None - # ✅ CHAMPS À EXCLURE (doivent être uniques ou auto-générés) + # CHAMPS À EXCLURE (doivent être uniques ou auto-générés) champs_exclus = { "AR_Ref", # Référence (on met la nôtre) "AR_Raccourci", # Doit être unique ! - "AR_CodeBarre", # Code-barres doit être unique ! ⚠️ CRITIQUE + "AR_CodeBarre", # Code-barres doit être unique ! 
CRITIQUE "AR_Photo", # Chemin photo spécifique "cbMarq", # ID interne Sage "cbCreateur", # Créateur @@ -4251,7 +4251,7 @@ def diagnostic_article_complet( if attr.startswith('_') or attr[0].islower(): continue - # ✅ Exclure les champs problématiques + # Exclure les champs problématiques if attr in champs_exclus: logs_copie.append(f"⏭️ {attr} EXCLU (doit être unique)") continue @@ -4266,14 +4266,14 @@ def diagnostic_article_complet( # Garder les valeurs non-None et non-vides if val is not None and str(val) not in ['None', '']: champs_modele[attr] = val - logs_copie.append(f"✅ {attr} = {val}") + logs_copie.append(f" {attr} = {val}") except: continue logger.info(f"📋 Modèle: {len(champs_modele)} champs extraits") except Exception as e: - logger.warning(f"⚠️ Modèle '{reference_modele}' non chargé: {e}") + logger.warning(f" Modèle '{reference_modele}' non chargé: {e}") # 🆕 Étape 2: Créer le nouvel article persist = factory.Create() @@ -4289,9 +4289,9 @@ def diagnostic_article_complet( for champ, valeur in champs_modele.items(): try: setattr(article, champ, valeur) - logs_application.append(f"✅ {champ} = {valeur}") + logs_application.append(f" {champ} = {valeur}") except Exception as e: - logs_application.append(f"❌ {champ}: {str(e)[:50]}") + logs_application.append(f" {champ}: {str(e)[:50]}") else: # Défauts minimaux si pas de modèle article.AR_Design = f"Test {reference}" @@ -4310,10 +4310,10 @@ def diagnostic_article_complet( famille_obj = getattr(article_modele_obj, "Famille", None) if famille_obj is not None: article.Famille = famille_obj - fallbacks_appliques.append(f"✅ Famille copiée depuis {reference_modele}") + fallbacks_appliques.append(f" Famille copiée depuis {reference_modele}") logger.info("[DIAGNOSTIC] Famille copiée avec succès") else: - fallbacks_echecs.append("⚠️ Famille: objet NULL dans modèle") + fallbacks_echecs.append(" Famille: objet NULL dans modèle") logger.warning("[DIAGNOSTIC] Famille NULL dans modèle") except Exception as e: fallbacks_echecs.append(f"Famille: {str(e)[:80]}") @@ -4324,7 +4324,7 @@ def diagnostic_article_complet( taxe_obj = getattr(article_modele_obj, "Taxe1", None) if taxe_obj is not None: article.Taxe1 = taxe_obj - fallbacks_appliques.append(f"✅ Taxe1 copiée depuis {reference_modele}") + fallbacks_appliques.append(f" Taxe1 copiée depuis {reference_modele}") logger.info("[DIAGNOSTIC] Taxe1 copiée avec succès") else: # Taxe NULL - essayer de charger une taxe par défaut @@ -4337,7 +4337,7 @@ def diagnostic_article_complet( taxe_defaut = factory_taxe.ReadIntitule(code_taxe) if taxe_defaut: article.Taxe1 = taxe_defaut - fallbacks_appliques.append(f"✅ Taxe par défaut '{code_taxe}' chargée") + fallbacks_appliques.append(f" Taxe par défaut '{code_taxe}' chargée") logger.info(f"[DIAGNOSTIC] Taxe '{code_taxe}' chargée") taxe_trouvee = True break @@ -4346,7 +4346,7 @@ def diagnostic_article_complet( continue if not taxe_trouvee: - fallbacks_echecs.append("⚠️ Taxe: aucune taxe par défaut trouvée") + fallbacks_echecs.append(" Taxe: aucune taxe par défaut trouvée") except Exception as e: fallbacks_echecs.append(f"Taxe: {str(e)[:80]}") logger.error(f"[DIAGNOSTIC] Erreur copie Taxe: {e}") @@ -4356,7 +4356,7 @@ def diagnostic_article_complet( unite_obj = getattr(article_modele_obj, "UniteVente", None) if unite_obj is not None: article.UniteVente = unite_obj - fallbacks_appliques.append(f"✅ UniteVente copiée depuis {reference_modele}") + fallbacks_appliques.append(f" UniteVente copiée depuis {reference_modele}") logger.info("[DIAGNOSTIC] UniteVente copiée avec succès") 
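The diagnostic endpoint above copies every public property of a model article onto the new one, except fields that must stay unique (AR_Ref, AR_Raccourci, AR_CodeBarre, ...). A minimal sketch of that copy loop on plain Python objects; the excluded-field set is trimmed to the names visible above, and the callable check is an addition so the sketch stays safe outside COM:

CHAMPS_EXCLUS = {"AR_Ref", "AR_Raccourci", "AR_CodeBarre", "AR_Photo", "cbMarq", "cbCreateur"}

def extraire_champs_modele(modele: object) -> dict:
    """Collect the copyable, non-empty public attributes of the model object."""
    champs = {}
    for attr in dir(modele):
        # Skip private and lowercase names (Sage COM properties are CamelCase).
        if attr.startswith("_") or attr[0].islower():
            continue
        if attr in CHAMPS_EXCLUS:
            continue  # must remain unique or auto-generated
        try:
            val = getattr(modele, attr)
        except Exception:
            continue  # COM properties can raise on read
        if callable(val):
            continue  # not in the original loop; avoids copying methods here
        if val is not None and str(val) not in ("None", ""):
            champs[attr] = val
    return champs

def appliquer_champs(article: object, champs: dict) -> list:
    """Apply the copied fields one by one, logging failures instead of aborting."""
    journal = []
    for champ, valeur in champs.items():
        try:
            setattr(article, champ, valeur)
            journal.append(f"{champ} = {valeur}")
        except Exception as e:
            journal.append(f"{champ}: {str(e)[:50]}")
    return journal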
else: # Unité NULL - essayer de charger une unité par défaut @@ -4369,7 +4369,7 @@ def diagnostic_article_complet( unite_defaut = factory_unite.ReadIntitule(code_unite) if unite_defaut: article.UniteVente = unite_defaut - fallbacks_appliques.append(f"✅ Unité par défaut '{code_unite}' chargée") + fallbacks_appliques.append(f" Unité par défaut '{code_unite}' chargée") logger.info(f"[DIAGNOSTIC] Unité '{code_unite}' chargée") unite_trouvee = True break @@ -4378,13 +4378,13 @@ def diagnostic_article_complet( continue if not unite_trouvee: - fallbacks_echecs.append("⚠️ Unité: aucune unité par défaut trouvée") + fallbacks_echecs.append(" Unité: aucune unité par défaut trouvée") except Exception as e: fallbacks_echecs.append(f"Unité: {str(e)[:80]}") logger.error(f"[DIAGNOSTIC] Erreur copie Unité: {e}") else: - fallbacks_echecs.append("⚠️ Aucun modèle fourni - objets COM non définis") + fallbacks_echecs.append(" Aucun modèle fourni - objets COM non définis") # 📊 Étape 5: Scanner l'état final avant Write etat_final = {} @@ -4409,12 +4409,12 @@ def diagnostic_article_complet( try: article.Write() write_success = True - erreur_write = "✅ SUCCESS - Article créé !" - logger.info(f"[DIAGNOSTIC] ✅ Article {reference} créé avec succès") + erreur_write = " SUCCESS - Article créé !" + logger.info(f"[DIAGNOSTIC] Article {reference} créé avec succès") except Exception as e: erreur_write = str(e) - logger.error(f"[DIAGNOSTIC] ❌ Échec Write(): {e}") + logger.error(f"[DIAGNOSTIC] Échec Write(): {e}") # Extraire erreurs Sage try: @@ -4473,7 +4473,7 @@ def diagnostic_article_complet( if write_success: diagnostic["message"] = "🎉 Article créé avec succès (rollback effectué)" else: - diagnostic["message"] = "❌ Échec de création" + diagnostic["message"] = " Échec de création" # Analyser quels champs posent problème champs_none = [k for k, v in etat_final.items() if v in ['None', 'N/A']] diff --git a/sage_connector.py b/sage_connector.py index ed0cd02..a6d560e 100644 --- a/sage_connector.py +++ b/sage_connector.py @@ -12,11 +12,65 @@ import pywintypes import os import glob import tempfile -import logging from dataclasses import dataclass, field import zlib import struct +from utils.articles.articles_data_sql import ( + _enrichir_stock_emplacements, + _enrichir_gammes_articles, + _enrichir_tarifs_clients, + _enrichir_nomenclature, + _enrichir_compta_articles, + _enrichir_fournisseurs_multiples, + _enrichir_depots_details, + _enrichir_emplacements_details, + _enrichir_gammes_enumeres, + _enrichir_references_enumerees, + _enrichir_medias_articles, + _enrichir_prix_gammes, + _enrichir_conditionnements, + _mapper_article_depuis_row, + _enrichir_stocks_articles, + _enrichir_fournisseurs_articles, + _enrichir_familles_articles, + _enrichir_tva_articles, +) + +from utils.tiers.clients.clients_data import ( + _extraire_client, + _cast_client, + +) + +from utils.tiers.contacts.contacts import obtenir_contact + +from utils.articles.stock_check import verifier_stock_suffisant +from utils.articles.articles_data_com import _extraire_article +from utils.tiers.tiers_data_sql import _build_tiers_select_query + +from utils.functions.functions import ( + _safe_strip, + _safe_int, + _clean_str, + _try_set_attribute +) + +from utils.functions.items_to_dict import ( + _contact_to_dict, + _row_to_contact_dict, + _row_to_tiers_dict, +) + +from utils.functions.sage_utilities import ( + _verifier_devis_non_transforme, + verifier_si_deja_transforme_sql, + peut_etre_transforme, + lire_erreurs_sage +) + +from utils.documents.devis_extraction 
import _extraire_infos_devis + logger = logging.getLogger(__name__) @@ -126,10 +180,7 @@ class SageConnector: pass def lister_tous_fournisseurs(self, filtre=""): - """ - Liste tous les fournisseurs avec TOUS les champs - Symétrie complète avec lister_tous_clients - """ + try: with self._get_sql_connection() as conn: cursor = conn.cursor() @@ -202,46 +253,46 @@ class SageConnector: fournisseurs = [] for row in rows: fournisseur = { - "numero": self._safe_strip(row.CT_Num), - "intitule": self._safe_strip(row.CT_Intitule), + "numero": _safe_strip(row.CT_Num), + "intitule": _safe_strip(row.CT_Intitule), "type_tiers": row.CT_Type, - "qualite": self._safe_strip(row.CT_Qualite), - "classement": self._safe_strip(row.CT_Classement), - "raccourci": self._safe_strip(row.CT_Raccourci), - "siret": self._safe_strip(row.CT_Siret), - "tva_intra": self._safe_strip(row.CT_Identifiant), - "code_naf": self._safe_strip(row.CT_Ape), + "qualite": _safe_strip(row.CT_Qualite), + "classement": _safe_strip(row.CT_Classement), + "raccourci": _safe_strip(row.CT_Raccourci), + "siret": _safe_strip(row.CT_Siret), + "tva_intra": _safe_strip(row.CT_Identifiant), + "code_naf": _safe_strip(row.CT_Ape), - "contact": self._safe_strip(row.CT_Contact), - "adresse": self._safe_strip(row.CT_Adresse), - "complement": self._safe_strip(row.CT_Complement), - "code_postal": self._safe_strip(row.CT_CodePostal), - "ville": self._safe_strip(row.CT_Ville), - "region": self._safe_strip(row.CT_CodeRegion), - "pays": self._safe_strip(row.CT_Pays), + "contact": _safe_strip(row.CT_Contact), + "adresse": _safe_strip(row.CT_Adresse), + "complement": _safe_strip(row.CT_Complement), + "code_postal": _safe_strip(row.CT_CodePostal), + "ville": _safe_strip(row.CT_Ville), + "region": _safe_strip(row.CT_CodeRegion), + "pays": _safe_strip(row.CT_Pays), - "telephone": self._safe_strip(row.CT_Telephone), - "telecopie": self._safe_strip(row.CT_Telecopie), - "email": self._safe_strip(row.CT_EMail), - "site_web": self._safe_strip(row.CT_Site), - "facebook": self._safe_strip(row.CT_Facebook), - "linkedin": self._safe_strip(row.CT_LinkedIn), + "telephone": _safe_strip(row.CT_Telephone), + "telecopie": _safe_strip(row.CT_Telecopie), + "email": _safe_strip(row.CT_EMail), + "site_web": _safe_strip(row.CT_Site), + "facebook": _safe_strip(row.CT_Facebook), + "linkedin": _safe_strip(row.CT_LinkedIn), "taux01": row.CT_Taux01, "taux02": row.CT_Taux02, "taux03": row.CT_Taux03, "taux04": row.CT_Taux04, - "statistique01": self._safe_strip(row.CT_Statistique01), - "statistique02": self._safe_strip(row.CT_Statistique02), - "statistique03": self._safe_strip(row.CT_Statistique03), - "statistique04": self._safe_strip(row.CT_Statistique04), - "statistique05": self._safe_strip(row.CT_Statistique05), - "statistique06": self._safe_strip(row.CT_Statistique06), - "statistique07": self._safe_strip(row.CT_Statistique07), - "statistique08": self._safe_strip(row.CT_Statistique08), - "statistique09": self._safe_strip(row.CT_Statistique09), - "statistique10": self._safe_strip(row.CT_Statistique10), + "statistique01": _safe_strip(row.CT_Statistique01), + "statistique02": _safe_strip(row.CT_Statistique02), + "statistique03": _safe_strip(row.CT_Statistique03), + "statistique04": _safe_strip(row.CT_Statistique04), + "statistique05": _safe_strip(row.CT_Statistique05), + "statistique06": _safe_strip(row.CT_Statistique06), + "statistique07": _safe_strip(row.CT_Statistique07), + "statistique08": _safe_strip(row.CT_Statistique08), + "statistique09": _safe_strip(row.CT_Statistique09), + 
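The refactor above swaps the connector's self._safe_strip / self._safe_int methods for module-level helpers imported from utils.functions.functions. Their bodies are not part of this diff; a plausible minimal sketch, assuming they only guard against NULL columns and the trailing padding of Sage CHAR fields:

from typing import Any

def _safe_strip(value: Any) -> str:
    """Return a stripped string, or '' when the SQL column is NULL."""
    if value is None:
        return ""
    return str(value).strip()

def _safe_int(value: Any, default: int = 0) -> int:
    """Best-effort int conversion for columns that may be NULL or come back as text."""
    try:
        return int(value)
    except (TypeError, ValueError):
        return default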
"statistique10": _safe_strip(row.CT_Statistique10), "encours_autorise": row.CT_Encours, "assurance_credit": row.CT_Assurance, @@ -265,22 +316,22 @@ class SageConnector: "delai_transport": row.CT_DelaiTransport, "delai_appro": row.CT_DelaiAppro, - "commentaire": self._safe_strip(row.CT_Commentaire), + "commentaire": _safe_strip(row.CT_Commentaire), - "section_analytique": self._safe_strip(row.CA_Num), + "section_analytique": _safe_strip(row.CA_Num), "mode_reglement_code": row.MR_No, "surveillance_active": (row.CT_Surveillance == 1), - "coface": self._safe_strip(row.CT_Coface), - "forme_juridique": self._safe_strip(row.CT_SvFormeJuri), - "effectif": self._safe_strip(row.CT_SvEffectif), - "sv_regularite": self._safe_strip(row.CT_SvRegul), - "sv_cotation": self._safe_strip(row.CT_SvCotation), - "sv_objet_maj": self._safe_strip(row.CT_SvObjetMaj), + "coface": _safe_strip(row.CT_Coface), + "forme_juridique": _safe_strip(row.CT_SvFormeJuri), + "effectif": _safe_strip(row.CT_SvEffectif), + "sv_regularite": _safe_strip(row.CT_SvRegul), + "sv_cotation": _safe_strip(row.CT_SvCotation), + "sv_objet_maj": _safe_strip(row.CT_SvObjetMaj), "sv_chiffre_affaires": row.CT_SvCA, "sv_resultat": row.CT_SvResultat, - "compte_general": self._safe_strip(row.CG_NumPrinc), + "compte_general": _safe_strip(row.CG_NumPrinc), "categorie_tarif": row.N_CatTarif, "categorie_compta": row.N_CatCompta, } @@ -289,19 +340,15 @@ class SageConnector: fournisseurs.append(fournisseur) - logger.info(f"✅ SQL: {len(fournisseurs)} fournisseurs avec {len(fournisseur)} champs") + logger.info(f" SQL: {len(fournisseurs)} fournisseurs avec {len(fournisseur)} champs") return fournisseurs except Exception as e: - logger.error(f"❌ Erreur SQL fournisseurs: {e}") + logger.error(f" Erreur SQL fournisseurs: {e}") raise RuntimeError(f"Erreur lecture fournisseurs: {str(e)}") def lire_fournisseur(self, code_fournisseur): - """ - Lit un fournisseur avec TOUS les champs (identique à lister_tous_fournisseurs) - Symétrie complète GET/POST - """ try: with self._get_sql_connection() as conn: cursor = conn.cursor() @@ -367,46 +414,46 @@ class SageConnector: return None fournisseur = { - "numero": self._safe_strip(row.CT_Num), - "intitule": self._safe_strip(row.CT_Intitule), + "numero": _safe_strip(row.CT_Num), + "intitule": _safe_strip(row.CT_Intitule), "type_tiers": row.CT_Type, - "qualite": self._safe_strip(row.CT_Qualite), - "classement": self._safe_strip(row.CT_Classement), - "raccourci": self._safe_strip(row.CT_Raccourci), - "siret": self._safe_strip(row.CT_Siret), - "tva_intra": self._safe_strip(row.CT_Identifiant), - "code_naf": self._safe_strip(row.CT_Ape), + "qualite": _safe_strip(row.CT_Qualite), + "classement": _safe_strip(row.CT_Classement), + "raccourci": _safe_strip(row.CT_Raccourci), + "siret": _safe_strip(row.CT_Siret), + "tva_intra": _safe_strip(row.CT_Identifiant), + "code_naf": _safe_strip(row.CT_Ape), - "contact": self._safe_strip(row.CT_Contact), - "adresse": self._safe_strip(row.CT_Adresse), - "complement": self._safe_strip(row.CT_Complement), - "code_postal": self._safe_strip(row.CT_CodePostal), - "ville": self._safe_strip(row.CT_Ville), - "region": self._safe_strip(row.CT_CodeRegion), - "pays": self._safe_strip(row.CT_Pays), + "contact": _safe_strip(row.CT_Contact), + "adresse": _safe_strip(row.CT_Adresse), + "complement": _safe_strip(row.CT_Complement), + "code_postal": _safe_strip(row.CT_CodePostal), + "ville": _safe_strip(row.CT_Ville), + "region": _safe_strip(row.CT_CodeRegion), + "pays": _safe_strip(row.CT_Pays), - "telephone": 
self._safe_strip(row.CT_Telephone), - "telecopie": self._safe_strip(row.CT_Telecopie), - "email": self._safe_strip(row.CT_EMail), - "site_web": self._safe_strip(row.CT_Site), - "facebook": self._safe_strip(row.CT_Facebook), - "linkedin": self._safe_strip(row.CT_LinkedIn), + "telephone": _safe_strip(row.CT_Telephone), + "telecopie": _safe_strip(row.CT_Telecopie), + "email": _safe_strip(row.CT_EMail), + "site_web": _safe_strip(row.CT_Site), + "facebook": _safe_strip(row.CT_Facebook), + "linkedin": _safe_strip(row.CT_LinkedIn), "taux01": row.CT_Taux01, "taux02": row.CT_Taux02, "taux03": row.CT_Taux03, "taux04": row.CT_Taux04, - "statistique01": self._safe_strip(row.CT_Statistique01), - "statistique02": self._safe_strip(row.CT_Statistique02), - "statistique03": self._safe_strip(row.CT_Statistique03), - "statistique04": self._safe_strip(row.CT_Statistique04), - "statistique05": self._safe_strip(row.CT_Statistique05), - "statistique06": self._safe_strip(row.CT_Statistique06), - "statistique07": self._safe_strip(row.CT_Statistique07), - "statistique08": self._safe_strip(row.CT_Statistique08), - "statistique09": self._safe_strip(row.CT_Statistique09), - "statistique10": self._safe_strip(row.CT_Statistique10), + "statistique01": _safe_strip(row.CT_Statistique01), + "statistique02": _safe_strip(row.CT_Statistique02), + "statistique03": _safe_strip(row.CT_Statistique03), + "statistique04": _safe_strip(row.CT_Statistique04), + "statistique05": _safe_strip(row.CT_Statistique05), + "statistique06": _safe_strip(row.CT_Statistique06), + "statistique07": _safe_strip(row.CT_Statistique07), + "statistique08": _safe_strip(row.CT_Statistique08), + "statistique09": _safe_strip(row.CT_Statistique09), + "statistique10": _safe_strip(row.CT_Statistique10), "encours_autorise": row.CT_Encours, "assurance_credit": row.CT_Assurance, @@ -430,37 +477,35 @@ class SageConnector: "delai_transport": row.CT_DelaiTransport, "delai_appro": row.CT_DelaiAppro, - "commentaire": self._safe_strip(row.CT_Commentaire), + "commentaire": _safe_strip(row.CT_Commentaire), - "section_analytique": self._safe_strip(row.CA_Num), + "section_analytique": _safe_strip(row.CA_Num), "mode_reglement_code": row.MR_No, "surveillance_active": (row.CT_Surveillance == 1), - "coface": self._safe_strip(row.CT_Coface), - "forme_juridique": self._safe_strip(row.CT_SvFormeJuri), - "effectif": self._safe_strip(row.CT_SvEffectif), - "sv_regularite": self._safe_strip(row.CT_SvRegul), - "sv_cotation": self._safe_strip(row.CT_SvCotation), - "sv_objet_maj": self._safe_strip(row.CT_SvObjetMaj), + "coface": _safe_strip(row.CT_Coface), + "forme_juridique": _safe_strip(row.CT_SvFormeJuri), + "effectif": _safe_strip(row.CT_SvEffectif), + "sv_regularite": _safe_strip(row.CT_SvRegul), + "sv_cotation": _safe_strip(row.CT_SvCotation), + "sv_objet_maj": _safe_strip(row.CT_SvObjetMaj), "sv_chiffre_affaires": row.CT_SvCA, "sv_resultat": row.CT_SvResultat, - "compte_general": self._safe_strip(row.CG_NumPrinc), + "compte_general": _safe_strip(row.CG_NumPrinc), "categorie_tarif": row.N_CatTarif, "categorie_compta": row.N_CatCompta, } fournisseur["contacts"] = self._get_contacts_client(row.CT_Num, conn) - logger.info(f"✅ SQL: Fournisseur {code_fournisseur} avec {len(fournisseur)} champs") + logger.info(f" SQL: Fournisseur {code_fournisseur} avec {len(fournisseur)} champs") return fournisseur except Exception as e: - logger.error(f"❌ Erreur SQL fournisseur {code_fournisseur}: {e}") + logger.error(f" Erreur SQL fournisseur {code_fournisseur}: {e}") return None - - - + def 
creer_fournisseur(self, fournisseur_data: Dict) -> Dict: if not self.cial: raise RuntimeError("Connexion Sage non établie") @@ -735,7 +780,7 @@ class SageConnector: if not persist: raise ValueError(f"Fournisseur {code} introuvable") - fournisseur = self._cast_client(persist) # Réutiliser _cast_client + fournisseur = _cast_client(persist) # Réutiliser _cast_client if not fournisseur: raise ValueError(f"Impossible de charger le fournisseur {code}") @@ -907,11 +952,7 @@ class SageConnector: raise RuntimeError(f"Erreur technique Sage: {error_message}") - def _get_contacts_client(self, numero: str, conn) -> list: - """ - Récupère tous les contacts d'un client avec indication du contact par défaut - """ try: cursor = conn.cursor() @@ -940,11 +981,11 @@ class SageConnector: nom_contact_defaut = None if client_row: - nom_contact_defaut = self._safe_strip(client_row.CT_Contact) + nom_contact_defaut = _safe_strip(client_row.CT_Contact) contacts = [] for row in rows: - contact = self._row_to_contact_dict(row) + contact = _row_to_contact_dict(row) if nom_contact_defaut: nom_complet = f"{contact.get('prenom', '')} {contact['nom']}".strip() @@ -960,7 +1001,7 @@ class SageConnector: return contacts except Exception as e: - logger.warning(f"⚠️ Impossible de récupérer contacts pour {numero}: {e}") + logger.warning(f" Impossible de récupérer contacts pour {numero}: {e}") return [] def lister_tous_clients(self, filtre=""): @@ -1040,46 +1081,46 @@ class SageConnector: clients = [] for row in rows: client = { - "numero": self._safe_strip(row.CT_Num), - "intitule": self._safe_strip(row.CT_Intitule), + "numero": _safe_strip(row.CT_Num), + "intitule": _safe_strip(row.CT_Intitule), "type_tiers": row.CT_Type, - "qualite": self._safe_strip(row.CT_Qualite), - "classement": self._safe_strip(row.CT_Classement), - "raccourci": self._safe_strip(row.CT_Raccourci), - "siret": self._safe_strip(row.CT_Siret), - "tva_intra": self._safe_strip(row.CT_Identifiant), - "code_naf": self._safe_strip(row.CT_Ape), + "qualite": _safe_strip(row.CT_Qualite), + "classement": _safe_strip(row.CT_Classement), + "raccourci": _safe_strip(row.CT_Raccourci), + "siret": _safe_strip(row.CT_Siret), + "tva_intra": _safe_strip(row.CT_Identifiant), + "code_naf": _safe_strip(row.CT_Ape), - "contact": self._safe_strip(row.CT_Contact), - "adresse": self._safe_strip(row.CT_Adresse), - "complement": self._safe_strip(row.CT_Complement), - "code_postal": self._safe_strip(row.CT_CodePostal), - "ville": self._safe_strip(row.CT_Ville), - "region": self._safe_strip(row.CT_CodeRegion), - "pays": self._safe_strip(row.CT_Pays), + "contact": _safe_strip(row.CT_Contact), + "adresse": _safe_strip(row.CT_Adresse), + "complement": _safe_strip(row.CT_Complement), + "code_postal": _safe_strip(row.CT_CodePostal), + "ville": _safe_strip(row.CT_Ville), + "region": _safe_strip(row.CT_CodeRegion), + "pays": _safe_strip(row.CT_Pays), - "telephone": self._safe_strip(row.CT_Telephone), - "telecopie": self._safe_strip(row.CT_Telecopie), - "email": self._safe_strip(row.CT_EMail), - "site_web": self._safe_strip(row.CT_Site), - "facebook": self._safe_strip(row.CT_Facebook), - "linkedin": self._safe_strip(row.CT_LinkedIn), + "telephone": _safe_strip(row.CT_Telephone), + "telecopie": _safe_strip(row.CT_Telecopie), + "email": _safe_strip(row.CT_EMail), + "site_web": _safe_strip(row.CT_Site), + "facebook": _safe_strip(row.CT_Facebook), + "linkedin": _safe_strip(row.CT_LinkedIn), "taux01": row.CT_Taux01, "taux02": row.CT_Taux02, "taux03": row.CT_Taux03, "taux04": row.CT_Taux04, - 
"statistique01": self._safe_strip(row.CT_Statistique01), - "statistique02": self._safe_strip(row.CT_Statistique02), - "statistique03": self._safe_strip(row.CT_Statistique03), - "statistique04": self._safe_strip(row.CT_Statistique04), - "statistique05": self._safe_strip(row.CT_Statistique05), - "statistique06": self._safe_strip(row.CT_Statistique06), - "statistique07": self._safe_strip(row.CT_Statistique07), - "statistique08": self._safe_strip(row.CT_Statistique08), - "statistique09": self._safe_strip(row.CT_Statistique09), - "statistique10": self._safe_strip(row.CT_Statistique10), + "statistique01": _safe_strip(row.CT_Statistique01), + "statistique02": _safe_strip(row.CT_Statistique02), + "statistique03": _safe_strip(row.CT_Statistique03), + "statistique04": _safe_strip(row.CT_Statistique04), + "statistique05": _safe_strip(row.CT_Statistique05), + "statistique06": _safe_strip(row.CT_Statistique06), + "statistique07": _safe_strip(row.CT_Statistique07), + "statistique08": _safe_strip(row.CT_Statistique08), + "statistique09": _safe_strip(row.CT_Statistique09), + "statistique10": _safe_strip(row.CT_Statistique10), "encours_autorise": row.CT_Encours, "assurance_credit": row.CT_Assurance, @@ -1103,22 +1144,22 @@ class SageConnector: "delai_transport": row.CT_DelaiTransport, "delai_appro": row.CT_DelaiAppro, - "commentaire": self._safe_strip(row.CT_Commentaire), + "commentaire": _safe_strip(row.CT_Commentaire), - "section_analytique": self._safe_strip(row.CA_Num), + "section_analytique": _safe_strip(row.CA_Num), "mode_reglement_code": row.MR_No, "surveillance_active": (row.CT_Surveillance == 1), - "coface": self._safe_strip(row.CT_Coface), - "forme_juridique": self._safe_strip(row.CT_SvFormeJuri), - "effectif": self._safe_strip(row.CT_SvEffectif), - "sv_regularite": self._safe_strip(row.CT_SvRegul), - "sv_cotation": self._safe_strip(row.CT_SvCotation), - "sv_objet_maj": self._safe_strip(row.CT_SvObjetMaj), + "coface": _safe_strip(row.CT_Coface), + "forme_juridique": _safe_strip(row.CT_SvFormeJuri), + "effectif": _safe_strip(row.CT_SvEffectif), + "sv_regularite": _safe_strip(row.CT_SvRegul), + "sv_cotation": _safe_strip(row.CT_SvCotation), + "sv_objet_maj": _safe_strip(row.CT_SvObjetMaj), "sv_chiffre_affaires": row.CT_SvCA, "sv_resultat": row.CT_SvResultat, - "compte_general": self._safe_strip(row.CG_NumPrinc), + "compte_general": _safe_strip(row.CG_NumPrinc), "categorie_tarif": row.N_CatTarif, "categorie_compta": row.N_CatCompta, } @@ -1127,13 +1168,12 @@ class SageConnector: clients.append(client) - logger.info(f"✅ SQL: {len(clients)} clients avec {len(client)} champs") + logger.info(f" SQL: {len(clients)} clients avec {len(client)} champs") return clients except Exception as e: - logger.error(f"❌ Erreur SQL clients: {e}") + logger.error(f" Erreur SQL clients: {e}") raise RuntimeError(f"Erreur lecture clients: {str(e)}") - def lire_client(self, code_client): """ @@ -1205,46 +1245,46 @@ class SageConnector: return None client = { - "numero": self._safe_strip(row.CT_Num), - "intitule": self._safe_strip(row.CT_Intitule), + "numero": _safe_strip(row.CT_Num), + "intitule": _safe_strip(row.CT_Intitule), "type_tiers": row.CT_Type, - "qualite": self._safe_strip(row.CT_Qualite), - "classement": self._safe_strip(row.CT_Classement), - "raccourci": self._safe_strip(row.CT_Raccourci), - "siret": self._safe_strip(row.CT_Siret), - "tva_intra": self._safe_strip(row.CT_Identifiant), - "code_naf": self._safe_strip(row.CT_Ape), + "qualite": _safe_strip(row.CT_Qualite), + "classement": 
_safe_strip(row.CT_Classement), + "raccourci": _safe_strip(row.CT_Raccourci), + "siret": _safe_strip(row.CT_Siret), + "tva_intra": _safe_strip(row.CT_Identifiant), + "code_naf": _safe_strip(row.CT_Ape), - "contact": self._safe_strip(row.CT_Contact), - "adresse": self._safe_strip(row.CT_Adresse), - "complement": self._safe_strip(row.CT_Complement), - "code_postal": self._safe_strip(row.CT_CodePostal), - "ville": self._safe_strip(row.CT_Ville), - "region": self._safe_strip(row.CT_CodeRegion), - "pays": self._safe_strip(row.CT_Pays), + "contact": _safe_strip(row.CT_Contact), + "adresse": _safe_strip(row.CT_Adresse), + "complement": _safe_strip(row.CT_Complement), + "code_postal": _safe_strip(row.CT_CodePostal), + "ville": _safe_strip(row.CT_Ville), + "region": _safe_strip(row.CT_CodeRegion), + "pays": _safe_strip(row.CT_Pays), - "telephone": self._safe_strip(row.CT_Telephone), - "telecopie": self._safe_strip(row.CT_Telecopie), - "email": self._safe_strip(row.CT_EMail), - "site_web": self._safe_strip(row.CT_Site), - "facebook": self._safe_strip(row.CT_Facebook), - "linkedin": self._safe_strip(row.CT_LinkedIn), + "telephone": _safe_strip(row.CT_Telephone), + "telecopie": _safe_strip(row.CT_Telecopie), + "email": _safe_strip(row.CT_EMail), + "site_web": _safe_strip(row.CT_Site), + "facebook": _safe_strip(row.CT_Facebook), + "linkedin": _safe_strip(row.CT_LinkedIn), "taux01": row.CT_Taux01, "taux02": row.CT_Taux02, "taux03": row.CT_Taux03, "taux04": row.CT_Taux04, - "statistique01": self._safe_strip(row.CT_Statistique01), - "statistique02": self._safe_strip(row.CT_Statistique02), - "statistique03": self._safe_strip(row.CT_Statistique03), - "statistique04": self._safe_strip(row.CT_Statistique04), - "statistique05": self._safe_strip(row.CT_Statistique05), - "statistique06": self._safe_strip(row.CT_Statistique06), - "statistique07": self._safe_strip(row.CT_Statistique07), - "statistique08": self._safe_strip(row.CT_Statistique08), - "statistique09": self._safe_strip(row.CT_Statistique09), - "statistique10": self._safe_strip(row.CT_Statistique10), + "statistique01": _safe_strip(row.CT_Statistique01), + "statistique02": _safe_strip(row.CT_Statistique02), + "statistique03": _safe_strip(row.CT_Statistique03), + "statistique04": _safe_strip(row.CT_Statistique04), + "statistique05": _safe_strip(row.CT_Statistique05), + "statistique06": _safe_strip(row.CT_Statistique06), + "statistique07": _safe_strip(row.CT_Statistique07), + "statistique08": _safe_strip(row.CT_Statistique08), + "statistique09": _safe_strip(row.CT_Statistique09), + "statistique10": _safe_strip(row.CT_Statistique10), "encours_autorise": row.CT_Encours, "assurance_credit": row.CT_Assurance, @@ -1268,36 +1308,35 @@ class SageConnector: "delai_transport": row.CT_DelaiTransport, "delai_appro": row.CT_DelaiAppro, - "commentaire": self._safe_strip(row.CT_Commentaire), + "commentaire": _safe_strip(row.CT_Commentaire), - "section_analytique": self._safe_strip(row.CA_Num), + "section_analytique": _safe_strip(row.CA_Num), "mode_reglement_code": row.MR_No, "surveillance_active": (row.CT_Surveillance == 1), - "coface": self._safe_strip(row.CT_Coface), - "forme_juridique": self._safe_strip(row.CT_SvFormeJuri), - "effectif": self._safe_strip(row.CT_SvEffectif), - "sv_regularite": self._safe_strip(row.CT_SvRegul), - "sv_cotation": self._safe_strip(row.CT_SvCotation), - "sv_objet_maj": self._safe_strip(row.CT_SvObjetMaj), + "coface": _safe_strip(row.CT_Coface), + "forme_juridique": _safe_strip(row.CT_SvFormeJuri), + "effectif": 
_safe_strip(row.CT_SvEffectif), + "sv_regularite": _safe_strip(row.CT_SvRegul), + "sv_cotation": _safe_strip(row.CT_SvCotation), + "sv_objet_maj": _safe_strip(row.CT_SvObjetMaj), "sv_chiffre_affaires": row.CT_SvCA, "sv_resultat": row.CT_SvResultat, - "compte_general": self._safe_strip(row.CG_NumPrinc), + "compte_general": _safe_strip(row.CG_NumPrinc), "categorie_tarif": row.N_CatTarif, "categorie_compta": row.N_CatCompta, } client["contacts"] = self._get_contacts_client(row.CT_Num, conn) - logger.info(f"✅ SQL: Client {code_client} avec {len(client)} champs") + logger.info(f" SQL: Client {code_client} avec {len(client)} champs") return client except Exception as e: - logger.error(f"❌ Erreur SQL client {code_client}: {e}") + logger.error(f" Erreur SQL client {code_client}: {e}") return None - def lister_tous_articles(self, filtre=""): try: with self._get_sql_connection() as conn: @@ -1463,1503 +1502,35 @@ class SageConnector: if "Marque commerciale" in row_data: logger.debug(f"[DEBUG] Marque commerciale trouvée: {row_data['Marque commerciale']}") - article_data = self._mapper_article_depuis_row(row_data, colonnes_config) + article_data = _mapper_article_depuis_row(row_data, colonnes_config) articles.append(article_data) - articles = self._enrichir_stocks_articles(articles, cursor) - articles = self._enrichir_familles_articles(articles, cursor) - articles = self._enrichir_fournisseurs_articles(articles, cursor) - articles = self._enrichir_tva_articles(articles, cursor) + articles = _enrichir_stocks_articles(articles, cursor) + articles = _enrichir_familles_articles(articles, cursor) + articles = _enrichir_fournisseurs_articles(articles, cursor) + articles = _enrichir_tva_articles(articles, cursor) - articles = self._enrichir_stock_emplacements(articles, cursor) - articles = self._enrichir_gammes_articles(articles, cursor) - articles = self._enrichir_tarifs_clients(articles, cursor) - articles = self._enrichir_nomenclature(articles, cursor) - articles = self._enrichir_compta_articles(articles, cursor) - articles = self._enrichir_fournisseurs_multiples(articles, cursor) - articles = self._enrichir_depots_details(articles, cursor) - articles = self._enrichir_emplacements_details(articles, cursor) - articles = self._enrichir_gammes_enumeres(articles, cursor) - articles = self._enrichir_references_enumerees(articles, cursor) - articles = self._enrichir_medias_articles(articles, cursor) - articles = self._enrichir_prix_gammes(articles, cursor) - articles = self._enrichir_conditionnements(articles, cursor) + articles = _enrichir_stock_emplacements(articles, cursor) + articles = _enrichir_gammes_articles(articles, cursor) + articles = _enrichir_tarifs_clients(articles, cursor) + articles = _enrichir_nomenclature(articles, cursor) + articles = _enrichir_compta_articles(articles, cursor) + articles = _enrichir_fournisseurs_multiples(articles, cursor) + articles = _enrichir_depots_details(articles, cursor) + articles = _enrichir_emplacements_details(articles, cursor) + articles = _enrichir_gammes_enumeres(articles, cursor) + articles = _enrichir_references_enumerees(articles, cursor) + articles = _enrichir_medias_articles(articles, cursor) + articles = _enrichir_prix_gammes(articles, cursor) + articles = _enrichir_conditionnements(articles, cursor) return articles except Exception as e: - logger.error(f"✗ Erreur SQL articles: {e}", exc_info=True) + logger.error(f" Erreur SQL articles: {e}", exc_info=True) raise RuntimeError(f"Erreur lecture articles: {str(e)}") - - def _enrichir_stock_emplacements(self, 
articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec le détail du stock par emplacement - Structure: articles[i]["emplacements"] = [{"depot": "01", "emplacement": "A1", "qte": 10}, ...] - """ - try: - logger.info(f" → Enrichissement stock emplacements...") - - references = [a["reference"] for a in articles if a["reference"]] - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - query = f""" - SELECT - AR_Ref, - DE_No, - DP_No, - AE_QteSto, - AE_QtePrepa, - AE_QteAControler, - cbCreation, - cbModification - FROM F_ARTSTOCKEMPL - WHERE AR_Ref IN ({placeholders}) - ORDER BY AR_Ref, DE_No, DP_No - """ - - cursor.execute(query, references) - rows = cursor.fetchall() - - emplacements_map = {} - for row in rows: - ref = self._safe_strip(row[0]) - if not ref: - continue - - if ref not in emplacements_map: - emplacements_map[ref] = [] - - emplacements_map[ref].append({ - "depot": self._safe_strip(row[1]), - "emplacement": self._safe_strip(row[2]), - "qte_stockee": float(row[3]) if row[3] else 0.0, - "qte_preparee": float(row[4]) if row[4] else 0.0, - "qte_a_controler": float(row[5]) if row[5] else 0.0, - "date_creation": row[6], - "date_modification": row[7], - }) - - for article in articles: - article["emplacements"] = emplacements_map.get(article["reference"], []) - article["nb_emplacements"] = len(article["emplacements"]) - - logger.info(f" ✓ {len(emplacements_map)} articles avec emplacements") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur stock emplacements: {e}") - for article in articles: - article["emplacements"] = [] - article["nb_emplacements"] = 0 - return articles - - def _enrichir_gammes_articles(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec les gammes (taille, couleur, etc.) - Structure: articles[i]["gammes"] = [{"numero": 1, "enumere": "001", "type": 0}, ...] - """ - try: - logger.info(f" → Enrichissement gammes articles...") - - references = [a["reference"] for a in articles if a["reference"]] - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - query = f""" - SELECT - AR_Ref, - AG_No, - EG_Enumere, - AG_Type, - cbCreation, - cbModification - FROM F_ARTGAMME - WHERE AR_Ref IN ({placeholders}) - ORDER BY AR_Ref, AG_No, EG_Enumere - """ - - cursor.execute(query, references) - rows = cursor.fetchall() - - gammes_map = {} - for row in rows: - ref = self._safe_strip(row[0]) - if not ref: - continue - - if ref not in gammes_map: - gammes_map[ref] = [] - - gammes_map[ref].append({ - "numero_gamme": int(row[1]) if row[1] else 0, - "enumere": self._safe_strip(row[2]), - "type_gamme": int(row[3]) if row[3] else 0, - "date_creation": row[4], - "date_modification": row[5], - }) - - for article in articles: - article["gammes"] = gammes_map.get(article["reference"], []) - article["nb_gammes"] = len(article["gammes"]) - - logger.info(f" ✓ {len(gammes_map)} articles avec gammes") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur gammes: {e}") - for article in articles: - article["gammes"] = [] - article["nb_gammes"] = 0 - return articles - - def _enrichir_tarifs_clients(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec les tarifs spécifiques par client/catégorie tarifaire - Structure: articles[i]["tarifs_clients"] = [{"client": "CLI001", "prix": 125.5}, ...] 
- """ - try: - logger.info(f" → Enrichissement tarifs clients...") - - references = [a["reference"] for a in articles if a["reference"]] - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - query = f""" - SELECT - AR_Ref, - AC_Categorie, - CT_Num, - AC_PrixVen, - AC_Coef, - AC_PrixTTC, - AC_Arrondi, - AC_QteMont, - EG_Champ, - AC_PrixDev, - AC_Devise, - AC_Remise, - AC_Calcul, - AC_TypeRem, - AC_RefClient, - AC_CoefNouv, - AC_PrixVenNouv, - AC_PrixDevNouv, - AC_RemiseNouv, - AC_DateApplication, - cbCreation, - cbModification - FROM F_ARTCLIENT - WHERE AR_Ref IN ({placeholders}) - ORDER BY AR_Ref, AC_Categorie, CT_Num - """ - - cursor.execute(query, references) - rows = cursor.fetchall() - - tarifs_map = {} - for row in rows: - ref = self._safe_strip(row[0]) - if not ref: - continue - - if ref not in tarifs_map: - tarifs_map[ref] = [] - - tarifs_map[ref].append({ - "categorie": int(row[1]) if row[1] else 0, - "client_num": self._safe_strip(row[2]), - "prix_vente": float(row[3]) if row[3] else 0.0, - "coefficient": float(row[4]) if row[4] else 0.0, - "prix_ttc": float(row[5]) if row[5] else 0.0, - "arrondi": float(row[6]) if row[6] else 0.0, - "qte_montant": float(row[7]) if row[7] else 0.0, - "enumere_gamme": int(row[8]) if row[8] else 0, - "prix_devise": float(row[9]) if row[9] else 0.0, - "devise": int(row[10]) if row[10] else 0, - "remise": float(row[11]) if row[11] else 0.0, - "mode_calcul": int(row[12]) if row[12] else 0, - "type_remise": int(row[13]) if row[13] else 0, - "ref_client": self._safe_strip(row[14]), - "coef_nouveau": float(row[15]) if row[15] else 0.0, - "prix_vente_nouveau": float(row[16]) if row[16] else 0.0, - "prix_devise_nouveau": float(row[17]) if row[17] else 0.0, - "remise_nouvelle": float(row[18]) if row[18] else 0.0, - "date_application": row[19], - "date_creation": row[20], - "date_modification": row[21], - }) - - for article in articles: - article["tarifs_clients"] = tarifs_map.get(article["reference"], []) - article["nb_tarifs_clients"] = len(article["tarifs_clients"]) - - logger.info(f" ✓ {len(tarifs_map)} articles avec tarifs clients") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur tarifs clients: {e}") - for article in articles: - article["tarifs_clients"] = [] - article["nb_tarifs_clients"] = 0 - return articles - - def _enrichir_nomenclature(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec la nomenclature de production (composants, opérations) - Structure: articles[i]["composants"] = [{"operation": "OP10", "ressource": "RES01"}, ...] 
- """ - try: - logger.info(f" → Enrichissement nomenclature...") - - references = [a["reference"] for a in articles if a["reference"]] - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - query = f""" - SELECT - AR_Ref, - AT_Operation, - RP_Code, - AT_Temps, - AT_Type, - AT_Description, - AT_Ordre, - AG_No1Comp, - AG_No2Comp, - AT_TypeRessource, - AT_Chevauche, - AT_Demarre, - AT_OperationChevauche, - AT_ValeurChevauche, - AT_TypeChevauche, - cbCreation, - cbModification - FROM F_ARTCOMPO - WHERE AR_Ref IN ({placeholders}) - ORDER BY AR_Ref, AT_Ordre, AT_Operation - """ - - cursor.execute(query, references) - rows = cursor.fetchall() - - composants_map = {} - for row in rows: - ref = self._safe_strip(row[0]) - if not ref: - continue - - if ref not in composants_map: - composants_map[ref] = [] - - composants_map[ref].append({ - "operation": self._safe_strip(row[1]), - "code_ressource": self._safe_strip(row[2]), - "temps": float(row[3]) if row[3] else 0.0, - "type": int(row[4]) if row[4] else 0, - "description": self._safe_strip(row[5]), - "ordre": int(row[6]) if row[6] else 0, - "gamme_1_comp": int(row[7]) if row[7] else 0, - "gamme_2_comp": int(row[8]) if row[8] else 0, - "type_ressource": int(row[9]) if row[9] else 0, - "chevauche": int(row[10]) if row[10] else 0, - "demarre": int(row[11]) if row[11] else 0, - "operation_chevauche": self._safe_strip(row[12]), - "valeur_chevauche": float(row[13]) if row[13] else 0.0, - "type_chevauche": int(row[14]) if row[14] else 0, - "date_creation": row[15], - "date_modification": row[16], - }) - - for article in articles: - article["composants"] = composants_map.get(article["reference"], []) - article["nb_composants"] = len(article["composants"]) - - logger.info(f" ✓ {len(composants_map)} articles avec nomenclature") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur nomenclature: {e}") - for article in articles: - article["composants"] = [] - article["nb_composants"] = 0 - return articles - - def _enrichir_compta_articles(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec les comptes comptables spécifiques par article - Structure: articles[i]["compta_vente/achat/stock"] = {...} - """ - try: - logger.info(f" → Enrichissement comptabilité articles...") - - references = [a["reference"] for a in articles if a["reference"]] - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - query = f""" - SELECT - AR_Ref, - ACP_Type, - ACP_Champ, - ACP_ComptaCPT_CompteG, - ACP_ComptaCPT_CompteA, - ACP_ComptaCPT_Taxe1, - ACP_ComptaCPT_Taxe2, - ACP_ComptaCPT_Taxe3, - ACP_ComptaCPT_Date1, - ACP_ComptaCPT_Date2, - ACP_ComptaCPT_Date3, - ACP_ComptaCPT_TaxeAnc1, - ACP_ComptaCPT_TaxeAnc2, - ACP_ComptaCPT_TaxeAnc3, - ACP_TypeFacture, - cbCreation, - cbModification - FROM F_ARTCOMPTA - WHERE AR_Ref IN ({placeholders}) - ORDER BY AR_Ref, ACP_Type, ACP_Champ - """ - - cursor.execute(query, references) - rows = cursor.fetchall() - - compta_map = {} - for row in rows: - ref = self._safe_strip(row[0]) - if not ref: - continue - - if ref not in compta_map: - compta_map[ref] = {"vente": [], "achat": [], "stock": []} - - type_compta = int(row[1]) if row[1] else 0 - type_key = {0: "vente", 1: "achat", 2: "stock"}.get(type_compta, "autre") - - compta_entry = { - "champ": int(row[2]) if row[2] else 0, - "compte_general": self._safe_strip(row[3]), - "compte_auxiliaire": self._safe_strip(row[4]), - "taxe_1": self._safe_strip(row[5]), - "taxe_2": 
self._safe_strip(row[6]), - "taxe_3": self._safe_strip(row[7]), - "taxe_date_1": row[8], - "taxe_date_2": row[9], - "taxe_date_3": row[10], - "taxe_anc_1": self._safe_strip(row[11]), - "taxe_anc_2": self._safe_strip(row[12]), - "taxe_anc_3": self._safe_strip(row[13]), - "type_facture": int(row[14]) if row[14] else 0, - "date_creation": row[15], - "date_modification": row[16], - } - - if type_key in compta_map[ref]: - compta_map[ref][type_key].append(compta_entry) - - for article in articles: - compta = compta_map.get(article["reference"], {"vente": [], "achat": [], "stock": []}) - article["compta_vente"] = compta["vente"] - article["compta_achat"] = compta["achat"] - article["compta_stock"] = compta["stock"] - - logger.info(f" ✓ {len(compta_map)} articles avec compta spécifique") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur comptabilité articles: {e}") - for article in articles: - article["compta_vente"] = [] - article["compta_achat"] = [] - article["compta_stock"] = [] - return articles - - def _enrichir_fournisseurs_multiples(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec TOUS les fournisseurs (pas seulement le principal) - Structure: articles[i]["fournisseurs"] = [{"num": "F001", "ref": "REF123", "prix": 45.5}, ...] - """ - try: - logger.info(f" → Enrichissement fournisseurs multiples...") - - references = [a["reference"] for a in articles if a["reference"]] - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - query = f""" - SELECT - AR_Ref, - CT_Num, - AF_RefFourniss, - AF_PrixAch, - AF_Unite, - AF_Conversion, - AF_DelaiAppro, - AF_Garantie, - AF_Colisage, - AF_QteMini, - AF_QteMont, - EG_Champ, - AF_Principal, - AF_PrixDev, - AF_Devise, - AF_Remise, - AF_ConvDiv, - AF_TypeRem, - AF_CodeBarre, - AF_PrixAchNouv, - AF_PrixDevNouv, - AF_RemiseNouv, - AF_DateApplication, - cbCreation, - cbModification - FROM F_ARTFOURNISS - WHERE AR_Ref IN ({placeholders}) - ORDER BY AR_Ref, AF_Principal DESC, CT_Num - """ - - cursor.execute(query, references) - rows = cursor.fetchall() - - fournisseurs_map = {} - for row in rows: - ref = self._safe_strip(row[0]) - if not ref: - continue - - if ref not in fournisseurs_map: - fournisseurs_map[ref] = [] - - fournisseurs_map[ref].append({ - "fournisseur_num": self._safe_strip(row[1]), - "ref_fournisseur": self._safe_strip(row[2]), - "prix_achat": float(row[3]) if row[3] else 0.0, - "unite": self._safe_strip(row[4]), - "conversion": float(row[5]) if row[5] else 0.0, - "delai_appro": int(row[6]) if row[6] else 0, - "garantie": int(row[7]) if row[7] else 0, - "colisage": int(row[8]) if row[8] else 0, - "qte_mini": float(row[9]) if row[9] else 0.0, - "qte_montant": float(row[10]) if row[10] else 0.0, - "enumere_gamme": int(row[11]) if row[11] else 0, - "est_principal": bool(row[12]), - "prix_devise": float(row[13]) if row[13] else 0.0, - "devise": int(row[14]) if row[14] else 0, - "remise": float(row[15]) if row[15] else 0.0, - "conversion_devise": float(row[16]) if row[16] else 0.0, - "type_remise": int(row[17]) if row[17] else 0, - "code_barre_fournisseur": self._safe_strip(row[18]), - "prix_achat_nouveau": float(row[19]) if row[19] else 0.0, - "prix_devise_nouveau": float(row[20]) if row[20] else 0.0, - "remise_nouvelle": float(row[21]) if row[21] else 0.0, - "date_application": row[22], - "date_creation": row[23], - "date_modification": row[24], - }) - - for article in articles: - article["fournisseurs"] = fournisseurs_map.get(article["reference"], []) - 
article["nb_fournisseurs"] = len(article["fournisseurs"]) - - logger.info(f" ✓ {len(fournisseurs_map)} articles avec fournisseurs multiples") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur fournisseurs multiples: {e}") - for article in articles: - article["fournisseurs"] = [] - article["nb_fournisseurs"] = 0 - return articles - - def _enrichir_depots_details(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit les stocks avec les informations détaillées des dépôts - Ajoute le nom du dépôt à chaque ligne de stock - """ - try: - logger.info(f" → Enrichissement détails dépôts...") - - query = """ - SELECT - DE_No, - DE_Intitule, - DE_Code, - DE_Adresse, - DE_Complement, - DE_CodePostal, - DE_Ville, - DE_Contact, - DE_Principal, - DE_CatCompta, - DE_Region, - DE_Pays, - DE_EMail, - DE_Telephone, - DE_Telecopie, - DP_NoDefaut, - DE_Exclure - FROM F_DEPOT - """ - - cursor.execute(query) - rows = cursor.fetchall() - - depots_map = {} - for row in rows: - de_no = self._safe_strip(row[0]) - if not de_no: - continue - - depots_map[de_no] = { - "depot_num": de_no, - "depot_nom": self._safe_strip(row[1]), - "depot_code": self._safe_strip(row[2]), - "depot_adresse": self._safe_strip(row[3]), - "depot_complement": self._safe_strip(row[4]), - "depot_code_postal": self._safe_strip(row[5]), - "depot_ville": self._safe_strip(row[6]), - "depot_contact": self._safe_strip(row[7]), - "depot_est_principal": bool(row[8]), - "depot_categorie_compta": int(row[9]) if row[9] else 0, - "depot_region": self._safe_strip(row[10]), - "depot_pays": self._safe_strip(row[11]), - "depot_email": self._safe_strip(row[12]), - "depot_telephone": self._safe_strip(row[13]), - "depot_fax": self._safe_strip(row[14]), - "depot_emplacement_defaut": self._safe_strip(row[15]), - "depot_exclu": bool(row[16]), - } - - logger.info(f" → {len(depots_map)} dépôts chargés") - - for article in articles: - for empl in article.get("emplacements", []): - depot_num = empl.get("depot") - if depot_num and depot_num in depots_map: - empl.update(depots_map[depot_num]) - - logger.info(f" ✓ Emplacements enrichis avec détails dépôts") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur détails dépôts: {e}") - return articles - - def _enrichir_emplacements_details(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit les emplacements avec leurs détails (zone, type, etc.) 
- """ - try: - logger.info(f" → Enrichissement détails emplacements...") - - query = """ - SELECT - DE_No, - DP_No, - DP_Code, - DP_Intitule, - DP_Zone, - DP_Type - FROM F_DEPOTEMPL - """ - - cursor.execute(query) - rows = cursor.fetchall() - - emplacements_map = {} - for row in rows: - de_no = self._safe_strip(row[0]) - dp_no = self._safe_strip(row[1]) - - if not de_no or not dp_no: - continue - - key = f"{de_no}_{dp_no}" - emplacements_map[key] = { - "emplacement_code": self._safe_strip(row[2]), - "emplacement_libelle": self._safe_strip(row[3]), - "emplacement_zone": self._safe_strip(row[4]), - "emplacement_type": int(row[5]) if row[5] else 0, - } - - logger.info(f" → {len(emplacements_map)} emplacements détaillés chargés") - - for article in articles: - for empl in article.get("emplacements", []): - depot = empl.get("depot") - emplacement = empl.get("emplacement") - if depot and emplacement: - key = f"{depot}_{emplacement}" - if key in emplacements_map: - empl.update(emplacements_map[key]) - - logger.info(f" ✓ Emplacements enrichis avec détails") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur détails emplacements: {e}") - return articles - - def _enrichir_gammes_enumeres(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit les gammes avec leurs libellés depuis F_ENUMGAMME et P_GAMME - """ - try: - logger.info(f" → Enrichissement énumérés gammes...") - - query_pgamme = "SELECT G_Intitule, G_Type FROM P_GAMME ORDER BY G_Type" - cursor.execute(query_pgamme) - pgamme_rows = cursor.fetchall() - - gammes_config = {} - for idx, row in enumerate(pgamme_rows): - gammes_config[idx + 1] = { - "nom": self._safe_strip(row[0]), - "type": int(row[1]) if row[1] else 0, - } - - logger.info(f" → Configuration gammes: {gammes_config}") - - query_enum = """ - SELECT - EG_Champ, - EG_Ligne, - EG_Enumere, - EG_BorneSup - FROM F_ENUMGAMME - ORDER BY EG_Champ, EG_Ligne - """ - - cursor.execute(query_enum) - enum_rows = cursor.fetchall() - - enumeres_map = {} - for row in enum_rows: - champ = int(row[0]) if row[0] else 0 - enumere = self._safe_strip(row[2]) - - if not enumere: - continue - - key = f"{champ}_{enumere}" - enumeres_map[key] = { - "ligne": int(row[1]) if row[1] else 0, - "enumere": enumere, - "borne_sup": float(row[3]) if row[3] else 0.0, - "gamme_nom": gammes_config.get(champ, {}).get("nom", f"Gamme {champ}"), - } - - logger.info(f" → {len(enumeres_map)} énumérés chargés") - - for article in articles: - for gamme in article.get("gammes", []): - num_gamme = gamme.get("numero_gamme") - enumere = gamme.get("enumere") - - if num_gamme and enumere: - key = f"{num_gamme}_{enumere}" - if key in enumeres_map: - gamme.update(enumeres_map[key]) - - logger.info(f" ✓ Gammes enrichies avec énumérés") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur énumérés gammes: {e}") - return articles - - def _enrichir_references_enumerees(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec les références énumérées (articles avec gammes) - Structure: articles[i]["refs_enumerees"] = [{"gamme1": 1, "gamme2": 3, "ref": "ART-R-B"}, ...] 
- """ - try: - logger.info(f" → Enrichissement références énumérées...") - - references = [a["reference"] for a in articles if a["reference"]] - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - query = f""" - SELECT - AR_Ref, - AG_No1, - AG_No2, - AE_Ref, - AE_PrixAch, - AE_CodeBarre, - AE_PrixAchNouv, - AE_EdiCode, - AE_Sommeil, - cbCreation, - cbModification - FROM F_ARTENUMREF - WHERE AR_Ref IN ({placeholders}) - ORDER BY AR_Ref, AG_No1, AG_No2 - """ - - cursor.execute(query, references) - rows = cursor.fetchall() - - refs_enum_map = {} - for row in rows: - ref = self._safe_strip(row[0]) - if not ref: - continue - - if ref not in refs_enum_map: - refs_enum_map[ref] = [] - - refs_enum_map[ref].append({ - "gamme_1": int(row[1]) if row[1] else 0, - "gamme_2": int(row[2]) if row[2] else 0, - "reference_enumeree": self._safe_strip(row[3]), - "prix_achat": float(row[4]) if row[4] else 0.0, - "code_barre": self._safe_strip(row[5]), - "prix_achat_nouveau": float(row[6]) if row[6] else 0.0, - "edi_code": self._safe_strip(row[7]), - "en_sommeil": bool(row[8]), - "date_creation": row[9], - "date_modification": row[10], - }) - - for article in articles: - article["refs_enumerees"] = refs_enum_map.get(article["reference"], []) - article["nb_refs_enumerees"] = len(article["refs_enumerees"]) - - logger.info(f" ✓ {len(refs_enum_map)} articles avec références énumérées") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur références énumérées: {e}") - for article in articles: - article["refs_enumerees"] = [] - article["nb_refs_enumerees"] = 0 - return articles - - def _enrichir_medias_articles(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec les médias attachés (photos, documents, etc.) - Structure: articles[i]["medias"] = [{"fichier": "photo.jpg", "type": "image/jpeg"}, ...] - """ - try: - logger.info(f" → Enrichissement médias articles...") - - references = [a["reference"] for a in articles if a["reference"]] - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - query = f""" - SELECT - AR_Ref, - ME_Commentaire, - ME_Fichier, - ME_TypeMIME, - ME_Origine, - ME_GedId, - cbCreation, - cbModification - FROM F_ARTICLEMEDIA - WHERE AR_Ref IN ({placeholders}) - ORDER BY AR_Ref, cbCreation - """ - - cursor.execute(query, references) - rows = cursor.fetchall() - - medias_map = {} - for row in rows: - ref = self._safe_strip(row[0]) - if not ref: - continue - - if ref not in medias_map: - medias_map[ref] = [] - - medias_map[ref].append({ - "commentaire": self._safe_strip(row[1]), - "fichier": self._safe_strip(row[2]), - "type_mime": self._safe_strip(row[3]), - "origine": int(row[4]) if row[4] else 0, - "ged_id": self._safe_strip(row[5]), - "date_creation": row[6], - "date_modification": row[7], - }) - - for article in articles: - article["medias"] = medias_map.get(article["reference"], []) - article["nb_medias"] = len(article["medias"]) - - logger.info(f" ✓ {len(medias_map)} articles avec médias") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur médias: {e}") - for article in articles: - article["medias"] = [] - article["nb_medias"] = 0 - return articles - - def _enrichir_prix_gammes(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec les prix spécifiques par combinaison de gammes - Structure: articles[i]["prix_gammes"] = [{"gamme1": 1, "gamme2": 3, "prix_net": 125.5}, ...] 
- """ - try: - logger.info(f" → Enrichissement prix par gammes...") - - references = [a["reference"] for a in articles if a["reference"]] - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - query = f""" - SELECT - AR_Ref, - AG_No1, - AG_No2, - AR_PUNet, - AR_CoutStd, - cbCreation, - cbModification - FROM F_ARTPRIX - WHERE AR_Ref IN ({placeholders}) - ORDER BY AR_Ref, AG_No1, AG_No2 - """ - - cursor.execute(query, references) - rows = cursor.fetchall() - - prix_gammes_map = {} - for row in rows: - ref = self._safe_strip(row[0]) - if not ref: - continue - - if ref not in prix_gammes_map: - prix_gammes_map[ref] = [] - - prix_gammes_map[ref].append({ - "gamme_1": int(row[1]) if row[1] else 0, - "gamme_2": int(row[2]) if row[2] else 0, - "prix_net": float(row[3]) if row[3] else 0.0, - "cout_standard": float(row[4]) if row[4] else 0.0, - "date_creation": row[5], - "date_modification": row[6], - }) - - for article in articles: - article["prix_gammes"] = prix_gammes_map.get(article["reference"], []) - article["nb_prix_gammes"] = len(article["prix_gammes"]) - - logger.info(f" ✓ {len(prix_gammes_map)} articles avec prix par gammes") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur prix gammes: {e}") - for article in articles: - article["prix_gammes"] = [] - article["nb_prix_gammes"] = 0 - return articles - - def _enrichir_conditionnements(self, articles: List[Dict], cursor) -> List[Dict]: - """ - Enrichit avec les conditionnements disponibles - """ - try: - logger.info(f" → Enrichissement conditionnements...") - - query = """ - SELECT - EC_Champ, - EC_Enumere, - EC_Quantite, - EC_EdiCode - FROM F_ENUMCOND - ORDER BY EC_Champ, EC_Enumere - """ - - cursor.execute(query) - rows = cursor.fetchall() - - cond_map = {} - for row in rows: - champ = int(row[0]) if row[0] else 0 - enumere = self._safe_strip(row[1]) - - if not enumere: - continue - - key = f"{champ}_{enumere}" - cond_map[key] = { - "champ": champ, - "enumere": enumere, - "quantite": float(row[2]) if row[2] else 0.0, - "edi_code": self._safe_strip(row[3]), - } - - logger.info(f" → {len(cond_map)} conditionnements chargés") - - for article in articles: - conditionnement = article.get("conditionnement") - if conditionnement: - for key, cond_data in cond_map.items(): - if cond_data["enumere"] == conditionnement: - article["conditionnement_qte"] = cond_data["quantite"] - article["conditionnement_edi"] = cond_data["edi_code"] - break - - logger.info(f" ✓ Conditionnements enrichis") - return articles - - except Exception as e: - logger.error(f" ✗ Erreur conditionnements: {e}") - return articles - - def _mapper_article_depuis_row(self, row_data: Dict, colonnes_config: Dict) -> Dict: - """ - Mappe une ligne SQL vers un dictionnaire article normalisé - - Args: - row_data: Dictionnaire avec noms de colonnes SQL comme clés - colonnes_config: Mapping SQL -> noms normalisés - - Returns: - Dictionnaire article avec noms normalisés - """ - article = {} - - def get_val(sql_col, default=None, convert_type=None): - val = row_data.get(sql_col, default) - if val is None: - return default - - if convert_type == float: - return float(val) if val not in (None, "") else (default or 0.0) - elif convert_type == int: - return int(val) if val not in (None, "") else (default or 0) - elif convert_type == bool: - return bool(val) if val not in (None, "") else (default or False) - elif convert_type == str: - return self._safe_strip(val) - - return val - - article["reference"] = get_val("AR_Ref", 
convert_type=str) - article["designation"] = get_val("AR_Design", convert_type=str) - article["code_ean"] = get_val("AR_CodeBarre", convert_type=str) - article["code_barre"] = get_val("AR_CodeBarre", convert_type=str) - article["edi_code"] = get_val("AR_EdiCode", convert_type=str) - article["raccourci"] = get_val("AR_Raccourci", convert_type=str) - - article["prix_vente"] = get_val("AR_PrixVen", 0.0, float) - article["prix_achat"] = get_val("AR_PrixAch", 0.0, float) - article["coef"] = get_val("AR_Coef", 0.0, float) - article["prix_net"] = get_val("AR_PUNet", 0.0, float) - article["prix_achat_nouveau"] = get_val("AR_PrixAchNouv", 0.0, float) - article["coef_nouveau"] = get_val("AR_CoefNouv", 0.0, float) - article["prix_vente_nouveau"] = get_val("AR_PrixVenNouv", 0.0, float) - - date_app = get_val("AR_DateApplication") - article["date_application_prix"] = str(date_app) if date_app else None - - article["cout_standard"] = get_val("AR_CoutStd", 0.0, float) - - article["unite_vente"] = get_val("AR_UniteVen", convert_type=str) - article["unite_poids"] = get_val("AR_UnitePoids", convert_type=str) - article["poids_net"] = get_val("AR_PoidsNet", 0.0, float) - article["poids_brut"] = get_val("AR_PoidsBrut", 0.0, float) - - article["gamme_1"] = get_val("AR_Gamme1", convert_type=str) - article["gamme_2"] = get_val("AR_Gamme2", convert_type=str) - - type_val = get_val("AR_Type", 0, int) - article["type_article"] = type_val - article["type_article_libelle"] = self._get_type_article_libelle(type_val) - article["famille_code"] = get_val("FA_CodeFamille", convert_type=str) - article["nature"] = get_val("AR_Nature", 0, int) - article["garantie"] = get_val("AR_Garantie", 0, int) - article["code_fiscal"] = get_val("AR_CodeFiscal", convert_type=str) - article["pays"] = get_val("AR_Pays", convert_type=str) - - article["fournisseur_principal"] = get_val("CO_No", 0, int) - article["conditionnement"] = get_val("AR_Condition", convert_type=str) - article["nb_colis"] = get_val("AR_NbColis", 0, int) - article["prevision"] = get_val("AR_Prevision", False, bool) - - article["suivi_stock"] = get_val("AR_SuiviStock", False, bool) - article["nomenclature"] = get_val("AR_Nomencl", False, bool) - article["qte_composant"] = get_val("AR_QteComp", 0.0, float) - article["qte_operatoire"] = get_val("AR_QteOperatoire", 0.0, float) - - sommeil = get_val("AR_Sommeil", 0, int) - article["est_actif"] = (sommeil == 0) - article["en_sommeil"] = (sommeil == 1) - article["article_substitut"] = get_val("AR_Substitut", convert_type=str) - article["soumis_escompte"] = get_val("AR_Escompte", False, bool) - article["delai"] = get_val("AR_Delai", 0, int) - - article["stat_01"] = get_val("AR_Stat01", convert_type=str) - article["stat_02"] = get_val("AR_Stat02", convert_type=str) - article["stat_03"] = get_val("AR_Stat03", convert_type=str) - article["stat_04"] = get_val("AR_Stat04", convert_type=str) - article["stat_05"] = get_val("AR_Stat05", convert_type=str) - article["hors_statistique"] = get_val("AR_HorsStat", False, bool) - - article["categorie_1"] = get_val("CL_No1", 0, int) - article["categorie_2"] = get_val("CL_No2", 0, int) - article["categorie_3"] = get_val("CL_No3", 0, int) - article["categorie_4"] = get_val("CL_No4", 0, int) - - date_modif = get_val("AR_DateModif") - article["date_modification"] = str(date_modif) if date_modif else None - - article["vente_debit"] = get_val("AR_VteDebit", False, bool) - article["non_imprimable"] = get_val("AR_NotImp", False, bool) - article["transfere"] = get_val("AR_Transfere", False, bool) - 
article["publie"] = get_val("AR_Publie", False, bool) - article["contremarque"] = get_val("AR_Contremarque", False, bool) - article["fact_poids"] = get_val("AR_FactPoids", False, bool) - article["fact_forfait"] = get_val("AR_FactForfait", False, bool) - article["saisie_variable"] = get_val("AR_SaisieVar", False, bool) - article["fictif"] = get_val("AR_Fictif", False, bool) - article["sous_traitance"] = get_val("AR_SousTraitance", False, bool) - article["criticite"] = get_val("AR_Criticite", 0, int) - - article["reprise_code_defaut"] = get_val("RP_CodeDefaut", convert_type=str) - article["delai_fabrication"] = get_val("AR_DelaiFabrication", 0, int) - article["delai_peremption"] = get_val("AR_DelaiPeremption", 0, int) - article["delai_securite"] = get_val("AR_DelaiSecurite", 0, int) - article["type_lancement"] = get_val("AR_TypeLancement", 0, int) - article["cycle"] = get_val("AR_Cycle", 1, int) - - article["photo"] = get_val("AR_Photo", convert_type=str) - article["langue_1"] = get_val("AR_Langue1", convert_type=str) - article["langue_2"] = get_val("AR_Langue2", convert_type=str) - - article["frais_01_denomination"] = get_val("AR_Frais01FR_Denomination", convert_type=str) - article["frais_02_denomination"] = get_val("AR_Frais02FR_Denomination", convert_type=str) - article["frais_03_denomination"] = get_val("AR_Frais03FR_Denomination", convert_type=str) - - article["marque_commerciale"] = get_val("Marque commerciale", convert_type=str) - - objectif_val = get_val("Objectif / Qtés vendues") - if objectif_val is not None: - article["objectif_qtes_vendues"] = str(float(objectif_val)) if objectif_val not in ("", 0, 0.0) else None - else: - article["objectif_qtes_vendues"] = None - - pourcentage_val = get_val("Pourcentage teneur en or") - if pourcentage_val is not None: - article["pourcentage_or"] = str(float(pourcentage_val)) if pourcentage_val not in ("", 0, 0.0) else None - else: - article["pourcentage_or"] = None - - date_com = get_val("1ère commercialisation") - article["premiere_commercialisation"] = str(date_com) if date_com else None - - article["interdire_commande"] = get_val("AR_InterdireCommande", False, bool) - article["exclure"] = get_val("AR_Exclure", False, bool) - - article["stock_reel"] = 0.0 - article["stock_mini"] = 0.0 - article["stock_maxi"] = 0.0 - article["stock_reserve"] = 0.0 - article["stock_commande"] = 0.0 - article["stock_disponible"] = 0.0 - - article["famille_libelle"] = None - article["famille_type"] = None - article["famille_unite_vente"] = None - article["famille_coef"] = None - article["famille_suivi_stock"] = None - article["famille_garantie"] = None - article["famille_unite_poids"] = None - article["famille_delai"] = None - article["famille_nb_colis"] = None - article["famille_code_fiscal"] = None - article["famille_escompte"] = None - article["famille_centrale"] = None - article["famille_nature"] = None - article["famille_hors_stat"] = None - article["famille_pays"] = None - - article["fournisseur_nom"] = None - article["tva_code"] = None - article["tva_taux"] = None - - return article - - def _enrichir_stocks_articles(self, articles: List[Dict], cursor) -> List[Dict]: - """Enrichit les articles avec les données de stock depuis F_ARTSTOCK""" - try: - logger.info(f" → Enrichissement stocks pour {len(articles)} articles...") - - references = [a["reference"] for a in articles if a["reference"]] - - if not references: - return articles - - placeholders = ",".join(["?"] * len(references)) - stock_query = f""" - SELECT - AR_Ref, - SUM(ISNULL(AS_QteSto, 0)) as 
Stock_Total, - MIN(ISNULL(AS_QteMini, 0)) as Stock_Mini, - MAX(ISNULL(AS_QteMaxi, 0)) as Stock_Maxi, - SUM(ISNULL(AS_QteRes, 0)) as Stock_Reserve, - SUM(ISNULL(AS_QteCom, 0)) as Stock_Commande - FROM F_ARTSTOCK - WHERE AR_Ref IN ({placeholders}) - GROUP BY AR_Ref - """ - - cursor.execute(stock_query, references) - stock_rows = cursor.fetchall() - - stock_map = {} - for stock_row in stock_rows: - ref = self._safe_strip(stock_row[0]) - if ref: - stock_map[ref] = { - "stock_reel": float(stock_row[1]) if stock_row[1] else 0.0, - "stock_mini": float(stock_row[2]) if stock_row[2] else 0.0, - "stock_maxi": float(stock_row[3]) if stock_row[3] else 0.0, - "stock_reserve": float(stock_row[4]) if stock_row[4] else 0.0, - "stock_commande": float(stock_row[5]) if stock_row[5] else 0.0, - } - - logger.info(f" → {len(stock_map)} articles avec stock trouvés dans F_ARTSTOCK") - - for article in articles: - if article["reference"] in stock_map: - stock_data = stock_map[article["reference"]] - article.update(stock_data) - article["stock_disponible"] = ( - article["stock_reel"] - article["stock_reserve"] - ) - - return articles - - except Exception as e: - logger.error(f" ✗ Erreur enrichissement stocks: {e}", exc_info=True) - return articles - - def _enrichir_fournisseurs_articles(self, articles: List[Dict], cursor) -> List[Dict]: - """Enrichit les articles avec le nom du fournisseur principal""" - try: - logger.info(f" → Enrichissement fournisseurs...") - - nums_fournisseurs = list(set([ - a["fournisseur_principal"] for a in articles - if a.get("fournisseur_principal") and a["fournisseur_principal"] > 0 - ])) - - if not nums_fournisseurs: - logger.warning(" ⚠ Aucun numéro de fournisseur trouvé dans les articles") - for article in articles: - article["fournisseur_nom"] = None - return articles - - logger.info(f" → {len(nums_fournisseurs)} fournisseurs uniques à chercher") - logger.info(f" → Exemples CO_No : {nums_fournisseurs[:5]}") - - placeholders = ",".join(["?"] * len(nums_fournisseurs)) - fournisseur_query = f""" - SELECT - CT_Num, - CT_Intitule, - CT_Type - FROM F_COMPTET - WHERE CT_Num IN ({placeholders}) - AND CT_Type = 1 - """ - - cursor.execute(fournisseur_query, nums_fournisseurs) - fournisseur_rows = cursor.fetchall() - - logger.info(f" → {len(fournisseur_rows)} fournisseurs trouvés dans F_COMPTET") - - if len(fournisseur_rows) == 0: - logger.warning(f" ⚠ Aucun fournisseur trouvé pour CT_Type=1 et CT_Num IN {nums_fournisseurs[:5]}") - cursor.execute(f"SELECT CT_Num, CT_Type FROM F_COMPTET WHERE CT_Num IN ({placeholders})", nums_fournisseurs) - tous_types = cursor.fetchall() - if tous_types: - logger.info(f" → Trouvé {len(tous_types)} comptes (tous types) : {[(r[0], r[1]) for r in tous_types[:5]]}") - - fournisseur_map = {} - for fourn_row in fournisseur_rows: - num = int(fourn_row[0]) # CT_Num - nom = self._safe_strip(fourn_row[1]) # CT_Intitule - type_ct = int(fourn_row[2]) # CT_Type - fournisseur_map[num] = nom - logger.debug(f" → Fournisseur mappé : {num} = {nom} (Type={type_ct})") - - nb_enrichis = 0 - for article in articles: - num_fourn = article.get("fournisseur_principal") - if num_fourn and num_fourn in fournisseur_map: - article["fournisseur_nom"] = fournisseur_map[num_fourn] - nb_enrichis += 1 - else: - article["fournisseur_nom"] = None - - logger.info(f" ✓ {nb_enrichis} articles enrichis avec nom fournisseur") - - return articles - - except Exception as e: - logger.error(f" ✗ Erreur enrichissement fournisseurs: {e}", exc_info=True) - for article in articles: - 
article["fournisseur_nom"] = None - return articles - - def _enrichir_familles_articles(self, articles: List[Dict], cursor) -> List[Dict]: - """Enrichit les articles avec les informations de famille depuis F_FAMILLE""" - try: - logger.info(f" → Enrichissement familles pour {len(articles)} articles...") - - codes_familles_bruts = [ - a.get("famille_code") for a in articles - if a.get("famille_code") not in (None, "", " ") - ] - - if codes_familles_bruts: - logger.info(f" → Exemples de codes familles : {codes_familles_bruts[:5]}") - - codes_familles = list(set([ - str(code).strip() for code in codes_familles_bruts if code - ])) - - if not codes_familles: - logger.warning(" ⚠ Aucun code famille trouvé dans les articles") - for article in articles: - self._init_champs_famille_vides(article) - return articles - - logger.info(f" → {len(codes_familles)} codes famille uniques") - - cursor.execute("SELECT TOP 1 * FROM F_FAMILLE") - colonnes_disponibles = [column[0] for column in cursor.description] - - colonnes_souhaitees = [ - "FA_CodeFamille", - "FA_Intitule", - "FA_Type", - "FA_UniteVen", - "FA_Coef", - "FA_SuiviStock", - "FA_Garantie", - "FA_UnitePoids", - "FA_Delai", - "FA_NbColis", - "FA_CodeFiscal", - "FA_Escompte", - "FA_Central", - "FA_Nature", - "FA_HorsStat", - "FA_Pays", - "FA_VteDebit", - "FA_NotImp", - "FA_Contremarque", - "FA_FactPoids", - "FA_FactForfait", - "FA_Publie", - "FA_RacineRef", - "FA_RacineCB", - "FA_Raccourci", - "FA_SousTraitance", - "FA_Fictif", - "FA_Criticite", - ] - - colonnes_a_lire = [col for col in colonnes_souhaitees if col in colonnes_disponibles] - - if "FA_CodeFamille" not in colonnes_a_lire or "FA_Intitule" not in colonnes_a_lire: - logger.error(" ✗ Colonnes essentielles manquantes !") - return articles - - logger.info(f" → Colonnes disponibles : {len(colonnes_a_lire)}") - - colonnes_str = ", ".join(colonnes_a_lire) - placeholders = ",".join(["?"] * len(codes_familles)) - - famille_query = f""" - SELECT {colonnes_str} - FROM F_FAMILLE - WHERE FA_CodeFamille IN ({placeholders}) - """ - - cursor.execute(famille_query, codes_familles) - famille_rows = cursor.fetchall() - - logger.info(f" → {len(famille_rows)} familles trouvées") - - famille_map = {} - for fam_row in famille_rows: - famille_data = {} - for idx, col in enumerate(colonnes_a_lire): - famille_data[col] = fam_row[idx] - - code = self._safe_strip(famille_data.get("FA_CodeFamille")) - if not code: - continue - - famille_map[code] = { - "famille_libelle": self._safe_strip(famille_data.get("FA_Intitule")), - "famille_type": int(famille_data.get("FA_Type", 0) or 0), - "famille_unite_vente": self._safe_strip(famille_data.get("FA_UniteVen")), - "famille_coef": float(famille_data.get("FA_Coef", 0) or 0), - "famille_suivi_stock": bool(famille_data.get("FA_SuiviStock", 0)), - "famille_garantie": int(famille_data.get("FA_Garantie", 0) or 0), - "famille_unite_poids": self._safe_strip(famille_data.get("FA_UnitePoids")), - "famille_delai": int(famille_data.get("FA_Delai", 0) or 0), - "famille_nb_colis": int(famille_data.get("FA_NbColis", 0) or 0), - "famille_code_fiscal": self._safe_strip(famille_data.get("FA_CodeFiscal")), - "famille_escompte": bool(famille_data.get("FA_Escompte", 0)), - "famille_centrale": bool(famille_data.get("FA_Central", 0)), - "famille_nature": int(famille_data.get("FA_Nature", 0) or 0), - "famille_hors_stat": bool(famille_data.get("FA_HorsStat", 0)), - "famille_pays": self._safe_strip(famille_data.get("FA_Pays")), - } - - logger.info(f" → {len(famille_map)} familles mappées") - - 
nb_enrichis = 0 - for article in articles: - code_fam = str(article.get("famille_code", "")).strip() - - if code_fam and code_fam in famille_map: - article.update(famille_map[code_fam]) - nb_enrichis += 1 - else: - self._init_champs_famille_vides(article) - - logger.info(f" ✓ {nb_enrichis} articles enrichis avec infos famille") - - return articles - - except Exception as e: - logger.error(f" Erreur enrichissement familles: {e}", exc_info=True) - for article in articles: - self._init_champs_famille_vides(article) - return articles - - def _init_champs_famille_vides(self, article: Dict): - """Initialise les champs famille à None/0""" - article["famille_libelle"] = None - article["famille_type"] = None - article["famille_unite_vente"] = None - article["famille_coef"] = None - article["famille_suivi_stock"] = None - article["famille_garantie"] = None - article["famille_unite_poids"] = None - article["famille_delai"] = None - article["famille_nb_colis"] = None - article["famille_code_fiscal"] = None - article["famille_escompte"] = None - article["famille_centrale"] = None - article["famille_nature"] = None - article["famille_hors_stat"] = None - article["famille_pays"] = None - - def _enrichir_tva_articles(self, articles: List[Dict], cursor) -> List[Dict]: - """Enrichit les articles avec le taux de TVA""" - try: - logger.info(f" → Enrichissement TVA...") - - codes_tva = list(set([ - a["code_fiscal"] for a in articles - if a.get("code_fiscal") - ])) - - if not codes_tva: - for article in articles: - article["tva_code"] = None - article["tva_taux"] = None - return articles - - placeholders = ",".join(["?"] * len(codes_tva)) - tva_query = f""" - SELECT - TA_Code, - TA_Taux - FROM F_TAXE - WHERE TA_Code IN ({placeholders}) - """ - - cursor.execute(tva_query, codes_tva) - tva_rows = cursor.fetchall() - - tva_map = {} - for tva_row in tva_rows: - code = self._safe_strip(tva_row[0]) - tva_map[code] = float(tva_row[1]) if tva_row[1] else 0.0 - - logger.info(f" → {len(tva_map)} codes TVA trouvés") - - for article in articles: - code_tva = article.get("code_fiscal") - if code_tva and code_tva in tva_map: - article["tva_code"] = code_tva - article["tva_taux"] = tva_map[code_tva] - else: - article["tva_code"] = code_tva - article["tva_taux"] = None - - return articles - - except Exception as e: - logger.error(f" ✗ Erreur enrichissement TVA: {e}", exc_info=True) - for article in articles: - article["tva_code"] = article.get("code_fiscal") - article["tva_taux"] = None - return articles - - def _get_type_article_libelle(self, type_val: int) -> str: - """Retourne le libellé du type d'article""" - types = { - 0: "Article", - 1: "Prestation", - 2: "Divers / Frais", - 3: "Nomenclature" - } - return types.get(type_val, f"Type {type_val}") - - def _safe_strip(self, value) -> Optional[str]: - """Nettoie une valeur string en toute sécurité""" - if value is None: - return None - if isinstance(value, str): - stripped = value.strip() - return stripped if stripped else None - return str(value).strip() or None - - - def _convertir_type_pour_sql(self, type_doc: int) -> int: - """COM → SQL : 0, 10, 20, 30... → 0, 1, 2, 3...""" - mapping = {0: 0, 10: 1, 20: 2, 30: 3, 40: 4, 50: 5, 60: 6} - return mapping.get(type_doc, type_doc) - - def _convertir_type_depuis_sql(self, type_sql: int) -> int: - """SQL → COM : 0, 1, 2, 3... 
→ 0, 10, 20, 30..."""
-        mapping = {0: 0, 1: 10, 2: 20, 3: 30, 4: 40, 5: 50, 6: 60}
-        return mapping.get(type_sql, type_sql)
-    def _lire_document_sql(self, numero: str, type_doc: int):
-        """
-        Lit un document spécifique par son numéro.
-        PAS de filtre par préfixe car on cherche un document précis.
-        """
        try:
            with self._get_sql_connection() as conn:
                cursor = conn.cursor()
@@ -2995,26 +1566,26 @@ class SageConnector:
                    )
                    return None

-                numero_piece = self._safe_strip(row[0])
+                numero_piece = _safe_strip(row[0])
                logger.info(f"[SQL READ] Document trouvé: {numero_piece}")

                doc = {
                    "numero": numero_piece,
-                    "reference": self._safe_strip(row[2]),  # DO_Ref
+                    "reference": _safe_strip(row[2]),  # DO_Ref
                    "date": str(row[1]) if row[1] else "",  # DO_Date
                    "date_livraison": (str(row[7]) if row[7] else ""),  # DO_DateLivr
                    "date_expedition": (
                        str(row[8]) if row[8] else ""
                    ),  # DO_DateExpedition
-                    "client_code": self._safe_strip(row[6]),  # DO_Tiers
-                    "client_intitule": self._safe_strip(row[39]),  # CT_Intitule
-                    "client_adresse": self._safe_strip(row[40]),  # CT_Adresse
-                    "client_code_postal": self._safe_strip(row[41]),  # CT_CodePostal
-                    "client_ville": self._safe_strip(row[42]),  # CT_Ville
-                    "client_telephone": self._safe_strip(row[43]),  # CT_Telephone
-                    "client_email": self._safe_strip(row[44]),  # CT_EMail
-                    "contact": self._safe_strip(row[9]),  # DO_Contact
+                    "client_code": _safe_strip(row[6]),  # DO_Tiers
+                    "client_intitule": _safe_strip(row[39]),  # CT_Intitule
+                    "client_adresse": _safe_strip(row[40]),  # CT_Adresse
+                    "client_code_postal": _safe_strip(row[41]),  # CT_CodePostal
+                    "client_ville": _safe_strip(row[42]),  # CT_Ville
+                    "client_telephone": _safe_strip(row[43]),  # CT_Telephone
+                    "client_email": _safe_strip(row[44]),  # CT_EMail
+                    "contact": _safe_strip(row[9]),  # DO_Contact
                    "total_ht": float(row[3]) if row[3] else 0.0,  # DO_TotalHT
                    "total_ht_net": float(row[10]) if row[10] else 0.0,  # DO_TotalHTNet
                    "total_ttc": float(row[4]) if row[4] else 0.0,  # DO_TotalTTC
@@ -3030,9 +1601,9 @@ class SageConnector:
                    "taxe1": float(row[16]) if row[16] else 0.0,  # DO_Taxe1
                    "taxe2": float(row[17]) if row[17] else 0.0,  # DO_Taxe2
                    "taxe3": float(row[18]) if row[18] else 0.0,  # DO_Taxe3
-                    "code_taxe1": self._safe_strip(row[19]),  # DO_CodeTaxe1
-                    "code_taxe2": self._safe_strip(row[20]),  # DO_CodeTaxe2
-                    "code_taxe3": self._safe_strip(row[21]),  # DO_CodeTaxe3
+                    "code_taxe1": _safe_strip(row[19]),  # DO_CodeTaxe1
+                    "code_taxe2": _safe_strip(row[20]),  # DO_CodeTaxe2
+                    "code_taxe3": _safe_strip(row[21]),  # DO_CodeTaxe3
                    "statut": int(row[5]) if row[5] is not None else 0,  # DO_Statut
                    "statut_estatut": (
                        int(row[22]) if row[22] is not None else 0
@@ -3044,10 +1615,10 @@ class SageConnector:
                        int(row[26]) if row[26] is not None else 0
                    ),  # DO_Transfere
                    "souche": int(row[27]) if row[27] is not None else 0,  # DO_Souche
-                    "piece_origine": self._safe_strip(row[28]),  # DO_PieceOrig
-                    "guid": self._safe_strip(row[29]),  # DO_GUID
-                    "ca_num": self._safe_strip(row[30]),  # CA_Num
-                    "cg_num": self._safe_strip(row[31]),  # CG_Num
+                    "piece_origine": _safe_strip(row[28]),  # DO_PieceOrig
+                    "guid": _safe_strip(row[29]),  # DO_GUID
+                    "ca_num": _safe_strip(row[30]),  # CA_Num
+                    "cg_num": _safe_strip(row[31]),  # CG_Num
                    "expedition": (
                        int(row[32]) if row[32] is not None else 1
                    ),  # DO_Expedit
@@ -3120,9 +1691,9 @@ class SageConnector:
                        "numero_ligne": (
                            int(ligne_row.DL_Ligne) if ligne_row.DL_Ligne else 0
                        ),
-                        "article_code": self._safe_strip(ligne_row.AR_Ref),
-                        "designation": self._safe_strip(ligne_row.DL_Design),
-                        "designation_article":
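# --- Reviewer sketch (not part of the diff) ----------------------------------
# The "+" lines above call _safe_strip() as a free function, while the
# self._safe_strip method is deleted earlier in this same change. The diff
# does not show where the replacement lives; a module-level helper mirroring
# the removed method would plausibly look like this (assumption, not the
# project's actual code):
from typing import Optional

def _safe_strip(value) -> Optional[str]:
    """Nettoie une valeur string en toute sécurité (module-level variant)."""
    if value is None:
        return None
    if isinstance(value, str):
        stripped = value.strip()
        return stripped if stripped else None
    return str(value).strip() or None
# ------------------------------------------------------------------------------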
self._safe_strip(ligne_row.AR_Design), + "article_code": _safe_strip(ligne_row.AR_Ref), + "designation": _safe_strip(ligne_row.DL_Design), + "designation_article": _safe_strip(ligne_row.AR_Design), "quantite": ( float(ligne_row.DL_Qte) if ligne_row.DL_Qte else 0.0 ), @@ -3137,7 +1708,7 @@ class SageConnector: else 0.0 ), "unite": ( - self._safe_strip(ligne_row.DL_Unite) + _safe_strip(ligne_row.DL_Unite) if hasattr(ligne_row, "DL_Unite") else "" ), @@ -3194,13 +1765,13 @@ class SageConnector: "taux_taxe3": taux_taxe3, "montant_taxe3": montant_taxe3, "total_taxes": montant_taxe1 + montant_taxe2 + montant_taxe3, - "famille_article": self._safe_strip(ligne_row.FA_CodeFamille), - "gamme1": self._safe_strip(ligne_row.AR_Gamme1), - "gamme2": self._safe_strip(ligne_row.AR_Gamme2), - "code_barre": self._safe_strip(ligne_row.AR_CodeBarre), - "type_article": self._safe_strip(ligne_row.AR_Type), - "nature_article": self._safe_strip(ligne_row.AR_Nature), - "garantie": self._safe_strip(ligne_row.AR_Garantie), + "famille_article": _safe_strip(ligne_row.FA_CodeFamille), + "gamme1": _safe_strip(ligne_row.AR_Gamme1), + "gamme2": _safe_strip(ligne_row.AR_Gamme2), + "code_barre": _safe_strip(ligne_row.AR_CodeBarre), + "type_article": _safe_strip(ligne_row.AR_Type), + "nature_article": _safe_strip(ligne_row.AR_Nature), + "garantie": _safe_strip(ligne_row.AR_Garantie), "cout_standard": ( float(ligne_row.AR_CoutStd) if ligne_row.AR_CoutStd else 0.0 ), @@ -3214,7 +1785,7 @@ class SageConnector: if ligne_row.AR_PoidsBrut else 0.0 ), - "unite_vente": self._safe_strip(ligne_row.AR_UniteVen), + "unite_vente": _safe_strip(ligne_row.AR_UniteVen), "date_livraison_ligne": ( str(ligne_row.DL_DateLivr) if hasattr(ligne_row, "DL_DateLivr") @@ -3228,17 +1799,17 @@ class SageConnector: else 0 ), "depot": ( - self._safe_strip(ligne_row.DE_No) + _safe_strip(ligne_row.DE_No) if hasattr(ligne_row, "DE_No") else "" ), "numero_commande": ( - self._safe_strip(ligne_row.DL_NoColis) + _safe_strip(ligne_row.DL_NoColis) if hasattr(ligne_row, "DL_NoColis") else "" ), "num_colis": ( - self._safe_strip(ligne_row.DL_Colis) + _safe_strip(ligne_row.DL_Colis) if hasattr(ligne_row, "DL_Colis") else "" ), @@ -3267,8 +1838,6 @@ class SageConnector: type_doc: int, filtre: str = "", limit: int = None, - inclure_liaisons: bool = False, - calculer_transformations: bool = True, ): """Liste les documents avec leurs lignes.""" try: @@ -3325,7 +1894,7 @@ class SageConnector: } for idx, entete in enumerate(entetes): - numero = self._safe_strip(entete.DO_Piece) + numero = _safe_strip(entete.DO_Piece) logger.info( f"[SQL LIST] [{idx+1}/{len(entetes)}] Traitement {numero}..." 
) @@ -3362,7 +1931,7 @@ class SageConnector: doc = { "numero": numero, "type": type_doc_depuis_sql, - "reference": self._safe_strip(entete.DO_Ref), + "reference": _safe_strip(entete.DO_Ref), "date": str(entete.DO_Date) if entete.DO_Date else "", "date_livraison": ( str(entete.DO_DateLivr) @@ -3374,18 +1943,18 @@ class SageConnector: if entete.DO_DateExpedition else "" ), - "client_code": self._safe_strip(entete.DO_Tiers), - "client_intitule": self._safe_strip(entete.CT_Intitule), - "client_adresse": self._safe_strip(entete.CT_Adresse), - "client_code_postal": self._safe_strip( + "client_code": _safe_strip(entete.DO_Tiers), + "client_intitule": _safe_strip(entete.CT_Intitule), + "client_adresse": _safe_strip(entete.CT_Adresse), + "client_code_postal": _safe_strip( entete.CT_CodePostal ), - "client_ville": self._safe_strip(entete.CT_Ville), - "client_telephone": self._safe_strip( + "client_ville": _safe_strip(entete.CT_Ville), + "client_telephone": _safe_strip( entete.CT_Telephone ), - "client_email": self._safe_strip(entete.CT_EMail), - "contact": self._safe_strip(entete.DO_Contact), + "client_email": _safe_strip(entete.CT_EMail), + "contact": _safe_strip(entete.DO_Contact), "total_ht": ( float(entete.DO_TotalHT) if entete.DO_TotalHT @@ -3435,9 +2004,9 @@ class SageConnector: "taxe3": ( float(entete.DO_Taxe3) if entete.DO_Taxe3 else 0.0 ), - "code_taxe1": self._safe_strip(entete.DO_CodeTaxe1), - "code_taxe2": self._safe_strip(entete.DO_CodeTaxe2), - "code_taxe3": self._safe_strip(entete.DO_CodeTaxe3), + "code_taxe1": _safe_strip(entete.DO_CodeTaxe1), + "code_taxe2": _safe_strip(entete.DO_CodeTaxe2), + "code_taxe3": _safe_strip(entete.DO_CodeTaxe3), "statut": ( int(entete.DO_Statut) if entete.DO_Statut is not None @@ -3468,14 +2037,14 @@ class SageConnector: if entete.DO_Transfere is not None else 0 ), - "souche": self._safe_strip(entete.DO_Souche), - "piece_origine": self._safe_strip(entete.DO_PieceOrig), - "guid": self._safe_strip(entete.DO_GUID), - "ca_num": self._safe_strip(entete.CA_Num), - "cg_num": self._safe_strip(entete.CG_Num), - "expedition": self._safe_strip(entete.DO_Expedit), - "condition": self._safe_strip(entete.DO_Condition), - "tarif": self._safe_strip(entete.DO_Tarif), + "souche": _safe_strip(entete.DO_Souche), + "piece_origine": _safe_strip(entete.DO_PieceOrig), + "guid": _safe_strip(entete.DO_GUID), + "ca_num": _safe_strip(entete.CA_Num), + "cg_num": _safe_strip(entete.CG_Num), + "expedition": _safe_strip(entete.DO_Expedit), + "condition": _safe_strip(entete.DO_Condition), + "tarif": _safe_strip(entete.DO_Tarif), "type_frais": ( int(entete.DO_TypeFrais) if entete.DO_TypeFrais is not None @@ -3572,11 +2141,11 @@ class SageConnector: if ligne_row.DL_Ligne else 0 ), - "article_code": self._safe_strip(ligne_row.AR_Ref), - "designation": self._safe_strip( + "article_code": _safe_strip(ligne_row.AR_Ref), + "designation": _safe_strip( ligne_row.DL_Design ), - "designation_article": self._safe_strip( + "designation_article": _safe_strip( ligne_row.AR_Design ), "quantite": ( @@ -3597,7 +2166,7 @@ class SageConnector: else 0.0 ), "unite": ( - self._safe_strip(ligne_row.DL_Unite) + _safe_strip(ligne_row.DL_Unite) if hasattr(ligne_row, "DL_Unite") else "" ), @@ -3662,19 +2231,19 @@ class SageConnector: "total_taxes": montant_taxe1 + montant_taxe2 + montant_taxe3, - "famille_article": self._safe_strip( + "famille_article": _safe_strip( ligne_row.FA_CodeFamille ), - "gamme1": self._safe_strip(ligne_row.AR_Gamme1), - "gamme2": self._safe_strip(ligne_row.AR_Gamme2), - "code_barre": 
self._safe_strip( + "gamme1": _safe_strip(ligne_row.AR_Gamme1), + "gamme2": _safe_strip(ligne_row.AR_Gamme2), + "code_barre": _safe_strip( ligne_row.AR_CodeBarre ), - "type_article": self._safe_strip(ligne_row.AR_Type), - "nature_article": self._safe_strip( + "type_article": _safe_strip(ligne_row.AR_Type), + "nature_article": _safe_strip( ligne_row.AR_Nature ), - "garantie": self._safe_strip(ligne_row.AR_Garantie), + "garantie": _safe_strip(ligne_row.AR_Garantie), "cout_standard": ( float(ligne_row.AR_CoutStd) if ligne_row.AR_CoutStd @@ -3690,7 +2259,7 @@ class SageConnector: if ligne_row.AR_PoidsBrut else 0.0 ), - "unite_vente": self._safe_strip( + "unite_vente": _safe_strip( ligne_row.AR_UniteVen ), "date_livraison_ligne": ( @@ -3706,17 +2275,17 @@ class SageConnector: else 0 ), "depot": ( - self._safe_strip(ligne_row.DE_No) + _safe_strip(ligne_row.DE_No) if hasattr(ligne_row, "DE_No") else "" ), "numero_commande": ( - self._safe_strip(ligne_row.DL_NoColis) + _safe_strip(ligne_row.DL_NoColis) if hasattr(ligne_row, "DL_NoColis") else "" ), "num_colis": ( - self._safe_strip(ligne_row.DL_Colis) + _safe_strip(ligne_row.DL_Colis) if hasattr(ligne_row, "DL_Colis") else "" ), @@ -3816,574 +2385,6 @@ class SageConnector: def lire_avoir_cache(self, numero): return self._lire_document_sql(numero, type_doc=5) - - def _cast_client(self, persist_obj): - try: - obj = win32com.client.CastTo(persist_obj, "IBOClient3") - obj.Read() - return obj - except Exception as e: - logger.debug(f" _cast_client échoue: {e}") # AJOUTER CE LOG - return None - - def _cast_article(self, persist_obj): - try: - obj = win32com.client.CastTo(persist_obj, "IBOArticle3") - obj.Read() - return obj - except: - return None - - def _extraire_client(self, client_obj): - try: - try: - numero = getattr(client_obj, "CT_Num", "").strip() - if not numero: - logger.debug("Objet sans CT_Num, skip") - return None - except Exception as e: - logger.debug(f" Erreur lecture CT_Num: {e}") - return None - - try: - intitule = getattr(client_obj, "CT_Intitule", "").strip() - if not intitule: - logger.debug(f"{numero} sans CT_Intitule") - except Exception as e: - logger.debug(f"Erreur CT_Intitule sur {numero}: {e}") - intitule = "" - - data = { - "numero": numero, - "intitule": intitule, - } - - try: - qualite_code = getattr(client_obj, "CT_Type", None) - - qualite_map = { - 0: "CLI", # Client - 1: "FOU", # Fournisseur - 2: "CLIFOU", # Client + Fournisseur - 3: "SAL", # Salarié - 4: "PRO", # Prospect - } - - data["qualite"] = qualite_map.get(qualite_code, "CLI") - data["est_fournisseur"] = qualite_code in [1, 2] - - except: - data["qualite"] = "CLI" - data["est_fournisseur"] = False - - try: - data["est_prospect"] = getattr(client_obj, "CT_Prospect", 0) == 1 - except: - data["est_prospect"] = False - - if data["est_prospect"]: - data["type_tiers"] = "prospect" - elif data["est_fournisseur"] and data["qualite"] != "CLIFOU": - data["type_tiers"] = "fournisseur" - elif data["qualite"] == "CLIFOU": - data["type_tiers"] = "client_fournisseur" - else: - data["type_tiers"] = "client" - - try: - sommeil = getattr(client_obj, "CT_Sommeil", 0) - data["est_actif"] = sommeil == 0 - data["est_en_sommeil"] = sommeil == 1 - except: - data["est_actif"] = True - data["est_en_sommeil"] = False - - try: - forme_juridique = getattr(client_obj, "CT_FormeJuridique", "").strip() - data["forme_juridique"] = forme_juridique - data["est_entreprise"] = bool(forme_juridique) - data["est_particulier"] = not bool(forme_juridique) - except: - data["forme_juridique"] = "" - 
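# --- Reviewer sketch (not part of the diff) ----------------------------------
# The retained lire_avoir_cache() above passes the SQL-side DO_Type code
# (type_doc=5) straight to _lire_document_sql(), while the helpers that
# translated between SQL codes (0..6) and COM codes (0, 10, 20, ... 60) are
# removed in this change. The mapping below restates the removed
# _convertir_type_depuis_sql table; the kind labels are an assumption based on
# common Sage 100 DO_Type usage (only 5 -> "avoir" is corroborated by the code
# shown here):
SQL_VERS_COM = {0: 0, 1: 10, 2: 20, 3: 30, 4: 40, 5: 50, 6: 60}

TYPES_DOC_SQL = {  # hypothetical labels
    0: "devis",
    1: "commande",
    2: "preparation de livraison",
    3: "bon de livraison",
    4: "retour",
    5: "avoir",  # matches lire_avoir_cache(..., type_doc=5)
    6: "facture",
}

def sql_vers_com(type_sql: int) -> int:
    """Same table as the removed _convertir_type_depuis_sql (sketch)."""
    return SQL_VERS_COM.get(type_sql, type_sql)
# ------------------------------------------------------------------------------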
data["est_entreprise"] = False - data["est_particulier"] = True - - try: - data["civilite"] = getattr(client_obj, "CT_Civilite", "").strip() - except: - data["civilite"] = "" - - try: - data["nom"] = getattr(client_obj, "CT_Nom", "").strip() - except: - data["nom"] = "" - - try: - data["prenom"] = getattr(client_obj, "CT_Prenom", "").strip() - except: - data["prenom"] = "" - - if data.get("nom") or data.get("prenom"): - parts = [] - if data.get("civilite"): - parts.append(data["civilite"]) - if data.get("prenom"): - parts.append(data["prenom"]) - if data.get("nom"): - parts.append(data["nom"]) - data["nom_complet"] = " ".join(parts) - else: - data["nom_complet"] = "" - - try: - data["contact"] = getattr(client_obj, "CT_Contact", "").strip() - except: - data["contact"] = "" - - try: - adresse_obj = getattr(client_obj, "Adresse", None) - if adresse_obj: - try: - data["adresse"] = getattr(adresse_obj, "Adresse", "").strip() - except: - data["adresse"] = "" - - try: - data["complement"] = getattr( - adresse_obj, "Complement", "" - ).strip() - except: - data["complement"] = "" - - try: - data["code_postal"] = getattr( - adresse_obj, "CodePostal", "" - ).strip() - except: - data["code_postal"] = "" - - try: - data["ville"] = getattr(adresse_obj, "Ville", "").strip() - except: - data["ville"] = "" - - try: - data["region"] = getattr(adresse_obj, "Region", "").strip() - except: - data["region"] = "" - - try: - data["pays"] = getattr(adresse_obj, "Pays", "").strip() - except: - data["pays"] = "" - else: - data["adresse"] = "" - data["complement"] = "" - data["code_postal"] = "" - data["ville"] = "" - data["region"] = "" - data["pays"] = "" - except Exception as e: - logger.debug(f"Erreur adresse sur {numero}: {e}") - data["adresse"] = "" - data["complement"] = "" - data["code_postal"] = "" - data["ville"] = "" - data["region"] = "" - data["pays"] = "" - - try: - telecom = getattr(client_obj, "Telecom", None) - if telecom: - try: - data["telephone"] = getattr(telecom, "Telephone", "").strip() - except: - data["telephone"] = "" - - try: - data["portable"] = getattr(telecom, "Portable", "").strip() - except: - data["portable"] = "" - - try: - data["telecopie"] = getattr(telecom, "Telecopie", "").strip() - except: - data["telecopie"] = "" - - try: - data["email"] = getattr(telecom, "EMail", "").strip() - except: - data["email"] = "" - - try: - site = ( - getattr(telecom, "Site", None) - or getattr(telecom, "Web", None) - or getattr(telecom, "SiteWeb", "") - ) - data["site_web"] = str(site).strip() if site else "" - except: - data["site_web"] = "" - else: - data["telephone"] = "" - data["portable"] = "" - data["telecopie"] = "" - data["email"] = "" - data["site_web"] = "" - except Exception as e: - logger.debug(f"Erreur telecom sur {numero}: {e}") - data["telephone"] = "" - data["portable"] = "" - data["telecopie"] = "" - data["email"] = "" - data["site_web"] = "" - - try: - data["siret"] = getattr(client_obj, "CT_Siret", "").strip() - except: - data["siret"] = "" - - try: - data["siren"] = getattr(client_obj, "CT_Siren", "").strip() - except: - data["siren"] = "" - - try: - data["tva_intra"] = getattr(client_obj, "CT_Identifiant", "").strip() - except: - data["tva_intra"] = "" - - try: - data["code_naf"] = ( - getattr(client_obj, "CT_CodeNAF", "").strip() - or getattr(client_obj, "CT_APE", "").strip() - ) - except: - data["code_naf"] = "" - - try: - data["secteur"] = getattr(client_obj, "CT_Secteur", "").strip() - except: - data["secteur"] = "" - - try: - effectif = getattr(client_obj, "CT_Effectif", 
None) - data["effectif"] = int(effectif) if effectif is not None else None - except: - data["effectif"] = None - - try: - ca = getattr(client_obj, "CT_ChiffreAffaire", None) - data["ca_annuel"] = float(ca) if ca is not None else None - except: - data["ca_annuel"] = None - - try: - data["commercial_code"] = getattr(client_obj, "CO_No", "").strip() - except: - try: - data["commercial_code"] = getattr( - client_obj, "CT_Commercial", "" - ).strip() - except: - data["commercial_code"] = "" - - if data.get("commercial_code"): - try: - commercial_obj = getattr(client_obj, "Commercial", None) - if commercial_obj: - commercial_obj.Read() - data["commercial_nom"] = getattr( - commercial_obj, "CO_Nom", "" - ).strip() - else: - data["commercial_nom"] = "" - except: - data["commercial_nom"] = "" - else: - data["commercial_nom"] = "" - - try: - data["categorie_tarifaire"] = getattr(client_obj, "N_CatTarif", None) - except: - data["categorie_tarifaire"] = None - - try: - data["categorie_comptable"] = getattr(client_obj, "N_CatCompta", None) - except: - data["categorie_comptable"] = None - - try: - data["encours_autorise"] = float(getattr(client_obj, "CT_Encours", 0.0)) - except: - data["encours_autorise"] = 0.0 - - try: - data["assurance_credit"] = float( - getattr(client_obj, "CT_Assurance", 0.0) - ) - except: - data["assurance_credit"] = 0.0 - - try: - data["compte_general"] = getattr(client_obj, "CG_Num", "").strip() - except: - data["compte_general"] = "" - - try: - date_creation = getattr(client_obj, "CT_DateCreate", None) - data["date_creation"] = str(date_creation) if date_creation else "" - except: - data["date_creation"] = "" - - try: - date_modif = getattr(client_obj, "CT_DateModif", None) - data["date_modification"] = str(date_modif) if date_modif else "" - except: - data["date_modification"] = "" - - return data - - except Exception as e: - logger.error(f" ERREUR GLOBALE _extraire_client: {e}", exc_info=True) - return None - - def _extraire_article(self, article_obj): - try: - data = { - "reference": getattr(article_obj, "AR_Ref", "").strip(), - "designation": getattr(article_obj, "AR_Design", "").strip(), - } - - data["code_ean"] = "" - data["code_barre"] = "" - - try: - code_barre = getattr(article_obj, "AR_CodeBarre", "").strip() - if code_barre: - data["code_ean"] = code_barre - data["code_barre"] = code_barre - - if not data["code_ean"]: - code_barre1 = getattr(article_obj, "AR_CodeBarre1", "").strip() - if code_barre1: - data["code_ean"] = code_barre1 - data["code_barre"] = code_barre1 - except: - pass - - try: - data["prix_vente"] = float(getattr(article_obj, "AR_PrixVen", 0.0)) - except: - data["prix_vente"] = 0.0 - - try: - data["prix_achat"] = float(getattr(article_obj, "AR_PrixAch", 0.0)) - except: - data["prix_achat"] = 0.0 - - try: - data["prix_revient"] = float( - getattr(article_obj, "AR_PrixRevient", 0.0) - ) - except: - data["prix_revient"] = 0.0 - - try: - data["stock_reel"] = float(getattr(article_obj, "AR_Stock", 0.0)) - except: - data["stock_reel"] = 0.0 - - try: - data["stock_mini"] = float(getattr(article_obj, "AR_StockMini", 0.0)) - except: - data["stock_mini"] = 0.0 - - try: - data["stock_maxi"] = float(getattr(article_obj, "AR_StockMaxi", 0.0)) - except: - data["stock_maxi"] = 0.0 - - try: - data["stock_reserve"] = float(getattr(article_obj, "AR_QteCom", 0.0)) - except: - data["stock_reserve"] = 0.0 - - try: - data["stock_commande"] = float( - getattr(article_obj, "AR_QteComFou", 0.0) - ) - except: - data["stock_commande"] = 0.0 - - try: - data["stock_disponible"] = 
data["stock_reel"] - data["stock_reserve"] - except: - data["stock_disponible"] = data["stock_reel"] - - try: - commentaire = getattr(article_obj, "AR_Commentaire", "").strip() - data["description"] = commentaire - except: - data["description"] = "" - - try: - design2 = getattr(article_obj, "AR_Design2", "").strip() - data["designation_complementaire"] = design2 - except: - data["designation_complementaire"] = "" - - try: - type_art = getattr(article_obj, "AR_Type", 0) - data["type_article"] = type_art - data["type_article_libelle"] = { - 0: "Article", - 1: "Prestation", - 2: "Divers", - }.get(type_art, "Inconnu") - except: - data["type_article"] = 0 - data["type_article_libelle"] = "Article" - - try: - famille_code = getattr(article_obj, "FA_CodeFamille", "").strip() - data["famille_code"] = famille_code - - if famille_code: - try: - famille_obj = getattr(article_obj, "Famille", None) - if famille_obj: - famille_obj.Read() - data["famille_libelle"] = getattr( - famille_obj, "FA_Intitule", "" - ).strip() - else: - data["famille_libelle"] = "" - except: - data["famille_libelle"] = "" - else: - data["famille_libelle"] = "" - except: - data["famille_code"] = "" - data["famille_libelle"] = "" - - try: - fournisseur_code = getattr(article_obj, "CT_Num", "").strip() - data["fournisseur_principal"] = fournisseur_code - - if fournisseur_code: - try: - fourn_obj = getattr(article_obj, "Fournisseur", None) - if fourn_obj: - fourn_obj.Read() - data["fournisseur_nom"] = getattr( - fourn_obj, "CT_Intitule", "" - ).strip() - else: - data["fournisseur_nom"] = "" - except: - data["fournisseur_nom"] = "" - else: - data["fournisseur_nom"] = "" - except: - data["fournisseur_principal"] = "" - data["fournisseur_nom"] = "" - - try: - data["unite_vente"] = getattr(article_obj, "AR_UniteVen", "").strip() - except: - data["unite_vente"] = "" - - try: - data["unite_achat"] = getattr(article_obj, "AR_UniteAch", "").strip() - except: - data["unite_achat"] = "" - - try: - data["poids"] = float(getattr(article_obj, "AR_Poids", 0.0)) - except: - data["poids"] = 0.0 - - try: - data["volume"] = float(getattr(article_obj, "AR_Volume", 0.0)) - except: - data["volume"] = 0.0 - - try: - sommeil = getattr(article_obj, "AR_Sommeil", 0) - data["est_actif"] = sommeil == 0 - data["en_sommeil"] = sommeil == 1 - except: - data["est_actif"] = True - data["en_sommeil"] = False - - try: - tva_code = getattr(article_obj, "TA_Code", "").strip() - data["tva_code"] = tva_code - - try: - tva_obj = getattr(article_obj, "Taxe1", None) - if tva_obj: - tva_obj.Read() - data["tva_taux"] = float(getattr(tva_obj, "TA_Taux", 20.0)) - else: - data["tva_taux"] = 20.0 - except: - data["tva_taux"] = 20.0 - except: - data["tva_code"] = "" - data["tva_taux"] = 20.0 - - try: - date_creation = getattr(article_obj, "AR_DateCreate", None) - data["date_creation"] = str(date_creation) if date_creation else "" - except: - data["date_creation"] = "" - - try: - date_modif = getattr(article_obj, "AR_DateModif", None) - data["date_modification"] = str(date_modif) if date_modif else "" - except: - data["date_modification"] = "" - - return data - - except Exception as e: - logger.error(f" Erreur extraction article: {e}", exc_info=True) - return { - "reference": getattr(article_obj, "AR_Ref", "").strip(), - "designation": getattr(article_obj, "AR_Design", "").strip(), - "prix_vente": 0.0, - "stock_reel": 0.0, - "code_ean": "", - "description": "", - "designation_complementaire": "", - "prix_achat": 0.0, - "prix_revient": 0.0, - "stock_mini": 0.0, - "stock_maxi": 0.0, 
- "stock_reserve": 0.0, - "stock_commande": 0.0, - "stock_disponible": 0.0, - "code_barre": "", - "type_article": 0, - "type_article_libelle": "Article", - "famille_code": "", - "famille_libelle": "", - "fournisseur_principal": "", - "fournisseur_nom": "", - "unite_vente": "", - "unite_achat": "", - "poids": 0.0, - "volume": 0.0, - "est_actif": True, - "en_sommeil": False, - "tva_code": "", - "tva_taux": 20.0, - "date_creation": "", - "date_modification": "", - } - def _extraire_fournisseur_enrichi(self, fourn_obj): try: numero = getattr(fourn_obj, "CT_Num", "").strip() @@ -4822,7 +2823,7 @@ class SageConnector: f" Client {devis_data['client']['code']} introuvable" ) - client_obj = self._cast_client(persist_client) + client_obj = _cast_client(persist_client) if not client_obj: raise ValueError( f" Impossible de charger le client {devis_data['client']['code']}" @@ -5118,18 +3119,6 @@ class SageConnector: return None def modifier_devis(self, numero: str, devis_data: Dict) -> Dict: - """ - Modifie un devis existant dans Sage - VERSION COMPLÈTE. - - Args: - numero: Numéro du devis - devis_data: dict contenant les champs à modifier: - - date_devis: str ou date (optionnel) - - date_livraison: str ou date (optionnel) - - reference: str (optionnel) - - statut: int (optionnel) - - lignes: list[dict] (optionnel) - """ logger.info("=" * 100) logger.info("=" * 100) logger.info(f" NOUVELLE MÉTHODE modifier_devis() APPELÉE POUR {numero} ") @@ -5141,7 +3130,8 @@ class SageConnector: raise RuntimeError("Connexion Sage non établie") try: - with self._com_context(), self._lock_com: + with self._com_context(), self._lock_com, self._get_sql_connection() as conn: + cursor = conn.cursor() logger.info("") logger.info("=" * 80) logger.info(f" [ÉTAPE 1] CHARGEMENT DU DEVIS {numero}") @@ -5151,10 +3141,10 @@ class SageConnector: logger.info(f" Devis {numero} chargé avec succès") logger.info("") - self._afficher_etat_document(doc, "📸 ÉTAT INITIAL") + _afficher_etat_document(doc, "📸 ÉTAT INITIAL") logger.info(" Vérification statut transformation...") - self._verifier_devis_non_transforme(numero, doc) + _verifier_devis_non_transforme(numero, doc, cursor) logger.info(" Devis non transformé - modification autorisée") logger.info("") @@ -5174,7 +3164,7 @@ class SageConnector: except Exception as e: logger.warning(f" Impossible de lire le client: {e}") - nb_lignes_initial = self._compter_lignes_document(doc) + nb_lignes_initial = _compter_lignes_document(doc) logger.info(f" Nombre de lignes actuelles: {nb_lignes_initial}") logger.info("") @@ -5267,7 +3257,7 @@ class SageConnector: logger.info(f" Cible: {nouvelle_date_str}") doc.DO_Date = pywintypes.Time(nouvelle_date) - logger.info(" ✓ doc.DO_Date affecté") + logger.info(" doc.DO_Date affecté") champs_modifies.append("date_devis") logger.info(f" Date devis sera modifiée: {ancienne_date_str} → {nouvelle_date_str}") @@ -5288,11 +3278,11 @@ class SageConnector: logger.info(f" Cible: {nouvelle_date_livr_str}") doc.DO_DateLivr = pywintypes.Time(nouvelle_date_livr) - logger.info(" ✓ doc.DO_DateLivr affecté") + logger.info(" doc.DO_DateLivr affecté") else: logger.info(" Cible: Effacement (None)") doc.DO_DateLivr = None - logger.info(" ✓ doc.DO_DateLivr = None") + logger.info(" doc.DO_DateLivr = None") champs_modifies.append("date_livraison") logger.info(" Date livraison sera modifiée") @@ -5310,7 +3300,7 @@ class SageConnector: logger.info(f" Cible: '{nouvelle_ref}'") doc.DO_Ref = nouvelle_ref - logger.info(" ✓ doc.DO_Ref affecté") + logger.info(" doc.DO_Ref affecté") 
champs_modifies.append("reference") logger.info(f" Référence sera modifiée: '{ancienne_ref}' → '{nouvelle_ref}'") @@ -5329,7 +3319,7 @@ class SageConnector: if nouveau_statut in [0, 1, 2, 3]: doc.DO_Statut = nouveau_statut - logger.info(" ✓ doc.DO_Statut affecté") + logger.info(" doc.DO_Statut affecté") champs_modifies.append("statut") logger.info(f" Statut sera modifié: {statut_actuel} → {nouveau_statut}") @@ -5437,7 +3427,7 @@ class SageConnector: article_obj = win32com.client.CastTo(persist_article, "IBOArticle3") article_obj.Read() - logger.info(f" ✓ Article chargé") + logger.info(f" Article chargé") ligne_persist = factory_lignes.Create() @@ -5448,25 +3438,25 @@ class SageConnector: try: ligne_obj.SetDefaultArticleReference(article_code, quantite) - logger.info(f" ✓ Article associé via SetDefaultArticleReference") + logger.info(f" Article associé via SetDefaultArticleReference") except: try: ligne_obj.SetDefaultArticle(article_obj, quantite) - logger.info(f" ✓ Article associé via SetDefaultArticle") + logger.info(f" Article associé via SetDefaultArticle") except: ligne_obj.DL_Design = ligne_data.get("designation", "") ligne_obj.DL_Qte = quantite - logger.info(f" ✓ Article associé manuellement") + logger.info(f" Article associé manuellement") if ligne_data.get("prix_unitaire_ht"): ligne_obj.DL_PrixUnitaire = float(ligne_data["prix_unitaire_ht"]) - logger.info(f" ✓ Prix unitaire défini") + logger.info(f" Prix unitaire défini") if ligne_data.get("remise_pourcentage", 0) > 0: try: ligne_obj.DL_Remise01REM_Valeur = float(ligne_data["remise_pourcentage"]) ligne_obj.DL_Remise01REM_Type = 0 - logger.info(f" ✓ Remise définie") + logger.info(f" Remise définie") except: logger.debug(f" Remise non supportée") @@ -5509,14 +3499,14 @@ class SageConnector: logger.info(f" Cible: '{nouvelle_ref}'") doc.DO_Ref = nouvelle_ref - logger.info(" ✓ doc.DO_Ref affecté") + logger.info(" doc.DO_Ref affecté") doc.Write() - logger.info(" ✓ Write()") + logger.info(" Write()") time.sleep(0.5) doc.Read() - logger.info(" ✓ Read()") + logger.info(" Read()") champs_modifies.append("reference") logger.info(f" Référence modifiée: '{ancienne_ref}' → '{nouvelle_ref}'") @@ -5538,14 +3528,14 @@ class SageConnector: if nouveau_statut != statut_actuel and nouveau_statut in [0, 1, 2, 3]: doc.DO_Statut = nouveau_statut - logger.info(" ✓ doc.DO_Statut affecté") + logger.info(" doc.DO_Statut affecté") doc.Write() - logger.info(" ✓ Write()") + logger.info(" Write()") time.sleep(0.5) doc.Read() - logger.info(" ✓ Read()") + logger.info(" Read()") champs_modifies.append("statut") logger.info(f" Statut modifié: {statut_actuel} → {nouveau_statut}") @@ -5570,14 +3560,14 @@ class SageConnector: logger.info(" Read() final") logger.info("") - self._afficher_etat_document(doc, "📸 ÉTAT FINAL") + _afficher_etat_document(doc, "📸 ÉTAT FINAL") logger.info("") logger.info("=" * 80) logger.info(" [ÉTAPE 9] EXTRACTION RÉSULTAT") logger.info("=" * 80) - resultat = self._extraire_infos_devis(doc, numero, champs_modifies) + resultat = _extraire_infos_devis(doc, numero, champs_modifies) logger.info(f" Résultat extrait:") logger.info(f" Numéro: {resultat['numero']}") @@ -5603,56 +3593,6 @@ class SageConnector: except Exception as e: logger.error(f" ERREUR TECHNIQUE: {e}", exc_info=True) raise RuntimeError(f"Erreur technique Sage: {str(e)}") - - def _afficher_etat_document(self, doc, titre: str): - """Affiche l'état complet d'un document.""" - logger.info("-" * 80) - logger.info(titre) - logger.info("-" * 80) - try: - logger.info(f" DO_Piece: 
{getattr(doc, 'DO_Piece', 'N/A')}") - logger.info(f" DO_Ref: '{getattr(doc, 'DO_Ref', 'N/A')}'") - logger.info(f" DO_Statut: {getattr(doc, 'DO_Statut', 'N/A')}") - - date_doc = getattr(doc, 'DO_Date', None) - date_str = date_doc.strftime('%Y-%m-%d') if date_doc else 'None' - logger.info(f" DO_Date: {date_str}") - - date_livr = getattr(doc, 'DO_DateLivr', None) - date_livr_str = date_livr.strftime('%Y-%m-%d') if date_livr else 'None' - logger.info(f" DO_DateLivr: {date_livr_str}") - - logger.info(f" DO_TotalHT: {getattr(doc, 'DO_TotalHT', 0)}€") - logger.info(f" DO_TotalTTC: {getattr(doc, 'DO_TotalTTC', 0)}€") - except Exception as e: - logger.error(f" Erreur affichage état: {e}") - logger.info("-" * 80) - - - def _compter_lignes_document(self, doc) -> int: - """Compte les lignes d'un document.""" - try: - try: - factory_lignes = doc.FactoryDocumentLigne - except: - factory_lignes = doc.FactoryDocumentVenteLigne - - count = 0 - index = 1 - while index <= 100: - try: - ligne_p = factory_lignes.List(index) - if ligne_p is None: - break - count += 1 - index += 1 - except: - break - return count - except Exception as e: - logger.warning(f" Erreur comptage lignes: {e}") - return 0 - def _charger_devis(self, numero: str): """Charge un devis depuis Sage.""" @@ -5665,7 +3605,7 @@ class SageConnector: if not persist: logger.warning(" ReadPiece a échoué, recherche dans la liste...") - persist = self._rechercher_devis_par_numero(numero, factory) + persist = _rechercher_devis_par_numero(numero, factory) if not persist: raise ValueError(f" Devis {numero} INTROUVABLE") @@ -5676,97 +3616,6 @@ class SageConnector: logger.info(f" Devis {numero} chargé") return doc - - def _rechercher_devis_par_numero(self, numero: str, factory): - """Recherche un devis par numéro dans la liste.""" - logger.info(f" Recherche de {numero} dans la liste...") - - index = 1 - while index < 10000: - try: - persist_test = factory.List(index) - if persist_test is None: - break - - doc_test = win32com.client.CastTo(persist_test, "IBODocumentVente3") - doc_test.Read() - - if ( - getattr(doc_test, "DO_Type", -1) == 0 - and getattr(doc_test, "DO_Piece", "") == numero - ): - logger.info(f" Trouvé à l'index {index}") - return persist_test - - index += 1 - except: - index += 1 - - logger.error(f" Devis {numero} non trouvé dans la liste") - return None - - - def _verifier_devis_non_transforme(self, numero: str, doc): - """Vérifie que le devis n'est pas transformé.""" - verification = self.verifier_si_deja_transforme_sql(numero, 0) - - if verification["deja_transforme"]: - docs_cibles = verification["documents_cibles"] - nums = [d["numero"] for d in docs_cibles] - raise ValueError( - f" Devis {numero} déjà transformé en {len(docs_cibles)} document(s): {', '.join(nums)}" - ) - - statut_actuel = getattr(doc, "DO_Statut", 0) - if statut_actuel == 5: - raise ValueError(f" Devis {numero} déjà transformé (statut=5)") - - - def _extraire_infos_devis(self, doc, numero: str, champs_modifies: list) -> Dict: - """Extrait les informations complètes du devis.""" - total_ht = float(getattr(doc, "DO_TotalHT", 0.0)) - total_ttc = float(getattr(doc, "DO_TotalTTC", 0.0)) - statut = getattr(doc, "DO_Statut", 0) - reference = getattr(doc, "DO_Ref", "") - - date_devis = None - try: - date_doc = getattr(doc, "DO_Date", None) - if date_doc: - date_devis = date_doc.strftime("%Y-%m-%d") - except: - pass - - date_livraison = None - try: - date_livr = getattr(doc, "DO_DateLivr", None) - if date_livr: - date_livraison = date_livr.strftime("%Y-%m-%d") - except: - pass - 
- client_code = "" - try: - client_obj = getattr(doc, "Client", None) - if client_obj: - client_obj.Read() - client_code = getattr(client_obj, "CT_Num", "") - except: - pass - - return { - "numero": numero, - "total_ht": total_ht, - "total_ttc": total_ttc, - "reference": reference, - "date_devis": date_devis, - "date_livraison": date_livraison, - "champs_modifies": champs_modifies, - "statut": statut, - "client_code": client_code, - } - - def lire_devis(self, numero_devis): try: devis = self._lire_document_sql(numero_devis, type_doc=0) @@ -5783,183 +3632,6 @@ class SageConnector: def lire_document(self, numero, type_doc): return self._lire_document_sql(numero, type_doc) - def verifier_si_deja_transforme_sql(self, numero_source, type_source): - """Version corrigée avec normalisation des types""" - logger.info( - f"[VERIF] Vérification transformations de {numero_source} (type {type_source})" - ) - - logger.info( - f"[VERIF] Vérification transformations de {numero_source} (type {type_source})" - ) - - logger.info(f"[DEBUG] Type source brut: {type_source}") - logger.info( - f"[DEBUG] Type source après normalisation: {self._normaliser_type_document(type_source)}" - ) - logger.info( - f"[DEBUG] Type source après normalisation SQL: {self._convertir_type_pour_sql(type_source)}" - ) - - type_source = self._convertir_type_pour_sql(type_source) - - champ_liaison_mapping = { - 0: "DL_PieceDE", - 1: "DL_PieceBC", - 3: "DL_PieceBL", - } - - champ_liaison = champ_liaison_mapping.get(type_source) - - if not champ_liaison: - logger.warning(f"[VERIF] Type source {type_source} non géré") - return {"deja_transforme": False, "documents_cibles": []} - - try: - with self._get_sql_connection() as conn: - cursor = conn.cursor() - - query = f""" - SELECT DISTINCT - dc.DO_Piece, - dc.DO_Type, - dc.DO_Statut, - (SELECT COUNT(*) FROM F_DOCLIGNE - WHERE DO_Piece = dc.DO_Piece AND DO_Type = dc.DO_Type) as NbLignes - FROM F_DOCENTETE dc - INNER JOIN F_DOCLIGNE dl ON dc.DO_Piece = dl.DO_Piece AND dc.DO_Type = dl.DO_Type - WHERE dl.{champ_liaison} = ? 
- ORDER BY dc.DO_Type, dc.DO_Piece - """ - - cursor.execute(query, (numero_source,)) - resultats = cursor.fetchall() - - documents_cibles = [] - for row in resultats: - type_brut = int(row.DO_Type) - type_normalise = self._convertir_type_depuis_sql(type_brut) - - doc = { - "numero": row.DO_Piece.strip() if row.DO_Piece else "", - "type": type_normalise, # ← TYPE NORMALISÉ - "type_brut": type_brut, # Garder aussi le type original - "type_libelle": self._get_type_libelle(type_brut), - "statut": int(row.DO_Statut) if row.DO_Statut else 0, - "nb_lignes": int(row.NbLignes) if row.NbLignes else 0, - } - documents_cibles.append(doc) - logger.info( - f"[VERIF] Trouvé: {doc['numero']} " - f"(type {type_brut}→{type_normalise} - {doc['type_libelle']}) " - f"- {doc['nb_lignes']} lignes" - ) - - deja_transforme = len(documents_cibles) > 0 - - if deja_transforme: - logger.info( - f"[VERIF] Document {numero_source} a {len(documents_cibles)} transformation(s)" - ) - else: - logger.info( - f"[VERIF] Document {numero_source} pas encore transformé" - ) - - return { - "deja_transforme": deja_transforme, - "documents_cibles": documents_cibles, - } - - except Exception as e: - logger.error(f"[VERIF] Erreur vérification: {e}") - return {"deja_transforme": False, "documents_cibles": []} - - def peut_etre_transforme(self, numero_source, type_source, type_cible): - """Version corrigée avec normalisation""" - type_source = self._normaliser_type_document(type_source) - type_cible = self._normaliser_type_document(type_cible) - - logger.info( - f"[VERIF_TRANSFO] {numero_source} " - f"(type {type_source}) → type {type_cible}" - ) - - verif = self.verifier_si_deja_transforme_sql(numero_source, type_source) - - docs_meme_type = [ - d for d in verif["documents_cibles"] if d["type"] == type_cible - ] - - if docs_meme_type: - nums = [d["numero"] for d in docs_meme_type] - return { - "possible": False, - "raison": f"Document déjà transformé en {self._get_type_libelle(type_cible)}", - "documents_existants": docs_meme_type, - "message_detaille": f"Document(s) existant(s): {', '.join(nums)}", - } - - return { - "possible": True, - "raison": "Transformation possible", - "documents_existants": [], - } - - def _get_type_libelle(self, type_doc: int) -> str: - """ - Retourne le libellé d'un type de document. - Gère les 2 formats : valeur réelle Sage (0,10,20...) ET valeur affichée (0,1,2...) - """ - types_officiels = { - 0: "Devis", - 10: "Bon de commande", - 20: "Préparation", - 30: "Bon de livraison", - 40: "Bon de retour", - 50: "Bon d'avoir", - 60: "Facture", - } - - types_alternatifs = { - 1: "Bon de commande", - 2: "Préparation", - 3: "Bon de livraison", - 4: "Bon de retour", - 5: "Bon d'avoir", - 6: "Facture", - } - - if type_doc in types_officiels: - return types_officiels[type_doc] - - if type_doc in types_alternatifs: - return types_alternatifs[type_doc] - - return f"Type {type_doc}" - - def _normaliser_type_document(self, type_doc: int) -> int: - """ - Normalise le type de document vers la valeur officielle Sage. - Convertit 1→10, 2→20, etc. 
si nécessaire - """ - - logger.info(f"[INFO] TYPE RECU{type_doc}") - - if type_doc in [0, 10, 20, 30, 40, 50, 60]: - return type_doc - - mapping_normalisation = { - 1: 10, # Commande - 2: 20, # Préparation - 3: 30, # BL - 4: 40, # Retour - 5: 50, # Avoir - 6: 60, # Facture - } - - return mapping_normalisation.get(type_doc, type_doc) - def transformer_document( self, numero_source, @@ -5998,7 +3670,7 @@ class SageConnector: if verifier_doublons: logger.info("[TRANSFORM] Vérification des doublons...") - verif = self.peut_etre_transforme(numero_source, type_source, type_cible) + verif = peut_etre_transforme(numero_source, type_source, type_cible) if not verif["possible"]: docs = [d["numero"] for d in verif.get("documents_existants", [])] @@ -6090,7 +3762,7 @@ class SageConnector: can_process = True if not can_process: - erreurs = self.lire_erreurs_sage(transformer, "Transformer") + erreurs = lire_erreurs_sage(transformer, "Transformer") if erreurs: msgs = [f"{e['field']}: {e['description']}" for e in erreurs] raise RuntimeError( @@ -6115,7 +3787,7 @@ class SageConnector: except Exception as e: logger.error(f"[TRANSFORM] Erreur Process(): {e}") - erreurs = self.lire_erreurs_sage(transformer, "Transformer") + erreurs = lire_erreurs_sage(transformer, "Transformer") if erreurs: msgs = [ f"{e['field']}: {e['description']}" for e in erreurs @@ -6238,123 +3910,6 @@ class SageConnector: logger.error(f"[TRANSFORM] Erreur inattendue: {e}", exc_info=True) raise RuntimeError(f"Échec transformation: {str(e)}") - def lire_erreurs_sage(self, obj, nom_obj=""): - """ - Lit toutes les erreurs d'un objet Sage COM. - Utilisé pour diagnostiquer les échecs de Process(). - """ - erreurs = [] - try: - if not hasattr(obj, "Errors") or obj.Errors is None: - return erreurs - - nb_erreurs = 0 - try: - nb_erreurs = obj.Errors.Count - except: - return erreurs - - if nb_erreurs == 0: - return erreurs - - for i in range(1, nb_erreurs + 1): - try: - err = None - try: - err = obj.Errors.Item(i) - except: - try: - err = obj.Errors(i) - except: - try: - err = obj.Errors.Item(i - 1) - except: - pass - - if err is not None: - description = "" - field = "" - number = "" - - for attr in ["Description", "Descr", "Message", "Text"]: - try: - val = getattr(err, attr, None) - if val: - description = str(val) - break - except: - pass - - for attr in ["Field", "FieldName", "Champ", "Property"]: - try: - val = getattr(err, attr, None) - if val: - field = str(val) - break - except: - pass - - for attr in ["Number", "Code", "ErrorCode", "Numero"]: - try: - val = getattr(err, attr, None) - if val is not None: - number = str(val) - break - except: - pass - - if description or field or number: - erreurs.append( - { - "source": nom_obj, - "index": i, - "description": description or "Erreur inconnue", - "field": field or "?", - "number": number or "?", - } - ) - - except Exception as e: - logger.debug(f"Erreur lecture erreur {i}: {e}") - continue - - except Exception as e: - logger.debug(f"Erreur globale lecture erreurs {nom_obj}: {e}") - - return erreurs - - def _find_document_in_list(self, numero, type_doc): - """Cherche un document dans List() si ReadPiece échoue""" - try: - factory = self.cial.FactoryDocumentVente - index = 1 - - while index < 10000: - try: - persist = factory.List(index) - if persist is None: - break - - doc = win32com.client.CastTo(persist, "IBODocumentVente3") - doc.Read() - - if ( - getattr(doc, "DO_Type", -1) == type_doc - and getattr(doc, "DO_Piece", "") == numero - ): - logger.info(f"[TRANSFORM] Document trouve a 
l'index {index}") - return persist - - index += 1 - except: - index += 1 - continue - - return None - except Exception as e: - logger.error(f"[TRANSFORM] Erreur recherche document: {e}") - return None - def mettre_a_jour_champ_libre(self, doc_id, type_doc, nom_champ, valeur): """Mise à jour champ libre pour Universign ID""" try: @@ -6389,7 +3944,7 @@ class SageConnector: persist = factory.ReadNumero(code_client) if persist: - return self._cast_client(persist) + return _cast_client(persist) except: pass @@ -6407,7 +3962,7 @@ class SageConnector: if not persist_client: return None - client = self._cast_client(persist_client) + client = _cast_client(persist_client) if not client: return None @@ -6475,13 +4030,13 @@ class SageConnector: for row in rows: prospects.append( { - "numero": self._safe_strip(row.CT_Num), - "intitule": self._safe_strip(row.CT_Intitule), - "adresse": self._safe_strip(row.CT_Adresse), - "ville": self._safe_strip(row.CT_Ville), - "code_postal": self._safe_strip(row.CT_CodePostal), - "telephone": self._safe_strip(row.CT_Telephone), - "email": self._safe_strip(row.CT_EMail), + "numero": _safe_strip(row.CT_Num), + "intitule": _safe_strip(row.CT_Intitule), + "adresse": _safe_strip(row.CT_Adresse), + "ville": _safe_strip(row.CT_Ville), + "code_postal": _safe_strip(row.CT_CodePostal), + "telephone": _safe_strip(row.CT_Telephone), + "email": _safe_strip(row.CT_EMail), "type": 0, "est_prospect": True, } @@ -6519,26 +4074,26 @@ class SageConnector: return None return { - "numero": self._safe_strip(row.CT_Num), - "intitule": self._safe_strip(row.CT_Intitule), + "numero": _safe_strip(row.CT_Num), + "intitule": _safe_strip(row.CT_Intitule), "type": 0, - "qualite": self._safe_strip(row.CT_Qualite), + "qualite": _safe_strip(row.CT_Qualite), "est_prospect": True, - "adresse": self._safe_strip(row.CT_Adresse), - "complement": self._safe_strip(row.CT_Complement), - "ville": self._safe_strip(row.CT_Ville), - "code_postal": self._safe_strip(row.CT_CodePostal), - "pays": self._safe_strip(row.CT_Pays), - "telephone": self._safe_strip(row.CT_Telephone), - "portable": self._safe_strip(row.CT_Portable), - "email": self._safe_strip(row.CT_EMail), - "telecopie": self._safe_strip(row.CT_Telecopie), - "siret": self._safe_strip(row.CT_Siret), - "tva_intra": self._safe_strip(row.CT_Identifiant), + "adresse": _safe_strip(row.CT_Adresse), + "complement": _safe_strip(row.CT_Complement), + "ville": _safe_strip(row.CT_Ville), + "code_postal": _safe_strip(row.CT_CodePostal), + "pays": _safe_strip(row.CT_Pays), + "telephone": _safe_strip(row.CT_Telephone), + "portable": _safe_strip(row.CT_Portable), + "email": _safe_strip(row.CT_EMail), + "telecopie": _safe_strip(row.CT_Telecopie), + "siret": _safe_strip(row.CT_Siret), + "tva_intra": _safe_strip(row.CT_Identifiant), "est_actif": (row.CT_Sommeil == 0), - "contact": self._safe_strip(row.CT_Contact), - "forme_juridique": self._safe_strip(row.CT_FormeJuridique), - "secteur": self._safe_strip(row.CT_Secteur), + "contact": _safe_strip(row.CT_Contact), + "forme_juridique": _safe_strip(row.CT_FormeJuridique), + "secteur": _safe_strip(row.CT_Secteur), } except Exception as e: @@ -6574,11 +4129,11 @@ class SageConnector: for row in rows: avoirs.append( { - "numero": self._safe_strip(row.DO_Piece), - "reference": self._safe_strip(row.DO_Ref), + "numero": _safe_strip(row.DO_Piece), + "reference": _safe_strip(row.DO_Ref), "date": str(row.DO_Date) if row.DO_Date else "", - "client_code": self._safe_strip(row.CT_Num), - "client_intitule": 
self._safe_strip(row.CT_Intitule), + "client_code": _safe_strip(row.CT_Num), + "client_intitule": _safe_strip(row.CT_Intitule), "total_ht": ( float(row.DO_TotalHT) if row.DO_TotalHT else 0.0 ), @@ -6598,67 +4153,11 @@ class SageConnector: def lire_avoir(self, numero): return self._lire_document_sql(numero, type_doc=50) - def lister_livraisons(self, limit=100, statut=None): - """ Liste les livraisons via SQL (méthode legacy)""" - try: - with self._get_sql_connection() as conn: - cursor = conn.cursor() - - query = f""" - SELECT TOP ({limit}) - d.DO_Piece, d.DO_Date, d.DO_Ref, d.DO_TotalHT, d.DO_TotalTTC, - d.DO_Statut, d.CT_Num, c.CT_Intitule - FROM F_DOCENTETE d - LEFT JOIN F_COMPTET c ON d.CT_Num = c.CT_Num - WHERE d.DO_Type = 30 - """ - - params = [] - - if statut is not None: - query += " AND d.DO_Statut = ?" - params.append(statut) - - query += " ORDER BY d.DO_Date DESC" - - cursor.execute(query, params) - rows = cursor.fetchall() - - livraisons = [] - for row in rows: - livraisons.append( - { - "numero": self._safe_strip(row.DO_Piece), - "reference": self._safe_strip(row.DO_Ref), - "date": str(row.DO_Date) if row.DO_Date else "", - "client_code": self._safe_strip(row.CT_Num), - "client_intitule": self._safe_strip(row.CT_Intitule), - "total_ht": ( - float(row.DO_TotalHT) if row.DO_TotalHT else 0.0 - ), - "total_ttc": ( - float(row.DO_TotalTTC) if row.DO_TotalTTC else 0.0 - ), - "statut": row.DO_Statut if row.DO_Statut is not None else 0, - } - ) - - return livraisons - - except Exception as e: - logger.error(f" Erreur SQL livraisons: {e}") - return [] - def lire_livraison(self, numero): """ Lit UNE livraison via SQL (avec lignes)""" return self._lire_document_sql(numero, type_doc=30) - def creer_contact(self, contact_data: Dict) -> Dict: - """ - Crée un nouveau contact dans F_CONTACTT via COM - VERSION FINALE COMPLÈTE - """ if not self.cial: raise RuntimeError("Connexion Sage non établie") @@ -6668,20 +4167,18 @@ class SageConnector: logger.info("[CREATION CONTACT F_CONTACTT]") logger.info("=" * 80) - # Validation if not contact_data.get("numero"): raise ValueError("numero (code client) obligatoire") if not contact_data.get("nom"): raise ValueError("nom obligatoire") - numero_client = self._clean_str(contact_data["numero"], 17).upper() - nom = self._clean_str(contact_data["nom"], 35) - prenom = self._clean_str(contact_data.get("prenom", ""), 35) + numero_client = _clean_str(contact_data["numero"], 17).upper() + nom = _clean_str(contact_data["nom"], 35) + prenom = _clean_str(contact_data.get("prenom", ""), 35) logger.info(f" CLIENT: {numero_client}") logger.info(f" CONTACT: {prenom} {nom}") - # Charger le client logger.info(f"[1] Chargement du client: {numero_client}") factory_client = self.cial.CptaApplication.FactoryClient try: @@ -6695,7 +4192,6 @@ class SageConnector: except Exception as e: raise ValueError(f"Client {numero_client} introuvable: {e}") - # Via FactoryTiersContact du client logger.info("[2] Creation via FactoryTiersContact") if not hasattr(client_obj, 'FactoryTiersContact'): @@ -6704,11 +4200,9 @@ class SageConnector: factory_contact = client_obj.FactoryTiersContact logger.info(f" OK FactoryTiersContact: {type(factory_contact).__name__}") - # Créer l'objet persist = factory_contact.Create() logger.info(f" Objet cree: {type(persist).__name__}") - # Cast vers IBOTiersContact3 contact = None interfaces_a_tester = [ "IBOTiersContact3", @@ -6721,7 +4215,6 @@ class SageConnector: try: temp = win32com.client.CastTo(persist, interface_name) - # Vérifier si Nom existe if 
hasattr(temp, '_prop_map_put_'): props = list(temp._prop_map_put_.keys()) logger.info(f" Test {interface_name}: props={props[:15]}") @@ -6737,15 +4230,12 @@ class SageConnector: logger.error(" ERROR Aucun cast ne fonctionne") raise RuntimeError("Impossible de caster vers une interface contact valide") - # Configuration du contact logger.info("[3] Configuration du contact") - # Vérifier les propriétés disponibles if hasattr(contact, '_prop_map_put_'): props = list(contact._prop_map_put_.keys()) logger.info(f" Proprietes disponibles: {props}") - # Nom (obligatoire) try: contact.Nom = nom logger.info(f" OK Nom = {nom}") @@ -6753,7 +4243,6 @@ class SageConnector: logger.error(f" ERROR Impossible de definir Nom: {e}") raise RuntimeError(f"Echec definition Nom: {e}") - # Prénom if prenom: try: contact.Prenom = prenom @@ -6761,7 +4250,6 @@ class SageConnector: except Exception as e: logger.warning(f" WARN Prenom: {e}") - # Civilité if contact_data.get("civilite"): civilite_map = {"M.": 0, "Mme": 1, "Mlle": 2, "Societe": 3} civilite_code = civilite_map.get(contact_data["civilite"]) @@ -6772,26 +4260,23 @@ class SageConnector: except Exception as e: logger.warning(f" WARN Civilite: {e}") - # Fonction if contact_data.get("fonction"): - fonction = self._clean_str(contact_data["fonction"], 35) + fonction = _clean_str(contact_data["fonction"], 35) try: contact.Fonction = fonction logger.info(f" OK Fonction = {fonction}") except Exception as e: logger.warning(f" WARN Fonction: {e}") - # Service if contact_data.get("service_code") is not None: try: - service = self._safe_int(contact_data["service_code"]) + service = _safe_int(contact_data["service_code"]) if service is not None and hasattr(contact, 'ServiceContact'): contact.ServiceContact = service logger.info(f" OK ServiceContact = {service}") except Exception as e: logger.warning(f" WARN ServiceContact: {e}") - # Telecom logger.info("[4] Coordonnees (Telecom)") if hasattr(contact, 'Telecom'): @@ -6800,33 +4285,32 @@ class SageConnector: logger.info(f" Type Telecom: {type(telecom).__name__}") if contact_data.get("telephone"): - telephone = self._clean_str(contact_data["telephone"], 21) - if self._try_set_attribute(telecom, "Telephone", telephone): + telephone = _clean_str(contact_data["telephone"], 21) + if _try_set_attribute(telecom, "Telephone", telephone): logger.info(f" Telephone = {telephone}") if contact_data.get("portable"): - portable = self._clean_str(contact_data["portable"], 21) - if self._try_set_attribute(telecom, "Portable", portable): + portable = _clean_str(contact_data["portable"], 21) + if _try_set_attribute(telecom, "Portable", portable): logger.info(f" Portable = {portable}") if contact_data.get("email"): - email = self._clean_str(contact_data["email"], 69) - if self._try_set_attribute(telecom, "EMail", email): + email = _clean_str(contact_data["email"], 69) + if _try_set_attribute(telecom, "EMail", email): logger.info(f" EMail = {email}") if contact_data.get("telecopie"): - fax = self._clean_str(contact_data["telecopie"], 21) - if self._try_set_attribute(telecom, "Telecopie", fax): + fax = _clean_str(contact_data["telecopie"], 21) + if _try_set_attribute(telecom, "Telecopie", fax): logger.info(f" Telecopie = {fax}") except Exception as e: logger.warning(f" WARN Erreur Telecom: {e}") - # Réseaux sociaux logger.info("[5] Reseaux sociaux") if contact_data.get("facebook"): - facebook = self._clean_str(contact_data["facebook"], 69) + facebook = _clean_str(contact_data["facebook"], 69) try: contact.Facebook = facebook logger.info(f" 
Facebook = {facebook}") @@ -6834,7 +4318,7 @@ class SageConnector: pass if contact_data.get("linkedin"): - linkedin = self._clean_str(contact_data["linkedin"], 69) + linkedin = _clean_str(contact_data["linkedin"], 69) try: contact.LinkedIn = linkedin logger.info(f" LinkedIn = {linkedin}") @@ -6842,21 +4326,19 @@ class SageConnector: pass if contact_data.get("skype"): - skype = self._clean_str(contact_data["skype"], 69) + skype = _clean_str(contact_data["skype"], 69) try: contact.Skype = skype logger.info(f" Skype = {skype}") except: pass - # SetDefault try: contact.SetDefault() logger.info(" OK SetDefault() applique") except Exception as e: logger.warning(f" WARN SetDefault(): {e}") - # Enregistrer logger.info("[6] Enregistrement du contact") try: contact.Write() @@ -6875,7 +4357,6 @@ class SageConnector: logger.error(f" ERROR Write: {error_detail}") raise RuntimeError(f"Echec enregistrement: {error_detail}") - # Récupérer les IDs contact_no = None n_contact = None try: @@ -6885,7 +4366,6 @@ class SageConnector: except: pass - # Contact par défaut est_defaut = contact_data.get("est_defaut", False) if est_defaut and (contact_no or n_contact): logger.info("[7] Definition comme contact par defaut") @@ -6920,8 +4400,7 @@ class SageConnector: logger.info(f" CT_No={contact_no}") logger.info("=" * 80) - # Utiliser _contact_to_dict - contact_dict = self._contact_to_dict( + contact_dict = _contact_to_dict( contact, numero_client=numero_client, contact_numero=contact_no, @@ -6947,10 +4426,6 @@ class SageConnector: raise RuntimeError(f"Erreur technique: {e}") def modifier_contact(self, numero: str, contact_numero: int, updates: Dict) -> Dict: - """ - Modifie un contact existant via COM - VERSION COMPLÈTE REFACTORISÉE - """ if not self.cial: raise RuntimeError("Connexion Sage non établie") @@ -6960,7 +4435,6 @@ class SageConnector: logger.info(f"[MODIFICATION CONTACT] CT_No={contact_numero}") logger.info("=" * 80) - # Charger le client logger.info("[1] Chargement du client") factory_client = self.cial.CptaApplication.FactoryClient try: @@ -6974,14 +4448,12 @@ class SageConnector: except Exception as e: raise ValueError(f"Client {numero} introuvable: {e}") - # Charger le contact via SQL puis DossierContact logger.info("[2] Chargement du contact") nom_recherche = None prenom_recherche = None try: - # Récupérer nom/prénom via SQL with self._get_sql_connection() as conn: cursor = conn.cursor() cursor.execute( @@ -7001,7 +4473,6 @@ class SageConnector: except Exception as e: raise ValueError(f"Contact introuvable: {e}") - # Charger via FactoryDossierContact factory_dossier = self.cial.CptaApplication.FactoryDossierContact persist = factory_dossier.ReadNomPrenom(nom_recherche, prenom_recherche) @@ -7012,11 +4483,9 @@ class SageConnector: contact.Read() logger.info(f" OK Contact charge: {contact.Nom}") - # Appliquer les modifications logger.info("[3] Application des modifications") modifications_appliquees = [] - # Identité if "civilite" in updates: civilite_map = {"M.": 0, "Mme": 1, "Mlle": 2, "Societe": 3} civilite_code = civilite_map.get(updates["civilite"]) @@ -7029,7 +4498,7 @@ class SageConnector: pass if "nom" in updates: - nom = self._clean_str(updates["nom"], 35) + nom = _clean_str(updates["nom"], 35) if nom: try: contact.Nom = nom @@ -7039,7 +4508,7 @@ class SageConnector: pass if "prenom" in updates: - prenom = self._clean_str(updates["prenom"], 35) + prenom = _clean_str(updates["prenom"], 35) try: contact.Prenom = prenom logger.info(f" Prenom = {prenom}") @@ -7048,7 +4517,7 @@ class 
SageConnector: pass if "fonction" in updates: - fonction = self._clean_str(updates["fonction"], 35) + fonction = _clean_str(updates["fonction"], 35) try: contact.Fonction = fonction logger.info(f" Fonction = {fonction}") @@ -7056,9 +4525,8 @@ class SageConnector: except: pass - # Service if "service_code" in updates: - service = self._safe_int(updates["service_code"]) + service = _safe_int(updates["service_code"]) if service is not None and hasattr(contact, 'ServiceContact'): try: contact.ServiceContact = service @@ -7067,40 +4535,38 @@ class SageConnector: except: pass - # Coordonnées via Telecom if hasattr(contact, 'Telecom'): try: telecom = contact.Telecom if "telephone" in updates: - telephone = self._clean_str(updates["telephone"], 21) - if self._try_set_attribute(telecom, "Telephone", telephone): + telephone = _clean_str(updates["telephone"], 21) + if _try_set_attribute(telecom, "Telephone", telephone): logger.info(f" Telephone = {telephone}") modifications_appliquees.append("telephone") if "portable" in updates: - portable = self._clean_str(updates["portable"], 21) - if self._try_set_attribute(telecom, "Portable", portable): + portable = _clean_str(updates["portable"], 21) + if _try_set_attribute(telecom, "Portable", portable): logger.info(f" Portable = {portable}") modifications_appliquees.append("portable") if "email" in updates: - email = self._clean_str(updates["email"], 69) - if self._try_set_attribute(telecom, "EMail", email): + email = _clean_str(updates["email"], 69) + if _try_set_attribute(telecom, "EMail", email): logger.info(f" EMail = {email}") modifications_appliquees.append("email") if "telecopie" in updates: - fax = self._clean_str(updates["telecopie"], 21) - if self._try_set_attribute(telecom, "Telecopie", fax): + fax = _clean_str(updates["telecopie"], 21) + if _try_set_attribute(telecom, "Telecopie", fax): logger.info(f" Telecopie = {fax}") modifications_appliquees.append("telecopie") except: pass - # Réseaux sociaux if "facebook" in updates: - facebook = self._clean_str(updates["facebook"], 69) + facebook = _clean_str(updates["facebook"], 69) try: contact.Facebook = facebook logger.info(f" Facebook = {facebook}") @@ -7109,7 +4575,7 @@ class SageConnector: pass if "linkedin" in updates: - linkedin = self._clean_str(updates["linkedin"], 69) + linkedin = _clean_str(updates["linkedin"], 69) try: contact.LinkedIn = linkedin logger.info(f" LinkedIn = {linkedin}") @@ -7118,7 +4584,7 @@ class SageConnector: pass if "skype" in updates: - skype = self._clean_str(updates["skype"], 69) + skype = _clean_str(updates["skype"], 69) try: contact.Skype = skype logger.info(f" Skype = {skype}") @@ -7126,7 +4592,6 @@ class SageConnector: except: pass - # Enregistrement du contact logger.info("[4] Enregistrement") try: contact.Write() @@ -7145,7 +4610,6 @@ class SageConnector: logger.info(f" Modifications appliquees: {', '.join(modifications_appliquees)}") - # Gestion du contact par défaut est_defaut_demande = updates.get("est_defaut") est_actuellement_defaut = False @@ -7180,8 +4644,7 @@ class SageConnector: logger.info(f"[SUCCES] Contact modifie: CT_No={contact_numero}") logger.info("=" * 80) - # Utiliser _contact_to_dict - contact_dict = self._contact_to_dict( + contact_dict = _contact_to_dict( contact, numero_client=numero, contact_numero=contact_numero, @@ -7198,12 +4661,7 @@ class SageConnector: logger.error(f"[ERREUR] {e}", exc_info=True) raise RuntimeError(f"Erreur technique: {e}") - def definir_contact_defaut(self, numero: str, contact_numero: int) -> Dict: - """ - Définit un 
contact comme contact par défaut du client - VERSION COMPLÈTE REFACTORISÉE - """ if not self.cial: raise RuntimeError("Connexion Sage non établie") @@ -7213,7 +4671,6 @@ class SageConnector: logger.info(f"[DEFINIR CONTACT PAR DEFAUT] Client={numero}, Contact={contact_numero}") logger.info("=" * 80) - # Récupérer le nom du contact via SQL logger.info("[1] Recuperation infos contact") nom_contact = None prenom_contact = None @@ -7239,7 +4696,6 @@ class SageConnector: except Exception as e: raise ValueError(f"Contact introuvable: {e}") - # Charger le client logger.info("[2] Chargement du client") factory_client = self.cial.CptaApplication.FactoryClient @@ -7255,7 +4711,6 @@ class SageConnector: except Exception as e: raise ValueError(f"Client introuvable: {e}") - # Définir le contact par défaut logger.info("[3] Definition du contact par defaut") ancien_contact = getattr(client, "CT_Contact", "") @@ -7269,7 +4724,6 @@ class SageConnector: except: pass - # Enregistrement logger.info("[4] Enregistrement") try: client.Write() @@ -7305,11 +4759,7 @@ class SageConnector: logger.error(f"[ERREUR] {e}", exc_info=True) raise RuntimeError(f"Erreur technique: {e}") - def lister_contacts(self, numero: str) -> List[Dict]: - """ - Liste tous les contacts d'un client - """ try: with self._get_sql_connection() as conn: return self._get_contacts_client(numero, conn) @@ -7317,98 +4767,7 @@ class SageConnector: logger.error(f"Erreur liste contacts: {e}") raise RuntimeError(f"Erreur lecture contacts: {str(e)}") - - def obtenir_contact(self, numero: str, contact_numero: int) -> Optional[Dict]: - """ - Récupère un contact spécifique par son CT_No - """ - try: - with self._get_sql_connection() as conn: - cursor = conn.cursor() - - query = """ - SELECT - CT_Num, CT_No, N_Contact, - CT_Civilite, CT_Nom, CT_Prenom, CT_Fonction, - N_Service, - CT_Telephone, CT_TelPortable, CT_Telecopie, CT_EMail, - CT_Facebook, CT_LinkedIn, CT_Skype - FROM F_CONTACTT - WHERE CT_Num = ? AND CT_No = ? 
- """ - - cursor.execute(query, [numero, contact_numero]) - row = cursor.fetchone() - - if not row: - return None - - return self._row_to_contact_dict(row) - - except Exception as e: - logger.error(f"Erreur obtention contact: {e}") - raise RuntimeError(f"Erreur lecture contact: {str(e)}") - - - def obtenir_contact_defaut(self, numero: str) -> Optional[Dict]: - """ - Récupère le contact par défaut d'un client - - Returns: - Dictionnaire avec les infos du contact par défaut, ou None si non défini - """ - if not self.cial: - raise RuntimeError("Connexion Sage non établie") - - try: - with self._com_context(), self._lock_com: - factory_client = self.cial.CptaApplication.FactoryClient - persist_client = factory_client.ReadNumero(numero) - - if not persist_client: - raise ValueError(f"Client {numero} non trouvé") - - client = win32com.client.CastTo(persist_client, "IBOClient3") - client.Read() - - ct_no_defaut = None - try: - ct_no_defaut = getattr(client, "CT_NoContact", None) - if ct_no_defaut: - logger.info(f"Contact par défaut via CT_NoContact: {ct_no_defaut}") - except: - pass - - nom_contact_defaut = None - try: - nom_contact_defaut = getattr(client, "CT_Contact", None) - if nom_contact_defaut: - logger.info(f"Contact par défaut via CT_Contact: {nom_contact_defaut}") - except: - pass - - if ct_no_defaut: - return self.obtenir_contact(numero, ct_no_defaut) - - if nom_contact_defaut: - contacts = self.lister_contacts(numero) - for contact in contacts: - nom_complet = f"{contact.get('prenom', '')} {contact['nom']}".strip() - if nom_complet == nom_contact_defaut or contact['nom'] == nom_contact_defaut: - return {**contact, "est_defaut": True} - - return None - - except Exception as e: - logger.error(f"Erreur obtention contact par défaut: {e}") - return None - - def supprimer_contact(self, numero: str, contact_numero: int) -> Dict: - """ - Supprime un contact via COM - VERSION COMPLÈTE REFACTORISÉE - """ if not self.cial: raise RuntimeError("Connexion Sage non établie") @@ -7418,7 +4777,6 @@ class SageConnector: logger.info(f"[SUPPRESSION CONTACT] CT_No={contact_numero}") logger.info("=" * 80) - # Récupérer le nom du contact via SQL logger.info("[1] Recuperation infos contact") nom_contact = None prenom_contact = None @@ -7443,7 +4801,6 @@ class SageConnector: except Exception as e: raise ValueError(f"Contact introuvable: {e}") - # Charger le contact via FactoryDossierContact logger.info("[2] Chargement du contact") factory_dossier = self.cial.CptaApplication.FactoryDossierContact @@ -7460,7 +4817,6 @@ class SageConnector: except Exception as e: raise ValueError(f"Contact introuvable: {e}") - # Supprimer logger.info("[3] Suppression") try: contact.Remove() @@ -7496,119 +4852,6 @@ class SageConnector: logger.error(f"[ERREUR] {e}", exc_info=True) raise RuntimeError(f"Erreur technique: {e}") - def _contact_to_dict(self, contact, numero_client=None, contact_numero=None, n_contact=None) -> Dict: - """ - Convertit un objet COM Contact (IBOTiersContact3) en dictionnaire - - Args: - contact: Objet COM contact - numero_client: Code du client (optionnel) - contact_numero: CT_No du contact (optionnel) - n_contact: N_Contact du contact (optionnel) - """ - try: - # Civilité - civilite_code = getattr(contact, "Civilite", None) - civilite_map = {0: "M.", 1: "Mme", 2: "Mlle", 3: "Société"} - civilite = civilite_map.get(civilite_code) if civilite_code is not None else None - - # Coordonnées via Telecom - telephone = None - portable = None - telecopie = None - email = None - - if hasattr(contact, 'Telecom'): - 
try: - telecom = contact.Telecom - telephone = self._safe_strip(getattr(telecom, "Telephone", None)) - portable = self._safe_strip(getattr(telecom, "Portable", None)) - telecopie = self._safe_strip(getattr(telecom, "Telecopie", None)) - email = self._safe_strip(getattr(telecom, "EMail", None)) - except: - pass - - return { - "numero": numero_client, - "contact_numero": contact_numero, - "n_contact": n_contact or contact_numero, - "civilite": civilite, - "nom": self._safe_strip(getattr(contact, "Nom", None)), - "prenom": self._safe_strip(getattr(contact, "Prenom", None)), - "fonction": self._safe_strip(getattr(contact, "Fonction", None)), - "service_code": getattr(contact, "ServiceContact", None), - "telephone": telephone, - "portable": portable, - "telecopie": telecopie, - "email": email, - "facebook": self._safe_strip(getattr(contact, "Facebook", None)), - "linkedin": self._safe_strip(getattr(contact, "LinkedIn", None)), - "skype": self._safe_strip(getattr(contact, "Skype", None)), - } - except Exception as e: - logger.warning(f"Erreur conversion contact: {e}") - return {} - - - def _row_to_contact_dict(self, row) -> Dict: - """Convertit une ligne SQL en dictionnaire contact""" - civilite_code = row.CT_Civilite - civilite_map = {0: "M.", 1: "Mme", 2: "Mlle", 3: "Société"} - - return { - "numero": self._safe_strip(row.CT_Num), - "contact_numero": row.CT_No, - "n_contact": row.N_Contact, - "civilite": civilite_map.get(civilite_code) if civilite_code is not None else None, - "nom": self._safe_strip(row.CT_Nom), - "prenom": self._safe_strip(row.CT_Prenom), - "fonction": self._safe_strip(row.CT_Fonction), - "service_code": row.N_Service, - "telephone": self._safe_strip(row.CT_Telephone), - "portable": self._safe_strip(row.CT_TelPortable), - "telecopie": self._safe_strip(row.CT_Telecopie), - "email": self._safe_strip(row.CT_EMail), - "facebook": self._safe_strip(row.CT_Facebook), - "linkedin": self._safe_strip(row.CT_LinkedIn), - "skype": self._safe_strip(row.CT_Skype), - } - - - def _clean_str(self, value, max_len: int) -> str: - """Nettoie et tronque une chaîne""" - if value is None or str(value).lower() in ('none', 'null', ''): - return "" - return str(value)[:max_len].strip() - - - def _safe_int(self, value, default=None): - """Conversion sécurisée en entier""" - if value is None: - return default - try: - return int(value) - except (ValueError, TypeError): - return default - - - def _try_set_attribute(self, obj, attr_name, value, variants=None): - """Essaie de définir un attribut avec plusieurs variantes""" - if variants is None: - variants = [attr_name] - else: - variants = [attr_name] + variants - - for variant in variants: - try: - if hasattr(obj, variant): - setattr(obj, variant, value) - return True - except Exception as e: - logger.debug(f" {variant} échec: {str(e)[:50]}") - - return False - - def creer_client(self, client_data: Dict) -> Dict: """ Creation client Sage - Version corrigée pour erreur cohérence @@ -8578,7 +5821,7 @@ class SageConnector: if not champs_modifies: logger.warning("Aucun champ à modifier") - return self._extraire_client(client) + return _extraire_client(client) logger.info("=" * 80) logger.info(f"[WRITE] {len(champs_modifies)} champs modifiés:") @@ -8606,7 +5849,7 @@ class SageConnector: logger.info(f"[SUCCES] CLIENT MODIFIÉ: {code} ({len(champs_modifies)} champs)") logger.info("=" * 80) - return self._extraire_client(client) + return _extraire_client(client) except ValueError as e: logger.error(f"[ERREUR VALIDATION] {e}") @@ -8616,17 +5859,6 @@ class 
SageConnector: raise RuntimeError(f"Erreur technique: {e}") def creer_commande_enrichi(self, commande_data: dict) -> Dict: - """ - Crée une commande dans Sage avec support des dates. - - Args: - commande_data: dict contenant: - - client: {code: str} - - date_commande: str ou date - - date_livraison: str ou date (optionnel) - - reference: str (optionnel) - - lignes: list[dict] - """ if not self.cial: raise RuntimeError("Connexion Sage non établie") @@ -8679,7 +5911,7 @@ class SageConnector: f"Client {commande_data['client']['code']} introuvable" ) - client_obj = self._cast_client(persist_client) + client_obj = _cast_client(persist_client) if not client_obj: raise ValueError(f"Impossible de charger le client") @@ -9415,7 +6647,7 @@ class SageConnector: f"Client {livraison_data['client']['code']} introuvable" ) - client_obj = self._cast_client(persist_client) + client_obj = _cast_client(persist_client) if not client_obj: raise ValueError(f"Impossible de charger le client") @@ -10070,7 +7302,7 @@ class SageConnector: f"Client {avoir_data['client']['code']} introuvable" ) - client_obj = self._cast_client(persist_client) + client_obj = _cast_client(persist_client) if not client_obj: raise ValueError(f"Impossible de charger le client") @@ -10800,7 +8032,7 @@ class SageConnector: f"Client {facture_data['client']['code']} introuvable" ) - client_obj = self._cast_client(persist_client) + client_obj = _cast_client(persist_client) if not client_obj: raise ValueError(f"Impossible de charger le client") @@ -11677,7 +8909,7 @@ class SageConnector: row = cursor.fetchone() if row: - article_modele_ref = self._safe_strip(row.AR_Ref) + article_modele_ref = _safe_strip(row.AR_Ref) logger.info( f" [SQL] Article modèle trouvé : {article_modele_ref}" ) @@ -11756,7 +8988,7 @@ class SageConnector: row = cursor.fetchone() if row: - famille_code_exact = self._safe_strip( + famille_code_exact = _safe_strip( row.FA_CodeFamille ) famille_existe_sql = True @@ -12036,7 +9268,7 @@ class SageConnector: stocks_par_depot.append( { - "depot_code": self._safe_strip( + "depot_code": _safe_strip( depot_row[0] ), "quantite": qte, @@ -12068,7 +9300,7 @@ class SageConnector: logger.info("[EXTRACTION] Extraction complète de l'article créé...") - resultat = self._extraire_article(article_cree) + resultat = _extraire_article(article_cree) if not resultat: resultat = { @@ -12205,7 +9437,7 @@ class SageConnector: row = cursor.fetchone() if row: - famille_code_exact = self._safe_strip( + famille_code_exact = _safe_strip( row.FA_CodeFamille ) famille_type = row.FA_Type if len(row) > 1 else 0 @@ -12391,7 +9623,7 @@ class SageConnector: if not champs_modifies: logger.warning("[ARTICLE] Aucun champ à modifier") - return self._extraire_article(article) + return _extraire_article(article) logger.info( f"[ARTICLE] Champs à modifier : {', '.join(champs_modifies)}" @@ -12424,7 +9656,7 @@ class SageConnector: f"[ARTICLE] MODIFIÉ : {reference} ({len(champs_modifies)} champs)" ) - resultat = self._extraire_article(article) + resultat = _extraire_article(article) if not resultat: resultat = { @@ -13047,25 +10279,15 @@ class SageConnector: familles.append(famille) type_msg = "DÉTAIL uniquement" if not inclure_totaux else "TOUS types" - logger.info(f"✓ {len(familles)} familles chargées ({type_msg})") + logger.info(f" {len(familles)} familles chargées ({type_msg})") return familles except Exception as e: logger.error(f"Erreur SQL familles: {e}", exc_info=True) raise RuntimeError(f"Erreur lecture familles: {str(e)}") - def lire_famille(self, code: str) 
-> Dict: - """ - Lit une seule famille - même structure que lister_toutes_familles - - Args: - code: Code de la famille à lire - - Returns: - Dict avec la structure identique à lister_toutes_familles - """ try: with self._get_sql_connection() as conn: cursor = conn.cursor() @@ -13240,7 +10462,6 @@ class SageConnector: logger.error(f"Erreur SQL famille: {e}", exc_info=True) raise RuntimeError(f"Erreur lecture famille: {str(e)}") - def creer_entree_stock(self, entree_data: Dict) -> Dict: try: with self._com_context(), self._lock_com: @@ -14034,7 +11255,8 @@ class SageConnector: def creer_sortie_stock(self, sortie_data: Dict) -> Dict: try: - with self._com_context(), self._lock_com: + with self._com_context(), self._lock_com, self._get_sql_connection() as conn: + cursor = conn.cursor() logger.info(f"[STOCK] === CRÉATION SORTIE STOCK ===") logger.info(f"[STOCK] {len(sortie_data.get('lignes', []))} ligne(s)") @@ -14097,8 +11319,8 @@ class SageConnector: logger.info(f"[STOCK] Article : {ar_design}") logger.info(f"[STOCK] AR_SuiviStock : {ar_suivi}") - stock_dispo = self.verifier_stock_suffisant( - article_ref, quantite, None + stock_dispo = verifier_stock_suffisant( + article_ref, quantite, cursor, None ) if not stock_dispo["suffisant"]: raise ValueError( @@ -14367,45 +11589,74 @@ class SageConnector: except Exception as e: logger.error(f"[MOUVEMENT] Erreur : {e}", exc_info=True) raise ValueError(f"Erreur lecture mouvement : {str(e)}") - - def verifier_stock_suffisant(self, article_ref, quantite, depot=None): - """Version thread-safe avec lock SQL""" + + def lister_tous_tiers( + self, + type_tiers: Optional[str] = None, + filtre: str = "" + ) -> List[Dict]: try: with self._get_sql_connection() as conn: cursor = conn.cursor() - - cursor.execute("SET TRANSACTION ISOLATION LEVEL SERIALIZABLE") - cursor.execute("BEGIN TRANSACTION") - - try: - cursor.execute( - """ - SELECT SUM(AS_QteSto) - FROM F_ARTSTOCK WITH (UPDLOCK, ROWLOCK) - WHERE AR_Ref = ? - """, - (article_ref.upper(),), - ) - - row = cursor.fetchone() - stock_dispo = float(row[0]) if row and row[0] else 0.0 - - suffisant = stock_dispo >= quantite - - cursor.execute("COMMIT") - - return { - "suffisant": suffisant, - "stock_disponible": stock_dispo, - "quantite_demandee": quantite, - } - - except: - cursor.execute("ROLLBACK") - raise - + + query = _build_tiers_select_query() + query += " FROM F_COMPTET WHERE 1=1" + + params = [] + + if type_tiers and type_tiers != "all": + if type_tiers == "prospect": + query += " AND CT_Type = 0 AND CT_Prospect = 1" + elif type_tiers == "client": + query += " AND CT_Type = 0 AND CT_Prospect = 0" + elif type_tiers == "fournisseur": + query += " AND CT_Type = 1" + + if filtre: + query += " AND (CT_Num LIKE ? 
OR CT_Intitule LIKE ?)" + params.extend([f"%{filtre}%", f"%{filtre}%"]) + + query += " ORDER BY CT_Intitule" + + cursor.execute(query, params) + rows = cursor.fetchall() + + tiers_list = [] + for row in rows: + tiers = _row_to_tiers_dict(row) + tiers["contacts"] = self._get_contacts_client(row.CT_Num, conn) + tiers_list.append(tiers) + + logger.info(f" SQL: {len(tiers_list)} tiers retournés (type={type_tiers}, filtre={filtre})") + return tiers_list + except Exception as e: - logger.error(f"Erreur vérification stock: {e}") - raise + logger.error(f" Erreur SQL tiers: {e}") + raise RuntimeError(f"Erreur lecture tiers: {str(e)}") + + def lire_tiers(self, code: str) -> Optional[Dict]: + """Lit un tiers (client/fournisseur/prospect) par code""" + try: + with self._get_sql_connection() as conn: + cursor = conn.cursor() + + query = _build_tiers_select_query() + query += " FROM F_COMPTET WHERE CT_Num = ?" + + cursor.execute(query, (code.upper(),)) + row = cursor.fetchone() + + if not row: + return None + + tiers = _row_to_tiers_dict(row) + tiers["contacts"] = self._get_contacts_client(row.CT_Num, conn) + + logger.info(f" SQL: Tiers {code} lu avec succès") + return tiers + + except Exception as e: + logger.error(f" Erreur SQL tiers {code}: {e}") + return None diff --git a/test.py b/test.py index 18a7022..65faaf9 100644 --- a/test.py +++ b/test.py @@ -35,7 +35,7 @@ def diagnostic_complet_crystal(): for chemin in chemins_installation: if os.path.exists(chemin): - print(f" ✅ Dossier trouvé : {chemin}") + print(f" Dossier trouvé : {chemin}") crystal_trouve = True chemin_crystal = chemin @@ -54,16 +54,16 @@ def diagnostic_complet_crystal(): print(f" Taille : {size_mb:.1f} MB") if size_mb < 100: - print(f" ⚠️ Taille suspecte (attendu: 300-800 MB)") + print(f" Taille suspecte (attendu: 300-800 MB)") problemes.append("Installation incomplète (taille trop petite)") except Exception as e: - print(f" ⚠️ Impossible de calculer taille : {e}") + print(f" Impossible de calculer taille : {e}") else: - print(f" ❌ Absent : {chemin}") + print(f" Absent : {chemin}") if not crystal_trouve: - print("\n❌ PROBLÈME MAJEUR : Crystal Reports n'est pas installé") + print("\n PROBLÈME MAJEUR : Crystal Reports n'est pas installé") problemes.append("Crystal Reports non installé") solutions.append("Télécharger et installer SAP Crystal Reports Runtime") return {"problemes": problemes, "solutions": solutions, "installe": False} @@ -89,18 +89,18 @@ def diagnostic_complet_crystal(): if dll_nom.lower() in [f.lower() for f in files]: dll_path = os.path.join(root, dll_nom) dll_trouvees[dll_nom] = dll_path - print(f" ✅ {dll_nom}") + print(f" {dll_nom}") print(f" {dll_path}") trouve = True break if not trouve: - print(f" ❌ {dll_nom} - {description}") + print(f" {dll_nom} - {description}") if "CRITIQUE" in description: problemes.append(f"{dll_nom} manquante") if len(dll_trouvees) < 2: - print("\n ⚠️ Trop peu de DLL trouvées - Installation corrompue") + print("\n Trop peu de DLL trouvées - Installation corrompue") problemes.append("DLL manquantes - Installation corrompue") solutions.append("Réinstaller Crystal Reports Runtime") @@ -123,7 +123,7 @@ def diagnostic_complet_crystal(): try: # Vérifier existence key = winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, prog_id) - print(f" ✅ {prog_id}") + print(f" {prog_id}") # Lire le CLSID try: @@ -144,28 +144,28 @@ def diagnostic_complet_crystal(): # Vérifier que la DLL existe if not os.path.exists(dll_path): - print(f" ❌ DLL INTROUVABLE: {dll_path}") + print(f" DLL INTROUVABLE: {dll_path}") 
problemes.append(f"{prog_id}: DLL manquante ({dll_path})") else: prog_ids_trouves.append(prog_id) except: - print(f" ⚠️ InprocServer32 non trouvé") + print(f" InprocServer32 non trouvé") except: - print(f" ❌ CLSID {clsid} non trouvé dans registre") + print(f" CLSID {clsid} non trouvé dans registre") problemes.append(f"{prog_id}: CLSID cassé") except: - print(f" ⚠️ Pas de CLSID") + print(f" Pas de CLSID") winreg.CloseKey(key) except: - print(f" ❌ {prog_id}") + print(f" {prog_id}") if not prog_ids_trouves: - print("\n ⚠️ Aucun ProgID valide - Enregistrement COM échoué") + print("\n Aucun ProgID valide - Enregistrement COM échoué") problemes.append("ProgID non enregistrés correctement") solutions.append("Réenregistrer les DLL Crystal avec regsvr32") @@ -196,7 +196,7 @@ def diagnostic_complet_crystal(): print(f" Crystal : {crystal_arch}") if python_arch != crystal_arch: - print(f"\n ❌ INCOMPATIBILITÉ ARCHITECTURE") + print(f"\n INCOMPATIBILITÉ ARCHITECTURE") print(f" Python {python_arch} ne peut pas utiliser Crystal {crystal_arch}") problemes.append(f"Incompatibilité: Python {python_arch} vs Crystal {crystal_arch}") solutions.append(f"Réinstaller Crystal en version {python_arch}") @@ -224,14 +224,14 @@ def diagnostic_complet_crystal(): for service in services_crystal_attendus: if service.lower() in result.stdout.lower(): services_trouves.append(service) - print(f" ✅ Service trouvé: {service}") + print(f" Service trouvé: {service}") if not services_trouves: - print(f" ⚠️ Aucun service Crystal trouvé") + print(f" Aucun service Crystal trouvé") print(f" (Normal pour Runtime léger)") except Exception as e: - print(f" ⚠️ Impossible de vérifier services: {e}") + print(f" Impossible de vérifier services: {e}") # ========================================== # 6. TEST INSTANCIATION COM DÉTAILLÉ @@ -244,7 +244,7 @@ def diagnostic_complet_crystal(): print(f"\n Test: {prog_id}") try: obj = win32com.client.Dispatch(prog_id) - print(f" ✅ Instanciation RÉUSSIE") + print(f" Instanciation RÉUSSIE") # Lister méthodes disponibles print(f" Méthodes disponibles:") @@ -260,7 +260,7 @@ def diagnostic_complet_crystal(): } except Exception as e: - print(f" ❌ Échec: {e}") + print(f" Échec: {e}") print(f" Type erreur: {type(e).__name__}") print(f" Code: {e.args if hasattr(e, 'args') else 'N/A'}") @@ -277,7 +277,7 @@ def diagnostic_complet_crystal(): print(f"🔧 Architecture: Python {python_arch}, Crystal {crystal_arch or 'INCONNUE'}") if problemes: - print(f"\n❌ PROBLÈMES DÉTECTÉS ({len(problemes)}):") + print(f"\n PROBLÈMES DÉTECTÉS ({len(problemes)}):") for i, pb in enumerate(problemes, 1): print(f" {i}. 
{pb}") diff --git a/utils/articles/articles_data_com.py b/utils/articles/articles_data_com.py new file mode 100644 index 0000000..c50f001 --- /dev/null +++ b/utils/articles/articles_data_com.py @@ -0,0 +1,244 @@ +import logging + +logger = logging.getLogger(__name__) + +def _extraire_article(article_obj): + try: + data = { + "reference": getattr(article_obj, "AR_Ref", "").strip(), + "designation": getattr(article_obj, "AR_Design", "").strip(), + } + + data["code_ean"] = "" + data["code_barre"] = "" + + try: + code_barre = getattr(article_obj, "AR_CodeBarre", "").strip() + if code_barre: + data["code_ean"] = code_barre + data["code_barre"] = code_barre + + if not data["code_ean"]: + code_barre1 = getattr(article_obj, "AR_CodeBarre1", "").strip() + if code_barre1: + data["code_ean"] = code_barre1 + data["code_barre"] = code_barre1 + except: + pass + + try: + data["prix_vente"] = float(getattr(article_obj, "AR_PrixVen", 0.0)) + except: + data["prix_vente"] = 0.0 + + try: + data["prix_achat"] = float(getattr(article_obj, "AR_PrixAch", 0.0)) + except: + data["prix_achat"] = 0.0 + + try: + data["prix_revient"] = float( + getattr(article_obj, "AR_PrixRevient", 0.0) + ) + except: + data["prix_revient"] = 0.0 + + try: + data["stock_reel"] = float(getattr(article_obj, "AR_Stock", 0.0)) + except: + data["stock_reel"] = 0.0 + + try: + data["stock_mini"] = float(getattr(article_obj, "AR_StockMini", 0.0)) + except: + data["stock_mini"] = 0.0 + + try: + data["stock_maxi"] = float(getattr(article_obj, "AR_StockMaxi", 0.0)) + except: + data["stock_maxi"] = 0.0 + + try: + data["stock_reserve"] = float(getattr(article_obj, "AR_QteCom", 0.0)) + except: + data["stock_reserve"] = 0.0 + + try: + data["stock_commande"] = float( + getattr(article_obj, "AR_QteComFou", 0.0) + ) + except: + data["stock_commande"] = 0.0 + + try: + data["stock_disponible"] = data["stock_reel"] - data["stock_reserve"] + except: + data["stock_disponible"] = data["stock_reel"] + + try: + commentaire = getattr(article_obj, "AR_Commentaire", "").strip() + data["description"] = commentaire + except: + data["description"] = "" + + try: + design2 = getattr(article_obj, "AR_Design2", "").strip() + data["designation_complementaire"] = design2 + except: + data["designation_complementaire"] = "" + + try: + type_art = getattr(article_obj, "AR_Type", 0) + data["type_article"] = type_art + data["type_article_libelle"] = { + 0: "Article", + 1: "Prestation", + 2: "Divers", + }.get(type_art, "Inconnu") + except: + data["type_article"] = 0 + data["type_article_libelle"] = "Article" + + try: + famille_code = getattr(article_obj, "FA_CodeFamille", "").strip() + data["famille_code"] = famille_code + + if famille_code: + try: + famille_obj = getattr(article_obj, "Famille", None) + if famille_obj: + famille_obj.Read() + data["famille_libelle"] = getattr( + famille_obj, "FA_Intitule", "" + ).strip() + else: + data["famille_libelle"] = "" + except: + data["famille_libelle"] = "" + else: + data["famille_libelle"] = "" + except: + data["famille_code"] = "" + data["famille_libelle"] = "" + + try: + fournisseur_code = getattr(article_obj, "CT_Num", "").strip() + data["fournisseur_principal"] = fournisseur_code + + if fournisseur_code: + try: + fourn_obj = getattr(article_obj, "Fournisseur", None) + if fourn_obj: + fourn_obj.Read() + data["fournisseur_nom"] = getattr( + fourn_obj, "CT_Intitule", "" + ).strip() + else: + data["fournisseur_nom"] = "" + except: + data["fournisseur_nom"] = "" + else: + data["fournisseur_nom"] = "" + except: + 
data["fournisseur_principal"] = "" + data["fournisseur_nom"] = "" + + try: + data["unite_vente"] = getattr(article_obj, "AR_UniteVen", "").strip() + except: + data["unite_vente"] = "" + + try: + data["unite_achat"] = getattr(article_obj, "AR_UniteAch", "").strip() + except: + data["unite_achat"] = "" + + try: + data["poids"] = float(getattr(article_obj, "AR_Poids", 0.0)) + except: + data["poids"] = 0.0 + + try: + data["volume"] = float(getattr(article_obj, "AR_Volume", 0.0)) + except: + data["volume"] = 0.0 + + try: + sommeil = getattr(article_obj, "AR_Sommeil", 0) + data["est_actif"] = sommeil == 0 + data["en_sommeil"] = sommeil == 1 + except: + data["est_actif"] = True + data["en_sommeil"] = False + + try: + tva_code = getattr(article_obj, "TA_Code", "").strip() + data["tva_code"] = tva_code + + try: + tva_obj = getattr(article_obj, "Taxe1", None) + if tva_obj: + tva_obj.Read() + data["tva_taux"] = float(getattr(tva_obj, "TA_Taux", 20.0)) + else: + data["tva_taux"] = 20.0 + except: + data["tva_taux"] = 20.0 + except: + data["tva_code"] = "" + data["tva_taux"] = 20.0 + + try: + date_creation = getattr(article_obj, "AR_DateCreate", None) + data["date_creation"] = str(date_creation) if date_creation else "" + except: + data["date_creation"] = "" + + try: + date_modif = getattr(article_obj, "AR_DateModif", None) + data["date_modification"] = str(date_modif) if date_modif else "" + except: + data["date_modification"] = "" + + return data + + except Exception as e: + logger.error(f" Erreur extraction article: {e}", exc_info=True) + return { + "reference": getattr(article_obj, "AR_Ref", "").strip(), + "designation": getattr(article_obj, "AR_Design", "").strip(), + "prix_vente": 0.0, + "stock_reel": 0.0, + "code_ean": "", + "description": "", + "designation_complementaire": "", + "prix_achat": 0.0, + "prix_revient": 0.0, + "stock_mini": 0.0, + "stock_maxi": 0.0, + "stock_reserve": 0.0, + "stock_commande": 0.0, + "stock_disponible": 0.0, + "code_barre": "", + "type_article": 0, + "type_article_libelle": "Article", + "famille_code": "", + "famille_libelle": "", + "fournisseur_principal": "", + "fournisseur_nom": "", + "unite_vente": "", + "unite_achat": "", + "poids": 0.0, + "volume": 0.0, + "est_actif": True, + "en_sommeil": False, + "tva_code": "", + "tva_taux": 20.0, + "date_creation": "", + "date_modification": "", + } + + +__all__ = [ + "_extraire_article", +] \ No newline at end of file diff --git a/utils/articles/articles_data_sql.py b/utils/articles/articles_data_sql.py new file mode 100644 index 0000000..f9aada4 --- /dev/null +++ b/utils/articles/articles_data_sql.py @@ -0,0 +1,1421 @@ +from typing import Dict, List, Optional, Any +import win32com.client +import logging +from utils.functions.functions import _safe_strip + +logger = logging.getLogger(__name__) + +def _enrichir_stock_emplacements(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement stock emplacements...") + + references = [a["reference"] for a in articles if a["reference"]] + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + query = f""" + SELECT + AR_Ref, + DE_No, + DP_No, + AE_QteSto, + AE_QtePrepa, + AE_QteAControler, + cbCreation, + cbModification + FROM F_ARTSTOCKEMPL + WHERE AR_Ref IN ({placeholders}) + ORDER BY AR_Ref, DE_No, DP_No + """ + + cursor.execute(query, references) + rows = cursor.fetchall() + + emplacements_map = {} + for row in rows: + ref = _safe_strip(row[0]) + if not ref: + continue + + if ref not in emplacements_map: + 
emplacements_map[ref] = [] + + emplacements_map[ref].append({ + "depot": _safe_strip(row[1]), + "emplacement": _safe_strip(row[2]), + "qte_stockee": float(row[3]) if row[3] else 0.0, + "qte_preparee": float(row[4]) if row[4] else 0.0, + "qte_a_controler": float(row[5]) if row[5] else 0.0, + "date_creation": row[6], + "date_modification": row[7], + }) + + for article in articles: + article["emplacements"] = emplacements_map.get(article["reference"], []) + article["nb_emplacements"] = len(article["emplacements"]) + + logger.info(f" {len(emplacements_map)} articles avec emplacements") + return articles + + except Exception as e: + logger.error(f" Erreur stock emplacements: {e}") + for article in articles: + article["emplacements"] = [] + article["nb_emplacements"] = 0 + return articles + +def _enrichir_gammes_articles(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement gammes articles...") + + references = [a["reference"] for a in articles if a["reference"]] + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + query = f""" + SELECT + AR_Ref, + AG_No, + EG_Enumere, + AG_Type, + cbCreation, + cbModification + FROM F_ARTGAMME + WHERE AR_Ref IN ({placeholders}) + ORDER BY AR_Ref, AG_No, EG_Enumere + """ + + cursor.execute(query, references) + rows = cursor.fetchall() + + gammes_map = {} + for row in rows: + ref = _safe_strip(row[0]) + if not ref: + continue + + if ref not in gammes_map: + gammes_map[ref] = [] + + gammes_map[ref].append({ + "numero_gamme": int(row[1]) if row[1] else 0, + "enumere": _safe_strip(row[2]), + "type_gamme": int(row[3]) if row[3] else 0, + "date_creation": row[4], + "date_modification": row[5], + }) + + for article in articles: + article["gammes"] = gammes_map.get(article["reference"], []) + article["nb_gammes"] = len(article["gammes"]) + + logger.info(f" {len(gammes_map)} articles avec gammes") + return articles + + except Exception as e: + logger.error(f" Erreur gammes: {e}") + for article in articles: + article["gammes"] = [] + article["nb_gammes"] = 0 + return articles + +def _enrichir_tarifs_clients(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement tarifs clients...") + + references = [a["reference"] for a in articles if a["reference"]] + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + query = f""" + SELECT + AR_Ref, + AC_Categorie, + CT_Num, + AC_PrixVen, + AC_Coef, + AC_PrixTTC, + AC_Arrondi, + AC_QteMont, + EG_Champ, + AC_PrixDev, + AC_Devise, + AC_Remise, + AC_Calcul, + AC_TypeRem, + AC_RefClient, + AC_CoefNouv, + AC_PrixVenNouv, + AC_PrixDevNouv, + AC_RemiseNouv, + AC_DateApplication, + cbCreation, + cbModification + FROM F_ARTCLIENT + WHERE AR_Ref IN ({placeholders}) + ORDER BY AR_Ref, AC_Categorie, CT_Num + """ + + cursor.execute(query, references) + rows = cursor.fetchall() + + tarifs_map = {} + for row in rows: + ref = _safe_strip(row[0]) + if not ref: + continue + + if ref not in tarifs_map: + tarifs_map[ref] = [] + + tarifs_map[ref].append({ + "categorie": int(row[1]) if row[1] else 0, + "client_num": _safe_strip(row[2]), + "prix_vente": float(row[3]) if row[3] else 0.0, + "coefficient": float(row[4]) if row[4] else 0.0, + "prix_ttc": float(row[5]) if row[5] else 0.0, + "arrondi": float(row[6]) if row[6] else 0.0, + "qte_montant": float(row[7]) if row[7] else 0.0, + "enumere_gamme": int(row[8]) if row[8] else 0, + "prix_devise": float(row[9]) if row[9] else 0.0, + "devise": int(row[10]) if row[10] else 
0, + "remise": float(row[11]) if row[11] else 0.0, + "mode_calcul": int(row[12]) if row[12] else 0, + "type_remise": int(row[13]) if row[13] else 0, + "ref_client": _safe_strip(row[14]), + "coef_nouveau": float(row[15]) if row[15] else 0.0, + "prix_vente_nouveau": float(row[16]) if row[16] else 0.0, + "prix_devise_nouveau": float(row[17]) if row[17] else 0.0, + "remise_nouvelle": float(row[18]) if row[18] else 0.0, + "date_application": row[19], + "date_creation": row[20], + "date_modification": row[21], + }) + + for article in articles: + article["tarifs_clients"] = tarifs_map.get(article["reference"], []) + article["nb_tarifs_clients"] = len(article["tarifs_clients"]) + + logger.info(f" {len(tarifs_map)} articles avec tarifs clients") + return articles + + except Exception as e: + logger.error(f" Erreur tarifs clients: {e}") + for article in articles: + article["tarifs_clients"] = [] + article["nb_tarifs_clients"] = 0 + return articles + +def _enrichir_nomenclature(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement nomenclature...") + + references = [a["reference"] for a in articles if a["reference"]] + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + query = f""" + SELECT + AR_Ref, + AT_Operation, + RP_Code, + AT_Temps, + AT_Type, + AT_Description, + AT_Ordre, + AG_No1Comp, + AG_No2Comp, + AT_TypeRessource, + AT_Chevauche, + AT_Demarre, + AT_OperationChevauche, + AT_ValeurChevauche, + AT_TypeChevauche, + cbCreation, + cbModification + FROM F_ARTCOMPO + WHERE AR_Ref IN ({placeholders}) + ORDER BY AR_Ref, AT_Ordre, AT_Operation + """ + + cursor.execute(query, references) + rows = cursor.fetchall() + + composants_map = {} + for row in rows: + ref = _safe_strip(row[0]) + if not ref: + continue + + if ref not in composants_map: + composants_map[ref] = [] + + composants_map[ref].append({ + "operation": _safe_strip(row[1]), + "code_ressource": _safe_strip(row[2]), + "temps": float(row[3]) if row[3] else 0.0, + "type": int(row[4]) if row[4] else 0, + "description": _safe_strip(row[5]), + "ordre": int(row[6]) if row[6] else 0, + "gamme_1_comp": int(row[7]) if row[7] else 0, + "gamme_2_comp": int(row[8]) if row[8] else 0, + "type_ressource": int(row[9]) if row[9] else 0, + "chevauche": int(row[10]) if row[10] else 0, + "demarre": int(row[11]) if row[11] else 0, + "operation_chevauche": _safe_strip(row[12]), + "valeur_chevauche": float(row[13]) if row[13] else 0.0, + "type_chevauche": int(row[14]) if row[14] else 0, + "date_creation": row[15], + "date_modification": row[16], + }) + + for article in articles: + article["composants"] = composants_map.get(article["reference"], []) + article["nb_composants"] = len(article["composants"]) + + logger.info(f" {len(composants_map)} articles avec nomenclature") + return articles + + except Exception as e: + logger.error(f" Erreur nomenclature: {e}") + for article in articles: + article["composants"] = [] + article["nb_composants"] = 0 + return articles + +def _enrichir_compta_articles(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement comptabilité articles...") + + references = [a["reference"] for a in articles if a["reference"]] + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + query = f""" + SELECT + AR_Ref, + ACP_Type, + ACP_Champ, + ACP_ComptaCPT_CompteG, + ACP_ComptaCPT_CompteA, + ACP_ComptaCPT_Taxe1, + ACP_ComptaCPT_Taxe2, + ACP_ComptaCPT_Taxe3, + ACP_ComptaCPT_Date1, + ACP_ComptaCPT_Date2, + 
ACP_ComptaCPT_Date3, + ACP_ComptaCPT_TaxeAnc1, + ACP_ComptaCPT_TaxeAnc2, + ACP_ComptaCPT_TaxeAnc3, + ACP_TypeFacture, + cbCreation, + cbModification + FROM F_ARTCOMPTA + WHERE AR_Ref IN ({placeholders}) + ORDER BY AR_Ref, ACP_Type, ACP_Champ + """ + + cursor.execute(query, references) + rows = cursor.fetchall() + + compta_map = {} + for row in rows: + ref = _safe_strip(row[0]) + if not ref: + continue + + if ref not in compta_map: + compta_map[ref] = {"vente": [], "achat": [], "stock": []} + + type_compta = int(row[1]) if row[1] else 0 + type_key = {0: "vente", 1: "achat", 2: "stock"}.get(type_compta, "autre") + + compta_entry = { + "champ": int(row[2]) if row[2] else 0, + "compte_general": _safe_strip(row[3]), + "compte_auxiliaire": _safe_strip(row[4]), + "taxe_1": _safe_strip(row[5]), + "taxe_2": _safe_strip(row[6]), + "taxe_3": _safe_strip(row[7]), + "taxe_date_1": row[8], + "taxe_date_2": row[9], + "taxe_date_3": row[10], + "taxe_anc_1": _safe_strip(row[11]), + "taxe_anc_2": _safe_strip(row[12]), + "taxe_anc_3": _safe_strip(row[13]), + "type_facture": int(row[14]) if row[14] else 0, + "date_creation": row[15], + "date_modification": row[16], + } + + if type_key in compta_map[ref]: + compta_map[ref][type_key].append(compta_entry) + + for article in articles: + compta = compta_map.get(article["reference"], {"vente": [], "achat": [], "stock": []}) + article["compta_vente"] = compta["vente"] + article["compta_achat"] = compta["achat"] + article["compta_stock"] = compta["stock"] + + logger.info(f" {len(compta_map)} articles avec compta spécifique") + return articles + + except Exception as e: + logger.error(f" Erreur comptabilité articles: {e}") + for article in articles: + article["compta_vente"] = [] + article["compta_achat"] = [] + article["compta_stock"] = [] + return articles + +def _enrichir_fournisseurs_multiples(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement fournisseurs multiples...") + + references = [a["reference"] for a in articles if a["reference"]] + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + query = f""" + SELECT + AR_Ref, + CT_Num, + AF_RefFourniss, + AF_PrixAch, + AF_Unite, + AF_Conversion, + AF_DelaiAppro, + AF_Garantie, + AF_Colisage, + AF_QteMini, + AF_QteMont, + EG_Champ, + AF_Principal, + AF_PrixDev, + AF_Devise, + AF_Remise, + AF_ConvDiv, + AF_TypeRem, + AF_CodeBarre, + AF_PrixAchNouv, + AF_PrixDevNouv, + AF_RemiseNouv, + AF_DateApplication, + cbCreation, + cbModification + FROM F_ARTFOURNISS + WHERE AR_Ref IN ({placeholders}) + ORDER BY AR_Ref, AF_Principal DESC, CT_Num + """ + + cursor.execute(query, references) + rows = cursor.fetchall() + + fournisseurs_map = {} + for row in rows: + ref = _safe_strip(row[0]) + if not ref: + continue + + if ref not in fournisseurs_map: + fournisseurs_map[ref] = [] + + fournisseurs_map[ref].append({ + "fournisseur_num": _safe_strip(row[1]), + "ref_fournisseur": _safe_strip(row[2]), + "prix_achat": float(row[3]) if row[3] else 0.0, + "unite": _safe_strip(row[4]), + "conversion": float(row[5]) if row[5] else 0.0, + "delai_appro": int(row[6]) if row[6] else 0, + "garantie": int(row[7]) if row[7] else 0, + "colisage": int(row[8]) if row[8] else 0, + "qte_mini": float(row[9]) if row[9] else 0.0, + "qte_montant": float(row[10]) if row[10] else 0.0, + "enumere_gamme": int(row[11]) if row[11] else 0, + "est_principal": bool(row[12]), + "prix_devise": float(row[13]) if row[13] else 0.0, + "devise": int(row[14]) if row[14] else 0, + "remise": 
float(row[15]) if row[15] else 0.0, + "conversion_devise": float(row[16]) if row[16] else 0.0, + "type_remise": int(row[17]) if row[17] else 0, + "code_barre_fournisseur": _safe_strip(row[18]), + "prix_achat_nouveau": float(row[19]) if row[19] else 0.0, + "prix_devise_nouveau": float(row[20]) if row[20] else 0.0, + "remise_nouvelle": float(row[21]) if row[21] else 0.0, + "date_application": row[22], + "date_creation": row[23], + "date_modification": row[24], + }) + + for article in articles: + article["fournisseurs"] = fournisseurs_map.get(article["reference"], []) + article["nb_fournisseurs"] = len(article["fournisseurs"]) + + logger.info(f" {len(fournisseurs_map)} articles avec fournisseurs multiples") + return articles + + except Exception as e: + logger.error(f" Erreur fournisseurs multiples: {e}") + for article in articles: + article["fournisseurs"] = [] + article["nb_fournisseurs"] = 0 + return articles + +def _enrichir_depots_details(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement détails dépôts...") + + query = """ + SELECT + DE_No, + DE_Intitule, + DE_Code, + DE_Adresse, + DE_Complement, + DE_CodePostal, + DE_Ville, + DE_Contact, + DE_Principal, + DE_CatCompta, + DE_Region, + DE_Pays, + DE_EMail, + DE_Telephone, + DE_Telecopie, + DP_NoDefaut, + DE_Exclure + FROM F_DEPOT + """ + + cursor.execute(query) + rows = cursor.fetchall() + + depots_map = {} + for row in rows: + de_no = _safe_strip(row[0]) + if not de_no: + continue + + depots_map[de_no] = { + "depot_num": de_no, + "depot_nom": _safe_strip(row[1]), + "depot_code": _safe_strip(row[2]), + "depot_adresse": _safe_strip(row[3]), + "depot_complement": _safe_strip(row[4]), + "depot_code_postal": _safe_strip(row[5]), + "depot_ville": _safe_strip(row[6]), + "depot_contact": _safe_strip(row[7]), + "depot_est_principal": bool(row[8]), + "depot_categorie_compta": int(row[9]) if row[9] else 0, + "depot_region": _safe_strip(row[10]), + "depot_pays": _safe_strip(row[11]), + "depot_email": _safe_strip(row[12]), + "depot_telephone": _safe_strip(row[13]), + "depot_fax": _safe_strip(row[14]), + "depot_emplacement_defaut": _safe_strip(row[15]), + "depot_exclu": bool(row[16]), + } + + logger.info(f" → {len(depots_map)} dépôts chargés") + + for article in articles: + for empl in article.get("emplacements", []): + depot_num = empl.get("depot") + if depot_num and depot_num in depots_map: + empl.update(depots_map[depot_num]) + + logger.info(f" Emplacements enrichis avec détails dépôts") + return articles + + except Exception as e: + logger.error(f" Erreur détails dépôts: {e}") + return articles + +def _enrichir_emplacements_details(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement détails emplacements...") + + query = """ + SELECT + DE_No, + DP_No, + DP_Code, + DP_Intitule, + DP_Zone, + DP_Type + FROM F_DEPOTEMPL + """ + + cursor.execute(query) + rows = cursor.fetchall() + + emplacements_map = {} + for row in rows: + de_no = _safe_strip(row[0]) + dp_no = _safe_strip(row[1]) + + if not de_no or not dp_no: + continue + + key = f"{de_no}_{dp_no}" + emplacements_map[key] = { + "emplacement_code": _safe_strip(row[2]), + "emplacement_libelle": _safe_strip(row[3]), + "emplacement_zone": _safe_strip(row[4]), + "emplacement_type": int(row[5]) if row[5] else 0, + } + + logger.info(f" → {len(emplacements_map)} emplacements détaillés chargés") + + for article in articles: + for empl in article.get("emplacements", []): + depot = empl.get("depot") + emplacement = empl.get("emplacement") + 
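# --- Pattern sketch (illustration only, not part of the patch) -----------
# The _enrichir_* helpers in this module all share one shape: a single
# batched SELECT, an index of the rows by key, then an in-place merge into
# the article dicts. enrichir_generique and its key_of/merge parameters are
# hypothetical names used only to show that shape.
from typing import Any, Callable, Dict, List, Sequence

def enrichir_generique(
    articles: List[Dict],
    rows: Sequence[Any],
    key_of: Callable[[Any], str],
    merge: Callable[[Dict, List[Any]], None],
) -> List[Dict]:
    """Index rows by key, then merge each article's matching rows in place."""
    index: Dict[str, List[Any]] = {}
    for row in rows:
        index.setdefault(key_of(row), []).append(row)
    for article in articles:
        merge(article, index.get(article.get("reference", ""), []))
    return articles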
if depot and emplacement: + key = f"{depot}_{emplacement}" + if key in emplacements_map: + empl.update(emplacements_map[key]) + + logger.info(f" Emplacements enrichis avec détails") + return articles + + except Exception as e: + logger.error(f" Erreur détails emplacements: {e}") + return articles + +def _enrichir_gammes_enumeres(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement énumérés gammes...") + + query_pgamme = "SELECT G_Intitule, G_Type FROM P_GAMME ORDER BY G_Type" + cursor.execute(query_pgamme) + pgamme_rows = cursor.fetchall() + + gammes_config = {} + for idx, row in enumerate(pgamme_rows): + gammes_config[idx + 1] = { + "nom": _safe_strip(row[0]), + "type": int(row[1]) if row[1] else 0, + } + + logger.info(f" → Configuration gammes: {gammes_config}") + + query_enum = """ + SELECT + EG_Champ, + EG_Ligne, + EG_Enumere, + EG_BorneSup + FROM F_ENUMGAMME + ORDER BY EG_Champ, EG_Ligne + """ + + cursor.execute(query_enum) + enum_rows = cursor.fetchall() + + enumeres_map = {} + for row in enum_rows: + champ = int(row[0]) if row[0] else 0 + enumere = _safe_strip(row[2]) + + if not enumere: + continue + + key = f"{champ}_{enumere}" + enumeres_map[key] = { + "ligne": int(row[1]) if row[1] else 0, + "enumere": enumere, + "borne_sup": float(row[3]) if row[3] else 0.0, + "gamme_nom": gammes_config.get(champ, {}).get("nom", f"Gamme {champ}"), + } + + logger.info(f" → {len(enumeres_map)} énumérés chargés") + + for article in articles: + for gamme in article.get("gammes", []): + num_gamme = gamme.get("numero_gamme") + enumere = gamme.get("enumere") + + if num_gamme and enumere: + key = f"{num_gamme}_{enumere}" + if key in enumeres_map: + gamme.update(enumeres_map[key]) + + logger.info(f" Gammes enrichies avec énumérés") + return articles + + except Exception as e: + logger.error(f" Erreur énumérés gammes: {e}") + return articles + +def _enrichir_references_enumerees(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement références énumérées...") + + references = [a["reference"] for a in articles if a["reference"]] + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + query = f""" + SELECT + AR_Ref, + AG_No1, + AG_No2, + AE_Ref, + AE_PrixAch, + AE_CodeBarre, + AE_PrixAchNouv, + AE_EdiCode, + AE_Sommeil, + cbCreation, + cbModification + FROM F_ARTENUMREF + WHERE AR_Ref IN ({placeholders}) + ORDER BY AR_Ref, AG_No1, AG_No2 + """ + + cursor.execute(query, references) + rows = cursor.fetchall() + + refs_enum_map = {} + for row in rows: + ref = _safe_strip(row[0]) + if not ref: + continue + + if ref not in refs_enum_map: + refs_enum_map[ref] = [] + + refs_enum_map[ref].append({ + "gamme_1": int(row[1]) if row[1] else 0, + "gamme_2": int(row[2]) if row[2] else 0, + "reference_enumeree": _safe_strip(row[3]), + "prix_achat": float(row[4]) if row[4] else 0.0, + "code_barre": _safe_strip(row[5]), + "prix_achat_nouveau": float(row[6]) if row[6] else 0.0, + "edi_code": _safe_strip(row[7]), + "en_sommeil": bool(row[8]), + "date_creation": row[9], + "date_modification": row[10], + }) + + for article in articles: + article["refs_enumerees"] = refs_enum_map.get(article["reference"], []) + article["nb_refs_enumerees"] = len(article["refs_enumerees"]) + + logger.info(f" {len(refs_enum_map)} articles avec références énumérées") + return articles + + except Exception as e: + logger.error(f" Erreur références énumérées: {e}") + for article in articles: + article["refs_enumerees"] = [] + 
article["nb_refs_enumerees"] = 0 + return articles + +def _enrichir_medias_articles(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement médias articles...") + + references = [a["reference"] for a in articles if a["reference"]] + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + query = f""" + SELECT + AR_Ref, + ME_Commentaire, + ME_Fichier, + ME_TypeMIME, + ME_Origine, + ME_GedId, + cbCreation, + cbModification + FROM F_ARTICLEMEDIA + WHERE AR_Ref IN ({placeholders}) + ORDER BY AR_Ref, cbCreation + """ + + cursor.execute(query, references) + rows = cursor.fetchall() + + medias_map = {} + for row in rows: + ref = _safe_strip(row[0]) + if not ref: + continue + + if ref not in medias_map: + medias_map[ref] = [] + + medias_map[ref].append({ + "commentaire": _safe_strip(row[1]), + "fichier": _safe_strip(row[2]), + "type_mime": _safe_strip(row[3]), + "origine": int(row[4]) if row[4] else 0, + "ged_id": _safe_strip(row[5]), + "date_creation": row[6], + "date_modification": row[7], + }) + + for article in articles: + article["medias"] = medias_map.get(article["reference"], []) + article["nb_medias"] = len(article["medias"]) + + logger.info(f" {len(medias_map)} articles avec médias") + return articles + + except Exception as e: + logger.error(f" Erreur médias: {e}") + for article in articles: + article["medias"] = [] + article["nb_medias"] = 0 + return articles + +def _enrichir_prix_gammes(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement prix par gammes...") + + references = [a["reference"] for a in articles if a["reference"]] + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + query = f""" + SELECT + AR_Ref, + AG_No1, + AG_No2, + AR_PUNet, + AR_CoutStd, + cbCreation, + cbModification + FROM F_ARTPRIX + WHERE AR_Ref IN ({placeholders}) + ORDER BY AR_Ref, AG_No1, AG_No2 + """ + + cursor.execute(query, references) + rows = cursor.fetchall() + + prix_gammes_map = {} + for row in rows: + ref = _safe_strip(row[0]) + if not ref: + continue + + if ref not in prix_gammes_map: + prix_gammes_map[ref] = [] + + prix_gammes_map[ref].append({ + "gamme_1": int(row[1]) if row[1] else 0, + "gamme_2": int(row[2]) if row[2] else 0, + "prix_net": float(row[3]) if row[3] else 0.0, + "cout_standard": float(row[4]) if row[4] else 0.0, + "date_creation": row[5], + "date_modification": row[6], + }) + + for article in articles: + article["prix_gammes"] = prix_gammes_map.get(article["reference"], []) + article["nb_prix_gammes"] = len(article["prix_gammes"]) + + logger.info(f" {len(prix_gammes_map)} articles avec prix par gammes") + return articles + + except Exception as e: + logger.error(f" Erreur prix gammes: {e}") + for article in articles: + article["prix_gammes"] = [] + article["nb_prix_gammes"] = 0 + return articles + +def _enrichir_conditionnements(articles: List[Dict], cursor) -> List[Dict]: + try: + logger.info(f" → Enrichissement conditionnements...") + + query = """ + SELECT + EC_Champ, + EC_Enumere, + EC_Quantite, + EC_EdiCode + FROM F_ENUMCOND + ORDER BY EC_Champ, EC_Enumere + """ + + cursor.execute(query) + rows = cursor.fetchall() + + cond_map = {} + for row in rows: + champ = int(row[0]) if row[0] else 0 + enumere = _safe_strip(row[1]) + + if not enumere: + continue + + key = f"{champ}_{enumere}" + cond_map[key] = { + "champ": champ, + "enumere": enumere, + "quantite": float(row[2]) if row[2] else 0.0, + "edi_code": _safe_strip(row[3]), + } + + 
logger.info(f" → {len(cond_map)} conditionnements chargés") + + for article in articles: + conditionnement = article.get("conditionnement") + if conditionnement: + for key, cond_data in cond_map.items(): + if cond_data["enumere"] == conditionnement: + article["conditionnement_qte"] = cond_data["quantite"] + article["conditionnement_edi"] = cond_data["edi_code"] + break + + logger.info(f" Conditionnements enrichis") + return articles + + except Exception as e: + logger.error(f" Erreur conditionnements: {e}") + return articles + +def _mapper_article_depuis_row(row_data: Dict, colonnes_config: Dict) -> Dict: + article = {} + + def get_val(sql_col, default=None, convert_type=None): + val = row_data.get(sql_col, default) + if val is None: + return default + + if convert_type == float: + return float(val) if val not in (None, "") else (default or 0.0) + elif convert_type == int: + return int(val) if val not in (None, "") else (default or 0) + elif convert_type == bool: + return bool(val) if val not in (None, "") else (default or False) + elif convert_type == str: + return _safe_strip(val) + + return val + + article["reference"] = get_val("AR_Ref", convert_type=str) + article["designation"] = get_val("AR_Design", convert_type=str) + article["code_ean"] = get_val("AR_CodeBarre", convert_type=str) + article["code_barre"] = get_val("AR_CodeBarre", convert_type=str) + article["edi_code"] = get_val("AR_EdiCode", convert_type=str) + article["raccourci"] = get_val("AR_Raccourci", convert_type=str) + + article["prix_vente"] = get_val("AR_PrixVen", 0.0, float) + article["prix_achat"] = get_val("AR_PrixAch", 0.0, float) + article["coef"] = get_val("AR_Coef", 0.0, float) + article["prix_net"] = get_val("AR_PUNet", 0.0, float) + article["prix_achat_nouveau"] = get_val("AR_PrixAchNouv", 0.0, float) + article["coef_nouveau"] = get_val("AR_CoefNouv", 0.0, float) + article["prix_vente_nouveau"] = get_val("AR_PrixVenNouv", 0.0, float) + + date_app = get_val("AR_DateApplication") + article["date_application_prix"] = str(date_app) if date_app else None + + article["cout_standard"] = get_val("AR_CoutStd", 0.0, float) + + article["unite_vente"] = get_val("AR_UniteVen", convert_type=str) + article["unite_poids"] = get_val("AR_UnitePoids", convert_type=str) + article["poids_net"] = get_val("AR_PoidsNet", 0.0, float) + article["poids_brut"] = get_val("AR_PoidsBrut", 0.0, float) + + article["gamme_1"] = get_val("AR_Gamme1", convert_type=str) + article["gamme_2"] = get_val("AR_Gamme2", convert_type=str) + + type_val = get_val("AR_Type", 0, int) + article["type_article"] = type_val + article["type_article_libelle"] = _get_type_article_libelle(type_val) + article["famille_code"] = get_val("FA_CodeFamille", convert_type=str) + article["nature"] = get_val("AR_Nature", 0, int) + article["garantie"] = get_val("AR_Garantie", 0, int) + article["code_fiscal"] = get_val("AR_CodeFiscal", convert_type=str) + article["pays"] = get_val("AR_Pays", convert_type=str) + + article["fournisseur_principal"] = get_val("CO_No", 0, int) + article["conditionnement"] = get_val("AR_Condition", convert_type=str) + article["nb_colis"] = get_val("AR_NbColis", 0, int) + article["prevision"] = get_val("AR_Prevision", False, bool) + + article["suivi_stock"] = get_val("AR_SuiviStock", False, bool) + article["nomenclature"] = get_val("AR_Nomencl", False, bool) + article["qte_composant"] = get_val("AR_QteComp", 0.0, float) + article["qte_operatoire"] = get_val("AR_QteOperatoire", 0.0, float) + + sommeil = get_val("AR_Sommeil", 0, int) + article["est_actif"] 
= (sommeil == 0) + article["en_sommeil"] = (sommeil == 1) + article["article_substitut"] = get_val("AR_Substitut", convert_type=str) + article["soumis_escompte"] = get_val("AR_Escompte", False, bool) + article["delai"] = get_val("AR_Delai", 0, int) + + article["stat_01"] = get_val("AR_Stat01", convert_type=str) + article["stat_02"] = get_val("AR_Stat02", convert_type=str) + article["stat_03"] = get_val("AR_Stat03", convert_type=str) + article["stat_04"] = get_val("AR_Stat04", convert_type=str) + article["stat_05"] = get_val("AR_Stat05", convert_type=str) + article["hors_statistique"] = get_val("AR_HorsStat", False, bool) + + article["categorie_1"] = get_val("CL_No1", 0, int) + article["categorie_2"] = get_val("CL_No2", 0, int) + article["categorie_3"] = get_val("CL_No3", 0, int) + article["categorie_4"] = get_val("CL_No4", 0, int) + + date_modif = get_val("AR_DateModif") + article["date_modification"] = str(date_modif) if date_modif else None + + article["vente_debit"] = get_val("AR_VteDebit", False, bool) + article["non_imprimable"] = get_val("AR_NotImp", False, bool) + article["transfere"] = get_val("AR_Transfere", False, bool) + article["publie"] = get_val("AR_Publie", False, bool) + article["contremarque"] = get_val("AR_Contremarque", False, bool) + article["fact_poids"] = get_val("AR_FactPoids", False, bool) + article["fact_forfait"] = get_val("AR_FactForfait", False, bool) + article["saisie_variable"] = get_val("AR_SaisieVar", False, bool) + article["fictif"] = get_val("AR_Fictif", False, bool) + article["sous_traitance"] = get_val("AR_SousTraitance", False, bool) + article["criticite"] = get_val("AR_Criticite", 0, int) + + article["reprise_code_defaut"] = get_val("RP_CodeDefaut", convert_type=str) + article["delai_fabrication"] = get_val("AR_DelaiFabrication", 0, int) + article["delai_peremption"] = get_val("AR_DelaiPeremption", 0, int) + article["delai_securite"] = get_val("AR_DelaiSecurite", 0, int) + article["type_lancement"] = get_val("AR_TypeLancement", 0, int) + article["cycle"] = get_val("AR_Cycle", 1, int) + + article["photo"] = get_val("AR_Photo", convert_type=str) + article["langue_1"] = get_val("AR_Langue1", convert_type=str) + article["langue_2"] = get_val("AR_Langue2", convert_type=str) + + article["frais_01_denomination"] = get_val("AR_Frais01FR_Denomination", convert_type=str) + article["frais_02_denomination"] = get_val("AR_Frais02FR_Denomination", convert_type=str) + article["frais_03_denomination"] = get_val("AR_Frais03FR_Denomination", convert_type=str) + + article["marque_commerciale"] = get_val("Marque commerciale", convert_type=str) + + objectif_val = get_val("Objectif / Qtés vendues") + if objectif_val is not None: + article["objectif_qtes_vendues"] = str(float(objectif_val)) if objectif_val not in ("", 0, 0.0) else None + else: + article["objectif_qtes_vendues"] = None + + pourcentage_val = get_val("Pourcentage teneur en or") + if pourcentage_val is not None: + article["pourcentage_or"] = str(float(pourcentage_val)) if pourcentage_val not in ("", 0, 0.0) else None + else: + article["pourcentage_or"] = None + + date_com = get_val("1ère commercialisation") + article["premiere_commercialisation"] = str(date_com) if date_com else None + + article["interdire_commande"] = get_val("AR_InterdireCommande", False, bool) + article["exclure"] = get_val("AR_Exclure", False, bool) + + article["stock_reel"] = 0.0 + article["stock_mini"] = 0.0 + article["stock_maxi"] = 0.0 + article["stock_reserve"] = 0.0 + article["stock_commande"] = 0.0 + article["stock_disponible"] = 0.0 
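# --- Sketch: feeding _mapper_article_depuis_row (illustration only) ------
# The mapper reads values by SQL column name from a plain dict, so the
# caller has to zip cursor.description with each fetched row first. The
# F_ARTICLE query itself is not part of this hunk; rows_to_dicts is a
# hypothetical helper.
from typing import Any, Dict, List

def rows_to_dicts(cursor) -> List[Dict[str, Any]]:
    """Turn the current result set into {column_name: value} dicts."""
    columns = [col[0] for col in cursor.description]
    return [dict(zip(columns, row)) for row in cursor.fetchall()]

# articles = [_mapper_article_depuis_row(rd, {}) for rd in rows_to_dicts(cursor)]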
+ + article["famille_libelle"] = None + article["famille_type"] = None + article["famille_unite_vente"] = None + article["famille_coef"] = None + article["famille_suivi_stock"] = None + article["famille_garantie"] = None + article["famille_unite_poids"] = None + article["famille_delai"] = None + article["famille_nb_colis"] = None + article["famille_code_fiscal"] = None + article["famille_escompte"] = None + article["famille_centrale"] = None + article["famille_nature"] = None + article["famille_hors_stat"] = None + article["famille_pays"] = None + + article["fournisseur_nom"] = None + article["tva_code"] = None + article["tva_taux"] = None + + return article + +def _enrichir_stocks_articles(articles: List[Dict], cursor) -> List[Dict]: + """Enrichit les articles avec les données de stock depuis F_ARTSTOCK""" + try: + logger.info(f" → Enrichissement stocks pour {len(articles)} articles...") + + references = [a["reference"] for a in articles if a["reference"]] + + if not references: + return articles + + placeholders = ",".join(["?"] * len(references)) + stock_query = f""" + SELECT + AR_Ref, + SUM(ISNULL(AS_QteSto, 0)) as Stock_Total, + MIN(ISNULL(AS_QteMini, 0)) as Stock_Mini, + MAX(ISNULL(AS_QteMaxi, 0)) as Stock_Maxi, + SUM(ISNULL(AS_QteRes, 0)) as Stock_Reserve, + SUM(ISNULL(AS_QteCom, 0)) as Stock_Commande + FROM F_ARTSTOCK + WHERE AR_Ref IN ({placeholders}) + GROUP BY AR_Ref + """ + + cursor.execute(stock_query, references) + stock_rows = cursor.fetchall() + + stock_map = {} + for stock_row in stock_rows: + ref = _safe_strip(stock_row[0]) + if ref: + stock_map[ref] = { + "stock_reel": float(stock_row[1]) if stock_row[1] else 0.0, + "stock_mini": float(stock_row[2]) if stock_row[2] else 0.0, + "stock_maxi": float(stock_row[3]) if stock_row[3] else 0.0, + "stock_reserve": float(stock_row[4]) if stock_row[4] else 0.0, + "stock_commande": float(stock_row[5]) if stock_row[5] else 0.0, + } + + logger.info(f" → {len(stock_map)} articles avec stock trouvés dans F_ARTSTOCK") + + for article in articles: + if article["reference"] in stock_map: + stock_data = stock_map[article["reference"]] + article.update(stock_data) + article["stock_disponible"] = ( + article["stock_reel"] - article["stock_reserve"] + ) + + return articles + + except Exception as e: + logger.error(f" Erreur enrichissement stocks: {e}", exc_info=True) + return articles + +def _enrichir_fournisseurs_articles(articles: List[Dict], cursor) -> List[Dict]: + """Enrichit les articles avec le nom du fournisseur principal""" + try: + logger.info(f" → Enrichissement fournisseurs...") + + nums_fournisseurs = list(set([ + a["fournisseur_principal"] for a in articles + if a.get("fournisseur_principal") and a["fournisseur_principal"] > 0 + ])) + + if not nums_fournisseurs: + logger.warning(" ⚠ Aucun numéro de fournisseur trouvé dans les articles") + for article in articles: + article["fournisseur_nom"] = None + return articles + + logger.info(f" → {len(nums_fournisseurs)} fournisseurs uniques à chercher") + logger.info(f" → Exemples CO_No : {nums_fournisseurs[:5]}") + + placeholders = ",".join(["?"] * len(nums_fournisseurs)) + fournisseur_query = f""" + SELECT + CT_Num, + CT_Intitule, + CT_Type + FROM F_COMPTET + WHERE CT_Num IN ({placeholders}) + AND CT_Type = 1 + """ + + cursor.execute(fournisseur_query, nums_fournisseurs) + fournisseur_rows = cursor.fetchall() + + logger.info(f" → {len(fournisseur_rows)} fournisseurs trouvés dans F_COMPTET") + + if len(fournisseur_rows) == 0: + logger.warning(f" ⚠ Aucun fournisseur trouvé pour CT_Type=1 
et CT_Num IN {nums_fournisseurs[:5]}") + cursor.execute(f"SELECT CT_Num, CT_Type FROM F_COMPTET WHERE CT_Num IN ({placeholders})", nums_fournisseurs) + tous_types = cursor.fetchall() + if tous_types: + logger.info(f" → Trouvé {len(tous_types)} comptes (tous types) : {[(r[0], r[1]) for r in tous_types[:5]]}") + + fournisseur_map = {} + for fourn_row in fournisseur_rows: + num = int(fourn_row[0]) # CT_Num + nom = _safe_strip(fourn_row[1]) # CT_Intitule + type_ct = int(fourn_row[2]) # CT_Type + fournisseur_map[num] = nom + logger.debug(f" → Fournisseur mappé : {num} = {nom} (Type={type_ct})") + + nb_enrichis = 0 + for article in articles: + num_fourn = article.get("fournisseur_principal") + if num_fourn and num_fourn in fournisseur_map: + article["fournisseur_nom"] = fournisseur_map[num_fourn] + nb_enrichis += 1 + else: + article["fournisseur_nom"] = None + + logger.info(f" {nb_enrichis} articles enrichis avec nom fournisseur") + + return articles + + except Exception as e: + logger.error(f" Erreur enrichissement fournisseurs: {e}", exc_info=True) + for article in articles: + article["fournisseur_nom"] = None + return articles + +def _enrichir_familles_articles(articles: List[Dict], cursor) -> List[Dict]: + """Enrichit les articles avec les informations de famille depuis F_FAMILLE""" + try: + logger.info(f" → Enrichissement familles pour {len(articles)} articles...") + + codes_familles_bruts = [ + a.get("famille_code") for a in articles + if a.get("famille_code") not in (None, "", " ") + ] + + if codes_familles_bruts: + logger.info(f" → Exemples de codes familles : {codes_familles_bruts[:5]}") + + codes_familles = list(set([ + str(code).strip() for code in codes_familles_bruts if code + ])) + + if not codes_familles: + logger.warning(" ⚠ Aucun code famille trouvé dans les articles") + for article in articles: + _init_champs_famille_vides(article) + return articles + + logger.info(f" → {len(codes_familles)} codes famille uniques") + + cursor.execute("SELECT TOP 1 * FROM F_FAMILLE") + colonnes_disponibles = [column[0] for column in cursor.description] + + colonnes_souhaitees = [ + "FA_CodeFamille", + "FA_Intitule", + "FA_Type", + "FA_UniteVen", + "FA_Coef", + "FA_SuiviStock", + "FA_Garantie", + "FA_UnitePoids", + "FA_Delai", + "FA_NbColis", + "FA_CodeFiscal", + "FA_Escompte", + "FA_Central", + "FA_Nature", + "FA_HorsStat", + "FA_Pays", + "FA_VteDebit", + "FA_NotImp", + "FA_Contremarque", + "FA_FactPoids", + "FA_FactForfait", + "FA_Publie", + "FA_RacineRef", + "FA_RacineCB", + "FA_Raccourci", + "FA_SousTraitance", + "FA_Fictif", + "FA_Criticite", + ] + + colonnes_a_lire = [col for col in colonnes_souhaitees if col in colonnes_disponibles] + + if "FA_CodeFamille" not in colonnes_a_lire or "FA_Intitule" not in colonnes_a_lire: + logger.error(" Colonnes essentielles manquantes !") + return articles + + logger.info(f" → Colonnes disponibles : {len(colonnes_a_lire)}") + + colonnes_str = ", ".join(colonnes_a_lire) + placeholders = ",".join(["?"] * len(codes_familles)) + + famille_query = f""" + SELECT {colonnes_str} + FROM F_FAMILLE + WHERE FA_CodeFamille IN ({placeholders}) + """ + + cursor.execute(famille_query, codes_familles) + famille_rows = cursor.fetchall() + + logger.info(f" → {len(famille_rows)} familles trouvées") + + famille_map = {} + for fam_row in famille_rows: + famille_data = {} + for idx, col in enumerate(colonnes_a_lire): + famille_data[col] = fam_row[idx] + + code = _safe_strip(famille_data.get("FA_CodeFamille")) + if not code: + continue + + famille_map[code] = { + 
"famille_libelle": _safe_strip(famille_data.get("FA_Intitule")), + "famille_type": int(famille_data.get("FA_Type", 0) or 0), + "famille_unite_vente": _safe_strip(famille_data.get("FA_UniteVen")), + "famille_coef": float(famille_data.get("FA_Coef", 0) or 0), + "famille_suivi_stock": bool(famille_data.get("FA_SuiviStock", 0)), + "famille_garantie": int(famille_data.get("FA_Garantie", 0) or 0), + "famille_unite_poids": _safe_strip(famille_data.get("FA_UnitePoids")), + "famille_delai": int(famille_data.get("FA_Delai", 0) or 0), + "famille_nb_colis": int(famille_data.get("FA_NbColis", 0) or 0), + "famille_code_fiscal": _safe_strip(famille_data.get("FA_CodeFiscal")), + "famille_escompte": bool(famille_data.get("FA_Escompte", 0)), + "famille_centrale": bool(famille_data.get("FA_Central", 0)), + "famille_nature": int(famille_data.get("FA_Nature", 0) or 0), + "famille_hors_stat": bool(famille_data.get("FA_HorsStat", 0)), + "famille_pays": _safe_strip(famille_data.get("FA_Pays")), + } + + logger.info(f" → {len(famille_map)} familles mappées") + + nb_enrichis = 0 + for article in articles: + code_fam = str(article.get("famille_code", "")).strip() + + if code_fam and code_fam in famille_map: + article.update(famille_map[code_fam]) + nb_enrichis += 1 + else: + _init_champs_famille_vides(article) + + logger.info(f" {nb_enrichis} articles enrichis avec infos famille") + + return articles + + except Exception as e: + logger.error(f" Erreur enrichissement familles: {e}", exc_info=True) + for article in articles: + _init_champs_famille_vides(article) + return articles + +def _init_champs_famille_vides(article: Dict): + """Initialise les champs famille à None/0""" + article["famille_libelle"] = None + article["famille_type"] = None + article["famille_unite_vente"] = None + article["famille_coef"] = None + article["famille_suivi_stock"] = None + article["famille_garantie"] = None + article["famille_unite_poids"] = None + article["famille_delai"] = None + article["famille_nb_colis"] = None + article["famille_code_fiscal"] = None + article["famille_escompte"] = None + article["famille_centrale"] = None + article["famille_nature"] = None + article["famille_hors_stat"] = None + article["famille_pays"] = None + +def _enrichir_tva_articles(articles: List[Dict], cursor) -> List[Dict]: + """Enrichit les articles avec le taux de TVA""" + try: + logger.info(f" → Enrichissement TVA...") + + codes_tva = list(set([ + a["code_fiscal"] for a in articles + if a.get("code_fiscal") + ])) + + if not codes_tva: + for article in articles: + article["tva_code"] = None + article["tva_taux"] = None + return articles + + placeholders = ",".join(["?"] * len(codes_tva)) + tva_query = f""" + SELECT + TA_Code, + TA_Taux + FROM F_TAXE + WHERE TA_Code IN ({placeholders}) + """ + + cursor.execute(tva_query, codes_tva) + tva_rows = cursor.fetchall() + + tva_map = {} + for tva_row in tva_rows: + code = _safe_strip(tva_row[0]) + tva_map[code] = float(tva_row[1]) if tva_row[1] else 0.0 + + logger.info(f" → {len(tva_map)} codes TVA trouvés") + + for article in articles: + code_tva = article.get("code_fiscal") + if code_tva and code_tva in tva_map: + article["tva_code"] = code_tva + article["tva_taux"] = tva_map[code_tva] + else: + article["tva_code"] = code_tva + article["tva_taux"] = None + + return articles + + except Exception as e: + logger.error(f" Erreur enrichissement TVA: {e}", exc_info=True) + for article in articles: + article["tva_code"] = article.get("code_fiscal") + article["tva_taux"] = None + return articles + +def 
_get_type_article_libelle(type_val: int) -> str: + """Retourne le libellé du type d'article""" + types = { + 0: "Article", + 1: "Prestation", + 2: "Divers / Frais", + 3: "Nomenclature" + } + return types.get(type_val, f"Type {type_val}") + +def _cast_article(persist_obj): + try: + obj = win32com.client.CastTo(persist_obj, "IBOArticle3") + obj.Read() + return obj + except: + return None + +__all__ = [ + "_enrichir_stock_emplacements", + "_enrichir_gammes_articles", + "_enrichir_tarifs_clients", + "_enrichir_nomenclature", + "_enrichir_compta_articles", + "_enrichir_fournisseurs_multiples", + "_enrichir_depots_details", + "_enrichir_emplacements_details", + "_enrichir_gammes_enumeres", + "_enrichir_references_enumerees", + "_enrichir_medias_articles", + "_enrichir_prix_gammes", + "_enrichir_conditionnements", + "_mapper_article_depuis_row", + "_enrichir_stocks_articles", + "_enrichir_fournisseurs_articles", + "_enrichir_familles_articles", + "_init_champs_famille_vides", + "_enrichir_tva_articles", + "_cast_article" +] \ No newline at end of file diff --git a/utils/articles/stock_check.py b/utils/articles/stock_check.py new file mode 100644 index 0000000..2cb31e3 --- /dev/null +++ b/utils/articles/stock_check.py @@ -0,0 +1,41 @@ +import logging + +logger = logging.getLogger(__name__) + +def verifier_stock_suffisant(article_ref, quantite, cursor, depot=None): + """Version thread-safe avec lock SQL""" + try: + + cursor.execute("SET TRANSACTION ISOLATION LEVEL SERIALIZABLE") + cursor.execute("BEGIN TRANSACTION") + + try: + cursor.execute( + """ + SELECT SUM(AS_QteSto) + FROM F_ARTSTOCK WITH (UPDLOCK, ROWLOCK) + WHERE AR_Ref = ? + """, + (article_ref.upper(),), + ) + + row = cursor.fetchone() + stock_dispo = float(row[0]) if row and row[0] else 0.0 + + suffisant = stock_dispo >= quantite + + cursor.execute("COMMIT") + + return { + "suffisant": suffisant, + "stock_disponible": stock_dispo, + "quantite_demandee": quantite, + } + + except: + cursor.execute("ROLLBACK") + raise + + except Exception as e: + logger.error(f"Erreur vérification stock: {e}") + raise diff --git a/utils/documents/devis/devis_data_sql.py b/utils/documents/devis/devis_data_sql.py new file mode 100644 index 0000000..3540061 --- /dev/null +++ b/utils/documents/devis/devis_data_sql.py @@ -0,0 +1,89 @@ +import win32com.client +from typing import Optional +import logging + +logger = logging.getLogger(__name__) + +def _afficher_etat_document(doc, titre: str): + """Affiche l'état complet d'un document.""" + logger.info("-" * 80) + logger.info(titre) + logger.info("-" * 80) + try: + logger.info(f" DO_Piece: {getattr(doc, 'DO_Piece', 'N/A')}") + logger.info(f" DO_Ref: '{getattr(doc, 'DO_Ref', 'N/A')}'") + logger.info(f" DO_Statut: {getattr(doc, 'DO_Statut', 'N/A')}") + + date_doc = getattr(doc, 'DO_Date', None) + date_str = date_doc.strftime('%Y-%m-%d') if date_doc else 'None' + logger.info(f" DO_Date: {date_str}") + + date_livr = getattr(doc, 'DO_DateLivr', None) + date_livr_str = date_livr.strftime('%Y-%m-%d') if date_livr else 'None' + logger.info(f" DO_DateLivr: {date_livr_str}") + + logger.info(f" DO_TotalHT: {getattr(doc, 'DO_TotalHT', 0)}€") + logger.info(f" DO_TotalTTC: {getattr(doc, 'DO_TotalTTC', 0)}€") + except Exception as e: + logger.error(f" Erreur affichage état: {e}") + logger.info("-" * 80) + + +def _compter_lignes_document(doc) -> int: + """Compte les lignes d'un document.""" + try: + try: + factory_lignes = doc.FactoryDocumentLigne + except: + factory_lignes = doc.FactoryDocumentVenteLigne + + count = 0 + index = 1 
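# --- Usage sketch for utils.articles.stock_check (not part of the patch) --
# After the refactor, the caller owns the SQL connection and passes the
# cursor to verifier_stock_suffisant (see creer_sortie_stock earlier in the
# diff). autocommit=True is an assumption so that the helper's explicit
# BEGIN TRANSACTION / COMMIT statements control the transaction; the
# connection string is likewise an assumption.
import pyodbc
from utils.articles.stock_check import verifier_stock_suffisant

def stock_ok(conn_str: str, article_ref: str, quantite: float) -> bool:
    """Return True if the cumulated F_ARTSTOCK quantity covers `quantite`."""
    conn = pyodbc.connect(conn_str, autocommit=True)
    try:
        cursor = conn.cursor()
        resultat = verifier_stock_suffisant(article_ref, quantite, cursor)
        return resultat["suffisant"]
    finally:
        conn.close()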
+ while index <= 100: + try: + ligne_p = factory_lignes.List(index) + if ligne_p is None: + break + count += 1 + index += 1 + except: + break + return count + except Exception as e: + logger.warning(f" Erreur comptage lignes: {e}") + return 0 + + +def _rechercher_devis_par_numero(numero: str, factory): + """Recherche un devis par numéro dans la liste.""" + logger.info(f" Recherche de {numero} dans la liste...") + + index = 1 + while index < 10000: + try: + persist_test = factory.List(index) + if persist_test is None: + break + + doc_test = win32com.client.CastTo(persist_test, "IBODocumentVente3") + doc_test.Read() + + if ( + getattr(doc_test, "DO_Type", -1) == 0 + and getattr(doc_test, "DO_Piece", "") == numero + ): + logger.info(f" Trouvé à l'index {index}") + return persist_test + + index += 1 + except: + index += 1 + + logger.error(f" Devis {numero} non trouvé dans la liste") + return None + +__all__ = [ + "_afficher_etat_document", + "_compter_lignes_document", + "_rechercher_devis_par_numero" +] \ No newline at end of file diff --git a/utils/documents/devis_extraction.py b/utils/documents/devis_extraction.py new file mode 100644 index 0000000..ddc56f5 --- /dev/null +++ b/utils/documents/devis_extraction.py @@ -0,0 +1,49 @@ +from typing import Dict, List, Optional, Any + +def _extraire_infos_devis(doc, numero: str, champs_modifies: list) -> Dict: + """Extrait les informations complètes du devis.""" + total_ht = float(getattr(doc, "DO_TotalHT", 0.0)) + total_ttc = float(getattr(doc, "DO_TotalTTC", 0.0)) + statut = getattr(doc, "DO_Statut", 0) + reference = getattr(doc, "DO_Ref", "") + + date_devis = None + try: + date_doc = getattr(doc, "DO_Date", None) + if date_doc: + date_devis = date_doc.strftime("%Y-%m-%d") + except: + pass + + date_livraison = None + try: + date_livr = getattr(doc, "DO_DateLivr", None) + if date_livr: + date_livraison = date_livr.strftime("%Y-%m-%d") + except: + pass + + client_code = "" + try: + client_obj = getattr(doc, "Client", None) + if client_obj: + client_obj.Read() + client_code = getattr(client_obj, "CT_Num", "") + except: + pass + + return { + "numero": numero, + "total_ht": total_ht, + "total_ttc": total_ttc, + "reference": reference, + "date_devis": date_devis, + "date_livraison": date_livraison, + "champs_modifies": champs_modifies, + "statut": statut, + "client_code": client_code, + } + +__all__ = [ + "_extraire_infos_devis", +] \ No newline at end of file diff --git a/utils/documents/documents_data_sql.py b/utils/documents/documents_data_sql.py new file mode 100644 index 0000000..e69de29 diff --git a/utils/functions/functions.py b/utils/functions/functions.py new file mode 100644 index 0000000..dae1a57 --- /dev/null +++ b/utils/functions/functions.py @@ -0,0 +1,118 @@ +from typing import Optional +import logging + +logger = logging.getLogger(__name__) + +def _clean_str(value, max_len: int) -> str: + """Nettoie et tronque une chaîne""" + if value is None or str(value).lower() in ('none', 'null', ''): + return "" + return str(value)[:max_len].strip() + + +def _safe_strip(value) -> Optional[str]: + """Nettoie une valeur string en toute sécurité""" + if value is None: + return None + if isinstance(value, str): + stripped = value.strip() + return stripped if stripped else None + return str(value).strip() or None + + +def _safe_int(value, default=None): + """Conversion sécurisée en entier""" + if value is None: + return default + try: + return int(value) + except (ValueError, TypeError): + return default + + +def _try_set_attribute(obj, attr_name, value, 
diff --git a/utils/documents/documents_data_sql.py b/utils/documents/documents_data_sql.py
new file mode 100644
index 0000000..e69de29
diff --git a/utils/functions/functions.py b/utils/functions/functions.py
new file mode 100644
index 0000000..dae1a57
--- /dev/null
+++ b/utils/functions/functions.py
@@ -0,0 +1,118 @@
+from typing import Optional
+import logging
+
+logger = logging.getLogger(__name__)
+
+def _clean_str(value, max_len: int) -> str:
+    """Nettoie et tronque une chaîne"""
+    if value is None or str(value).lower() in ('none', 'null', ''):
+        return ""
+    return str(value)[:max_len].strip()
+
+
+def _safe_strip(value) -> Optional[str]:
+    """Nettoie une valeur string en toute sécurité"""
+    if value is None:
+        return None
+    if isinstance(value, str):
+        stripped = value.strip()
+        return stripped if stripped else None
+    return str(value).strip() or None
+
+
+def _safe_int(value, default=None):
+    """Conversion sécurisée en entier"""
+    if value is None:
+        return default
+    try:
+        return int(value)
+    except (ValueError, TypeError):
+        return default
+
+
+def _try_set_attribute(obj, attr_name, value, variants=None):
+    """Essaie de définir un attribut avec plusieurs variantes"""
+    if variants is None:
+        variants = [attr_name]
+    else:
+        variants = [attr_name] + variants
+
+    for variant in variants:
+        try:
+            if hasattr(obj, variant):
+                setattr(obj, variant, value)
+                return True
+        except Exception as e:
+            logger.debug(f" {variant} échec: {str(e)[:50]}")
+
+    return False
+
+
+def _get_type_libelle(type_doc: int) -> str:
+    types_officiels = {
+        0: "Devis",
+        10: "Bon de commande",
+        20: "Préparation",
+        30: "Bon de livraison",
+        40: "Bon de retour",
+        50: "Bon d'avoir",
+        60: "Facture",
+    }
+
+    types_alternatifs = {
+        1: "Bon de commande",
+        2: "Préparation",
+        3: "Bon de livraison",
+        4: "Bon de retour",
+        5: "Bon d'avoir",
+        6: "Facture",
+    }
+
+    if type_doc in types_officiels:
+        return types_officiels[type_doc]
+
+    if type_doc in types_alternatifs:
+        return types_alternatifs[type_doc]
+
+    return f"Type {type_doc}"
+
+
+def _convertir_type_pour_sql(type_doc: int) -> int:
+    """COM → SQL : 0, 10, 20, 30... → 0, 1, 2, 3..."""
+    mapping = {0: 0, 10: 1, 20: 2, 30: 3, 40: 4, 50: 5, 60: 6}
+    return mapping.get(type_doc, type_doc)
+
+def _convertir_type_depuis_sql(type_sql: int) -> int:
+    """SQL → COM : 0, 1, 2, 3... → 0, 10, 20, 30..."""
+    mapping = {0: 0, 1: 10, 2: 20, 3: 30, 4: 40, 5: 50, 6: 60}
+    return mapping.get(type_sql, type_sql)
+
+
+def _normaliser_type_document(type_doc: int) -> int:
+    logger.info(f"[INFO] Type reçu: {type_doc}")
+
+    if type_doc in [0, 10, 20, 30, 40, 50, 60]:
+        return type_doc
+
+    mapping_normalisation = {
+        1: 10,  # Commande
+        2: 20,  # Préparation
+        3: 30,  # BL
+        4: 40,  # Retour
+        5: 50,  # Avoir
+        6: 60,  # Facture
+    }
+
+    return mapping_normalisation.get(type_doc, type_doc)
+
+__all__ = [
+    "_clean_str",
+    "_safe_strip",
+    "_safe_int",
+    "_try_set_attribute",
+    "_get_type_libelle",
+    "_normaliser_type_document",
+    "_convertir_type_depuis_sql",
+    "_convertir_type_pour_sql",
+]
\ No newline at end of file
diff --git a/utils/functions/items_to_dict.py b/utils/functions/items_to_dict.py
new file mode 100644
index 0000000..fec0293
--- /dev/null
+++ b/utils/functions/items_to_dict.py
@@ -0,0 +1,162 @@
+from typing import Dict, List, Optional, Any
+import logging
+from utils.functions.functions import _safe_strip
+
+logger = logging.getLogger(__name__)
+
+def _contact_to_dict(contact, numero_client=None, contact_numero=None, n_contact=None) -> Dict:
+    try:
+        civilite_code = getattr(contact, "Civilite", None)
+        civilite_map = {0: "M.", 1: "Mme", 2: "Mlle", 3: "Société"}
+        civilite = civilite_map.get(civilite_code) if civilite_code is not None else None
+
+        telephone = None
+        portable = None
+        telecopie = None
+        email = None
+
+        if hasattr(contact, 'Telecom'):
+            try:
+                telecom = contact.Telecom
+                telephone = _safe_strip(getattr(telecom, "Telephone", None))
+                portable = _safe_strip(getattr(telecom, "Portable", None))
+                telecopie = _safe_strip(getattr(telecom, "Telecopie", None))
+                email = _safe_strip(getattr(telecom, "EMail", None))
+            except:
+                pass
+
+        return {
+            "numero": numero_client,
+            "contact_numero": contact_numero,
+            "n_contact": n_contact or contact_numero,
+            "civilite": civilite,
+            "nom": _safe_strip(getattr(contact, "Nom", None)),
+            "prenom": _safe_strip(getattr(contact, "Prenom", None)),
+            "fonction": _safe_strip(getattr(contact, "Fonction", None)),
+            "service_code": getattr(contact, "ServiceContact", None),
+            "telephone": telephone,
+            "portable": portable,
+            "telecopie": telecopie,
+            "email": email,
+            "facebook": _safe_strip(getattr(contact, "Facebook", None)),
+            "linkedin": _safe_strip(getattr(contact, "LinkedIn", None)),
+            "skype": _safe_strip(getattr(contact, "Skype", None)),
+        }
+    except Exception as e:
+        logger.warning(f"Erreur conversion contact: {e}")
+        return {}
+
+def _row_to_contact_dict(row) -> Dict:
+    """Convertit une ligne SQL en dictionnaire contact"""
+    civilite_code = row.CT_Civilite
+    civilite_map = {0: "M.", 1: "Mme", 2: "Mlle", 3: "Société"}
+
+    return {
+        "numero": _safe_strip(row.CT_Num),
+        "contact_numero": row.CT_No,
+        "n_contact": row.N_Contact,
+        "civilite": civilite_map.get(civilite_code) if civilite_code is not None else None,
+        "nom": _safe_strip(row.CT_Nom),
+        "prenom": _safe_strip(row.CT_Prenom),
+        "fonction": _safe_strip(row.CT_Fonction),
+        "service_code": row.N_Service,
+        "telephone": _safe_strip(row.CT_Telephone),
+        "portable": _safe_strip(row.CT_TelPortable),
+        "telecopie": _safe_strip(row.CT_Telecopie),
+        "email": _safe_strip(row.CT_EMail),
+        "facebook": _safe_strip(row.CT_Facebook),
+        "linkedin": _safe_strip(row.CT_LinkedIn),
+        "skype": _safe_strip(row.CT_Skype),
+    }
+
+def _row_to_tiers_dict(row) -> Dict:
+    """Convertit une ligne SQL en dictionnaire tiers (factorisation DRY)"""
+    return {
+        "numero": _safe_strip(row.CT_Num),
+        "intitule": _safe_strip(row.CT_Intitule),
+        "type_tiers": row.CT_Type,
+        "qualite": _safe_strip(row.CT_Qualite),
+        "classement": _safe_strip(row.CT_Classement),
+        "raccourci": _safe_strip(row.CT_Raccourci),
+        "siret": _safe_strip(row.CT_Siret),
+        "tva_intra": _safe_strip(row.CT_Identifiant),
+        "code_naf": _safe_strip(row.CT_Ape),
+
+        "contact": _safe_strip(row.CT_Contact),
+        "adresse": _safe_strip(row.CT_Adresse),
+        "complement": _safe_strip(row.CT_Complement),
+        "code_postal": _safe_strip(row.CT_CodePostal),
+        "ville": _safe_strip(row.CT_Ville),
+        "region": _safe_strip(row.CT_CodeRegion),
+        "pays": _safe_strip(row.CT_Pays),
+
+        "telephone": _safe_strip(row.CT_Telephone),
+        "telecopie": _safe_strip(row.CT_Telecopie),
+        "email": _safe_strip(row.CT_EMail),
+        "site_web": _safe_strip(row.CT_Site),
+        "facebook": _safe_strip(row.CT_Facebook),
+        "linkedin": _safe_strip(row.CT_LinkedIn),
+
+        "taux01": row.CT_Taux01,
+        "taux02": row.CT_Taux02,
+        "taux03": row.CT_Taux03,
+        "taux04": row.CT_Taux04,
+
+        "statistique01": _safe_strip(row.CT_Statistique01),
+        "statistique02": _safe_strip(row.CT_Statistique02),
+        "statistique03": _safe_strip(row.CT_Statistique03),
+        "statistique04": _safe_strip(row.CT_Statistique04),
+        "statistique05": _safe_strip(row.CT_Statistique05),
+        "statistique06": _safe_strip(row.CT_Statistique06),
+        "statistique07": _safe_strip(row.CT_Statistique07),
+        "statistique08": _safe_strip(row.CT_Statistique08),
+        "statistique09": _safe_strip(row.CT_Statistique09),
+        "statistique10": _safe_strip(row.CT_Statistique10),
+
+        "encours_autorise": row.CT_Encours,
+        "assurance_credit": row.CT_Assurance,
+        "langue": row.CT_Langue,
+        "commercial_code": row.CO_No,
+
+        "lettrage_auto": (row.CT_Lettrage == 1),
+        "est_actif": (row.CT_Sommeil == 0),
+        "type_facture": row.CT_Facture,
+        "est_prospect": (row.CT_Prospect == 1),
+        "bl_en_facture": row.CT_BLFact,
+        "saut_page": row.CT_Saut,
+        "validation_echeance": row.CT_ValidEch,
+        "controle_encours": row.CT_ControlEnc,
+        "exclure_relance": (row.CT_NotRappel == 1),
+        "exclure_penalites": (row.CT_NotPenal == 1),
+        "bon_a_payer": row.CT_BonAPayer,
+
+        "priorite_livraison": row.CT_PrioriteLivr,
+        "livraison_partielle": row.CT_LivrPartielle,
+        "delai_transport": row.CT_DelaiTransport,
+        "delai_appro": row.CT_DelaiAppro,
+
+        "commentaire": _safe_strip(row.CT_Commentaire),
+
+        "section_analytique": _safe_strip(row.CA_Num),
+
+        "mode_reglement_code": row.MR_No,
+        "surveillance_active": (row.CT_Surveillance == 1),
+        "coface": _safe_strip(row.CT_Coface),
+        "forme_juridique": _safe_strip(row.CT_SvFormeJuri),
+        "effectif": _safe_strip(row.CT_SvEffectif),
+        "sv_regularite": _safe_strip(row.CT_SvRegul),
+        "sv_cotation": _safe_strip(row.CT_SvCotation),
+        "sv_objet_maj": _safe_strip(row.CT_SvObjetMaj),
+        "sv_chiffre_affaires": row.CT_SvCA,
+        "sv_resultat": row.CT_SvResultat,
+
+        "compte_general": _safe_strip(row.CG_NumPrinc),
+        "categorie_tarif": row.N_CatTarif,
+        "categorie_compta": row.N_CatCompta,
+    }
+
+__all__ = [
+    "_contact_to_dict",
+    "_row_to_contact_dict",
+    "_row_to_tiers_dict",
+]
\ No newline at end of file
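# --- Editorial usage sketch (not part of the changeset) ---------------------------
# The _row_to_*_dict converters above expect a pyodbc row whose column names match
# the attributes they read (CT_Num, CT_No, N_Contact, ...). A minimal sketch of a
# caller; how the connection is obtained is an assumption, and only the column list
# mirrors what _row_to_contact_dict actually reads (same columns as F_CONTACTT below).
import pyodbc

from utils.functions.items_to_dict import _row_to_contact_dict


def lister_contacts(conn: pyodbc.Connection, numero_client: str) -> list:
    """Sketch: all contacts of one tiers, converted with the shared helper."""
    cursor = conn.cursor()
    cursor.execute(
        """
        SELECT CT_Num, CT_No, N_Contact,
               CT_Civilite, CT_Nom, CT_Prenom, CT_Fonction, N_Service,
               CT_Telephone, CT_TelPortable, CT_Telecopie, CT_EMail,
               CT_Facebook, CT_LinkedIn, CT_Skype
        FROM F_CONTACTT
        WHERE CT_Num = ?
        """,
        (numero_client,),
    )
    return [_row_to_contact_dict(row) for row in cursor.fetchall()]
# ----------------------------------------------------------------------------------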
diff --git a/utils/functions/sage_utilities.py b/utils/functions/sage_utilities.py
new file mode 100644
index 0000000..0c8eaef
--- /dev/null
+++ b/utils/functions/sage_utilities.py
@@ -0,0 +1,234 @@
+import logging
+from utils.functions.functions import (
+    _convertir_type_depuis_sql,
+    _convertir_type_pour_sql,
+    _normaliser_type_document,
+    _get_type_libelle
+)
+
+logger = logging.getLogger(__name__)
+
+
+def _verifier_devis_non_transforme(numero: str, doc, cursor):
+    """Vérifie que le devis n'est pas transformé."""
+    verification = verifier_si_deja_transforme_sql(numero, cursor, 0)
+
+    if verification["deja_transforme"]:
+        docs_cibles = verification["documents_cibles"]
+        nums = [d["numero"] for d in docs_cibles]
+        raise ValueError(
+            f" Devis {numero} déjà transformé en {len(docs_cibles)} document(s): {', '.join(nums)}"
+        )
+
+    statut_actuel = getattr(doc, "DO_Statut", 0)
+    if statut_actuel == 5:
+        raise ValueError(f" Devis {numero} déjà transformé (statut=5)")
+
+def verifier_si_deja_transforme_sql(numero_source, cursor, type_source):
+    """Version corrigée avec normalisation des types"""
+    logger.info(
+        f"[VERIF] Vérification transformations de {numero_source} (type {type_source})"
+    )
+
+    logger.info(f"[DEBUG] Type source brut: {type_source}")
+    logger.info(
+        f"[DEBUG] Type source après normalisation: {_normaliser_type_document(type_source)}"
+    )
+    logger.info(
+        f"[DEBUG] Type source après normalisation SQL: {_convertir_type_pour_sql(type_source)}"
+    )
+
+    type_source = _convertir_type_pour_sql(type_source)
+
+    champ_liaison_mapping = {
+        0: "DL_PieceDE",
+        1: "DL_PieceBC",
+        3: "DL_PieceBL",
+    }
+
+    champ_liaison = champ_liaison_mapping.get(type_source)
+
+    if not champ_liaison:
+        logger.warning(f"[VERIF] Type source {type_source} non géré")
+        return {"deja_transforme": False, "documents_cibles": []}
+
+    try:
+        query = f"""
+            SELECT DISTINCT
+                dc.DO_Piece,
+                dc.DO_Type,
+                dc.DO_Statut,
+                (SELECT COUNT(*) FROM F_DOCLIGNE
+                 WHERE DO_Piece = dc.DO_Piece AND DO_Type = dc.DO_Type) as NbLignes
+            FROM F_DOCENTETE dc
+            INNER JOIN F_DOCLIGNE dl ON dc.DO_Piece = dl.DO_Piece AND dc.DO_Type = dl.DO_Type
+            WHERE dl.{champ_liaison} = ?
+            ORDER BY dc.DO_Type, dc.DO_Piece
+        """
+
+        cursor.execute(query, (numero_source,))
+        resultats = cursor.fetchall()
+
+        documents_cibles = []
+        for row in resultats:
+            type_brut = int(row.DO_Type)
+            type_normalise = _convertir_type_depuis_sql(type_brut)
+
+            doc = {
+                "numero": row.DO_Piece.strip() if row.DO_Piece else "",
+                "type": type_normalise,  # ← TYPE NORMALISÉ
+                "type_brut": type_brut,  # Garder aussi le type original
+                "type_libelle": _get_type_libelle(type_brut),
+                "statut": int(row.DO_Statut) if row.DO_Statut else 0,
+                "nb_lignes": int(row.NbLignes) if row.NbLignes else 0,
+            }
+            documents_cibles.append(doc)
+            logger.info(
+                f"[VERIF] Trouvé: {doc['numero']} "
+                f"(type {type_brut}→{type_normalise} - {doc['type_libelle']}) "
+                f"- {doc['nb_lignes']} lignes"
+            )
+
+        deja_transforme = len(documents_cibles) > 0
+
+        if deja_transforme:
+            logger.info(
+                f"[VERIF] Document {numero_source} a {len(documents_cibles)} transformation(s)"
+            )
+        else:
+            logger.info(
+                f"[VERIF] Document {numero_source} pas encore transformé"
+            )
+
+        return {
+            "deja_transforme": deja_transforme,
+            "documents_cibles": documents_cibles,
+        }
+
+    except Exception as e:
+        logger.error(f"[VERIF] Erreur vérification: {e}")
+        return {"deja_transforme": False, "documents_cibles": []}
+
+def peut_etre_transforme(numero_source, type_source, type_cible, cursor):
+    """Version corrigée avec normalisation (nécessite le curseur SQL)"""
+    type_source = _normaliser_type_document(type_source)
+    type_cible = _normaliser_type_document(type_cible)
+
+    logger.info(
+        f"[VERIF_TRANSFO] {numero_source} "
+        f"(type {type_source}) → type {type_cible}"
+    )
+
+    verif = verifier_si_deja_transforme_sql(numero_source, cursor, type_source)
+
+    docs_meme_type = [
+        d for d in verif["documents_cibles"] if d["type"] == type_cible
+    ]
+
+    if docs_meme_type:
+        nums = [d["numero"] for d in docs_meme_type]
+        return {
+            "possible": False,
+            "raison": f"Document déjà transformé en {_get_type_libelle(type_cible)}",
+            "documents_existants": docs_meme_type,
+            "message_detaille": f"Document(s) existant(s): {', '.join(nums)}",
+        }
+
+    return {
+        "possible": True,
+        "raison": "Transformation possible",
+        "documents_existants": [],
+    }
+
+def lire_erreurs_sage(obj, nom_obj=""):
+    erreurs = []
+    try:
+        if not hasattr(obj, "Errors") or obj.Errors is None:
+            return erreurs
+
+        nb_erreurs = 0
+        try:
+            nb_erreurs = obj.Errors.Count
+        except:
+            return erreurs
+
+        if nb_erreurs == 0:
+            return erreurs
+
+        for i in range(1, nb_erreurs + 1):
+            try:
+                err = None
+                try:
+                    err = obj.Errors.Item(i)
+                except:
+                    try:
+                        err = obj.Errors(i)
+                    except:
+                        try:
+                            err = obj.Errors.Item(i - 1)
+                        except:
+                            pass
+
+                if err is not None:
+                    description = ""
+                    field = ""
+                    number = ""
+
+                    for attr in ["Description", "Descr", "Message", "Text"]:
+                        try:
+                            val = getattr(err, attr, None)
+                            if val:
+                                description = str(val)
+                                break
+                        except:
+                            pass
+
+                    for attr in ["Field", "FieldName", "Champ", "Property"]:
+                        try:
+                            val = getattr(err, attr, None)
+                            if val:
+                                field = str(val)
+                                break
+                        except:
+                            pass
+
+                    for attr in ["Number", "Code", "ErrorCode", "Numero"]:
+                        try:
+                            val = getattr(err, attr, None)
+                            if val is not None:
+                                number = str(val)
+                                break
+                        except:
+                            pass
+
+                    if description or field or number:
+                        erreurs.append(
+                            {
+                                "source": nom_obj,
+                                "index": i,
+                                "description": description or "Erreur inconnue",
+                                "field": field or "?",
+                                "number": number or "?",
+                            }
+                        )
+
+            except Exception as e:
+                logger.debug(f"Erreur lecture erreur {i}: {e}")
+                continue
+
+    except Exception as e:
+        logger.debug(f"Erreur globale lecture erreurs {nom_obj}: {e}")
+
+    return erreurs
+
+__all__ = [
+    "_verifier_devis_non_transforme",
+    "verifier_si_deja_transforme_sql",
+    "peut_etre_transforme",
+    "lire_erreurs_sage"
+]
\ No newline at end of file
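# --- Editorial usage sketch (not part of the changeset) ---------------------------
# Plausible call sequence for the helpers above before transforming a devis (type 0)
# into a bon de livraison (type 30). How the pyodbc connection is obtained is an
# assumption; the gateway manages its own cursor elsewhere. peut_etre_transforme()
# takes the same SQL cursor as verifier_si_deja_transforme_sql().
import pyodbc

from utils.functions.sage_utilities import peut_etre_transforme


def controler_transformation(conn: pyodbc.Connection, numero_devis: str) -> None:
    """Sketch: raise if the devis was already turned into a delivery note."""
    cursor = conn.cursor()
    verdict = peut_etre_transforme(numero_devis, 0, 30, cursor)
    if not verdict["possible"]:
        existants = ", ".join(d["numero"] for d in verdict["documents_existants"])
        raise ValueError(f"{verdict['raison']} ({existants})")
# ----------------------------------------------------------------------------------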
diff --git a/utils/tiers/__init__.py b/utils/tiers/__init__.py
new file mode 100644
index 0000000..71131a8
--- /dev/null
+++ b/utils/tiers/__init__.py
@@ -0,0 +1,5 @@
+from utils.tiers.tiers import (TiersListRequest,)
+
+__all__ = [
+    "TiersListRequest",
+]
\ No newline at end of file
diff --git a/utils/tiers/clients/clients_data.py b/utils/tiers/clients/clients_data.py
new file mode 100644
index 0000000..961da5d
--- /dev/null
+++ b/utils/tiers/clients/clients_data.py
@@ -0,0 +1,332 @@
+import win32com.client
+import logging
+
+logger = logging.getLogger(__name__)
+
+def _cast_client(persist_obj):
+    try:
+        obj = win32com.client.CastTo(persist_obj, "IBOClient3")
+        obj.Read()
+        return obj
+    except Exception as e:
+        logger.debug(f" _cast_client échoue: {e}")
+        return None
+
+def _extraire_client(client_obj):
+    try:
+        try:
+            numero = getattr(client_obj, "CT_Num", "").strip()
+            if not numero:
+                logger.debug("Objet sans CT_Num, skip")
+                return None
+        except Exception as e:
+            logger.debug(f" Erreur lecture CT_Num: {e}")
+            return None
+
+        try:
+            intitule = getattr(client_obj, "CT_Intitule", "").strip()
+            if not intitule:
+                logger.debug(f"{numero} sans CT_Intitule")
+        except Exception as e:
+            logger.debug(f"Erreur CT_Intitule sur {numero}: {e}")
+            intitule = ""
+
+        data = {
+            "numero": numero,
+            "intitule": intitule,
+        }
+
+        try:
+            qualite_code = getattr(client_obj, "CT_Type", None)
+
+            qualite_map = {
+                0: "CLI",     # Client
+                1: "FOU",     # Fournisseur
+                2: "CLIFOU",  # Client + Fournisseur
+                3: "SAL",     # Salarié
+                4: "PRO",     # Prospect
+            }
+
+            data["qualite"] = qualite_map.get(qualite_code, "CLI")
+            data["est_fournisseur"] = qualite_code in [1, 2]
+
+        except:
+            data["qualite"] = "CLI"
+            data["est_fournisseur"] = False
+
+        try:
+            data["est_prospect"] = getattr(client_obj, "CT_Prospect", 0) == 1
+        except:
+            data["est_prospect"] = False
+
+        if data["est_prospect"]:
+            data["type_tiers"] = "prospect"
+        elif data["est_fournisseur"] and data["qualite"] != "CLIFOU":
+            data["type_tiers"] = "fournisseur"
+        elif data["qualite"] == "CLIFOU":
+            data["type_tiers"] = "client_fournisseur"
+        else:
+            data["type_tiers"] = "client"
+
+        try:
+            sommeil = getattr(client_obj, "CT_Sommeil", 0)
+            data["est_actif"] = sommeil == 0
+            data["est_en_sommeil"] = sommeil == 1
+        except:
+            data["est_actif"] = True
+            data["est_en_sommeil"] = False
+
+        try:
+            forme_juridique = getattr(client_obj, "CT_FormeJuridique", "").strip()
+            data["forme_juridique"] = forme_juridique
+            data["est_entreprise"] = bool(forme_juridique)
+            data["est_particulier"] = not bool(forme_juridique)
+        except:
+            data["forme_juridique"] = ""
+            data["est_entreprise"] = False
+            data["est_particulier"] = True
+
+        try:
+            data["civilite"] = getattr(client_obj, "CT_Civilite", "").strip()
+        except:
+            data["civilite"] = ""
+
+        try:
+            data["nom"] = getattr(client_obj, "CT_Nom", "").strip()
+        except:
+            data["nom"] = ""
+
+        try:
+            data["prenom"] = getattr(client_obj, "CT_Prenom", "").strip()
+        except:
+            data["prenom"] = ""
+
+        if data.get("nom") or data.get("prenom"):
+            parts = []
+            if data.get("civilite"):
+                parts.append(data["civilite"])
+            if data.get("prenom"):
+                parts.append(data["prenom"])
+            if data.get("nom"):
+                parts.append(data["nom"])
+            data["nom_complet"] = " ".join(parts)
+        else:
+            data["nom_complet"] = ""
+
+        try:
+            data["contact"] = getattr(client_obj, "CT_Contact", "").strip()
+        except:
+            data["contact"] = ""
+
+        try:
+            adresse_obj = getattr(client_obj, "Adresse", None)
+            if adresse_obj:
+                try:
+                    data["adresse"] = getattr(adresse_obj, "Adresse", "").strip()
+                except:
+                    data["adresse"] = ""
+
+                try:
+                    data["complement"] = getattr(
+                        adresse_obj, "Complement", ""
+                    ).strip()
+                except:
+                    data["complement"] = ""
+
+                try:
+                    data["code_postal"] = getattr(
+                        adresse_obj, "CodePostal", ""
+                    ).strip()
+                except:
+                    data["code_postal"] = ""
+
+                try:
+                    data["ville"] = getattr(adresse_obj, "Ville", "").strip()
+                except:
+                    data["ville"] = ""
+
+                try:
+                    data["region"] = getattr(adresse_obj, "Region", "").strip()
+                except:
+                    data["region"] = ""
+
+                try:
+                    data["pays"] = getattr(adresse_obj, "Pays", "").strip()
+                except:
+                    data["pays"] = ""
+            else:
+                data["adresse"] = ""
+                data["complement"] = ""
+                data["code_postal"] = ""
+                data["ville"] = ""
+                data["region"] = ""
+                data["pays"] = ""
+        except Exception as e:
+            logger.debug(f"Erreur adresse sur {numero}: {e}")
+            data["adresse"] = ""
+            data["complement"] = ""
+            data["code_postal"] = ""
+            data["ville"] = ""
+            data["region"] = ""
+            data["pays"] = ""
+
+        try:
+            telecom = getattr(client_obj, "Telecom", None)
+            if telecom:
+                try:
+                    data["telephone"] = getattr(telecom, "Telephone", "").strip()
+                except:
+                    data["telephone"] = ""
+
+                try:
+                    data["portable"] = getattr(telecom, "Portable", "").strip()
+                except:
+                    data["portable"] = ""
+
+                try:
+                    data["telecopie"] = getattr(telecom, "Telecopie", "").strip()
+                except:
+                    data["telecopie"] = ""
+
+                try:
+                    data["email"] = getattr(telecom, "EMail", "").strip()
+                except:
+                    data["email"] = ""
+
+                try:
+                    site = (
+                        getattr(telecom, "Site", None)
+                        or getattr(telecom, "Web", None)
+                        or getattr(telecom, "SiteWeb", "")
+                    )
+                    data["site_web"] = str(site).strip() if site else ""
+                except:
+                    data["site_web"] = ""
+            else:
+                data["telephone"] = ""
+                data["portable"] = ""
+                data["telecopie"] = ""
+                data["email"] = ""
+                data["site_web"] = ""
+        except Exception as e:
+            logger.debug(f"Erreur telecom sur {numero}: {e}")
+            data["telephone"] = ""
+            data["portable"] = ""
+            data["telecopie"] = ""
+            data["email"] = ""
+            data["site_web"] = ""
+
+        try:
+            data["siret"] = getattr(client_obj, "CT_Siret", "").strip()
+        except:
+            data["siret"] = ""
+
+        try:
+            data["siren"] = getattr(client_obj, "CT_Siren", "").strip()
+        except:
+            data["siren"] = ""
+
+        try:
+            data["tva_intra"] = getattr(client_obj, "CT_Identifiant", "").strip()
+        except:
+            data["tva_intra"] = ""
+
+        try:
+            data["code_naf"] = (
+                getattr(client_obj, "CT_CodeNAF", "").strip()
+                or getattr(client_obj, "CT_APE", "").strip()
+            )
+        except:
+            data["code_naf"] = ""
+
+        try:
+            data["secteur"] = getattr(client_obj, "CT_Secteur", "").strip()
+        except:
+            data["secteur"] = ""
+
+        try:
+            effectif = getattr(client_obj, "CT_Effectif", None)
+            data["effectif"] = int(effectif) if effectif is not None else None
+        except:
+            data["effectif"] = None
+
+        try:
+            ca = getattr(client_obj, "CT_ChiffreAffaire", None)
+            data["ca_annuel"] = float(ca) if ca is not None else None
+        except:
+            data["ca_annuel"] = None
+
+        try:
+            data["commercial_code"] = getattr(client_obj, "CO_No", "").strip()
+        except:
+            try:
+                data["commercial_code"] = getattr(
+                    client_obj, "CT_Commercial", ""
+                ).strip()
+            except:
+                data["commercial_code"] = ""
+
+        if data.get("commercial_code"):
+            try:
+                commercial_obj = getattr(client_obj, "Commercial", None)
+                if commercial_obj:
+                    commercial_obj.Read()
+                    data["commercial_nom"] = getattr(
+                        commercial_obj, "CO_Nom", ""
+                    ).strip()
+                else:
+                    data["commercial_nom"] = ""
+            except:
+                data["commercial_nom"] = ""
+        else:
+            data["commercial_nom"] = ""
+
+        try:
+            data["categorie_tarifaire"] = getattr(client_obj, "N_CatTarif", None)
+        except:
+            data["categorie_tarifaire"] = None
+
+        try:
+            data["categorie_comptable"] = getattr(client_obj, "N_CatCompta", None)
+        except:
+            data["categorie_comptable"] = None
+
+        try:
+            data["encours_autorise"] = float(getattr(client_obj, "CT_Encours", 0.0))
+        except:
+            data["encours_autorise"] = 0.0
+
+        try:
+            data["assurance_credit"] = float(
+                getattr(client_obj, "CT_Assurance", 0.0)
+            )
+        except:
+            data["assurance_credit"] = 0.0
+
+        try:
+            data["compte_general"] = getattr(client_obj, "CG_Num", "").strip()
+        except:
+            data["compte_general"] = ""
+
+        try:
+            date_creation = getattr(client_obj, "CT_DateCreate", None)
+            data["date_creation"] = str(date_creation) if date_creation else ""
+        except:
+            data["date_creation"] = ""
+
+        try:
+            date_modif = getattr(client_obj, "CT_DateModif", None)
+            data["date_modification"] = str(date_modif) if date_modif else ""
+        except:
+            data["date_modification"] = ""
+
+        return data
+
+    except Exception as e:
+        logger.error(f" ERREUR GLOBALE _extraire_client: {e}", exc_info=True)
+        return None
+
+__all__ = [
+    "_extraire_client",
+    "_cast_client"
+]
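# --- Editorial usage sketch (not part of the changeset) ---------------------------
# _cast_client() and _extraire_client() above are written to be chained while walking
# the COM client factory. The factory access below (factory_client.List(i)) is an
# assumption modelled on the other listing loops in this codebase (1-based index,
# None as end sentinel); the diff itself does not show how the factory is obtained.
from utils.tiers.clients.clients_data import _cast_client, _extraire_client


def lister_clients(factory_client, limite: int = 10000) -> list:
    """Sketch: extract every readable client from an already-open factory."""
    resultats = []
    index = 1
    while index <= limite:
        try:
            persist = factory_client.List(index)
        except Exception:
            break
        if persist is None:
            break
        client_obj = _cast_client(persist)
        if client_obj is not None:
            data = _extraire_client(client_obj)
            if data:
                resultats.append(data)
        index += 1
    return resultats
# ----------------------------------------------------------------------------------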
diff --git a/utils/tiers/contacts/contacts.py b/utils/tiers/contacts/contacts.py
new file mode 100644
index 0000000..d833fe7
--- /dev/null
+++ b/utils/tiers/contacts/contacts.py
@@ -0,0 +1,37 @@
+from typing import Dict, List, Optional, Any
+from utils.functions.items_to_dict import _row_to_contact_dict
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def obtenir_contact(self, numero: str, contact_numero: int) -> Optional[Dict]:
+    """
+    Récupère un contact spécifique par son CT_No
+    """
+    try:
+        with self._get_sql_connection() as conn:
+            cursor = conn.cursor()
+
+            query = """
+                SELECT
+                    CT_Num, CT_No, N_Contact,
+                    CT_Civilite, CT_Nom, CT_Prenom, CT_Fonction,
+                    N_Service,
+                    CT_Telephone, CT_TelPortable, CT_Telecopie, CT_EMail,
+                    CT_Facebook, CT_LinkedIn, CT_Skype
+                FROM F_CONTACTT
+                WHERE CT_Num = ? AND CT_No = ?
+            """
+
+            cursor.execute(query, [numero, contact_numero])
+            row = cursor.fetchone()
+
+            if not row:
+                return None
+
+            return _row_to_contact_dict(row)
+
+    except Exception as e:
+        logger.error(f"Erreur obtention contact: {e}")
+        raise RuntimeError(f"Erreur lecture contact: {str(e)}")
diff --git a/utils/tiers/tiers.py b/utils/tiers/tiers.py
new file mode 100644
index 0000000..9520ffb
--- /dev/null
+++ b/utils/tiers/tiers.py
@@ -0,0 +1,13 @@
+from pydantic import BaseModel, Field
+from typing import Optional
+
+class TiersListRequest(BaseModel):
+    """Requête de listage des tiers"""
+    type_tiers: Optional[str] = Field(
+        None,
+        description="Type: client, fournisseur, prospect, all"
+    )
+    filtre: str = Field(
+        "",
+        description="Filtre sur code ou intitulé"
+    )
\ No newline at end of file
diff --git a/utils/tiers/tiers_data_sql.py b/utils/tiers/tiers_data_sql.py
new file mode 100644
index 0000000..81dc2d6
--- /dev/null
+++ b/utils/tiers/tiers_data_sql.py
@@ -0,0 +1,55 @@
+def _build_tiers_select_query() -> str:
+    """Construit la requête SELECT pour les tiers (factorisation)"""
+    return """
+        SELECT
+            -- IDENTIFICATION (9)
+            CT_Num, CT_Intitule, CT_Type, CT_Qualite,
+            CT_Classement, CT_Raccourci, CT_Siret, CT_Identifiant,
+            CT_Ape,
+
+            -- ADRESSE (7)
+            CT_Contact, CT_Adresse, CT_Complement,
+            CT_CodePostal, CT_Ville, CT_CodeRegion, CT_Pays,
+
+            -- TELECOM (6)
+            CT_Telephone, CT_Telecopie, CT_EMail, CT_Site,
+            CT_Facebook, CT_LinkedIn,
+
+            -- TAUX (4)
+            CT_Taux01, CT_Taux02, CT_Taux03, CT_Taux04,
+
+            -- STATISTIQUES (10)
+            CT_Statistique01, CT_Statistique02, CT_Statistique03,
+            CT_Statistique04, CT_Statistique05, CT_Statistique06,
+            CT_Statistique07, CT_Statistique08, CT_Statistique09,
+            CT_Statistique10,
+
+            -- COMMERCIAL (4)
+            CT_Encours, CT_Assurance, CT_Langue, CO_No,
+
+            -- FACTURATION (11)
+            CT_Lettrage, CT_Sommeil, CT_Facture, CT_Prospect,
+            CT_BLFact, CT_Saut, CT_ValidEch, CT_ControlEnc,
+            CT_NotRappel, CT_NotPenal, CT_BonAPayer,
+
+            -- LOGISTIQUE (4)
+            CT_PrioriteLivr, CT_LivrPartielle,
+            CT_DelaiTransport, CT_DelaiAppro,
+
+            -- COMMENTAIRE (1)
+            CT_Commentaire,
+
+            -- ANALYTIQUE (1)
+            CA_Num,
+
+            -- ORGANISATION / SURVEILLANCE (10)
+            MR_No, CT_Surveillance, CT_Coface,
+            CT_SvFormeJuri, CT_SvEffectif, CT_SvRegul,
+            CT_SvCotation, CT_SvObjetMaj, CT_SvCA, CT_SvResultat,
+
+            -- COMPTE GENERAL ET CATEGORIES (3)
+            CG_NumPrinc, N_CatTarif, N_CatCompta
+        """
+__all__ = [
+    "_build_tiers_select_query"
+]
\ No newline at end of file
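# --- Editorial usage sketch (not part of the changeset) ---------------------------
# How the utils/tiers pieces presumably fit together: TiersListRequest carries the
# filters received by /sage/tiers/list, the shared SELECT comes from
# _build_tiers_select_query(), and each pyodbc row is flattened with
# _row_to_tiers_dict(). The FROM table (F_COMPTET), the WHERE clauses and the
# CT_Type/CT_Prospect encoding below are assumptions; only the column list and the
# request model come from the diff.
import pyodbc

from utils.functions.items_to_dict import _row_to_tiers_dict
from utils.tiers.tiers import TiersListRequest
from utils.tiers.tiers_data_sql import _build_tiers_select_query


def lister_tiers(conn: pyodbc.Connection, req: TiersListRequest) -> list:
    """Sketch: server-side filtering for the /sage/tiers/list endpoint."""
    query = _build_tiers_select_query() + " FROM F_COMPTET WHERE 1=1"
    params = []

    if req.filtre:
        query += " AND (CT_Num LIKE ? OR CT_Intitule LIKE ?)"
        params += [f"%{req.filtre}%", f"%{req.filtre}%"]

    if req.type_tiers == "client":
        query += " AND CT_Type = 0 AND CT_Prospect = 0"  # assumed encoding
    elif req.type_tiers == "fournisseur":
        query += " AND CT_Type = 1"                       # assumed encoding
    elif req.type_tiers == "prospect":
        query += " AND CT_Prospect = 1"

    cursor = conn.cursor()
    cursor.execute(query, params)
    return [_row_to_tiers_dict(row) for row in cursor.fetchall()]

# Example request body accepted by the endpoint:
#   {"type_tiers": "client", "filtre": "DUPONT"}
# ----------------------------------------------------------------------------------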