import logging
from typing import Dict, List

import win32com.client

from utils.functions.functions import _safe_strip

logger = logging.getLogger(__name__)


def _enrichir_stock_emplacements(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with per-location stock from F_ARTSTOCKEMPL."""
    try:
        logger.info(" → Enrichissement stock emplacements...")

        references = [a["reference"] for a in articles if a["reference"]]
        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        query = f"""
        SELECT
            AR_Ref,
            DE_No,
            DP_No,
            AE_QteSto,
            AE_QtePrepa,
            AE_QteAControler,
            cbCreation,
            cbModification
        FROM F_ARTSTOCKEMPL
        WHERE AR_Ref IN ({placeholders})
        ORDER BY AR_Ref, DE_No, DP_No
        """

        cursor.execute(query, references)
        rows = cursor.fetchall()

        emplacements_map = {}
        for row in rows:
            ref = _safe_strip(row[0])
            if not ref:
                continue

            if ref not in emplacements_map:
                emplacements_map[ref] = []

            emplacements_map[ref].append(
                {
                    "depot": _safe_strip(row[1]),
                    "emplacement": _safe_strip(row[2]),
                    "qte_stockee": float(row[3]) if row[3] else 0.0,
                    "qte_preparee": float(row[4]) if row[4] else 0.0,
                    "qte_a_controler": float(row[5]) if row[5] else 0.0,
                    "date_creation": row[6],
                    "date_modification": row[7],
                }
            )

        for article in articles:
            article["emplacements"] = emplacements_map.get(article["reference"], [])
            article["nb_emplacements"] = len(article["emplacements"])

        logger.info(f" {len(emplacements_map)} articles avec emplacements")
        return articles

    except Exception as e:
        logger.error(f" Erreur stock emplacements: {e}")
        for article in articles:
            article["emplacements"] = []
            article["nb_emplacements"] = 0
        return articles


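# Note: every enrichment helper in this module builds a parameterised IN clause with
# one "?" placeholder per article reference. ODBC drivers cap the number of parameters
# per statement (SQL Server allows roughly 2100), so very large article batches would
# have to be split before calling these helpers. The splitter below is an illustrative
# sketch only: it is not used by this module, and the chunk size is an assumption.
def _decouper_en_lots(values: List, taille: int = 1000):
    """Yield successive slices of `values` containing at most `taille` items."""
    for debut in range(0, len(values), taille):
        yield values[debut:debut + taille]

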
def _enrichir_gammes_articles(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with their gamme entries from F_ARTGAMME."""
    try:
        logger.info(" → Enrichissement gammes articles...")

        references = [a["reference"] for a in articles if a["reference"]]
        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        query = f"""
        SELECT
            AR_Ref,
            AG_No,
            EG_Enumere,
            AG_Type,
            cbCreation,
            cbModification
        FROM F_ARTGAMME
        WHERE AR_Ref IN ({placeholders})
        ORDER BY AR_Ref, AG_No, EG_Enumere
        """

        cursor.execute(query, references)
        rows = cursor.fetchall()

        gammes_map = {}
        for row in rows:
            ref = _safe_strip(row[0])
            if not ref:
                continue

            if ref not in gammes_map:
                gammes_map[ref] = []

            gammes_map[ref].append(
                {
                    "numero_gamme": int(row[1]) if row[1] else 0,
                    "enumere": _safe_strip(row[2]),
                    "type_gamme": int(row[3]) if row[3] else 0,
                    "date_creation": row[4],
                    "date_modification": row[5],
                }
            )

        for article in articles:
            article["gammes"] = gammes_map.get(article["reference"], [])
            article["nb_gammes"] = len(article["gammes"])

        logger.info(f" {len(gammes_map)} articles avec gammes")
        return articles

    except Exception as e:
        logger.error(f" Erreur gammes: {e}")
        for article in articles:
            article["gammes"] = []
            article["nb_gammes"] = 0
        return articles


def _enrichir_tarifs_clients(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with customer price lists from F_ARTCLIENT."""
    try:
        logger.info(" → Enrichissement tarifs clients...")

        references = [a["reference"] for a in articles if a["reference"]]
        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        query = f"""
        SELECT
            AR_Ref,
            AC_Categorie,
            CT_Num,
            AC_PrixVen,
            AC_Coef,
            AC_PrixTTC,
            AC_Arrondi,
            AC_QteMont,
            EG_Champ,
            AC_PrixDev,
            AC_Devise,
            AC_Remise,
            AC_Calcul,
            AC_TypeRem,
            AC_RefClient,
            AC_CoefNouv,
            AC_PrixVenNouv,
            AC_PrixDevNouv,
            AC_RemiseNouv,
            AC_DateApplication,
            cbCreation,
            cbModification
        FROM F_ARTCLIENT
        WHERE AR_Ref IN ({placeholders})
        ORDER BY AR_Ref, AC_Categorie, CT_Num
        """

        cursor.execute(query, references)
        rows = cursor.fetchall()

        tarifs_map = {}
        for row in rows:
            ref = _safe_strip(row[0])
            if not ref:
                continue

            if ref not in tarifs_map:
                tarifs_map[ref] = []

            tarifs_map[ref].append(
                {
                    "categorie": int(row[1]) if row[1] else 0,
                    "client_num": _safe_strip(row[2]),
                    "prix_vente": float(row[3]) if row[3] else 0.0,
                    "coefficient": float(row[4]) if row[4] else 0.0,
                    "prix_ttc": float(row[5]) if row[5] else 0.0,
                    "arrondi": float(row[6]) if row[6] else 0.0,
                    "qte_montant": float(row[7]) if row[7] else 0.0,
                    "enumere_gamme": int(row[8]) if row[8] else 0,
                    "prix_devise": float(row[9]) if row[9] else 0.0,
                    "devise": int(row[10]) if row[10] else 0,
                    "remise": float(row[11]) if row[11] else 0.0,
                    "mode_calcul": int(row[12]) if row[12] else 0,
                    "type_remise": int(row[13]) if row[13] else 0,
                    "ref_client": _safe_strip(row[14]),
                    "coef_nouveau": float(row[15]) if row[15] else 0.0,
                    "prix_vente_nouveau": float(row[16]) if row[16] else 0.0,
                    "prix_devise_nouveau": float(row[17]) if row[17] else 0.0,
                    "remise_nouvelle": float(row[18]) if row[18] else 0.0,
                    "date_application": row[19],
                    "date_creation": row[20],
                    "date_modification": row[21],
                }
            )

        for article in articles:
            article["tarifs_clients"] = tarifs_map.get(article["reference"], [])
            article["nb_tarifs_clients"] = len(article["tarifs_clients"])

        logger.info(f" {len(tarifs_map)} articles avec tarifs clients")
        return articles

    except Exception as e:
        logger.error(f" Erreur tarifs clients: {e}")
        for article in articles:
            article["tarifs_clients"] = []
            article["nb_tarifs_clients"] = 0
        return articles


def _enrichir_nomenclature(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with nomenclature data from F_ARTCOMPO."""
    try:
        logger.info(" → Enrichissement nomenclature...")

        references = [a["reference"] for a in articles if a["reference"]]
        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        query = f"""
        SELECT
            AR_Ref,
            AT_Operation,
            RP_Code,
            AT_Temps,
            AT_Type,
            AT_Description,
            AT_Ordre,
            AG_No1Comp,
            AG_No2Comp,
            AT_TypeRessource,
            AT_Chevauche,
            AT_Demarre,
            AT_OperationChevauche,
            AT_ValeurChevauche,
            AT_TypeChevauche,
            cbCreation,
            cbModification
        FROM F_ARTCOMPO
        WHERE AR_Ref IN ({placeholders})
        ORDER BY AR_Ref, AT_Ordre, AT_Operation
        """

        cursor.execute(query, references)
        rows = cursor.fetchall()

        composants_map = {}
        for row in rows:
            ref = _safe_strip(row[0])
            if not ref:
                continue

            if ref not in composants_map:
                composants_map[ref] = []

            composants_map[ref].append(
                {
                    "operation": _safe_strip(row[1]),
                    "code_ressource": _safe_strip(row[2]),
                    "temps": float(row[3]) if row[3] else 0.0,
                    "type": int(row[4]) if row[4] else 0,
                    "description": _safe_strip(row[5]),
                    "ordre": int(row[6]) if row[6] else 0,
                    "gamme_1_comp": int(row[7]) if row[7] else 0,
                    "gamme_2_comp": int(row[8]) if row[8] else 0,
                    "type_ressource": int(row[9]) if row[9] else 0,
                    "chevauche": int(row[10]) if row[10] else 0,
                    "demarre": int(row[11]) if row[11] else 0,
                    "operation_chevauche": _safe_strip(row[12]),
                    "valeur_chevauche": float(row[13]) if row[13] else 0.0,
                    "type_chevauche": int(row[14]) if row[14] else 0,
                    "date_creation": row[15],
                    "date_modification": row[16],
                }
            )

        for article in articles:
            article["composants"] = composants_map.get(article["reference"], [])
            article["nb_composants"] = len(article["composants"])

        logger.info(f" {len(composants_map)} articles avec nomenclature")
        return articles

    except Exception as e:
        logger.error(f" Erreur nomenclature: {e}")
        for article in articles:
            article["composants"] = []
            article["nb_composants"] = 0
        return articles


def _enrichir_compta_articles(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with their accounting settings from F_ARTCOMPTA."""
    try:
        logger.info(" → Enrichissement comptabilité articles...")

        references = [a["reference"] for a in articles if a["reference"]]
        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        query = f"""
        SELECT
            AR_Ref,
            ACP_Type,
            ACP_Champ,
            ACP_ComptaCPT_CompteG,
            ACP_ComptaCPT_CompteA,
            ACP_ComptaCPT_Taxe1,
            ACP_ComptaCPT_Taxe2,
            ACP_ComptaCPT_Taxe3,
            ACP_ComptaCPT_Date1,
            ACP_ComptaCPT_Date2,
            ACP_ComptaCPT_Date3,
            ACP_ComptaCPT_TaxeAnc1,
            ACP_ComptaCPT_TaxeAnc2,
            ACP_ComptaCPT_TaxeAnc3,
            ACP_TypeFacture,
            cbCreation,
            cbModification
        FROM F_ARTCOMPTA
        WHERE AR_Ref IN ({placeholders})
        ORDER BY AR_Ref, ACP_Type, ACP_Champ
        """

        cursor.execute(query, references)
        rows = cursor.fetchall()

        compta_map = {}
        for row in rows:
            ref = _safe_strip(row[0])
            if not ref:
                continue

            if ref not in compta_map:
                compta_map[ref] = {"vente": [], "achat": [], "stock": []}

            type_compta = int(row[1]) if row[1] else 0
            type_key = {0: "vente", 1: "achat", 2: "stock"}.get(type_compta, "autre")

            compta_entry = {
                "champ": int(row[2]) if row[2] else 0,
                "compte_general": _safe_strip(row[3]),
                "compte_auxiliaire": _safe_strip(row[4]),
                "taxe_1": _safe_strip(row[5]),
                "taxe_2": _safe_strip(row[6]),
                "taxe_3": _safe_strip(row[7]),
                "taxe_date_1": row[8],
                "taxe_date_2": row[9],
                "taxe_date_3": row[10],
                "taxe_anc_1": _safe_strip(row[11]),
                "taxe_anc_2": _safe_strip(row[12]),
                "taxe_anc_3": _safe_strip(row[13]),
                "type_facture": int(row[14]) if row[14] else 0,
                "date_creation": row[15],
                "date_modification": row[16],
            }

            if type_key in compta_map[ref]:
                compta_map[ref][type_key].append(compta_entry)

        for article in articles:
            compta = compta_map.get(
                article["reference"], {"vente": [], "achat": [], "stock": []}
            )
            article["compta_vente"] = compta["vente"]
            article["compta_achat"] = compta["achat"]
            article["compta_stock"] = compta["stock"]

        logger.info(f" {len(compta_map)} articles avec compta spécifique")
        return articles

    except Exception as e:
        logger.error(f" Erreur comptabilité articles: {e}")
        for article in articles:
            article["compta_vente"] = []
            article["compta_achat"] = []
            article["compta_stock"] = []
        return articles


def _enrichir_fournisseurs_multiples(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with all their supplier records from F_ARTFOURNISS."""
    try:
        logger.info(" → Enrichissement fournisseurs multiples...")

        references = [a["reference"] for a in articles if a["reference"]]
        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        query = f"""
        SELECT
            AR_Ref,
            CT_Num,
            AF_RefFourniss,
            AF_PrixAch,
            AF_Unite,
            AF_Conversion,
            AF_DelaiAppro,
            AF_Garantie,
            AF_Colisage,
            AF_QteMini,
            AF_QteMont,
            EG_Champ,
            AF_Principal,
            AF_PrixDev,
            AF_Devise,
            AF_Remise,
            AF_ConvDiv,
            AF_TypeRem,
            AF_CodeBarre,
            AF_PrixAchNouv,
            AF_PrixDevNouv,
            AF_RemiseNouv,
            AF_DateApplication,
            cbCreation,
            cbModification
        FROM F_ARTFOURNISS
        WHERE AR_Ref IN ({placeholders})
        ORDER BY AR_Ref, AF_Principal DESC, CT_Num
        """

        cursor.execute(query, references)
        rows = cursor.fetchall()

        fournisseurs_map = {}
        for row in rows:
            ref = _safe_strip(row[0])
            if not ref:
                continue

            if ref not in fournisseurs_map:
                fournisseurs_map[ref] = []

            fournisseurs_map[ref].append(
                {
                    "fournisseur_num": _safe_strip(row[1]),
                    "ref_fournisseur": _safe_strip(row[2]),
                    "prix_achat": float(row[3]) if row[3] else 0.0,
                    "unite": _safe_strip(row[4]),
                    "conversion": float(row[5]) if row[5] else 0.0,
                    "delai_appro": int(row[6]) if row[6] else 0,
                    "garantie": int(row[7]) if row[7] else 0,
                    "colisage": int(row[8]) if row[8] else 0,
                    "qte_mini": float(row[9]) if row[9] else 0.0,
                    "qte_montant": float(row[10]) if row[10] else 0.0,
                    "enumere_gamme": int(row[11]) if row[11] else 0,
                    "est_principal": bool(row[12]),
                    "prix_devise": float(row[13]) if row[13] else 0.0,
                    "devise": int(row[14]) if row[14] else 0,
                    "remise": float(row[15]) if row[15] else 0.0,
                    "conversion_devise": float(row[16]) if row[16] else 0.0,
                    "type_remise": int(row[17]) if row[17] else 0,
                    "code_barre_fournisseur": _safe_strip(row[18]),
                    "prix_achat_nouveau": float(row[19]) if row[19] else 0.0,
                    "prix_devise_nouveau": float(row[20]) if row[20] else 0.0,
                    "remise_nouvelle": float(row[21]) if row[21] else 0.0,
                    "date_application": row[22],
                    "date_creation": row[23],
                    "date_modification": row[24],
                }
            )

        for article in articles:
            article["fournisseurs"] = fournisseurs_map.get(article["reference"], [])
            article["nb_fournisseurs"] = len(article["fournisseurs"])

        logger.info(f" {len(fournisseurs_map)} articles avec fournisseurs multiples")
        return articles

    except Exception as e:
        logger.error(f" Erreur fournisseurs multiples: {e}")
        for article in articles:
            article["fournisseurs"] = []
            article["nb_fournisseurs"] = 0
        return articles


def _enrichir_depots_details(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich article stock locations with warehouse details from F_DEPOT."""
    try:
        logger.info(" → Enrichissement détails dépôts...")

        query = """
        SELECT
            DE_No,
            DE_Intitule,
            DE_Code,
            DE_Adresse,
            DE_Complement,
            DE_CodePostal,
            DE_Ville,
            DE_Contact,
            DE_Principal,
            DE_CatCompta,
            DE_Region,
            DE_Pays,
            DE_EMail,
            DE_Telephone,
            DE_Telecopie,
            DP_NoDefaut,
            DE_Exclure
        FROM F_DEPOT
        """

        cursor.execute(query)
        rows = cursor.fetchall()

        depots_map = {}
        for row in rows:
            de_no = _safe_strip(row[0])
            if not de_no:
                continue

            depots_map[de_no] = {
                "depot_num": de_no,
                "depot_nom": _safe_strip(row[1]),
                "depot_code": _safe_strip(row[2]),
                "depot_adresse": _safe_strip(row[3]),
                "depot_complement": _safe_strip(row[4]),
                "depot_code_postal": _safe_strip(row[5]),
                "depot_ville": _safe_strip(row[6]),
                "depot_contact": _safe_strip(row[7]),
                "depot_est_principal": bool(row[8]),
                "depot_categorie_compta": int(row[9]) if row[9] else 0,
                "depot_region": _safe_strip(row[10]),
                "depot_pays": _safe_strip(row[11]),
                "depot_email": _safe_strip(row[12]),
                "depot_telephone": _safe_strip(row[13]),
                "depot_fax": _safe_strip(row[14]),
                "depot_emplacement_defaut": _safe_strip(row[15]),
                "depot_exclu": bool(row[16]),
            }

        logger.info(f" → {len(depots_map)} dépôts chargés")

        for article in articles:
            for empl in article.get("emplacements", []):
                depot_num = empl.get("depot")
                if depot_num and depot_num in depots_map:
                    empl.update(depots_map[depot_num])

        logger.info(" Emplacements enrichis avec détails dépôts")
        return articles

    except Exception as e:
        logger.error(f" Erreur détails dépôts: {e}")
        return articles


def _enrichir_emplacements_details(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich article stock locations with bin details from F_DEPOTEMPL."""
    try:
        logger.info(" → Enrichissement détails emplacements...")

        query = """
        SELECT
            DE_No,
            DP_No,
            DP_Code,
            DP_Intitule,
            DP_Zone,
            DP_Type
        FROM F_DEPOTEMPL
        """

        cursor.execute(query)
        rows = cursor.fetchall()

        emplacements_map = {}
        for row in rows:
            de_no = _safe_strip(row[0])
            dp_no = _safe_strip(row[1])

            if not de_no or not dp_no:
                continue

            key = f"{de_no}_{dp_no}"
            emplacements_map[key] = {
                "emplacement_code": _safe_strip(row[2]),
                "emplacement_libelle": _safe_strip(row[3]),
                "emplacement_zone": _safe_strip(row[4]),
                "emplacement_type": int(row[5]) if row[5] else 0,
            }

        logger.info(f" → {len(emplacements_map)} emplacements détaillés chargés")

        for article in articles:
            for empl in article.get("emplacements", []):
                depot = empl.get("depot")
                emplacement = empl.get("emplacement")
                if depot and emplacement:
                    key = f"{depot}_{emplacement}"
                    if key in emplacements_map:
                        empl.update(emplacements_map[key])

        logger.info(" Emplacements enrichis avec détails")
        return articles

    except Exception as e:
        logger.error(f" Erreur détails emplacements: {e}")
        return articles


def _enrichir_gammes_enumeres(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich article gammes with enumerated values from P_GAMME / F_ENUMGAMME."""
    try:
        logger.info(" → Enrichissement énumérés gammes...")

        query_pgamme = "SELECT G_Intitule, G_Type FROM P_GAMME ORDER BY G_Type"
        cursor.execute(query_pgamme)
        pgamme_rows = cursor.fetchall()

        gammes_config = {}
        for idx, row in enumerate(pgamme_rows):
            gammes_config[idx + 1] = {
                "nom": _safe_strip(row[0]),
                "type": int(row[1]) if row[1] else 0,
            }

        logger.info(f" → Configuration gammes: {gammes_config}")

        query_enum = """
        SELECT
            EG_Champ,
            EG_Ligne,
            EG_Enumere,
            EG_BorneSup
        FROM F_ENUMGAMME
        ORDER BY EG_Champ, EG_Ligne
        """

        cursor.execute(query_enum)
        enum_rows = cursor.fetchall()

        enumeres_map = {}
        for row in enum_rows:
            champ = int(row[0]) if row[0] else 0
            enumere = _safe_strip(row[2])

            if not enumere:
                continue

            key = f"{champ}_{enumere}"
            enumeres_map[key] = {
                "ligne": int(row[1]) if row[1] else 0,
                "enumere": enumere,
                "borne_sup": float(row[3]) if row[3] else 0.0,
                "gamme_nom": gammes_config.get(champ, {}).get("nom", f"Gamme {champ}"),
            }

        logger.info(f" → {len(enumeres_map)} énumérés chargés")

        for article in articles:
            for gamme in article.get("gammes", []):
                num_gamme = gamme.get("numero_gamme")
                enumere = gamme.get("enumere")

                if num_gamme and enumere:
                    key = f"{num_gamme}_{enumere}"
                    if key in enumeres_map:
                        gamme.update(enumeres_map[key])

        logger.info(" Gammes enrichies avec énumérés")
        return articles

    except Exception as e:
        logger.error(f" Erreur énumérés gammes: {e}")
        return articles


def _enrichir_references_enumerees(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with their enumerated references from F_ARTENUMREF."""
    try:
        logger.info(" → Enrichissement références énumérées...")

        references = [a["reference"] for a in articles if a["reference"]]
        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        query = f"""
        SELECT
            AR_Ref,
            AG_No1,
            AG_No2,
            AE_Ref,
            AE_PrixAch,
            AE_CodeBarre,
            AE_PrixAchNouv,
            AE_EdiCode,
            AE_Sommeil,
            cbCreation,
            cbModification
        FROM F_ARTENUMREF
        WHERE AR_Ref IN ({placeholders})
        ORDER BY AR_Ref, AG_No1, AG_No2
        """

        cursor.execute(query, references)
        rows = cursor.fetchall()

        refs_enum_map = {}
        for row in rows:
            ref = _safe_strip(row[0])
            if not ref:
                continue

            if ref not in refs_enum_map:
                refs_enum_map[ref] = []

            refs_enum_map[ref].append(
                {
                    "gamme_1": int(row[1]) if row[1] else 0,
                    "gamme_2": int(row[2]) if row[2] else 0,
                    "reference_enumeree": _safe_strip(row[3]),
                    "prix_achat": float(row[4]) if row[4] else 0.0,
                    "code_barre": _safe_strip(row[5]),
                    "prix_achat_nouveau": float(row[6]) if row[6] else 0.0,
                    "edi_code": _safe_strip(row[7]),
                    "en_sommeil": bool(row[8]),
                    "date_creation": row[9],
                    "date_modification": row[10],
                }
            )

        for article in articles:
            article["refs_enumerees"] = refs_enum_map.get(article["reference"], [])
            article["nb_refs_enumerees"] = len(article["refs_enumerees"])

        logger.info(f" {len(refs_enum_map)} articles avec références énumérées")
        return articles

    except Exception as e:
        logger.error(f" Erreur références énumérées: {e}")
        for article in articles:
            article["refs_enumerees"] = []
            article["nb_refs_enumerees"] = 0
        return articles


def _enrichir_medias_articles(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with their attached media from F_ARTICLEMEDIA."""
    try:
        logger.info(" → Enrichissement médias articles...")

        references = [a["reference"] for a in articles if a["reference"]]
        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        query = f"""
        SELECT
            AR_Ref,
            ME_Commentaire,
            ME_Fichier,
            ME_TypeMIME,
            ME_Origine,
            ME_GedId,
            cbCreation,
            cbModification
        FROM F_ARTICLEMEDIA
        WHERE AR_Ref IN ({placeholders})
        ORDER BY AR_Ref, cbCreation
        """

        cursor.execute(query, references)
        rows = cursor.fetchall()

        medias_map = {}
        for row in rows:
            ref = _safe_strip(row[0])
            if not ref:
                continue

            if ref not in medias_map:
                medias_map[ref] = []

            medias_map[ref].append(
                {
                    "commentaire": _safe_strip(row[1]),
                    "fichier": _safe_strip(row[2]),
                    "type_mime": _safe_strip(row[3]),
                    "origine": int(row[4]) if row[4] else 0,
                    "ged_id": _safe_strip(row[5]),
                    "date_creation": row[6],
                    "date_modification": row[7],
                }
            )

        for article in articles:
            article["medias"] = medias_map.get(article["reference"], [])
            article["nb_medias"] = len(article["medias"])

        logger.info(f" {len(medias_map)} articles avec médias")
        return articles

    except Exception as e:
        logger.error(f" Erreur médias: {e}")
        for article in articles:
            article["medias"] = []
            article["nb_medias"] = 0
        return articles


def _enrichir_prix_gammes(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with per-gamme prices from F_ARTPRIX."""
    try:
        logger.info(" → Enrichissement prix par gammes...")

        references = [a["reference"] for a in articles if a["reference"]]
        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        query = f"""
        SELECT
            AR_Ref,
            AG_No1,
            AG_No2,
            AR_PUNet,
            AR_CoutStd,
            cbCreation,
            cbModification
        FROM F_ARTPRIX
        WHERE AR_Ref IN ({placeholders})
        ORDER BY AR_Ref, AG_No1, AG_No2
        """

        cursor.execute(query, references)
        rows = cursor.fetchall()

        prix_gammes_map = {}
        for row in rows:
            ref = _safe_strip(row[0])
            if not ref:
                continue

            if ref not in prix_gammes_map:
                prix_gammes_map[ref] = []

            prix_gammes_map[ref].append(
                {
                    "gamme_1": int(row[1]) if row[1] else 0,
                    "gamme_2": int(row[2]) if row[2] else 0,
                    "prix_net": float(row[3]) if row[3] else 0.0,
                    "cout_standard": float(row[4]) if row[4] else 0.0,
                    "date_creation": row[5],
                    "date_modification": row[6],
                }
            )

        for article in articles:
            article["prix_gammes"] = prix_gammes_map.get(article["reference"], [])
            article["nb_prix_gammes"] = len(article["prix_gammes"])

        logger.info(f" {len(prix_gammes_map)} articles avec prix par gammes")
        return articles

    except Exception as e:
        logger.error(f" Erreur prix gammes: {e}")
        for article in articles:
            article["prix_gammes"] = []
            article["nb_prix_gammes"] = 0
        return articles


def _enrichir_conditionnements(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with packaging quantities from F_ENUMCOND."""
    try:
        logger.info(" → Enrichissement conditionnements...")

        query = """
        SELECT
            EC_Champ,
            EC_Enumere,
            EC_Quantite,
            EC_EdiCode
        FROM F_ENUMCOND
        ORDER BY EC_Champ, EC_Enumere
        """

        cursor.execute(query)
        rows = cursor.fetchall()

        cond_map = {}
        for row in rows:
            champ = int(row[0]) if row[0] else 0
            enumere = _safe_strip(row[1])

            if not enumere:
                continue

            key = f"{champ}_{enumere}"
            cond_map[key] = {
                "champ": champ,
                "enumere": enumere,
                "quantite": float(row[2]) if row[2] else 0.0,
                "edi_code": _safe_strip(row[3]),
            }

        logger.info(f" → {len(cond_map)} conditionnements chargés")

        for article in articles:
            conditionnement = article.get("conditionnement")
            if conditionnement:
                for cond_data in cond_map.values():
                    if cond_data["enumere"] == conditionnement:
                        article["conditionnement_qte"] = cond_data["quantite"]
                        article["conditionnement_edi"] = cond_data["edi_code"]
                        break

        logger.info(" Conditionnements enrichis")
        return articles

    except Exception as e:
        logger.error(f" Erreur conditionnements: {e}")
        return articles


def _mapper_article_depuis_row(row_data: Dict, colonnes_config: Dict) -> Dict:
    """Map a raw article row (dict keyed by SQL column name) to an article dict.

    `colonnes_config` is currently unused and is kept for API compatibility.
    """
    article = {}

    def get_val(sql_col, default=None, convert_type=None):
        val = row_data.get(sql_col, default)
        if val is None:
            return default

        # convert_type is the target type itself (float, int, bool or str), so the
        # comparison must be an identity check on the type object.
        if convert_type is float:
            return float(val) if val not in (None, "") else (default or 0.0)
        elif convert_type is int:
            return int(val) if val not in (None, "") else (default or 0)
        elif convert_type is bool:
            return bool(val) if val not in (None, "") else (default or False)
        elif convert_type is str:
            return _safe_strip(val)

        return val

    article["reference"] = get_val("AR_Ref", convert_type=str)
    article["designation"] = get_val("AR_Design", convert_type=str)
    article["code_ean"] = get_val("AR_CodeBarre", convert_type=str)
    article["code_barre"] = get_val("AR_CodeBarre", convert_type=str)
    article["edi_code"] = get_val("AR_EdiCode", convert_type=str)
    article["raccourci"] = get_val("AR_Raccourci", convert_type=str)

    article["prix_vente"] = get_val("AR_PrixVen", 0.0, float)
    article["prix_achat"] = get_val("AR_PrixAch", 0.0, float)
    article["coef"] = get_val("AR_Coef", 0.0, float)
    article["prix_net"] = get_val("AR_PUNet", 0.0, float)
    article["prix_achat_nouveau"] = get_val("AR_PrixAchNouv", 0.0, float)
    article["coef_nouveau"] = get_val("AR_CoefNouv", 0.0, float)
    article["prix_vente_nouveau"] = get_val("AR_PrixVenNouv", 0.0, float)

    date_app = get_val("AR_DateApplication")
    article["date_application_prix"] = str(date_app) if date_app else None

    article["cout_standard"] = get_val("AR_CoutStd", 0.0, float)

    article["unite_vente"] = get_val("AR_UniteVen", convert_type=str)
    article["unite_poids"] = get_val("AR_UnitePoids", convert_type=str)
    article["poids_net"] = get_val("AR_PoidsNet", 0.0, float)
    article["poids_brut"] = get_val("AR_PoidsBrut", 0.0, float)

    article["gamme_1"] = get_val("AR_Gamme1", convert_type=str)
    article["gamme_2"] = get_val("AR_Gamme2", convert_type=str)

    type_val = get_val("AR_Type", 0, int)
    article["type_article"] = type_val
    article["type_article_libelle"] = _get_type_article_libelle(type_val)
    article["famille_code"] = get_val("FA_CodeFamille", convert_type=str)
    article["nature"] = get_val("AR_Nature", 0, int)
    article["garantie"] = get_val("AR_Garantie", 0, int)
    article["code_fiscal"] = get_val("AR_CodeFiscal", convert_type=str)
    article["pays"] = get_val("AR_Pays", convert_type=str)

    article["fournisseur_principal"] = get_val("CO_No", 0, int)
    article["conditionnement"] = get_val("AR_Condition", convert_type=str)
    article["nb_colis"] = get_val("AR_NbColis", 0, int)
    article["prevision"] = get_val("AR_Prevision", False, bool)

    article["suivi_stock"] = get_val("AR_SuiviStock", False, bool)
    article["nomenclature"] = get_val("AR_Nomencl", False, bool)
    article["qte_composant"] = get_val("AR_QteComp", 0.0, float)
    article["qte_operatoire"] = get_val("AR_QteOperatoire", 0.0, float)

    sommeil = get_val("AR_Sommeil", 0, int)
    article["est_actif"] = sommeil == 0
    article["en_sommeil"] = sommeil == 1
    article["article_substitut"] = get_val("AR_Substitut", convert_type=str)
    article["soumis_escompte"] = get_val("AR_Escompte", False, bool)
    article["delai"] = get_val("AR_Delai", 0, int)

    article["stat_01"] = get_val("AR_Stat01", convert_type=str)
    article["stat_02"] = get_val("AR_Stat02", convert_type=str)
    article["stat_03"] = get_val("AR_Stat03", convert_type=str)
    article["stat_04"] = get_val("AR_Stat04", convert_type=str)
    article["stat_05"] = get_val("AR_Stat05", convert_type=str)
    article["hors_statistique"] = get_val("AR_HorsStat", False, bool)

    article["categorie_1"] = get_val("CL_No1", 0, int)
    article["categorie_2"] = get_val("CL_No2", 0, int)
    article["categorie_3"] = get_val("CL_No3", 0, int)
    article["categorie_4"] = get_val("CL_No4", 0, int)

    date_modif = get_val("AR_DateModif")
    article["date_modification"] = str(date_modif) if date_modif else None

    article["vente_debit"] = get_val("AR_VteDebit", False, bool)
    article["non_imprimable"] = get_val("AR_NotImp", False, bool)
    article["transfere"] = get_val("AR_Transfere", False, bool)
    article["publie"] = get_val("AR_Publie", False, bool)
    article["contremarque"] = get_val("AR_Contremarque", False, bool)
    article["fact_poids"] = get_val("AR_FactPoids", False, bool)
    article["fact_forfait"] = get_val("AR_FactForfait", False, bool)
    article["saisie_variable"] = get_val("AR_SaisieVar", False, bool)
    article["fictif"] = get_val("AR_Fictif", False, bool)
    article["sous_traitance"] = get_val("AR_SousTraitance", False, bool)
    article["criticite"] = get_val("AR_Criticite", 0, int)

    article["reprise_code_defaut"] = get_val("RP_CodeDefaut", convert_type=str)
    article["delai_fabrication"] = get_val("AR_DelaiFabrication", 0, int)
    article["delai_peremption"] = get_val("AR_DelaiPeremption", 0, int)
    article["delai_securite"] = get_val("AR_DelaiSecurite", 0, int)
    article["type_lancement"] = get_val("AR_TypeLancement", 0, int)
    article["cycle"] = get_val("AR_Cycle", 1, int)

    article["photo"] = get_val("AR_Photo", convert_type=str)
    article["langue_1"] = get_val("AR_Langue1", convert_type=str)
    article["langue_2"] = get_val("AR_Langue2", convert_type=str)

    article["frais_01_denomination"] = get_val(
        "AR_Frais01FR_Denomination", convert_type=str
    )
    article["frais_02_denomination"] = get_val(
        "AR_Frais02FR_Denomination", convert_type=str
    )
    article["frais_03_denomination"] = get_val(
        "AR_Frais03FR_Denomination", convert_type=str
    )

    article["marque_commerciale"] = get_val("Marque commerciale", convert_type=str)

    objectif_val = get_val("Objectif / Qtés vendues")
    if objectif_val is not None:
        article["objectif_qtes_vendues"] = (
            str(float(objectif_val)) if objectif_val not in ("", 0, 0.0) else None
        )
    else:
        article["objectif_qtes_vendues"] = None

    pourcentage_val = get_val("Pourcentage teneur en or")
    if pourcentage_val is not None:
        article["pourcentage_or"] = (
            str(float(pourcentage_val)) if pourcentage_val not in ("", 0, 0.0) else None
        )
    else:
        article["pourcentage_or"] = None

    date_com = get_val("1ère commercialisation")
    article["premiere_commercialisation"] = str(date_com) if date_com else None

    article["interdire_commande"] = get_val("AR_InterdireCommande", False, bool)
    article["exclure"] = get_val("AR_Exclure", False, bool)

    # Default values; these are filled in later by the enrichment helpers
    # (_enrichir_stocks_articles, _enrichir_familles_articles, etc.).
    article["stock_reel"] = 0.0
    article["stock_mini"] = 0.0
    article["stock_maxi"] = 0.0
    article["stock_reserve"] = 0.0
    article["stock_commande"] = 0.0
    article["stock_disponible"] = 0.0

    article["famille_libelle"] = None
    article["famille_type"] = None
    article["famille_unite_vente"] = None
    article["famille_coef"] = None
    article["famille_suivi_stock"] = None
    article["famille_garantie"] = None
    article["famille_unite_poids"] = None
    article["famille_delai"] = None
    article["famille_nb_colis"] = None
    article["famille_code_fiscal"] = None
    article["famille_escompte"] = None
    article["famille_centrale"] = None
    article["famille_nature"] = None
    article["famille_hors_stat"] = None
    article["famille_pays"] = None

    article["fournisseur_nom"] = None
    article["tva_code"] = None
    article["tva_taux"] = None

    return article


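# Illustrative note (not part of the original module): _mapper_article_depuis_row
# expects `row_data` keyed by the SQL column names of the article query, including the
# free fields quoted above ("Marque commerciale", "Objectif / Qtés vendues", ...).
# With a DB-API cursor such a dict can be built from cursor.description, e.g.:
#
#     colonnes = [col[0] for col in cursor.description]
#     article = _mapper_article_depuis_row(dict(zip(colonnes, row)), {})
#
# The empty `colonnes_config` passed here is an assumption; the real caller may supply
# a richer configuration.

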
def _enrichir_stocks_articles(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with aggregated stock quantities from F_ARTSTOCK."""
    try:
        logger.info(f" → Enrichissement stocks pour {len(articles)} articles...")

        references = [a["reference"] for a in articles if a["reference"]]

        if not references:
            return articles

        placeholders = ",".join(["?"] * len(references))
        stock_query = f"""
        SELECT
            AR_Ref,
            SUM(ISNULL(AS_QteSto, 0)) as Stock_Total,
            MIN(ISNULL(AS_QteMini, 0)) as Stock_Mini,
            MAX(ISNULL(AS_QteMaxi, 0)) as Stock_Maxi,
            SUM(ISNULL(AS_QteRes, 0)) as Stock_Reserve,
            SUM(ISNULL(AS_QteCom, 0)) as Stock_Commande
        FROM F_ARTSTOCK
        WHERE AR_Ref IN ({placeholders})
        GROUP BY AR_Ref
        """

        cursor.execute(stock_query, references)
        stock_rows = cursor.fetchall()

        stock_map = {}
        for stock_row in stock_rows:
            ref = _safe_strip(stock_row[0])
            if ref:
                stock_map[ref] = {
                    "stock_reel": float(stock_row[1]) if stock_row[1] else 0.0,
                    "stock_mini": float(stock_row[2]) if stock_row[2] else 0.0,
                    "stock_maxi": float(stock_row[3]) if stock_row[3] else 0.0,
                    "stock_reserve": float(stock_row[4]) if stock_row[4] else 0.0,
                    "stock_commande": float(stock_row[5]) if stock_row[5] else 0.0,
                }

        logger.info(f" → {len(stock_map)} articles avec stock trouvés dans F_ARTSTOCK")

        for article in articles:
            if article["reference"] in stock_map:
                stock_data = stock_map[article["reference"]]
                article.update(stock_data)
                article["stock_disponible"] = (
                    article["stock_reel"] - article["stock_reserve"]
                )

        return articles

    except Exception as e:
        logger.error(f" Erreur enrichissement stocks: {e}", exc_info=True)
        return articles


def _enrichir_fournisseurs_articles(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with the name of their main supplier from F_COMPTET."""
    try:
        logger.info(" → Enrichissement fournisseurs...")

        nums_fournisseurs = list(
            set(
                [
                    a["fournisseur_principal"]
                    for a in articles
                    if a.get("fournisseur_principal") and a["fournisseur_principal"] > 0
                ]
            )
        )

        if not nums_fournisseurs:
            logger.warning(" ⚠ Aucun numéro de fournisseur trouvé dans les articles")
            for article in articles:
                article["fournisseur_nom"] = None
            return articles

        logger.info(f" → {len(nums_fournisseurs)} fournisseurs uniques à chercher")
        logger.info(f" → Exemples CO_No : {nums_fournisseurs[:5]}")

        placeholders = ",".join(["?"] * len(nums_fournisseurs))
        fournisseur_query = f"""
        SELECT
            CT_Num,
            CT_Intitule,
            CT_Type
        FROM F_COMPTET
        WHERE CT_Num IN ({placeholders})
        AND CT_Type = 1
        """

        cursor.execute(fournisseur_query, nums_fournisseurs)
        fournisseur_rows = cursor.fetchall()

        logger.info(f" → {len(fournisseur_rows)} fournisseurs trouvés dans F_COMPTET")

        if len(fournisseur_rows) == 0:
            logger.warning(
                f" ⚠ Aucun fournisseur trouvé pour CT_Type=1 et CT_Num IN {nums_fournisseurs[:5]}"
            )
            cursor.execute(
                f"SELECT CT_Num, CT_Type FROM F_COMPTET WHERE CT_Num IN ({placeholders})",
                nums_fournisseurs,
            )
            tous_types = cursor.fetchall()
            if tous_types:
                logger.info(
                    f" → Trouvé {len(tous_types)} comptes (tous types) : {[(r[0], r[1]) for r in tous_types[:5]]}"
                )

        fournisseur_map = {}
        for fourn_row in fournisseur_rows:
            num = int(fourn_row[0])
            nom = _safe_strip(fourn_row[1])
            type_ct = int(fourn_row[2])
            fournisseur_map[num] = nom
            logger.debug(f" → Fournisseur mappé : {num} = {nom} (Type={type_ct})")

        nb_enrichis = 0
        for article in articles:
            num_fourn = article.get("fournisseur_principal")
            if num_fourn and num_fourn in fournisseur_map:
                article["fournisseur_nom"] = fournisseur_map[num_fourn]
                nb_enrichis += 1
            else:
                article["fournisseur_nom"] = None

        logger.info(f" {nb_enrichis} articles enrichis avec nom fournisseur")

        return articles

    except Exception as e:
        logger.error(f" Erreur enrichissement fournisseurs: {e}", exc_info=True)
        for article in articles:
            article["fournisseur_nom"] = None
        return articles


def _enrichir_familles_articles(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with their family information from F_FAMILLE."""
    try:
        logger.info(f" → Enrichissement familles pour {len(articles)} articles...")

        codes_familles_bruts = [
            a.get("famille_code")
            for a in articles
            if a.get("famille_code") not in (None, "", " ")
        ]

        if codes_familles_bruts:
            logger.info(f" → Exemples de codes familles : {codes_familles_bruts[:5]}")

        codes_familles = list(
            set([str(code).strip() for code in codes_familles_bruts if code])
        )

        if not codes_familles:
            logger.warning(" ⚠ Aucun code famille trouvé dans les articles")
            for article in articles:
                _init_champs_famille_vides(article)
            return articles

        logger.info(f" → {len(codes_familles)} codes famille uniques")

        cursor.execute("SELECT TOP 1 * FROM F_FAMILLE")
        colonnes_disponibles = [column[0] for column in cursor.description]

        colonnes_souhaitees = [
            "FA_CodeFamille",
            "FA_Intitule",
            "FA_Type",
            "FA_UniteVen",
            "FA_Coef",
            "FA_SuiviStock",
            "FA_Garantie",
            "FA_UnitePoids",
            "FA_Delai",
            "FA_NbColis",
            "FA_CodeFiscal",
            "FA_Escompte",
            "FA_Central",
            "FA_Nature",
            "FA_HorsStat",
            "FA_Pays",
            "FA_VteDebit",
            "FA_NotImp",
            "FA_Contremarque",
            "FA_FactPoids",
            "FA_FactForfait",
            "FA_Publie",
            "FA_RacineRef",
            "FA_RacineCB",
            "FA_Raccourci",
            "FA_SousTraitance",
            "FA_Fictif",
            "FA_Criticite",
        ]

        colonnes_a_lire = [
            col for col in colonnes_souhaitees if col in colonnes_disponibles
        ]

        if (
            "FA_CodeFamille" not in colonnes_a_lire
            or "FA_Intitule" not in colonnes_a_lire
        ):
            logger.error(" Colonnes essentielles manquantes !")
            return articles

        logger.info(f" → Colonnes disponibles : {len(colonnes_a_lire)}")

        colonnes_str = ", ".join(colonnes_a_lire)
        placeholders = ",".join(["?"] * len(codes_familles))

        famille_query = f"""
        SELECT {colonnes_str}
        FROM F_FAMILLE
        WHERE FA_CodeFamille IN ({placeholders})
        """

        cursor.execute(famille_query, codes_familles)
        famille_rows = cursor.fetchall()

        logger.info(f" → {len(famille_rows)} familles trouvées")

        famille_map = {}
        for fam_row in famille_rows:
            famille_data = {}
            for idx, col in enumerate(colonnes_a_lire):
                famille_data[col] = fam_row[idx]

            code = _safe_strip(famille_data.get("FA_CodeFamille"))
            if not code:
                continue

            famille_map[code] = {
                "famille_libelle": _safe_strip(famille_data.get("FA_Intitule")),
                "famille_type": int(famille_data.get("FA_Type", 0) or 0),
                "famille_unite_vente": _safe_strip(famille_data.get("FA_UniteVen")),
                "famille_coef": float(famille_data.get("FA_Coef", 0) or 0),
                "famille_suivi_stock": bool(famille_data.get("FA_SuiviStock", 0)),
                "famille_garantie": int(famille_data.get("FA_Garantie", 0) or 0),
                "famille_unite_poids": _safe_strip(famille_data.get("FA_UnitePoids")),
                "famille_delai": int(famille_data.get("FA_Delai", 0) or 0),
                "famille_nb_colis": int(famille_data.get("FA_NbColis", 0) or 0),
                "famille_code_fiscal": _safe_strip(famille_data.get("FA_CodeFiscal")),
                "famille_escompte": bool(famille_data.get("FA_Escompte", 0)),
                "famille_centrale": bool(famille_data.get("FA_Central", 0)),
                "famille_nature": int(famille_data.get("FA_Nature", 0) or 0),
                "famille_hors_stat": bool(famille_data.get("FA_HorsStat", 0)),
                "famille_pays": _safe_strip(famille_data.get("FA_Pays")),
            }

        logger.info(f" → {len(famille_map)} familles mappées")

        nb_enrichis = 0
        for article in articles:
            code_fam = str(article.get("famille_code", "")).strip()

            if code_fam and code_fam in famille_map:
                article.update(famille_map[code_fam])
                nb_enrichis += 1
            else:
                _init_champs_famille_vides(article)

        logger.info(f" {nb_enrichis} articles enrichis avec infos famille")

        return articles

    except Exception as e:
        logger.error(f" Erreur enrichissement familles: {e}", exc_info=True)
        for article in articles:
            _init_champs_famille_vides(article)
        return articles


def _init_champs_famille_vides(article: Dict):
    """Reset all famille_* fields of an article to None."""
    article["famille_libelle"] = None
    article["famille_type"] = None
    article["famille_unite_vente"] = None
    article["famille_coef"] = None
    article["famille_suivi_stock"] = None
    article["famille_garantie"] = None
    article["famille_unite_poids"] = None
    article["famille_delai"] = None
    article["famille_nb_colis"] = None
    article["famille_code_fiscal"] = None
    article["famille_escompte"] = None
    article["famille_centrale"] = None
    article["famille_nature"] = None
    article["famille_hors_stat"] = None
    article["famille_pays"] = None


def _enrichir_tva_articles(articles: List[Dict], cursor) -> List[Dict]:
    """Enrich articles with their VAT rate from F_TAXE."""
    try:
        logger.info(" → Enrichissement TVA...")

        codes_tva = list(
            set([a["code_fiscal"] for a in articles if a.get("code_fiscal")])
        )

        if not codes_tva:
            for article in articles:
                article["tva_code"] = None
                article["tva_taux"] = None
            return articles

        placeholders = ",".join(["?"] * len(codes_tva))
        tva_query = f"""
        SELECT
            TA_Code,
            TA_Taux
        FROM F_TAXE
        WHERE TA_Code IN ({placeholders})
        """

        cursor.execute(tva_query, codes_tva)
        tva_rows = cursor.fetchall()

        tva_map = {}
        for tva_row in tva_rows:
            code = _safe_strip(tva_row[0])
            tva_map[code] = float(tva_row[1]) if tva_row[1] else 0.0

        logger.info(f" → {len(tva_map)} codes TVA trouvés")

        for article in articles:
            code_tva = article.get("code_fiscal")
            if code_tva and code_tva in tva_map:
                article["tva_code"] = code_tva
                article["tva_taux"] = tva_map[code_tva]
            else:
                article["tva_code"] = code_tva
                article["tva_taux"] = None

        return articles

    except Exception as e:
        logger.error(f" Erreur enrichissement TVA: {e}", exc_info=True)
        for article in articles:
            article["tva_code"] = article.get("code_fiscal")
            article["tva_taux"] = None
        return articles


def _get_type_article_libelle(type_val: int) -> str:
    """Return the human-readable label for an article type code."""
    types = {0: "Article", 1: "Prestation", 2: "Divers / Frais", 3: "Nomenclature"}
    return types.get(type_val, f"Type {type_val}")


def _cast_article(persist_obj):
    """Cast a COM persistent object to the IBOArticle3 interface and read it.

    Returns None if the cast or the read fails.
    """
    try:
        obj = win32com.client.CastTo(persist_obj, "IBOArticle3")
        obj.Read()
        return obj
    except Exception:
        return None


__all__ = [
    "_enrichir_stock_emplacements",
    "_enrichir_gammes_articles",
    "_enrichir_tarifs_clients",
    "_enrichir_nomenclature",
    "_enrichir_compta_articles",
    "_enrichir_fournisseurs_multiples",
    "_enrichir_depots_details",
    "_enrichir_emplacements_details",
    "_enrichir_gammes_enumeres",
    "_enrichir_references_enumerees",
    "_enrichir_medias_articles",
    "_enrichir_prix_gammes",
    "_enrichir_conditionnements",
    "_mapper_article_depuis_row",
    "_enrichir_stocks_articles",
    "_enrichir_fournisseurs_articles",
    "_enrichir_familles_articles",
    "_init_champs_famille_vides",
    "_enrichir_tva_articles",
    "_cast_article",
]


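# Minimal end-to-end usage sketch (assumptions flagged inline): the enrichment helpers
# are designed to be chained on the list of article dicts, each call adding keys and
# returning the same list. The connection string, the SELECT and the subset of helpers
# shown below are illustrative only; this module does not open connections itself.
if __name__ == "__main__":
    import pyodbc  # assumed ODBC driver

    conn = pyodbc.connect("DSN=SAGE100;UID=user;PWD=password")  # hypothetical DSN
    cursor = conn.cursor()

    cursor.execute("SELECT TOP 10 * FROM F_ARTICLE")  # assumed source table/query
    colonnes = [col[0] for col in cursor.description]
    articles = [
        _mapper_article_depuis_row(dict(zip(colonnes, row)), {})
        for row in cursor.fetchall()
    ]

    articles = _enrichir_stocks_articles(articles, cursor)
    articles = _enrichir_stock_emplacements(articles, cursor)
    articles = _enrichir_depots_details(articles, cursor)
    articles = _enrichir_familles_articles(articles, cursor)
    articles = _enrichir_tva_articles(articles, cursor)

    logger.info("Exemple: %d articles enrichis", len(articles))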