
Fix the lowercase problem with pypyodbc; hook logging into the
exception handling system; CustomDb.read now returns namedtuples

olivier.massot, 8 years ago
commit dc2891e767

6 changed files, with 49 additions and 27 deletions
  1. core/db.py (+8 -4)
  2. core/logconf.py (+15 -1)
  3. core/pde.py (+6 -2)
  4. gf2analytique.py (+3 -8)
  5. gf2factures.py (+1 -1)
  6. pda2suiviactivite.py (+16 -11)

+ 8 - 4
core/db.py

@@ -1,15 +1,17 @@
 '''
     Convenient access to various databases
 '''
+from collections import namedtuple
 from datetime import datetime
 import logging
+import pypyodbc
 
-from pypyodbc import Connection
+pypyodbc.lowercase = False
 
 
 logger = logging.getLogger("database")
 
-class CustomDb(Connection):
+class CustomDb(pypyodbc.Connection):
     """ Connexion to a database """
     _cache = {}
     default_name = ""
@@ -35,15 +37,17 @@ class CustomDb(Connection):
         """ yield rows as NamedTupleRow """
         cursor = self.execute(sql)
         row = cursor.fetchone()
+        rowmodel = namedtuple("Row", [column[0] for column in cursor.description])
         while row:
-            yield dict(zip([column[0] for column in cursor.description], row))
+            yield rowmodel(*row)
             row = cursor.fetchone()
         cursor.close()
 
     def read_all(self, sql, *args):
         """ return the selection as a list of dictionnaries """
         cursor = self.execute(sql)
-        data = [dict(zip([column[0] for column in cursor.description], row)) for row in cursor.fetchall()]
+        rowmodel = namedtuple("Row", [column[0] for column in cursor.description])
+        data = [rowmodel(*row) for row in cursor.fetchall()]
         cursor.close()
         return data
 

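Setting pypyodbc.lowercase = False at import time preserves the database's column casing, which is what makes the namedtuple fields addressable as row.txtMateriel rather than row.txtmateriel. A minimal sketch of how the new read generator might be consumed; the connect string and table are hypothetical, and CustomDb is assumed to accept the same ODBC connect string as pypyodbc.Connection:

    from core.db import CustomDb

    # Hypothetical DSN, for illustration only
    db = CustomDb("DSN=pde")
    for row in db.read("SELECT intlMaterielID, txtMateriel FROM tbl_Materiel"):
        # Column casing is preserved because pypyodbc.lowercase is False
        print(row.intlMaterielID, row.txtMateriel)

One caveat: namedtuple() rejects column names that are not valid Python identifiers, so passing rename=True when building rowmodel would be a possible hardening if such columns exist.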
+ 15 - 1
core/logconf.py

@@ -5,10 +5,17 @@ Created on 6 July 2017
 '''
 from datetime import datetime
 import logging.config
+import sys
+import traceback
 
 from path import Path
 import yaml
 
+from core.pde import DEFAULT_LOG_DIR
+
+
+SYS_EXCEPT_HOOK = sys.excepthook
+
 def start(name="main", level=0, filename=""):
     # load the logging configuration from the 'logging.yaml' file
     configfile = Path(__file__).parent
@@ -19,8 +26,15 @@ def start(name="main", level=0, filename=""):
         conf["loggers"][name]["level"] = level
 
     if not filename:
-        filename = r'log\{}_{:%Y%m%d_%H%M}.log'.format(name, datetime.now())
+        filename = DEFAULT_LOG_DIR / r'{}_{:%Y%m%d_%H%M}.log'.format(name, datetime.now())
     conf["handlers"]["file"]["filename"] = filename
 
     logging.config.dictConfig(conf)
 
+    logger = logging.getLogger(name)
+    def _excepthook(typ, value, trace):
+        """ Remplace la gestion d'erreur standard, pour logger aussi les erreurs non gérées """
+        logger.error("{}\n{}\n{}".format(typ.__name__, value, ''.join(traceback.format_tb(trace))))
+        SYS_EXCEPT_HOOK(typ, value, trace)
+    sys.excepthook = _excepthook
+

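The new hook chains to the saved SYS_EXCEPT_HOOK, so the standard traceback is still printed after the error has been written to the log file; capturing sys.excepthook once at module import also keeps repeated start() calls from chaining the hook onto itself. A minimal sketch of the effect, assuming a "demo" logger is configured in logging.yaml:

    import logging

    from core import logconf

    logconf.start("demo", logging.INFO)

    # An uncaught exception now lands in log/demo_<timestamp>.log
    # before the usual traceback is printed to stderr
    raise RuntimeError("boom")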
+ 6 - 2
core/pde.py

@@ -14,14 +14,18 @@ UID = "olivier"
 PWD = "massot"
 
 # Dirs
-WRK = Path(r".\work")
+MAIN = Path(__file__).parent.parent.abspath()
+WRK = MAIN / "work"
+WRK.mkdir_p()
 
 def mk_workdir(name):
-    WRK.mkdir_p()
     workdir = WRK / name
     workdir.mkdir_p()
     return workdir
 
+DEFAULT_LOG_DIR = MAIN / "log"
+DEFAULT_LOG_DIR.mkdir_p()
+
 DB_DIRPATH = Path(r"\\h2o\local\4-transversal\BDD\mdb")
 # DB_DIRPATH = Path(r"C:\wrktmp\mdb")
 

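Anchoring MAIN on Path(__file__) makes the work and log directories relative to the package itself rather than to the current working directory, so the scripts behave the same wherever they are launched from. Note that mkdir_p() now runs at import time, so merely importing core.pde creates both directories. A small sketch of the same pattern, assuming the path package's API:

    from path import Path

    MAIN = Path(__file__).parent.parent.abspath()  # repo root, regardless of cwd
    logdir = MAIN / "log"
    logdir.mkdir_p()  # idempotent: no error if the directory already exists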
+ 3 - 8
gf2analytique.py

@@ -22,8 +22,6 @@ from core import logconf
 from core.pde import AnalytiqueDb, mk_workdir
 from core.webservice import GfWebservice
 
-# TODO: automatic mail sending
-
 logger = logging.getLogger("gf2analytique")
 logconf.start("gf2analytique", logging.INFO)
 
@@ -76,7 +74,6 @@ class Facture():
         facture = cls()
         for key, value in wsdata.items():
             setattr(facture, key, value)
-#             facture.__dict__[key] = value
         facture.autocorrection()
         return facture
 
@@ -125,7 +122,7 @@ class Facture():
                                             WHERE txtMateriel='{codeCout}' or txtMateriel='ZZ {codeCout}'
                                             """.format(codeCout=self.codeCout))
             if row:
-                self.codeCout = row["txtmateriel"]
+                self.codeCout = row.txtMateriel
 
     def is_valid(self):
         """ controle la validité des données d'une facture """
@@ -192,9 +189,7 @@ class Facture():
                            self.numLigneMandat,
                            self.codeCout)
 
-#         logger.debug("> %s", sql)
-        factureId = analytique_db.first(sql)["dblfactureid"]
-#         logger.debug("retrieve dblFactureId: %s", factureId)
+        factureId = analytique_db.first(sql).dblFactureId
         return factureId
 
     def _insert_factures(self):
@@ -264,7 +259,7 @@ class Facture():
             raise InvalidAxe()
 
         materiel = analytique_db.first("SELECT intlMaterielID FROM tbl_Materiel WHERE [txtMateriel]='{}'".format(self.codeCout))
-        materielId = materiel["intlmaterielid"] if materiel else '859'
+        materielId = materiel.intlMaterielID if materiel else '859'
         logger.debug("retrieve intlMaterielID: %s", materielId)
 
         sql = """INSERT INTO tbl_Facture_Engin ( intlMaterielID, txtMateriel, dblFactureId, strLibelle, dblMontant, strType )

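With dict rows, pypyodbc forced every key to lowercase; with lowercase disabled and namedtuple rows, attribute access must match the database column casing exactly (row.txtmateriel would now raise AttributeError). A minimal sketch of the guarded pattern used above, with a hypothetical filter value; first() is assumed to return None on an empty result:

    row = analytique_db.first(
        "SELECT txtMateriel FROM tbl_Materiel WHERE txtMateriel='GRUE'")
    if row:
        code = row.txtMateriel  # exact casing, as stored in the schema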
+ 1 - 1
gf2factures.py

@@ -38,7 +38,7 @@ for titre in ws:
         logger.warning("(!) no record found where '{}'".format(strfilter))
         continue
 
-    memobs = record.get("memobsinterne", "")
+    memobs = record.memobsinterne if record.memobsinterne else ""
 
     if "Titre n° : {}".format(titre["titreId"]) in memobs:
         # already imported

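namedtuples have no .get(), so the dict-style default is replaced by an explicit truth test, which also covers SQL NULL (returned as None). An equivalent, slightly more compact spelling:

    # "or" maps both None and "" to "", same as the conditional above
    memobs = record.memobsinterne or ""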
+ 16 - 11
pda2suiviactivite.py

@@ -29,12 +29,11 @@ target_dir = Path(r"\\h2o\LOCAL\4-transversal\BDD\mdb\PDA\Fichiers_PDA")
 
 # Mapping: (table name, (id field, name field), node name, target file name)
 mapping = [
-            ("pdaEngin", {"strEnginId": "Id", "strEnginLibelleLong": "Nom"}, "Attelage", "attelages.xml"),
-            ("pdaorigine", {"lngorigine": "Id", "strorigine": "Nom"}, "Depart", "depart.xml"),
-            ("pdaEquip", {"strEquipesId": "Id", "strEquipesLibelle": "Nom"}, "Equipe", "equipes.xml"),
-            ("pdalocalisation", {"lngTiersId": "Id", "strTiersMnemo": "Nom"}, "Localisation", "localisation.xml"),
-            ("pdaNatInterv", {"strCategorieInterventioinId": "Id", "strCategorieInterventioinLibelle": "Nom"}, "NatureRealisation", "naturesinterventions.xml"),
-            ("pdaHeures", {"annee": "annee", "mois": "mois", "heures": "heures"}, "pdaHeures", "heures.xml"),
+            ("pdaEngin", ("strEnginId", "strEnginLibelleLong"), "Attelage", "attelages.xml"),
+            ("pdaorigine", ("lngorigine", "strorigine"), "Depart", "depart.xml"),
+            ("pdaEquip", ("strEquipesId", "strEquipesLibelle"), "Equipe", "equipes.xml"),
+            ("pdalocalisation", ("lngTiersId", "strTiersMnemo"), "Localisation", "localisation.xml"),
+            ("pdaNatInterv", ("strCategorieInterventioinId", "strCategorieInterventioinLibelle"), "NatureRealisation", "naturesinterventions.xml")
             ]
 
 for tablename, fieldmapping, eltname, filename in mapping:
@@ -43,12 +42,15 @@ for tablename, fieldmapping, eltname, filename in mapping:
                                         'xsi': "http://www.w3.org/2001/XMLSchema-instance"})
     nodes = []
 
-    sql = "SELECT {} FROM {}".format(
-                                    ",".join([("{} AS {}".format(name, alias) if name != alias else name) for name, alias in fieldmapping.items()]),
-                                    tablename
-                                    )
+    fieldid, fieldnom = fieldmapping
 
-    data = pda_db.read_all(sql)
+    sql = "SELECT {id} as Id, {nom} as Nom FROM {tbl} ORDER BY {nom}".format(
+                                                                            id=fieldid,
+                                                                            nom=fieldnom,
+                                                                            tbl=tablename
+                                                                            )
+
+    data = [record._asdict() for record in pda_db.read_all(sql)]
 
     for record in data:
         node = maker.__call__(eltname, *[maker.__call__(field, str(value)) for field, value in record.items()])
@@ -56,6 +58,9 @@ for tablename, fieldmapping, eltname, filename in mapping:
 
     root = maker.__call__("ArrayOf" + eltname, *nodes)
 
+    if not filename.endswith(".xml"):
+        filename += ".xml"
+
     with open(target_dir / filename, "wb") as f:
         f.write(etree.tostring(root, xml_declaration=True, encoding='utf-8', pretty_print=True))
     logger.info("> Exporte: {}".format(filename))
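Each namedtuple record is turned back into a dict with _asdict() so the export loop can iterate field/value pairs, and since the SQL now aliases every column to Id and Nom, each table produces the same two child elements. A self-contained sketch of the ElementMaker pattern with hard-coded sample data (the xsi namespace map used by the script is omitted here):

    from lxml import etree
    from lxml.builder import ElementMaker

    maker = ElementMaker()
    record = {"Id": "E01", "Nom": "Tracteur"}  # sample row, for illustration
    node = maker("Attelage", *[maker(field, str(value))
                               for field, value in record.items()])
    root = maker("ArrayOfAttelage", node)
    print(etree.tostring(root, pretty_print=True).decode())
    # <ArrayOfAttelage>
    #   <Attelage>
    #     <Id>E01</Id>
    #     <Nom>Tracteur</Nom>
    #   </Attelage>
    # </ArrayOfAttelage>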