
optimize and document

Olivier Massot 5 years ago
parent
commit
35a7bea8c6

+ 1 - 0
.gitignore

@@ -1 +1,2 @@
 /.project
+/log/clonedb.log

+ 135 - 155
clonedb.py

@@ -17,9 +17,7 @@ Options:
 @author: olivier.massot, 05-2020
 """
 import logging
-import os
 import re
-import threading
 from subprocess import Popen, PIPE, CalledProcessError
 import sys
 
@@ -27,12 +25,14 @@ import pymysql
 import yaml
 from docopt import docopt
 from path import Path
-from sshtunnel import SSHTunnelForwarder
 
-import logging_
-from locker import Lockfile
+from core import logging_
+from core.locker import Lockfile
+from core.pipe_handler import PipeHandler
+from core.ssh import SshTunnel
+from core.prompt import ask_confirmation
 
-__VERSION__ = "0.1"
+__VERSION__ = "0.2"
 
 HERE = Path(__file__).parent
 
@@ -44,115 +44,72 @@ logging_.start("clonedb", replace=True)
 sys.stdout = open(sys.stdout.fileno(), mode='w', encoding='utf8', buffering=1)
 
 # Options
-VERBOSE_DUMP = True
+SHOW_PROGRESSION = True
+MAX_ALLOWED_PACKET = 1073741824
+
+LOG_MYSQL_QUERIES = False
+LOG_MYSQLDUMP_OUTPUT = False
 
 
 # Utilities
 
 def load_settings():
-    # Load settings
+    """ Load the settings from the 'settings.yml' file
+    """
     with open(HERE / 'settings.yml', 'r') as f:
-        settings = yaml.load(f, Loader=yaml.FullLoader)
-    return settings
-
-
-class StderrPipeHandler(threading.Thread):
-    """ Handle the stderr output from a Popen object """
-
-    def __init__(self, logger_name, default_level=logging.INFO):
-        """Setup the object with a logger and a loglevel
-        and start the thread
-        """
-        threading.Thread.__init__(self)
-        self.logger = logging.getLogger(logger_name)
-        self.daemon = False
-        self.default_level = default_level
-        self.fdRead, self.fdWrite = os.pipe()
-        self.pipeReader = os.fdopen(self.fdRead)
-        self.start()
-
-    def fileno(self):
-        """Return the write file descriptor of the pipe
-        """
-        return self.fdWrite
-
-    def process(self, line):
-        self.logger.log(self.default_level, line.strip('\n'))
-
-    def run(self):
-        """Run the thread, logging everything.
-        """
-        for line in iter(self.pipeReader.readline, ''):
-            self.process(line)
-
-        self.pipeReader.close()
-
-    def close(self):
-        """Close the write end of the pipe.
-        """
-        os.close(self.fdWrite)
+        return yaml.load(f, Loader=yaml.FullLoader)
 
 
-class StderrMysqlDumpHandler(StderrPipeHandler):
-    """ Handle  and process the stderr output from a mysqldump process """
+class MysqldumpHandler(PipeHandler):
+    """ Handle and process the stdout / stderr output from a mysqldump process
+    """
     _rx_newtable = re.compile(r'Retrieving table structure for table (\w+)')
 
     def process(self, line):
+        """ Process the last line that was read
+        """
         line = line.strip('\n')
-        match = self._rx_newtable.search(line)
-        if match:
-            print('.', end="", flush=True)
-            # logger.info('** %s', match.group(1))
-        logger.debug(line)
+        if SHOW_PROGRESSION:
+            match = self._rx_newtable.search(line)
+            if match:
+                if not LOG_MYSQLDUMP_OUTPUT:
+                    logger.debug('... %s', match.group(1))
+                print('.', end="", flush=True)
+        if LOG_MYSQLDUMP_OUTPUT:
+            logger.debug(line)
 
     def close(self):
+        """ Close the write end of the pipe.
+        """
         print('', flush=True)
         super().close()
 
 
-class SshTunnel:
-    LOCAL_ADRESS = ('127.0.0.1', 6000)
-
-    def __init__(self, host, remote_port, port=22, user="root", key_file="~/.ssh/id_rsa"):
-        self.host = host
-        self.remote_port = remote_port
-        self.port = int(port)
-        self.user = user
-        self.key_file = key_file
-
-        self._tunnel = SSHTunnelForwarder(
-            (self.host, self.port),
-            ssh_username=self.user,
-            ssh_pkey=self.key_file,
-            local_bind_address=self.LOCAL_ADRESS,
-            remote_bind_address=('127.0.0.1', self.remote_port)
-        )
-
-    def start(self):
-        self._tunnel.start()
-        if not self._tunnel.tunnel_is_up[self.LOCAL_ADRESS]:
-            raise RuntimeError('Unable to open the SSH Tunnel')
-
-    def stop(self):
-        self._tunnel.stop()
-
-
 class MySqlServer:
+    """ A server hosting a Mysql instance
+    """
+
     def __init__(self, host, port, username, password, description="", ssh_tunnel=None):
         self.host = host
         self.port = port
         self.username = username
         self.password = password
-        self.description = description or "no description"
+        self.description = description[:30]
         self.ssh_tunnel = ssh_tunnel
 
         self.cnn = None
         self.active_db = ""
 
     def __repr__(self):
-        return f"{self.host}:{self.port} as {self.username} ({self.description})"
+        s = f"{self.host}:{self.port} as {self.username}"
+        if self.description:
+            s = f"{self.description} ({s})"
+        return s
 
     def connect(self, autocommit=True):
+        """ Establish the connection to the Mysql server
+        @see https://pymysql.readthedocs.io/en/latest/modules/connections.html
+        """
         if self.ssh_tunnel:
             self.ssh_tunnel.start()
             host, port = self.ssh_tunnel.LOCAL_ADRESS
@@ -163,42 +120,45 @@ class MySqlServer:
                                    port=port,
                                    user=self.username,
                                    password=self.password,
-                                   autocommit=autocommit)
+                                   autocommit=autocommit,
+                                   max_allowed_packet=MAX_ALLOWED_PACKET,
+                                   )
 
         if not self.cnn.open:
             raise RuntimeError(f'Unable to connect to {self}')
 
         return self.cnn
 
-    def set_constraints(self, active):
-        self.exec_query(f"SET FOREIGN_KEY_CHECKS={int(bool(active))};")
-
-    def warmup(self):
-        self.set_constraints(True)
-        self.exec_query(""" SET MAX_ALLOWED_PACKETS=1073741824;
-                            SET CONNECT_TIMEOUT=28800;
-                            SET WAIT_TIMEOUT=28800;
-                            SET INTERACTIVE_TIMEOUT=28800;
-                            """)
-
     def set_active_db(self, dbname):
+        """ set the active database
+        """
         self.cnn.select_db(dbname)
         self.active_db = dbname
 
     def close(self):
+        """ Close the connection to the database
+        and the SSH tunnel if one is open
+        """
         if self.cnn:
             self.cnn.close()
         if self.ssh_tunnel:
             self.ssh_tunnel.stop()
+        logger.debug(f'{self} - connection closed')
 
     def exec_query(self, sql):
-        """ Execute the sql code and return the cursor """
+        """ Execute the sql code and return the resulting cursor
+        @see https://pymysql.readthedocs.io/en/latest/modules/cursors.html
+        """
+        self.cnn.ping(reconnect=True)
         cursor = self.cnn.cursor()
-        logger.debug(sql)
+        if LOG_MYSQL_QUERIES:
+            logger.debug(sql)
         cursor.execute(sql)
         return cursor
 
     def db_exists(self, dbname):
+        """ Return True if the database exists
+        """
         cursor = self.exec_query(f"""SELECT SCHEMA_NAME
                            FROM INFORMATION_SCHEMA.SCHEMATA
                            WHERE SCHEMA_NAME = '{dbname}'""")
@@ -206,51 +166,67 @@ class MySqlServer:
         return row is not None
 
     def list_tables(self, dbname=""):
-        """ Return a list of tables (not views!)
-        for either the current database or the given one"""
+        """ Return a list of tables (but not views!)
+        for either the currently selected database,
+        or the one given as a parameter"""
         cursor = self.exec_query(
             "SHOW FULL TABLES{} WHERE Table_type='BASE TABLE';".format(f" FROM {dbname}" if dbname else ""))
         return (row[0] for row in cursor.fetchall())
 
     def list_views(self, dbname=""):
         """ Return a list of views
-        for either the current database or the given one"""
+        for either the currently selected database,
+        or the one given as a parameter"""
         cursor = self.exec_query(
             "SHOW FULL TABLES{} WHERE Table_type='VIEW';".format(f" FROM {dbname}" if dbname else ""))
         return (row[0] for row in cursor.fetchall())
 
-    def get_view_definition(self, view_name):
+    def get_view_definition(self, view_name, set_definer=""):
+        """ Return the SQL create statement for the view
+        If 'set_definer' is not empty, the username in the 'DEFINER' clause
+        of the create statement is replaced by the given one
+        """
         cursor = self.exec_query(f"show create view {view_name}")
-        return cursor.fetchone()[1]
+        definition = cursor.fetchone()[1]
+        if set_definer:
+            # force a new definer
+            definition = re.sub(r'DEFINER=`\w+`@`[\w\-.]+`',
+                                f"DEFINER=`{set_definer}`@`\1`",
+                                definition)
+        return definition
 
 
+# Behaviors for table cloning
 IGNORE = 0
 STRUCTURE_ONLY = 1
-STRUCTURE_AND_DATA = 2  # default
+STRUCTURE_AND_DATA = 2  # -> default behavior
 
 
 class CloningOperation:
+    """ A database cloning operation between two Mysql servers
+    """
+
     def __init__(self, dbname, from_server, to_server, is_default=True, ignore_tables=None, structure_only=None,
-                 filter_tables=None, ignore_views=None, ignore_procedures=None, ignore_functions=None):
+                 filter_tables=None, ignore_views=None, compress=True):
         self.dbname = dbname
         self.from_server = from_server
         self.to_server = to_server
-        self.is_default = is_default
 
+        self.is_default = is_default
+        self.compress = compress
         self.ignore_tables = [re.compile(r) for r in ignore_tables] if ignore_tables else []
         self.structure_only = [re.compile(r) for r in structure_only] if structure_only else []
         self.filter_tables = [re.compile(r) for r in filter_tables] if filter_tables else []
         self.ignore_views = [re.compile(r) for r in ignore_views] if ignore_views else []
-        self.ignore_procedures = [re.compile(r) for r in ignore_procedures] if ignore_procedures else []
-        self.ignore_functions = [re.compile(r) for r in ignore_functions] if ignore_functions else []
-
-        self.table_count = None
 
     def __repr__(self):
-        return f"Clone {self.dbname} from {self.from_server} to {self.to_server} [ignored tables: {len(self.ignore_tables)}]"
+        return f"Cloning {self.dbname} from {self.from_server} to {self.to_server}"
 
     def _build_dump_command(self, dump_options=None, tables=None):
-        # > https://dev.mysql.com/doc/refman/5.7/en/mysqldump.html#mysqldump-option-summary
+        """ Build a mysqldump command line and return it as a
+        ready-to-consume list for Popen
+        @see https://dev.mysql.com/doc/refman/5.7/en/mysqldump.html#mysqldump-option-summary
+        """
         tables = tables or []
         dump_options = dump_options or []
 
@@ -258,10 +234,15 @@ class CloningOperation:
                     "--single-transaction",
                     "-u", self.from_server.username,
                     f"--password={self.from_server.password}",
-                    # "--compress"
+                    "--skip-add-drop-table",
+                    "--skip-add-locks",
+                    "--skip-comments",
                     ]
 
-        if VERBOSE_DUMP:
+        if self.compress:
+            base_cmd.append("--compress")
+
+        if SHOW_PROGRESSION:
             base_cmd.append("--verbose")
 
         if self.from_server.ssh_tunnel:
@@ -272,27 +253,43 @@ class CloningOperation:
         return base_cmd + dump_options + [self.dbname] + tables
 
     def _build_restore_command(self):
-        return ["mysql",
-                "-h", self.to_server.host,
-                "-P", str(self.to_server.port),
-                "-u", self.to_server.username,
-                f"--password={self.to_server.password}",
-                "-D", self.dbname
-                ]
-
-    def _run_piped_processes(self, dump_cmd, restore_cmd):
-        logger.debug("Run: %s | %s", " ".join(map(str, dump_cmd)), " ".join(map(str, restore_cmd)))
-
-        logger.debug("Dump command: %s", " ".join(map(str, dump_cmd)))
-        logger.debug("Piped into: %s", " ".join(map(str, restore_cmd)))
+        """ Build a mysql command line and return it as a
+        ready-to-consume list for Popen
+        @see https://dev.mysql.com/doc/refman/8.0/en/mysql-command-options.html#option_mysql_quick
+        """
+        cmd = ["mysql",
+               "-h", self.to_server.host,
+               "-P", str(self.to_server.port),
+               "-u", self.to_server.username,
+               f"--password={self.to_server.password}",
+               f"--max-allowed-packet={MAX_ALLOWED_PACKET}",
+               "--reconnect",
+               "--quick",
+               "-D", self.dbname
+               ]
+
+        if LOG_MYSQL_QUERIES:
+            cmd.append("--verbose")
+
+        if self.compress:
+            cmd.append("--compress")
+        return cmd
+
+    @staticmethod
+    def _run_piped_processes(dump_cmd, restore_cmd):
+        """ Run the dump and the restore commands by piping them
+        The output of the mysqldump process is piped into the input of the mysql one
+        """
+        logger.debug(">>> Dump command: %s", " ".join(map(str, dump_cmd)))
+        logger.debug(">>> Piped into: %s", " ".join(map(str, restore_cmd)))
 
-        stderr_handler = StderrMysqlDumpHandler(logger.name)
+        pipe_handler = MysqldumpHandler(logger.name)
 
         try:
             # noinspection PyTypeChecker
-            with Popen(restore_cmd, stdin=PIPE, stdout=PIPE, stderr=stderr_handler) as mysql:
+            with Popen(restore_cmd, stdin=PIPE, stdout=pipe_handler, stderr=pipe_handler) as mysql:
                 # noinspection PyTypeChecker
-                with Popen(dump_cmd, stdout=PIPE, stderr=stderr_handler) as mysqldump:
+                with Popen(dump_cmd, stdout=PIPE, stderr=pipe_handler) as mysqldump:
                     mysql.stdin.write(mysqldump.stdout.read())
 
             if mysqldump.returncode or mysql.returncode:
@@ -303,9 +300,11 @@ class CloningOperation:
             raise RuntimeError(f"An error happened at runtime: {e}")
 
         finally:
-            stderr_handler.close()
+            pipe_handler.close()
 
     def run(self):
+        """ Run the cloning op
+        """
         logger.info(f"*** Cloning {self.dbname} ***")
         logger.info(f"> From {self.from_server}")
         logger.info(f"> To {self.to_server}")
@@ -365,12 +364,7 @@ class CloningOperation:
                     if any(rx.match(v) for rx in self.ignore_views):
                         continue
                     logger.debug('* cloning view %s', v)
-                    definition = self.from_server.get_view_definition(v)
-
-                    # force the definer
-                    definition = re.sub(r'DEFINER=`\w+`@`[\w\-.]+`',
-                                        f"DEFINER=`{self.to_server.username}`@`localhost`",
-                                        definition)
+                    definition = self.from_server.get_view_definition(v, self.to_server.username)
 
                     try:
                         self.to_server.exec_query(definition)
@@ -382,32 +376,16 @@ class CloningOperation:
                 logger.error(f"<!> An error happened while cloning the '{self.dbname}' database")
 
         finally:
-            self.to_server.set_constraints(True)
             self.from_server.close()
             self.to_server.close()
 
 
-def ask_confirmation(msg):
-    logger.debug('Ask for confirmation...')
-    msg += "\nWould you like to continue? (yes/no)"
-    while 1:
-        answer = input(msg)
-        if answer in ('oui', 'yes', 'y', 'o'):
-            logger.debug(f"> user confirmed by answering '{answer}'")
-            return True
-        elif answer in ('non', 'no', 'n'):
-            logger.debug(f"> user cancelled by answering '{answer}'")
-            return False
-        else:
-            msg = "The answer could'nt be understood. Continue? (yes/no)"
-
-
 def main(settings, arguments):
     prompt = not arguments["--yes"]
 
     logger.info("Start db cloning utility...")
-    logger.debug(f"Settings: {settings}")
-    logger.debug(f"Arguments: {arguments}")
+    logger.debug(f"Settings: %s", str(settings).replace('\r', '').replace('\n', ''))
+    logger.debug(f"Arguments: %s", str(arguments).replace('\r', '').replace('\n', ''))
 
     # Load the servers' configuration
     servers = {}
@@ -448,7 +426,7 @@ def main(settings, arguments):
 
     # Ask for confirmation (except if '--yes' is in arguments)
     if prompt:
-        # Ask for confirmation
+        logger.debug('Ask for confirmation...')
         msg = "The following operations will be launched:\n{}\n" \
               "WARNING: the existing local databases will be replaced" \
               "".format("\n".join(f"* {op}" for op in selected_ops))
@@ -456,7 +434,9 @@ def main(settings, arguments):
         if not ask_confirmation(msg):
             logger.info("-- Operation cancelled by user --")
             return
+        logger.debug('> User confirmed')
 
+    # Run the cloning operations
     for op in selected_ops:
         op.run()
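
A quick illustration of the `DEFINER` rewrite now done in `get_view_definition`: the capturing group keeps the original host while the username is replaced. The view definition, the view name `v_demo`, and the usernames below are made up for the example:

    import re

    # a made-up statement, in the shape returned by SHOW CREATE VIEW
    definition = ("CREATE ALGORITHM=UNDEFINED DEFINER=`olduser`@`10.0.0.5` "
                  "SQL SECURITY DEFINER VIEW `v_demo` AS select 1")

    # swap the definer's username, keep the original host (group 1)
    print(re.sub(r'DEFINER=`\w+`@`([\w\-.]+)`',
                 r'DEFINER=`dbcloner`@`\1`',
                 definition))
    # -> CREATE ALGORITHM=UNDEFINED DEFINER=`dbcloner`@`10.0.0.5` SQL SECURITY DEFINER VIEW `v_demo` AS select 1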
 

+ 0 - 0
core/__init__.py


Binary
core/__pycache__/__init__.cpython-36.pyc


Binary
core/__pycache__/locker.cpython-36.pyc


Binary
core/__pycache__/logging_.cpython-36.pyc


Binary
core/__pycache__/pipe_handler.cpython-36.pyc


Binary
core/__pycache__/prompt.cpython-36.pyc


Binary
core/__pycache__/ssh.cpython-36.pyc


+ 32 - 0
core/clonedb.log

@@ -0,0 +1,32 @@
+2020-05-20 13:54:37,153 - INFO - Start db cloning utility...
+2020-05-20 13:54:37,153 - DEBUG - Settings: {'servers': {'docker': {'host': 'localhost', 'description': 'Docker mariaDb', 'mysql': {'port': 3306, 'username': 'root', 'password': 'mysql660'}}, 'localhost': {'host': 'localhost', 'description': 'Local Mysql', 'mysql': {'port': 3305, 'username': 'root', 'password': 'mysql660'}}, 'preprod': {'host': 'preprod.2iopenservice.com', 'description': 'Preprod', 'mysql': {'port': 3306, 'username': 'dbcloner', 'password': 'wWZ4hYcrmHLW2mUK'}, 'ssh': {'key_file': '~/.ssh/id_rsa_exploitation', 'port': 22, 'user': 'exploitation'}}, 'prod-back': {'host': 'prod-back.2iopenservice.com', 'description': 'Prod-Back', 'mysql': {'port': 3306, 'username': 'dbcloner', 'password': 'wWZ4hYcrmHLW2mUK'}, 'ssh': {'key_file': '~/.ssh/id_rsa_exploitation', 'port': 22, 'user': 'exploitation'}}, 'prod-front': {'host': 'prod-front.2iopenservice.com', 'description': 'Prod-Front', 'mysql': {'port': 3306, 'username': 'dbcloner', 'password': 'wWZ4hYcrmHLW2mUK'}, 'ssh': {'key_file': '~/.ssh/id_rsa_exploitation', 'port': 22, 'user': 'exploitation'}}}, 'operations': {'crm': {'dbname': 'crm', 'from_server': 'preprod', 'to_server': 'docker', 'is_default': True, 'ignore_tables': [], 'ignore_views': [], 'structure_only': [], 'compress': True}, 'adminassos': {'dbname': 'adminassos', 'from_server': 'preprod', 'to_server': 'docker'}, 'openassos': {'dbname': 'openassos', 'from_server': 'preprod', 'to_server': 'docker', 'is_default': True, 'ignore_tables': ['Access', 'matomo.*', 'user_piwik.*', 'zzz_.*', 'tt_content_save', 'tt_content_v59'], 'structure_only': ['cf_cache_.*', 'sys_log']}, 'opentalent': {'dbname': 'opentalent', 'from_server': 'preprod', 'to_server': 'docker', 'structure_only': [], 'ignore_tables': ['Audit_.*'], 'ignore_views': ['view_audit']}, 'metabase': {'dbname': 'metabase', 'from_server': 'preprod', 'to_server': 'docker'}}}
+2020-05-20 13:54:37,153 - DEBUG - Arguments: {'--help': False, '--version': False, '--yes': True, '-v': False, '<dbname>': 'crm'}
+2020-05-20 13:54:37,155 - INFO - *** Cloning crm ***
+2020-05-20 13:54:37,155 - INFO - > From Preprod (preprod.2iopenservice.com:3306 as dbcloner)
+2020-05-20 13:54:37,155 - INFO - > To Docker mariaDb (localhost:3306 as root)
+2020-05-20 13:54:37,895 - DEBUG - Connected to Preprod (preprod.2iopenservice.com:3306 as dbcloner)
+2020-05-20 13:54:37,897 - DEBUG - Connected to Docker mariaDb (localhost:3306 as root)
+2020-05-20 13:54:38,001 - INFO - (Re)create the database
+2020-05-20 13:54:38,004 - INFO - Cloning structure for 16 tables (on 16)...
+2020-05-20 13:54:38,005 - DEBUG - >>> Dump command: mysqldump --single-transaction -u dbcloner --password=wWZ4hYcrmHLW2mUK --skip-add-drop-table --skip-add-locks --skip-comments --compress --verbose --host 127.0.0.1 --port 6000 --single-transaction --no-data --routines crm cm_action cm_action_com cm_action_histo cm_asso cm_asso_trader cm_bank cm_contact cm_invoice cm_miscellaneous cm_miscellaneous_com cm_pes_assos_config cm_plugin_assos cm_plugins cm_product cm_product_net cm_trader
+2020-05-20 13:54:38,005 - DEBUG - >>> Piped into: mysql -h localhost -P 3306 -u root --password=mysql660 --max-allowed-packet=1073741824 --reconnect --quick -D crm --compress
+2020-05-20 13:54:39,663 - DEBUG - ... cm_action
+2020-05-20 13:54:40,335 - DEBUG - ... cm_action_com
+2020-05-20 13:54:41,006 - DEBUG - ... cm_action_histo
+2020-05-20 13:54:41,672 - DEBUG - ... cm_asso
+2020-05-20 13:54:42,340 - DEBUG - ... cm_asso_trader
+2020-05-20 13:54:43,013 - DEBUG - ... cm_bank
+2020-05-20 13:54:43,687 - DEBUG - ... cm_contact
+2020-05-20 13:54:44,360 - DEBUG - ... cm_invoice
+2020-05-20 13:54:45,122 - DEBUG - ... cm_miscellaneous
+2020-05-20 13:54:45,970 - DEBUG - ... cm_miscellaneous_com
+2020-05-20 13:54:46,662 - DEBUG - ... cm_pes_assos_config
+2020-05-20 13:54:47,347 - DEBUG - ... cm_plugin_assos
+2020-05-20 13:54:48,033 - DEBUG - ... cm_plugins
+2020-05-20 13:54:48,746 - DEBUG - ... cm_product
+2020-05-20 13:54:49,427 - DEBUG - ... cm_product_net
+2020-05-20 13:54:50,110 - DEBUG - ... cm_trader
+2020-05-20 13:54:51,173 - ERROR - Execution failed: 
+2020-05-20 13:54:51,174 - ERROR - <!> An error happened while cloning the 'crm' database
+2020-05-20 13:54:51,234 - DEBUG - Preprod (preprod.2iopenservice.com:3306 as dbcloner) - connection closed
+2020-05-20 13:54:51,235 - DEBUG - Docker mariaDb (localhost:3306 as root) - connection closed

+ 5 - 1
locker.py → core/locker.py

@@ -9,10 +9,15 @@ from path import Path
 class AlreadyRunning(RuntimeError):
     pass
 
+
 def on_error():
     raise AlreadyRunning
 
+
 class Lockfile:
+    """ A lockfile that can be used as a context manager
+    """
+
     def __init__(self, path, on_error=None):
         self.path = Path(path)
         self.on_error = on_error
@@ -24,4 +29,3 @@ class Lockfile:
 
     def __exit__(self, type, value, traceback):
         self.path.remove()
-

+ 0 - 0
logging.yml → core/logging.yml


+ 0 - 0
logging_.py → core/logging_.py


+ 47 - 0
core/pipe_handler.py

@@ -0,0 +1,47 @@
+"""
+A handler for the stdout and stderr output
+
+@author: olivier.massot, 05-2020
+"""
+import logging
+import os
+import threading
+
+
+class PipeHandler(threading.Thread):
+    """ Handle the stderr output from a Popen object """
+
+    def __init__(self, logger_name, default_level=logging.INFO):
+        """ Setup the object with a logger and a loglevel
+        and start the thread
+        """
+        threading.Thread.__init__(self)
+        self.daemon = False
+        self.fdRead, self.fdWrite = os.pipe()
+        self.pipeReader = os.fdopen(self.fdRead)
+        self.logger = logging.getLogger(logger_name)
+        self.default_level = default_level
+
+        self.start()
+
+    def fileno(self):
+        """ Return the write file descriptor of the pipe
+        """
+        return self.fdWrite
+
+    def process(self, line):
+        """ Process the last line that was read
+        """
+        self.logger.log(self.default_level, line)
+
+    def run(self):
+        """ Run the thread, logging everything.
+        """
+        for line in iter(self.pipeReader.readline, ''):
+            self.process(line.strip('\n'))
+        self.pipeReader.close()
+
+    def close(self):
+        """ Close the write end of the pipe.
+        """
+        os.close(self.fdWrite)
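
A minimal usage sketch for `PipeHandler`: `Popen` accepts any object exposing a `fileno()` method for `stderr`, so the handler's write end of the pipe receives the child's output while the reader thread logs each line. The logger name "demo" and the `ls no_such_file` command are only examples:

    import logging
    from subprocess import Popen, PIPE

    from core.pipe_handler import PipeHandler

    logging.basicConfig(level=logging.DEBUG)

    handler = PipeHandler("demo")  # the reader thread starts right away
    try:
        # the child's stderr lines end up in the 'demo' logger
        with Popen(["ls", "no_such_file"], stdout=PIPE, stderr=handler) as proc:
            proc.wait()
    finally:
        handler.close()  # close the write end so the reader thread can exit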

+ 20 - 0
core/prompt.py

@@ -0,0 +1,20 @@
+"""
+Prompting CLI utilities
+
+@author: olivier.massot, 05-2020
+"""
+
+
+def ask_confirmation(msg):
+    """ Ask confirmation to the user with the given message.
+    Returns True if the user confirmed
+    """
+    msg += "\nWould you like to continue? (yes/no)"
+    while 1:
+        answer = input(msg)
+        if answer in ('oui', 'yes', 'y', 'o'):
+            return True
+        elif answer in ('non', 'no', 'n'):
+            return False
+        else:
+            msg = "The answer could'nt be understood. Continue? (yes/no)"

+ 37 - 0
core/ssh.py

@@ -0,0 +1,37 @@
+"""
+An SSH Tunneling tool
+
+@author: olivier.massot, 05-2020
+"""
+from sshtunnel import SSHTunnelForwarder
+
+
+class SshTunnel:
+    LOCAL_ADRESS = ('127.0.0.1', 6000)
+
+    def __init__(self, host, remote_port, port=22, user="root", key_file="~/.ssh/id_rsa"):
+        self.host = host
+        self.remote_port = remote_port
+        self.port = int(port)
+        self.user = user
+        self.key_file = key_file
+
+        self._tunnel = SSHTunnelForwarder(
+            (self.host, self.port),
+            ssh_username=self.user,
+            ssh_pkey=self.key_file,
+            local_bind_address=self.LOCAL_ADRESS,
+            remote_bind_address=('127.0.0.1', self.remote_port)
+        )
+
+    def start(self):
+        """ Start the ssh tunnel
+        """
+        self._tunnel.start()
+        if not self._tunnel.tunnel_is_up[self.LOCAL_ADRESS]:
+            raise RuntimeError('Unable to open the SSH Tunnel')
+
+    def stop(self):
+        """ Stop the ssh tunnel
+        """
+        self._tunnel.stop()
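
A minimal sketch of how `SshTunnel` is meant to be used, mirroring what `MySqlServer.connect` does in `clonedb.py`; the host name, user names, and password below are placeholders:

    import pymysql

    from core.ssh import SshTunnel

    # forward the local address to port 3306 on the remote host (placeholder values)
    tunnel = SshTunnel("db.example.com", remote_port=3306,
                       user="exploitation", key_file="~/.ssh/id_rsa")
    tunnel.start()
    try:
        host, port = SshTunnel.LOCAL_ADRESS
        cnn = pymysql.connect(host=host, port=port, user="dbcloner", password="secret")
        with cnn.cursor() as cursor:
            cursor.execute("SELECT VERSION()")
            print(cursor.fetchone())
        cnn.close()
    finally:
        tunnel.stop()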

+ 65 - 6
readme.md

@@ -2,12 +2,10 @@
 
 MySql database cloning script
 
-
-
 ## Usage
 
 Usage:
-  clonedb.py [-v] [-y] [<dbname>]
+  clonedb.py [-v] [-y] [<opname>]
   clonedb.py (-h | --help)
   clonedb.py --version
 
@@ -18,7 +16,7 @@ Options:
 
 ## Installation
 
-> requires python 3.6+
+> requires python 3.6+: `sudo apt-get install python3 python3-pip`
 
     git clone https://gitlab.2iopenservice.com/olivier/clonedb
     cd clonedb
@@ -26,9 +24,70 @@ Options:
 
 ## Configuration
 
-> Configuration: settings.yml
+To configure the cloning operations, open the `settings.yml` file.
+The file is structured as follows:
+
+    servers:
+      [server_name]:
+        host: host
+        description: ...
+        mysql:
+          port: 3306
+          username: user
+          password: my_password
+        ssh:
+          key_file: ~/.ssh/id_rsa
+          port: 22
+          user: user
+
+    operations:
+      [op_name]:
+        dbname: my_db
+        from_server: [server_name]
+        to_server: [server_name]
+        is_default: True
+        ignore_tables: []
+        ignore_views: []
+        structure_only: []
+        compress: True
+        filter_tables: []
+
+### Minimal configuration
+
+Add an entry to the `servers` section for
+both the source server and the target server of the cloning operation.
+
+A short description can be added, for instance "Prod" or "Test"
+
+For each server, add a `mysql` section containing
+the MySQL connection parameters:
+port, username, password
+
+If the server must be reached through an SSH tunnel, add
+an `ssh` section, which
+may contain the SSH connection parameters:
+`key_file` (path to the SSH private key), `port`, `user`
+
+> The names given to the servers (`[server_name]`) do not matter
+
+Then add an entry to the `operations` section and define at least the following three parameters:
+   * dbname: the name of the database to clone
+   * from_server: the name of the source server, as defined in the servers section
+   * to_server: the name of the target server, as defined in the servers section
+
+The following parameters can also be added:
+
+* `is_default`: the operation is run by default, when *clonedb* is launched without the `<opname>` argument
+* `compress`: compresses the dumps coming from the source server; reduces the amount of data transferred but increases the load on the source server
+* `ignore_tables`: list of table names or regular expressions; the matching tables are ignored (how the patterns are matched is illustrated in the sketch below)
+* `structure_only`: list of table names or regular expressions; only the structure of the matching tables
+is cloned, not the data they contain
+* `ignore_views`: list of view names or regular expressions; the matching views are ignored
+* `filter_tables`: list of table names or regular expressions; when a list is given,
+only the matching tables are processed
 
 
 ## Workarounds
 
-> `The user specified as a definer ('username'@'%') does not exist`: add a user account to the targeted mysql server named `username@%` with admin privileges
+* `The user specified as a definer ('username'@'%') does not exist`: add a user account 
+to the targeted mysql server named `username@%` with admin privileges
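
To make the regex-based lists above concrete: entries are compiled with Python's `re.compile` and tested with `match`, so they are anchored at the start of the table name. A small illustrative sketch, where the patterns are taken from the `openassos` entry in `settings.yml` and the table names are invented:

    import re

    ignore_tables = ['Access', 'matomo.*', 'zzz_.*']
    patterns = [re.compile(r) for r in ignore_tables]  # same as CloningOperation

    for table in ['Access', 'matomo_log_visit', 'my_matomo', 'zzz_old']:
        ignored = any(rx.match(table) for rx in patterns)
        print(table, '-> ignored' if ignored else '-> cloned')

    # Access -> ignored
    # matomo_log_visit -> ignored
    # my_matomo -> cloned   (match() is anchored at the start of the name)
    # zzz_old -> ignored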

+ 6 - 7
settings.yml

@@ -56,14 +56,13 @@ operations:
   crm:
     dbname: crm
     from_server: preprod
-    to_server: localhost
+    to_server: docker
     is_default: True
     ignore_tables: []
+    ignore_views: []
     structure_only: []
+    compress: True
 #    filter_tables: []
-    ignore_views: []
-    ignore_procedures: []
-    ignore_functions: []
 
   adminassos:
     dbname: adminassos
@@ -73,7 +72,7 @@ operations:
   openassos:
     dbname: openassos
     from_server: preprod
-    to_server: localhost
+    to_server: docker
     is_default: True
     ignore_tables: ['Access', 'matomo.*', 'user_piwik.*', 'zzz_.*', 'tt_content_save', 'tt_content_v59']
     structure_only: ['cf_cache_.*', 'sys_log']
@@ -81,8 +80,8 @@ operations:
   opentalent:
     dbname: opentalent
     from_server: preprod
-    to_server: localhost
-    structure_only: ['.*']
+    to_server: docker
+    structure_only: []
     ignore_tables: ['Audit_.*']
     ignore_views: ['view_audit']
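
For orientation, a rough sketch of how one `operations` entry could be wired to the classes added in this commit (`MySqlServer`, `SshTunnel`, `CloningOperation`). This is an illustration based on their constructors, not the actual code in `main()`; the `build_server` helper is hypothetical:

    from clonedb import CloningOperation, MySqlServer, load_settings
    from core.ssh import SshTunnel

    settings = load_settings()

    def build_server(cfg):
        """ Build a MySqlServer (and its optional SshTunnel) from a 'servers' entry """
        tunnel = SshTunnel(cfg['host'], cfg['mysql']['port'], **cfg['ssh']) if 'ssh' in cfg else None
        return MySqlServer(cfg['host'], cfg['mysql']['port'],
                           cfg['mysql']['username'], cfg['mysql']['password'],
                           description=cfg.get('description', ''),
                           ssh_tunnel=tunnel)

    op_cfg = settings['operations']['crm']
    op = CloningOperation(op_cfg['dbname'],
                          build_server(settings['servers'][op_cfg['from_server']]),
                          build_server(settings['servers'][op_cfg['to_server']]),
                          ignore_tables=op_cfg.get('ignore_tables'),
                          structure_only=op_cfg.get('structure_only'),
                          compress=op_cfg.get('compress', True))
    op.run()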