[PATCH 03 of 10 logilab-database] Paint it black!

Laurent Peuch cortex at worlddomination.be
Fri Apr 3 09:14:11 CEST 2020


# HG changeset patch
# User Laurent Peuch <cortex at worlddomination.be>
# Date 1585788707 -7200
#      Thu Apr 02 02:51:47 2020 +0200
# Node ID de107abc16067a18c08d1953a9e3c227fa6e90ac
# Parent  51884058d7319432daeae1c21392403117580c2b
# Available At https://hg.logilab.org/users/lpeuch/logilab-database
#              hg pull https://hg.logilab.org/users/lpeuch/logilab-database -r de107abc1606
# EXP-Topic good-practice-modernisation
Paint it black!

diff --git a/__pkginfo__.py b/__pkginfo__.py
--- a/__pkginfo__.py
+++ b/__pkginfo__.py
@@ -17,11 +17,11 @@
 # with logilab-database. If not, see <http://www.gnu.org/licenses/>.
 """logilab.database packaging information."""
 
-distname = 'logilab-database'
-modname = 'database'
+distname = "logilab-database"
+modname = "database"
 numversion = (1, 17, 1)
-version = '.'.join([str(num) for num in numversion])
-license = 'LGPL'
+version = ".".join([str(num) for num in numversion])
+license = "LGPL"
 
 author = "Logilab"
 author_email = "contact at logilab.fr"
@@ -31,21 +31,22 @@ description = "true unified database acc
 web = "http://www.logilab.org/project/%s" % distname
 mailinglist = "mailto://python-projects@lists.logilab.org"
 
-subpackage_of = 'logilab'
+subpackage_of = "logilab"
 
 install_requires = [
-    'setuptools',
-    'logilab-common >= 0.63.2',
-    'Yapps2',
-    'python-dateutil',
-    ]
+    "setuptools",
+    "logilab-common >= 0.63.2",
+    "Yapps2",
+    "python-dateutil",
+]
 
 tests_require = [
-    'psycopg2',
-    ]
+    "psycopg2",
+]
 
-classifiers = ["Topic :: Database",
-               "Programming Language :: Python",
-               "Programming Language :: Python :: 3",
-               "Programming Language :: Python :: 3 :: Only",
-               ]
+classifiers = [
+    "Topic :: Database",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3 :: Only",
+]
diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -19,14 +19,14 @@
 
 # -- Project information -----------------------------------------------------
 
-project = u'logilab-database'
-copyright = u'2019, Logilab'
-author = u'Logilab'
+project = u"logilab-database"
+copyright = u"2019, Logilab"
+author = u"Logilab"
 
 # The short X.Y version
-version = u''
+version = u""
 # The full version, including alpha/beta/rc tags
-release = u''
+release = u""
 
 
 # -- General configuration ---------------------------------------------------
@@ -39,23 +39,23 @@ release = u''
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.doctest',
-    'sphinx.ext.intersphinx',
-    'sphinx.ext.viewcode',
+    "sphinx.ext.autodoc",
+    "sphinx.ext.doctest",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.viewcode",
 ]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 #
 # source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ".rst"
 
 # The master toctree document.
-master_doc = 'index'
+master_doc = "index"
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -67,7 +67,7 @@ language = None
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = [u"_build", "Thumbs.db", ".DS_Store"]
 
 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = None
@@ -78,7 +78,7 @@ pygments_style = None
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'alabaster'
+html_theme = "alabaster"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further.  For a list of options available for each theme, see the
@@ -89,7 +89,7 @@ html_theme = 'alabaster'
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
 
 # Custom sidebar templates, must be a dictionary that maps document names
 # to template names.
@@ -105,7 +105,7 @@ html_static_path = ['_static']
 # -- Options for HTMLHelp output ---------------------------------------------
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'logilab-databasedoc'
+htmlhelp_basename = "logilab-databasedoc"
 
 
 # -- Options for LaTeX output ------------------------------------------------
@@ -114,15 +114,12 @@ latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
     #
     # 'papersize': 'letterpaper',
-
     # The font size ('10pt', '11pt' or '12pt').
     #
     # 'pointsize': '10pt',
-
     # Additional stuff for the LaTeX preamble.
     #
     # 'preamble': '',
-
     # Latex figure (float) alignment
     #
     # 'figure_align': 'htbp',
@@ -132,8 +129,13 @@ latex_elements = {
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'logilab-database.tex', u'logilab-database Documentation',
-     u'Logilab', 'manual'),
+    (
+        master_doc,
+        "logilab-database.tex",
+        u"logilab-database Documentation",
+        u"Logilab",
+        "manual",
+    ),
 ]
 
 
@@ -142,8 +144,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'logilab-database', u'logilab-database Documentation',
-     [author], 1)
+    (master_doc, "logilab-database", u"logilab-database Documentation", [author], 1)
 ]
 
 
@@ -153,9 +154,15 @@ man_pages = [
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'logilab-database', u'logilab-database Documentation',
-     author, 'logilab-database', 'One line description of project.',
-     'Miscellaneous'),
+    (
+        master_doc,
+        "logilab-database",
+        u"logilab-database Documentation",
+        author,
+        "logilab-database",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
 ]
 
 
@@ -174,7 +181,7 @@ epub_title = project
 # epub_uid = ''
 
 # A list of files that should not be packed into the epub file.
-epub_exclude_files = ['search.html']
+epub_exclude_files = ["search.html"]
 
 
 # -- Extension configuration -------------------------------------------------
@@ -182,4 +189,4 @@ epub_exclude_files = ['search.html']
 # -- Options for intersphinx extension ---------------------------------------
 
 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'https://docs.python.org/': None}
+intersphinx_mapping = {"https://docs.python.org/": None}
diff --git a/logilab/__init__.py b/logilab/__init__.py
--- a/logilab/__init__.py
+++ b/logilab/__init__.py
@@ -1,1 +1,1 @@
-__import__('pkg_resources').declare_namespace(__name__)
+__import__("pkg_resources").declare_namespace(__name__)
diff --git a/logilab/database/__init__.py b/logilab/database/__init__.py
--- a/logilab/database/__init__.py
+++ b/logilab/database/__init__.py
@@ -48,7 +48,7 @@ from logilab.common.modutils import load
 from logilab.common.date import todate, todatetime, utcdatetime, utctime
 from logilab.common.deprecation import deprecated
 
-_LOGGER = logging.getLogger('logilab.database')
+_LOGGER = logging.getLogger("logilab.database")
 
 USE_MX_DATETIME = False
 
@@ -57,34 +57,50 @@ USE_MX_DATETIME = False
 _PREFERED_DRIVERS = {}
 _ADV_FUNC_HELPER_DIRECTORY = {}
 
+
 def _ensure_module_loaded(driver):
-    if driver in ('postgres', 'sqlite', 'mysql', 'sqlserver2005'):
+    if driver in ("postgres", "sqlite", "mysql", "sqlserver2005"):
         with _LOAD_MODULES_LOCK:
-            __import__('logilab.database.%s' % driver)
+            __import__("logilab.database.%s" % driver)
+
 
 # main functions ###############################################################
 
+
 def get_db_helper(driver):
     """returns an advanced function helper for the given driver"""
     _ensure_module_loaded(driver)
     return _ADV_FUNC_HELPER_DIRECTORY[driver]()
 
-def get_dbapi_compliant_module(driver, prefered_drivers=None, quiet=False,
-                               pywrap=False):
+
+def get_dbapi_compliant_module(
+    driver, prefered_drivers=None, quiet=False, pywrap=False
+):
     """returns a fully dbapi compliant module"""
     _ensure_module_loaded(driver)
     try:
         mod = _ADAPTER_DIRECTORY.adapt(driver, prefered_drivers, pywrap=pywrap)
     except NoAdapterFound as err:
         if not quiet:
-            msg = 'No Adapter found for %s, returning native module'
-            _LOGGER.warning(msg,  err.objname)
+            msg = "No Adapter found for %s, returning native module"
+            _LOGGER.warning(msg, err.objname)
         mod = err.adapted_obj
     return mod
 
-def get_connection(driver='postgres', host='', database='', user='',
-                  password='', port='', quiet=False, drivers=_PREFERED_DRIVERS,
-                  pywrap=False, schema=None, extra_args=None):
+
+def get_connection(
+    driver="postgres",
+    host="",
+    database="",
+    user="",
+    password="",
+    port="",
+    quiet=False,
+    drivers=_PREFERED_DRIVERS,
+    pywrap=False,
+    schema=None,
+    extra_args=None,
+):
     """return a db connection according to given arguments
 
     extra_args is an optional string that is appended to the DSN"""
@@ -94,21 +110,22 @@ def get_connection(driver='postgres', ho
         adapter = _ADAPTER_DIRECTORY.get_adapter(driver, modname)
     except NoAdapterFound as err:
         if not quiet:
-            msg = 'No Adapter found for %s, using default one'
+            msg = "No Adapter found for %s, using default one"
             _LOGGER.warning(msg, err.objname)
         adapted_module = DBAPIAdapter(module, pywrap)
     else:
         adapted_module = adapter(module, pywrap)
     if host and not port:
         try:
-            host, port = host.split(':', 1)
+            host, port = host.split(":", 1)
         except ValueError:
             pass
     if port:
         port = int(port)
-    return adapted_module.connect(host, database, user, password,
-                                  port=port, schema=schema,
-                                  extra_args=extra_args)
+    return adapted_module.connect(
+        host, database, user, password, port=port, schema=schema, extra_args=extra_args
+    )
+
 
 def set_prefered_driver(driver, module, _drivers=_PREFERED_DRIVERS):
     """sets the preferred driver module for driver
@@ -122,54 +139,62 @@ def set_prefered_driver(driver, module, 
         try:
             modules = _drivers[driver]
         except KeyError:
-            raise UnknownDriver('Unknown driver %s' % driver)
+            raise UnknownDriver("Unknown driver %s" % driver)
         # Remove module from modules list, and re-insert it in first position
         try:
             modules.remove(module)
         except ValueError:
-            raise UnknownDriver('Unknown module %s for %s' % (module, driver))
+            raise UnknownDriver("Unknown module %s for %s" % (module, driver))
         modules.insert(0, module)
 
 
 # types converters #############################################################
 
+
 def convert_datetime(value):
     # Note: use is __class__ since issubclass(datetime, date)
     if type(value) is date:
         value = todatetime(value)
     return value
 
+
 def convert_date(value):
     if isinstance(value, datetime):
         value = todate(value)
     return value
 
+
 def convert_tzdatetime(value):
     # Note: use is __class__ since issubclass(datetime, date)
     if type(value) is date:
         value = todatetime(value)
-    elif getattr(value, 'tzinfo', None):
+    elif getattr(value, "tzinfo", None):
         value = utcdatetime(value)
     return value
 
+
 def convert_tztime(value):
-    if getattr(value, 'tzinfo', None):
+    if getattr(value, "tzinfo", None):
         value = utctime(value)
     return value
 
 
 # unified db api ###############################################################
 
+
 class UnknownDriver(Exception):
     """raised when a unknown driver is given to get connection"""
 
+
 class NoAdapterFound(Exception):
     """Raised when no Adapter to DBAPI was found"""
-    def __init__(self, obj, objname=None, protocol='DBAPI'):
+
+    def __init__(self, obj, objname=None, protocol="DBAPI"):
         if objname is None:
             objname = obj.__name__
-        Exception.__init__(self, "Could not adapt %s to protocol %s" %
-                           (objname, protocol))
+        Exception.__init__(
+            self, "Could not adapt %s to protocol %s" % (objname, protocol)
+        )
         self.adapted_obj = obj
         self.objname = objname
         self._protocol = protocol
@@ -179,6 +204,7 @@ class NoAdapterFound(Exception):
 # This one would become an adapter for 'DBAPI' protocol
 class _AdapterDirectory(dict):
     """A simple dict that registers all adapters"""
+
     def register_adapter(self, adapter, driver, modname):
         """Registers 'adapter' in directory as adapting 'mod'"""
         try:
@@ -202,6 +228,7 @@ class _AdapterDirectory(dict):
         except KeyError:
             raise NoAdapterFound(None, modname)
 
+
 _ADAPTER_DIRECTORY = _AdapterDirectory()
 del _AdapterDirectory
 
@@ -219,24 +246,26 @@ def _import_driver_module(driver, driver
         for modname in drivers[driver]:
             try:
                 if not quiet:
-                    _LOGGER.info('Trying %s', modname)
+                    _LOGGER.info("Trying %s", modname)
                 module = load_module_from_name(modname, use_sys=False)
                 break
             except ImportError:
                 if not quiet:
-                    _LOGGER.warning('%s is not available', modname)
+                    _LOGGER.warning("%s is not available", modname)
                 continue
         else:
-            raise ImportError('Unable to import a %s module' % driver)
+            raise ImportError("Unable to import a %s module" % driver)
     return module, modname
 
 
 ## base connection and cursor wrappers #####################
 
+
 class _SimpleConnectionWrapper(object):
     """A simple connection wrapper in python to decorated C-level connections
     with additional attributes
     """
+
     def __init__(self, cnx):
         """Wraps the original connection object"""
         self._cnx = cnx
@@ -266,6 +295,7 @@ class PyConnection(_SimpleConnectionWrap
     """A simple connection wrapper in python, generating wrapper for cursors as
     well (useful for profiling)
     """
+
     def __init__(self, cnx):
         """Wraps the original connection object"""
         self._cnx = cnx
@@ -277,6 +307,7 @@ class PyConnection(_SimpleConnectionWrap
 
 class PyCursor(object):
     """A simple cursor wrapper in python (useful for profiling)"""
+
     def __init__(self, cursor):
         self._cursor = cursor
 
@@ -310,8 +341,10 @@ class PyCursor(object):
 
 ## abstract class for dbapi adapters #######################
 
+
 class DBAPIAdapter(object):
     """Base class for all DBAPI adapters"""
+
     UNKNOWN = None
     # True if the fetch*() methods return a mutable structure (i.e. not a tuple)
     row_is_mutable = False
@@ -330,19 +363,37 @@ class DBAPIAdapter(object):
         self.logger = _LOGGER
         # optimization: copy type codes from the native module to this instance
         # since the .process_value method may be heavily used
-        for typecode in ('STRING', 'BOOLEAN', 'BINARY', 'DATETIME', 'NUMBER',
-                         'UNKNOWN'):
+        for typecode in (
+            "STRING",
+            "BOOLEAN",
+            "BINARY",
+            "DATETIME",
+            "NUMBER",
+            "UNKNOWN",
+        ):
             try:
                 setattr(self, typecode, getattr(self, typecode))
             except AttributeError:
-                self.logger.warning('%s adapter has no %s type code',
-                                    self, typecode)
-
-    def connect(self, host='', database='', user='', password='', port='',
-                schema=None, extra_args=None):
+                self.logger.warning("%s adapter has no %s type code", self, typecode)
+
+    def connect(
+        self,
+        host="",
+        database="",
+        user="",
+        password="",
+        port="",
+        schema=None,
+        extra_args=None,
+    ):
         """Wraps the native module connect method"""
-        kwargs = {'host' : host, 'port' : port, 'database' : database,
-                  'user' : user, 'password' : password}
+        kwargs = {
+            "host": host,
+            "port": port,
+            "database": database,
+            "user": user,
+            "password": password,
+        }
         return self._wrap_if_needed(self._native_module.connect(**kwargs))
 
     def _wrap_if_needed(self, cnx):
@@ -356,13 +407,13 @@ class DBAPIAdapter(object):
     def __getattr__(self, attrname):
         return getattr(self._native_module, attrname)
 
-    #@cached ?
-    def _transformation_callback(self, description, encoding='utf-8', binarywrap=None):
+    # @cached ?
+    def _transformation_callback(self, description, encoding="utf-8", binarywrap=None):
         typecode = description[1]
         assert typecode is not None, self
         transform = None
         if typecode == self.STRING and not self.returns_unicode:
-            transform = lambda v: str(v, encoding, 'replace')
+            transform = lambda v: str(v, encoding, "replace")
         elif typecode == self.BOOLEAN:
             transform = bool
         elif typecode == self.BINARY and binarywrap is not None:
@@ -370,7 +421,9 @@ class DBAPIAdapter(object):
         elif typecode == self.UNKNOWN:
             # may occurs on constant selection for instance (e.g. SELECT 'hop')
             # with postgresql at least
-            transform = lambda v: str(v, encoding, 'replace') if isinstance(v, bytes) else v
+            transform = (
+                lambda v: str(v, encoding, "replace") if isinstance(v, bytes) else v
+            )
         return transform
 
     def process_cursor(self, cursor, encoding, binarywrap=None):
@@ -384,7 +437,9 @@ class DBAPIAdapter(object):
         """
         cursor.arraysize = 100
         # compute transformations (str->unicode, int->bool, etc.) required for each cell
-        transformations = self._transformations(cursor.description, encoding, binarywrap)
+        transformations = self._transformations(
+            cursor.description, encoding, binarywrap
+        )
         row_is_mutable = self.row_is_mutable
         while True:
             results = cursor.fetchmany()
@@ -399,7 +454,7 @@ class DBAPIAdapter(object):
                     result[col] = transform(result[col])
                 yield result
 
-    def _transformations(self, description, encoding='utf-8', binarywrap=None):
+    def _transformations(self, description, encoding="utf-8", binarywrap=None):
         """returns the set of required transformations on the resultset
 
         Transformations are the functions used to convert raw results as
@@ -410,16 +465,13 @@ class DBAPIAdapter(object):
         for i, coldescr in enumerate(description):
             transform = self._transformation_callback(coldescr, encoding, binarywrap)
             if transform is not None:
-                transformations.append( (i, transform) )
+                transformations.append((i, transform))
         return transformations
 
-    def process_value(self, value, description, encoding='utf-8',
-                      binarywrap=None):
+    def process_value(self, value, description, encoding="utf-8", binarywrap=None):
         # if the dbapi module isn't supporting type codes, override to return
         # value directly
-        transform = self._transformation_callback(description,
-                                                  encoding,
-                                                  binarywrap)
+        transform = self._transformation_callback(description, encoding, binarywrap)
         if transform is not None:
             value = transform(value)
         return value
@@ -428,17 +480,24 @@ class DBAPIAdapter(object):
         """turn raw value returned by the db-api module into a python string"""
         return bytes(value)
 
+
 # advanced database helper #####################################################
 
 from logilab.database.fti import FTIndexerMixIn
 
+
 class BadQuery(Exception):
     pass
+
+
 class UnsupportedFunction(BadQuery):
     pass
+
+
 class UnknownFunction(BadQuery):
     pass
 
+
 # set of hooks that should be called at connection opening time.
 # mostly for sqlite'stored procedures that have to be registered...
 SQL_CONNECT_HOOKS = {}
@@ -447,9 +506,10 @@ ALL_BACKENDS = object()
 # In that case, functions should implement dynamic_rtype() method
 DYNAMIC_RTYPE = object()
 
+
 class FunctionDescr(object):
     supported_backends = ALL_BACKENDS
-    rtype = None # None <-> returned type should be the same as the first argument
+    rtype = None  # None <-> returned type should be the same as the first argument
     aggregat = False
     minargs = 1
     maxargs = 1
@@ -467,23 +527,25 @@ class FunctionDescr(object):
             self.supported_backends += (backend,)
 
     def check_nbargs(cls, nbargs):
-        if cls.minargs is not None and \
-               nbargs < cls.minargs:
-            raise BadQuery('not enough argument for function %s' % cls.__name__)
-        if cls.maxargs is not None and \
-               nbargs > cls.maxargs:
-            raise BadQuery('too many arguments for function %s' % cls.__name__)
+        if cls.minargs is not None and nbargs < cls.minargs:
+            raise BadQuery("not enough argument for function %s" % cls.__name__)
+        if cls.maxargs is not None and nbargs > cls.maxargs:
+            raise BadQuery("too many arguments for function %s" % cls.__name__)
+
     check_nbargs = classmethod(check_nbargs)
 
     def as_sql(self, backend, args):
         try:
-            return getattr(self, 'as_sql_%s' % backend)(args)
+            return getattr(self, "as_sql_%s" % backend)(args)
         except AttributeError:
             funcname = self.name_mapping.get(backend, self.name)
-            return '%s(%s)' % (funcname, ', '.join(args))
+            return "%s(%s)" % (funcname, ", ".join(args))
 
     def supports(self, backend):
-        if self.supported_backends is ALL_BACKENDS or backend in self.supported_backends:
+        if (
+            self.supported_backends is ALL_BACKENDS
+            or backend in self.supported_backends
+        ):
             return True
         return False
 
@@ -492,97 +554,137 @@ class AggrFunctionDescr(FunctionDescr):
     aggregat = True
     rtype = None
 
-class MAX(AggrFunctionDescr): pass
-class MIN(AggrFunctionDescr): pass
-class SUM(AggrFunctionDescr): pass
+
+class MAX(AggrFunctionDescr):
+    pass
+
+
+class MIN(AggrFunctionDescr):
+    pass
+
+
+class SUM(AggrFunctionDescr):
+    pass
+
 
 class COUNT(AggrFunctionDescr):
-    rtype = 'Int'
+    rtype = "Int"
     maxargs = 2
 
     def as_sql(self, backend, args):
         if len(args) == 2:
             # deprecated COUNT DISTINCT form, suppose 2nd argument is true
-            warn('[lgdb 1.10] use COUNTDISTINCT instead of COUNT(X, TRUE)',
-                 DeprecationWarning)
-            return '%s(DISTINCT %s)' % (self.name, args[0])
-        return '%s(%s)' % (self.name, args[0])
+            warn(
+                "[lgdb 1.10] use COUNTDISTINCT instead of COUNT(X, TRUE)",
+                DeprecationWarning,
+            )
+            return "%s(DISTINCT %s)" % (self.name, args[0])
+        return "%s(%s)" % (self.name, args[0])
+
 
 class COUNTDISTINCT(AggrFunctionDescr):
-    rtype = 'Int'
+    rtype = "Int"
 
     def as_sql(self, backend, args):
-        return 'COUNT(DISTINCT %s)' % args[0]
+        return "COUNT(DISTINCT %s)" % args[0]
+
 
 class AVG(AggrFunctionDescr):
-    rtype = 'Float'
+    rtype = "Float"
+
 
 class ABS(FunctionDescr):
-    rtype = 'Float'
+    rtype = "Float"
+
+
 class UPPER(FunctionDescr):
-    rtype = 'String'
+    rtype = "String"
+
+
 class LOWER(FunctionDescr):
-    rtype = 'String'
+    rtype = "String"
+
+
 class IN(FunctionDescr):
     """this is actually a 'keyword' function..."""
+
     maxargs = None
+
+
 class LENGTH(FunctionDescr):
-    rtype = 'Int'
-
-class DATE(FunctionDescr): # XXX deprecates now we've CAST
-    rtype = 'Date'
+    rtype = "Int"
+
+
+class DATE(FunctionDescr):  # XXX deprecates now we've CAST
+    rtype = "Date"
+
 
 class RANDOM(FunctionDescr):
-    rtype = 'Float'
+    rtype = "Float"
     minargs = maxargs = 0
-    name_mapping = {'postgres': 'RANDOM',
-                    'mysql': 'RAND',
-                    }
+    name_mapping = {
+        "postgres": "RANDOM",
+        "mysql": "RAND",
+    }
+
+
 class SUBSTRING(FunctionDescr):
-    rtype = 'String'
+    rtype = "String"
     minargs = maxargs = 3
-    name_mapping = {'postgres': 'SUBSTR',
-                    'mysql': 'SUBSTRING',
-                    'sqlite': 'SUBSTR',
-                    'sqlserver': 'SUBSTRING'}
+    name_mapping = {
+        "postgres": "SUBSTR",
+        "mysql": "SUBSTRING",
+        "sqlite": "SUBSTR",
+        "sqlserver": "SUBSTRING",
+    }
+
 
 class ExtractDateField(FunctionDescr):
-    rtype = 'Int'
+    rtype = "Int"
     minargs = maxargs = 1
-    field = None # YEAR, MONTH, DAY, etc.
+    field = None  # YEAR, MONTH, DAY, etc.
 
     def as_sql_sqlserver2005(self, args):
-        return 'DATEPART(%s, %s)' % (self.field, ', '.join(args))
+        return "DATEPART(%s, %s)" % (self.field, ", ".join(args))
 
     def as_sql_mysql(self, args):
-        return 'EXTRACT(%s from %s)' % (self.field, ', '.join(args))
+        return "EXTRACT(%s from %s)" % (self.field, ", ".join(args))
 
     def as_sql_postgres(self, args):
-        return 'CAST(EXTRACT(%s from %s) AS INTEGER)' % (self.field, ', '.join(args))
+        return "CAST(EXTRACT(%s from %s) AS INTEGER)" % (self.field, ", ".join(args))
+
 
 class MONTH(ExtractDateField):
-    field = 'MONTH'
+    field = "MONTH"
+
 
 class YEAR(ExtractDateField):
-    field = 'YEAR'
+    field = "YEAR"
+
 
 class DAY(ExtractDateField):
-    field = 'DAY'
+    field = "DAY"
+
 
 class HOUR(ExtractDateField):
-    field = 'HOUR'
+    field = "HOUR"
+
 
 class MINUTE(ExtractDateField):
-    field = 'MINUTE'
+    field = "MINUTE"
+
 
 class SECOND(ExtractDateField):
-    field = 'SECOND'
+    field = "SECOND"
+
 
 class EPOCH(ExtractDateField):
     """Return EPOCH timestamp from a datetime/date ;
     return number of seconds for an interval.
     """
-    field = 'EPOCH'
+
+    field = "EPOCH"
+
 
 class WEEKDAY(FunctionDescr):
     """Return the day of the week represented by the date.
@@ -592,23 +694,26 @@ class WEEKDAY(FunctionDescr):
     (pick those values since that's what mysql and sqlserver do, and what's in
     the ODBC standard)
     """
-    rtype = 'Int'
+
+    rtype = "Int"
     minargs = maxargs = 1
 
     def as_sql_sqlserver2005(self, args):
-        return 'DATEPART(WEEKDAY, %s)' % (', '.join(args))
+        return "DATEPART(WEEKDAY, %s)" % (", ".join(args))
 
     def as_sql_mysql(self, args):
-        return 'DAYOFWEEK(%s)' % (', '.join(args))
+        return "DAYOFWEEK(%s)" % (", ".join(args))
 
     def as_sql_postgres(self, args):
         # for postgres, sunday is 0
-        return '(CAST(EXTRACT(DOW from %s) AS INTEGER) + 1)' % (', '.join(args))
+        return "(CAST(EXTRACT(DOW from %s) AS INTEGER) + 1)" % (", ".join(args))
+
 
 class AT_TZ(FunctionDescr):
     """AT_TZ(TZDatetime, timezone) -> Return a datetime at a given time zone.
     """
-    supported_backends = ('postgres',)
+
+    supported_backends = ("postgres",)
     minargs = maxargs = 2
 
     def as_sql_postgres(self, args):
@@ -620,15 +725,16 @@ class CAST(FunctionDescr):
 
     sql-92 standard says (CAST <expr> as <type>)
     """
+
     minargs = maxargs = 2
-    supported_backends = ('postgres', 'sqlite', 'mysql', 'sqlserver2005')
+    supported_backends = ("postgres", "sqlite", "mysql", "sqlserver2005")
     rtype = DYNAMIC_RTYPE
 
     def as_sql(self, backend, args):
         yamstype, varname = args
         db_helper = get_db_helper(backend)
         sqltype = db_helper.TYPE_MAPPING[yamstype]
-        return 'CAST(%s AS %s)' % (varname, sqltype)
+        return "CAST(%s AS %s)" % (varname, sqltype)
 
 
 class _FunctionRegistry(object):
@@ -642,7 +748,7 @@ class _FunctionRegistry(object):
         try:
             if issubclass(funcdef, FunctionDescr):
                 funcdef = funcdef()
-        except TypeError: # issubclass is quite strict
+        except TypeError:  # issubclass is quite strict
             pass
         assert isinstance(funcdef, FunctionDescr)
         funcname = funcname or funcdef.name
@@ -666,22 +772,42 @@ class _FunctionRegistry(object):
             registry.register_function(funcdef, funcname=funcname)
         return registry
 
+
 SQL_FUNCTIONS_REGISTRY = _FunctionRegistry()
 
 for func_class in (
     # aggregate functions
-    MIN, MAX, SUM, COUNT, COUNTDISTINCT, AVG,
+    MIN,
+    MAX,
+    SUM,
+    COUNT,
+    COUNTDISTINCT,
+    AVG,
     # transformation functions
-    ABS, RANDOM,
-    UPPER, LOWER, SUBSTRING, LENGTH,
+    ABS,
+    RANDOM,
+    UPPER,
+    LOWER,
+    SUBSTRING,
+    LENGTH,
     DATE,
-    YEAR, MONTH, DAY, HOUR, MINUTE, SECOND, WEEKDAY, EPOCH, AT_TZ,
+    YEAR,
+    MONTH,
+    DAY,
+    HOUR,
+    MINUTE,
+    SECOND,
+    WEEKDAY,
+    EPOCH,
+    AT_TZ,
     # cast functions
     CAST,
     # keyword function
-    IN):
+    IN,
+):
     SQL_FUNCTIONS_REGISTRY.register_function(func_class())
 
+
 def register_function(funcdef):
     """register the function `funcdef` on supported backends"""
     SQL_FUNCTIONS_REGISTRY.register_function(funcdef)
@@ -692,10 +818,10 @@ class _TypeMapping(dict):
         try:
             return dict.__getitem__(self, key)
         except KeyError:
-            if key == 'TZDatetime':
-                return self['Datetime']
-            if key == 'TZTime':
-                return self['Time']
+            if key == "TZDatetime":
+                return self["Datetime"]
+            if key == "TZTime":
+                return self["Time"]
             raise
 
     def copy(self):
@@ -708,35 +834,38 @@ class _GenericAdvFuncHelper(FTIndexerMix
 
     An exception is raised when the functionality is not emulatable
     """
+
     # 'canonical' types are `yams` types. This dictionnary map those types to
     # backend specific types
-    TYPE_MAPPING = _TypeMapping({
-        'String' :   'text',
-        'SizeConstrainedString': 'varchar(%s)',
-        'Password' : 'bytea',
-        'Bytes' :    'bytea',
-        'Int' :      'integer',
-        'BigInt':    'bigint',
-        'Float' :    'float',
-        'Decimal' :  'decimal',
-        'Boolean' :  'boolean',
-        'Date' :     'date',
-        'Time' :     'time',
-        'Datetime' : 'timestamp',
-        'Interval' : 'interval',
-        })
+    TYPE_MAPPING = _TypeMapping(
+        {
+            "String": "text",
+            "SizeConstrainedString": "varchar(%s)",
+            "Password": "bytea",
+            "Bytes": "bytea",
+            "Int": "integer",
+            "BigInt": "bigint",
+            "Float": "float",
+            "Decimal": "decimal",
+            "Boolean": "boolean",
+            "Date": "date",
+            "Time": "time",
+            "Datetime": "timestamp",
+            "Interval": "interval",
+        }
+    )
 
     TYPE_CONVERTERS = {
-        'Boolean': bool,
+        "Boolean": bool,
         # XXX needed for sqlite but I don't think it is for other backends
-        'Datetime': convert_datetime,
-        'Date': convert_date,
-        'TZDatetime': convert_tzdatetime,
-        'TZTime': convert_tztime,
-        }
+        "Datetime": convert_datetime,
+        "Date": convert_date,
+        "TZDatetime": convert_tzdatetime,
+        "TZTime": convert_tztime,
+    }
 
     # DBMS resources descriptors and accessors
-    backend_name = None # overridden in subclasses ('postgres', 'sqlite', etc.)
+    backend_name = None  # overridden in subclasses ('postgres', 'sqlite', etc.)
     needs_from_clause = False
     union_parentheses_support = True
     intersect_all_support = True
@@ -748,9 +877,11 @@ class _GenericAdvFuncHelper(FTIndexerMix
 
     # allow call to [backup|restore]_commands without previous call to
     # record_connection_information but by specifying argument explicitly
-    dbname = dbhost = dbport = dbuser = dbpassword = dbextraargs = dbencoding = dbschema = None
-
-    def __init__(self, encoding='utf-8', _cnx=None):
+    dbname = (
+        dbhost
+    ) = dbport = dbuser = dbpassword = dbextraargs = dbencoding = dbschema = None
+
+    def __init__(self, encoding="utf-8", _cnx=None):
         self.dbencoding = encoding
         self._cnx = _cnx
         self.dbapi_module = get_dbapi_compliant_module(self.backend_name)
@@ -758,13 +889,25 @@ class _GenericAdvFuncHelper(FTIndexerMix
 
     def __repr__(self):
         if self.dbname is not None:
-            return '<lgdbhelper %s@%s [%s] @%#x>' % (self.dbname, self.dbhost,
-                                                     self.backend_name, id(self))
+            return "<lgdbhelper %s@%s [%s] @%#x>" % (
+                self.dbname,
+                self.dbhost,
+                self.backend_name,
+                id(self),
+            )
         return super(_GenericAdvFuncHelper, self).__repr__()
 
-    def record_connection_info(self, dbname, dbhost=None, dbport=None,
-                               dbuser=None, dbpassword=None, dbextraargs=None,
-                               dbencoding=None, dbschema=None):
+    def record_connection_info(
+        self,
+        dbname,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbpassword=None,
+        dbextraargs=None,
+        dbencoding=None,
+        dbschema=None,
+    ):
         self.dbname = dbname
         self.dbhost = dbhost
         self.dbport = dbport
@@ -782,16 +925,25 @@ class _GenericAdvFuncHelper(FTIndexerMix
         paramaters.
         """
         if self.dbuser:
-            self.logger.info('connecting to %s@%s for user %s', self.dbname,
-                         self.dbhost or 'localhost', self.dbuser)
+            self.logger.info(
+                "connecting to %s@%s for user %s",
+                self.dbname,
+                self.dbhost or "localhost",
+                self.dbuser,
+            )
         else:
-            self.logger.info('connecting to %s@%s', self.dbname,
-                         self.dbhost or 'localhost')
-        cnx = self.dbapi_module.connect(self.dbhost, self.dbname,
-                                        self.dbuser,self.dbpasswd,
-                                        port=self.dbport,
-                                        schema=self.dbschema,
-                                        extra_args=self.dbextraargs)
+            self.logger.info(
+                "connecting to %s@%s", self.dbname, self.dbhost or "localhost"
+            )
+        cnx = self.dbapi_module.connect(
+            self.dbhost,
+            self.dbname,
+            self.dbuser,
+            self.dbpasswd,
+            port=self.dbport,
+            schema=self.dbschema,
+            extra_args=self.dbextraargs,
+        )
         if initcnx:
             for hook in SQL_CONNECT_HOOKS.get(self.backend_name, ()):
                 hook(cnx)
@@ -803,31 +955,47 @@ class _GenericAdvFuncHelper(FTIndexerMix
     @classmethod
     def function_description(cls, funcname):
         """return the description (`FunctionDescription`) for a SQL function"""
-        return SQL_FUNCTIONS_REGISTRY.get_backend_function(
-            funcname, cls.backend_name)
+        return SQL_FUNCTIONS_REGISTRY.get_backend_function(funcname, cls.backend_name)
 
     def func_as_sql(self, funcname, args):
         funcdef = SQL_FUNCTIONS_REGISTRY.get_backend_function(
-            funcname, self.backend_name)
+            funcname, self.backend_name
+        )
         return funcdef.as_sql(self.backend_name, args)
 
     def system_database(self):
         """return the system database for the given driver"""
-        raise NotImplementedError('not supported by this DBMS')
-
-    def backup_commands(self, backupfile, keepownership=True,
-                        dbname=None, dbhost=None, dbport=None, dbuser=None, dbschema=None):
+        raise NotImplementedError("not supported by this DBMS")
+
+    def backup_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbschema=None,
+    ):
         """Return a list of commands to backup the given database.
 
         Each command may be given as a list or as a string. In the latter case,
         expected to be used with a subshell (for instance using `os.system(cmd)`
         or `subprocess.call(cmd, shell=True)`
         """
-        raise NotImplementedError('not supported by this DBMS')
-
-    def restore_commands(self, backupfile, keepownership=True, drop=True,
-                         dbname=None, dbhost=None, dbport=None, dbuser=None,
-                         dbencoding=None):
+        raise NotImplementedError("not supported by this DBMS")
+
+    def restore_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        drop=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbencoding=None,
+    ):
         """Return a list of commands to restore a backup of the given database.
 
 
@@ -835,7 +1003,7 @@ class _GenericAdvFuncHelper(FTIndexerMix
         expected to be used with a subshell (for instance using `os.system(cmd)`
         or `subprocess.call(cmd, shell=True)`
         """
-        raise NotImplementedError('not supported by this DBMS')
+        raise NotImplementedError("not supported by this DBMS")
 
     # helpers to standardize SQL according to the database #####################
 
@@ -845,7 +1013,7 @@ class _GenericAdvFuncHelper(FTIndexerMix
         Take care default implementation return date at the beginning of the
         transaction on some backend (eg postgres)
         """
-        return 'CURRENT_DATE'
+        return "CURRENT_DATE"
 
     def sql_current_time(self):
         """Return sql for the current time.
@@ -853,7 +1021,7 @@ class _GenericAdvFuncHelper(FTIndexerMix
         Take care default implementation return time at the beginning of the
         transaction on some backend (eg postgres)
         """
-        return 'CURRENT_TIME'
+        return "CURRENT_TIME"
 
     def sql_current_timestamp(self):
         """Return sql for the current date and time.
@@ -861,71 +1029,82 @@ class _GenericAdvFuncHelper(FTIndexerMix
         Take care default implementation return date and time at the beginning
         of the transaction on some backend (eg postgres)
         """
-        return 'CURRENT_TIMESTAMP'
+        return "CURRENT_TIMESTAMP"
 
     def sql_concat_string(self, lhs, rhs):
         """Return sql for concatenating given arguments (expected to be
         evaluated as string when executing the query).
         """
-        return '%s || %s' % (lhs, rhs)
+        return "%s || %s" % (lhs, rhs)
 
     def sql_regexp_match_expression(self, pattern):
         """pattern matching using regexp"""
-        raise NotImplementedError('not supported by this DBMS')
+        raise NotImplementedError("not supported by this DBMS")
 
     def sql_create_index(self, table, column, unique=False):
         idx = self._index_name(table, column, unique)
         if unique:
-            return 'ALTER TABLE %s ADD UNIQUE(%s)' % (table, column)
+            return "ALTER TABLE %s ADD UNIQUE(%s)" % (table, column)
         else:
-            return 'CREATE INDEX %s ON %s(%s);' % (idx, table, column)
+            return "CREATE INDEX %s ON %s(%s);" % (idx, table, column)
 
     def sql_drop_index(self, table, column, unique=False):
         idx = self._index_name(table, column, unique)
         if unique:
-            return 'ALTER TABLE %s DROP CONSTRAINT %s' % (table, idx)
+            return "ALTER TABLE %s DROP CONSTRAINT %s" % (table, idx)
         else:
-            return 'DROP INDEX IF EXISTS %s' % idx
+            return "DROP INDEX IF EXISTS %s" % idx
 
     def sqls_create_multicol_unique_index(self, table, columns, indexname=None):
         columns = sorted(columns)
         if indexname is None:
-            warn('You should provide an explicit index name else you risk '
-                 'a silent truncation of the computed index name.',
-                 DeprecationWarning)
-            indexname = 'unique_%s_%s_idx' % (table, '_'.join(columns))
-        sql = 'CREATE UNIQUE INDEX %s ON %s(%s);' % (indexname.lower(),
-                                                     table, ','.join(columns))
+            warn(
+                "You should provide an explicit index name else you risk "
+                "a silent truncation of the computed index name.",
+                DeprecationWarning,
+            )
+            indexname = "unique_%s_%s_idx" % (table, "_".join(columns))
+        sql = "CREATE UNIQUE INDEX %s ON %s(%s);" % (
+            indexname.lower(),
+            table,
+            ",".join(columns),
+        )
         return [sql]
 
     def sqls_drop_multicol_unique_index(self, table, columns, indexname=None):
         columns = sorted(columns)
         if indexname is None:
-            warn('You should provide an explicit index name else you risk '
-                 'a silent truncation of the computed index name.',
-                 DeprecationWarning)
-            indexname = 'unique_%s_%s_idx' % (table, '_'.join(columns))
-        sql = 'DROP INDEX IF EXISTS %s;' % (indexname.lower())
+            warn(
+                "You should provide an explicit index name else you risk "
+                "a silent truncation of the computed index name.",
+                DeprecationWarning,
+            )
+            indexname = "unique_%s_%s_idx" % (table, "_".join(columns))
+        sql = "DROP INDEX IF EXISTS %s;" % (indexname.lower())
         return [sql]
 
     # sequence protocol
 
     def sql_create_sequence(self, seq_name):
-        return ('CREATE TABLE %s (last INTEGER);'
-                'INSERT INTO %s VALUES (0);') % (seq_name, seq_name)
+        return ("CREATE TABLE %s (last INTEGER);" "INSERT INTO %s VALUES (0);") % (
+            seq_name,
+            seq_name,
+        )
 
     def sql_restart_sequence(self, seq_name, initial_value=1):
-        return 'UPDATE %s SET last=%s;' % (seq_name, initial_value)
+        return "UPDATE %s SET last=%s;" % (seq_name, initial_value)
 
     def sql_sequence_current_state(self, seq_name):
-        return 'SELECT last FROM %s;' % seq_name
+        return "SELECT last FROM %s;" % seq_name
 
     def sql_drop_sequence(self, seq_name):
-        return 'DROP TABLE %s;' % seq_name
+        return "DROP TABLE %s;" % seq_name
 
     def sqls_increment_sequence(self, seq_name):
-        return ('UPDATE %s SET last=last+1;' % seq_name,
-                'SELECT last FROM %s;' % seq_name)
+        return (
+            "UPDATE %s SET last=last+1;" % seq_name,
+            "SELECT last FROM %s;" % seq_name,
+        )
 
     # /sequence
     # numrange protocol
@@ -936,8 +1115,10 @@ class _GenericAdvFuncHelper(FTIndexerMix
     sql_drop_numrange = sql_drop_sequence
 
     def sqls_increment_numrange(self, seq_name, count=1):
-        return ('UPDATE %s SET last=last+%s;' % (seq_name, count),
-                'SELECT last FROM %s;' % seq_name)
+        return (
+            "UPDATE %s SET last=last+%s;" % (seq_name, count),
+            "SELECT last FROM %s;" % seq_name,
+        )
 
     # /numrange
 
@@ -952,12 +1133,14 @@ class _GenericAdvFuncHelper(FTIndexerMix
             return sql
         sql = [sql]
         if limit is not None:
-            sql.append('LIMIT %d' % limit)
+            sql.append("LIMIT %d" % limit)
         if offset is not None and offset > 0:
-            sql.append('OFFSET %d' % offset)
-        return '\n'.join(sql)
-
-    def sql_add_order_by(self, sql, sortterms, selection, needwrap, has_limit_or_offset):
+            sql.append("OFFSET %d" % offset)
+        return "\n".join(sql)
+
+    def sql_add_order_by(
+        self, sql, sortterms, selection, needwrap, has_limit_or_offset
+    ):
         """
         add an ORDER BY clause to the SQL query, and wrap the query if necessary
         :sql: the original sql query
@@ -966,28 +1149,24 @@ class _GenericAdvFuncHelper(FTIndexerMix
         :needwrap: boolean, True if the query must be wrapped in a subquery
         :has_limit_or_offset: not used (sqlserver helper needs this)
         """
-        sql += '\nORDER BY %s' % ','.join(sortterms)
+        sql += "\nORDER BY %s" % ",".join(sortterms)
         if sortterms and needwrap:
-            selection = ['T1.C%s' % i for i in range(len(selection))]
-            sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql)
+            selection = ["T1.C%s" % i for i in range(len(selection))]
+            sql = "SELECT %s FROM (%s) AS T1" % (",".join(selection), sql)
         return sql
 
-
     def sql_rename_col(self, table, column, newname, coltype, null_allowed):
-        return 'ALTER TABLE %s RENAME COLUMN %s TO %s' % (
-            table, column, newname)
+        return "ALTER TABLE %s RENAME COLUMN %s TO %s" % (table, column, newname)
 
     def sql_rename_table(self, oldname, newname):
-        return  'ALTER TABLE %s RENAME TO %s' % (oldname, newname)
+        return "ALTER TABLE %s RENAME TO %s" % (oldname, newname)
 
     def sql_change_col_type(self, table, column, coltype, null_allowed):
-        return 'ALTER TABLE %s ALTER COLUMN %s TYPE %s' % (
-            table, column, coltype)
+        return "ALTER TABLE %s ALTER COLUMN %s TYPE %s" % (table, column, coltype)
 
     def sql_set_null_allowed(self, table, column, coltype, null_allowed):
-        cmd = null_allowed and 'DROP' or 'SET'
-        return 'ALTER TABLE %s ALTER COLUMN %s %s NOT NULL' % (
-            table, column, cmd)
+        cmd = null_allowed and "DROP" or "SET"
+        return "ALTER TABLE %s ALTER COLUMN %s %s NOT NULL" % (table, column, cmd)
 
     def temporary_table_name(self, table_name):
         """
@@ -997,8 +1176,7 @@ class _GenericAdvFuncHelper(FTIndexerMix
         """
         return table_name
 
-    def sql_temporary_table(self, table_name, table_schema,
-                            drop_on_commit=True):
+    def sql_temporary_table(self, table_name, table_schema, drop_on_commit=True):
         table_name = self.temporary_table_name(table_name)
         return "CREATE TEMPORARY TABLE %s (%s);" % (table_name, table_schema)
 
@@ -1015,17 +1193,18 @@ class _GenericAdvFuncHelper(FTIndexerMix
     def create_user(self, cursor, user, password):
         """create a new database user"""
         if not self.users_support:
-            raise NotImplementedError('not supported by this DBMS')
-        cursor.execute("CREATE USER %(user)s "
-                       "WITH PASSWORD '%(password)s'" % locals())
+            raise NotImplementedError("not supported by this DBMS")
+        cursor.execute(
+            "CREATE USER %(user)s " "WITH PASSWORD '%(password)s'" % locals()
+        )
 
     def _index_name(self, table, column, unique=False):
         if unique:
             # note: this naming is consistent with indices automatically
             # created by postgres when UNIQUE appears in a table schema
-            return '%s_%s_key' % (table.lower(), column.lower())
+            return "%s_%s_key" % (table.lower(), column.lower())
         else:
-            return '%s_%s_idx' % (table.lower(), column.lower())
+            return "%s_%s_idx" % (table.lower(), column.lower())
 
     def create_index(self, cursor, table, column, unique=False):
         if not self.index_exists(cursor, table, column, unique):
@@ -1051,32 +1230,32 @@ class _GenericAdvFuncHelper(FTIndexerMix
 
     def create_database(self, cursor, dbname, owner=None, dbencoding=None):
         """create a new database"""
-        raise NotImplementedError('not supported by this DBMS')
+        raise NotImplementedError("not supported by this DBMS")
 
     def create_schema(self, cursor, schema, granted_user=None):
         """create a new database schema"""
-        raise NotImplementedError('not supported by this DBMS')
+        raise NotImplementedError("not supported by this DBMS")
 
     def drop_schema(self, cursor, schema):
         """drop a database schema"""
-        raise NotImplementedError('not supported by this DBMS')
+        raise NotImplementedError("not supported by this DBMS")
 
     def list_databases(self):
         """return the list of existing databases"""
-        raise NotImplementedError('not supported by this DBMS')
+        raise NotImplementedError("not supported by this DBMS")
 
     def list_users(self, cursor):
         """return the list of existing database users"""
-        raise NotImplementedError('not supported by this DBMS')
+        raise NotImplementedError("not supported by this DBMS")
 
     def list_tables(self, cursor):
         """return the list of tables of a database"""
-        raise NotImplementedError('not supported by this DBMS')
+        raise NotImplementedError("not supported by this DBMS")
 
     def list_indices(self, cursor, table=None):
         """return the list of indices of a database, only for the given table if specified"""
-        raise NotImplementedError('not supported by this DBMS')
-
-    @deprecated('[lgdb 1.10] deprecated method')
+        raise NotImplementedError("not supported by this DBMS")
+
+    @deprecated("[lgdb 1.10] deprecated method")
     def boolean_value(self, value):
         return int(bool(value))
diff --git a/logilab/database/fti.py b/logilab/database/fti.py
--- a/logilab/database/fti.py
+++ b/logilab/database/fti.py
@@ -22,9 +22,11 @@ from logilab.common.textutils import uno
 
 REM_PUNC = re.compile(r"[,.;:!?\n\r\t\)\(«»\<\>/\\\|\[\]{}^#@$£_=+\-*&§]")
 
+
 class StopWord(Exception):
     """Raised to indicate that a stop word has been encountered."""
 
+
 def normalize(word):
     """Return the normalized form for a word.
 
@@ -39,13 +41,14 @@ def normalize(word):
     stop words are :
        _ single letter
     """
-    assert isinstance(word, str), '%r should be unicode' % word
+    assert isinstance(word, str), "%r should be unicode" % word
     # do not index single letters
     if len(word) == 1:
         raise StopWord()
-    word = unormalize(word.lower(), substitute='')
+    word = unormalize(word.lower(), substitute="")
     # we need an ascii-only unicode string, not bytes
-    return word.encode('ascii', 'ignore').decode('ascii')
+    return word.encode("ascii", "ignore").decode("ascii")
+
 
 def normalize_words(rawwords):
     words = []
@@ -56,17 +59,20 @@ def normalize_words(rawwords):
             continue
     return words
 
+
 RE_DATE = r"\d{2,4}[/-]\d{2,2}[/-]\d{2,4}"
 RE_HOUR = r"\d{1,2}[:h]\d{2,2}:?\d{0,2}"
 # indexation tokenizer
 RE_WORD_INDEX = u"[^ ,.;:!?\"\n\r\t)(«»\\<\\>/\\\\\\|\\[\\]{}^#@$£_'=+\\-&§\u2019]+"
-TOKENIZE_RE_INDEX = re.compile('(?:%s)|(?:%s)|(?:%s)' % (RE_DATE, RE_HOUR, RE_WORD_INDEX),
-                               re.L & re.U)
+TOKENIZE_RE_INDEX = re.compile(
+    "(?:%s)|(?:%s)|(?:%s)" % (RE_DATE, RE_HOUR, RE_WORD_INDEX), re.L & re.U
+)
 tokenize = TOKENIZE_RE_INDEX.findall
 # query tokenizer
 RE_WORD_QUERY = u"[^ ,.;:!?\"\n\r\t)(«»\\<\\>/\\\\\\|\\[\\]{}^#@$£_'=+\\-&§\u2019]+"
-TOKENIZE_RE_QUERY = re.compile('(?:%s)|(?:%s)|(?:%s)' % (RE_DATE, RE_HOUR, RE_WORD_QUERY),
-                               re.L & re.U)
+TOKENIZE_RE_QUERY = re.compile(
+    "(?:%s)|(?:%s)|(?:%s)" % (RE_DATE, RE_HOUR, RE_WORD_QUERY), re.L & re.U
+)
 tokenize_query = TOKENIZE_RE_QUERY.findall
 
 
@@ -76,8 +82,9 @@ class FTIndexerMixIn(object):
 
     Provide an inefficient but generic indexing method which can be overridden.
     """
-    fti_table = 'appears'
-    fti_uid_attr = 'uid'
+
+    fti_table = "appears"
+    fti_uid_attr = "uid"
     fti_need_distinct = True
     fti_sql_schema = """
 %s
@@ -159,7 +166,7 @@ CREATE INDEX appears_word_id ON appears 
                 position += 1
 
     def cursor_unindex_object(self, uid, cursor):
-        cursor.execute('DELETE FROM appears WHERE uid=%s' % uid)
+        cursor.execute("DELETE FROM appears WHERE uid=%s" % uid)
 
     def cursor_reindex_object(self, uid, obj, cursor):
         self.cursor_unindex_object(uid, cursor)
@@ -170,34 +177,40 @@ CREATE INDEX appears_word_id ON appears 
             word = normalize(word)
         except StopWord:
             return
-        cursor.execute("SELECT word_id FROM word WHERE word=%(word)s;",
-                       {'word':word})
+        cursor.execute("SELECT word_id FROM word WHERE word=%(word)s;", {"word": word})
         wid = cursor.fetchone()
         if wid is None:
-            wid = self.increment_sequence(cursor, 'word_id_seq')
+            wid = self.increment_sequence(cursor, "word_id_seq")
             try:
-                cursor.execute('''INSERT INTO word(word_id, word)
-                VALUES (%(uid)s,%(word)s);''', {'uid':wid, 'word':word})
+                cursor.execute(
+                    """INSERT INTO word(word_id, word)
+                VALUES (%(uid)s,%(word)s);""",
+                    {"uid": wid, "word": word},
+                )
             except:
                 # Race condition occured.
                 # someone inserted the word before we did.
                 # Never mind, let's use the new entry...
-                cursor.execute("SELECT word_id FROM word WHERE word=%(word)s;",
-                               {'word':word})
+                cursor.execute(
+                    "SELECT word_id FROM word WHERE word=%(word)s;", {"word": word}
+                )
                 wid = cursor.fetchone()[0]
         else:
             wid = wid[0]
-        cursor.execute("INSERT INTO appears(uid, word_id, pos) "
-                       "VALUES (%(uid)s,%(wid)s,%(position)s);",
-                       {'uid': uid, 'wid': wid, 'position': position})
+        cursor.execute(
+            "INSERT INTO appears(uid, word_id, pos) "
+            "VALUES (%(uid)s,%(wid)s,%(position)s);",
+            {"uid": uid, "wid": wid, "position": position},
+        )
 
     def fulltext_search(self, query_string, cursor=None):
         """execute a full text query and return a list of 2-uple (rating, uid)
         """
         from logilab.database.ftiparser import IndexerQuery, IndexerQueryScanner
         from logilab.database.ftiquery import Query
+
         query = Query(normalize)
-        parser = IndexerQuery(IndexerQueryScanner(REM_PUNC.sub(' ', query_string)))
+        parser = IndexerQuery(IndexerQueryScanner(REM_PUNC.sub(" ", query_string)))
         parser.goal(query)
         return query.execute(cursor or self._cnx.cursor())
 
@@ -208,29 +221,38 @@ CREATE INDEX appears_word_id ON appears 
                 words.append("'%s'" % normalize(word))
             except StopWord:
                 continue
-        sql = '%s.word_id IN (SELECT word_id FROM word WHERE word in (%s))' % (
-            tablename, ', '.join(words))
+        sql = "%s.word_id IN (SELECT word_id FROM word WHERE word in (%s))" % (
+            tablename,
+            ", ".join(words),
+        )
         if not_:
-            sql = 'NOT (%s)' % sql
+            sql = "NOT (%s)" % sql
         if jointo is None:
             return sql
-        return '%s AND %s.uid=%s' % (sql, tablename, jointo)
+        return "%s AND %s.uid=%s" % (sql, tablename, jointo)
 
     def fti_rank_order(self, tablename, querystr):
         return None
 
     def sql_init_fti(self):
         """return the sql definition of table()s used by the full text index"""
-        return self.fti_sql_schema % self.sql_create_sequence('word_id_seq')
+        return self.fti_sql_schema % self.sql_create_sequence("word_id_seq")
 
     def sql_drop_fti(self):
         """drop tables used by the full text index"""
-        return '''DROP TABLE appears;
-DROP TABLE word; %s''' % self.sql_drop_sequence('word_id_seq')
+        return """DROP TABLE appears;
+DROP TABLE word; %s""" % self.sql_drop_sequence(
+            "word_id_seq"
+        )
 
     def sql_grant_user_on_fti(self, user):
-        return '''GRANT ALL ON appears_uid TO %s;
+        return """GRANT ALL ON appears_uid TO %s;
 GRANT ALL ON appears_word_id TO %s;
 GRANT ALL ON appears TO %s;
 GRANT ALL ON word TO %s;
-''' % (user, user, user, user)
+""" % (
+            user,
+            user,
+            user,
+            user,
+        )
diff --git a/logilab/database/ftiparser.py b/logilab/database/ftiparser.py
--- a/logilab/database/ftiparser.py
+++ b/logilab/database/ftiparser.py
@@ -26,32 +26,39 @@ from __future__ import print_function
 import sys, re
 from yapps import runtime
 
+
 class IndexerQueryScanner(runtime.Scanner):
     patterns = [
-        ("'$'", re.compile('$')),
-        ('\\s+', re.compile('\\s+')),
-        ('WORD', re.compile('\\w+')),
-        ('STRING', re.compile('\'([^\\\'\\\\]|\\\\.)*\'|\\"([^\\\\\\"\\\\]|\\\\.)*\\"')),
+        ("'$'", re.compile("$")),
+        ("\\s+", re.compile("\\s+")),
+        ("WORD", re.compile("\\w+")),
+        (
+            "STRING",
+            re.compile("'([^\\'\\\\]|\\\\.)*'|\\\"([^\\\\\\\"\\\\]|\\\\.)*\\\""),
+        ),
     ]
-    def __init__(self, str,*args,**kw):
-        runtime.Scanner.__init__(self,None,{'\\s+':None,},str,*args,**kw)
+
+    def __init__(self, str, *args, **kw):
+        runtime.Scanner.__init__(self, None, {"\\s+": None,}, str, *args, **kw)
+
 
 class IndexerQuery(runtime.Parser):
     Context = runtime.Context
+
     def goal(self, Q, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'goal', [Q])
-        while self._peek( context=_context) != "'$'":
+        _context = self.Context(_parent, self._scanner, "goal", [Q])
+        while self._peek(context=_context) != "'$'":
             all = self.all(Q, _context)
         self._scan("'$'", context=_context)
 
     def all(self, Q, _parent=None):
-        _context = self.Context(_parent, self._scanner, 'all', [Q])
-        _token = self._peek('WORD', 'STRING', context=_context)
-        if _token == 'WORD':
-            WORD = self._scan('WORD', context=_context)
+        _context = self.Context(_parent, self._scanner, "all", [Q])
+        _token = self._peek("WORD", "STRING", context=_context)
+        if _token == "WORD":
+            WORD = self._scan("WORD", context=_context)
             Q.add_word(WORD)
-        else: # == 'STRING'
-            STRING = self._scan('STRING', context=_context)
+        else:  # == 'STRING'
+            STRING = self._scan("STRING", context=_context)
             Q.add_phrase(STRING)
 
 
@@ -59,14 +66,16 @@ def parse(rule, text):
     P = IndexerQuery(IndexerQueryScanner(text))
     return runtime.wrap_error_reporter(P, rule)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     from sys import argv, stdin
+
     if len(argv) >= 2:
         if len(argv) >= 3:
-            f = open(argv[2],'r')
+            f = open(argv[2], "r")
         else:
             f = stdin
         print(parse(argv[1], f.read()))
     else:
-        print('Args:  <rule> [<filename>]', file=sys.stderr)
+        print("Args:  <rule> [<filename>]", file=sys.stderr)
 # End -- grammar generated by Yapps
diff --git a/logilab/database/ftiquery.py b/logilab/database/ftiquery.py
--- a/logilab/database/ftiquery.py
+++ b/logilab/database/ftiquery.py
@@ -23,6 +23,7 @@
 
 from logilab.database.fti import StopWord, tokenize
 
+
 class Query(object):
     """ a query is the object manipulated by the indexer
     the query parser'll call add_word and add_phrase on this object accoring to
@@ -104,16 +105,19 @@ class KeywordsQuery(object):
         attrs = {}
         tables, select = [], []
         for i in range(len(self.words)):
-            tables.append('appears as appears%d, word as word%d'% (i, i))
-            select.append('word%d.word = %%(word%d)s ' % (i, i))
-            select.append('word%d.word_id = appears%d.word_id ' % (i, i))
-            attrs['word%d' % i] = self.words[i]
+            tables.append("appears as appears%d, word as word%d" % (i, i))
+            select.append("word%d.word = %%(word%d)s " % (i, i))
+            select.append("word%d.word_id = appears%d.word_id " % (i, i))
+            attrs["word%d" % i] = self.words[i]
             if i > 0:
-                select.append('appears%d.uid = appears%d.uid ' % (i-1, i))
-        query = ('SELECT count(*) as rating, appears0.uid FROM ' +
-                 ', '.join(tables) +
-                 ' WHERE ' + ' AND '.join(select) +
-                 ' GROUP BY appears0.uid ;')
+                select.append("appears%d.uid = appears%d.uid " % (i - 1, i))
+        query = (
+            "SELECT count(*) as rating, appears0.uid FROM "
+            + ", ".join(tables)
+            + " WHERE "
+            + " AND ".join(select)
+            + " GROUP BY appears0.uid ;"
+        )
         cursor.execute(query, attrs)
         for rating, uid in cursor.fetchall():
             results[uid] = rating
@@ -136,30 +140,33 @@ class PhraseQuery(object):
         """
         results = {}
         if uids is not None:
-            uids = ', '.join([str(uid) for uid in uids])
+            uids = ", ".join([str(uid) for uid in uids])
             restrict = "AND uid in (%s)" % uids
         else:
             restrict = ""
-        query = ('SELECT uid, pos FROM appears,word '
-                 "WHERE word.word = '%s'"
-                 'AND word.word_id = appears.word_id %s' % (self.tokens[0],
-                                                            restrict))
+        query = (
+            "SELECT uid, pos FROM appears,word "
+            "WHERE word.word = '%s'"
+            "AND word.word_id = appears.word_id %s" % (self.tokens[0], restrict)
+        )
         cursor.execute(query)
         for uid, pos in cursor.fetchall():
             w_pos = pos
             matches_all = 1
             for t in self.tokens[1:]:
                 w_pos += 1
-                cursor.execute('SELECT appears.uid '
-                               'FROM appears,word '
-                               'WHERE word.word = %(word)s '
-                               'AND appears.pos = %(pos)s '
-                               'AND appears.uid = %(uid)s '
-                               'AND word.word_id = appears.word_id ;',
-                               {'word':t, 'uid':uid,'pos':w_pos})
+                cursor.execute(
+                    "SELECT appears.uid "
+                    "FROM appears,word "
+                    "WHERE word.word = %(word)s "
+                    "AND appears.pos = %(pos)s "
+                    "AND appears.uid = %(uid)s "
+                    "AND word.word_id = appears.word_id ;",
+                    {"word": t, "uid": uid, "pos": w_pos},
+                )
                 if not cursor.fetchall():
                     matches_all = 0
                     break
             if matches_all:
-                results[uid] = results.get(uid, 0)+ 1
+                results[uid] = results.get(uid, 0) + 1
         return results
diff --git a/logilab/database/mysql.py b/logilab/database/mysql.py
--- a/logilab/database/mysql.py
+++ b/logilab/database/mysql.py
@@ -26,10 +26,12 @@ from warnings import warn
 from logilab import database as db
 from logilab.database.fti import normalize_words, tokenize
 
+
 class _MySqlDBAdapter(db.DBAPIAdapter):
     """Simple mysql Adapter to DBAPI
     """
-    BOOLEAN = 'XXX' # no specific type code for boolean
+
+    BOOLEAN = "XXX"  # no specific type code for boolean
 
     def __init__(self, native_module, pywrap=False):
         db.DBAPIAdapter.__init__(self, native_module, pywrap)
@@ -38,13 +40,14 @@ class _MySqlDBAdapter(db.DBAPIAdapter):
     def _init_module(self):
         """initialize mysqldb to use mx.DateTime for date and timestamps"""
         natmod = self._native_module
-        if hasattr(natmod, '_lc_initialized'):
+        if hasattr(natmod, "_lc_initialized"):
             return
         natmod._lc_initialized = 1
         # date/time types handling
         if db.USE_MX_DATETIME:
             from MySQLdb import times
             from mx import DateTime as mxdt
+
             times.Date = times.date = mxdt.Date
             times.Time = times.time = mxdt.Time
             times.Timestamp = times.datetime = mxdt.DateTime
@@ -52,99 +55,126 @@ class _MySqlDBAdapter(db.DBAPIAdapter):
             times.DateTimeType = mxdt.DateTimeType
             times.DateTimeDeltaType = mxdt.DateTimeDeltaType
 
-    def connect(self, host='', database='', user='', password='', port=None,
-                unicode=True, charset='utf8', schema=None, extra_args=None):
+    def connect(
+        self,
+        host="",
+        database="",
+        user="",
+        password="",
+        port=None,
+        unicode=True,
+        charset="utf8",
+        schema=None,
+        extra_args=None,
+    ):
         """Handles mysqldb connection format
         the unicode named argument asks to use Unicode objects for strings
         in result sets and query parameters
         """
         if schema is not None:
-            warn('schema support is not implemented on mysql backends, ignoring schema %s'
-                 % schema)
-        kwargs = {'host' : host or '', 'db' : database,
-                  'user' : user, 'passwd' : password,
-                  'use_unicode' : unicode}
+            warn(
+                "schema support is not implemented on mysql backends, ignoring schema %s"
+                % schema
+            )
+        kwargs = {
+            "host": host or "",
+            "db": database,
+            "user": user,
+            "passwd": password,
+            "use_unicode": unicode,
+        }
         # MySQLdb doesn't support None port
         if port:
-            kwargs['port'] = int(port)
+            kwargs["port"] = int(port)
         cnx = self._native_module.connect(**kwargs)
         if unicode:
-            if charset.lower() == 'utf-8':
-                charset = 'utf8'
+            if charset.lower() == "utf-8":
+                charset = "utf8"
             cnx.set_character_set(charset)
         return self._wrap_if_needed(cnx)
 
-    def _transformation_callback(self, description, encoding='utf-8', binarywrap=None):
+    def _transformation_callback(self, description, encoding="utf-8", binarywrap=None):
         typecode = description[1]
         # hack to differentiate mediumtext (String) and tinyblob/longblog
         # (Password/Bytes) which are all sharing the same type code :(
         if typecode == self.BINARY:
+
             def _transform(value):
-                if hasattr(value, 'tostring'): # may be an array
+                if hasattr(value, "tostring"):  # may be an array
                     value = value.tostring()
                 maxsize = description[3]
                 # mediumtext can hold up to (2**24 - 1) characters (16777215)
                 # but if utf8 is set, each character is stored on 3 bytes words,
                 # so we have to test for 3 * (2**24 - 1)  (i.e. 50331645)
                 # XXX: what about other encodings ??
-                if maxsize in (16777215, 50331645): # mediumtext (2**24 - 1)
+                if maxsize in (16777215, 50331645):  # mediumtext (2**24 - 1)
                     return value
-                #if maxsize == 255: # tinyblob (2**8 - 1)
+                # if maxsize == 255: # tinyblob (2**8 - 1)
                 #    return value
                 if binarywrap is None:
                     return value
                 return binarywrap(value)
+
         else:
-            return super(_MySqlDBAdapter, self)._transformation_callback(description,
-                                                                         encoding, binarywrap)
+            return super(_MySqlDBAdapter, self)._transformation_callback(
+                description, encoding, binarywrap
+            )
 
     def binary_to_str(self, value):
         """turn raw value returned by the db-api module into a python string"""
-        if hasattr(value, 'tostring'): # may be an array
+        if hasattr(value, "tostring"):  # may be an array
             return value.tostring()
         return value
 
     def type_code_test(self, cursor):
-        for typename in ('STRING', 'BOOLEAN', 'BINARY', 'DATETIME', 'NUMBER'):
-            self.logger.debug('%s %s', typename, getattr(self, typename))
+        for typename in ("STRING", "BOOLEAN", "BINARY", "DATETIME", "NUMBER"):
+            self.logger.debug("%s %s", typename, getattr(self, typename))
         try:
-            cursor.execute("""CREATE TABLE _type_code_test(
+            cursor.execute(
+                """CREATE TABLE _type_code_test(
             varchar_field varchar(50),
             text_field text unicode,
             mtext_field mediumtext,
             binary_field tinyblob,
             blob_field blob,
             lblob_field longblob
-            )""")
-            cursor.execute("INSERT INTO _type_code_test VALUES ('1','2','3','4', '5', '6')")
+            )"""
+            )
+            cursor.execute(
+                "INSERT INTO _type_code_test VALUES ('1','2','3','4', '5', '6')"
+            )
             cursor.execute("SELECT * FROM _type_code_test")
             descr = cursor.description
-            self.logger.info('db fields type codes')
-            for i, name in enumerate(('varchar', 'text', 'mediumtext',
-                                      'binary', 'blob', 'longblob')):
-                self.logger.info('%s %s', name, descr[i])
+            self.logger.info("db fields type codes")
+            for i, name in enumerate(
+                ("varchar", "text", "mediumtext", "binary", "blob", "longblob")
+            ):
+                self.logger.info("%s %s", name, descr[i])
         finally:
             cursor.execute("DROP TABLE _type_code_test")
 
 
-db._PREFERED_DRIVERS['mysql'] = ['MySQLdb']#, 'pyMySQL.MySQL']
-db._ADAPTER_DIRECTORY['mysql'] = {'MySQLdb': _MySqlDBAdapter,
-                         }
+db._PREFERED_DRIVERS["mysql"] = ["MySQLdb"]  # , 'pyMySQL.MySQL']
+db._ADAPTER_DIRECTORY["mysql"] = {
+    "MySQLdb": _MySqlDBAdapter,
+}
+
 
 class _MyAdvFuncHelper(db._GenericAdvFuncHelper):
     """MySQL helper, taking advantage of postgres SEQUENCE support
     """
-    backend_name = 'mysql'
+
+    backend_name = "mysql"
     needs_from_clause = True
-    ilike_support = False # insensitive search by default
+    ilike_support = False  # insensitive search by default
     case_sensitive = True
 
     TYPE_MAPPING = db._GenericAdvFuncHelper.TYPE_MAPPING.copy()
-    TYPE_MAPPING['Password'] = 'tinyblob'
-    TYPE_MAPPING['String'] = 'mediumtext'
-    TYPE_MAPPING['Bytes'] = 'longblob'
+    TYPE_MAPPING["Password"] = "tinyblob"
+    TYPE_MAPPING["String"] = "mediumtext"
+    TYPE_MAPPING["Bytes"] = "longblob"
     # don't use timestamp which is automatically updated on row update
-    TYPE_MAPPING['Datetime'] = 'datetime'
+    TYPE_MAPPING["Datetime"] = "datetime"
 
     TYPE_CONVERTERS = db._GenericAdvFuncHelper.TYPE_CONVERTERS.copy()
 
@@ -153,47 +183,65 @@ class _MyAdvFuncHelper(db._GenericAdvFun
         # XXX compress
         dbhost = dbhost or self.dbhost
         if dbhost is not None:
-            cmd += ('-h', dbhost)
+            cmd += ("-h", dbhost)
         dbport = dbport or self.dbport
         if dbport is not None:
-            cmd += ('-P', str(dbport))
-        cmd += ('-u', dbuser or self.dbuser)
+            cmd += ("-P", str(dbport))
+        cmd += ("-u", dbuser or self.dbuser)
         return cmd
 
     def system_database(self):
         """return the system database for the given driver"""
-        return ''
-
-    def backup_commands(self, backupfile, keepownership=True,
-                        dbname=None, dbhost=None, dbport=None, dbuser=None, dbschema=None):
-        cmd = self.mycmd('mysqldump', dbhost, dbport, dbuser)
-        cmd += ('-p', '-r', backupfile, dbname or self.dbname)
+        return ""
+
+    def backup_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbschema=None,
+    ):
+        cmd = self.mycmd("mysqldump", dbhost, dbport, dbuser)
+        cmd += ("-p", "-r", backupfile, dbname or self.dbname)
         return [cmd]
 
-    def restore_commands(self, backupfile, keepownership=True, drop=True,
-                         dbname=None, dbhost=None, dbport=None, dbuser=None,
-                         dbencoding=None, dbschema=None):
+    def restore_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        drop=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbencoding=None,
+        dbschema=None,
+    ):
         dbname = dbname or self.dbname
         cmds = []
-        mysqlcmd = ' '.join(self.mycmd('mysql', dbhost, dbport, dbuser))
+        mysqlcmd = " ".join(self.mycmd("mysql", dbhost, dbport, dbuser))
         if drop:
-            cmd = 'echo "DROP DATABASE %s;" | %s -p' % (
-                dbname, mysqlcmd)
+            cmd = 'echo "DROP DATABASE %s;" | %s -p' % (dbname, mysqlcmd)
             cmds.append(cmd)
         cmd = 'echo "%s;" | %s -p' % (
-            self.sql_create_database(dbname, dbencoding), mysqlcmd)
+            self.sql_create_database(dbname, dbencoding),
+            mysqlcmd,
+        )
         cmds.append(cmd)
-        cmd = '%s -p %s < %s' % (mysqlcmd, dbname, backupfile)
+        cmd = "%s -p %s < %s" % (mysqlcmd, dbname, backupfile)
         cmds.append(cmd)
         return cmds
 
-    def sql_temporary_table(self, table_name, table_schema,
-                            drop_on_commit=True):
+    def sql_temporary_table(self, table_name, table_schema, drop_on_commit=True):
         if not drop_on_commit:
-            return "CREATE TEMPORARY TABLE %s (%s);" % (
-                table_name, table_schema)
+            return "CREATE TEMPORARY TABLE %s (%s);" % (table_name, table_schema)
         return "CREATE TEMPORARY TABLE %s (%s) ON COMMIT DROP;" % (
-            table_name, table_schema)
+            table_name,
+            table_schema,
+        )
 
     def sql_create_database(self, dbname, dbencoding=None):
         sql = 'CREATE DATABASE "%(dbname)s"'
@@ -204,19 +252,28 @@ class _MyAdvFuncHelper(db._GenericAdvFun
 
     def sql_rename_col(self, table, column, newname, coltype, null_allowed):
         if null_allowed:
-            cmd = 'DEFAULT'
+            cmd = "DEFAULT"
         else:
-            cmd = 'NOT'
-        return 'ALTER TABLE %s CHANGE %s %s %s %s NULL' % (
-            table, column, newname, coltype, cmd)
+            cmd = "NOT"
+        return "ALTER TABLE %s CHANGE %s %s %s %s NULL" % (
+            table,
+            column,
+            newname,
+            coltype,
+            cmd,
+        )
 
     def sql_change_col_type(self, table, column, coltype, null_allowed):
         if null_allowed:
-            cmd = 'DEFAULT'
+            cmd = "DEFAULT"
         else:
-            cmd = 'NOT'
-        return 'ALTER TABLE %s MODIFY COLUMN %s %s %s NULL' % (
-            table, column, coltype, cmd)
+            cmd = "NOT"
+        return "ALTER TABLE %s MODIFY COLUMN %s %s %s NULL" % (
+            table,
+            column,
+            coltype,
+            cmd,
+        )
 
     def sql_set_null_allowed(self, table, column, coltype, null_allowed):
         return self.sql_change_col_type(table, column, coltype, null_allowed)
@@ -225,7 +282,7 @@ class _MyAdvFuncHelper(db._GenericAdvFun
         """create a new database"""
         cursor.execute(self.sql_create_database(dbname, dbencoding))
         if owner:
-            cursor.execute('GRANT ALL ON `%s`.* to %s' % (dbname, owner))
+            cursor.execute("GRANT ALL ON `%s`.* to %s" % (dbname, owner))
 
     def sql_regexp_match_expression(self, pattern):
         """pattern matching using regexp"""
@@ -239,7 +296,7 @@ class _MyAdvFuncHelper(db._GenericAdvFun
 
     def list_databases(self, cursor):
         """return the list of existing databases"""
-        cursor.execute('SHOW DATABASES')
+        cursor.execute("SHOW DATABASES")
         return [r[0] for r in cursor.fetchall()]
 
     def list_tables(self, cursor):
@@ -261,7 +318,7 @@ class _MyAdvFuncHelper(db._GenericAdvFun
 
     # full-text search customization ###########################################
 
-    fti_table = 'appears'
+    fti_table = "appears"
     fti_need_distinct = False
 
     def cursor_index_object(self, uid, obj, cursor):
@@ -273,9 +330,10 @@ class _MyAdvFuncHelper(db._GenericAdvFun
         for weight, words in sorted(obj.get_words().items()):
             ftwords += normalize_words(words)
         if ftwords:
-            cursor.execute("INSERT INTO appears(uid, words) "
-                           "VALUES (%(uid)s, %(wrds)s);",
-                           {'uid':uid, 'wrds': ' '.join(ftwords)})
+            cursor.execute(
+                "INSERT INTO appears(uid, words) " "VALUES (%(uid)s, %(wrds)s);",
+                {"uid": uid, "wrds": " ".join(ftwords)},
+            )
 
     def fulltext_search(self, querystr, cursor=None):
         """Execute a full text query and return a list of 2-uple (rating, uid).
@@ -284,18 +342,23 @@ class _MyAdvFuncHelper(db._GenericAdvFun
             querystr = unicode(querystr, self.dbencoding)
         words = normalize_words(tokenize(querystr))
         cursor = cursor or self._cnx.cursor()
-        cursor.execute('SELECT 1, uid FROM appears '
-                       'WHERE MATCH (words) AGAINST (%(words)s IN BOOLEAN MODE)',
-                       {'words': ' '.join(words)})
+        cursor.execute(
+            "SELECT 1, uid FROM appears "
+            "WHERE MATCH (words) AGAINST (%(words)s IN BOOLEAN MODE)",
+            {"words": " ".join(words)},
+        )
         return cursor.fetchall()
 
     def fti_restriction_sql(self, tablename, querystr, jointo=None, not_=False):
         """Execute a full text query and return a list of 2-uple (rating, uid).
         """
         words = normalize_words(tokenize(querystr))
-        sql = "MATCH (%s.words) AGAINST ('%s' IN BOOLEAN MODE)" % (tablename, ' '.join(words))
+        sql = "MATCH (%s.words) AGAINST ('%s' IN BOOLEAN MODE)" % (
+            tablename,
+            " ".join(words),
+        )
         if not_:
-            sql = 'NOT (%s)' % sql
+            sql = "NOT (%s)" % sql
         if jointo is None:
             return sql
         return "%s AND %s.uid=%s" % (sql, tablename, jointo)
@@ -312,10 +375,10 @@ class _MyAdvFuncHelper(db._GenericAdvFun
 
     def sql_drop_fti(self):
         """Drop tables used by the full text index."""
-        return 'DROP TABLE appears;'
+        return "DROP TABLE appears;"
 
     def sql_grant_user_on_fti(self, user):
-        return 'GRANT ALL ON appears TO %s;' % (user)
-
-
-db._ADV_FUNC_HELPER_DIRECTORY['mysql'] = _MyAdvFuncHelper
+        return "GRANT ALL ON appears TO %s;" % (user)
+
+
+db._ADV_FUNC_HELPER_DIRECTORY["mysql"] = _MyAdvFuncHelper
diff --git a/logilab/database/postgres.py b/logilab/database/postgres.py
--- a/logilab/database/postgres.py
+++ b/logilab/database/postgres.py
@@ -39,13 +39,15 @@ from logilab import database as db
 from logilab.database.fti import normalize_words, tokenize_query
 
 
-TSEARCH_SCHEMA_PATH = ('/usr/share/postgresql/?.?/contrib/tsearch2.sql', # current debian
-                       '/usr/lib/postgresql/share/contrib/tsearch2.sql',
-                       '/usr/share/postgresql/contrib/tsearch2.sql',
-                       '/usr/lib/postgresql-?.?/share/contrib/tsearch2.sql',
-                       '/usr/share/postgresql-?.?/contrib/tsearch2.sql',
-                       join(dirname(__file__), 'tsearch2.sql'),
-                       'tsearch2.sql')
+TSEARCH_SCHEMA_PATH = (
+    "/usr/share/postgresql/?.?/contrib/tsearch2.sql",  # current debian
+    "/usr/lib/postgresql/share/contrib/tsearch2.sql",
+    "/usr/share/postgresql/contrib/tsearch2.sql",
+    "/usr/lib/postgresql-?.?/share/contrib/tsearch2.sql",
+    "/usr/share/postgresql-?.?/contrib/tsearch2.sql",
+    join(dirname(__file__), "tsearch2.sql"),
+    "tsearch2.sql",
+)
 
 
 class _Psycopg2Adapter(db.DBAPIAdapter):
@@ -54,6 +56,7 @@ class _Psycopg2Adapter(db.DBAPIAdapter):
     It provides basic support for postgresql schemas :
     cf. http://www.postgresql.org/docs/current/static/ddl-schemas.html
     """
+
     # not defined in psycopg2.extensions
     # "select typname from pg_type where oid=705";
     UNKNOWN = 705
@@ -63,70 +66,85 @@ class _Psycopg2Adapter(db.DBAPIAdapter):
 
     def __init__(self, native_module, pywrap=False):
         from psycopg2 import extensions
+
         extensions.register_type(extensions.UNICODE)
         try:
             unicodearray = extensions.UNICODEARRAY
         except AttributeError:
             from psycopg2 import _psycopg
+
             unicodearray = _psycopg.UNICODEARRAY
         extensions.register_type(unicodearray)
         self.BOOLEAN = extensions.BOOLEAN
         db.DBAPIAdapter.__init__(self, native_module, pywrap)
         self._init_psycopg2()
 
-    def connect(self, host='', database='', user='', password='', port='', schema=None, extra_args=None):
+    def connect(
+        self,
+        host="",
+        database="",
+        user="",
+        password="",
+        port="",
+        schema=None,
+        extra_args=None,
+    ):
         """Handles psycopg connection format"""
         args = {}
         if host:
-            args.setdefault('host', host)
+            args.setdefault("host", host)
         if database:
-            args.setdefault('dbname', database)
+            args.setdefault("dbname", database)
         if user:
-            args.setdefault('user', user)
+            args.setdefault("user", user)
         if port:
-            args.setdefault('port', port)
+            args.setdefault("port", port)
         if password:
-            args.setdefault('password', password)
-        cnx_string = ' '.join('%s=%s' % item for item in args.items())
+            args.setdefault("password", password)
+        cnx_string = " ".join("%s=%s" % item for item in args.items())
         if extra_args is not None:
-            cnx_string += ' ' + extra_args
+            cnx_string += " " + extra_args
         cnx = self._native_module.connect(cnx_string)
         cnx.set_isolation_level(1)
         self.set_search_path(cnx, schema)
         return self._wrap_if_needed(cnx)
 
     def _schema_exists(self, cursor, schema):
-        cursor.execute('SELECT nspname FROM pg_namespace WHERE nspname=%(s)s',
-                       {'s': schema})
+        cursor.execute(
+            "SELECT nspname FROM pg_namespace WHERE nspname=%(s)s", {"s": schema}
+        )
         return cursor.fetchone() is not None
 
     def set_search_path(self, cnx, schema):
         if schema:
             cursor = cnx.cursor()
             if not self._schema_exists(cursor, schema):
-                warn("%s schema doesn't exist, search path can't be set" % schema,
-                     UserWarning)
+                warn(
+                    "%s schema doesn't exist, search path can't be set" % schema,
+                    UserWarning,
+                )
                 return
-            cursor.execute('SHOW search_path')
-            schemas = cursor.fetchone()[0].split(',')
+            cursor.execute("SHOW search_path")
+            schemas = cursor.fetchone()[0].split(",")
             if schema not in schemas:
                 schemas.insert(0, schema)
             else:
                 schemas.pop(schemas.index(schema))
                 schemas.insert(0, schema)
-            cursor.execute('SET search_path TO %s;' % ','.join(schemas))
+            cursor.execute("SET search_path TO %s;" % ",".join(schemas))
             cursor.close()
 
     def _init_psycopg2(self):
         """initialize psycopg2 to use mx.DateTime for date and timestamps
         instead for datetime.datetime"""
         psycopg2 = self._native_module
-        if hasattr(psycopg2, '_lc_initialized'):
+        if hasattr(psycopg2, "_lc_initialized"):
             return
         psycopg2._lc_initialized = 1
         # use mxDateTime instead of datetime if available
         if db.USE_MX_DATETIME:
             from psycopg2 import extensions
+
             extensions.register_type(psycopg2._psycopg.MXDATETIME)
             extensions.register_type(psycopg2._psycopg.MXINTERVAL)
             extensions.register_type(psycopg2._psycopg.MXDATE)
@@ -134,13 +152,13 @@ class _Psycopg2Adapter(db.DBAPIAdapter):
             # StringIO/cStringIO adaptation
             # XXX (syt) todo, see my december discussion on the psycopg2 list
             # for a working solution
-            #def adapt_stringio(stringio):
+            # def adapt_stringio(stringio):
             #    self.logger.info('ADAPTING %s', stringio)
             #    return psycopg2.Binary(stringio.getvalue())
-            #import StringIO
-            #extensions.register_adapter(StringIO.StringIO, adapt_stringio)
-            #import cStringIO
-            #extensions.register_adapter(cStringIO.StringIO, adapt_stringio)
+            # import StringIO
+            # extensions.register_adapter(StringIO.StringIO, adapt_stringio)
+            # import cStringIO
+            # extensions.register_adapter(cStringIO.StringIO, adapt_stringio)
 
 
 class _Psycopg2CtypesAdapter(_Psycopg2Adapter):
@@ -148,135 +166,168 @@ class _Psycopg2CtypesAdapter(_Psycopg2Ad
 
     cf. https://github.com/mvantellingen/psycopg2-ctypes
     """
+
     def __init__(self, native_module, pywrap=False):
         # install psycopg2 compatibility
         from psycopg2ct import compat
+
         compat.register()
         _Psycopg2Adapter.__init__(self, native_module, pywrap)
 
+
 class _Psycopg2CffiAdapter(_Psycopg2Adapter):
     """psycopg2cffi adapter
 
     cf. https://pypi.python.org/pypi/psycopg2cffi
     """
+
     def __init__(self, native_module, pywrap=False):
         # install psycopg2 compatibility
         from psycopg2cffi import compat
+
         compat.register()
         _Psycopg2Adapter.__init__(self, native_module, pywrap)
 
 
-db._PREFERED_DRIVERS['postgres'] = [
+db._PREFERED_DRIVERS["postgres"] = [
     #'logilab.database._pyodbcwrap',
-    'psycopg2', 'psycopg2ct', 'psycopg2cffi',
-    ]
-db._ADAPTER_DIRECTORY['postgres'] = {
-    'psycopg2' : _Psycopg2Adapter,
-    'psycopg2ct' : _Psycopg2CtypesAdapter,
-    'psycopg2cffi' : _Psycopg2CffiAdapter,
-    }
+    "psycopg2",
+    "psycopg2ct",
+    "psycopg2cffi",
+]
+db._ADAPTER_DIRECTORY["postgres"] = {
+    "psycopg2": _Psycopg2Adapter,
+    "psycopg2ct": _Psycopg2CtypesAdapter,
+    "psycopg2cffi": _Psycopg2CffiAdapter,
+}
 
 
 class _PGAdvFuncHelper(db._GenericAdvFuncHelper):
     """Postgres helper, taking advantage of postgres SEQUENCE support
     """
 
-    backend_name = 'postgres'
+    backend_name = "postgres"
     TYPE_MAPPING = db._GenericAdvFuncHelper.TYPE_MAPPING.copy()
-    TYPE_MAPPING.update({
-        'TZTime' :   'time with time zone',
-        'TZDatetime':'timestamp with time zone'})
+    TYPE_MAPPING.update(
+        {"TZTime": "time with time zone", "TZDatetime": "timestamp with time zone"}
+    )
     TYPE_CONVERTERS = db._GenericAdvFuncHelper.TYPE_CONVERTERS.copy()
 
     def pgdbcmd(self, cmd, dbhost, dbport, dbuser, dbschema, *args):
         cmd = [cmd]
         cmd += args
         if dbhost or self.dbhost:
-            cmd.append('--host=%s' % (dbhost or self.dbhost))
+            cmd.append("--host=%s" % (dbhost or self.dbhost))
         if dbport or self.dbport:
-            cmd.append('--port=%s' % (dbport or self.dbport))
+            cmd.append("--port=%s" % (dbport or self.dbport))
         if dbuser or self.dbuser:
-            cmd.append('--username=%s' % (dbuser or self.dbuser))
+            cmd.append("--username=%s" % (dbuser or self.dbuser))
         if dbschema or self.dbschema:
-            cmd.append('--schema=%s' % (dbschema or self.dbschema))
+            cmd.append("--schema=%s" % (dbschema or self.dbschema))
         return cmd
 
     def system_database(self):
         """return the system database for the given driver"""
-        return 'template1'
-
-    def backup_commands(self, backupfile, keepownership=True,
-                        dbname=None, dbhost=None, dbport=None, dbuser=None, dbschema=None):
-        cmd = self.pgdbcmd('pg_dump', dbhost, dbport, dbuser, dbschema, '-Fc')
+        return "template1"
+
+    def backup_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbschema=None,
+    ):
+        cmd = self.pgdbcmd("pg_dump", dbhost, dbport, dbuser, dbschema, "-Fc")
         if not keepownership:
-            cmd.append('--no-owner')
-        cmd.append('--file')
+            cmd.append("--no-owner")
+        cmd.append("--file")
         cmd.append(backupfile)
         cmd.append(dbname or self.dbname)
         return [cmd]
 
-    def restore_commands(self, backupfile, keepownership=True, drop=True,
-                         dbname=None, dbhost=None, dbport=None, dbuser=None,
-                         dbencoding=None, dbschema=None):
+    def restore_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        drop=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbencoding=None,
+        dbschema=None,
+    ):
         # XXX what about dbschema ?
         dbname = dbname or self.dbname
         cmds = []
         if drop:
-            cmd = self.pgdbcmd('dropdb', dbhost, dbport, dbuser, None)
+            cmd = self.pgdbcmd("dropdb", dbhost, dbport, dbuser, None)
             cmd.append(dbname)
             cmds.append(cmd)
-        cmd = self.pgdbcmd('createdb', dbhost, dbport, dbuser, None,
-                           '-T', 'template0',
-                           '-E', dbencoding or self.dbencoding)
+        cmd = self.pgdbcmd(
+            "createdb",
+            dbhost,
+            dbport,
+            dbuser,
+            None,
+            "-T",
+            "template0",
+            "-E",
+            dbencoding or self.dbencoding,
+        )
         cmd.append(dbname)
         cmds.append(cmd)
-        cmd = self.pgdbcmd('pg_restore', dbhost, dbport, dbuser, None, '-Fc')
-        cmd.append('--dbname')
+        cmd = self.pgdbcmd("pg_restore", dbhost, dbport, dbuser, None, "-Fc")
+        cmd.append("--dbname")
         cmd.append(dbname)
         if not keepownership:
-            cmd.append('--no-owner')
+            cmd.append("--no-owner")
         cmd.append(backupfile)
         cmds.append(cmd)
         return cmds
 
     def sql_current_date(self):
-        return 'CAST(clock_timestamp() AS DATE)'
+        return "CAST(clock_timestamp() AS DATE)"
 
     def sql_current_time(self):
-        return 'CAST(clock_timestamp() AS TIME)'
+        return "CAST(clock_timestamp() AS TIME)"
 
     def sql_current_timestamp(self):
-        return 'clock_timestamp()'
+        return "clock_timestamp()"
 
     def sql_regexp_match_expression(self, pattern):
         """pattern matching using regexp"""
         return "~ %s" % (pattern)
 
     def sql_create_sequence(self, seq_name):
-        return 'CREATE SEQUENCE %s;' % seq_name
+        return "CREATE SEQUENCE %s;" % seq_name
 
     def sql_restart_sequence(self, seq_name, initial_value=1):
-        return 'ALTER SEQUENCE %s RESTART WITH %s;' % (seq_name, initial_value)
+        return "ALTER SEQUENCE %s RESTART WITH %s;" % (seq_name, initial_value)
 
     def sql_sequence_current_state(self, seq_name):
-        return 'SELECT last_value FROM %s;' % seq_name
+        return "SELECT last_value FROM %s;" % seq_name
 
     def sql_drop_sequence(self, seq_name):
-        return 'DROP SEQUENCE %s;' % seq_name
+        return "DROP SEQUENCE %s;" % seq_name
 
     def sqls_increment_sequence(self, seq_name):
         return ("SELECT nextval('%s');" % seq_name,)
 
-    def sql_temporary_table(self, table_name, table_schema,
-                            drop_on_commit=True):
+    def sql_temporary_table(self, table_name, table_schema, drop_on_commit=True):
         if not drop_on_commit:
-            return "CREATE TEMPORARY TABLE %s (%s);" % (table_name,
-                                                        table_schema)
-        return "CREATE TEMPORARY TABLE %s (%s) ON COMMIT DROP;" % (table_name,
-                                                                   table_schema)
-
-    def create_database(self, cursor, dbname, owner=None, dbencoding=None,
-                        template=None):
+            return "CREATE TEMPORARY TABLE %s (%s);" % (table_name, table_schema)
+        return "CREATE TEMPORARY TABLE %s (%s) ON COMMIT DROP;" % (
+            table_name,
+            table_schema,
+        )
+
+    def create_database(
+        self, cursor, dbname, owner=None, dbencoding=None, template=None
+    ):
         """create a new database"""
         sql = 'CREATE DATABASE "%(dbname)s"'
         if owner:
@@ -290,24 +341,24 @@ class _PGAdvFuncHelper(db._GenericAdvFun
 
     def create_schema(self, cursor, schema, granted_user=None):
         """create a new database schema"""
-        sql = 'CREATE SCHEMA %s' % schema
+        sql = "CREATE SCHEMA %s" % schema
         if granted_user is not None:
-            sql += ' AUTHORIZATION %s' % granted_user
+            sql += " AUTHORIZATION %s" % granted_user
         cursor.execute(sql)
 
     def drop_schema(self, cursor, schema):
         """drop a database schema"""
-        cursor.execute('DROP SCHEMA %s CASCADE' % schema)
+        cursor.execute("DROP SCHEMA %s CASCADE" % schema)
 
     def create_language(self, cursor, extlang):
         """postgres specific method to install a procedural language on a database"""
         # make sure plpythonu is not directly in template1
         cursor.execute("SELECT * FROM pg_language WHERE lanname='%s';" % extlang)
         if cursor.fetchall():
-            self.logger.warning('%s language already installed', extlang)
+            self.logger.warning("%s language already installed", extlang)
         else:
-            cursor.execute('CREATE LANGUAGE %s' % extlang)
-            self.logger.info('%s language installed', extlang)
+            cursor.execute("CREATE LANGUAGE %s" % extlang)
+            self.logger.info("%s language installed", extlang)
 
     def list_users(self, cursor):
         """return the list of existing database users"""
@@ -316,7 +367,7 @@ class _PGAdvFuncHelper(db._GenericAdvFun
 
     def list_databases(self, cursor):
         """return the list of existing databases"""
-        cursor.execute('SELECT datname FROM pg_database')
+        cursor.execute("SELECT datname FROM pg_database")
         return [r[0] for r in cursor.fetchall()]
 
     def list_tables(self, cursor, schema=None):
@@ -325,7 +376,7 @@ class _PGAdvFuncHelper(db._GenericAdvFun
         sql = "SELECT tablename FROM pg_tables"
         if schema:
             sql += " WHERE schemaname=%(s)s"
-        cursor.execute(sql, {'s': schema})
+        cursor.execute(sql, {"s": schema})
         return [r[0] for r in cursor.fetchall()]
 
     def list_indices(self, cursor, table=None):
@@ -334,30 +385,30 @@ class _PGAdvFuncHelper(db._GenericAdvFun
         restrictions = []
         if table:
             table = table.lower()
-            restrictions.append('LOWER(tablename)=%(table)s')
+            restrictions.append("LOWER(tablename)=%(table)s")
         if self.dbschema:
-            restrictions.append('schemaname=%(s)s')
+            restrictions.append("schemaname=%(s)s")
         if restrictions:
-            sql += ' WHERE %s' % ' AND '.join(restrictions)
-        cursor.execute(sql, {'s': self.dbschema, 'table': table})
+            sql += " WHERE %s" % " AND ".join(restrictions)
+        cursor.execute(sql, {"s": self.dbschema, "table": table})
         return [r[0] for r in cursor.fetchall()]
 
     # full-text search customization ###########################################
 
-    fti_table = 'appears'
+    fti_table = "appears"
     fti_need_distinct = False
-    config = 'default'
-    max_indexed = 500000 # 500KB, avoid "string is too long for tsvector"
+    config = "default"
+    max_indexed = 500000  # 500KB, avoid "string is too long for tsvector"
 
     def has_fti_table(self, cursor):
         if super(_PGAdvFuncHelper, self).has_fti_table(cursor):
-            self.config = 'simple'
+            self.config = "simple"
         return self.fti_table in self.list_tables(cursor)
 
     def cursor_index_object(self, uid, obj, cursor):
         """Index an object, using the db pointed by the given cursor.
         """
-        ctx = {'config': self.config, 'uid': int(uid)}
+        ctx = {"config": self.config, "uid": int(uid)}
         tsvectors, size, oversized = [], 0, False
         # sort for test predictability
         for (weight, words) in sorted(obj.get_words().items()):
@@ -369,16 +420,20 @@ class _PGAdvFuncHelper(db._GenericAdvFun
                     oversized = True
                     break
             if words:
-                tsvectors.append("setweight(to_tsvector(%%(config)s, "
-                                 "%%(wrds_%(w)s)s), '%(w)s')"
-                                 % {'w': weight})
-                ctx['wrds_%s' % weight] = ' '.join(words)
+                tsvectors.append(
+                    "setweight(to_tsvector(%%(config)s, "
+                    "%%(wrds_%(w)s)s), '%(w)s')" % {"w": weight}
+                )
+                ctx["wrds_%s" % weight] = " ".join(words)
             if oversized:
                 break
         if tsvectors:
-            cursor.execute("INSERT INTO appears(uid, words, weight) "
-                           "VALUES (%%(uid)s, %s, %s);"
-                           % ('||'.join(tsvectors), obj.entity_weight), ctx)
+            cursor.execute(
+                "INSERT INTO appears(uid, words, weight) "
+                "VALUES (%%(uid)s, %s, %s);"
+                % ("||".join(tsvectors), obj.entity_weight),
+                ctx,
+            )
 
     def _fti_query_to_tsquery_words(self, querystr):
         if isinstance(querystr, bytes):
@@ -386,16 +441,20 @@ class _PGAdvFuncHelper(db._GenericAdvFun
         words = normalize_words(tokenize_query(querystr))
         # XXX replace '%' since it makes tsearch fail, dunno why yet, should
         # be properly fixed
-        return '&'.join(words).replace('*', ':*').replace('%', '')
+        return "&".join(words).replace("*", ":*").replace("%", "")
 
     def fulltext_search(self, querystr, cursor=None):
         """Execute a full text query and return a list of 2-uple (rating, uid).
         """
         cursor = cursor or self._cnx.cursor()
-        cursor.execute('SELECT 1, uid FROM appears '
-                       "WHERE words @@ to_tsquery(%(config)s, %(words)s)",
-                       {'config': self.config,
-                        'words': self._fti_query_to_tsquery_words(querystr)})
+        cursor.execute(
+            "SELECT 1, uid FROM appears "
+            "WHERE words @@ to_tsquery(%(config)s, %(words)s)",
+            {
+                "config": self.config,
+                "words": self._fti_query_to_tsquery_words(querystr),
+            },
+        )
         return cursor.fetchall()
 
     def fti_restriction_sql(self, tablename, querystr, jointo=None, not_=False):
@@ -404,24 +463,28 @@ class _PGAdvFuncHelper(db._GenericAdvFun
         searched = self._fti_query_to_tsquery_words(querystr)
         sql = "%s.words @@ to_tsquery('%s', '%s')" % (tablename, self.config, searched)
         if not_:
-            sql = 'NOT (%s)' % sql
+            sql = "NOT (%s)" % sql
         if jointo is None:
             return sql
         return "%s AND %s.uid=%s" % (sql, tablename, jointo)
 
-
     def fti_rank_order(self, tablename, querystr):
         """Execute a full text query and return a list of 2-uple (rating, uid).
         """
         searched = self._fti_query_to_tsquery_words(querystr)
         return "ts_rank(%s.words, to_tsquery('%s', '%s'))*%s.weight" % (
-            tablename, self.config, searched, tablename)
+            tablename,
+            self.config,
+            searched,
+            tablename,
+        )
 
     # XXX not needed with postgres >= 8.3 right?
     def find_tsearch2_schema(self):
         """Looks up for tsearch2.sql in a list of default paths.
         """
         import glob
+
         for path in TSEARCH_SCHEMA_PATH:
             for fullpath in glob.glob(path):
                 if isfile(fullpath):
@@ -435,19 +498,19 @@ class _PGAdvFuncHelper(db._GenericAdvFun
         For postgres, install tsearch2 if not installed by the template.
         """
         tstables = []
-        for table in self.list_tables(cursor, schema='pg_catalog'):
-            if table.startswith('pg_ts'):
+        for table in self.list_tables(cursor, schema="pg_catalog"):
+            if table.startswith("pg_ts"):
                 tstables.append(table)
         if tstables:
-            self.logger.info('pg_ts_dict already present, do not execute tsearch2.sql')
+            self.logger.info("pg_ts_dict already present, do not execute tsearch2.sql")
             if owner:
-                self.logger.info('reset pg_ts* owners')
+                self.logger.info("reset pg_ts* owners")
                 for table in tstables:
-                    cursor.execute('ALTER TABLE %s OWNER TO %s' % (table, owner))
+                    cursor.execute("ALTER TABLE %s OWNER TO %s" % (table, owner))
         else:
             fullpath = self.find_tsearch2_schema()
             cursor.execute(open(fullpath).read())
-            self.logger.info('tsearch2.sql installed')
+            self.logger.info("tsearch2.sql installed")
 
     def sql_init_fti(self):
         """Return the sql definition of table()s used by the full text index.
@@ -466,17 +529,17 @@ CREATE INDEX appears_words_idx ON appear
 
     def sql_drop_fti(self):
         """Drop tables used by the full text index."""
-        return 'DROP TABLE appears;'
+        return "DROP TABLE appears;"
 
     def sql_grant_user_on_fti(self, user):
-        return 'GRANT ALL ON appears TO %s;' % (user)
-
-    @deprecated('[lgdb 1.10] deprecated method')
+        return "GRANT ALL ON appears TO %s;" % (user)
+
+    @deprecated("[lgdb 1.10] deprecated method")
     def boolean_value(self, value):
         if value:
-            return 'TRUE'
+            return "TRUE"
         else:
-            return 'FALSE'
-
-
-db._ADV_FUNC_HELPER_DIRECTORY['postgres'] = _PGAdvFuncHelper
+            return "FALSE"
+
+
+db._ADV_FUNC_HELPER_DIRECTORY["postgres"] = _PGAdvFuncHelper
diff --git a/logilab/database/sqlgen.py b/logilab/database/sqlgen.py
--- a/logilab/database/sqlgen.py
+++ b/logilab/database/sqlgen.py
@@ -24,6 +24,7 @@ from __future__ import print_function
 
 # SQLGenerator ################################################################
 
+
 class SQLExpression(object):
     """Use this class when you need direct SQL expression in statements
     generated by SQLGenerator. Arguments:
@@ -48,10 +49,11 @@ class SQLExpression(object):
         self.kwargs = kwargs
 
 
-class SQLGenerator :
+class SQLGenerator:
     """
     Helper class to generate SQL strings to use with python's DB-API.
     """
+
     def _iterate_params(self, params):
         """ Iterate a parameters dictionnary and yield the correct column name
         and value (base types or SQL functions) """
@@ -87,7 +89,9 @@ class SQLGenerator :
         """
         # Do not need SQLExpression here, as we have the addon argument.
         if isinstance(keys, dict):
-            restriction = ["%s = %s" % (col, val) for col, val in self._iterate_params(keys)]
+            restriction = [
+                "%s = %s" % (col, val) for col, val in self._iterate_params(keys)
+            ]
         else:
             restriction = ["%s = %%(%s)s" % (x, x) for x in keys]
         if addon:
@@ -105,7 +109,9 @@ class SQLGenerator :
         'nom = %(nom)s, prenom = %(prenom)s'
         """
         if isinstance(keys, dict):
-            set_parts = ["%s = %s" % (col, val) for col, val in self._iterate_params(keys)]
+            set_parts = [
+                "%s = %s" % (col, val) for col, val in self._iterate_params(keys)
+            ]
         else:
             set_parts = ["%s = %%(%s)s" % (x, x) for x in keys]
         return ", ".join(set_parts)
@@ -130,7 +136,11 @@ class SQLGenerator :
         for column, value in self._iterate_params(params):
             columns.append(column)
             values.append(value)
-        sql = 'INSERT INTO %s ( %s ) VALUES ( %s )' % (table, ', '.join(columns), ', '.join(values))
+        sql = "INSERT INTO %s ( %s ) VALUES ( %s )" % (
+            table,
+            ", ".join(columns),
+            ", ".join(values),
+        )
         return sql
 
     def select(self, table, params=None, selection=None):
@@ -147,16 +157,16 @@ class SQLGenerator :
         'SELECT * FROM test WHERE nom = %(nom)s AND prenom = %(prenom)s'
         """
         if selection is None:
-            sql = 'SELECT * FROM %s' % table
+            sql = "SELECT * FROM %s" % table
         else:
-            sql = 'SELECT %s FROM %s' % (','.join(col for col in selection), table)
+            sql = "SELECT %s FROM %s" % (",".join(col for col in selection), table)
         if params is not None:
             where = self.where(params)
-            if where :
-                sql = sql + ' WHERE %s' % where
-        return sql
-
-    def adv_select(self, model, tables, params, joins=None) :
+            if where:
+                sql = sql + " WHERE %s" % where
+        return sql
+
+    def adv_select(self, model, tables, params, joins=None):
         """
         :param model:  list of columns to select
         :param tables: list of tables used in from
@@ -171,15 +181,15 @@ class SQLGenerator :
         'SELECT column FROM test AS t WHERE nom = %(nom)s'
         """
         table_names = ["%s AS %s" % (k, v) for k, v in tables]
-        sql = 'SELECT %s FROM %s' % (', '.join(model), ', '.join(table_names))
-        if joins and type(joins) != type(''):
-            joins = ' AND '.join(joins)
+        sql = "SELECT %s FROM %s" % (", ".join(model), ", ".join(table_names))
+        if joins and type(joins) != type(""):
+            joins = " AND ".join(joins)
         where = self.where(params, joins)
-        if where :
-            sql = sql + ' WHERE %s' % where
-        return sql
-
-    def delete(self, table, params, addon=None) :
+        if where:
+            sql = sql + " WHERE %s" % where
+        return sql
+
+    def delete(self, table, params, addon=None):
         """
         :param table: name of the table
         :param params: dictionary that will be used as in cursor.execute(sql,params)
@@ -191,7 +201,7 @@ class SQLGenerator :
         'DELETE FROM test WHERE nom = %(nom)s AND prenom = %(prenom)s'
         """
         where = self.where(params, addon=addon)
-        sql = 'DELETE FROM %s WHERE %s' % (table, where)
+        sql = "DELETE FROM %s WHERE %s" % (table, where)
         return sql
 
     def delete_many(self, table, params):
@@ -199,14 +209,16 @@ class SQLGenerator :
         """
         addons = []
         for key, value in list(params.items()):
-            if not isinstance(value, SQLExpression) and value.startswith('('): # we want IN
-                addons.append('%s IN %s' % (key, value))
+            if not isinstance(value, SQLExpression) and value.startswith(
+                "("
+            ):  # we want IN
+                addons.append("%s IN %s" % (key, value))
                 # The value is pop as it is not needed for substitution
                 # (the value is directly written in the SQL IN statement)
                 params.pop(key)
-        return self.delete(table, params, addon=' AND '.join(addons))
-
-    def update(self, table, params, unique) :
+        return self.delete(table, params, addon=" AND ".join(addons))
+
+    def update(self, table, params, unique):
         """
         :param table: name of the table
         :param params: dictionary that will be used as in cursor.execute(sql,params)
@@ -228,7 +240,7 @@ class SQLGenerator :
         # Add the removed unique params to the (now possibly updated)
         # params dict (if there were some SQLExpressions)
         params.update(unique_params)
-        sql = 'UPDATE %s SET %s WHERE %s' % (table, set, where)
+        sql = "UPDATE %s SET %s WHERE %s" % (table, set, where)
         return sql
 
 
@@ -236,6 +248,7 @@ class BaseTable:
     """
     Another helper class to ease SQL table manipulation.
     """
+
     # table_name = "default"
     # supported types are s/i/d
     # table_fields = ( ('first_field','s'), )
@@ -250,29 +263,36 @@ class BaseTable:
         self._table_fields = table_fields
         self._table_name = table_name
         info = {
-            'key' : self._primary_key,
-            'table' : self._table_name,
-            'columns' : ",".join( [ f for f,t in self._table_fields ] ),
-            'values' : ",".join( [sql_repr(t, "%%(%s)s" % f)
-                                  for f,t in self._table_fields] ),
-            'updates' : ",".join( ["%s=%s" % (f, sql_repr(t, "%%(%s)s" % f))
-                                   for f,t in self._table_fields] ),
-            }
-        self._insert_stmt = ("INSERT into %(table)s (%(columns)s) "
-                             "VALUES (%(values)s) WHERE %(key)s=%%(key)s") % info
-        self._update_stmt = ("UPDATE %(table)s SET (%(updates)s) "
-                             "VALUES WHERE %(key)s=%%(key)s") % info
-        self._select_stmt = ("SELECT %(columns)s FROM %(table)s "
-                             "WHERE %(key)s=%%(key)s") % info
-        self._delete_stmt = ("DELETE FROM %(table)s "
-                             "WHERE %(key)s=%%(key)s") % info
+            "key": self._primary_key,
+            "table": self._table_name,
+            "columns": ",".join([f for f, t in self._table_fields]),
+            "values": ",".join(
+                [sql_repr(t, "%%(%s)s" % f) for f, t in self._table_fields]
+            ),
+            "updates": ",".join(
+                [
+                    "%s=%s" % (f, sql_repr(t, "%%(%s)s" % f))
+                    for f, t in self._table_fields
+                ]
+            ),
+        }
+        self._insert_stmt = (
+            "INSERT into %(table)s (%(columns)s) "
+            "VALUES (%(values)s) WHERE %(key)s=%%(key)s"
+        ) % info
+        self._update_stmt = (
+            "UPDATE %(table)s SET (%(updates)s) VALUES WHERE %(key)s=%%(key)s"
+        ) % info
+        self._select_stmt = (
+            "SELECT %(columns)s FROM %(table)s WHERE %(key)s=%%(key)s"
+        ) % info
+        self._delete_stmt = "DELETE FROM %(table)s WHERE %(key)s=%%(key)s" % info
 
         for k, t in table_fields:
             if hasattr(self, k):
                 raise ValueError("Cannot use %s as a table field" % k)
             setattr(self, k, None)
 
-
     def as_dict(self):
         d = {}
         for k, t in self._table_fields:
@@ -280,10 +300,10 @@ class BaseTable:
         return d
 
     def select(self, cursor):
-        d = { 'key' : getattr(self, self._primary_key) }
+        d = {"key": getattr(self, self._primary_key)}
         cursor.execute(self._select_stmt % d)
         rows = cursor.fetchall()
-        if len(rows)!=1:
+        if len(rows) != 1:
             msg = "Select: ambiguous query returned %d rows"
             raise ValueError(msg % len(rows))
         for (f, t), v in zip(self._table_fields, rows[0]):
@@ -294,13 +314,14 @@ class BaseTable:
         cursor.execute(self._update_stmt % d)
 
     def delete(self, cursor):
-        d = { 'key' : getattr(self, self._primary_key) }
+        d = {"key": getattr(self, self._primary_key)}
         cursor.execute(self._delete_stmt % d)
 
 
 # Helper functions #############################################################
 
-def name_fields(cursor, records) :
+
+def name_fields(cursor, records):
     """
     Take a cursor and a list of records fetched with that cursor, then return a
     list of dictionaries (one for each record) whose keys are column names and
@@ -310,15 +331,16 @@ def name_fields(cursor, records) :
     :param records: list returned by fetch*()
     """
     result = []
-    for record in records :
+    for record in records:
         record_dict = {}
-        for i in range(len(record)) :
+        for i in range(len(record)):
             record_dict[cursor.description[i][0]] = record[i]
         result.append(record_dict)
     return result
 
+
 def sql_repr(type, val):
-    if type == 's':
+    if type == "s":
         return "'%s'" % (val,)
     else:
         return val
@@ -327,4 +349,5 @@ def sql_repr(type, val):
 if __name__ == "__main__":
     import doctest
     from logilab.database import sqlgen
+
     print(doctest.testmod(sqlgen))
diff --git a/logilab/database/sqlite.py b/logilab/database/sqlite.py
--- a/logilab/database/sqlite.py
+++ b/logilab/database/sqlite.py
@@ -34,11 +34,11 @@ from logilab import database as db
 
 class _Sqlite3Adapter(db.DBAPIAdapter):
     # no type code in sqlite3
-    BINARY = 'XXX'
-    STRING = 'XXX'
-    DATETIME = 'XXX'
-    NUMBER = 'XXX'
-    BOOLEAN = 'XXX'
+    BINARY = "XXX"
+    STRING = "XXX"
+    DATETIME = "XXX"
+    NUMBER = "XXX"
+    BOOLEAN = "XXX"
     _module_is_initialized = False
 
     def __init__(self, native_module, pywrap=False):
@@ -62,8 +62,10 @@ class _Sqlite3Adapter(db.DBAPIAdapter):
 
         # bytea type handling
         from io import BytesIO
+
         def adapt_bytea(data):
             return data.getvalue()
+
         sqlite.register_adapter(BytesIO, adapt_bytea)
         try:
             from StringIO import StringIO
@@ -71,65 +73,85 @@ class _Sqlite3Adapter(db.DBAPIAdapter):
             pass
         else:
             sqlite.register_adapter(StringIO, adapt_bytea)
+
         def convert_bytea(data, Binary=sqlite.Binary):
             return Binary(data)
-        sqlite.register_converter('bytea', convert_bytea)
+
+        sqlite.register_converter("bytea", convert_bytea)
 
         # decimal type handling
         from decimal import Decimal
+
         def adapt_decimal(data):
             return str(data)
+
         sqlite.register_adapter(Decimal, adapt_decimal)
 
         def convert_decimal(data):
             return Decimal(data)
-        sqlite.register_converter('decimal', convert_decimal)
+
+        sqlite.register_converter("decimal", convert_decimal)
 
         # date/time types handling
         if db.USE_MX_DATETIME:
             from mx.DateTime import DateTimeType, DateTimeDeltaType, strptime
+
             def adapt_mxdatetime(mxd):
-                return mxd.strftime('%Y-%m-%d %H:%M:%S')
+                return mxd.strftime("%Y-%m-%d %H:%M:%S")
+
             sqlite.register_adapter(DateTimeType, adapt_mxdatetime)
+
             def adapt_mxdatetimedelta(mxd):
-                return mxd.strftime('%H:%M:%S')
+                return mxd.strftime("%H:%M:%S")
+
             sqlite.register_adapter(DateTimeDeltaType, adapt_mxdatetimedelta)
 
             def convert_mxdate(ustr):
-                return strptime(ustr, '%Y-%m-%d %H:%M:%S')
-            sqlite.register_converter('date', convert_mxdate)
+                return strptime(ustr, "%Y-%m-%d %H:%M:%S")
+
+            sqlite.register_converter("date", convert_mxdate)
+
             def convert_mxdatetime(ustr):
-                return strptime(ustr, '%Y-%m-%d %H:%M:%S')
-            sqlite.register_converter('timestamp', convert_mxdatetime)
+                return strptime(ustr, "%Y-%m-%d %H:%M:%S")
+
+            sqlite.register_converter("timestamp", convert_mxdatetime)
+
             def convert_mxtime(ustr):
                 try:
-                    return strptime(ustr, '%H:%M:%S')
+                    return strptime(ustr, "%H:%M:%S")
                 except:
                     # DateTime used as Time?
-                    return strptime(ustr, '%Y-%m-%d %H:%M:%S')
-            sqlite.register_converter('time', convert_mxtime)
+                    return strptime(ustr, "%Y-%m-%d %H:%M:%S")
+
+            sqlite.register_converter("time", convert_mxtime)
         # else use datetime.datetime
         else:
             from datetime import time, timedelta
+
             # datetime.time
             def adapt_time(data):
-                return data.strftime('%H:%M:%S')
+                return data.strftime("%H:%M:%S")
+
             sqlite.register_adapter(time, adapt_time)
+
             def convert_time(data):
-                return time(*[int(i) for i in data.split(':')])
-            sqlite.register_converter('time', convert_time)
+                return time(*[int(i) for i in data.split(":")])
+
+            sqlite.register_converter("time", convert_time)
             # datetime.timedelta
             def adapt_timedelta(data):
-                '''the sign in the result only refers to the number of days.  day
+                """the sign in the result only refers to the number of days.  day
                 fractions always indicate a positive offset.  this may seem strange,
                 but it is the same that is done by the default __str__ method.  we
                 redefine it here anyways (instead of simply doing "str") because we
                 do not want any "days," string within the representation.
-                '''
+                """
                 days = data.days
                 frac = data - timedelta(days)
                 return "%d %s" % (data.days, frac)
+
             sqlite.register_adapter(timedelta, adapt_timedelta)
+
             def convert_timedelta(data):
                 parts = data.split(b" ")
                 if len(parts) == 2:
@@ -144,48 +166,61 @@ class _Sqlite3Adapter(db.DBAPIAdapter):
                     microseconds = int(float("0." + timepart_full[1]) * 1000000)
                 else:
                     microseconds = 0
-                return timedelta(days,
-                                 hours*3600 + minutes*60 + seconds,
-                                 microseconds)
-
-            sqlite.register_converter('interval', convert_timedelta)
+                return timedelta(
+                    days, hours * 3600 + minutes * 60 + seconds, microseconds
+                )
+
+            sqlite.register_converter("interval", convert_timedelta)
 
             def convert_tzdatetime(data):
                 dt = parser.parse(data)
                 if dt.tzinfo is None:
                     dt = dt.replace(tzinfo=tz.tzutc())
                 return dt
-            sqlite.register_converter('tzdatetime', convert_tzdatetime)
-
-
-    def connect(self, host='', database='', user='', password='', port=None,
-                schema=None, extra_args=None):
+
+            sqlite.register_converter("tzdatetime", convert_tzdatetime)
+
+    def connect(
+        self,
+        host="",
+        database="",
+        user="",
+        password="",
+        port=None,
+        schema=None,
+        extra_args=None,
+    ):
         """Handles sqlite connection format"""
         sqlite = self._native_module
         if schema is not None:
-            warn('schema support is not implemented on sqlite backends, ignoring schema %s'
-                 % schema)
+            warn(
+                "schema support is not implemented on sqlite backends, ignoring schema %s"
+                % schema
+            )
 
         class Sqlite3Cursor(sqlite.Cursor):
             """cursor adapting usual dict format to pysqlite named format
             in SQL queries
             """
+
             def _replace_parameters(self, sql, kwargs):
                 if isinstance(kwargs, dict):
-                    return re.sub(r'%\(([^\)]+)\)s', r':\1', sql)
-                return re.sub(r'%s', r'?', sql)
+                    return re.sub(r"%\(([^\)]+)\)s", r":\1", sql)
+                return re.sub(r"%s", r"?", sql)
 
             def execute(self, sql, kwargs=None):
                 if kwargs is None:
                     self.__class__.__bases__[0].execute(self, sql)
                 else:
                     final_sql = self._replace_parameters(sql, kwargs)
-                    self.__class__.__bases__[0].execute(self, final_sql , kwargs)
+                    self.__class__.__bases__[0].execute(self, final_sql, kwargs)
 
             def executemany(self, sql, kwargss):
                 if not isinstance(kwargss, (list, tuple)):
                     kwargss = tuple(kwargss)
-                self.__class__.__bases__[0].executemany(self, self._replace_parameters(sql, kwargss[0]), kwargss)
+                self.__class__.__bases__[0].executemany(
+                    self, self._replace_parameters(sql, kwargss[0]), kwargss
+                )
 
         class Sqlite3CnxWrapper:
             def __init__(self, cnx):
@@ -193,30 +228,32 @@ class _Sqlite3Adapter(db.DBAPIAdapter):
 
             def cursor(self):
                 return self._cnx.cursor(Sqlite3Cursor)
+
             def __getattr__(self, attrname):
                 return getattr(self._cnx, attrname)
 
         # abspath so we can change cwd without breaking further queries on the
         # database
-        if database != ':memory:':
+        if database != ":memory:":
             database = abspath(database)
-        cnx = sqlite.connect(database,
-                             detect_types=sqlite.PARSE_DECLTYPES, check_same_thread=False)
+        cnx = sqlite.connect(
+            database, detect_types=sqlite.PARSE_DECLTYPES, check_same_thread=False
+        )
         return self._wrap_if_needed(Sqlite3CnxWrapper(cnx))
 
-    def _transformation_callback(self, description, encoding='utf-8', binarywrap=None):
+    def _transformation_callback(self, description, encoding="utf-8", binarywrap=None):
         def _transform(value):
             if binarywrap is not None and isinstance(value, self._native_module.Binary):
                 return binarywrap(value)
-            return value # no type code support, can't do anything
+            return value  # no type code support, can't do anything
+
         return _transform
 
 
-db._PREFERED_DRIVERS['sqlite'] = [
-    'sqlite3']
-db._ADAPTER_DIRECTORY['sqlite'] = {
-    'sqlite3' : _Sqlite3Adapter,
-    }
+db._PREFERED_DRIVERS["sqlite"] = ["sqlite3"]
+db._ADAPTER_DIRECTORY["sqlite"] = {
+    "sqlite3": _Sqlite3Adapter,
+}
 
 
 class _SqliteAdvFuncHelper(db._GenericAdvFuncHelper):
@@ -225,7 +262,8 @@ class _SqliteAdvFuncHelper(db._GenericAd
 
     An exception is raised when the functionality is not emulatable
     """
-    backend_name = 'sqlite'
+
+    backend_name = "sqlite"
 
     users_support = groups_support = False
     ilike_support = False
@@ -236,30 +274,46 @@ class _SqliteAdvFuncHelper(db._GenericAd
     TYPE_CONVERTERS = db._GenericAdvFuncHelper.TYPE_CONVERTERS.copy()
 
     TYPE_MAPPING = db._GenericAdvFuncHelper.TYPE_MAPPING.copy()
-    TYPE_MAPPING.update({
-        'TZTime': 'tztime',
-        'TZDatetime': 'tzdatetime',
-    })
-
-    def backup_commands(self, backupfile, keepownership=True,
-                        dbname=None, dbhost=None, dbport=None, dbuser=None, dbschema=None):
+    TYPE_MAPPING.update(
+        {"TZTime": "tztime", "TZDatetime": "tzdatetime"}
+    )
+
+    def backup_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbschema=None,
+    ):
         dbname = dbname or self.dbname
-        return ['gzip -c %s > %s' % (dbname, backupfile)]
-
-    def restore_commands(self, backupfile, keepownership=True, drop=True,
-                         dbname=None, dbhost=None, dbport=None, dbuser=None,
-                         dbencoding=None, dbschema=None):
-        return ['zcat %s > %s' % (backupfile, dbname or self.dbname)]
+        return ["gzip -c %s > %s" % (dbname, backupfile)]
+
+    def restore_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        drop=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbencoding=None,
+        dbschema=None,
+    ):
+        return ["zcat %s > %s" % (backupfile, dbname or self.dbname)]
 
     def sql_create_index(self, table, column, unique=False):
         idx = self._index_name(table, column, unique)
         if unique:
-            return 'CREATE UNIQUE INDEX %s ON %s(%s);' % (idx, table, column)
+            return "CREATE UNIQUE INDEX %s ON %s(%s);" % (idx, table, column)
         else:
-            return 'CREATE INDEX %s ON %s(%s);' % (idx, table, column)
+            return "CREATE INDEX %s ON %s(%s);" % (idx, table, column)
 
     def sql_drop_index(self, table, column, unique=False):
-        return 'DROP INDEX %s' % self._index_name(table, column, unique)
+        return "DROP INDEX %s" % self._index_name(table, column, unique)
 
     def list_tables(self, cursor):
         """return the list of tables of a database"""
@@ -280,63 +334,74 @@ class _SqliteAdvFuncHelper(db._GenericAd
         return "REGEXP %s" % pattern
 
 
-db._ADV_FUNC_HELPER_DIRECTORY['sqlite'] = _SqliteAdvFuncHelper
-
+db._ADV_FUNC_HELPER_DIRECTORY["sqlite"] = _SqliteAdvFuncHelper
 
 
 def init_sqlite_connexion(cnx):
     def _parse_sqlite_date(date):
         if isinstance(date, str):
-            date = date.split('.')[0] # remove microseconds
+            date = date.split(".")[0]  # remove microseconds
             try:
-                date = strptime(date, '%Y-%m-%d %H:%M:%S')
+                date = strptime(date, "%Y-%m-%d %H:%M:%S")
             except:
-                date = strptime(date, '%Y-%m-%d')
+                date = strptime(date, "%Y-%m-%d")
         return date
 
     def year(date):
         date = _parse_sqlite_date(date)
         return date.year
+
     def month(date):
         date = _parse_sqlite_date(date)
         return date.month
+
     def day(date):
         date = _parse_sqlite_date(date)
         return date.day
+
     def hour(date):
         date = _parse_sqlite_date(date)
         return date.hour
+
     def minute(date):
         date = _parse_sqlite_date(date)
         return date.minute
+
     def second(date):
         date = _parse_sqlite_date(date)
         return date.second
-    cnx.create_function('MONTH', 1, month)
-    cnx.create_function('YEAR', 1, year)
-    cnx.create_function('DAY', 1, day)
-    cnx.create_function('HOUR', 1, hour)
-    cnx.create_function('MINUTE', 1, minute)
-    cnx.create_function('SECOND', 1, second)
+
+    cnx.create_function("MONTH", 1, month)
+    cnx.create_function("YEAR", 1, year)
+    cnx.create_function("DAY", 1, day)
+    cnx.create_function("HOUR", 1, hour)
+    cnx.create_function("MINUTE", 1, minute)
+    cnx.create_function("SECOND", 1, second)
 
     from random import random
-    cnx.create_function('RANDOM', 0, random)
+
+    cnx.create_function("RANDOM", 0, random)
 
     def regexp_match(pattern, tested_value):
         return re.search(pattern, tested_value) is not None
-    cnx.create_function('REGEXP', 2, regexp_match)
-
-sqlite_hooks = db.SQL_CONNECT_HOOKS.setdefault('sqlite', [])
+
+    cnx.create_function("REGEXP", 2, regexp_match)
+
+
+sqlite_hooks = db.SQL_CONNECT_HOOKS.setdefault("sqlite", [])
 sqlite_hooks.append(init_sqlite_connexion)
 
+
 def register_sqlite_pyfunc(pyfunc, nb_params=None, funcname=None):
     if nb_params is None:
         nb_params = len(inspect.getargspec(pyfunc).args)
     if funcname is None:
         funcname = pyfunc.__name__.upper()
+
     def init_sqlite_connection(cnx):
         cnx.create_function(funcname, nb_params, pyfunc)
-    sqlite_hooks = db.SQL_CONNECT_HOOKS.setdefault('sqlite', [])
+
+    sqlite_hooks = db.SQL_CONNECT_HOOKS.setdefault("sqlite", [])
     sqlite_hooks.append(init_sqlite_connection)
     funcdescr = db.SQL_FUNCTIONS_REGISTRY.get_function(funcname)
-    funcdescr.add_support('sqlite')
+    funcdescr.add_support("sqlite")
diff --git a/logilab/database/sqlserver.py b/logilab/database/sqlserver.py
--- a/logilab/database/sqlserver.py
+++ b/logilab/database/sqlserver.py
@@ -31,8 +31,9 @@ from warnings import warn
 
 from logilab import database as db
 
+
 class _BaseSqlServerAdapter(db.DBAPIAdapter):
-    driver = 'Override in subclass'
+    driver = "Override in subclass"
     _use_trusted_connection = False
     _use_autocommit = False
 
@@ -54,14 +55,22 @@ class _BaseSqlServerAdapter(db.DBAPIAdap
 
     @classmethod
     def _process_extra_args(cls, arguments):
-        arguments = arguments.lower().split(';')
-        if 'trusted_connection' in arguments:
+        arguments = arguments.lower().split(";")
+        if "trusted_connection" in arguments:
             cls.use_trusted_connection(True)
-        if 'autocommit' in arguments:
+        if "autocommit" in arguments:
             cls.use_autocommit(True)
 
-    def connect(self, host='', database='', user='', password='', port=None,
-                schema=None, extra_args=None):
+    def connect(
+        self,
+        host="",
+        database="",
+        user="",
+        password="",
+        port=None,
+        schema=None,
+        extra_args=None,
+    ):
         """Handles pyodbc connection format
 
         If extra_args is not None, it is expected to be a string
@@ -75,23 +84,29 @@ class _BaseSqlServerAdapter(db.DBAPIAdap
         if schema is not None:
             # NOTE: SQLServer supports schemas
             # cf. http://msdn.microsoft.com/en-us/library/ms189462%28v=SQL.90%29.aspx
-            warn('schema support is not implemented on sqlserver backends, ignoring schema %s'
-                 % schema)
+            warn(
+                "schema support is not implemented on sqlserver backends, ignoring schema %s"
+                % schema
+            )
+
         class SqlServerCursor(object):
             """cursor adapting usual dict format to pyodbc/adobdapi format
             in SQL queries
             """
+
             def __init__(self, cursor):
                 self._cursor = cursor
 
-            def _replace_parameters(self, sql, kwargs, _date_class=datetime.date, many=False):
+            def _replace_parameters(
+                self, sql, kwargs, _date_class=datetime.date, many=False
+            ):
                 if not many:
                     kwargs = [kwargs]
 
                 if isinstance(kwargs[0], dict):
                     args_list = []
-                    new_sql = re.sub(r'%\(([^\)]+)\)s', r'?', sql)
-                    key_order = re.findall(r'%\(([^\)]+)\)s', sql)
+                    new_sql = re.sub(r"%\(([^\)]+)\)s", r"?", sql)
+                    key_order = re.findall(r"%\(([^\)]+)\)s", sql)
                     for kwarg in kwargs:
                         args = []
                         for key in key_order:
@@ -99,7 +114,7 @@ class _BaseSqlServerAdapter(db.DBAPIAdap
                             if arg.__class__ == _date_class:
                                 arg = datetime.datetime.combine(arg, datetime.time(0))
                             elif isinstance(arg, str):
-                                arg = arg.decode('utf-8')
+                                arg = arg.decode("utf-8")
                             args.append(arg)
                         args_list.append(tuple(args))
 
@@ -109,14 +124,14 @@ class _BaseSqlServerAdapter(db.DBAPIAdap
                         return new_sql, args_list[0]
                 else:
                     # XXX dumb
-                    return re.sub(r'%s', r'?', sql), kwargs
+                    return re.sub(r"%s", r"?", sql), kwargs
 
             def execute(self, sql, kwargs=None):
                 if kwargs is None:
                     self._cursor.execute(sql)
                 else:
                     final_sql, args = self._replace_parameters(sql, kwargs)
-                    self._cursor.execute(final_sql , args)
+                    self._cursor.execute(final_sql, args)
 
             def executemany(self, sql, kwargss):
                 if not isinstance(kwargss, (list, tuple)):
@@ -137,7 +152,7 @@ class _BaseSqlServerAdapter(db.DBAPIAdap
                 row = self._cursor.fetchone()
                 return self._replace_smalldate(row, smalldate_cols)
 
-            def fetchall (self):
+            def fetchall(self):
                 smalldate_cols = self._get_smalldate_columns()
                 rows = []
                 for row in self._cursor.fetchall():
@@ -167,19 +182,29 @@ class _BaseSqlServerAdapter(db.DBAPIAdap
         class SqlServerCnxWrapper:
             def __init__(self, cnx):
                 self._cnx = cnx
+
             def cursor(self):
                 return SqlServerCursor(self._cnx.cursor())
+
             def __getattr__(self, attrname):
                 return getattr(self._cnx, attrname)
-        cnx = self._connect(host=host, database=database, user=user, password=password, port=port, extra_args=extra_args)
+
+        cnx = self._connect(
+            host=host,
+            database=database,
+            user=user,
+            password=password,
+            port=port,
+            extra_args=extra_args,
+        )
         return self._wrap_if_needed(SqlServerCnxWrapper(cnx))
 
-    def _transformation_callback(self, description, encoding='utf-8', binarywrap=None):
+    def _transformation_callback(self, description, encoding="utf-8", binarywrap=None):
         typecode = description[1]
         assert typecode is not None, self
         transform = None
         if typecode == self.STRING and not self.returns_unicode:
-            transform = lambda v: unicode(v, encoding, 'replace')
+            transform = lambda v: unicode(v, encoding, "replace")
         elif typecode == self.BINARY:  # value is a python buffer
             if binarywrap is None:
                 transforn = lambda v: v[:]
@@ -188,52 +213,62 @@ class _BaseSqlServerAdapter(db.DBAPIAdap
         elif typecode == self.UNKNOWN:
             # may occurs on constant selection for instance (e.g. SELECT 'hop')
             # with postgresql at least
-            transform = lambda v: unicode(value, encoding, 'replace') if isinstance(v, str) else v
+            transform = (
+                lambda v: unicode(v, encoding, "replace")
+                if isinstance(v, str)
+                else v
+            )
         return transform
 
 
 class _PyodbcAdapter(_BaseSqlServerAdapter):
-    def _connect(self, host='', database='', user='', password='',
-                 port=None, extra_args=None):
+    def _connect(
+        self, host="", database="", user="", password="", port=None, extra_args=None
+    ):
         if extra_args is not None:
             self._process_extra_args(extra_args)
-        #cnx_string_bits = ['DRIVER={%(driver)s}']
-        variables = {'host' : host,
-                     'database' : database,
-                     'user' : user, 'password' : password,
-                     'driver': self.driver}
+        # cnx_string_bits = ['DRIVER={%(driver)s}']
+        variables = {
+            "host": host,
+            "database": database,
+            "user": user,
+            "password": password,
+            "driver": self.driver,
+        }
         if self._use_trusted_connection:
-            variables['Trusted_Connection'] = 'yes'
-            del variables['user']
-            del variables['password']
+            variables["Trusted_Connection"] = "yes"
+            del variables["user"]
+            del variables["password"]
         if self._use_autocommit:
-            variables['autocommit'] = True
+            variables["autocommit"] = True
         return self._native_module.connect(**variables)
 
-    def _transformation_callback(self, description, encoding='utf-8', binarywrap=None):
+    def _transformation_callback(self, description, encoding="utf-8", binarywrap=None):
         # Work around pyodbc setting BINARY to bytearray but description[1] to buffer
         # https://github.com/mkleehammer/pyodbc/pull/34
         typecode = description[1]
         if typecode is buffer:
             return binarywrap
-        return super(_PyodbcAdapter, self)._transformation_callback(description, encoding, binarywrap)
-
+        return super(_PyodbcAdapter, self)._transformation_callback(
+            description, encoding, binarywrap
+        )
 
 
 class _AdodbapiAdapter(_BaseSqlServerAdapter):
-
-    def _connect(self, host='', database='', user='', password='',
-                 port=None, extra_args=None):
+    def _connect(
+        self, host="", database="", user="", password="", port=None, extra_args=None
+    ):
         if extra_args is not None:
             self._process_extra_args(extra_args)
         if self._use_trusted_connection:
             # this will open a MS-SQL table with Windows authentication
-            auth = 'Integrated Security=SSPI'
+            auth = "Integrated Security=SSPI"
         else:
             # this set opens a MS-SQL table with SQL authentication
-            auth = 'user ID=%s; Password=%s;' % (user, password)
-        constr = r"Initial Catalog=%s; Data Source=%s; Provider=SQLOLEDB.1; %s"\
-                 % (database, host, auth)
+            auth = "user ID=%s; Password=%s;" % (user, password)
+        constr = r"Initial Catalog=%s; Data Source=%s; Provider=SQLOLEDB.1; %s" % (
+            database,
+            host,
+            auth,
+        )
         return self._native_module.connect(constr)
-
-
diff --git a/logilab/database/sqlserver2000.py b/logilab/database/sqlserver2000.py
--- a/logilab/database/sqlserver2000.py
+++ b/logilab/database/sqlserver2000.py
@@ -28,6 +28,7 @@ Supported drivers, in order of preferenc
 from logilab import database as db
 from logilab.database.sqlserver import _PyodbcAdapter, _AdodbapiAdapter
 
+
 class _PyodbcSqlServer2000Adapter(_PyodbcAdapter):
     driver = "SQL Server"
 
@@ -35,13 +36,16 @@ class _PyodbcSqlServer2000Adapter(_Pyodb
 class _AdodbapiSqlServer2000Adapter(_AdodbapiAdapter):
     driver = "SQL Server"
 
-db._PREFERED_DRIVERS.update({
-    'sqlserver2000' : ['pyodbc', 'adodbapi', ],
-    })
 
-db._ADAPTER_DIRECTORY.update({
-    'sqlserver2000' : {'adodbapi': _AdodbapiSqlServer2000Adapter,
-                       'pyodbc': _PyodbcSqlServer2000Adapter},
-    })
+db._PREFERED_DRIVERS.update(
+    {"sqlserver2000": ["pyodbc", "adodbapi",],}
+)
 
-
+db._ADAPTER_DIRECTORY.update(
+    {
+        "sqlserver2000": {
+            "adodbapi": _AdodbapiSqlServer2000Adapter,
+            "pyodbc": _PyodbcSqlServer2000Adapter,
+        },
+    }
+)
diff --git a/logilab/database/sqlserver2005.py b/logilab/database/sqlserver2005.py
--- a/logilab/database/sqlserver2005.py
+++ b/logilab/database/sqlserver2005.py
@@ -30,32 +30,39 @@ from warnings import warn
 from logilab import database as db
 from logilab.database.sqlserver import _PyodbcAdapter, _AdodbapiAdapter
 
+
 class _PyodbcSqlServer2005Adapter(_PyodbcAdapter):
     driver = "SQL Server Native Client 10.0"
 
+
 class _AdodbapiSqlServer2005Adapter(_AdodbapiAdapter):
     driver = "SQL Server Native Client 10.0"
 
-db._PREFERED_DRIVERS.update({
-    'sqlserver2005' : ['pyodbc', 'adodbapi', ],
-    })
-db._ADAPTER_DIRECTORY.update({
-    'sqlserver2005' : {'adodbapi': _AdodbapiSqlServer2005Adapter,
-                       'pyodbc': _PyodbcSqlServer2005Adapter},
-    })
+
+db._PREFERED_DRIVERS.update(
+    {"sqlserver2005": ["pyodbc", "adodbapi",],}
+)
+db._ADAPTER_DIRECTORY.update(
+    {
+        "sqlserver2005": {
+            "adodbapi": _AdodbapiSqlServer2005Adapter,
+            "pyodbc": _PyodbcSqlServer2005Adapter,
+        },
+    }
+)
 
 
 class _SqlServer2005FuncHelper(db._GenericAdvFuncHelper):
-    backend_name = 'sqlserver2005'
+    backend_name = "sqlserver2005"
     ilike_support = False
     TYPE_MAPPING = db._GenericAdvFuncHelper.TYPE_MAPPING.copy()
-    TYPE_MAPPING['String'] =   'nvarchar(max)'
-    TYPE_MAPPING['Boolean'] =  'bit'
-    TYPE_MAPPING['Date'] =     'smalldatetime'
-    TYPE_MAPPING['Datetime'] = 'datetime'
-    TYPE_MAPPING['Password'] = 'varbinary(255)'
-    TYPE_MAPPING['Bytes'] =    'varbinary(max)'
-    TYPE_MAPPING['SizeConstrainedString'] = 'nvarchar(%s)'
+    TYPE_MAPPING["String"] = "nvarchar(max)"
+    TYPE_MAPPING["Boolean"] = "bit"
+    TYPE_MAPPING["Date"] = "smalldatetime"
+    TYPE_MAPPING["Datetime"] = "datetime"
+    TYPE_MAPPING["Password"] = "varbinary(255)"
+    TYPE_MAPPING["Bytes"] = "varbinary(max)"
+    TYPE_MAPPING["SizeConstrainedString"] = "nvarchar(%s)"
     TYPE_CONVERTERS = db._GenericAdvFuncHelper.TYPE_CONVERTERS.copy()
 
     def list_tables(self, cursor):
@@ -66,36 +73,67 @@ class _SqlServer2005FuncHelper(db._Gener
         # return  [row.table_name for row in cursor.fetchall()]
 
     def list_views(self, cursor):
-        cursor.execute('SELECT table_name FROM INFORMATION_SCHEMA.VIEWS;')
+        cursor.execute("SELECT table_name FROM INFORMATION_SCHEMA.VIEWS;")
         return [row[0] for row in cursor.fetchall()]
 
     def list_indices(self, cursor, table=None):
         """return the list of indices of a database, only for the given table if specified"""
         sql = "SELECT name FROM sys.indexes"
         if table:
-            sql = ("SELECT ind.name FROM sys.indexes as ind, sys.objects as obj WHERE "
-                   "obj.object_id = ind.object_id AND obj.name = '%s'"
-                   % table)
+            sql = (
+                "SELECT ind.name FROM sys.indexes as ind, sys.objects as obj WHERE "
+                "obj.object_id = ind.object_id AND obj.name = '%s'" % table
+            )
         cursor.execute(sql)
         return [r[0] for r in cursor.fetchall()]
 
-    def backup_commands(self, backupfile, keepownership=True,
-                        dbname=None, dbhost=None, dbport=None, dbuser=None, dbschema=None):
-        return [[sys.executable, os.path.normpath(__file__),
-                 "_SqlServer2005FuncHelper._do_backup", dbhost or self.dbhost,
-                 dbname or self.dbname, backupfile]]
-
-    def restore_commands(self, backupfile, keepownership=True, drop=True,
-                         dbname=None, dbhost=None, dbport=None, dbuser=None,
-                         dbencoding=None, dbschema=None):
-        return [[sys.executable, os.path.normpath(__file__),
-                "_SqlServer2005FuncHelper._do_restore", dbhost or self.dbhost,
-                 dbname or self.dbname, backupfile],
-                ]
+    def backup_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbschema=None,
+    ):
+        return [
+            [
+                sys.executable,
+                os.path.normpath(__file__),
+                "_SqlServer2005FuncHelper._do_backup",
+                dbhost or self.dbhost,
+                dbname or self.dbname,
+                backupfile,
+            ]
+        ]
+
+    def restore_commands(
+        self,
+        backupfile,
+        keepownership=True,
+        drop=True,
+        dbname=None,
+        dbhost=None,
+        dbport=None,
+        dbuser=None,
+        dbencoding=None,
+        dbschema=None,
+    ):
+        return [
+            [
+                sys.executable,
+                os.path.normpath(__file__),
+                "_SqlServer2005FuncHelper._do_restore",
+                dbhost or self.dbhost,
+                dbname or self.dbname,
+                backupfile,
+            ],
+        ]
 
     def sql_current_date(self):
         """Return sql for the current date. """
-        return 'GETDATE()'
+        return "GETDATE()"
 
     def _index_names(self, cursor, table, column):
         """
@@ -119,7 +157,7 @@ AND i.object_id = OBJECT_ID('%(table)s')
 AND k.name = '%(col)s'
 AND k.object_id=i.object_id
 AND j.column_id = k.column_id;"""
-        cursor.execute(has_index_sql % {'table': table, 'col': column})
+        cursor.execute(has_index_sql % {"table": table, "col": column})
         return cursor.fetchall()
 
     def index_exists(self, cursor, table, column, unique=False):
@@ -132,21 +170,20 @@ AND j.column_id = k.column_id;"""
         return len(indexes) > 0
 
     def sql_concat_string(self, lhs, rhs):
-        return '%s + %s' % (lhs, rhs)
-
-    def sql_temporary_table(self, table_name, table_schema,
-                            drop_on_commit=True):
+        return "%s + %s" % (lhs, rhs)
+
+    def sql_temporary_table(self, table_name, table_schema, drop_on_commit=True):
         table_name = self.temporary_table_name(table_name)
         return "CREATE TABLE %s (%s);" % (table_name, table_schema)
 
     def sql_change_col_type(self, table, column, coltype, null_allowed):
-        raise NotImplementedError('use .change_col_type()')
+        raise NotImplementedError("use .change_col_type()")
 
     def sql_set_null_allowed(self, table, column, coltype, null_allowed):
-        raise NotImplementedError('use .set_null_allowed()')
+        raise NotImplementedError("use .set_null_allowed()")
 
     def sql_rename_table(self, oldname, newname):
-        return  'EXEC sp_rename %s, %s' % (oldname, newname)
+        return "EXEC sp_rename %s, %s" % (oldname, newname)
 
     def sql_add_limit_offset(self, sql, limit=None, offset=0, orderby=None):
         """
@@ -156,29 +193,31 @@ AND j.column_id = k.column_id;"""
         """
         if limit is None and not offset:
             return sql
-        if offset is None: # not sure if this can happen
+        if offset is None:  # not sure if this can happen
             offset = 0
-        if not sql.startswith('SELECT ') or 'FROM' not in sql:
+        if not sql.startswith("SELECT ") or "FROM" not in sql:
             raise ValueError(sql)
-        union_queries = sql.split('UNION ALL')
+        union_queries = sql.split("UNION ALL")
         rewritten_union_queries = []
         for _sql in union_queries:
             _sql = _sql.strip()
-            raw_columns, tables = _sql.split('FROM', 1)
-            columns = raw_columns[7:].split(',') # 7 == len('SELECT ')
-            aliases_cols = [] # list of (colname, alias)
+            raw_columns, tables = _sql.split("FROM", 1)
+            columns = raw_columns[7:].split(",")  # 7 == len('SELECT ')
+            aliases_cols = []  # list of (colname, alias)
             alias_counter = 1
             for c in columns:
-                if 'AS' in c:
-                    aliases_cols.append(c.strip().split('AS'))
+                if "AS" in c:
+                    aliases_cols.append(c.strip().split("AS"))
                 else:
-                    aliases_cols.append([c.strip(), '_L%02d' % alias_counter])
+                    aliases_cols.append([c.strip(), "_L%02d" % alias_counter])
                     alias_counter += 1
-            cooked_columns = ', '.join(' AS '.join(alias_col) for alias_col in aliases_cols)
-            new_sql = ' '.join(['SELECT', cooked_columns, 'FROM', tables])
+            cooked_columns = ", ".join(
+                " AS ".join(alias_col) for alias_col in aliases_cols
+            )
+            new_sql = " ".join(["SELECT", cooked_columns, "FROM", tables])
             rewritten_union_queries.append(new_sql)
-        new_sql = '\nUNION ALL\n'.join(rewritten_union_queries)
-        outer_aliases = ', '.join(alias for _colname, alias in aliases_cols)
+        new_sql = "\nUNION ALL\n".join(rewritten_union_queries)
+        outer_aliases = ", ".join(alias for _colname, alias in aliases_cols)
         if orderby is None:
             order_by = outer_aliases
         else:
@@ -186,31 +225,37 @@ AND j.column_id = k.column_id;"""
             for i, term in enumerate(orderby):
                 split = term.split()
                 try:
-                    idx = int(split[0]) - 1 
+                    idx = int(split[0]) - 1
                 except ValueError:
                     idx = i
                 split[0] = aliases_cols[idx][1]
-                order_by.append(' '.join(split))
-            order_by = ', '.join(order_by)
-        new_query = ['WITH orderedrows AS (',
-                     'SELECT ', outer_aliases, ', '
-                     "ROW_NUMBER() OVER (ORDER BY %s) AS __RowNumber" % order_by,
-                     'FROM (',
-                     new_sql,
-                     ') AS _SQ1 )',
-                     #columns,
-                     'SELECT ', outer_aliases,
-                     'FROM orderedrows WHERE ']
+                order_by.append(" ".join(split))
+            order_by = ", ".join(order_by)
+        new_query = [
+            "WITH orderedrows AS (",
+            "SELECT ",
+            outer_aliases,
+            ", " "ROW_NUMBER() OVER (ORDER BY %s) AS __RowNumber" % order_by,
+            "FROM (",
+            new_sql,
+            ") AS _SQ1 )",
+            # columns,
+            "SELECT ",
+            outer_aliases,
+            "FROM orderedrows WHERE ",
+        ]
         limitation = []
         if limit is not None:
-            limitation.append('__RowNumber <= %d' % (offset+limit))
+            limitation.append("__RowNumber <= %d" % (offset + limit))
         if offset:
-            limitation.append('__RowNumber > %d' % offset) # row number is 1 based
-        new_query.append(' AND '.join(limitation))
-        sql = '\n'.join(new_query)
+            limitation.append("__RowNumber > %d" % offset)  # row number is 1 based
+        new_query.append(" AND ".join(limitation))
+        sql = "\n".join(new_query)
         return sql
 
-    def sql_add_order_by(self, sql, sortterms, selection, needwrap, has_limit_or_offset):
+    def sql_add_order_by(
+        self, sql, sortterms, selection, needwrap, has_limit_or_offset
+    ):
         """
         add an ORDER BY clause to the SQL query, and wrap the query if necessary
         :sql: the original sql query
@@ -223,89 +268,108 @@ AND j.column_id = k.column_id;"""
         if has_limit_or_offset:
             return sql
         if sortterms and needwrap:
-            selection = ['T1.C%s' % i for i in range(len(selection))]
+            selection = ["T1.C%s" % i for i in range(len(selection))]
             renamed_sortterms = []
             for term in sortterms:
                 split = term.split()
-                split[0] = 'T1.C%s' % (int(split[0])-1)
-                renamed_sortterms.append(' '.join(split))
-            sql = 'SELECT %s FROM (%s) AS T1\nORDER BY %s' % (','.join(selection),
-                                                            sql,
-                                                            ','.join(renamed_sortterms))
+                split[0] = "T1.C%s" % (int(split[0]) - 1)
+                renamed_sortterms.append(" ".join(split))
+            sql = "SELECT %s FROM (%s) AS T1\nORDER BY %s" % (
+                ",".join(selection),
+                sql,
+                ",".join(renamed_sortterms),
+            )
         else:
-            sql += '\nORDER BY %s' % ','.join(sortterms)
+            sql += "\nORDER BY %s" % ",".join(sortterms)
 
         return sql
 
     def sqls_create_multicol_unique_index(self, table, columns, indexname=None):
         columns = sorted(columns)
-        view = 'utv_%s_%s' % (table, indexname or '_'.join(columns))
-        where = ' AND '.join(['%s IS NOT NULL' % c for c in columns])
+        view = "utv_%s_%s" % (table, indexname or "_".join(columns))
+        where = " AND ".join(["%s IS NOT NULL" % c for c in columns])
         if indexname is None:
-            warn('You should provide an explicit index name else you risk '
-                 'a silent truncation of the computed index name.',
-                 DeprecationWarning)
-            indexname = 'unique_%s_%s_idx' % (table, '_'.join(columns))
-        sql = ['CREATE VIEW %s WITH SCHEMABINDING AS SELECT %s FROM dbo.%s WHERE %s ;'%(view.lower(), 
-                                                      ', '.join(columns),
-                                                      table,
-                                                      where),
-               'CREATE UNIQUE CLUSTERED INDEX %s ON %s(%s);' % (indexname.lower(),
-                                                                view.lower(),
-                                                                ','.join(columns))
-            ]
+            warn(
+                "You should provide an explicit index name else you risk "
+                "a silent truncation of the computed index name.",
+                DeprecationWarning,
+            )
+            indexname = "unique_%s_%s_idx" % (table, "_".join(columns))
+        sql = [
+            "CREATE VIEW %s WITH SCHEMABINDING AS SELECT %s FROM dbo.%s WHERE %s ;"
+            % (view.lower(), ", ".join(columns), table, where),
+            "CREATE UNIQUE CLUSTERED INDEX %s ON %s(%s);"
+            % (indexname.lower(), view.lower(), ",".join(columns)),
+        ]
         return sql
 
     def sqls_drop_multicol_unique_index(self, table, columns, indexname=None):
         if indexname is None:
-            warn('You should provide an explicit index name else you risk '
-                 'a silent truncation of the computed index name.',
-                 DeprecationWarning)
+            warn(
+                "You should provide an explicit index name else you risk "
+                "a silent truncation of the computed index name.",
+                DeprecationWarning,
+            )
         columns = sorted(columns)
-        view = 'utv_%s_%s' % (table, indexname or '_'.join(columns))
-        sql = ("IF OBJECT_ID ('%(v)s', 'V') IS NOT NULL DROP VIEW %(v)s"
-               % {'v': view.lower()}) # also drops the index
+        view = "utv_%s_%s" % (table, indexname or "_".join(columns))
+        sql = "IF OBJECT_ID ('%(v)s', 'V') IS NOT NULL DROP VIEW %(v)s" % {
+            "v": view.lower()
+        }  # also drops the index
         return [sql]
 
     def sql_drop_index(self, table, column, unique=False):
         if unique:
-            return super(_SqlServer2005FuncHelper, self).sql_drop_index(table, column, unique)
+            return super(_SqlServer2005FuncHelper, self).sql_drop_index(
+                table, column, unique
+            )
         else:
             idx = self._index_name(table, column, unique)
-            return 'DROP INDEX %s ON %s;' % (idx, table)
-
+            return "DROP INDEX %s ON %s;" % (idx, table)
 
     def change_col_type(self, cursor, table, column, coltype, null_allowed):
         alter = []
         drops = []
         creates = []
-        for idx_name, idx_type, is_unique, is_unique_cstr in self._index_names(cursor, table, column):
+        for idx_name, idx_type, is_unique, is_unique_cstr in self._index_names(
+            cursor, table, column
+        ):
             if is_unique_cstr:
-                drops.append('ALTER TABLE %s DROP CONSTRAINT %s' % (table, idx_name))
-                creates.append('ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)' % (table, idx_name, column))
+                drops.append("ALTER TABLE %s DROP CONSTRAINT %s" % (table, idx_name))
+                creates.append(
+                    "ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)"
+                    % (table, idx_name, column)
+                )
             else:
-                drops.append('DROP INDEX %s ON %s' % (idx_name, table))
+                drops.append("DROP INDEX %s ON %s" % (idx_name, table))
                 if is_unique:
-                    unique = 'UNIQUE'
+                    unique = "UNIQUE"
                 else:
-                    unique = ''
-                creates.append('CREATE %s %s INDEX %s ON %s(%s)' % (unique, idx_type, idx_name, table, column))
+                    unique = ""
+                creates.append(
+                    "CREATE %s %s INDEX %s ON %s(%s)"
+                    % (unique, idx_type, idx_name, table, column)
+                )
         for view_name in self.list_views(cursor):
-            if not view_name.startswith('utv_%s_' % table.lower()):
+            if not view_name.startswith("utv_%s_" % table.lower()):
                 continue
-            cursor.execute('SELECT column_name from information_schema.columns where table_name = %(t)s', {'t': view_name})
+            cursor.execute(
+                "SELECT column_name from information_schema.columns where table_name = %(t)s",
+                {"t": view_name},
+            )
             columns = [row[0] for row in cursor.fetchall()]
             if column not in columns:
                 continue
-            indexname = view_name.split('_', 3)[-1]
+            indexname = view_name.split("_", 3)[-1]
             drops += self.sqls_drop_multicol_unique_index(table, columns, indexname)
             creates += self.sqls_create_multicol_unique_index(table, columns, indexname)
 
         if null_allowed:
-            null = 'NULL'
+            null = "NULL"
         else:
-            null = 'NOT NULL'
-        alter.append('ALTER TABLE %s ALTER COLUMN %s %s %s' % (table, column, coltype, null))
+            null = "NOT NULL"
+        alter.append(
+            "ALTER TABLE %s ALTER COLUMN %s %s %s" % (table, column, coltype, null)
+        )
         for stmt in drops + alter + creates:
             cursor.execute(stmt)
 
@@ -313,27 +377,31 @@ AND j.column_id = k.column_id;"""
         return self.change_col_type(cursor, table, column, coltype, null_allowed)
 
     def temporary_table_name(self, table_name):
-        if not table_name.startswith('#'):
-            table_name = '#' + table_name
+        if not table_name.startswith("#"):
+            table_name = "#" + table_name
         return table_name
 
     @staticmethod
     def _do_backup():
         import time
         from logilab.database import get_connection
+
         dbhost = sys.argv[2]
         dbname = sys.argv[3]
         filename = sys.argv[4]
-        cnx = get_connection(driver='sqlserver2005',
-                             host=dbhost, database=dbname,
-                             extra_args='autocommit;trusted_connection')
+        cnx = get_connection(
+            driver="sqlserver2005",
+            host=dbhost,
+            database=dbname,
+            extra_args="autocommit;trusted_connection",
+        )
         cursor = cnx.cursor()
         sql_server_local_filename = r"C:\Backups\%s" % dbname
         file_share_filename = r"\\%s\Backups\%s" % (dbhost, dbname)
-        cursor.execute("BACKUP DATABASE %(db)s TO DISK= %(path)s ",
-                       {'db':dbname,
-                        'path':sql_server_local_filename,
-                        })
+        cursor.execute(
+            "BACKUP DATABASE %(db)s TO DISK= %(path)s ",
+            {"db": dbname, "path": sql_server_local_filename,},
+        )
         prev_size = -1
         err_count = 0
         same_size_count = 0
@@ -342,7 +410,7 @@ AND j.column_id = k.column_id;"""
             try:
                 size = os.path.getsize(file_share_filename)
             except OSError as exc:
-                self.logger.exception('error accessing %s', file_share_filename)
+                self.logger.exception("error accessing %s", file_share_filename)
                 err_count += 1
             if size > prev_size:
                 same_size_count = 0
@@ -358,41 +426,47 @@ AND j.column_id = k.column_id;"""
     def _do_restore():
         """return the SQL statement to restore a backup of the given database"""
         from logilab.database import get_connection
+
         dbhost = sys.argv[2]
         dbname = sys.argv[3]
         filename = sys.argv[4]
         sql_server_local_filename = r"C:\Backups\%s" % dbname
         file_share_filename = r"\\%s\Backups\%s" % (dbhost, dbname)
         shutil.copy(filename, file_share_filename)
-        cnx = get_connection(driver='sqlserver2005',
-                             host=dbhost, database='master',
-                             extra_args='autocommit;trusted_connection')
+        cnx = get_connection(
+            driver="sqlserver2005",
+            host=dbhost,
+            database="master",
+            extra_args="autocommit;trusted_connection",
+        )
 
         cursor = cnx.cursor()
-        cursor.execute("RESTORE DATABASE %(db)s FROM DISK= %(path)s WITH REPLACE",
-                       {'db':dbname,
-                        'path':sql_server_local_filename,
-                        })
+        cursor.execute(
+            "RESTORE DATABASE %(db)s FROM DISK= %(path)s WITH REPLACE",
+            {"db": dbname, "path": sql_server_local_filename,},
+        )
         import time
+
         sleeptime = 10
         while True:
             time.sleep(sleeptime)
             try:
-                cnx = get_connection(driver='sqlserver2005',
-                                     host=dbhost, database=dbname,
-                                     extra_args='trusted_connection')
+                cnx = get_connection(
+                    driver="sqlserver2005",
+                    host=dbhost,
+                    database=dbname,
+                    extra_args="trusted_connection",
+                )
                 break
             except:
-                sleeptime = min(sleeptime*2, 300)
+                sleeptime = min(sleeptime * 2, 300)
         os.remove(file_share_filename)
         sys.exit(0)
 
 
-db._ADV_FUNC_HELPER_DIRECTORY['sqlserver2005'] = _SqlServer2005FuncHelper
-
-
-
-
-if __name__ == "__main__": # used to backup sql server db
+db._ADV_FUNC_HELPER_DIRECTORY["sqlserver2005"] = _SqlServer2005FuncHelper
+
+
+if __name__ == "__main__":  # used to backup sql server db
     func_call = sys.argv[1]
-    eval(func_call+'()')
+    eval(func_call + "()")
diff --git a/logilab/database/sqlserver2008.py b/logilab/database/sqlserver2008.py
--- a/logilab/database/sqlserver2008.py
+++ b/logilab/database/sqlserver2008.py
@@ -28,6 +28,7 @@ Supported drivers, in order of preferenc
 from logilab import database as db
 from logilab.database.sqlserver import _PyodbcAdapter, _AdodbapiAdapter
 
+
 class _PyodbcSqlServer2008Adapter(_PyodbcAdapter):
     driver = "SQL Server Native Client 10.0"
 
@@ -35,11 +36,16 @@ class _PyodbcSqlServer2008Adapter(_Pyodb
 class _AdodbapiSqlServer2008Adapter(_AdodbapiAdapter):
     driver = "SQL Server Native Client 10.0"
 
-db._PREFERED_DRIVERS.update({
-    'sqlserver2008' : ['pyodbc', 'adodbapi', ],
-    })
 
-db._ADAPTER_DIRECTORY.update({
-    'sqlserver2008' : {'adodbapi': _AdodbapiSqlServer2008Adapter,
-                       'pyodbc': _PyodbcSqlServer2008Adapter},
-    })
+db._PREFERED_DRIVERS.update(
+    {"sqlserver2008": ["pyodbc", "adodbapi",],}
+)
+
+db._ADAPTER_DIRECTORY.update(
+    {
+        "sqlserver2008": {
+            "adodbapi": _AdodbapiSqlServer2008Adapter,
+            "pyodbc": _PyodbcSqlServer2008Adapter,
+        },
+    }
+)
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -28,27 +28,27 @@ from os import path
 here = path.abspath(path.dirname(__file__))
 
 pkginfo = {}
-with open(path.join(here, '__pkginfo__.py')) as f:
+with open(path.join(here, "__pkginfo__.py")) as f:
     exec(f.read(), pkginfo)
 
 # Get the long description from the relevant file
-with open(path.join(here, 'README'), encoding='utf-8') as f:
+with open(path.join(here, "README"), encoding="utf-8") as f:
     long_description = f.read()
 
 setup(
-    name=pkginfo['distname'],
-    version=pkginfo['version'],
-    description=pkginfo['description'],
+    name=pkginfo["distname"],
+    version=pkginfo["version"],
+    description=pkginfo["description"],
     long_description=long_description,
-    url=pkginfo['web'],
-    author=pkginfo['author'],
-    author_email=pkginfo['author_email'],
-    license=pkginfo['license'],
+    url=pkginfo["web"],
+    author=pkginfo["author"],
+    author_email=pkginfo["author_email"],
+    license=pkginfo["license"],
     # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
-    classifiers=pkginfo['classifiers'],
-    packages=find_packages(exclude=['contrib', 'docs']),
-    namespace_packages=[pkginfo['subpackage_of']],
-    python_requires='>=3',
-    install_requires=pkginfo['install_requires'],
-    tests_require=pkginfo['tests_require'],
+    classifiers=pkginfo["classifiers"],
+    packages=find_packages(exclude=["contrib", "docs"]),
+    namespace_packages=[pkginfo["subpackage_of"]],
+    python_requires=">=3",
+    install_requires=pkginfo["install_requires"],
+    tests_require=pkginfo["tests_require"],
 )
diff --git a/test/unittest_converters.py b/test/unittest_converters.py
--- a/test/unittest_converters.py
+++ b/test/unittest_converters.py
@@ -22,32 +22,31 @@ from logilab.database import get_db_help
 
 
 class TYPEConvertersTC(unittest.TestCase):
-
     def test_existing_converters(self):
-        self.helper = get_db_helper('sqlite')
+        self.helper = get_db_helper("sqlite")
         self.assertEqual(len(self.helper.TYPE_CONVERTERS), 5)
 
     def test_convert_boolean(self):
-        self.helper = get_db_helper('sqlite')
-        self.assertEqual(self.helper.TYPE_CONVERTERS['Boolean'](False), False)
-        self.assertEqual(self.helper.TYPE_CONVERTERS['Boolean'](True), True)
-        self.assertEqual(self.helper.TYPE_CONVERTERS['Boolean'](0), False)
-        self.assertEqual(self.helper.TYPE_CONVERTERS['Boolean'](1), True)
-        self.assertEqual(self.helper.TYPE_CONVERTERS['Boolean'](''), False)
-        self.assertEqual(self.helper.TYPE_CONVERTERS['Boolean']('1'), True)
+        self.helper = get_db_helper("sqlite")
+        self.assertEqual(self.helper.TYPE_CONVERTERS["Boolean"](False), False)
+        self.assertEqual(self.helper.TYPE_CONVERTERS["Boolean"](True), True)
+        self.assertEqual(self.helper.TYPE_CONVERTERS["Boolean"](0), False)
+        self.assertEqual(self.helper.TYPE_CONVERTERS["Boolean"](1), True)
+        self.assertEqual(self.helper.TYPE_CONVERTERS["Boolean"](""), False)
+        self.assertEqual(self.helper.TYPE_CONVERTERS["Boolean"]("1"), True)
 
     def test_convert_datetime(self):
-        _date = date(1900,10,1)
+        _date = date(1900, 10, 1)
         _datetime = datetime(1900, 10, 1, 0, 0)
-        self.helper = get_db_helper('sqlite')
-        self.assertEqual(self.helper.TYPE_CONVERTERS['Datetime'](_date), _datetime)
+        self.helper = get_db_helper("sqlite")
+        self.assertEqual(self.helper.TYPE_CONVERTERS["Datetime"](_date), _datetime)
 
     def test_convert_date(self):
-        _date = date(1900,10,1)
+        _date = date(1900, 10, 1)
         _datetime = datetime(1900, 10, 1, 0, 0)
-        self.helper = get_db_helper('sqlite')
-        self.assertEqual(self.helper.TYPE_CONVERTERS['Date'](_datetime), _date)
+        self.helper = get_db_helper("sqlite")
+        self.assertEqual(self.helper.TYPE_CONVERTERS["Date"](_datetime), _date)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/test/unittest_db.py b/test/unittest_db.py
--- a/test/unittest_db.py
+++ b/test/unittest_db.py
@@ -29,7 +29,7 @@ from logilab.database import *
 from logilab.database import _PREFERED_DRIVERS as PREFERED_DRIVERS
 from logilab.database.postgres import _PGAdvFuncHelper
 from logilab.database.sqlite import _SqliteAdvFuncHelper
-from logilab.database import mysql, sqlserver # trigger registration
+from logilab.database import mysql, sqlserver  # trigger registration
 
 # from logilab.common.adbh import (_GenericAdvFuncHelper, _SqliteAdvFuncHelper,
 #                                  _PGAdvFuncHelper, _MyAdvFuncHelper,
@@ -38,75 +38,73 @@ from logilab.database import mysql, sqls
 #                                  UnsupportedFunction)
 
 
-
 class PreferedDriverTC(unittest.TestCase):
     def setUp(self):
-        self.drivers = {"pg":[('foo', None), ('bar', None)]}
-        self.drivers = {'pg' : ["foo", "bar"]}
-        self.old_drivers = PREFERED_DRIVERS['postgres'][:]
+        self.drivers = {"pg": [("foo", None), ("bar", None)]}
+        self.drivers = {"pg": ["foo", "bar"]}
+        self.old_drivers = PREFERED_DRIVERS["postgres"][:]
 
     def tearDown(self):
         """Reset PREFERED_DRIVERS as it was"""
-        PREFERED_DRIVERS['postgres'] = self.old_drivers
+        PREFERED_DRIVERS["postgres"] = self.old_drivers
 
     def testNormal(self):
-        set_prefered_driver('pg','bar', self.drivers)
-        self.assertEqual('bar', self.drivers['pg'][0])
+        set_prefered_driver("pg", "bar", self.drivers)
+        self.assertEqual("bar", self.drivers["pg"][0])
 
     def testFailuresDb(self):
         with self.assertRaises(UnknownDriver) as cm:
-            set_prefered_driver('oracle','bar', self.drivers)
-        self.assertEqual(str(cm.exception), 'Unknown driver oracle')
+            set_prefered_driver("oracle", "bar", self.drivers)
+        self.assertEqual(str(cm.exception), "Unknown driver oracle")
 
     def testFailuresDriver(self):
         with self.assertRaises(UnknownDriver) as cm:
-            set_prefered_driver('pg','baz', self.drivers)
-        self.assertEqual(str(cm.exception), 'Unknown module baz for pg')
+            set_prefered_driver("pg", "baz", self.drivers)
+        self.assertEqual(str(cm.exception), "Unknown module baz for pg")
 
     def testGlobalVar(self):
         # XXX: Is this test supposed to be useful ? Is it supposed to test
         #      set_prefered_driver ?
-        old_drivers = PREFERED_DRIVERS['postgres'][:]
+        old_drivers = PREFERED_DRIVERS["postgres"][:]
         expected = old_drivers[:]
-        expected.insert(0, expected.pop(expected.index('psycopg2ct')))
-        set_prefered_driver('postgres', 'psycopg2ct')
-        self.assertEqual(PREFERED_DRIVERS['postgres'], expected)
-        set_prefered_driver('postgres', 'psycopg2')
+        expected.insert(0, expected.pop(expected.index("psycopg2ct")))
+        set_prefered_driver("postgres", "psycopg2ct")
+        self.assertEqual(PREFERED_DRIVERS["postgres"], expected)
+        set_prefered_driver("postgres", "psycopg2")
         # self.assertEqual(PREFERED_DRIVERS['postgres'], old_drivers)
-        expected.insert(0, expected.pop(expected.index('psycopg2')))
-        self.assertEqual(PREFERED_DRIVERS['postgres'], expected)
+        expected.insert(0, expected.pop(expected.index("psycopg2")))
+        self.assertEqual(PREFERED_DRIVERS["postgres"], expected)
 
 
 class GetCnxTC(unittest.TestCase):
     def setUp(self):
-        self.host = 'localhost'
+        self.host = "localhost"
         try:
             socket.gethostbyname(self.host)
         except:
             self.skipTest("those tests require specific DB configuration")
-        self.db = 'template1'
+        self.db = "template1"
         self.user = getlogin()
         self.passwd = getlogin()
-        self.old_drivers = PREFERED_DRIVERS['postgres'][:]
+        self.old_drivers = PREFERED_DRIVERS["postgres"][:]
 
     def tearDown(self):
         """Reset PREFERED_DRIVERS as it was"""
-        PREFERED_DRIVERS['postgres'] = self.old_drivers
+        PREFERED_DRIVERS["postgres"] = self.old_drivers
 
     def testMysql(self):
-        PREFERED_DRIVERS['mysql'] = ['MySQLdb']
+        PREFERED_DRIVERS["mysql"] = ["MySQLdb"]
         try:
             import MySQLdb
         except ImportError:
-            self.skipTest('python-mysqldb is not installed')
-        try:
-            cnx = get_connection('mysql', self.host, database='', user='root',
-                                 quiet=1)
-        except  MySQLdb.OperationalError as ex:
-            if ex.args[0] == 1045: # find MysqlDb
-                self.skipTest('mysql test requires a specific configuration')
+            self.skipTest("python-mysqldb is not installed")
+        try:
+            cnx = get_connection("mysql", self.host, database="", user="root", quiet=1)
+        except MySQLdb.OperationalError as ex:
+            if ex.args[0] == 1045:  # find MysqlDb
+                self.skipTest("mysql test requires a specific configuration")
             elif ex.args[0] in (2002, 2003):
-                self.skipTest('could not connect to mysql')
+                self.skipTest("could not connect to mysql")
             raise
 
     def test_connection_wrap(self):
@@ -114,37 +112,45 @@ class GetCnxTC(unittest.TestCase):
         try:
             import psycopg2
         except ImportError:
-            self.skipTest('psycopg2 module not installed')
-        try:
-            cnx = get_connection('postgres',
-                                 self.host, self.db, self.user, self.passwd,
-                                 quiet=1)
+            self.skipTest("psycopg2 module not installed")
+        try:
+            cnx = get_connection(
+                "postgres", self.host, self.db, self.user, self.passwd, quiet=1
+            )
         except psycopg2.OperationalError as ex:
-            self.skipTest('pgsql test requires a specific configuration')
-        self.failIf(isinstance(cnx, PyConnection),
-                    'cnx should *not* be a PyConnection instance')
-        cnx = get_connection('postgres',
-                             self.host, self.db, self.user, self.passwd,
-                             quiet=1, pywrap=True)
-        self.failUnless(isinstance(cnx, PyConnection),
-                        'cnx should be a PyConnection instance')
-
+            self.skipTest("pgsql test requires a specific configuration")
+        self.failIf(
+            isinstance(cnx, PyConnection), "cnx should *not* be a PyConnection instance"
+        )
+        cnx = get_connection(
+            "postgres", self.host, self.db, self.user, self.passwd, quiet=1, pywrap=True
+        )
+        self.failUnless(
+            isinstance(cnx, PyConnection), "cnx should be a PyConnection instance"
+        )
 
     def test_cursor_wrap(self):
         """Tests cursor wrapping"""
         try:
             import psycopg2
         except ImportError:
-            self.skipTest('psycopg2 module not installed')
-        try:
-            cnx = get_connection('postgres',
-                                 self.host, self.db, self.user, self.passwd,
-                                 quiet=1, pywrap=True)
+            self.skipTest("psycopg2 module not installed")
+        try:
+            cnx = get_connection(
+                "postgres",
+                self.host,
+                self.db,
+                self.user,
+                self.passwd,
+                quiet=1,
+                pywrap=True,
+            )
         except psycopg2.OperationalError as ex:
-            self.skipTest('pgsql test requires a specific configuration')
+            self.skipTest("pgsql test requires a specific configuration")
         cursor = cnx.cursor()
-        self.failUnless(isinstance(cursor, PyCursor),
-                        'cnx should be a PyCursor instance')
+        self.failUnless(
+            isinstance(cursor, PyCursor), "cnx should be a PyCursor instance"
+        )
 
 
 class DBAPIAdaptersTC(unittest.TestCase):
@@ -152,111 +158,126 @@ class DBAPIAdaptersTC(unittest.TestCase)
 
     def setUp(self):
         """Memorize original PREFERED_DRIVERS"""
-        self.old_drivers = PREFERED_DRIVERS['postgres'][:]
+        self.old_drivers = PREFERED_DRIVERS["postgres"][:]
 
     def tearDown(self):
         """Reset PREFERED_DRIVERS as it was"""
-        PREFERED_DRIVERS['postgres'] = self.old_drivers
+        PREFERED_DRIVERS["postgres"] = self.old_drivers
 
     def test_raise(self):
-        self.assertRaises(UnknownDriver, get_dbapi_compliant_module, 'pougloup')
+        self.assertRaises(UnknownDriver, get_dbapi_compliant_module, "pougloup")
 
     def test_adv_func_helper(self):
         try:
-            helper = get_db_helper('postgres')
-        except ImportError:
-            self.skipTest('postgresql dbapi module not installed')
+            helper = get_db_helper("postgres")
+        except ImportError:
+            self.skipTest("postgresql dbapi module not installed")
         self.failUnless(isinstance(helper, _PGAdvFuncHelper))
         try:
-            helper = get_db_helper('sqlite')
-        except ImportError:
-            self.skipTest('sqlite dbapi module not installed')
+            helper = get_db_helper("sqlite")
+        except ImportError:
+            self.skipTest("sqlite dbapi module not installed")
         self.failUnless(isinstance(helper, _SqliteAdvFuncHelper))
 
-
     def test_register_funcdef(self):
         class MYFUNC(FunctionDescr):
-            supported_backends = ('postgres', )
-            name_mapping = {'postgres': 'MYFUNC',
-                            'mysql': 'MYF',
-                            'sqlite': 'SQLITE_MYFUNC'}
+            supported_backends = ("postgres",)
+            name_mapping = {
+                "postgres": "MYFUNC",
+                "mysql": "MYF",
+                "sqlite": "SQLITE_MYFUNC",
+            }
+
         register_function(MYFUNC)
 
-        pghelper = get_db_helper('postgres')
-        slhelper = get_db_helper('sqlite')
-        self.assertRaises(UnsupportedFunction, slhelper.function_description, 'MYFUNC')
-        try:
-            pghelper.function_description('MYFUNC')
+        pghelper = get_db_helper("postgres")
+        slhelper = get_db_helper("sqlite")
+        self.assertRaises(UnsupportedFunction, slhelper.function_description, "MYFUNC")
+        try:
+            pghelper.function_description("MYFUNC")
         except UnsupportedFunction:
             self.fail('MYFUNC should support "postgres"')
 
     def test_funcname_with_different_backend_names(self):
         class MYFUNC(FunctionDescr):
-            supported_backends = ('postgres', 'mysql', 'sqlite')
-            name_mapping = {'postgres': 'MYFUNC',
-                            'mysql': 'MYF',
-                            'sqlite': 'SQLITE_MYFUNC'}
+            supported_backends = ("postgres", "mysql", "sqlite")
+            name_mapping = {
+                "postgres": "MYFUNC",
+                "mysql": "MYF",
+                "sqlite": "SQLITE_MYFUNC",
+            }
+
         register_function(MYFUNC)
 
-        pghelper = get_db_helper('postgres')
-        try:
-            mshelper = get_db_helper('mysql')
+        pghelper = get_db_helper("postgres")
+        try:
+            mshelper = get_db_helper("mysql")
         except ImportError:
             mshelper = None
-        slhelper = get_db_helper('sqlite')
-        self.assertEqual(slhelper.func_as_sql('MYFUNC', ()), 'SQLITE_MYFUNC()')
-        self.assertEqual(pghelper.func_as_sql('MYFUNC', ('foo',)), 'MYFUNC(foo)')
+        slhelper = get_db_helper("sqlite")
+        self.assertEqual(slhelper.func_as_sql("MYFUNC", ()), "SQLITE_MYFUNC()")
+        self.assertEqual(pghelper.func_as_sql("MYFUNC", ("foo",)), "MYFUNC(foo)")
         if mshelper is not None:
-            self.assertEqual(mshelper.func_as_sql('MYFUNC', ('foo', 'bar')), 'MYF(foo, bar)')
+            self.assertEqual(
+                mshelper.func_as_sql("MYFUNC", ("foo", "bar")), "MYF(foo, bar)"
+            )
+
 
 class BaseSqlServer(unittest.TestCase):
     def tearDown(self):
         cursor = self.cnx.cursor()
-        cursor.execute('drop table TestBlob')
-        cursor.execute('drop table TestLargeString')
+        cursor.execute("drop table TestBlob")
+        cursor.execute("drop table TestLargeString")
         self.cnx.commit()
         cursor.close()
         self.cnx.close()
 
     def blob(self):
         cursor = self.cnx.cursor()
-        data_length = range(400*1024-10, 400*1024+10)
+        data_length = range(400 * 1024 - 10, 400 * 1024 + 10)
         for length in data_length:
-            data = buffer('\x00'*length)
+            data = buffer("\x00" * length)
             print("inserting string of length", len(data))
-            cursor.execute('insert into TestBlob(id, data) VALUES(%(id)s, %(data)s)',
-                           {'id': length, 'data': data})
+            cursor.execute(
+                "insert into TestBlob(id, data) VALUES(%(id)s, %(data)s)",
+                {"id": length, "data": data},
+            )
             self.cnx.commit()
-        cursor.execute('select count(*) from TestBlob')
-        print('%d rows in table' % (cursor.fetchone()[0]))
+        cursor.execute("select count(*) from TestBlob")
+        print("%d rows in table" % (cursor.fetchone()[0]))
         cursor.close()
 
     def large_string(self):
         cursor = self.cnx.cursor()
-        data_length = range(400*1024-10, 400*1024+10)
+        data_length = range(400 * 1024 - 10, 400 * 1024 + 10)
         for length in data_length:
-            data = '1'*length
+            data = "1" * length
             print("inserting string of length", len(data))
-            cursor.execute('insert into TestLargeString(id, data) VALUES(%(id)s, %(data)s)',
-                           {'id': length, 'data': data})
+            cursor.execute(
+                "insert into TestLargeString(id, data) VALUES(%(id)s, %(data)s)",
+                {"id": length, "data": data},
+            )
             self.cnx.commit()
-        cursor.execute('select count(*) from TestLargeString')
-        print('%d rows in table' % (cursor.fetchone()[0]))
+        cursor.execute("select count(*) from TestLargeString")
+        print("%d rows in table" % (cursor.fetchone()[0]))
         cursor.close()
 
     def varbinary_none(self):
         cursor = self.cnx.cursor()
-        cursor.execute('insert into TestBlob (id) values (42)')
+        cursor.execute("insert into TestBlob (id) values (42)")
         self.cnx.commit()
-        cursor.execute('select * from TestBlob where id=42')
+        cursor.execute("select * from TestBlob where id=42")
         print(cursor.fetchall())
-        cursor.execute('update TestBlob set id=43, data=NULL where id=42')
+        cursor.execute("update TestBlob set id=43, data=NULL where id=42")
         self.cnx.commit()
-        cursor.execute('select * from TestBlob where id=43')
+        cursor.execute("select * from TestBlob where id=43")
         print(cursor.fetchall())
-        cursor.execute('update TestBlob set id = %(id)s, data=%(data)s where id=%(old_id)s', {'data': None, 'id': 42, 'old_id': 43})
+        cursor.execute(
+            "update TestBlob set id = %(id)s, data=%(data)s where id=%(old_id)s",
+            {"data": None, "id": 42, "old_id": 43},
+        )
         self.cnx.commit()
-        cursor.execute('select * from TestBlob where id=42')
+        cursor.execute("select * from TestBlob where id=42")
         print(cursor.fetchall())
         cursor.close()
 
@@ -266,17 +287,24 @@ try:
 except ImportError:
     print("pyodbc tests skipped")
 else:
+
     class pyodbcTC(BaseSqlServer):
         def setUp(self):
             try:
-                self.cnx = get_connection(driver='sqlserver2005', database='alf',
-                                      host='localhost', extra_args='Trusted_Connection')
+                self.cnx = get_connection(
+                    driver="sqlserver2005",
+                    database="alf",
+                    host="localhost",
+                    extra_args="Trusted_Connection",
+                )
             except pyodbc.Error as exc:
                 self.skipTest(str(exc))
             cursor = self.cnx.cursor()
             try:
-                cursor.execute('create table TestLargeString (id int, data varchar(max))')
-                cursor.execute('create table TestBlob (id int, data varbinary(max))')
+                cursor.execute(
+                    "create table TestLargeString (id int, data varchar(max))"
+                )
+                cursor.execute("create table TestBlob (id int, data varbinary(max))")
             except Exception as exc:
                 print(exc)
             cursor.close()
@@ -290,23 +318,31 @@ else:
         def test_varbinary_none(self):
             self.varbinary_none()
 
+
 try:
     import adodbapi as adb
 except ImportError:
     print("adodbapi tests skipped")
 else:
+
     class adodbapiTC(BaseSqlServer):
         def setUp(self):
             try:
-                self.cnx = get_connection(driver='sqlserver2005', database='alf',
-                                      host='localhost', extra_args='Trusted_Connection')
+                self.cnx = get_connection(
+                    driver="sqlserver2005",
+                    database="alf",
+                    host="localhost",
+                    extra_args="Trusted_Connection",
+                )
             except adb.Error as exc:
                 self.skipTest(str(exc))
             cursor = self.cnx.cursor()
             try:
 
-                cursor.execute('create table TestLargeString (id int, data varchar(max))')
-                cursor.execute('create table TestBlob (id int, data varbinary(max))')
+                cursor.execute(
+                    "create table TestLargeString (id int, data varchar(max))"
+                )
+                cursor.execute("create table TestBlob (id int, data varbinary(max))")
             except Exception as exc:
                 print(exc)
             cursor.close()
@@ -322,27 +358,29 @@ else:
 
 
 class PostgresqlDatabaseSchemaTC(unittest.TestCase):
-    host = 'localhost'
-    database = 'template1'
+    host = "localhost"
+    database = "template1"
     user = password = getlogin()
-    schema = 'tests'
+    schema = "tests"
 
     def setUp(self):
         try:
-            self.module = get_dbapi_compliant_module('postgres')
-        except ImportError:
-            self.skipTest('postgresql dbapi module not installed')
+            self.module = get_dbapi_compliant_module("postgres")
+        except ImportError:
+            self.skipTest("postgresql dbapi module not installed")
         try:
             cnx = self.get_connection()
         except Exception:
-            self.skipTest('could not connect to %s:%s@%s/%s'
-                          % (self.user, self.password, self.host, self.database))
-        self._execute(cnx, 'CREATE SCHEMA %s' % self.schema)
+            self.skipTest(
+                "could not connect to %s:%s@%s/%s"
+                % (self.user, self.password, self.host, self.database)
+            )
+        self._execute(cnx, "CREATE SCHEMA %s" % self.schema)
         cnx.close()
 
     def tearDown(self):
         cnx = self.get_connection()
-        self._execute(cnx, 'DROP SCHEMA %s' % self.schema)
+        self._execute(cnx, "DROP SCHEMA %s" % self.schema)
         cnx.close()
 
     def _execute(self, cnx, sql):
@@ -353,9 +391,13 @@ class PostgresqlDatabaseSchemaTC(unittes
         cnx.close()
 
     def get_connection(self, schema=None):
-        return self.module.connect(host=self.host, database=self.database,
-                                   user=self.user, password=self.password,
-                                   schema=schema)
+        return self.module.connect(
+            host=self.host,
+            database=self.database,
+            user=self.user,
+            password=self.password,
+            schema=schema,
+        )
 
     def assertRsetEqual(self, rset, expected_rset):
         # NOTE: different drivers will use different result structures
@@ -369,40 +411,42 @@ class PostgresqlDatabaseSchemaTC(unittes
         cnx = self.get_connection(schema=self.schema)
         cursor = cnx.cursor()
         try:
-            cursor.execute('CREATE TABLE x(x integer)')
-            cursor.execute('INSERT INTO x VALUES(12)')
-            cursor.execute('SELECT x from x')
+            cursor.execute("CREATE TABLE x(x integer)")
+            cursor.execute("INSERT INTO x VALUES(12)")
+            cursor.execute("SELECT x from x")
             self.assertRsetEqual(cursor.fetchall(), [[12]])
-            cursor.execute('SELECT x from tests.x')
+            cursor.execute("SELECT x from tests.x")
             self.assertRsetEqual(cursor.fetchall(), [[12]])
-            self.assertRaises(self.module.Error, cursor.execute, 'SELECT x from public.x')
+            self.assertRaises(
+                self.module.Error, cursor.execute, "SELECT x from public.x"
+            )
         finally:
             cnx.rollback()
             cnx.close()
 
     def test_list_tables(self):
-        helper = get_db_helper('postgres')
+        helper = get_db_helper("postgres")
         cnx = self.get_connection(schema=self.schema)
         cursor = cnx.cursor()
         try:
-            cursor.execute('CREATE TABLE x(x integer)')
-            self.assertNotIn('x', helper.list_tables(cursor))
-            self.assertIn('x', helper.list_tables(cursor, schema=self.schema))
+            cursor.execute("CREATE TABLE x(x integer)")
+            self.assertNotIn("x", helper.list_tables(cursor))
+            self.assertIn("x", helper.list_tables(cursor, schema=self.schema))
         finally:
             cnx.close()
 
     def test_list_indices(self):
-        helper = get_db_helper('postgres')
+        helper = get_db_helper("postgres")
         cnx = self.get_connection(schema=self.schema)
         cursor = cnx.cursor()
         try:
-            cursor.execute('CREATE TABLE x(x integer)')
-            cursor.execute('CREATE INDEX x_idx ON x(x)')
-            self.assertIn('x_idx', helper.list_indices(cursor))
-            self.assertIn('x_idx', helper.list_indices(cursor, table='x'))
+            cursor.execute("CREATE TABLE x(x integer)")
+            cursor.execute("CREATE INDEX x_idx ON x(x)")
+            self.assertIn("x_idx", helper.list_indices(cursor))
+            self.assertIn("x_idx", helper.list_indices(cursor, table="x"))
         finally:
             cnx.close()
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/test/unittest_fti.py b/test/unittest_fti.py
--- a/test/unittest_fti.py
+++ b/test/unittest_fti.py
@@ -20,8 +20,8 @@ import unittest
 
 from logilab.common.testlib import MockConnection
 
-from logilab.database.fti import FTIndexerMixIn, tokenize, normalize, \
-     StopWord
+from logilab.database.fti import FTIndexerMixIn, tokenize, normalize, StopWord
+
 
 def _tokenize(string):
     words = []
@@ -34,77 +34,113 @@ def _tokenize(string):
 
 
 class TokenizeTC(unittest.TestCase):
-
     def test_utf8(self):
-        self.assertEqual(_tokenize(u'n°2'),
-                          ['n2'])
+        self.assertEqual(_tokenize(u"n°2"), ["n2"])
 
     def test_numbers(self):
-        self.assertEqual(_tokenize(u'123'),
-                          ['123'])
+        self.assertEqual(_tokenize(u"123"), ["123"])
 
     def test_aphostrophe(self):
-        self.assertEqual(_tokenize(u"l\u2019Échelle"),
-                          ['echelle'])
+        self.assertEqual(_tokenize(u"l\u2019Échelle"), ["echelle"])
 
 
 class IndexableObject:
     entity_weight = 1.0
+
     def get_words(self):
-        return {'A': tokenize(u'gïnco-jpl blâ blîp blôp blàp'),
-                'B': tokenize(u'cubic 456')}
+        return {
+            "A": tokenize(u"gïnco-jpl blâ blîp blôp blàp"),
+            "B": tokenize(u"cubic 456"),
+        }
 
 
 class IndexerTC(unittest.TestCase):
-
     def setUp(self):
-        self.cnx = MockConnection( ([1, 2],) )
+        self.cnx = MockConnection(([1, 2],))
         self.indexer = FTIndexerMixIn()
         self.indexer._cnx = self.cnx
-        #Indexer('sqlite', self.cnx)
+        # Indexer('sqlite', self.cnx)
 
     def test_index_object(self):
         self.indexer.index_object(1, IndexableObject())
-        self.assertListEqual(self.cnx.received,
-                          [('SELECT word_id FROM word WHERE word=%(word)s;', {'word': 'ginco'}),
-                           ('INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);', {'position': 0, 'wid': 1, 'uid': 1}),
-                           ('SELECT word_id FROM word WHERE word=%(word)s;', {'word': 'jpl'}),
-                           ('INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);', {'position': 1, 'wid': 1, 'uid': 1}),
-                           ('SELECT word_id FROM word WHERE word=%(word)s;', {'word': 'bla'}),
-                           ('INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);', {'position': 2, 'wid': 1, 'uid': 1}),
-                           ('SELECT word_id FROM word WHERE word=%(word)s;', {'word': 'blip'}),
-                           ('INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);', {'position': 3, 'wid': 1, 'uid': 1}),
-                           ('SELECT word_id FROM word WHERE word=%(word)s;', {'word': 'blop'}),
-                           ('INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);', {'position': 4, 'wid': 1, 'uid': 1}),
-                           ('SELECT word_id FROM word WHERE word=%(word)s;', {'word': 'blap'}),
-                           ('INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);', {'position': 5, 'wid': 1, 'uid': 1}),
-                           ('SELECT word_id FROM word WHERE word=%(word)s;', {'word': 'cubic'}),
-                           ('INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);', {'position': 6, 'wid': 1, 'uid': 1}),
-                           ('SELECT word_id FROM word WHERE word=%(word)s;', {'word': '456'}),
-                           ('INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);', {'position': 7, 'wid': 1, 'uid': 1})])
+        self.assertListEqual(
+            self.cnx.received,
+            [
+                ("SELECT word_id FROM word WHERE word=%(word)s;", {"word": "ginco"}),
+                (
+                    "INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);",
+                    {"position": 0, "wid": 1, "uid": 1},
+                ),
+                ("SELECT word_id FROM word WHERE word=%(word)s;", {"word": "jpl"}),
+                (
+                    "INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);",
+                    {"position": 1, "wid": 1, "uid": 1},
+                ),
+                ("SELECT word_id FROM word WHERE word=%(word)s;", {"word": "bla"}),
+                (
+                    "INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);",
+                    {"position": 2, "wid": 1, "uid": 1},
+                ),
+                ("SELECT word_id FROM word WHERE word=%(word)s;", {"word": "blip"}),
+                (
+                    "INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);",
+                    {"position": 3, "wid": 1, "uid": 1},
+                ),
+                ("SELECT word_id FROM word WHERE word=%(word)s;", {"word": "blop"}),
+                (
+                    "INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);",
+                    {"position": 4, "wid": 1, "uid": 1},
+                ),
+                ("SELECT word_id FROM word WHERE word=%(word)s;", {"word": "blap"}),
+                (
+                    "INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);",
+                    {"position": 5, "wid": 1, "uid": 1},
+                ),
+                ("SELECT word_id FROM word WHERE word=%(word)s;", {"word": "cubic"}),
+                (
+                    "INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);",
+                    {"position": 6, "wid": 1, "uid": 1},
+                ),
+                ("SELECT word_id FROM word WHERE word=%(word)s;", {"word": "456"}),
+                (
+                    "INSERT INTO appears(uid, word_id, pos) VALUES (%(uid)s,%(wid)s,%(position)s);",
+                    {"position": 7, "wid": 1, "uid": 1},
+                ),
+            ],
+        )
 
     def test_fulltext_search(self):
-        list(self.indexer.fulltext_search(u'ginco'))
-        self.assertEqual(self.cnx.received,
-                          [('SELECT count(*) as rating, appears0.uid FROM appears as appears0, word as word0 WHERE word0.word = %(word0)s  AND word0.word_id = appears0.word_id  GROUP BY appears0.uid ;',
-                            {'word0': 'ginco'})
-                           ])
+        list(self.indexer.fulltext_search(u"ginco"))
+        self.assertEqual(
+            self.cnx.received,
+            [
+                (
+                    "SELECT count(*) as rating, appears0.uid FROM appears as appears0, word as word0 WHERE word0.word = %(word0)s  AND word0.word_id = appears0.word_id  GROUP BY appears0.uid ;",
+                    {"word0": "ginco"},
+                )
+            ],
+        )
 
     def test_fulltext_search2(self):
-        list(self.indexer.fulltext_search(u'ginco-jpl'))
-        self.assertEqual(self.cnx.received,
-                          [('SELECT count(*) as rating, appears0.uid FROM appears as appears0, word as word0, appears as appears1, word as word1 WHERE word0.word = %(word0)s  AND word0.word_id = appears0.word_id  AND word1.word = %(word1)s  AND word1.word_id = appears1.word_id  AND appears0.uid = appears1.uid  GROUP BY appears0.uid ;',
-                            {'word1': 'jpl', 'word0': 'ginco'})
-                           ])
+        list(self.indexer.fulltext_search(u"ginco-jpl"))
+        self.assertEqual(
+            self.cnx.received,
+            [
+                (
+                    "SELECT count(*) as rating, appears0.uid FROM appears as appears0, word as word0, appears as appears1, word as word1 WHERE word0.word = %(word0)s  AND word0.word_id = appears0.word_id  AND word1.word = %(word1)s  AND word1.word_id = appears1.word_id  AND appears0.uid = appears1.uid  GROUP BY appears0.uid ;",
+                    {"word1": "jpl", "word0": "ginco"},
+                )
+            ],
+        )
 
 
 class GetSchemaTC(unittest.TestCase):
-
     def test(self):
         indexer = FTIndexerMixIn()
-        indexer.sql_create_sequence = lambda x: 'CREATE SEQUENCE %s;' % x
-        self.assertEqual(indexer.sql_init_fti(),
-                          '''
+        indexer.sql_create_sequence = lambda x: "CREATE SEQUENCE %s;" % x
+        self.assertEqual(
+            indexer.sql_init_fti(),
+            """
 CREATE SEQUENCE word_id_seq;
 
 CREATE TABLE word (
@@ -120,7 +156,9 @@ CREATE TABLE appears(
 
 CREATE INDEX appears_uid ON appears (uid);
 CREATE INDEX appears_word_id ON appears (word_id);
-''')
+""",
+        )
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/test/unittest_mysql.py b/test/unittest_mysql.py
--- a/test/unittest_mysql.py
+++ b/test/unittest_mysql.py
@@ -25,33 +25,44 @@ from unittest_fti import IndexableObject
 
 
 class MyHelperTC(unittest.TestCase):
-
     def setUp(self):
-        self.cnx = MockConnection( () )
+        self.cnx = MockConnection(())
         try:
-            self.helper = get_db_helper('mysql')
+            self.helper = get_db_helper("mysql")
         except ImportError as exc:
             raise unittest.SkipTest(str(exc))
         self.helper._cnx = self.cnx
 
     def test_type_map(self):
-        self.assertEqual(self.helper.TYPE_MAPPING['Datetime'], 'datetime')
-        self.assertEqual(self.helper.TYPE_MAPPING['String'], 'mediumtext')
-        self.assertEqual(self.helper.TYPE_MAPPING['Password'], 'tinyblob')
-        self.assertEqual(self.helper.TYPE_MAPPING['Bytes'], 'longblob')
+        self.assertEqual(self.helper.TYPE_MAPPING["Datetime"], "datetime")
+        self.assertEqual(self.helper.TYPE_MAPPING["String"], "mediumtext")
+        self.assertEqual(self.helper.TYPE_MAPPING["Password"], "tinyblob")
+        self.assertEqual(self.helper.TYPE_MAPPING["Bytes"], "longblob")
 
     def test_index_object(self):
         self.helper.index_object(1, IndexableObject())
-        self.assertEqual(self.cnx.received,
-                          [('INSERT INTO appears(uid, words) VALUES (%(uid)s, %(wrds)s);',
-                            {'wrds': 'ginco jpl bla blip blop blap cubic 456', 'uid': 1})])
+        self.assertEqual(
+            self.cnx.received,
+            [
+                (
+                    "INSERT INTO appears(uid, words) VALUES (%(uid)s, %(wrds)s);",
+                    {"wrds": "ginco jpl bla blip blop blap cubic 456", "uid": 1},
+                )
+            ],
+        )
 
     def test_fulltext_search(self):
-        self.helper.fulltext_search(u'ginco-jpl')
-        self.assertEqual(self.cnx.received,
-                          [('SELECT 1, uid FROM appears WHERE MATCH (words) AGAINST (%(words)s IN BOOLEAN MODE)',
-                            {'words': 'ginco jpl'})])
+        self.helper.fulltext_search(u"ginco-jpl")
+        self.assertEqual(
+            self.cnx.received,
+            [
+                (
+                    "SELECT 1, uid FROM appears WHERE MATCH (words) AGAINST (%(words)s IN BOOLEAN MODE)",
+                    {"words": "ginco jpl"},
+                )
+            ],
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/test/unittest_postgres.py b/test/unittest_postgres.py
--- a/test/unittest_postgres.py
+++ b/test/unittest_postgres.py
@@ -26,47 +26,74 @@ from logilab.database import get_db_help
 
 class PGHelperTC(unittest.TestCase):
     def setUp(self):
-        self.helper = get_db_helper('postgres')
-        self.cnx = MockConnection( () )
+        self.helper = get_db_helper("postgres")
+        self.cnx = MockConnection(())
         self.helper._cnx = self.cnx
 
     def test_type_map(self):
-        self.assertEqual(self.helper.TYPE_MAPPING['Datetime'], 'timestamp')
-        self.assertEqual(self.helper.TYPE_MAPPING['String'], 'text')
-        self.assertEqual(self.helper.TYPE_MAPPING['Password'], 'bytea')
-        self.assertEqual(self.helper.TYPE_MAPPING['Bytes'], 'bytea')
+        self.assertEqual(self.helper.TYPE_MAPPING["Datetime"], "timestamp")
+        self.assertEqual(self.helper.TYPE_MAPPING["String"], "text")
+        self.assertEqual(self.helper.TYPE_MAPPING["Password"], "bytea")
+        self.assertEqual(self.helper.TYPE_MAPPING["Bytes"], "bytea")
 
     def test_index_object(self):
         self.helper.index_object(1, IndexableObject())
-        self.assertEqual(self.cnx.received,
-                          [("INSERT INTO appears(uid, words, weight) "
-                            "VALUES (%(uid)s, setweight(to_tsvector(%(config)s, %(wrds_A)s), 'A')||setweight(to_tsvector(%(config)s, %(wrds_B)s), 'B'), 1.0);",
-                            {'wrds_B': 'cubic 456',
-                             'wrds_A': 'ginco jpl bla blip blop blap',
-                             'config': 'default',
-                             'uid': 1})])
+        self.assertEqual(
+            self.cnx.received,
+            [
+                (
+                    "INSERT INTO appears(uid, words, weight) "
+                    "VALUES (%(uid)s, setweight(to_tsvector(%(config)s, %(wrds_A)s), 'A')||setweight(to_tsvector(%(config)s, %(wrds_B)s), 'B'), 1.0);",
+                    {
+                        "wrds_B": "cubic 456",
+                        "wrds_A": "ginco jpl bla blip blop blap",
+                        "config": "default",
+                        "uid": 1,
+                    },
+                )
+            ],
+        )
 
     def test_fulltext_search_base(self):
-        self.helper.fulltext_search(u'ginco-jpl')
-        self.assertEqual(self.cnx.received,
-                          [("SELECT 1, uid FROM appears WHERE words @@ to_tsquery(%(config)s, %(words)s)",
-                            {'config': 'default', 'words': 'ginco&jpl'})])
+        self.helper.fulltext_search(u"ginco-jpl")
+        self.assertEqual(
+            self.cnx.received,
+            [
+                (
+                    "SELECT 1, uid FROM appears WHERE words @@ to_tsquery(%(config)s, %(words)s)",
+                    {"config": "default", "words": "ginco&jpl"},
+                )
+            ],
+        )
 
     def test_fulltext_search_prefix_1(self):
-        self.helper.fulltext_search(u'ginco*')
-        self.assertEqual(self.cnx.received,
-                          [("SELECT 1, uid FROM appears WHERE words @@ to_tsquery(%(config)s, %(words)s)",
-                            {'config': 'default', 'words': 'ginco:*'})])
+        self.helper.fulltext_search(u"ginco*")
+        self.assertEqual(
+            self.cnx.received,
+            [
+                (
+                    "SELECT 1, uid FROM appears WHERE words @@ to_tsquery(%(config)s, %(words)s)",
+                    {"config": "default", "words": "ginco:*"},
+                )
+            ],
+        )
 
     def test_fulltext_search_prefix_2(self):
-        self.helper.fulltext_search(u'ginc*o')
-        self.assertEqual(self.cnx.received,
-                          [("SELECT 1, uid FROM appears WHERE words @@ to_tsquery(%(config)s, %(words)s)",
-                            {'config': 'default', 'words': 'ginc:*o'})])
+        self.helper.fulltext_search(u"ginc*o")
+        self.assertEqual(
+            self.cnx.received,
+            [
+                (
+                    "SELECT 1, uid FROM appears WHERE words @@ to_tsquery(%(config)s, %(words)s)",
+                    {"config": "default", "words": "ginc:*o"},
+                )
+            ],
+        )
 
     # def test_embedded_tsearch2_is_found(self):
     #     # just make sure that something is found
     #     fullpath = self.helper.find_tsearch2_schema()
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/test/unittest_sqlgen.py b/test/unittest_sqlgen.py
--- a/test/unittest_sqlgen.py
+++ b/test/unittest_sqlgen.py
@@ -23,138 +23,232 @@ from logilab.database.sqlgen import SQLG
 
 
 class SQLGenTC(unittest.TestCase):
-
     def test_set_values(self):
         s = SQLGenerator()
-        self.assertEqual(s.set(['nom']), 'nom = %(nom)s')
-        self.assertEqual(s.set(['nom','prenom']), 'nom = %(nom)s, prenom = %(prenom)s')
-        params = {'nom': 'dupont', 'prenom': 'jean'}
-        self.assertEqual(s.set(params), 'nom = %(nom)s, prenom = %(prenom)s')
-        self.assertEqual(params, {'nom': 'dupont', 'prenom': 'jean'})
+        self.assertEqual(s.set(["nom"]), "nom = %(nom)s")
+        self.assertEqual(s.set(["nom", "prenom"]), "nom = %(nom)s, prenom = %(prenom)s")
+        params = {"nom": "dupont", "prenom": "jean"}
+        self.assertEqual(s.set(params), "nom = %(nom)s, prenom = %(prenom)s")
+        self.assertEqual(params, {"nom": "dupont", "prenom": "jean"})
 
     def test_set_functions(self):
         s = SQLGenerator()
-        params = {'nom': 'dupont', 'prenom': 'jean', 'age': SQLExpression('YEARS(%(date)s)', date='2013/01/01')}
-        self.assertEqual(s.set(params), 'age = YEARS(%(date)s), nom = %(nom)s, prenom = %(prenom)s')
-        self.assertEqual(params, {'nom': 'dupont', 'prenom': 'jean', 'date': '2013/01/01'})
+        params = {
+            "nom": "dupont",
+            "prenom": "jean",
+            "age": SQLExpression("YEARS(%(date)s)", date="2013/01/01"),
+        }
+        self.assertEqual(
+            s.set(params), "age = YEARS(%(date)s), nom = %(nom)s, prenom = %(prenom)s"
+        )
+        self.assertEqual(
+            params, {"nom": "dupont", "prenom": "jean", "date": "2013/01/01"}
+        )
 
     def test_where_values(self):
         s = SQLGenerator()
-        self.assertEqual(s.where(['nom']), 'nom = %(nom)s')
-        self.assertEqual(s.where(['nom','prenom']), 'nom = %(nom)s AND prenom = %(prenom)s')
-        self.assertEqual(s.where(['nom','prenom'], 'x.id = y.id'),
-                         'x.id = y.id AND nom = %(nom)s AND prenom = %(prenom)s')
-        params = {'nom': 'dupont', 'prenom': 'jean'}
-        self.assertEqual(s.where(params), 'nom = %(nom)s AND prenom = %(prenom)s')
-        self.assertEqual(s.where(params, 'x.id = y.id'),
-                         'x.id = y.id AND nom = %(nom)s AND prenom = %(prenom)s')
+        self.assertEqual(s.where(["nom"]), "nom = %(nom)s")
+        self.assertEqual(
+            s.where(["nom", "prenom"]), "nom = %(nom)s AND prenom = %(prenom)s"
+        )
+        self.assertEqual(
+            s.where(["nom", "prenom"], "x.id = y.id"),
+            "x.id = y.id AND nom = %(nom)s AND prenom = %(prenom)s",
+        )
+        params = {"nom": "dupont", "prenom": "jean"}
+        self.assertEqual(s.where(params), "nom = %(nom)s AND prenom = %(prenom)s")
+        self.assertEqual(
+            s.where(params, "x.id = y.id"),
+            "x.id = y.id AND nom = %(nom)s AND prenom = %(prenom)s",
+        )
 
     def test_where_functions(self):
         s = SQLGenerator()
-        params = {'nom': 'dupont', 'prenom': 'jean', 'age': SQLExpression('YEARS(%(date)s)', date='2013/01/01')}
-        self.assertEqual(s.where(params), 'age = YEARS(%(date)s) AND nom = %(nom)s AND prenom = %(prenom)s')
-        self.assertEqual(params, {'nom': 'dupont', 'prenom': 'jean', 'date': '2013/01/01'})
-        params = {'nom': 'dupont', 'prenom': 'jean', 'age': SQLExpression('YEARS(%(date)s)', date='2013/01/01')}
-        self.assertEqual(s.where(params, 'x.id = y.id'),
-                         'x.id = y.id AND age = YEARS(%(date)s) AND nom = %(nom)s AND prenom = %(prenom)s')
-        self.assertEqual(params, {'nom': 'dupont', 'prenom': 'jean', 'date': '2013/01/01'})
+        params = {
+            "nom": "dupont",
+            "prenom": "jean",
+            "age": SQLExpression("YEARS(%(date)s)", date="2013/01/01"),
+        }
+        self.assertEqual(
+            s.where(params),
+            "age = YEARS(%(date)s) AND nom = %(nom)s AND prenom = %(prenom)s",
+        )
+        self.assertEqual(
+            params, {"nom": "dupont", "prenom": "jean", "date": "2013/01/01"}
+        )
+        params = {
+            "nom": "dupont",
+            "prenom": "jean",
+            "age": SQLExpression("YEARS(%(date)s)", date="2013/01/01"),
+        }
+        self.assertEqual(
+            s.where(params, "x.id = y.id"),
+            "x.id = y.id AND age = YEARS(%(date)s) AND nom = %(nom)s AND prenom = %(prenom)s",
+        )
+        self.assertEqual(
+            params, {"nom": "dupont", "prenom": "jean", "date": "2013/01/01"}
+        )
 
     def test_insert_values(self):
         s = SQLGenerator()
-        params = {'nom': 'dupont'}
-        sqlstr = s.insert('test', params)
-        self.assertEqual(sqlstr, 'INSERT INTO test ( nom ) VALUES ( %(nom)s )')
-        self.assertEqual(params, {'nom': 'dupont'})
+        params = {"nom": "dupont"}
+        sqlstr = s.insert("test", params)
+        self.assertEqual(sqlstr, "INSERT INTO test ( nom ) VALUES ( %(nom)s )")
+        self.assertEqual(params, {"nom": "dupont"})
 
     def test_insert_functions(self):
         s = SQLGenerator()
-        params = {'nom':'dupont', 'prenom':'jean',
-                  'age': SQLExpression('YEARS(%(date)s)', date='2013/01/01')}
-        sqlstr = s.insert('test', params)
-        self.assertEqual(sqlstr,  'INSERT INTO test ( age, nom, prenom ) VALUES '
-                         '( YEARS(%(date)s), %(nom)s, %(prenom)s )')
-        self.assertEqual(params, {'nom':'dupont', 'prenom':'jean', 'date': '2013/01/01'})
+        params = {
+            "nom": "dupont",
+            "prenom": "jean",
+            "age": SQLExpression("YEARS(%(date)s)", date="2013/01/01"),
+        }
+        sqlstr = s.insert("test", params)
+        self.assertEqual(
+            sqlstr,
+            "INSERT INTO test ( age, nom, prenom ) VALUES "
+            "( YEARS(%(date)s), %(nom)s, %(prenom)s )",
+        )
+        self.assertEqual(
+            params, {"nom": "dupont", "prenom": "jean", "date": "2013/01/01"}
+        )
 
     def test_select_values(self):
         s = SQLGenerator()
-        self.assertEqual(s.select('test',{}), 'SELECT * FROM test')
-        self.assertEqual(s.select('test',{'nom':'dupont'}),
-                         'SELECT * FROM test WHERE nom = %(nom)s')
-        self.assertEqual(s.select('test',{'nom':'dupont','prenom':'jean'}),
-                         'SELECT * FROM test WHERE nom = %(nom)s AND prenom = %(prenom)s')
+        self.assertEqual(s.select("test", {}), "SELECT * FROM test")
+        self.assertEqual(
+            s.select("test", {"nom": "dupont"}),
+            "SELECT * FROM test WHERE nom = %(nom)s",
+        )
+        self.assertEqual(
+            s.select("test", {"nom": "dupont", "prenom": "jean"}),
+            "SELECT * FROM test WHERE nom = %(nom)s AND prenom = %(prenom)s",
+        )
 
     def test_select_functions(self):
         s = SQLGenerator()
-        params = {'nom':'dupont', 'prenom':'jean',
-                  'age': SQLExpression('YEARS(%(date)s)', date='2013/01/01')}
-        self.assertEqual(s.select('test', params),
-                         'SELECT * FROM test WHERE age = YEARS(%(date)s) '
-                         'AND nom = %(nom)s AND prenom = %(prenom)s')
-        self.assertEqual(params, {'nom': 'dupont', 'prenom': 'jean', 'date': '2013/01/01'})
+        params = {
+            "nom": "dupont",
+            "prenom": "jean",
+            "age": SQLExpression("YEARS(%(date)s)", date="2013/01/01"),
+        }
+        self.assertEqual(
+            s.select("test", params),
+            "SELECT * FROM test WHERE age = YEARS(%(date)s) "
+            "AND nom = %(nom)s AND prenom = %(prenom)s",
+        )
+        self.assertEqual(
+            params, {"nom": "dupont", "prenom": "jean", "date": "2013/01/01"}
+        )
 
     def test_adv_select_values(self):
         s = SQLGenerator()
-        self.assertEqual(s.adv_select(['column'],[('test', 't')], {}),
-                         'SELECT column FROM test AS t')
-        self.assertEqual( s.adv_select(['column'],[('test', 't')], {'nom':'dupont'}),
-                          'SELECT column FROM test AS t WHERE nom = %(nom)s')
+        self.assertEqual(
+            s.adv_select(["column"], [("test", "t")], {}),
+            "SELECT column FROM test AS t",
+        )
+        self.assertEqual(
+            s.adv_select(["column"], [("test", "t")], {"nom": "dupont"}),
+            "SELECT column FROM test AS t WHERE nom = %(nom)s",
+        )
 
     def test_adv_select_functions(self):
         s = SQLGenerator()
-        params = {'nom':'dupont', 'prenom':'jean',
-                  'age': SQLExpression('YEARS(%(date)s)', date='2013/01/01')}
-        self.assertEqual( s.adv_select(['column'],[('test', 't')], params),
-                          'SELECT column FROM test AS t WHERE age = YEARS(%(date)s) '
-                         'AND nom = %(nom)s AND prenom = %(prenom)s')
-        self.assertEqual(params, {'nom': 'dupont', 'prenom': 'jean', 'date': '2013/01/01'})
+        params = {
+            "nom": "dupont",
+            "prenom": "jean",
+            "age": SQLExpression("YEARS(%(date)s)", date="2013/01/01"),
+        }
+        self.assertEqual(
+            s.adv_select(["column"], [("test", "t")], params),
+            "SELECT column FROM test AS t WHERE age = YEARS(%(date)s) "
+            "AND nom = %(nom)s AND prenom = %(prenom)s",
+        )
+        self.assertEqual(
+            params, {"nom": "dupont", "prenom": "jean", "date": "2013/01/01"}
+        )
 
     def test_delete_values(self):
         s = SQLGenerator()
-        self.assertEqual(s.delete('test',{'nom':'dupont'}),
-                         'DELETE FROM test WHERE nom = %(nom)s')
-        self.assertEqual(s.delete('test',{'nom':'dupont','prenom':'jean'}),
-                         'DELETE FROM test WHERE nom = %(nom)s AND prenom = %(prenom)s')
+        self.assertEqual(
+            s.delete("test", {"nom": "dupont"}), "DELETE FROM test WHERE nom = %(nom)s"
+        )
+        self.assertEqual(
+            s.delete("test", {"nom": "dupont", "prenom": "jean"}),
+            "DELETE FROM test WHERE nom = %(nom)s AND prenom = %(prenom)s",
+        )
 
     def test_delete_functions(self):
         s = SQLGenerator()
-        params = {'nom':'dupont', 'prenom':'jean',
-                  'age': SQLExpression('YEARS(%(date)s)', date='2013/01/01')}
-        self.assertEqual( s.delete('test', params),
-                          'DELETE FROM test WHERE age = YEARS(%(date)s) '
-                         'AND nom = %(nom)s AND prenom = %(prenom)s')
-        self.assertEqual(params, {'nom': 'dupont', 'prenom': 'jean', 'date': '2013/01/01'})
+        params = {
+            "nom": "dupont",
+            "prenom": "jean",
+            "age": SQLExpression("YEARS(%(date)s)", date="2013/01/01"),
+        }
+        self.assertEqual(
+            s.delete("test", params),
+            "DELETE FROM test WHERE age = YEARS(%(date)s) "
+            "AND nom = %(nom)s AND prenom = %(prenom)s",
+        )
+        self.assertEqual(
+            params, {"nom": "dupont", "prenom": "jean", "date": "2013/01/01"}
+        )
 
     def test_delete_many_values(self):
         s = SQLGenerator()
-        params = {'nom':'dupont', 'eid': '(1, 2, 3)'}
-        self.assertEqual(s.delete_many('test', params),
-                         'DELETE FROM test WHERE eid IN (1, 2, 3) AND nom = %(nom)s')
-        self.assertEqual(params, {'nom':'dupont'})
+        params = {"nom": "dupont", "eid": "(1, 2, 3)"}
+        self.assertEqual(
+            s.delete_many("test", params),
+            "DELETE FROM test WHERE eid IN (1, 2, 3) AND nom = %(nom)s",
+        )
+        self.assertEqual(params, {"nom": "dupont"})
 
     def test_delete_many_functions(self):
         s = SQLGenerator()
-        params = {'nom':'dupont', 'prenom':'jean', 'eid': '(1, 2, 3)',
-                  'age': SQLExpression('YEARS(%(date)s)', date='2013/01/01')}
-        self.assertEqual( s.delete_many('test', params),
-                          'DELETE FROM test WHERE eid IN (1, 2, 3) AND age = YEARS(%(date)s) '
-                          'AND nom = %(nom)s AND prenom = %(prenom)s')
-        self.assertEqual(params, {'nom': 'dupont', 'prenom': 'jean', 'date': '2013/01/01'})
+        params = {
+            "nom": "dupont",
+            "prenom": "jean",
+            "eid": "(1, 2, 3)",
+            "age": SQLExpression("YEARS(%(date)s)", date="2013/01/01"),
+        }
+        self.assertEqual(
+            s.delete_many("test", params),
+            "DELETE FROM test WHERE eid IN (1, 2, 3) AND age = YEARS(%(date)s) "
+            "AND nom = %(nom)s AND prenom = %(prenom)s",
+        )
+        self.assertEqual(
+            params, {"nom": "dupont", "prenom": "jean", "date": "2013/01/01"}
+        )
 
     def test_update_values(self):
         s = SQLGenerator()
-        self.assertEqual(s.update('test', {'id':'001','nom':'dupont'}, ['id']),
-                         'UPDATE test SET nom = %(nom)s WHERE id = %(id)s')
-        self.assertEqual(s.update('test',{'id':'001','nom':'dupont','prenom':'jean'},['id']),
-                         'UPDATE test SET nom = %(nom)s, prenom = %(prenom)s WHERE id = %(id)s')
+        self.assertEqual(
+            s.update("test", {"id": "001", "nom": "dupont"}, ["id"]),
+            "UPDATE test SET nom = %(nom)s WHERE id = %(id)s",
+        )
+        self.assertEqual(
+            s.update("test", {"id": "001", "nom": "dupont", "prenom": "jean"}, ["id"]),
+            "UPDATE test SET nom = %(nom)s, prenom = %(prenom)s WHERE id = %(id)s",
+        )
 
     def test_update_functions(self):
         s = SQLGenerator()
-        params = {'id': '001', 'nom':'dupont', 'prenom':'jean',
-                  'age': SQLExpression('YEARS(%(date)s)', date='2013/01/01')}
-        self.assertEqual( s.update('test', params, ['id']),
-                          'UPDATE test SET age = YEARS(%(date)s), nom = %(nom)s, '
-                          'prenom = %(prenom)s WHERE id = %(id)s')
-        self.assertEqual(params, {'nom': 'dupont', 'prenom': 'jean', 'date': '2013/01/01', 'id': '001'})
-
-if __name__ == '__main__':
+        params = {
+            "id": "001",
+            "nom": "dupont",
+            "prenom": "jean",
+            "age": SQLExpression("YEARS(%(date)s)", date="2013/01/01"),
+        }
+        self.assertEqual(
+            s.update("test", params, ["id"]),
+            "UPDATE test SET age = YEARS(%(date)s), nom = %(nom)s, "
+            "prenom = %(prenom)s WHERE id = %(id)s",
+        )
+        self.assertEqual(
+            params,
+            {"nom": "dupont", "prenom": "jean", "date": "2013/01/01", "id": "001"},
+        )
+
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/test/unittest_sqlite.py b/test/unittest_sqlite.py
--- a/test/unittest_sqlite.py
+++ b/test/unittest_sqlite.py
@@ -28,21 +28,19 @@ from logilab.database import get_connect
 
 
 class SQLiteHelperTC(unittest.TestCase):
-
     def setUp(self):
-        self.cnx = MockConnection( () )
-        self.helper = get_db_helper('sqlite')
+        self.cnx = MockConnection(())
+        self.helper = get_db_helper("sqlite")
 
     def test_type_map(self):
-        self.assertEqual(self.helper.TYPE_MAPPING['TZDatetime'], 'tzdatetime')
-        self.assertEqual(self.helper.TYPE_MAPPING['Datetime'], 'timestamp')
-        self.assertEqual(self.helper.TYPE_MAPPING['String'], 'text')
-        self.assertEqual(self.helper.TYPE_MAPPING['Password'], 'bytea')
-        self.assertEqual(self.helper.TYPE_MAPPING['Bytes'], 'bytea')
+        self.assertEqual(self.helper.TYPE_MAPPING["TZDatetime"], "tzdatetime")
+        self.assertEqual(self.helper.TYPE_MAPPING["Datetime"], "timestamp")
+        self.assertEqual(self.helper.TYPE_MAPPING["String"], "text")
+        self.assertEqual(self.helper.TYPE_MAPPING["Password"], "bytea")
+        self.assertEqual(self.helper.TYPE_MAPPING["Bytes"], "bytea")
 
 
 class SQLiteAdapterTC(unittest.TestCase):
-
     @unittest.expectedFailure
     def test_only_one_lazy_module_initialization(self):
         self.assertFalse(lgdbsqlite._Sqlite3Adapter._module_is_initialized)
@@ -50,19 +48,20 @@ class SQLiteAdapterTC(unittest.TestCase)
         self.assertTrue(adapter._module_is_initialized)
 
     def test_tzsupport(self):
-        cnx = get_connection(database=':memory:', driver='sqlite')
+        cnx = get_connection(database=":memory:", driver="sqlite")
         cu = cnx.cursor()
-        cu.execute('CREATE TABLE tztest(tzt tzdatetime)')
+        cu.execute("CREATE TABLE tztest(tzt tzdatetime)")
         now = datetime.now(tzutc())
-        cu.execute('INSERT INTO tztest VALUES (%(tzt)s)', {'tzt': now})
-        cu.execute('SELECT * FROM tztest')
+        cu.execute("INSERT INTO tztest VALUES (%(tzt)s)", {"tzt": now})
+        cu.execute("SELECT * FROM tztest")
         dbnow = cu.fetchone()[0]
         self.assertEqual(dbnow, now)
 
-        cu.execute('UPDATE tztest SET tzt=(%(tzt)s)', {'tzt': datetime.utcnow()})
-        cu.execute('SELECT * FROM tztest')
+        cu.execute("UPDATE tztest SET tzt=(%(tzt)s)", {"tzt": datetime.utcnow()})
+        cu.execute("SELECT * FROM tztest")
         dbnow = cu.fetchone()[0]
         self.assertEqual(dbnow.tzinfo, tzutc())
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/test/unittest_sqlserver2005.py b/test/unittest_sqlserver2005.py
--- a/test/unittest_sqlserver2005.py
+++ b/test/unittest_sqlserver2005.py
@@ -25,65 +25,80 @@ from logilab.common.testlib import MockC
 from logilab.database import get_db_helper
 
 from logilab import database as db
+
+
 def monkey_patch_import_driver_module(driver, drivers, quiet=True):
     if not driver in drivers:
         raise db.UnknownDriver(driver)
     for modname in drivers[driver]:
         try:
             if not quiet:
-                print('Trying %s' % modname, file=sys.stderr)
+                print("Trying %s" % modname, file=sys.stderr)
             module = db.load_module_from_name(modname, use_sys=False)
             break
         except ImportError:
             if not quiet:
-                print('%s is not available' % modname, file=sys.stderr)
+                print("%s is not available" % modname, file=sys.stderr)
             continue
     else:
         return None, drivers[driver][0]
     return module, modname
 
+
 def setUpModule():
     db._backup_import_driver_module = db._import_driver_module
     db._import_driver_module = monkey_patch_import_driver_module
 
+
 def tearDownModule():
     db._import_driver_module = db._backup_import_driver_module
     del db._backup_import_driver_module
 
+
 class SqlServer2005HelperTC(unittest.TestCase):
     def setUp(self):
-        self.helper = get_db_helper('sqlserver2005')
-        self.cnx = MockConnection( () )
+        self.helper = get_db_helper("sqlserver2005")
+        self.cnx = MockConnection(())
         self.helper._cnx = self.cnx
 
     def test_type_map(self):
-        self.assertEqual(self.helper.TYPE_MAPPING['Datetime'], 'datetime')
-        self.assertEqual(self.helper.TYPE_MAPPING['Date'], 'smalldatetime')
-        self.assertEqual(self.helper.TYPE_MAPPING['String'], 'nvarchar(max)')
-        self.assertEqual(self.helper.TYPE_MAPPING['Password'], 'varbinary(255)')
-        self.assertEqual(self.helper.TYPE_MAPPING['Bytes'], 'varbinary(max)')
+        self.assertEqual(self.helper.TYPE_MAPPING["Datetime"], "datetime")
+        self.assertEqual(self.helper.TYPE_MAPPING["Date"], "smalldatetime")
+        self.assertEqual(self.helper.TYPE_MAPPING["String"], "nvarchar(max)")
+        self.assertEqual(self.helper.TYPE_MAPPING["Password"], "varbinary(255)")
+        self.assertEqual(self.helper.TYPE_MAPPING["Bytes"], "varbinary(max)")
 
     def test_order_by_simple(self):
-        sql = 'SELECT A, B, C FROM Table1, Table2 WHERE Table1.D = Table2.D'
-        new_sql = self.helper.sql_add_order_by(sql, ['A', 'B'], None, False, False)
-        self.assertEqual(new_sql,
-                          'SELECT A, B, C FROM Table1, Table2 WHERE Table1.D = Table2.D\nORDER BY A,B')
+        sql = "SELECT A, B, C FROM Table1, Table2 WHERE Table1.D = Table2.D"
+        new_sql = self.helper.sql_add_order_by(sql, ["A", "B"], None, False, False)
+        self.assertEqual(
+            new_sql,
+            "SELECT A, B, C FROM Table1, Table2 WHERE Table1.D = Table2.D\nORDER BY A,B",
+        )
+
     def test_order_by_wrapped(self):
-        sql = 'SELECT A AS C0, B AS C1, C FROM Table1, Table2 WHERE Table1.D = Table2.D'
-        new_sql = self.helper.sql_add_order_by(sql, ['1', '2'], [1, 2], True, False)
-        self.assertEqual(new_sql,
-                         'SELECT T1.C0,T1.C1 FROM (SELECT A AS C0, B AS C1, C FROM Table1, Table2 WHERE Table1.D = Table2.D) AS T1\nORDER BY T1.C0,T1.C1')
+        sql = "SELECT A AS C0, B AS C1, C FROM Table1, Table2 WHERE Table1.D = Table2.D"
+        new_sql = self.helper.sql_add_order_by(sql, ["1", "2"], [1, 2], True, False)
+        self.assertEqual(
+            new_sql,
+            "SELECT T1.C0,T1.C1 FROM (SELECT A AS C0, B AS C1, C FROM Table1, Table2 WHERE Table1.D = Table2.D) AS T1\nORDER BY T1.C0,T1.C1",
+        )
 
     def test_order_by_with_limit(self):
-        sql = 'SELECT A AS C0, B AS C1, C FROM Table1, Table2 WHERE Table1.D = Table2.D'
-        new_sql = self.helper.sql_add_order_by(sql, ['1', '2'], [1, 2], True, True)
+        sql = "SELECT A AS C0, B AS C1, C FROM Table1, Table2 WHERE Table1.D = Table2.D"
+        new_sql = self.helper.sql_add_order_by(sql, ["1", "2"], [1, 2], True, True)
         self.assertEqual(new_sql, sql)
 
     def test_limit_offset_with_order_by(self):
-        sql = 'SELECT A AS C0, B AS C1, C FROM Table1, Table2 WHERE Table1.D = Table2.D'
-        new_sql = self.helper.sql_add_limit_offset(sql, limit=10, offset=10,
-                                                   orderby = ['1', '2'])
-        self.assertEqual(new_sql, '''WITH orderedrows AS (\nSELECT \n C0,  C1, _L01\n, ROW_NUMBER() OVER (ORDER BY  C0,  C1) AS __RowNumber\nFROM (\nSELECT A  AS  C0, B  AS  C1, C AS _L01 FROM  Table1, Table2 WHERE Table1.D = Table2.D\n) AS _SQ1 )\nSELECT \n C0,  C1, _L01\nFROM orderedrows WHERE \n__RowNumber <= 20 AND __RowNumber > 10''')
+        sql = "SELECT A AS C0, B AS C1, C FROM Table1, Table2 WHERE Table1.D = Table2.D"
+        new_sql = self.helper.sql_add_limit_offset(
+            sql, limit=10, offset=10, orderby=["1", "2"]
+        )
+        self.assertEqual(
+            new_sql,
+            """WITH orderedrows AS (\nSELECT \n C0,  C1, _L01\n, ROW_NUMBER() OVER (ORDER BY  C0,  C1) AS __RowNumber\nFROM (\nSELECT A  AS  C0, B  AS  C1, C AS _L01 FROM  Table1, Table2 WHERE Table1.D = Table2.D\n) AS _SQ1 )\nSELECT \n C0,  C1, _L01\nFROM orderedrows WHERE \n__RowNumber <= 20 AND __RowNumber > 10""",
+        )
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()



More information about the cubicweb-devel mailing list