[IMP] module loading and test profiling

Add a log level 25 to report selected INFO messages to runbot
Add timers for module loading and testing
Silence empty test suites
ir_logging now stores relative pathnames; warning: this does not work for --addons-path modules yet.
Antony Lesuisse 2014-06-29 14:42:27 +02:00
parent c4c13f1b61
commit f9e24e1918
3 changed files with 30 additions and 11 deletions
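
Not part of the commit, but for context: a minimal sketch of how a numeric level such as 25 slots in between INFO (20) and WARNING (30) in plain Python logging. The stream handler and the timed message below are illustrative stand-ins for the PostgreSQL handler and for the loading/testing timers touched by this diff.

import logging
import time

# Register 25 as a named level; the commit reuses the name "INFO" so that
# formatted output still reads as INFO.
logging.addLevelName(25, "INFO")

# Let INFO and above flow through the root logger...
logging.getLogger().setLevel(logging.INFO)

# ...but cap this particular handler at 25, as the commit does for the
# PostgreSQL handler feeding runbot: plain INFO stays out, level 25 gets in.
handler = logging.StreamHandler()
handler.setLevel(25)
logging.getLogger().addHandler(handler)

_logger = logging.getLogger(__name__)

t0 = time.time()
# ... some work worth reporting ...
_logger.log(25, "work finished in %.2fs", time.time() - t0)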


@@ -29,6 +29,7 @@ import logging
import os
import sys
import threading
import time
import openerp
import openerp.modules.db
@@ -110,7 +111,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules=
if kind in ('demo', 'test'):
threading.currentThread().testing = True
for filename in _get_files_of_kind(kind):
_logger.info("module %s: loading %s", module_name, filename)
_logger.info("loading %s/%s", module_name, filename)
noupdate = False
if kind in ('demo', 'demo_xml') or (filename.endswith('.csv') and kind in ('init', 'init_xml')):
noupdate = True
@@ -137,6 +138,8 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules=
registry.fields_by_model.setdefault(field['model'], []).append(field)
# register, instantiate and initialize models for each modules
ta0 = time.time()
for index, package in enumerate(graph):
module_name = package.name
module_id = package.id
@@ -144,7 +147,8 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules=
if skip_modules and module_name in skip_modules:
continue
_logger.debug('module %s: loading objects', package.name)
tm0 = time.time()
migrations.migrate_module(package, 'pre')
load_openerp_module(package.name)
@@ -222,9 +226,13 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules=
if hasattr(package, kind):
delattr(package, kind)
#_logger.log(25, "%s loaded in %.2fs", package.name, time.time() - tm0)
registry._init_modules.add(package.name)
cr.commit()
_logger.log(25, "%s modules loaded in %.2fs", len(graph), time.time() - ta0)
# The query won't be valid for models created later (i.e. custom model
# created after the registry has been loaded), so empty its result.
registry.fields_by_model = None
@@ -451,10 +459,13 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
# STEP 9: Run the post-install tests
cr.commit()
ta0 = time.time()
if openerp.tools.config['test_enable']:
cr.execute("SELECT name FROM ir_module_module WHERE state='installed'")
for module_name in cr.fetchall():
report.record_result(openerp.modules.module.run_unit_tests(module_name[0], cr.dbname, position=runs_post_install))
_logger.log(25, "All post-tested in %.2fs", time.time() - ta0)
finally:
cr.close()
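
The timing added to load_module_graph and load_modules boils down to one timestamp for the whole run plus one per module, summarised at level 25. A standalone sketch of that pattern, assuming a plain list of module names and an illustrative load_one callable (neither is Odoo API):

import logging
import time

_logger = logging.getLogger(__name__)

def load_graph(graph, load_one):
    # One timer for the whole graph, one per module inside the loop.
    ta0 = time.time()
    for name in graph:
        tm0 = time.time()
        load_one(name)
        # The per-module line stays commented out in the commit;
        # only the aggregate is reported at level 25.
        _logger.debug("%s loaded in %.2fs", name, time.time() - tm0)
    _logger.log(25, "%s modules loaded in %.2fs", len(graph), time.time() - ta0)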


@@ -27,6 +27,7 @@ import logging
import os
import re
import sys
import time
import unittest
from os.path import join as opj
@@ -422,14 +423,16 @@ def run_unit_tests(module_name, dbname, position=runs_at_install):
for m in mods:
tests = unwrap_suite(unittest2.TestLoader().loadTestsFromModule(m))
suite = unittest2.TestSuite(itertools.ifilter(position, tests))
_logger.info('running %s tests.', m.__name__)
result = unittest2.TextTestRunner(verbosity=2, stream=TestStream(m.__name__)).run(suite)
if suite.countTestCases():
tm0 = time.time()
_logger.info('%s running tests.', m.__name__)
result = unittest2.TextTestRunner(verbosity=2, stream=TestStream(m.__name__)).run(suite)
_logger.log(25, "%s tested in %.2fs", m.__name__, time.time() - tm0)
if not result.wasSuccessful():
r = False
_logger.error("Module %s: %d failures, %d errors", module_name, len(result.failures), len(result.errors))
if not result.wasSuccessful():
r = False
_logger.error("Module %s: %d failures, %d errors",
module_name, len(result.failures), len(result.errors))
current_test = None
return r
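
The test-runner change builds the suite first and only starts a runner, and only logs, when the suite actually contains tests, which is what silences empty test suites. A rough sketch of the same flow using the stdlib unittest module instead of unittest2 and without the TestStream wrapper (run_module_tests is an illustrative name):

import logging
import time
import unittest

_logger = logging.getLogger(__name__)

def run_module_tests(module):
    # Build the suite first; modules without tests produce no output at all.
    suite = unittest.TestLoader().loadTestsFromModule(module)
    if not suite.countTestCases():
        return True
    tm0 = time.time()
    _logger.info("%s running tests.", module.__name__)
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    _logger.log(25, "%s tested in %.2fs", module.__name__, time.time() - tm0)
    if not result.wasSuccessful():
        _logger.error("%s: %d failures, %d errors", module.__name__,
                      len(result.failures), len(result.errors))
        return False
    return True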


@@ -68,6 +68,8 @@ def LocalService(name):
with registry.cursor() as cr:
return registry['ir.actions.report.xml']._lookup_report(cr, name[len('report.'):])
path_prefix = os.path.realpath(os.path.dirname(os.path.dirname(__file__)))
class PostgreSQLHandler(logging.Handler):
""" PostgreSQL Loggin Handler will store logs in the database, by default
the current database, can be set using --log-db=DBNAME
@@ -87,7 +89,8 @@ class PostgreSQLHandler(logging.Handler):
msg = "%s\n%s" % (msg, traceback)
# we do not use record.levelname because it may have been changed by ColoredFormatter.
levelname = logging.getLevelName(record.levelno)
val = ('server', ct_db, record.name, levelname, msg, record.pathname, record.lineno, record.funcName)
val = ('server', ct_db, record.name, levelname, msg, record.pathname[len(path_prefix)+1:], record.lineno, record.funcName)
cr.execute("""
INSERT INTO ir_logging(create_date, type, dbname, name, level, message, path, line, func)
VALUES (NOW() at time zone 'UTC', %s, %s, %s, %s, %s, %s, %s, %s)
@@ -116,7 +119,7 @@ class DBFormatter(logging.Formatter):
class ColoredFormatter(DBFormatter):
def format(self, record):
fg_color, bg_color = LEVEL_COLOR_MAPPING[record.levelno]
fg_color, bg_color = LEVEL_COLOR_MAPPING.get(record.levelno, (GREEN, DEFAULT))
record.levelname = COLOR_PATTERN % (30 + fg_color, 40 + bg_color, record.levelname)
return DBFormatter.format(self, record)
@@ -127,6 +130,8 @@ def init_logger():
return
_logger_init = True
logging.addLevelName(25, "INFO")
from tools.translate import resetlocale
resetlocale()
@@ -180,7 +185,7 @@ def init_logger():
if tools.config['log_db']:
postgresqlHandler = PostgreSQLHandler()
postgresqlHandler.setLevel(logging.WARNING)
postgresqlHandler.setLevel(25)
logging.getLogger().addHandler(postgresqlHandler)
# Configure loggers levels
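
The relative-path change stores record.pathname with the server checkout prefix stripped, which is why modules loaded from a separate --addons-path are not covered yet: their files live outside that prefix. A hedged sketch of the idea, with an extra guard that the commit itself does not have (the handler applies the slice unconditionally):

import os

# Root of the server checkout, two directories up from this file, as in the
# commit.
path_prefix = os.path.realpath(os.path.dirname(os.path.dirname(__file__)))

def relative_path(pathname):
    # Drop the prefix and the path separator that follows it; leave paths
    # outside the prefix (e.g. --addons-path modules) untouched.
    if pathname.startswith(path_prefix + os.sep):
        return pathname[len(path_prefix) + 1:]
    return pathname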