From 182b9a7182400fe43ec966f74563a5acfceda49f Mon Sep 17 00:00:00 2001 From: Vo Minh Thu Date: Tue, 24 Jan 2012 16:34:19 +0100 Subject: [PATCH 1/7] [IMP] document_webdav.webdav_server: remove the so called `uniform log handling` of the HttpLogHandler class. bzr revid: vmt@openerp.com-20120124153419-w6oed6z1ui8mouo7 --- addons/document_webdav/webdav_server.py | 37 +++++++++++-------------- 1 file changed, 16 insertions(+), 21 deletions(-) diff --git a/addons/document_webdav/webdav_server.py b/addons/document_webdav/webdav_server.py index 81435a72137..ac7f87933ef 100644 --- a/addons/document_webdav/webdav_server.py +++ b/addons/document_webdav/webdav_server.py @@ -55,6 +55,8 @@ from DAV.propfind import PROPFIND from xml.dom import minidom from redirect import RedirectHTTPHandler +_logger = logging.getLogger(__name__) + khtml_re = re.compile(r' KHTML/([0-9\.]+) ') def OpenDAVConfig(**kw): @@ -73,7 +75,6 @@ def OpenDAVConfig(**kw): class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): verbose = False - _logger = logging.getLogger('webdav') protocol_version = 'HTTP/1.1' _HTTP_OPTIONS= { 'DAV' : ['1', '2'], 'Allow' : [ 'GET', 'HEAD', 'COPY', 'MOVE', 'POST', 'PUT', @@ -85,7 +86,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): return False def _log(self, message): - self._logger.debug(message) + _logger.debug(message) def handle(self): self._init_buffer() @@ -106,7 +107,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): if hasattr(self.request, 'getsockname'): addr, port = self.request.getsockname() except Exception, e: - self.log_error("Cannot calculate own address: %s" , e) + _logger.warning("Cannot calculate own address: %s", e) # Too early here to use self.headers self.baseuri = "%s://%s:%d/"% (server_proto, addr, port) self.IFACE_CLASS = openerp_dav_handler(self, self.verbose) @@ -126,12 +127,6 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): def get_davpath(self): return self.davpath - def 
log_message(self, format, *args): - self._logger.log(netsvc.logging.DEBUG_RPC,format % args) - - def log_error(self, format, *args): - self._logger.warning(format % args) - def _prep_OPTIONS(self, opts): ret = opts dc=self.IFACE_CLASS @@ -142,7 +137,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): except DAV_Error, (ec,dd): pass except Exception,e: - self.log_error("Error at options: %s", str(e)) + _logger.warning("Error at options: %s", str(e)) raise return ret @@ -245,7 +240,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): try: location = dc.put(uri, body, ct) except DAV_Error, (ec,dd): - self.log_error("Cannot PUT to %s: %s", uri, dd) + _logger.warning("Cannot PUT to %s: %s", uri, dd) return self.send_status(ec) headers = {} @@ -284,7 +279,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): """ Unlocks given resource """ dc = self.IFACE_CLASS - self.log_message('UNLOCKing resource %s' % self.headers) + _logger.log(netsvc.logging.DEBUG_RPC, 'UNLOCKing resource %s' % self.headers) uri = urlparse.urljoin(self.get_baseuri(dc), self.path) uri = urllib.unquote(uri) @@ -318,7 +313,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): dc = self.IFACE_CLASS lock_data = {} - self.log_message('LOCKing resource %s' % self.headers) + _logger.log(netsvc.logging.DEBUG_RPC, 'LOCKing resource %s' % self.headers) body = None if self.headers.has_key('Content-Length'): @@ -329,7 +324,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): uri = urlparse.urljoin(self.get_baseuri(dc), self.path) uri = urllib.unquote(uri) - self.log_message('do_LOCK: uri = %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'do_LOCK: uri = %s' % uri) ifheader = self.headers.get('If') @@ -477,7 +472,7 @@ class dummy_dav_interface(object): uri2 = uri.split('/') if len(uri2) < 3: return True - logging.getLogger('webdav').debug("Requested uri: %s", uri) + _logger.debug("Requested uri: %s", uri) return None # no def 
is_collection(self, uri): @@ -532,7 +527,7 @@ class DAVStaticHandler(http_server.StaticHTTPHandler): except DAV_Error, (ec,dd): return self.send_error(ec,dd) except Exception: - self.log_exception("Cannot PROPFIND") + _logger.exception("Cannot PROPFIND") raise # work around MSIE DAV bug for creation and modified date @@ -573,7 +568,7 @@ try: conf = OpenDAVConfig(**_dc) handler._config = conf reg_http_service(directory, DAVHandler, DAVAuthProvider) - logging.getLogger('webdav').info("WebDAV service registered at path: %s/ "% directory) + _logger.info("WebDAV service registered at path: %s/ ", directory) if not (config.get_misc('webdav', 'no_root_hack', False)): # Now, replace the static http handler with the dav-enabled one. @@ -595,7 +590,7 @@ try: reg_http_service('/', DAVStaticHandler) except Exception, e: - logging.getLogger('webdav').error('Cannot launch webdav: %s' % e) + _logger.error('Cannot launch webdav: %s', e) def init_well_known(): @@ -639,9 +634,9 @@ def init_principals_redirect(): if dbname: PrincipalsRedirect.redirect_paths[''] = '/webdav/%s/principals' % dbname reg_http_service('/principals', PrincipalsRedirect) - logging.getLogger("web-services").info( - "Registered HTTP redirect handler for /principals to the %s db.", - dbname) + _logger.info( + "Registered HTTP redirect handler for /principals to the %s db.", + dbname) init_principals_redirect() From 85f0ad1ea87f017a375d187d6d92cbdfffba61c7 Mon Sep 17 00:00:00 2001 From: Vo Minh Thu Date: Tue, 24 Jan 2012 16:57:59 +0100 Subject: [PATCH 2/7] [IMP] document_webdav.webdav_server: remove the so called `uniform log handling` of the HttpLogHandler class. 
bzr revid: vmt@openerp.com-20120124155759-6g40dmhz6ywk9xe9 --- addons/document_webdav/dav_fs.py | 57 +++++++++++++++--------------- addons/document_webdav/redirect.py | 10 +++--- 2 files changed, 34 insertions(+), 33 deletions(-) diff --git a/addons/document_webdav/dav_fs.py b/addons/document_webdav/dav_fs.py index 772b7600f20..a292019eeab 100644 --- a/addons/document_webdav/dav_fs.py +++ b/addons/document_webdav/dav_fs.py @@ -175,7 +175,7 @@ class openerp_dav_handler(dav_interface): def get_propnames(self, uri): props = self.PROPS - self.parent.log_message('get propnames: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'get propnames: %s' % uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri) if not dbname: if cr: cr.close() @@ -203,21 +203,21 @@ class openerp_dav_handler(dav_interface): except NotImplementedError, e: if cr: cr.close() import traceback - self.parent.log_error("Cannot %s: %s", opname, str(e)) - self.parent.log_message("Exc: %s",traceback.format_exc()) + _logger.warning("Cannot %s: %s", opname, str(e)) + _logger.log(netsvc.logging.DEBUG_RPC, "Exc: %s" % traceback.format_exc()) # see par 9.3.1 of rfc raise DAV_Error(403, str(e) or 'Not supported at this path') except EnvironmentError, err: if cr: cr.close() import traceback - self.parent.log_error("Cannot %s: %s", opname, err.strerror) - self.parent.log_message("Exc: %s",traceback.format_exc()) + _logger.warning("Cannot %s: %s", opname, err.strerror) + _logger.log(netsvc.logging.DEBUG_RPC, "Exc: %s" % traceback.format_exc()) raise default_exc(err.strerror) except Exception, e: import traceback if cr: cr.close() - self.parent.log_error("Cannot %s: %s", opname, str(e)) - self.parent.log_message("Exc: %s",traceback.format_exc()) + _logger.warning("Cannot %s: %s", opname, str(e)) + _logger.log(netsvc.logging.DEBUG_RPC, "Exc: %s" % traceback.format_exc()) raise default_exc("Operation failed") def _get_dav_lockdiscovery(self, uri): @@ -245,7 +245,7 @@ class openerp_dav_handler(dav_interface): def 
prep_http_options(self, uri, opts): """see HttpOptions._prep_OPTIONS """ - self.parent.log_message('get options: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'get options: %s' % uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri, allow_last=True) if not dbname: @@ -268,7 +268,7 @@ class openerp_dav_handler(dav_interface): ret[key] = [] ret[key].extend(val) - self.parent.log_message('options: %s' % ret) + _logger.log(netsvc.logging.DEBUG_RPC, 'options: %s' % ret) else: ret = opts cr.close() @@ -369,7 +369,7 @@ class openerp_dav_handler(dav_interface): if res and len(res): self.db_name_list.append(db_name) except Exception, e: - self.parent.log_error("Exception in db list: %s" % e) + _logger.warning("Exception in db list: %s", e) finally: if cr: cr.close() @@ -377,7 +377,7 @@ class openerp_dav_handler(dav_interface): def get_childs(self,uri, filters=None): """ return the child objects as self.baseuris for the given URI """ - self.parent.log_message('get children: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'get children: %s' % uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri, allow_last=True) if not dbname: @@ -394,7 +394,7 @@ class openerp_dav_handler(dav_interface): fp = node.full_path() if fp and len(fp): fp = '/'.join(fp) - self.parent.log_message('children for: %s' % fp) + _logger.log(netsvc.logging.DEBUG_RPC, 'children for: %s' % fp) else: fp = None domain = None @@ -420,13 +420,13 @@ class openerp_dav_handler(dav_interface): if turi.startswith(ul): result.append( turi[len(self.parent.davpath):]) else: - self.parent.log_error("ignore href %s because it is not under request path %s", turi, ul) + _logger.warning("ignore href %s because it is not under request path %s", turi, ul) return result # We don't want to continue with the children found below # Note the exceptions and that 'finally' will close the # cursor for d in node.children(cr, domain): - self.parent.log_message('child: %s' % d.path) + _logger.log(netsvc.logging.DEBUG_RPC, 'child: %s' % 
d.path) if fp: result.append( self.urijoin(dbname,fp,d.path) ) else: @@ -434,7 +434,7 @@ class openerp_dav_handler(dav_interface): except DAV_Error: raise except Exception, e: - self.parent.log_error("cannot get_children: "+ str(e)) + _logger.warning("cannot get_children: "+ str(e)) raise finally: if cr: cr.close() @@ -479,7 +479,7 @@ class openerp_dav_handler(dav_interface): return pool.get('document.directory').get_object(cr, uid, uri, context=context) def get_data(self,uri, rrange=None): - self.parent.log_message('GET: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'GET: %s' % uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri) try: if not dbname: @@ -499,7 +499,7 @@ class openerp_dav_handler(dav_interface): start = 0 assert start >= 0 if end and end < start: - self.parent.log_error("Invalid range for data: %s-%s" %(start, end)) + _logger.warning("Invalid range for data: %s-%s" %(start, end)) raise DAV_Error(416, "Invalid range for data") if end: if end >= res.size(): @@ -514,12 +514,12 @@ class openerp_dav_handler(dav_interface): # says we'd better just return 200 OK with empty data return '' except IndexError,e : - self.parent.log_error("GET IndexError: %s", str(e)) + _logger.warning("GET IndexError: %s", str(e)) raise DAV_NotFound2(uri2) except Exception,e: import traceback - self.parent.log_error("GET exception: %s",str(e)) - self.parent.log_message("Exc: %s", traceback.format_exc()) + _logger.warning("GET exception: %s",str(e)) + _logger.log(netsvc.logging.DEBUG_RPC, "Exc: %s" % traceback.format_exc()) raise DAV_Error, 409 return res finally: @@ -528,7 +528,7 @@ class openerp_dav_handler(dav_interface): @memoize(CACHE_SIZE) def _get_dav_resourcetype(self, uri): """ return type of object """ - self.parent.log_message('get RT: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'get RT: %s' % uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri) try: if not dbname: @@ -546,7 +546,7 @@ class openerp_dav_handler(dav_interface): if cr: cr.close() def 
_get_dav_displayname(self,uri): - self.parent.log_message('get DN: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'get DN: %s' % uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri) if not dbname: if cr: cr.close() @@ -565,7 +565,7 @@ class openerp_dav_handler(dav_interface): @memoize(CACHE_SIZE) def _get_dav_getcontentlength(self, uri): """ return the content length of an object """ - self.parent.log_message('get length: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'get length: %s' % uri) result = 0 cr, uid, pool, dbname, uri2 = self.get_cr(uri) if not dbname: @@ -582,7 +582,7 @@ class openerp_dav_handler(dav_interface): @memoize(CACHE_SIZE) def _get_dav_getetag(self,uri): """ return the ETag of an object """ - self.parent.log_message('get etag: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'get etag: %s' % uri) result = 0 cr, uid, pool, dbname, uri2 = self.get_cr(uri) if not dbname: @@ -638,7 +638,7 @@ class openerp_dav_handler(dav_interface): @memoize(CACHE_SIZE) def _get_dav_getcontenttype(self,uri): - self.parent.log_message('get contenttype: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'get contenttype: %s' % uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri) if not dbname: if cr: cr.close() @@ -657,7 +657,7 @@ class openerp_dav_handler(dav_interface): """ create a new collection see par. 
9.3 of rfc4918 """ - self.parent.log_message('MKCOL: %s' % uri) + _logger.log(netsvc.logging.DEBUG_RPC, 'MKCOL: %s' % uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri) if not uri2[-1]: if cr: cr.close() @@ -681,7 +681,8 @@ class openerp_dav_handler(dav_interface): def put(self, uri, data, content_type=None): """ put the object into the filesystem """ - self.parent.log_message('Putting %s (%d), %s'%( misc.ustr(uri), data and len(data) or 0, content_type)) + _logger.log(netsvc.logging.DEBUG_RPC, 'Putting %s (%d), %s' % \ + (misc.ustr(uri), data and len(data) or 0, content_type)) cr, uid, pool,dbname, uri2 = self.get_cr(uri) if not dbname: if cr: cr.close() @@ -723,7 +724,7 @@ class openerp_dav_handler(dav_interface): try: etag = str(newchild.get_etag(cr)) except Exception, e: - self.parent.log_error("Cannot get etag for node: %s" % e) + _logger.warning("Cannot get etag for node: %s", e) ret = (str(hurl), etag) else: self._try_function(node.set_data, (cr, data), "save %s" % objname, cr=cr) diff --git a/addons/document_webdav/redirect.py b/addons/document_webdav/redirect.py index 7bc0883f9e8..7d04ffb51f5 100644 --- a/addons/document_webdav/redirect.py +++ b/addons/document_webdav/redirect.py @@ -23,10 +23,10 @@ import logging import urlparse from service.websrv_lib import FixSendError, HTTPHandler, HttpOptions -from service.http_server import HttpLogHandler -class RedirectHTTPHandler(HttpLogHandler, FixSendError, HttpOptions, HTTPHandler): - _logger = logging.getLogger('httpd.well-known') +_logger = logging.getLogger(__name__) + +class RedirectHTTPHandler(FixSendError, HttpOptions, HTTPHandler): _HTTP_OPTIONS = { 'Allow': ['OPTIONS', 'GET', 'HEAD', 'PROPFIND'] } redirect_paths = {} @@ -62,7 +62,7 @@ class RedirectHTTPHandler(HttpLogHandler, FixSendError, HttpOptions, HTTPHandler try: addr, port = self.request.getsockname() except Exception, e: - self.log_error("Cannot calculate own address:" , e) + _logger.error("Cannot calculate own address:", e) if 
self.headers.has_key('Host'): uparts = list(urlparse.urlparse("%s://%s:%d"% (server_proto, addr,port))) @@ -80,7 +80,7 @@ class RedirectHTTPHandler(HttpLogHandler, FixSendError, HttpOptions, HTTPHandler self.send_header("Content-Length", 0) self.end_headers() # Do we need a Cache-content: header here? - self._logger.debug("redirecting %s to %s", self.path, redir_path) + _logger.debug("redirecting %s to %s", self.path, redir_path) return None def do_PROPFIND(self): From 7c4e08eb46a20e37ac490e6c2d4b916d56b8f63b Mon Sep 17 00:00:00 2001 From: Vo Minh Thu Date: Tue, 24 Jan 2012 17:28:50 +0100 Subject: [PATCH 3/7] [IMP] account,document: changed from class __logger to module _logger. bzr revid: vmt@openerp.com-20120124162850-qr1or3piq9f91c7u --- addons/account/installer.py | 5 +++-- addons/document/content_index.py | 16 ++++++++-------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/addons/account/installer.py b/addons/account/installer.py index d1b19ef4319..a9122adf488 100644 --- a/addons/account/installer.py +++ b/addons/account/installer.py @@ -31,10 +31,11 @@ from osv import fields, osv import netsvc import tools +_logger = logging.getLogger(__name__) + class account_installer(osv.osv_memory): _name = 'account.installer' _inherit = 'res.config.installer' - __logger = logging.getLogger(_name) def _get_charts(self, cr, uid, context=None): modules = self.pool.get('ir.module.module') @@ -142,7 +143,7 @@ class account_installer(osv.osv_memory): cr, uid, ids, context=context) chart = self.read(cr, uid, ids, ['charts'], context=context)[0]['charts'] - self.__logger.debug('Installing chart of accounts %s', chart) + _logger.debug('Installing chart of accounts %s', chart) return modules | set([chart]) account_installer() diff --git a/addons/document/content_index.py b/addons/document/content_index.py index 64b480ee736..3cb408428c9 100644 --- a/addons/document/content_index.py +++ b/addons/document/content_index.py @@ -23,10 +23,11 @@ import os import tempfile 
from subprocess import Popen, PIPE +_logger = logging.getLogger(__name__) + class NhException(Exception): pass - class indexer(object): """ An indexer knows how to parse the content of some file. @@ -116,7 +117,6 @@ def mime_match(mime, mdict): return (None, None) class contentIndex(object): - __logger = logging.getLogger('addons.document.content_index') def __init__(self): self.mimes = {} self.exts = {} @@ -132,7 +132,7 @@ class contentIndex(object): f = True if f: - self.__logger.debug('Register content indexer: %r', obj) + _logger.debug('Register content indexer: %r', obj) if not f: raise Exception("Your indexer should at least suport a mimetype or extension") @@ -169,22 +169,22 @@ class contentIndex(object): (result, _) = pop.communicate() mime2 = result.split(';')[0] - self.__logger.debug('File gave us: %s', mime2) + _logger.debug('File gave us: %s', mime2) # Note that the temporary file still exists now. mime,fobj = mime_match(mime2, self.mimes) if not mime: mime = mime2 except Exception: - self.__logger.exception('Cannot determine mime type') + _logger.exception('Cannot determine mime type') try: if fobj: res = (mime, fobj.indexContent(content,filename,fname or realfname) ) else: - self.__logger.debug("Have no object, return (%s, None)", mime) + _logger.debug("Have no object, return (%s, None)", mime) res = (mime, None ) except Exception: - self.__logger.exception("Could not index file %s (%s)", + _logger.exception("Could not index file %s (%s)", filename, fname or realfname) res = None @@ -193,7 +193,7 @@ class contentIndex(object): try: os.unlink(fname) except Exception: - self.__logger.exception("Could not unlink %s", fname) + _logger.exception("Could not unlink %s", fname) return res From 57b41aae43f0f2626f08e3e212aad4bab0394d42 Mon Sep 17 00:00:00 2001 From: Vo Minh Thu Date: Wed, 25 Jan 2012 10:44:51 +0100 Subject: [PATCH 4/7] [IMP] logging: use logging instead of netsvc, use module __name__ instead of something else. 
bzr revid: vmt@openerp.com-20120125094451-a6mejmnaxa2vp1bu --- addons/account/account.py | 20 +- .../report/bank_statement_balance_report.py | 4 - addons/account_coda/account_coda.py | 5 +- .../wizard/account_coda_import.py | 26 +- addons/account_payment/account_payment.py | 7 +- .../wizard/wizard_tech_guide_rst.py | 25 +- .../base_module_quality.py | 21 +- addons/document/document.py | 6 +- addons/document/document_directory.py | 7 +- addons/document/document_storage.py | 63 ++- addons/document/nodes.py | 24 +- addons/document/std_index.py | 7 +- addons/document_ftp/ftpserver/__init__.py | 10 +- addons/edi/__init__.py | 2 +- addons/edi/edi_service.py | 2 +- addons/edi/models/edi.py | 6 +- addons/edi/models/res_partner.py | 6 +- addons/fetchmail/fetchmail.py | 14 +- addons/import_base/import_framework.py | 15 +- addons/l10n_be_invoice_bba/invoice.py | 433 +++++++++--------- addons/l10n_be_invoice_bba/partner.py | 5 +- addons/l10n_multilang/l10n_multilang.py | 13 +- addons/mail/mail_message.py | 2 +- addons/mail/mail_thread.py | 3 - addons/project_messages/project_messages.py | 2 - addons/report_webkit/webkit_report.py | 11 +- addons/stock/stock.py | 6 +- 27 files changed, 369 insertions(+), 376 deletions(-) diff --git a/addons/account/account.py b/addons/account/account.py index 432a90a653e..2b1834002e7 100644 --- a/addons/account/account.py +++ b/addons/account/account.py @@ -19,17 +19,19 @@ # ############################################################################## -import time from datetime import datetime from dateutil.relativedelta import relativedelta +import logging from operator import itemgetter +import time -import netsvc -import pooler -from osv import fields, osv import decimal_precision as dp +from osv import fields, osv +import pooler from tools.translate import _ +_logger = logging.getLogger(__name__) + def check_cycle(self, cr, uid, ids, context=None): """ climbs the ``self._table.parent_id`` chains for 100 levels or until it can't find any 
more parent(s) @@ -212,7 +214,6 @@ class account_account(osv.osv): _name = "account.account" _description = "Account" _parent_store = True - logger = netsvc.Logger() def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False): @@ -295,8 +296,7 @@ class account_account(osv.osv): if aml_query.strip(): wheres.append(aml_query.strip()) filters = " AND ".join(wheres) - self.logger.notifyChannel('addons.'+self._name, netsvc.LOG_DEBUG, - 'Filters: %s'%filters) + _logger.debug('Filters: %s', filters) # IN might not work ideally in case there are too many # children_and_consolidated, in that case join on a # values() e.g.: @@ -312,8 +312,7 @@ class account_account(osv.osv): " GROUP BY l.account_id") params = (tuple(children_and_consolidated),) + query_params cr.execute(request, params) - self.logger.notifyChannel('addons.'+self._name, netsvc.LOG_DEBUG, - 'Status: %s'%cr.statusmessage) + _logger.debug('Status: %s', cr.statusmessage) for res in cr.dictfetchall(): accounts[res['id']] = res @@ -2083,8 +2082,7 @@ class account_tax(osv.osv): } def compute(self, cr, uid, taxes, price_unit, quantity, address_id=None, product=None, partner=None): - logger = netsvc.Logger() - logger.notifyChannel("warning", netsvc.LOG_WARNING, + _logger.warning( "Deprecated, use compute_all(...)['taxes'] instead of compute(...) 
to manage prices with tax included") return self._compute(cr, uid, taxes, price_unit, quantity, address_id, product, partner) diff --git a/addons/account_bank_statement_extensions/report/bank_statement_balance_report.py b/addons/account_bank_statement_extensions/report/bank_statement_balance_report.py index 8e1e4b09e47..50b5bcd8d36 100644 --- a/addons/account_bank_statement_extensions/report/bank_statement_balance_report.py +++ b/addons/account_bank_statement_extensions/report/bank_statement_balance_report.py @@ -22,14 +22,10 @@ import time from report import report_sxw -import pooler -import netsvc -logger=netsvc.Logger() class bank_statement_balance_report(report_sxw.rml_parse): def set_context(self, objects, data, ids, report_type=None): - #logger.notifyChannel('addons.'+__name__, netsvc.LOG_WARNING, 'set_context, objects = %s, data = %s, ids = %s' % (objects, data, ids)) cr = self.cr uid = self.uid context = self.context diff --git a/addons/account_coda/account_coda.py b/addons/account_coda/account_coda.py index f967669b98e..b4723794578 100644 --- a/addons/account_coda/account_coda.py +++ b/addons/account_coda/account_coda.py @@ -21,11 +21,10 @@ ############################################################################## import time -from osv import osv, fields + import decimal_precision as dp -import netsvc +from osv import osv, fields from tools.translate import _ -logger=netsvc.Logger() class coda_bank_account(osv.osv): _name= 'coda.bank.account' diff --git a/addons/account_coda/wizard/account_coda_import.py b/addons/account_coda/wizard/account_coda_import.py index 02e72bc3562..ac9a623e2fc 100644 --- a/addons/account_coda/wizard/account_coda_import.py +++ b/addons/account_coda/wizard/account_coda_import.py @@ -20,15 +20,16 @@ # ############################################################################## -import time import base64 +import re +from sys import exc_info +import time +from traceback import format_exception + from osv import fields,osv from 
tools.translate import _ -import netsvc -import re -from traceback import format_exception -from sys import exc_info -logger=netsvc.Logger() + +_logger = logging.getLogger(__name__) class account_coda_import(osv.osv_memory): _name = 'account.coda.import' @@ -816,7 +817,6 @@ class account_coda_import(osv.osv_memory): ttype = line['type'] == 'supplier' and 'payment' or 'receipt', date = line['val_date'], context = context) - #logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING, 'voucher_dict = %s' % voucher_dict) voucher_line_vals = False if voucher_dict['value']['line_ids']: for line_dict in voucher_dict['value']['line_ids']: @@ -889,22 +889,22 @@ class account_coda_import(osv.osv_memory): nb_err += 1 err_string += _('\nError ! ') + str(e) tb = ''.join(format_exception(*exc_info())) - logger.notifyChannel('addons.'+self._name, netsvc.LOG_ERROR, - 'Application Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb)) + _logger.error('Application Error while processing Statement %s\n%s', + statement.get('name', '/'), tb) except Exception, e: cr.rollback() nb_err += 1 err_string += _('\nSystem Error : ') + str(e) tb = ''.join(format_exception(*exc_info())) - logger.notifyChannel('addons.'+self._name, netsvc.LOG_ERROR, - 'System Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb)) + _logger.error('System Error while processing Statement %s\n%s', + statement.get('name', '/'), tb) except : cr.rollback() nb_err += 1 err_string = _('\nUnknown Error : ') + str(e) tb = ''.join(format_exception(*exc_info())) - logger.notifyChannel('addons.'+self._name, netsvc.LOG_ERROR, - 'Unknown Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb)) + _logger.error('Unknown Error while processing Statement %s\n%s', + statement.get('name', '/'), tb) # end 'for statement in coda_statements' diff --git a/addons/account_payment/account_payment.py b/addons/account_payment/account_payment.py index 1de715dc777..11d4f98af8f 
100644 --- a/addons/account_payment/account_payment.py +++ b/addons/account_payment/account_payment.py @@ -19,11 +19,14 @@ # ############################################################################## +import logging import time from osv import osv, fields import netsvc +_logger = logging.getLogger(__name__) + class payment_mode(osv.osv): _name= 'payment.mode' _description= 'Payment Mode' @@ -70,9 +73,7 @@ class payment_order(osv.osv): #dead code def get_wizard(self, type): - logger = netsvc.Logger() - logger.notifyChannel("warning", netsvc.LOG_WARNING, - "No wizard found for the payment type '%s'." % type) + _logger.warning("No wizard found for the payment type '%s'.", type) return None def _total(self, cursor, user, ids, name, args, context=None): diff --git a/addons/base_module_doc_rst/wizard/wizard_tech_guide_rst.py b/addons/base_module_doc_rst/wizard/wizard_tech_guide_rst.py index 39933c39a3e..9f4c75918b9 100644 --- a/addons/base_module_doc_rst/wizard/wizard_tech_guide_rst.py +++ b/addons/base_module_doc_rst/wizard/wizard_tech_guide_rst.py @@ -18,17 +18,18 @@ # along with this program. If not, see . # ############################################################################## -from os.path import join import base64 -import tempfile -import tarfile import httplib +import logging +import os +from os.path import join +import tarfile +import tempfile -import netsvc import wizard import pooler -import os -import tools + +_logger = logging.getLogger(__name__) choose_file_form = '''
@@ -99,9 +100,8 @@ class RstDoc(object): if res.status in (200, ): status_good = True except (Exception, ), e: - logger = netsvc.Logger() msg = "error connecting to server '%s' with link '%s'. Error message: %s" % (server, link, str(e)) - logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg) + _logger.error(msg) status_good = False return status_good @@ -241,9 +241,8 @@ class RstDoc(object): def _write_objects(self): def write_field(field_def): if not isinstance(field_def, tuple): - logger = netsvc.Logger() msg = "Error on Object %s: field_def: %s [type: %s]" % (obj_name.encode('utf8'), field_def.encode('utf8'), type(field_def)) - logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg) + _logger.error(msg) return "" field_name = field_def[0] @@ -392,9 +391,8 @@ class wizard_tech_guide_rst(wizard.interface): try: os.unlink(tgz_tmp_filename) except Exception, e: - logger = netsvc.Logger() msg = "Temporary file %s could not be deleted. (%s)" % (tgz_tmp_filename, e) - logger.notifyChannel("warning", netsvc.LOG_WARNING, msg) + _logger.warning(msg) return { 'rst_file': base64.encodestring(out), @@ -483,9 +481,8 @@ class wizard_tech_guide_rst(wizard.interface): res = modobj.fields_get(cr, uid).items() return res else: - logger = netsvc.Logger() msg = "Object %s not found" % (obj) - logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg) + _logger.error(msg) return "" states = { diff --git a/addons/base_module_quality/base_module_quality.py b/addons/base_module_quality/base_module_quality.py index bbe20bc0b4f..76a9214c10d 100644 --- a/addons/base_module_quality/base_module_quality.py +++ b/addons/base_module_quality/base_module_quality.py @@ -25,6 +25,9 @@ from tools.translate import _ from osv import osv, fields import logging import addons + +_logger = logging.getLogger(__name__) + class abstract_quality_check(object): ''' This Class is abstract class for all test @@ -78,7 +81,6 @@ class abstract_quality_check(object): #This variable 
used to give message if test result is good or not self.message = '' - self.log = logging.getLogger('module.quality') #The tests have to subscribe itselfs in this list, that contains #all the test that have to be performed. @@ -108,11 +110,11 @@ class abstract_quality_check(object): model_data = pool.get('ir.model.data').browse(cr, uid, ids2) for model in model_data: model_list.append(model.res_id) - self.log.debug('get_objects() model_list: %s', ','.join(map(str, model_list))) + _logger.debug('get_objects() model_list: %s', ','.join(map(str, model_list))) obj_list = [] for mod in pool.get('ir.model').browse(cr, uid, model_list): obj_list.append(str(mod.model)) - self.log.debug('get_objects() obj_list: %s', ','.join(obj_list)) + _logger.debug('get_objects() obj_list: %s', ','.join(obj_list)) return obj_list def get_model_ids(self, cr, uid, models=[]): @@ -120,7 +122,7 @@ class abstract_quality_check(object): if not models: return [] pool = pooler.get_pool(cr.dbname) - self.log.debug('get_model_ids([%s])', ', '.join(models)) + _logger.debug('get_model_ids([%s])', ', '.join(models)) return pool.get('ir.model').search(cr, uid, [('model', 'in', models)]) def get_ids(self, cr, uid, object_list): @@ -211,7 +213,6 @@ class module_quality_check(osv.osv): So here the detail result is in html format and summary will be in text_wiki format. 
''' pool = pooler.get_pool(cr.dbname) - log = logging.getLogger('module.quality') obj_module = pool.get('ir.module.module') if not module_state: module_id = obj_module.search(cr, uid, [('name', '=', module_name)]) @@ -223,14 +224,14 @@ class module_quality_check(osv.osv): ponderation_sum = 0.0 create_ids = [] module_path = addons.get_module_path(module_name) - log.info('Performing quality tests for %s', module_name) + _logger.info('Performing quality tests for %s', module_name) for test in abstract_obj.tests: val = test.quality_test() if not val.active: - log.info('Skipping inactive step %s for %s', val.name, module_name) + _logger.info('Skipping inactive step %s for %s', val.name, module_name) continue - log.info('Performing step %s for %s', val.name, module_name) + _logger.info('Performing step %s for %s', val.name, module_name) # Get a separate cursor per test, so that an SQL error in one # will not block the others. cr2 = pooler.get_db(cr.dbname).cursor() @@ -269,9 +270,9 @@ class module_quality_check(osv.osv): 'summary': _("The module has to be installed before running this test.") } create_ids.append((0, 0, data)) - log.info('Finished quality test step') + _logger.info('Finished quality test step') except Exception, e: - log.exception("Could not finish test step %s due to %s", val.name, e) + _logger.exception("Could not finish test step %s due to %s", val.name, e) finally: cr2.rollback() cr2.close() diff --git a/addons/document/document.py b/addons/document/document.py index 9ea7f97a321..e6fe4cde13d 100644 --- a/addons/document/document.py +++ b/addons/document/document.py @@ -30,6 +30,8 @@ from tools.translate import _ import nodes import logging +_logger = logging.getLogger(__name__) + DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config['root_path'], 'filestore')) class document_file(osv.osv): @@ -54,7 +56,7 @@ class document_file(osv.osv): parent_id = self.pool.get('document.directory')._get_root_directory(cr,uid) if not parent_id: 
- logging.getLogger('document').warning("at _attach_parent_id(), still not able to set the parent!") + _logger.warning("at _attach_parent_id(), still not able to set the parent!") return False if ids is not None: @@ -339,7 +341,7 @@ class document_file(osv.osv): if r: unres.append(r) else: - logging.getLogger('document').warning("Unlinking attachment #%s %s that has no storage", + _logger.warning("Unlinking attachment #%s %s that has no storage", f.id, f.name) res = super(document_file, self).unlink(cr, uid, ids, context) stor.do_unlink(cr, uid, unres) diff --git a/addons/document/document_directory.py b/addons/document/document_directory.py index d09d2cc86d5..787888cd4ee 100644 --- a/addons/document/document_directory.py +++ b/addons/document/document_directory.py @@ -19,6 +19,7 @@ # ############################################################################## +import logging from osv import osv, fields from osv.orm import except_orm @@ -26,6 +27,8 @@ from osv.orm import except_orm import nodes from tools.translate import _ +_logger = logging.getLogger(__name__) + class document_directory(osv.osv): _name = 'document.directory' _description = 'Directory' @@ -78,9 +81,7 @@ class document_directory(osv.osv): root_id = objid.read(cr, uid, mid, ['res_id'])['res_id'] return root_id except Exception, e: - import netsvc - logger = netsvc.Logger() - logger.notifyChannel("document", netsvc.LOG_WARNING, 'Cannot set directory root:'+ str(e)) + _logger.warning('Cannot set directory root:' + str(e)) return False return objid.browse(cr, uid, mid, context=context).res_id diff --git a/addons/document/document_storage.py b/addons/document/document_storage.py index fe757e7876d..577ea84f112 100644 --- a/addons/document/document_storage.py +++ b/addons/document/document_storage.py @@ -41,6 +41,8 @@ import pooler import nodes from content_index import cntIndex +_logger = logging.getLogger(__name__) + DMS_ROOT_PATH = tools.config.get('document_path', 
os.path.join(tools.config.get('root_path'), 'filestore')) @@ -130,7 +132,7 @@ class nodefd_file(nodes.node_descriptor): mime, icont = cntIndex.doIndex(None, filename=filename, content_type=None, realfname=fname) except Exception: - logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True) + _logger.debug('Cannot index file:', exc_info=True) pass try: @@ -150,7 +152,7 @@ class nodefd_file(nodes.node_descriptor): cr.commit() cr.close() except Exception: - logging.getLogger('document.storage').warning('Cannot save file indexed content:', exc_info=True) + _logger.warning('Cannot save file indexed content:', exc_info=True) elif self.mode in ('a', 'a+' ): try: @@ -164,7 +166,7 @@ class nodefd_file(nodes.node_descriptor): cr.commit() cr.close() except Exception: - logging.getLogger('document.storage').warning('Cannot save file appended content:', exc_info=True) + _logger.warning('Cannot save file appended content:', exc_info=True) @@ -191,7 +193,7 @@ class nodefd_db(StringIO, nodes.node_descriptor): elif mode == 'a': StringIO.__init__(self, None) else: - logging.getLogger('document.storage').error("Incorrect mode %s specified", mode) + _logger.error("Incorrect mode %s specified", mode) raise IOError(errno.EINVAL, "Invalid file mode") self.mode = mode @@ -217,7 +219,7 @@ class nodefd_db(StringIO, nodes.node_descriptor): mime, icont = cntIndex.doIndex(data, filename=filename, content_type=None, realfname=None) except Exception: - logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True) + _logger.debug('Cannot index file:', exc_info=True) pass try: @@ -241,7 +243,7 @@ class nodefd_db(StringIO, nodes.node_descriptor): (out, len(data), par.file_id)) cr.commit() except Exception: - logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id) + _logger.exception('Cannot update db file #%d for close:', par.file_id) raise finally: cr.close() @@ -271,7 +273,7 @@ class nodefd_db64(StringIO, 
nodes.node_descriptor): elif mode == 'a': StringIO.__init__(self, None) else: - logging.getLogger('document.storage').error("Incorrect mode %s specified", mode) + _logger.error("Incorrect mode %s specified", mode) raise IOError(errno.EINVAL, "Invalid file mode") self.mode = mode @@ -297,7 +299,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor): mime, icont = cntIndex.doIndex(data, filename=filename, content_type=None, realfname=None) except Exception: - logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True) + _logger.debug('Cannot index file:', exc_info=True) pass try: @@ -320,7 +322,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor): (base64.encodestring(data), len(data), par.file_id)) cr.commit() except Exception: - logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id) + _logger.exception('Cannot update db file #%d for close:', par.file_id) raise finally: cr.close() @@ -339,7 +341,6 @@ class document_storage(osv.osv): """ _name = 'document.storage' _description = 'Storage Media' - _doclog = logging.getLogger('document') _columns = { 'name': fields.char('Name', size=64, required=True, select=1), @@ -401,8 +402,6 @@ class document_storage(osv.osv): # npath may contain empty elements, for root directory etc. npath = filter(lambda x: x is not None, npath) - # if self._debug: - # self._doclog.debug('Npath: %s', npath) for n in npath: if n == '..': raise ValueError("Invalid '..' element in path") @@ -413,7 +412,7 @@ class document_storage(osv.osv): dpath += npath[:-1] path = os.path.join(*dpath) if not os.path.isdir(path): - self._doclog.debug("Create dirs: %s", path) + _logger.debug("Create dirs: %s", path) os.makedirs(path) return path, npath @@ -451,7 +450,7 @@ class document_storage(osv.osv): # try to fix their directory. if mode in ('r','r+'): if ira.file_size: - self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" 
% ira.id) + _logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) raise IOError(errno.ENOENT, 'No file can be located') else: store_fname = self.__get_random_fname(boo.path) @@ -493,7 +492,7 @@ class document_storage(osv.osv): # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: - self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) + _logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) return None fpath = os.path.join(boo.path, ira.store_fname) return file(fpath, 'rb').read() @@ -517,7 +516,7 @@ class document_storage(osv.osv): # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: - self._doclog.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id) + _logger.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id) # sfname = ira.name fpath = os.path.join(boo.path,ira.store_fname or ira.name) if os.path.exists(fpath): @@ -550,7 +549,7 @@ class document_storage(osv.osv): if boo.readonly: raise IOError(errno.EPERM, "Readonly medium") - self._doclog.debug( "Store data for ir.attachment #%d" % ira.id) + _logger.debug( "Store data for ir.attachment #%d" % ira.id) store_fname = None fname = None if boo.type == 'filestore': @@ -563,13 +562,13 @@ class document_storage(osv.osv): fp.write(data) finally: fp.close() - self._doclog.debug( "Saved data to %s" % fname) + _logger.debug( "Saved data to %s" % fname) filesize = len(data) # os.stat(fname).st_size # TODO Here, an old file would be left hanging. 
except Exception, e: - self._doclog.warning( "Couldn't save data to %s", path, exc_info=True) + _logger.warning( "Couldn't save data to %s", path, exc_info=True) raise except_orm(_('Error!'), str(e)) elif boo.type == 'db': filesize = len(data) @@ -592,12 +591,12 @@ class document_storage(osv.osv): fp.write(data) finally: fp.close() - self._doclog.debug("Saved data to %s", fname) + _logger.debug("Saved data to %s", fname) filesize = len(data) # os.stat(fname).st_size store_fname = os.path.join(*npath) # TODO Here, an old file would be left hanging. except Exception,e : - self._doclog.warning("Couldn't save data:", exc_info=True) + _logger.warning("Couldn't save data:", exc_info=True) raise except_orm(_('Error!'), str(e)) elif boo.type == 'virtual': @@ -616,7 +615,7 @@ class document_storage(osv.osv): mime, icont = cntIndex.doIndex(data, ira.datas_fname, ira.file_type or None, fname) except Exception: - self._doclog.debug('Cannot index file:', exc_info=True) + _logger.debug('Cannot index file:', exc_info=True) pass try: @@ -633,7 +632,7 @@ class document_storage(osv.osv): file_node.content_type = mime return True except Exception, e : - self._doclog.warning("Couldn't save data:", exc_info=True) + _logger.warning("Couldn't save data:", exc_info=True) # should we really rollback once we have written the actual data? 
# at the db case (only), that rollback would be safe raise except_orm(_('Error at doc write!'), str(e)) @@ -671,9 +670,9 @@ class document_storage(osv.osv): try: os.unlink(fname) except Exception: - self._doclog.warning("Could not remove file %s, please remove manually.", fname, exc_info=True) + _logger.warning("Could not remove file %s, please remove manually.", fname, exc_info=True) else: - self._doclog.warning("Unknown unlink key %s" % ktype) + _logger.warning("Unknown unlink key %s" % ktype) return True @@ -703,9 +702,9 @@ class document_storage(osv.osv): fname = ira.store_fname if not fname: - self._doclog.warning("Trying to rename a non-stored file") + _logger.warning("Trying to rename a non-stored file") if fname != os.path.join(*npath): - self._doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(npath)) + _logger.warning("inconsistency in realstore: %s != %s" , fname, repr(npath)) oldpath = os.path.join(path, npath[-1]) newpath = os.path.join(path, new_name) @@ -743,7 +742,7 @@ class document_storage(osv.osv): break par = par.parent_id if file_node.storage_id != psto: - self._doclog.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name) + _logger.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name) raise NotImplementedError('Cannot move files between storage media') if sbro.type in ('filestore', 'db', 'db64'): @@ -756,9 +755,9 @@ class document_storage(osv.osv): fname = ira.store_fname if not fname: - self._doclog.warning("Trying to rename a non-stored file") + _logger.warning("Trying to rename a non-stored file") if fname != os.path.join(*opath): - self._doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(opath)) + _logger.warning("inconsistency in realstore: %s != %s" , fname, repr(opath)) oldpath = os.path.join(path, opath[-1]) @@ -766,12 +765,12 @@ class document_storage(osv.osv): npath = filter(lambda x: x is not None, npath) newdir = os.path.join(*npath) 
if not os.path.isdir(newdir): - self._doclog.debug("Must create dir %s", newdir) + _logger.debug("Must create dir %s", newdir) os.makedirs(newdir) npath.append(opath[-1]) newpath = os.path.join(*npath) - self._doclog.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath) + _logger.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath) shutil.move(oldpath, newpath) store_path = npath[1:] + [opath[-1],] diff --git a/addons/document/nodes.py b/addons/document/nodes.py index ee89f3d4c6c..15d23f11760 100644 --- a/addons/document/nodes.py +++ b/addons/document/nodes.py @@ -42,7 +42,7 @@ from StringIO import StringIO # root: if we are at the first directory of a ressource # -logger = logging.getLogger('doc2.nodes') +_logger = logging.getLogger(__name__) def _str2time(cre): """ Convert a string with time representation (from db) into time (float) @@ -328,7 +328,7 @@ class node_class(object): if self.DAV_M_NS.has_key(ns): prefix = self.DAV_M_NS[ns] else: - logger.debug('No namespace: %s ("%s")',ns, prop) + _logger.debug('No namespace: %s ("%s")',ns, prop) return None mname = prefix + "_" + prop.replace('-','_') @@ -341,7 +341,7 @@ class node_class(object): r = m(cr) return r except AttributeError: - logger.debug('Property %s not supported' % prop, exc_info=True) + _logger.debug('Property %s not supported' % prop, exc_info=True) return None def get_dav_resourcetype(self, cr): @@ -384,13 +384,13 @@ class node_class(object): def create_child(self, cr, path, data=None): """ Create a regular file under this node """ - logger.warning("Attempted to create a file under %r, not possible.", self) + _logger.warning("Attempted to create a file under %r, not possible.", self) raise IOError(errno.EPERM, "Not allowed to create files here") def create_child_collection(self, cr, objname): """ Create a child collection (directory) under self """ - logger.warning("Attempted to create a collection under %r, not possible.", self) + _logger.warning("Attempted to 
create a collection under %r, not possible.", self) raise IOError(errno.EPERM, "Not allowed to create folders here") def rm(self, cr): @@ -725,7 +725,7 @@ class node_dir(node_database): assert self.parent if self.parent != ndir_node: - logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node) + _logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node) raise NotImplementedError('Cannot move dir to another dir') ret = {} @@ -998,7 +998,7 @@ class node_res_obj(node_class): def get_dav_eprop_DEPR(self, cr, ns, prop): # Deprecated! if ns != 'http://groupdav.org/' or prop != 'resourcetype': - logger.warning("Who asked for %s:%s?" % (ns, prop)) + _logger.warning("Who asked for %s:%s?" % (ns, prop)) return None cntobj = self.context._dirobj.pool.get('document.directory.content') uid = self.context.uid @@ -1328,7 +1328,7 @@ class node_file(node_class): ret = {} if ndir_node and self.parent != ndir_node: if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)): - logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node) + _logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node) raise NotImplementedError('Cannot move files between dynamic folders') if not ndir_obj: @@ -1473,7 +1473,7 @@ class nodefd_content(StringIO, node_descriptor): elif mode == 'a': StringIO.__init__(self, None) else: - logging.getLogger('document.content').error("Incorrect mode %s specified", mode) + _logger.error("Incorrect mode %s specified", mode) raise IOError(errno.EINVAL, "Invalid file mode") self.mode = mode @@ -1499,7 +1499,7 @@ class nodefd_content(StringIO, node_descriptor): raise NotImplementedError cr.commit() except Exception: - logging.getLogger('document.content').exception('Cannot update db content #%d for close:', par.cnt_id) + _logger.exception('Cannot update db content #%d for close:', par.cnt_id) raise finally: cr.close() @@ -1526,7 +1526,7 @@ class nodefd_static(StringIO, 
node_descriptor): elif mode == 'a': StringIO.__init__(self, None) else: - logging.getLogger('document.nodes').error("Incorrect mode %s specified", mode) + _logger.error("Incorrect mode %s specified", mode) raise IOError(errno.EINVAL, "Invalid file mode") self.mode = mode @@ -1551,7 +1551,7 @@ class nodefd_static(StringIO, node_descriptor): raise NotImplementedError cr.commit() except Exception: - logging.getLogger('document.nodes').exception('Cannot update db content #%d for close:', par.cnt_id) + _logger.exception('Cannot update db content #%d for close:', par.cnt_id) raise finally: cr.close() diff --git a/addons/document/std_index.py b/addons/document/std_index.py index e153d018bf0..a3059bd8a76 100644 --- a/addons/document/std_index.py +++ b/addons/document/std_index.py @@ -26,6 +26,8 @@ import odt2txt import sys, zipfile, xml.dom.minidom import logging +_logger = logging.getLogger(__name__) + def _to_unicode(s): try: return s.decode('utf-8') @@ -101,9 +103,8 @@ class DocIndex(indexer): (data, _) = pop.communicate() return _to_unicode(data) except OSError: - logger = logging.getLogger('document.DocIndex') - logger.warn("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0]) - logger.debug("Trace of the failed file indexing attempt: ", exc_info=True) + _logger.warning("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. 
Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0]) +        _logger.debug("Trace of the failed file indexing attempt: ", exc_info=True) return False cntIndex.register(DocIndex()) diff --git a/addons/document_ftp/ftpserver/__init__.py b/addons/document_ftp/ftpserver/__init__.py index 2e09a938a05..11b041e074b 100644 --- a/addons/document_ftp/ftpserver/__init__.py +++ b/addons/document_ftp/ftpserver/__init__.py @@ -19,6 +19,7 @@ # ############################################################################## +import logging import threading import ftpserver import authorizer @@ -26,6 +27,8 @@ import abstracted_fs import netsvc from tools import config +_logger = logging.getLogger(__name__) + def start_server(): HOST = config.get('ftp_server_host', '127.0.0.1') PORT = int(config.get('ftp_server_port', '8021')) @@ -36,8 +39,7 @@ def start_server(): class ftp_server(threading.Thread): def log(self, level, message): - logger = netsvc.Logger() - logger.notifyChannel('FTP', level, message) + _logger.log(level, message) def run(self): autho = authorizer.authorizer() @@ -56,9 +58,9 @@ def start_server(): ftpd.serve_forever() if HOST.lower() == 'none': - netsvc.Logger().notifyChannel("FTP", netsvc.LOG_INFO, "\n Server FTP Not Started\n") + _logger.info("\n Server FTP Not Started\n") else: - netsvc.Logger().notifyChannel("FTP", netsvc.LOG_INFO, "\n Serving FTP on %s:%s\n" % (HOST, PORT)) + _logger.info("\n Serving FTP on %s:%s\n", HOST, PORT) ds = ftp_server() ds.daemon = True ds.start() diff --git a/addons/edi/__init__.py b/addons/edi/__init__.py index b4a80c0f1dd..70723aea361 100644 --- a/addons/edi/__init__.py +++ b/addons/edi/__init__.py @@ -28,7 +28,7 @@ from models.edi import EDIMixin, edi_document try: import controllers except ImportError: - logging.getLogger('init.load').warn( + logging.getLogger(__name__).warning( """Could not load openerp-web section of EDI, EDI will not behave correctly To fix, launch openerp-web in embedded mode""") diff --git 
a/addons/edi/edi_service.py b/addons/edi/edi_service.py index 074720a03e9..f48cc60950a 100644 --- a/addons/edi/edi_service.py +++ b/addons/edi/edi_service.py @@ -23,7 +23,7 @@ import logging import netsvc import openerp -_logger = logging.getLogger('edi.service') +_logger = logging.getLogger(__name__) class edi(netsvc.ExportService): diff --git a/addons/edi/models/edi.py b/addons/edi/models/edi.py index c573115618e..6f5a69af470 100644 --- a/addons/edi/models/edi.py +++ b/addons/edi/models/edi.py @@ -36,6 +36,8 @@ from osv import osv,fields,orm from tools.translate import _ from tools.safe_eval import safe_eval as eval +_logger = logging.getLogger(__name__) + EXTERNAL_ID_PATTERN = re.compile(r'^([^.:]+)(?::([^.]+))?\.(\S+)$') EDI_VIEW_WEB_URL = '%s/edi/view?debug=1&db=%s&token=%s' EDI_PROTOCOL_VERSION = 1 # arbitrary ever-increasing version number @@ -72,8 +74,6 @@ def last_update_for(record): return record_log.get('write_date') or record_log.get('create_date') or False return False -_logger = logging.getLogger('edi') - class edi_document(osv.osv): _name = 'edi.document' _description = 'EDI Document' @@ -682,4 +682,4 @@ class EDIMixin(object): return record_id -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: \ No newline at end of file +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/edi/models/res_partner.py b/addons/edi/models/res_partner.py index 7c40a5c7354..6ec511269b3 100644 --- a/addons/edi/models/res_partner.py +++ b/addons/edi/models/res_partner.py @@ -25,6 +25,8 @@ from edi import EDIMixin from openerp import SUPERUSER_ID from tools.translate import _ +_logger = logging.getLogger(__name__) + RES_PARTNER_ADDRESS_EDI_STRUCT = { 'name': True, 'email': True, @@ -72,7 +74,7 @@ class res_partner_address(osv.osv, EDIMixin): code, label = 'edi_generic', 'Generic Bank Type (auto-created for EDI)' bank_code_ids = res_partner_bank_type.search(cr, uid, [('code','=',code)], context=context) if not bank_code_ids: - 
logging.getLogger('edi.res_partner').info('Normal bank account type is missing, creating ' + _logger.info('Normal bank account type is missing, creating ' 'a generic bank account type for EDI.') self.res_partner_bank_type.create(cr, SUPERUSER_ID, {'name': label, 'code': label}) @@ -98,7 +100,7 @@ class res_partner_address(osv.osv, EDIMixin): bank_name, ext_bank_id, context=import_ctx) except osv.except_osv: # failed to import it, try again with unrestricted default type - logging.getLogger('edi.res_partner').warning('Failed to import bank account using' + _logger.warning('Failed to import bank account using' 'bank type: %s, ignoring', import_ctx['default_state'], exc_info=True) return address_id diff --git a/addons/fetchmail/fetchmail.py b/addons/fetchmail/fetchmail.py index d1e23f49166..51c6f147cff 100644 --- a/addons/fetchmail/fetchmail.py +++ b/addons/fetchmail/fetchmail.py @@ -39,7 +39,7 @@ from osv import osv, fields import tools from tools.translate import _ -logger = logging.getLogger('fetchmail') +_logger = logging.getLogger(__name__) class fetchmail_server(osv.osv): """Incoming POP/IMAP mail server account""" @@ -151,7 +151,7 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS connection = server.connect() server.write({'state':'done'}) except Exception, e: - logger.exception("Failed to connect to %s server %s", server.type, server.name) + _logger.exception("Failed to connect to %s server %s", server.type, server.name) raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e)) finally: try: @@ -177,7 +177,7 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS mail_thread = self.pool.get('mail.thread') action_pool = self.pool.get('ir.actions.server') for server in self.browse(cr, uid, ids, context=context): - logger.info('start checking for new emails on %s server %s', server.type, server.name) + _logger.info('start checking for new emails on %s 
server %s', server.type, server.name) context.update({'fetchmail_server_id': server.id, 'server_type': server.type}) count = 0 if server.type == 'imap': @@ -196,9 +196,9 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS imap_server.store(num, '+FLAGS', '\\Seen') cr.commit() count += 1 - logger.info("fetched/processed %s email(s) on %s server %s", count, server.type, server.name) + _logger.info("fetched/processed %s email(s) on %s server %s", count, server.type, server.name) except Exception, e: - logger.exception("Failed to fetch mail from %s server %s", server.type, server.name) + _logger.exception("Failed to fetch mail from %s server %s", server.type, server.name) finally: if imap_server: imap_server.close() @@ -220,9 +220,9 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids':[res_id]}) pop_server.dele(num) cr.commit() - logger.info("fetched/processed %s email(s) on %s server %s", numMsgs, server.type, server.name) + _logger.info("fetched/processed %s email(s) on %s server %s", numMsgs, server.type, server.name) except Exception, e: - logger.exception("Failed to fetch mail from %s server %s", server.type, server.name) + _logger.exception("Failed to fetch mail from %s server %s", server.type, server.name) finally: if pop_server: pop_server.quit() diff --git a/addons/import_base/import_framework.py b/addons/import_base/import_framework.py index 44243aa952c..2ae51fde6b5 100644 --- a/addons/import_base/import_framework.py +++ b/addons/import_base/import_framework.py @@ -29,11 +29,11 @@ import datetime import logging import StringIO import traceback + +_logger = logging.getLogger(__name__) + pp = pprint.PrettyPrinter(indent=4) - - - class import_framework(Thread): """ This class should be extends, @@ -60,7 +60,6 @@ class import_framework(Thread): self.context = context or {} self.email = email_to_notify self.table_list = 
[] - self.logger = logging.getLogger(module_name) self.initialize() """ @@ -165,7 +164,7 @@ class import_framework(Thread): data_i is a map external field_name => value and each data_i have a external id => in data_id['id'] """ - self.logger.info(' Importing %s into %s' % (table, model)) + _logger.info(' Importing %s into %s', table, model) if not datas: return (0, 'No data found') mapping['id'] = 'id_new' @@ -188,7 +187,7 @@ class import_framework(Thread): model_obj = self.obj.pool.get(model) if not model_obj: raise ValueError(_("%s is not a valid model name") % model) - self.logger.debug(_(" fields imported : ") + str(fields)) + _logger.debug(_(" fields imported : ") + str(fields)) (p, r, warning, s) = model_obj.import_data(self.cr, self.uid, fields, res, mode='update', current_module=self.module_name, noupdate=True, context=self.context) for (field, field_name) in self_dependencies: self._import_self_dependencies(model_obj, field, datas) @@ -431,9 +430,9 @@ class import_framework(Thread): 'auto_delete' : True}) email_obj.send(self.cr, self.uid, [email_id]) if error: - self.logger.error(_("Import failed due to an unexpected error")) + _logger.error(_("Import failed due to an unexpected error")) else: - self.logger.info(_("Import finished, notification email sended")) + _logger.info(_("Import finished, notification email sended")) def get_email_subject(self, result, error=False): """ diff --git a/addons/l10n_be_invoice_bba/invoice.py b/addons/l10n_be_invoice_bba/invoice.py index 997036c0cec..937e49e83a8 100644 --- a/addons/l10n_be_invoice_bba/invoice.py +++ b/addons/l10n_be_invoice_bba/invoice.py @@ -1,219 +1,218 @@ -# -*- encoding: utf-8 -*- -############################################################################## -# -# OpenERP, Open Source Management Solution -# -# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved. 
-# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . -# -############################################################################## - -import re, time, random -from osv import fields, osv -from tools.translate import _ -import netsvc -logger=netsvc.Logger() - -""" -account.invoice object: - - Add support for Belgian structured communication - - Rename 'reference' field labels to 'Communication' -""" - -class account_invoice(osv.osv): - _inherit = 'account.invoice' - - def _get_reference_type(self, cursor, user, context=None): - """Add BBA Structured Communication Type and change labels from 'reference' into 'communication' """ - res = super(account_invoice, self)._get_reference_type(cursor, user, - context=context) - res[[i for i,x in enumerate(res) if x[0] == 'none'][0]] = ('none', 'Free Communication') - res.append(('bba', 'BBA Structured Communication')) - #logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING, 'reference_type = %s' %res ) - return res - - def check_bbacomm(self, val): - supported_chars = '0-9+*/ ' - pattern = re.compile('[^' + supported_chars + ']') - if pattern.findall(val or ''): - return False - bbacomm = re.sub('\D', '', val or '') - if len(bbacomm) == 12: - base = int(bbacomm[:10]) - mod = base % 97 or 97 - if mod == int(bbacomm[-2:]): - return True - return False - - def _check_communication(self, cr, uid, ids): - for inv in self.browse(cr, uid, ids): - 
if inv.reference_type == 'bba': - return self.check_bbacomm(inv.reference) - return True - - def onchange_partner_id(self, cr, uid, ids, type, partner_id, - date_invoice=False, payment_term=False, partner_bank_id=False, company_id=False): - result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id, - date_invoice, payment_term, partner_bank_id, company_id) -# reference_type = self.default_get(cr, uid, ['reference_type'])['reference_type'] -# logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING, 'partner_id %s' % partner_id) - reference = False - reference_type = 'none' - if partner_id: - if (type == 'out_invoice'): - reference_type = self.pool.get('res.partner').browse(cr, uid, partner_id).out_inv_comm_type - if reference_type: - algorithm = self.pool.get('res.partner').browse(cr, uid, partner_id).out_inv_comm_algorithm - if not algorithm: - algorithm = 'random' - reference = self.generate_bbacomm(cr, uid, ids, type, reference_type, algorithm, partner_id, '')['value']['reference'] - res_update = { - 'reference_type': reference_type or 'none', - 'reference': reference, - } - result['value'].update(res_update) - return result - - def generate_bbacomm(self, cr, uid, ids, type, reference_type, algorithm, partner_id, reference): - partner_obj = self.pool.get('res.partner') - reference = reference or '' - if (type == 'out_invoice'): - if reference_type == 'bba': - if not algorithm: - if partner_id: - algorithm = partner_obj.browse(cr, uid, partner_id).out_inv_comm_algorithm - if not algorithm: - if not algorithm: - algorithm = 'random' - if algorithm == 'date': - if not self.check_bbacomm(reference): - doy = time.strftime('%j') - year = time.strftime('%Y') - seq = '001' - seq_ids = self.search(cr, uid, - [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'), - ('reference', 'like', '+++%s/%s/%%' % (doy, year))], order='reference') - if seq_ids: - prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15]) - if 
prev_seq < 999: - seq = '%03d' % (prev_seq + 1) - else: - raise osv.except_osv(_('Warning!'), - _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \ - '\nPlease create manually a unique BBA Structured Communication.')) - bbacomm = doy + year + seq - base = int(bbacomm) - mod = base % 97 or 97 - reference = '+++%s/%s/%s%02d+++' % (doy, year, seq, mod) - elif algorithm == 'partner_ref': - if not self.check_bbacomm(reference): - partner_ref = self.pool.get('res.partner').browse(cr, uid, partner_id).ref - partner_ref_nr = re.sub('\D', '', partner_ref or '') - if (len(partner_ref_nr) < 3) or (len(partner_ref_nr) > 7): - raise osv.except_osv(_('Warning!'), - _('The Partner should have a 3-7 digit Reference Number for the generation of BBA Structured Communications!' \ - '\nPlease correct the Partner record.')) - else: - partner_ref_nr = partner_ref_nr.ljust(7, '0') - seq = '001' - seq_ids = self.search(cr, uid, - [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'), - ('reference', 'like', '+++%s/%s/%%' % (partner_ref_nr[:3], partner_ref_nr[3:]))], order='reference') - if seq_ids: - prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15]) - if prev_seq < 999: - seq = '%03d' % (prev_seq + 1) - else: - raise osv.except_osv(_('Warning!'), - _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' 
\ - '\nPlease create manually a unique BBA Structured Communication.')) - bbacomm = partner_ref_nr + seq - base = int(bbacomm) - mod = base % 97 or 97 - reference = '+++%s/%s/%s%02d+++' % (partner_ref_nr[:3], partner_ref_nr[3:], seq, mod) - elif algorithm == 'random': - if not self.check_bbacomm(reference): - base = random.randint(1, 9999999999) - bbacomm = str(base).rjust(7, '0') - base = int(bbacomm) - mod = base % 97 or 97 - mod = str(mod).rjust(2, '0') - reference = '+++%s/%s/%s%s+++' % (bbacomm[:3], bbacomm[3:7], bbacomm[7:], mod) - else: - raise osv.except_osv(_('Error!'), - _("Unsupported Structured Communication Type Algorithm '%s' !" \ - "\nPlease contact your OpenERP support channel.") % algorithm) - return {'value': {'reference': reference}} - - def create(self, cr, uid, vals, context=None): - if vals.has_key('reference_type'): - reference_type = vals['reference_type'] - if reference_type == 'bba': - if vals.has_key('reference'): - bbacomm = vals['reference'] - else: - raise osv.except_osv(_('Warning!'), - _('Empty BBA Structured Communication!' \ - '\nPlease fill in a unique BBA Structured Communication.')) - if self.check_bbacomm(bbacomm): - reference = re.sub('\D', '', bbacomm) - vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++' - same_ids = self.search(cr, uid, - [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'), - ('reference', '=', vals['reference'])]) - if same_ids: - raise osv.except_osv(_('Warning!'), - _('The BBA Structured Communication has already been used!' 
\ - '\nPlease create manually a unique BBA Structured Communication.')) - return super(account_invoice, self).create(cr, uid, vals, context=context) - - def write(self, cr, uid, ids, vals, context={}): - if isinstance(ids, (int, long)): - ids = [ids] - for inv in self.browse(cr, uid, ids, context): - if vals.has_key('reference_type'): - reference_type = vals['reference_type'] - else: - reference_type = inv.reference_type or '' - if reference_type == 'bba': - if vals.has_key('reference'): - bbacomm = vals['reference'] - else: - bbacomm = inv.reference or '' - if self.check_bbacomm(bbacomm): - reference = re.sub('\D', '', bbacomm) - vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++' - same_ids = self.search(cr, uid, - [('id', '!=', inv.id), ('type', '=', 'out_invoice'), - ('reference_type', '=', 'bba'), ('reference', '=', vals['reference'])]) - if same_ids: - raise osv.except_osv(_('Warning!'), - _('The BBA Structured Communication has already been used!' \ - '\nPlease create manually a unique BBA Structured Communication.')) - return super(account_invoice, self).write(cr, uid, ids, vals, context) - - _columns = { - 'reference': fields.char('Communication', size=64, help="The partner reference of this invoice."), - 'reference_type': fields.selection(_get_reference_type, 'Communication Type', - required=True), - } - - _constraints = [ - (_check_communication, 'Invalid BBA Structured Communication !', ['Communication']), - ] - -account_invoice() +# -*- encoding: utf-8 -*- +############################################################################## +# +# OpenERP, Open Source Management Solution +# +# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved. 
+# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +############################################################################## + +import random +import re +import time + +from osv import fields, osv +from tools.translate import _ + +""" +account.invoice object: + - Add support for Belgian structured communication + - Rename 'reference' field labels to 'Communication' +""" + +class account_invoice(osv.osv): + _inherit = 'account.invoice' + + def _get_reference_type(self, cursor, user, context=None): + """Add BBA Structured Communication Type and change labels from 'reference' into 'communication' """ + res = super(account_invoice, self)._get_reference_type(cursor, user, + context=context) + res[[i for i,x in enumerate(res) if x[0] == 'none'][0]] = ('none', 'Free Communication') + res.append(('bba', 'BBA Structured Communication')) + return res + + def check_bbacomm(self, val): + supported_chars = '0-9+*/ ' + pattern = re.compile('[^' + supported_chars + ']') + if pattern.findall(val or ''): + return False + bbacomm = re.sub('\D', '', val or '') + if len(bbacomm) == 12: + base = int(bbacomm[:10]) + mod = base % 97 or 97 + if mod == int(bbacomm[-2:]): + return True + return False + + def _check_communication(self, cr, uid, ids): + for inv in self.browse(cr, uid, ids): + if inv.reference_type == 'bba': + return self.check_bbacomm(inv.reference) + return True + + def 
onchange_partner_id(self, cr, uid, ids, type, partner_id, + date_invoice=False, payment_term=False, partner_bank_id=False, company_id=False): + result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id, + date_invoice, payment_term, partner_bank_id, company_id) +# reference_type = self.default_get(cr, uid, ['reference_type'])['reference_type'] + reference = False + reference_type = 'none' + if partner_id: + if (type == 'out_invoice'): + reference_type = self.pool.get('res.partner').browse(cr, uid, partner_id).out_inv_comm_type + if reference_type: + algorithm = self.pool.get('res.partner').browse(cr, uid, partner_id).out_inv_comm_algorithm + if not algorithm: + algorithm = 'random' + reference = self.generate_bbacomm(cr, uid, ids, type, reference_type, algorithm, partner_id, '')['value']['reference'] + res_update = { + 'reference_type': reference_type or 'none', + 'reference': reference, + } + result['value'].update(res_update) + return result + + def generate_bbacomm(self, cr, uid, ids, type, reference_type, algorithm, partner_id, reference): + partner_obj = self.pool.get('res.partner') + reference = reference or '' + if (type == 'out_invoice'): + if reference_type == 'bba': + if not algorithm: + if partner_id: + algorithm = partner_obj.browse(cr, uid, partner_id).out_inv_comm_algorithm + if not algorithm: + if not algorithm: + algorithm = 'random' + if algorithm == 'date': + if not self.check_bbacomm(reference): + doy = time.strftime('%j') + year = time.strftime('%Y') + seq = '001' + seq_ids = self.search(cr, uid, + [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'), + ('reference', 'like', '+++%s/%s/%%' % (doy, year))], order='reference') + if seq_ids: + prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15]) + if prev_seq < 999: + seq = '%03d' % (prev_seq + 1) + else: + raise osv.except_osv(_('Warning!'), + _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications 
has been exceeded!' \ + '\nPlease create manually a unique BBA Structured Communication.')) + bbacomm = doy + year + seq + base = int(bbacomm) + mod = base % 97 or 97 + reference = '+++%s/%s/%s%02d+++' % (doy, year, seq, mod) + elif algorithm == 'partner_ref': + if not self.check_bbacomm(reference): + partner_ref = self.pool.get('res.partner').browse(cr, uid, partner_id).ref + partner_ref_nr = re.sub('\D', '', partner_ref or '') + if (len(partner_ref_nr) < 3) or (len(partner_ref_nr) > 7): + raise osv.except_osv(_('Warning!'), + _('The Partner should have a 3-7 digit Reference Number for the generation of BBA Structured Communications!' \ + '\nPlease correct the Partner record.')) + else: + partner_ref_nr = partner_ref_nr.ljust(7, '0') + seq = '001' + seq_ids = self.search(cr, uid, + [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'), + ('reference', 'like', '+++%s/%s/%%' % (partner_ref_nr[:3], partner_ref_nr[3:]))], order='reference') + if seq_ids: + prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15]) + if prev_seq < 999: + seq = '%03d' % (prev_seq + 1) + else: + raise osv.except_osv(_('Warning!'), + _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \ + '\nPlease create manually a unique BBA Structured Communication.')) + bbacomm = partner_ref_nr + seq + base = int(bbacomm) + mod = base % 97 or 97 + reference = '+++%s/%s/%s%02d+++' % (partner_ref_nr[:3], partner_ref_nr[3:], seq, mod) + elif algorithm == 'random': + if not self.check_bbacomm(reference): + base = random.randint(1, 9999999999) + bbacomm = str(base).rjust(7, '0') + base = int(bbacomm) + mod = base % 97 or 97 + mod = str(mod).rjust(2, '0') + reference = '+++%s/%s/%s%s+++' % (bbacomm[:3], bbacomm[3:7], bbacomm[7:], mod) + else: + raise osv.except_osv(_('Error!'), + _("Unsupported Structured Communication Type Algorithm '%s' !" 
\ + "\nPlease contact your OpenERP support channel.") % algorithm) + return {'value': {'reference': reference}} + + def create(self, cr, uid, vals, context=None): + if vals.has_key('reference_type'): + reference_type = vals['reference_type'] + if reference_type == 'bba': + if vals.has_key('reference'): + bbacomm = vals['reference'] + else: + raise osv.except_osv(_('Warning!'), + _('Empty BBA Structured Communication!' \ + '\nPlease fill in a unique BBA Structured Communication.')) + if self.check_bbacomm(bbacomm): + reference = re.sub('\D', '', bbacomm) + vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++' + same_ids = self.search(cr, uid, + [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'), + ('reference', '=', vals['reference'])]) + if same_ids: + raise osv.except_osv(_('Warning!'), + _('The BBA Structured Communication has already been used!' \ + '\nPlease create manually a unique BBA Structured Communication.')) + return super(account_invoice, self).create(cr, uid, vals, context=context) + + def write(self, cr, uid, ids, vals, context={}): + if isinstance(ids, (int, long)): + ids = [ids] + for inv in self.browse(cr, uid, ids, context): + if vals.has_key('reference_type'): + reference_type = vals['reference_type'] + else: + reference_type = inv.reference_type or '' + if reference_type == 'bba': + if vals.has_key('reference'): + bbacomm = vals['reference'] + else: + bbacomm = inv.reference or '' + if self.check_bbacomm(bbacomm): + reference = re.sub('\D', '', bbacomm) + vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++' + same_ids = self.search(cr, uid, + [('id', '!=', inv.id), ('type', '=', 'out_invoice'), + ('reference_type', '=', 'bba'), ('reference', '=', vals['reference'])]) + if same_ids: + raise osv.except_osv(_('Warning!'), + _('The BBA Structured Communication has already been used!' 
\ + '\nPlease create manually a unique BBA Structured Communication.')) + return super(account_invoice, self).write(cr, uid, ids, vals, context) + + _columns = { + 'reference': fields.char('Communication', size=64, help="The partner reference of this invoice."), + 'reference_type': fields.selection(_get_reference_type, 'Communication Type', + required=True), + } + + _constraints = [ + (_check_communication, 'Invalid BBA Structured Communication !', ['Communication']), + ] + +account_invoice() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/l10n_be_invoice_bba/partner.py b/addons/l10n_be_invoice_bba/partner.py index 04a51c3a6ab..4cb6079e0a2 100644 --- a/addons/l10n_be_invoice_bba/partner.py +++ b/addons/l10n_be_invoice_bba/partner.py @@ -21,11 +21,10 @@ # ############################################################################## -from osv import fields, osv import time + +from osv import fields, osv from tools.translate import _ -import netsvc -logger=netsvc.Logger() class res_partner(osv.osv): """ add field to indicate default 'Communication Type' on customer invoices """ diff --git a/addons/l10n_multilang/l10n_multilang.py b/addons/l10n_multilang/l10n_multilang.py index c4130f2761d..331ec12d76f 100644 --- a/addons/l10n_multilang/l10n_multilang.py +++ b/addons/l10n_multilang/l10n_multilang.py @@ -19,11 +19,13 @@ # ############################################################################## -from osv import fields, osv +import logging import os + +from osv import fields, osv from tools.translate import _ -import netsvc -logger=netsvc.Logger() + +_logger = logging.getLogger(__name__) class wizard_multi_charts_accounts(osv.osv_memory): """ @@ -80,8 +82,9 @@ class wizard_multi_charts_accounts(osv.osv_memory): if context.get('lang') == lang: self.pool.get(out_obj._name).write(cr, uid, out_ids[j], {in_field: value[in_id]}) else: - logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING, - 'Language: %s. 
Translation from template: there is no translation available for %s!' %(lang, src[in_id]))#out_obj._name)) + _logger.warning( + 'Language: %s. Translation from template: there is no translation available for %s!', + lang, src[in_id]) return True def execute(self, cr, uid, ids, context=None): diff --git a/addons/mail/mail_message.py b/addons/mail/mail_message.py index 1693397f6fa..b2fcd93c779 100644 --- a/addons/mail/mail_message.py +++ b/addons/mail/mail_message.py @@ -34,7 +34,7 @@ from osv import osv from osv import fields from tools.translate import _ -_logger = logging.getLogger('mail') +_logger = logging.getLogger(__name__) def format_date_tz(date, tz=None): if not date: diff --git a/addons/mail/mail_thread.py b/addons/mail/mail_thread.py index 70b536296bf..7317f954066 100644 --- a/addons/mail/mail_thread.py +++ b/addons/mail/mail_thread.py @@ -25,14 +25,11 @@ import base64 import email from email.utils import parsedate -import logging import xmlrpclib from osv import osv, fields from tools.translate import _ from mail_message import decode, to_email -_logger = logging.getLogger('mail') - class mail_thread(osv.osv): '''Mixin model, meant to be inherited by any model that needs to act as a discussion topic on which messages can be attached. 
diff --git a/addons/project_messages/project_messages.py b/addons/project_messages/project_messages.py index 93f83d62f4a..fa0b34a0c47 100644 --- a/addons/project_messages/project_messages.py +++ b/addons/project_messages/project_messages.py @@ -20,14 +20,12 @@ ############################################################################## from osv import fields, osv -import netsvc class messages(osv.osv): """ Message from one user to another within a project """ _name = 'project.messages' - logger = netsvc.Logger() _columns = { 'create_date': fields.datetime('Creation Date', readonly=True), diff --git a/addons/report_webkit/webkit_report.py b/addons/report_webkit/webkit_report.py index d3057e5a1ee..a535e54d005 100644 --- a/addons/report_webkit/webkit_report.py +++ b/addons/report_webkit/webkit_report.py @@ -41,7 +41,6 @@ from mako.template import Template from mako.lookup import TemplateLookup from mako import exceptions -import netsvc import pooler from report_helper import WebKitHelper from report.report_sxw import * @@ -50,7 +49,7 @@ import tools from tools.translate import _ from osv.osv import except_osv -logger = logging.getLogger('report_webkit') +_logger = logging.getLogger(__name__) def mako_template(text): """Build a Mako template. 
@@ -248,7 +247,7 @@ class WebKitParser(report_sxw): htmls.append(html) except Exception, e: msg = exceptions.text_error_template().render() - logger.error(msg) + _logger.error(msg) raise except_osv(_('Webkit render'), msg) else: try : @@ -259,7 +258,7 @@ class WebKitParser(report_sxw): htmls.append(html) except Exception, e: msg = exceptions.text_error_template().render() - logger.error(msg) + _logger.error(msg) raise except_osv(_('Webkit render'), msg) head_mako_tpl = mako_template(header) try : @@ -281,7 +280,7 @@ class WebKitParser(report_sxw): **self.parser_instance.localcontext) except: msg = exceptions.text_error_template().render() - logger.error(msg) + _logger.error(msg) raise except_osv(_('Webkit render'), msg) if report_xml.webkit_debug : try : @@ -292,7 +291,7 @@ class WebKitParser(report_sxw): **self.parser_instance.localcontext) except Exception, e: msg = exceptions.text_error_template().render() - logger.error(msg) + _logger.error(msg) raise except_osv(_('Webkit render'), msg) return (deb, 'html') bin = self.get_lib(cursor, uid, company.id) diff --git a/addons/stock/stock.py b/addons/stock/stock.py index e9928eb0aa7..24467199fc1 100644 --- a/addons/stock/stock.py +++ b/addons/stock/stock.py @@ -31,6 +31,7 @@ import tools import decimal_precision as dp import logging +_logger = logging.getLogger(__name__) #---------------------------------------------------------- # Incoterms @@ -408,9 +409,8 @@ class stock_location(osv.osv): # so we ROLLBACK to the SAVEPOINT to restore the transaction to its earlier # state, we return False as if the products were not available, and log it: cr.execute("ROLLBACK TO stock_location_product_reserve") - logger = logging.getLogger('stock.location') - logger.warn("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. 
Detailed error available at DEBUG level.", product_qty, product_id) - logger.debug("Trace of the failed product reservation attempt: ", exc_info=True) + _logger.warning("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id) + _logger.debug("Trace of the failed product reservation attempt: ", exc_info=True) return False # XXX TODO: rewrite this with one single query, possibly even the quantity conversion From 7b45ea1d88dfe415abcc41345a103acd65ab1450 Mon Sep 17 00:00:00 2001 From: Vo Minh Thu Date: Wed, 25 Jan 2012 11:06:08 +0100 Subject: [PATCH 5/7] [FIX] account_code: forgot import logging. bzr revid: vmt@openerp.com-20120125100608-ny79sc3tp1d9sak2 --- addons/account_coda/wizard/account_coda_import.py | 1 + 1 file changed, 1 insertion(+) diff --git a/addons/account_coda/wizard/account_coda_import.py b/addons/account_coda/wizard/account_coda_import.py index ac9a623e2fc..b7e7eb5eb10 100644 --- a/addons/account_coda/wizard/account_coda_import.py +++ b/addons/account_coda/wizard/account_coda_import.py @@ -21,6 +21,7 @@ ############################################################################## import base64 +import logging import re from sys import exc_info import time From e582a6a893a96e5ce29e6057ee6467b7c63be482 Mon Sep 17 00:00:00 2001 From: Vo Minh Thu Date: Wed, 25 Jan 2012 13:45:26 +0100 Subject: [PATCH 6/7] [IMP] logging: use logging instead of netsvc, use module __name__ instead of something else. 
bzr revid: vmt@openerp.com-20120125124526-nqaaw1rfmy8qa9of --- addons/auth_openid/controllers/main.py | 6 +-- addons/base_crypt/crypt.py | 6 ++- .../pylint_test/pylint_test.py | 7 ++- .../speed_test/speed_test.py | 8 ++- .../workflow_test/workflow_test.py | 9 ++-- addons/base_vat/base_vat.py | 4 +- addons/caldav/caldav_node.py | 17 +++--- addons/caldav/calendar.py | 9 ++-- addons/caldav/calendar_collection.py | 5 +- addons/crm/crm_meeting.py | 4 +- .../document_ftp/ftpserver/abstracted_fs.py | 16 +++--- addons/document_webdav/test_davclient.py | 52 +++++++++---------- addons/email_template/email_template.py | 6 ++- addons/hr/hr.py | 4 +- addons/import_sugarcrm/sugar.py | 4 +- addons/portal/ir_ui_menu.py | 5 +- addons/portal/wizard/portal_wizard.py | 4 +- addons/portal/wizard/share_wizard.py | 10 ++-- addons/share/wizard/share_wizard.py | 29 ++++++----- addons/stock_planning/stock_planning.py | 3 +- addons/users_ldap/users_ldap.py | 13 +++-- 21 files changed, 121 insertions(+), 100 deletions(-) diff --git a/addons/auth_openid/controllers/main.py b/addons/auth_openid/controllers/main.py index c95189b7736..dc7c7b21ccd 100644 --- a/addons/auth_openid/controllers/main.py +++ b/addons/auth_openid/controllers/main.py @@ -39,11 +39,7 @@ from openid.extensions import ax, sreg from .. import utils - - -_logger = logging.getLogger('web.auth_openid') -oidutil.log = logging.getLogger('openid').debug - +oidutil.log = logging.getLogger(__name__ + '(oidutil)').debug class GoogleAppsAwareConsumer(consumer.GenericConsumer): def complete(self, message, endpoint, return_to): diff --git a/addons/base_crypt/crypt.py b/addons/base_crypt/crypt.py index c2fd0a25ef8..6d74e482044 100644 --- a/addons/base_crypt/crypt.py +++ b/addons/base_crypt/crypt.py @@ -36,6 +36,7 @@ # Boston, MA 02111-1307 # USA. 
+import logging from random import seed, sample from string import ascii_letters, digits from osv import fields,osv @@ -43,6 +44,8 @@ import pooler from tools.translate import _ from service import security +_logger = logging.getLogger(__name__) + magic_md5 = '$1$' def gen_salt( length=8, symbols=ascii_letters + digits ): @@ -179,8 +182,7 @@ class users(osv.osv): cr = pooler.get_db(db).cursor() return self._login(cr, db, login, password) except Exception: - import logging - logging.getLogger('netsvc').exception('Could not authenticate') + _logger.exception('Could not authenticate') return Exception('Access Denied') finally: if cr is not None: diff --git a/addons/base_module_quality/pylint_test/pylint_test.py b/addons/base_module_quality/pylint_test/pylint_test.py index 102f93c469e..bfa9e7d1f16 100644 --- a/addons/base_module_quality/pylint_test/pylint_test.py +++ b/addons/base_module_quality/pylint_test/pylint_test.py @@ -19,11 +19,14 @@ # ############################################################################## +import logging import os import addons from tools.translate import _ from base_module_quality import base_module_quality +_logger = logging.getLogger(__name__) + class quality_test(base_module_quality.abstract_quality_check): def __init__(self): @@ -57,7 +60,7 @@ class quality_test(base_module_quality.abstract_quality_check): res = os.popen('pylint --rcfile=' + config_file_path + ' ' + file_path).read() except Exception: self.error = True - self.log.exception("Cannot run pylint test for %s", file_path) + _logger.exception("Cannot run pylint test for %s", file_path) self.result += _("Error. Is pylint correctly installed? 
(http://pypi.python.org/pypi/pylint)")+"\n" return None count += 1 @@ -66,7 +69,7 @@ class quality_test(base_module_quality.abstract_quality_check): score += float(scr) dict_py[file_py] = [file_py, scr] except Exception: - self.log.warning("Cannot parse pylint result", exc_info=True) + _logger.warning("Cannot parse pylint result", exc_info=True) score += 0 dict_py[file_py] = [file_py, _("Unable to parse the result. Check the details.")] replace_string = '' diff --git a/addons/base_module_quality/speed_test/speed_test.py b/addons/base_module_quality/speed_test/speed_test.py index 21ddee92ea1..26f3ea176b1 100644 --- a/addons/base_module_quality/speed_test/speed_test.py +++ b/addons/base_module_quality/speed_test/speed_test.py @@ -19,9 +19,13 @@ # ############################################################################## +import logging + from tools.translate import _ import pooler +_logger = logging.getLogger(__name__) + from base_module_quality import base_module_quality class CounterCursor(object): @@ -77,7 +81,7 @@ This test checks the speed of the module. Note that at least 5 demo data is need try: obj_ids = self.get_ids(cr, uid, obj_list) except Exception,e: - self.log.warning("Cannot get ids:", exc_info=True) + _logger.warning("Cannot get ids:", exc_info=True) obj_ids= {} self.result_details += e.message result_dict = {} @@ -111,7 +115,7 @@ This test checks the speed of the module. 
Note that at least 5 demo data is need
                 code_size_complexity = ccr.count
             except Exception, e:
-                self.log.warning('Error in read method', exc_info=True)
+                _logger.warning('Error in read method', exc_info=True)
                 list2 = [obj, _("Error in Read method")]
                 speed_list = [obj, size, code_base_complexity, code_half_complexity, code_size_complexity, _("Error in Read method: %s") % e]
             else:
diff --git a/addons/base_module_quality/workflow_test/workflow_test.py b/addons/base_module_quality/workflow_test/workflow_test.py
index 11f7bcf12c3..1ce3f56b83a 100644
--- a/addons/base_module_quality/workflow_test/workflow_test.py
+++ b/addons/base_module_quality/workflow_test/workflow_test.py
@@ -19,6 +19,7 @@
 #
 ##############################################################################
 
+import logging
 import xml.dom.minidom
 
 import tools
@@ -26,6 +27,8 @@ from tools.translate import _
 from base_module_quality import base_module_quality
 import pooler
 
+_logger = logging.getLogger(__name__)
+
 class quality_test(base_module_quality.abstract_quality_check):
 
     def __init__(self):
@@ -80,8 +83,8 @@ class quality_test(base_module_quality.abstract_quality_check):
         #Activity of workflow checking...
activity_ids = wkf_activity_obj.search(cr, uid, [('wkf_id', 'in', wkf_ids)]) activities = wkf_activity_obj.browse(cr, uid, activity_ids) - self.log.debug("quality test: wkf_ids = %r", wkf_ids) - self.log.debug("quality test: activity_ids = %r", activity_ids) + _logger.debug("quality test: wkf_ids = %r", wkf_ids) + _logger.debug("quality test: activity_ids = %r", activity_ids) for activity in activities: if activity.flow_start: activity_chk[activity.wkf_id.osv]['start'] = 'ok' @@ -155,4 +158,4 @@ class quality_test(base_module_quality.abstract_quality_check): count = self.count_button(node, count) return count -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: \ No newline at end of file +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/addons/base_vat/base_vat.py b/addons/base_vat/base_vat.py index 02471f67442..8b9b9fd26c1 100644 --- a/addons/base_vat/base_vat.py +++ b/addons/base_vat/base_vat.py @@ -19,7 +19,6 @@ # ############################################################################## -import logging import string import datetime import re @@ -27,7 +26,8 @@ import re try: import vatnumber except ImportError: - logging.getLogger('base_vat').warning("VAT validation partially unavailable because the `vatnumber` Python library cannot be found. " + import logging + logging.getLogger(__name__).warning("VAT validation partially unavailable because the `vatnumber` Python library cannot be found. 
" "Install it to support more countries, for example with `easy_install vatnumber`.") vatnumber = None diff --git a/addons/caldav/caldav_node.py b/addons/caldav/caldav_node.py index 71e9bdc2468..edd0a742467 100644 --- a/addons/caldav/caldav_node.py +++ b/addons/caldav/caldav_node.py @@ -29,6 +29,8 @@ try: except ImportError: from document.dict_tools import dict_merge2 +_logger = logging.getLogger(__name__) + # TODO: implement DAV-aware errors, inherit from IOError # Assuming that we have set global properties right, we mark *all* @@ -223,7 +225,6 @@ class node_calendar(nodes.node_class): res = [] if not filters: return res - _log = logging.getLogger('caldav.query') if filters.localName == 'calendar-query': res = [] for filter_child in filters.childNodes: @@ -245,27 +246,27 @@ class node_calendar(nodes.node_class): for cfe in vevent_filter.childNodes: if cfe.localName == 'time-range': if cfe.getAttribute('start'): - _log.warning("Ignore start.. ") + _logger.warning("Ignore start.. ") # No, it won't work in this API #val = cfe.getAttribute('start') #res += [('dtstart','=', cfe)] elif cfe.getAttribute('end'): - _log.warning("Ignore end.. ") + _logger.warning("Ignore end.. ") else: - _log.debug("Unknown comp-filter: %s", cfe.localName) + _logger.debug("Unknown comp-filter: %s", cfe.localName) else: - _log.debug("Unknown comp-filter: %s", vevent_filter.localName) + _logger.debug("Unknown comp-filter: %s", vevent_filter.localName) else: - _log.debug("Unknown filter element: %s", vcalendar_filter.localName) + _logger.debug("Unknown filter element: %s", vcalendar_filter.localName) else: - _log.debug("Unknown calendar-query element: %s", filter_child.localName) + _logger.debug("Unknown calendar-query element: %s", filter_child.localName) return res elif filters.localName == 'calendar-multiget': # this is not the place to process, as it wouldn't support multi-level # hrefs. 
So, the code is moved to document_webdav/dav_fs.py pass else: - _log.debug("Unknown element in REPORT: %s", filters.localName) + _logger.debug("Unknown element in REPORT: %s", filters.localName) return res def children(self, cr, domain=None): diff --git a/addons/caldav/calendar.py b/addons/caldav/calendar.py index 94d28e4ad37..770865175cd 100644 --- a/addons/caldav/calendar.py +++ b/addons/caldav/calendar.py @@ -40,6 +40,8 @@ try: except ImportError: raise osv.except_osv(_('vobject Import Error!'), _('Please install python-vobject from http://vobject.skyhouseconsulting.com/')) +_logger = logging.getLogger(__name__) + # O-1 Optional and can come only once # O-n Optional and can come more than once # R-1 Required and can come only once @@ -240,7 +242,6 @@ def map_data(cr, uid, obj, context=None): class CalDAV(object): __attribute__ = {} - _logger = logging.getLogger('document.caldav') def ical_set(self, name, value, type): """ set calendar Attribute @@ -725,13 +726,13 @@ class Calendar(CalDAV, osv.osv): objs.append(cal_children[child.name.lower()]) elif child.name.upper() == 'CALSCALE': if child.value.upper() != 'GREGORIAN': - self._logger.warning('How do I handle %s calendars?',child.value) + _logger.warning('How do I handle %s calendars?',child.value) elif child.name.upper() in ('PRODID', 'VERSION'): pass elif child.name.upper().startswith('X-'): - self._logger.debug("skipping custom node %s", child.name) + _logger.debug("skipping custom node %s", child.name) else: - self._logger.debug("skipping node %s", child.name) + _logger.debug("skipping node %s", child.name) res = [] for obj_name in list(set(objs)): diff --git a/addons/caldav/calendar_collection.py b/addons/caldav/calendar_collection.py index 1abbf573ef0..8c845aff5e6 100644 --- a/addons/caldav/calendar_collection.py +++ b/addons/caldav/calendar_collection.py @@ -24,6 +24,8 @@ from tools.translate import _ import caldav_node import logging +_logger = logging.getLogger(__name__) + class 
calendar_collection(osv.osv): _inherit = 'document.directory' _columns = { @@ -44,8 +46,7 @@ class calendar_collection(osv.osv): root_cal_dir = self.browse(cr,uid, root_id, context=context) return root_cal_dir.name except Exception: - logger = logging.getLogger('document') - logger.warning('Cannot set root directory for Calendars:', exc_info=True) + _logger.warning('Cannot set root directory for Calendars:', exc_info=True) return False return False diff --git a/addons/crm/crm_meeting.py b/addons/crm/crm_meeting.py index 40ba8662f13..f28212051eb 100644 --- a/addons/crm/crm_meeting.py +++ b/addons/crm/crm_meeting.py @@ -25,6 +25,8 @@ from osv import fields, osv from tools.translate import _ import logging +_logger = logging.getLogger(__name__) + class crm_lead(crm_case, osv.osv): """ CRM Leads """ _name = 'crm.lead' @@ -149,7 +151,7 @@ class res_users(osv.osv): 'user_id': user_id}, context=context) except: # Tolerate a missing shortcut. See product/product.py for similar code. - logging.getLogger('orm').debug('Skipped meetings shortcut for user "%s"', data.get('name','", cnod, node.tagName) + _logger.debug("Found %r inside <%s>", cnod, node.tagName) continue if namespaces and (cnod.namespaceURI not in namespaces): - log.debug("Ignoring <%s> in <%s>", cnod.tagName, node.localName) + _logger.debug("Ignoring <%s> in <%s>", cnod.tagName, node.localName) continue yield cnod @@ -533,10 +533,10 @@ class DAVClient(object): assert htver == 'HTTP/1.1' rstatus = int(sta) else: - log.debug("What is <%s> inside a ?", pno.tagName) + _logger.debug("What is <%s> inside a ?", pno.tagName) else: - log.debug("Unknown node: %s", cno.tagName) + _logger.debug("Unknown node: %s", cno.tagName) res.setdefault(href,[]).append((status, res_nss)) @@ -637,7 +637,7 @@ class DAVClient(object): if lsp[1] in davprops: lsline[lsp[0]] = lsp[2] else: - log.debug("Strange status: %s", st) + _logger.debug("Strange status: %s", st) res.append(lsline) diff --git a/addons/email_template/email_template.py 
b/addons/email_template/email_template.py index 90eda625759..4d64dd1a4c6 100644 --- a/addons/email_template/email_template.py +++ b/addons/email_template/email_template.py @@ -30,10 +30,12 @@ import tools from tools.translate import _ from urllib import quote as quote +_logger = logging.getLogger(__name__) + try: from mako.template import Template as MakoTemplate except ImportError: - logging.getLogger('init').warning("email_template: mako templates not available, templating features will not work!") + _logger.warning("email_template: mako templates not available, templating features will not work!") class email_template(osv.osv): "Templates for sending email" @@ -73,7 +75,7 @@ class email_template(osv.osv): result = u'' return result except Exception: - logging.exception("failed to render mako template value %r", template) + _logger.exception("failed to render mako template value %r", template) return u"" def get_email_template(self, cr, uid, template_id=False, record_id=None, context=None): diff --git a/addons/hr/hr.py b/addons/hr/hr.py index 76f9295a829..2dfd018eec1 100644 --- a/addons/hr/hr.py +++ b/addons/hr/hr.py @@ -23,6 +23,8 @@ from osv import fields, osv import logging import addons +_logger = logging.getLogger(__name__) + class hr_employee_category(osv.osv): def name_get(self, cr, uid, ids, context=None): @@ -270,7 +272,7 @@ class res_users(osv.osv): 'user_id': user_id}, context=context) except: # Tolerate a missing shortcut. See product/product.py for similar code. 
- logging.getLogger('orm').debug('Skipped meetings shortcut for user "%s"', data.get('name',' 1: - log = logging.getLogger('ir.ui.menu') - log.warning('User %s belongs to several portals', str(uid)) + _logger.warning('User %s belongs to several portals', str(uid)) p = portal_obj.browse(cr, uid, portal_ids[0]) # if the portal overrides the menu, use its domain if p.menu_action_id: diff --git a/addons/portal/wizard/portal_wizard.py b/addons/portal/wizard/portal_wizard.py index 706d6fd4a7f..f0790cb9bd9 100644 --- a/addons/portal/wizard/portal_wizard.py +++ b/addons/portal/wizard/portal_wizard.py @@ -28,7 +28,7 @@ from tools.translate import _ from base.res.res_users import _lang_get - +_logger = logging.getLogger(__name__) # welcome email sent to new portal users (note that calling tools.translate._ # has no effect except exporting those strings for translation) @@ -178,7 +178,7 @@ class wizard(osv.osv_memory): body = _(WELCOME_EMAIL_BODY) % data res = mail_message_obj.schedule_with_attach(cr, uid, email_from , [email_to], subject, body, context=context) if not res: - logging.getLogger('res.portal.wizard').warning( + _logger.warning( 'Failed to send email from %s to %s', email_from, email_to) return {'type': 'ir.actions.act_window_close'} diff --git a/addons/portal/wizard/share_wizard.py b/addons/portal/wizard/share_wizard.py index b5c9d165cc6..4236206a6fd 100644 --- a/addons/portal/wizard/share_wizard.py +++ b/addons/portal/wizard/share_wizard.py @@ -19,9 +19,13 @@ # ############################################################################## +import logging + from osv import osv, fields from tools.translate import _ +_logger = logging.getLogger(__name__) + UID_ROOT = 1 SHARED_DOCS_MENU = "Documents" SHARED_DOCS_CHILD_MENU = "Shared Documents" @@ -164,19 +168,19 @@ class share_wizard_portal(osv.osv_memory): # v6.1, the algorithm for combining them will OR the rules, hence # extending the visible data. 
Rules.write(cr, UID_ROOT, share_rule_ids, {'groups': [(4,target_group.id)]}) - self._logger.debug("Linked sharing rules from temporary sharing group to group %s", target_group) + _logger.debug("Linked sharing rules from temporary sharing group to group %s", target_group) # Copy the access rights. This is appropriate too because # groups have the UNION of all permissions granted by their # access right lines. for access_line in share_group.model_access: Rights.copy(cr, UID_ROOT, access_line.id, default={'group_id': target_group.id}) - self._logger.debug("Copied access rights from temporary sharing group to group %s", target_group) + _logger.debug("Copied access rights from temporary sharing group to group %s", target_group) # finally, delete it after removing its users Groups.write(cr, UID_ROOT, [share_group_id], {'users': [(6,0,[])]}) Groups.unlink(cr, UID_ROOT, [share_group_id]) - self._logger.debug("Deleted temporary sharing group %s", share_group_id) + _logger.debug("Deleted temporary sharing group %s", share_group_id) def _finish_result_lines(self, cr, uid, wizard_data, share_group_id, context=None): super(share_wizard_portal,self)._finish_result_lines(cr, uid, wizard_data, share_group_id, context=context) diff --git a/addons/share/wizard/share_wizard.py b/addons/share/wizard/share_wizard.py index 80a0b12f8b2..5ed07142367 100644 --- a/addons/share/wizard/share_wizard.py +++ b/addons/share/wizard/share_wizard.py @@ -33,6 +33,8 @@ from tools.translate import _ from tools.safe_eval import safe_eval import openerp +_logger = logging.getLogger(__name__) + FULL_ACCESS = ('perm_read', 'perm_write', 'perm_create', 'perm_unlink') READ_WRITE_ACCESS = ('perm_read', 'perm_write') READ_ONLY_ACCESS = ('perm_read',) @@ -48,7 +50,6 @@ def generate_random_pass(): return ''.join(random.sample(RANDOM_PASS_CHARACTERS,10)) class share_wizard(osv.osv_memory): - _logger = logging.getLogger('share.wizard') _name = 'share.wizard' _description = 'Share Wizard' @@ -322,7 +323,7 @@ class 
share_wizard(osv.osv_memory): except Exception: # Note: must catch all exceptions, as UnquoteEvalContext may cause many # different exceptions, as it shadows builtins. - self._logger.debug("Failed to cleanup action context as it does not parse server-side", exc_info=True) + _logger.debug("Failed to cleanup action context as it does not parse server-side", exc_info=True) result = context_str return result @@ -483,8 +484,8 @@ class share_wizard(osv.osv_memory): [x.id for x in current_user.groups_id], target_model_ids, context=context) group_access_map = self._get_access_map_for_groups_and_models(cr, uid, [group_id], target_model_ids, context=context) - self._logger.debug("Current user access matrix: %r", current_user_access_map) - self._logger.debug("New group current access matrix: %r", group_access_map) + _logger.debug("Current user access matrix: %r", current_user_access_map) + _logger.debug("New group current access matrix: %r", group_access_map) # Create required rights if allowed by current user rights and not # already granted @@ -505,7 +506,7 @@ class share_wizard(osv.osv_memory): need_creation = True if need_creation: model_access_obj.create(cr, UID_ROOT, values) - self._logger.debug("Creating access right for model %s with values: %r", model.model, values) + _logger.debug("Creating access right for model %s with values: %r", model.model, values) def _link_or_copy_current_user_rules(self, cr, current_user, group_id, fields_relations, context=None): rule_obj = self.pool.get('ir.rule') @@ -527,13 +528,13 @@ class share_wizard(osv.osv_memory): 'groups': [(6,0,[group_id])], 'domain_force': rule.domain, # evaluated version! 
}) - self._logger.debug("Copying rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force) + _logger.debug("Copying rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force) else: # otherwise we can simply link the rule to keep it dynamic rule_obj.write(cr, 1, [rule.id], { 'groups': [(4,group_id)] }) - self._logger.debug("Linking rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force) + _logger.debug("Linking rule %s (%s) on model %s with domain: %s", rule.name, rule.id, model.model, rule.domain_force) def _check_personal_rule_or_duplicate(self, cr, group_id, rule, context=None): """Verifies that the given rule only belongs to the given group_id, otherwise @@ -552,7 +553,7 @@ class share_wizard(osv.osv_memory): 'groups': [(6,0,[group_id])], 'domain_force': rule.domain_force, # non evaluated! }) - self._logger.debug("Duplicating rule %s (%s) (domain: %s) for modified access ", rule.name, rule.id, rule.domain_force) + _logger.debug("Duplicating rule %s (%s) (domain: %s) for modified access ", rule.name, rule.id, rule.domain_force) # then disconnect from group_id: rule.write({'groups':[(3,group_id)]}) # disconnects, does not delete! 
return rule_obj.browse(cr, UID_ROOT, new_id, context=context) @@ -587,7 +588,7 @@ class share_wizard(osv.osv_memory): if restrict: continue else: - self._logger.debug("Ignoring sharing rule on model %s with domain: %s the same rule exists already", model_id, domain) + _logger.debug("Ignoring sharing rule on model %s with domain: %s the same rule exists already", model_id, domain) return if restrict: # restricting existing rules is done by adding the clause @@ -599,7 +600,7 @@ class share_wizard(osv.osv_memory): new_clause = expression.normalize(eval(domain, eval_ctx)) combined_domain = expression.AND([new_clause, org_domain]) rule.write({'domain_force': combined_domain, 'name': rule.name + _('(Modified)')}) - self._logger.debug("Combining sharing rule %s on model %s with domain: %s", rule.id, model_id, domain) + _logger.debug("Combining sharing rule %s on model %s with domain: %s", rule.id, model_id, domain) if not restrict: # Adding the new rule in the group is ok for normal cases, because rules # in the same group and for the same model will be combined with OR @@ -610,7 +611,7 @@ class share_wizard(osv.osv_memory): 'domain_force': domain, 'groups': [(4,group_id)] }) - self._logger.debug("Created sharing rule on model %s with domain: %s", model_id, domain) + _logger.debug("Created sharing rule on model %s with domain: %s", model_id, domain) def _create_indirect_sharing_rules(self, cr, current_user, wizard_data, group_id, fields_relations, context=None): rule_name = _('Indirect sharing filter created by user %s (%s) for group %s') % \ @@ -631,7 +632,7 @@ class share_wizard(osv.osv_memory): group_id, model_id=model.id, domain=str(related_domain), rule_name=rule_name, restrict=True, context=context) except Exception: - self._logger.exception('Failed to create share access') + _logger.exception('Failed to create share access') raise osv.except_osv(_('Sharing access could not be created'), _('Sorry, the current screen and filter you are trying to share are not 
supported at the moment.\nYou may want to try a simpler filter.')) @@ -749,7 +750,7 @@ class share_wizard(osv.osv_memory): } def send_emails(self, cr, uid, wizard_data, context=None): - self._logger.info('Sending share notifications by email...') + _logger.info('Sending share notifications by email...') mail_message = self.pool.get('mail.message') user = self.pool.get('res.users').browse(cr, UID_ROOT, uid) @@ -795,7 +796,7 @@ class share_wizard(osv.osv_memory): context=context)) # force direct delivery, as users expect instant notification mail_message.send(cr, uid, msg_ids, context=context) - self._logger.info('%d share notification(s) sent.', len(msg_ids)) + _logger.info('%d share notification(s) sent.', len(msg_ids)) def onchange_embed_options(self, cr, uid, ids, opt_title, opt_search, context=None): wizard = self.browse(cr, uid, ids[0], context) diff --git a/addons/stock_planning/stock_planning.py b/addons/stock_planning/stock_planning.py index d104d57c62b..0b9f95deb7e 100644 --- a/addons/stock_planning/stock_planning.py +++ b/addons/stock_planning/stock_planning.py @@ -29,8 +29,7 @@ from tools.translate import _ import logging import decimal_precision as dp -_logger = logging.getLogger('mps') - +_logger = logging.getLogger(__name__) def rounding(fl, round_value): if not round_value: diff --git a/addons/users_ldap/users_ldap.py b/addons/users_ldap/users_ldap.py index 1ad1b2df104..c32d120b298 100644 --- a/addons/users_ldap/users_ldap.py +++ b/addons/users_ldap/users_ldap.py @@ -28,6 +28,8 @@ import tools from osv import fields, osv from openerp import SUPERUSER_ID +_logger = logging.getLogger(__name__) + class CompanyLDAP(osv.osv): _name = 'res.company.ldap' _order = 'sequence' @@ -107,8 +109,7 @@ class CompanyLDAP(osv.osv): except ldap.INVALID_CREDENTIALS: return False except ldap.LDAPError, e: - logger = logging.getLogger('orm.ldap') - logger.error('An LDAP exception occurred: %s', e) + _logger.error('An LDAP exception occurred: %s', e) return entry def 
query(self, conf, filter, retrieve_attributes=None): @@ -135,7 +136,6 @@ class CompanyLDAP(osv.osv): """ results = [] - logger = logging.getLogger('orm.ldap') try: conn = self.connect(conf) conn.simple_bind_s(conf['ldap_binddn'] or '', @@ -144,9 +144,9 @@ class CompanyLDAP(osv.osv): filter, retrieve_attributes, timeout=60) conn.unbind() except ldap.INVALID_CREDENTIALS: - logger.error('LDAP bind failed.') + _logger.error('LDAP bind failed.') except ldap.LDAPError, e: - logger.error('An LDAP exception occurred: %s', e) + _logger.error('An LDAP exception occurred: %s', e) return results def map_ldap_attributes(self, cr, uid, conf, login, ldap_entry): @@ -188,8 +188,7 @@ class CompanyLDAP(osv.osv): if res[1]: user_id = res[0] elif conf['create_user']: - logger = logging.getLogger('orm.ldap') - logger.debug("Creating new OpenERP user \"%s\" from LDAP" % login) + _logger.debug("Creating new OpenERP user \"%s\" from LDAP" % login) user_obj = self.pool.get('res.users') values = self.map_ldap_attributes(cr, uid, conf, login, ldap_entry) if conf['user']: From bf6d569d75f8e4217773ea6f58d231160f32b557 Mon Sep 17 00:00:00 2001 From: Vo Minh Thu Date: Wed, 25 Jan 2012 14:00:11 +0100 Subject: [PATCH 7/7] [IMP] logging: use logging instead of netsvc. 
bzr revid: vmt@openerp.com-20120125130011-yxfhry0h881k64cn --- addons/document_ftp/ftpserver/__init__.py | 6 ++---- addons/l10n_be/wizard/l10n_be_partner_vat_listing.py | 1 - 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/addons/document_ftp/ftpserver/__init__.py b/addons/document_ftp/ftpserver/__init__.py index 11b041e074b..dd5ec255704 100644 --- a/addons/document_ftp/ftpserver/__init__.py +++ b/addons/document_ftp/ftpserver/__init__.py @@ -38,8 +38,6 @@ def start_server(): PASSIVE_PORTS = int(pps[0]), int(pps[1]) class ftp_server(threading.Thread): - def log(self, level, message): - _logger.log(level, message) def run(self): autho = authorizer.authorizer() @@ -50,9 +48,9 @@ def start_server(): if PASSIVE_PORTS: ftpserver.FTPHandler.passive_ports = PASSIVE_PORTS - ftpserver.log = lambda msg: self.log(netsvc.LOG_INFO, msg) + ftpserver.log = _logger.info ftpserver.logline = lambda msg: None - ftpserver.logerror = lambda msg: self.log(netsvc.LOG_ERROR, msg) + ftpserver.logerror = _logger.error ftpd = ftpserver.FTPServer((HOST, PORT), ftpserver.FTPHandler) ftpd.serve_forever() diff --git a/addons/l10n_be/wizard/l10n_be_partner_vat_listing.py b/addons/l10n_be/wizard/l10n_be_partner_vat_listing.py index e43dbfecf2b..7e7cba15c78 100644 --- a/addons/l10n_be/wizard/l10n_be_partner_vat_listing.py +++ b/addons/l10n_be/wizard/l10n_be_partner_vat_listing.py @@ -45,7 +45,6 @@ class partner_vat_13(osv.osv_memory): period = obj_period.search(cursor, user, [('date_start' ,'>=', date_start), ('date_stop','<=',date_stop)]) if not period: raise osv.except_osv(_('Data Insufficient!'), _('No data for the selected Year.')) - #logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING, 'period = %s' %period ) p_id_list = obj_partner.search(cursor, user, [('vat_subjected', '!=', False)], context=context) if not p_id_list: