[IMP] Add logging at the top of python files
bzr revid: fka@tinyerp.com-20120622064839-llitm8szgikxznwr
This commit is contained in:
parent e02d8a68ef
commit 0c374a66bc
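The diff below applies one pattern throughout: scattered logging.getLogger('hard.coded.name') calls and per-class logger attributes are replaced by a single logger created once at the top of each module. A minimal sketch of the idea, assuming a simplified class (the ContentIndexer name and register method are illustrative only, not the commit's code):

    import logging

    # One module-level logger, created at import time and named after the
    # module, so records automatically carry the module's dotted path.
    _logger = logging.getLogger(__name__)

    class ContentIndexer(object):
        # Hypothetical stand-in for the classes touched in this commit.
        def register(self, obj):
            # Before: self.__logger.debug(...) via a class-level
            # logging.getLogger('some.hardcoded.name') attribute.
            # After: the shared module-level logger.
            _logger.debug('Register content indexer: %r', obj)
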
@@ -22,7 +22,7 @@ import logging
 import os
 import tempfile
 from subprocess import Popen, PIPE

+_logger = logging.getLogger(__name__)
 class NhException(Exception):
     pass

@@ -116,7 +116,7 @@ def mime_match(mime, mdict):
     return (None, None)

 class contentIndex(object):
-    __logger = logging.getLogger('addons.document.content_index')
+
     def __init__(self):
         self.mimes = {}
         self.exts = {}

@@ -132,7 +132,7 @@ class contentIndex(object):
             f = True

         if f:
-            self.__logger.debug('Register content indexer: %r', obj)
+            _logger.debug('Register content indexer: %r', obj)
         if not f:
             raise Exception("Your indexer should at least suport a mimetype or extension")

@@ -169,22 +169,22 @@ class contentIndex(object):
                 (result, _) = pop.communicate()

                 mime2 = result.split(';')[0]
-                self.__logger.debug('File gave us: %s', mime2)
+                _logger.debug('File gave us: %s', mime2)
                 # Note that the temporary file still exists now.
                 mime,fobj = mime_match(mime2, self.mimes)
                 if not mime:
                     mime = mime2
             except Exception:
-                self.__logger.exception('Cannot determine mime type')
+                _logger.exception('Cannot determine mime type')

         try:
             if fobj:
                 res = (mime, fobj.indexContent(content,filename,fname or realfname) )
             else:
-                self.__logger.debug("Have no object, return (%s, None)", mime)
+                _logger.debug("Have no object, return (%s, None)", mime)
                 res = (mime, None )
         except Exception:
-            self.__logger.exception("Could not index file %s (%s)",
+            _logger.exception("Could not index file %s (%s)",
                     filename, fname or realfname)
             res = None

@@ -193,8 +193,7 @@ class contentIndex(object):
             try:
                 os.unlink(fname)
             except Exception:
-                self.__logger.exception("Could not unlink %s", fname)
-
+                _logger.exception("Could not unlink %s", fname)
         return res

 cntIndex = contentIndex()

@@ -30,11 +30,14 @@ from tools.translate import _
 import nodes
 import logging

+_loggerdoc = logging.getLogger(__name__)
+
 DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config['root_path'], 'filestore'))

 class document_file(osv.osv):
     _inherit = 'ir.attachment'
     _rec_name = 'datas_fname'

+
     def _attach_parent_id(self, cr, uid, ids=None, context=None):
         """Migrate ir.attachments to the document module.

@@ -54,7 +57,7 @@ class document_file(osv.osv):

         parent_id = self.pool.get('document.directory')._get_root_directory(cr,uid)
         if not parent_id:
-            logging.getLogger('document').warning("at _attach_parent_id(), still not able to set the parent!")
+            _loggerdoc.warning("at _attach_parent_id(), still not able to set the parent!")
             return False

         if ids is not None:

@@ -335,7 +338,7 @@ class document_file(osv.osv):
             if r:
                 unres.append(r)
             else:
-                logging.getLogger('document').warning("Unlinking attachment #%s %s that has no storage",
+                self.loggerdoc.warning("Unlinking attachment #%s %s that has no storage",
                                 f.id, f.name)
         res = super(document_file, self).unlink(cr, uid, ids, context)
         stor.do_unlink(cr, uid, unres)

@@ -22,10 +22,10 @@

 from osv import osv, fields
 from osv.orm import except_orm

 import logging
 import nodes
 from tools.translate import _

+_loggerdir = logging.getLogger(__name__)
 class document_directory(osv.osv):
     _name = 'document.directory'
     _description = 'Directory'

@@ -78,8 +78,8 @@ class document_directory(osv.osv):
             root_id = objid.read(cr, uid, mid, ['res_id'])['res_id']
             return root_id
         except Exception, e:
-            import logging
-            logging.getLogger('document').warning('Cannot set directory root:'+ str(e))
+            _loggerdir.warning('Cannot set directory root:'+ str(e))
+
             return False
         return objid.browse(cr, uid, mid, context=context).res_id

@@ -29,18 +29,16 @@ import logging
 import shutil
 from StringIO import StringIO
 import psycopg2

 from tools.misc import ustr
 from tools.translate import _

 from osv.orm import except_orm

 import random
 import string
 import pooler
 import nodes
 from content_index import cntIndex

-_logger = logging.getLogger(__name__)
+_doclog = logging.getLogger(__name__)
 DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config.get('root_path'), 'filestore'))

@@ -130,7 +128,7 @@ class nodefd_file(nodes.node_descriptor):
                 mime, icont = cntIndex.doIndex(None, filename=filename,
                         content_type=None, realfname=fname)
             except Exception:
-                logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True)
+                _logger.debug('Cannot index file:', exc_info=True)
                 pass

             try:

@@ -150,7 +148,7 @@ class nodefd_file(nodes.node_descriptor):
                 cr.commit()
                 cr.close()
             except Exception:
-                logging.getLogger('document.storage').warning('Cannot save file indexed content:', exc_info=True)
+                _logger.warning('Cannot save file indexed content:', exc_info=True)

         elif self.mode in ('a', 'a+' ):
             try:

@@ -164,7 +162,7 @@ class nodefd_file(nodes.node_descriptor):
                 cr.commit()
                 cr.close()
             except Exception:
-                logging.getLogger('document.storage').warning('Cannot save file appended content:', exc_info=True)
+                _logger.warning('Cannot save file appended content:', exc_info=True)

@@ -191,7 +189,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
         elif mode == 'a':
             StringIO.__init__(self, None)
         else:
-            logging.getLogger('document.storage').error("Incorrect mode %s specified", mode)
+            _logger.error("Incorrect mode %s specified", mode)
             raise IOError(errno.EINVAL, "Invalid file mode")
         self.mode = mode

@@ -217,7 +215,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
             mime, icont = cntIndex.doIndex(data, filename=filename,
                     content_type=None, realfname=None)
         except Exception:
-            logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True)
+            _logger.debug('Cannot index file:', exc_info=True)
             pass

         try:

@@ -241,7 +239,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
                 (out, len(data), par.file_id))
             cr.commit()
         except Exception:
-            logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
+            _logger.exception('Cannot update db file #%d for close:', par.file_id)
             raise
         finally:
             cr.close()

@@ -271,7 +269,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
         elif mode == 'a':
             StringIO.__init__(self, None)
         else:
-            logging.getLogger('document.storage').error("Incorrect mode %s specified", mode)
+            _logger.error("Incorrect mode %s specified", mode)
             raise IOError(errno.EINVAL, "Invalid file mode")
         self.mode = mode

@@ -297,7 +295,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
             mime, icont = cntIndex.doIndex(data, filename=filename,
                     content_type=None, realfname=None)
         except Exception:
-            logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True)
+            self.logger.debug('Cannot index file:', exc_info=True)
             pass

         try:

@@ -320,7 +318,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
                 (base64.encodestring(data), len(data), par.file_id))
             cr.commit()
         except Exception:
-            logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
+            _logger.exception('Cannot update db file #%d for close:', par.file_id)
             raise
         finally:
             cr.close()

@@ -339,7 +337,6 @@ class document_storage(osv.osv):
     """
     _name = 'document.storage'
     _description = 'Storage Media'
-    _doclog = logging.getLogger('document')

     _columns = {
         'name': fields.char('Name', size=64, required=True, select=1),

@@ -413,7 +410,7 @@ class document_storage(osv.osv):
             dpath += npath[:-1]
         path = os.path.join(*dpath)
         if not os.path.isdir(path):
-            self._doclog.debug("Create dirs: %s", path)
+            _doclog.debug("Create dirs: %s", path)
             os.makedirs(path)
         return path, npath

@@ -451,7 +448,7 @@ class document_storage(osv.osv):
                 # try to fix their directory.
                 if mode in ('r','r+'):
                     if ira.file_size:
-                        self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
+                        _doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
                     raise IOError(errno.ENOENT, 'No file can be located')
                 else:
                     store_fname = self.__get_random_fname(boo.path)

@@ -493,7 +490,7 @@ class document_storage(osv.osv):
                 # On a migrated db, some files may have the wrong storage type
                 # try to fix their directory.
                 if ira.file_size:
-                    self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
+                    _doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
                 return None
             fpath = os.path.join(boo.path, ira.store_fname)
             return file(fpath, 'rb').read()

@@ -517,7 +514,7 @@ class document_storage(osv.osv):
                 # On a migrated db, some files may have the wrong storage type
                 # try to fix their directory.
                 if ira.file_size:
-                    self._doclog.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id)
+                    _doclog.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id)
                 # sfname = ira.name
             fpath = os.path.join(boo.path,ira.store_fname or ira.name)
             if os.path.exists(fpath):

@@ -550,7 +547,7 @@ class document_storage(osv.osv):
         if boo.readonly:
             raise IOError(errno.EPERM, "Readonly medium")

-        self._doclog.debug( "Store data for ir.attachment #%d" % ira.id)
+        _doclog.debug( "Store data for ir.attachment #%d" % ira.id)
         store_fname = None
         fname = None
         if boo.type == 'filestore':

@@ -563,13 +560,13 @@ class document_storage(osv.osv):
                     fp.write(data)
                 finally:
                     fp.close()
-                self._doclog.debug( "Saved data to %s" % fname)
+                _doclog.debug( "Saved data to %s" % fname)
                 filesize = len(data) # os.stat(fname).st_size

                 # TODO Here, an old file would be left hanging.

             except Exception, e:
-                self._doclog.warning( "Couldn't save data to %s", path, exc_info=True)
+                _doclog.warning( "Couldn't save data to %s", path, exc_info=True)
                 raise except_orm(_('Error!'), str(e))
         elif boo.type == 'db':
             filesize = len(data)

@@ -592,12 +589,12 @@ class document_storage(osv.osv):
                     fp.write(data)
                 finally:
                     fp.close()
-                self._doclog.debug("Saved data to %s", fname)
+                _doclog.debug("Saved data to %s", fname)
                 filesize = len(data) # os.stat(fname).st_size
                 store_fname = os.path.join(*npath)
                 # TODO Here, an old file would be left hanging.
             except Exception,e :
-                self._doclog.warning("Couldn't save data:", exc_info=True)
+                _doclog.warning("Couldn't save data:", exc_info=True)
                 raise except_orm(_('Error!'), str(e))

         elif boo.type == 'virtual':

@@ -616,7 +613,7 @@ class document_storage(osv.osv):
                 mime, icont = cntIndex.doIndex(data, ira.datas_fname,
                         ira.file_type or None, fname)
             except Exception:
-                self._doclog.debug('Cannot index file:', exc_info=True)
+                _doclog.debug('Cannot index file:', exc_info=True)
                 pass

             try:

@@ -671,9 +668,9 @@ class document_storage(osv.osv):
                 try:
                     os.unlink(fname)
                 except Exception:
-                    self._doclog.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
+                    _doclog.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
             else:
-                self._doclog.warning("Unknown unlink key %s" % ktype)
+                _doclog.warning("Unknown unlink key %s" % ktype)

         return True

@@ -703,9 +700,9 @@ class document_storage(osv.osv):
             fname = ira.store_fname

             if not fname:
-                self._doclog.warning("Trying to rename a non-stored file")
+                _doclog.warning("Trying to rename a non-stored file")
             if fname != os.path.join(*npath):
-                self._doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(npath))
+                _doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(npath))

             oldpath = os.path.join(path, npath[-1])
             newpath = os.path.join(path, new_name)

@@ -743,7 +740,7 @@ class document_storage(osv.osv):
                     break
                 par = par.parent_id
             if file_node.storage_id != psto:
-                self._doclog.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name)
+                _doclog.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name)
                 raise NotImplementedError('Cannot move files between storage media')

         if sbro.type in ('filestore', 'db', 'db64'):

@@ -756,9 +753,9 @@ class document_storage(osv.osv):
             fname = ira.store_fname

             if not fname:
-                self._doclog.warning("Trying to rename a non-stored file")
+                _doclog.warning("Trying to rename a non-stored file")
             if fname != os.path.join(*opath):
-                self._doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))
+                _doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))

             oldpath = os.path.join(path, opath[-1])

@@ -766,12 +763,12 @@ class document_storage(osv.osv):
             npath = filter(lambda x: x is not None, npath)
             newdir = os.path.join(*npath)
             if not os.path.isdir(newdir):
-                self._doclog.debug("Must create dir %s", newdir)
+                _doclog.debug("Must create dir %s", newdir)
                 os.makedirs(newdir)
             npath.append(opath[-1])
             newpath = os.path.join(*npath)

-            self._doclog.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
+            _doclog.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
             shutil.move(oldpath, newpath)

             store_path = npath[1:] + [opath[-1],]

@@ -41,8 +41,9 @@ from StringIO import StringIO
 # file: objct = ir.attachement
 # root: if we are at the first directory of a ressource
 #

-logger = logging.getLogger('doc2.nodes')
+_nodefd = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
+_loggernode = logging.getLogger(__name__)

 def _str2time(cre):
     """ Convert a string with time representation (from db) into time (float)

@@ -328,7 +329,7 @@ class node_class(object):
         if self.DAV_M_NS.has_key(ns):
             prefix = self.DAV_M_NS[ns]
         else:
-            logger.debug('No namespace: %s ("%s")',ns, prop)
+            _logger.debug('No namespace: %s ("%s")',ns, prop)
             return None

         mname = prefix + "_" + prop.replace('-','_')

@@ -341,7 +342,7 @@ class node_class(object):
             r = m(cr)
             return r
         except AttributeError:
-            logger.debug('Property %s not supported' % prop, exc_info=True)
+            _logger.debug('Property %s not supported' % prop, exc_info=True)
         return None

     def get_dav_resourcetype(self, cr):

@@ -384,13 +385,13 @@ class node_class(object):
     def create_child(self, cr, path, data=None):
         """ Create a regular file under this node
         """
-        logger.warning("Attempted to create a file under %r, not possible.", self)
+        _logger.warning("Attempted to create a file under %r, not possible.", self)
         raise IOError(errno.EPERM, "Not allowed to create files here")

     def create_child_collection(self, cr, objname):
         """ Create a child collection (directory) under self
         """
-        logger.warning("Attempted to create a collection under %r, not possible.", self)
+        _logger.warning("Attempted to create a collection under %r, not possible.", self)
         raise IOError(errno.EPERM, "Not allowed to create folders here")

     def rm(self, cr):

@@ -725,7 +726,7 @@ class node_dir(node_database):
         assert self.parent

         if self.parent != ndir_node:
-            logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
+            _logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
             raise NotImplementedError('Cannot move dir to another dir')

         ret = {}

@@ -998,7 +999,7 @@ class node_res_obj(node_class):
     def get_dav_eprop_DEPR(self, cr, ns, prop):
         # Deprecated!
         if ns != 'http://groupdav.org/' or prop != 'resourcetype':
-            logger.warning("Who asked for %s:%s?" % (ns, prop))
+            _logger.warning("Who asked for %s:%s?" % (ns, prop))
             return None
         cntobj = self.context._dirobj.pool.get('document.directory.content')
         uid = self.context.uid

@@ -1328,7 +1329,7 @@ class node_file(node_class):
         ret = {}
         if ndir_node and self.parent != ndir_node:
             if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)):
-                logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node)
+                _logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node)
                 raise NotImplementedError('Cannot move files between dynamic folders')

             if not ndir_obj:

@@ -1452,6 +1453,7 @@ class node_content(node_class):
         return ''

 class nodefd_content(StringIO, node_descriptor):
+
     """ A descriptor to content nodes
     """
     def __init__(self, parent, cr, mode, ctx):

@@ -1473,7 +1475,7 @@ class nodefd_content(StringIO, node_descriptor):
         elif mode == 'a':
             StringIO.__init__(self, None)
         else:
-            logging.getLogger('document.content').error("Incorrect mode %s specified", mode)
+            _loggernode.error("Incorrect mode %s specified", mode)
             raise IOError(errno.EINVAL, "Invalid file mode")
         self.mode = mode

@@ -1499,13 +1501,14 @@ class nodefd_content(StringIO, node_descriptor):
                 raise NotImplementedError
             cr.commit()
         except Exception:
-            logging.getLogger('document.content').exception('Cannot update db content #%d for close:', par.cnt_id)
+            _loggernode.exception('Cannot update db content #%d for close:', par.cnt_id)
             raise
         finally:
             cr.close()
         StringIO.close(self)

 class nodefd_static(StringIO, node_descriptor):
+
     """ A descriptor to nodes with static data.
     """
     def __init__(self, parent, cr, mode, ctx=None):

@@ -1526,7 +1529,7 @@ class nodefd_static(StringIO, node_descriptor):
         elif mode == 'a':
             StringIO.__init__(self, None)
         else:
-            logging.getLogger('document.nodes').error("Incorrect mode %s specified", mode)
+            _nodefd.error("Incorrect mode %s specified", mode)
             raise IOError(errno.EINVAL, "Invalid file mode")
         self.mode = mode

@@ -1551,7 +1554,7 @@ class nodefd_static(StringIO, node_descriptor):
                 raise NotImplementedError
             cr.commit()
         except Exception:
-            logging.getLogger('document.nodes').exception('Cannot update db content #%d for close:', par.cnt_id)
+            _nodefd.exception('Cannot update db content #%d for close:', par.cnt_id)
             raise
         finally:
             cr.close()

@@ -25,6 +25,7 @@ import StringIO
 import odt2txt
 import sys, zipfile, xml.dom.minidom
 import logging
+_logger = logging.getLogger(__name__)

 def _to_unicode(s):
     try:

@@ -101,9 +102,9 @@ class DocIndex(indexer):
             (data, _) = pop.communicate()
             return _to_unicode(data)
         except OSError:
-            logger = logging.getLogger('document.DocIndex')
-            logger.warn("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
-            logger.debug("Trace of the failed file indexing attempt: ", exc_info=True)
+
+            _logger.warn("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
+            _logger.debug("Trace of the failed file indexing attempt: ", exc_info=True)
             return False

 cntIndex.register(DocIndex())

@@ -25,7 +25,7 @@ import authorizer
 import abstracted_fs
 import logging
 from tools import config

+_logger = logging.getLogger(__name__)
 def start_server():
     HOST = config.get('ftp_server_host', '127.0.0.1')
     PORT = int(config.get('ftp_server_port', '8021'))

@@ -35,7 +35,7 @@ def start_server():
         PASSIVE_PORTS = int(pps[0]), int(pps[1])

     class ftp_server(threading.Thread):

         def run(self):
             autho = authorizer.authorizer()
             ftpserver.FTPHandler.authorizer = autho

@@ -45,17 +45,17 @@ def start_server():
             if PASSIVE_PORTS:
                 ftpserver.FTPHandler.passive_ports = PASSIVE_PORTS

-            ftpserver.log = lambda msg: logging.getLogger('document.ftp').info(msg)
+            ftpserver.log = lambda msg: _logger.info(msg)
             ftpserver.logline = lambda msg: None
-            ftpserver.logerror = lambda msg: logging.getLogger('document.ftp').error(msg)
+            ftpserver.logerror = lambda msg: self.logger.error(msg)

             ftpd = ftpserver.FTPServer((HOST, PORT), ftpserver.FTPHandler)
             ftpd.serve_forever()

     if HOST.lower() == 'none':
-        logging.getLogger('document.ftp').info("\n Server FTP Not Started\n")
+        _logger.info("\n Server FTP Not Started\n")
     else:
-        logging.getLogger('document.ftp').info("\n Serving FTP on %s:%s\n" % (HOST, PORT))
+        _logger.info("\n Serving FTP on %s:%s\n" % (HOST, PORT))
     ds = ftp_server()
     ds.daemon = True
     ds.start()

@@ -56,7 +56,7 @@ class abstracted_fs(object):
         self.cwd = '/'
         self.cwd_node = None
         self.rnfr = None
-        self._log = logging.getLogger('FTP.fs')
+        self._log = logging.getLogger(__name__)

     # Ok
     def db_list(self):

@@ -24,9 +24,9 @@ import logging
 import urlparse
 from service.websrv_lib import FixSendError, HTTPHandler, HttpOptions
 from service.http_server import HttpLogHandler

+_logger = logging.getLogger(__name__)
 class RedirectHTTPHandler(HttpLogHandler, FixSendError, HttpOptions, HTTPHandler):
-    _logger = logging.getLogger('httpd.well-known')
+
     _HTTP_OPTIONS = { 'Allow': ['OPTIONS', 'GET', 'HEAD', 'PROPFIND'] }
     redirect_paths = {}

@@ -80,7 +80,7 @@ class RedirectHTTPHandler(HttpLogHandler, FixSendError, HttpOptions, HTTPHandler
         self.send_header("Content-Length", 0)
         self.end_headers()
         # Do we need a Cache-content: header here?
-        self._logger.debug("redirecting %s to %s", self.path, redir_path)
+        _logger.debug("redirecting %s to %s", self.path, redir_path)
         return None

     def do_PROPFIND(self):

@@ -43,7 +43,7 @@ from xmlrpclib import Transport, ProtocolError
 import StringIO
 import base64

-log = logging.getLogger('http-client')
+_log = logging.getLogger(__name__)

 class HTTP11(httplib.HTTP):
     _http_vsn = 11

@@ -62,7 +62,7 @@ class PersistentTransport(Transport):
         if not self._http.has_key(host):
             host, extra_headers, x509 = self.get_host_info(host)
             self._http[host] = HTTP11(host)
-            log.debug("New connection to %s", host)
+            _log.debug("New connection to %s", host)
         return self._http[host]

     def get_host_info(self, host):

@@ -170,7 +170,7 @@ class SafePersistentTransport(PersistentTransport):
         if not self._http.has_key(host):
             host, extra_headers, x509 = self.get_host_info(host)
             self._http[host] = httplib.HTTPS(host, None, **(x509 or {}))
-            log.debug("New connection to %s", host)
+            _log.debug("New connection to %s", host)
         return self._http[host]

 class AuthClient(object):

@@ -191,8 +191,8 @@ class BasicAuthClient(AuthClient):
             return super(BasicAuthClient,self).getAuth(atype, realm)

         if not self._realm_dict.has_key(realm):
-            log.debug("realm dict: %r", self._realm_dict)
-            log.debug("missing key: \"%s\"" % realm)
+            _log.debug("realm dict: %r", self._realm_dict)
+            _log.debug("missing key: \"%s\"" % realm)
             self.resolveFailedRealm(realm)
         return 'Basic '+ self._realm_dict[realm]

@@ -239,7 +239,7 @@ class addAuthTransport:
             # This line will bork if self.setAuthClient has not
             # been issued. That is a programming error, fix your code!
             auths = self._auth_client.getAuth(atype, realm)
-            log.debug("sending authorization: %s", auths)
+            _log.debug("sending authorization: %s", auths)
             h.putheader('Authorization', auths)
             self.send_content(h, request_body)

@@ -255,8 +255,8 @@ class addAuthTransport:
                 log.warning("Why have data on a 401 auth. message?")
             if realm.startswith('realm="') and realm.endswith('"'):
                 realm = realm[7:-1]
-            log.debug("Resp: %r %r", resp.version,resp.isclosed(), resp.will_close)
-            log.debug("Want to do auth %s for realm %s", atype, realm)
+            _log.debug("Resp: %r %r", resp.version,resp.isclosed(), resp.will_close)
+            _log.debug("Want to do auth %s for realm %s", atype, realm)
             if atype != 'Basic':
                 raise ProtocolError(host+handler, 403,
                                 "Unknown authentication method: %s" % atype, resp.msg)

@@ -315,7 +315,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
             lf = (len(ssl.PEM_FOOTER)+1)
             if cert[0-lf] != '\n':
                 cert = cert[:0-lf]+'\n'+cert[0-lf:]
-            log.debug("len-footer: %s cert: %r", lf, cert[0-lf])
+            _log.debug("len-footer: %s cert: %r", lf, cert[0-lf])

         return cert

@@ -390,7 +390,7 @@ class DAVClient(object):
         import base64
         dbg = self.dbg
         hdrs.update(self.hdrs)
-        log.debug("Getting %s http://%s:%d/%s", method, self.host, self.port, path)
+        _log.debug("Getting %s http://%s:%d/%s", method, self.host, self.port, path)
         conn = httplib.HTTPConnection(self.host, port=self.port, timeout=self.timeout)
         conn.set_debuglevel(dbg)
         if not path:

@@ -409,8 +409,8 @@ class DAVClient(object):
             data1 = r1.read()
             if not self.user:
                 raise Exception('Must auth, have no user/pass!')
-            log.debug("Ver: %s, closed: %s, will close: %s", r1.version,r1.isclosed(), r1.will_close)
-            log.debug("Want to do auth %s for realm %s", atype, realm)
+            _log.debug("Ver: %s, closed: %s, will close: %s", r1.version,r1.isclosed(), r1.will_close)
+            _log.debug("Want to do auth %s for realm %s", atype, realm)
             if atype == 'Basic' :
                 auths = base64.encodestring(self.user + ':' + self.passwd)
                 if auths[-1] == "\n":

@@ -422,22 +422,22 @@ class DAVClient(object):
             else:
                 raise Exception("Unknown auth type %s" %atype)
         else:
-            log.warning("Got 401, cannot auth")
+            _log.warning("Got 401, cannot auth")
             raise Exception('No auth')

-        log.debug("Reponse: %s %s",r1.status, r1.reason)
+        _log.debug("Reponse: %s %s",r1.status, r1.reason)
         data1 = r1.read()
         if method != 'GET':
-            log.debug("Body:\n%s\nEnd of body", data1)
+            _log.debug("Body:\n%s\nEnd of body", data1)
         try:
             ctype = r1.msg.getheader('content-type')
             if ctype and ';' in ctype:
                 ctype, encoding = ctype.split(';',1)
             if ctype == 'text/xml':
                 doc = xml.dom.minidom.parseString(data1)
-                log.debug("XML Body:\n %s", doc.toprettyxml(indent="\t"))
+                _log.debug("XML Body:\n %s", doc.toprettyxml(indent="\t"))
         except Exception:
-            log.warning("could not print xml", exc_info=True)
+            _log.warning("could not print xml", exc_info=True)
             pass
         conn.close()
         return r1.status, r1.msg, data1

@@ -474,7 +474,7 @@ class DAVClient(object):
         s, m, d = self._http_request(path, method='OPTIONS', hdrs=hdrs)
         assert s == 200, "Status: %r" % s
         assert 'OPTIONS' in m.getheader('Allow')
-        log.debug('Options: %r', m.getheader('Allow'))
+        _log.debug('Options: %r', m.getheader('Allow'))

         if expect:
             self._assert_headers(expect, m)

@@ -493,10 +493,10 @@ class DAVClient(object):
         for cnod in node.childNodes:
             if cnod.nodeType != node.ELEMENT_NODE:
                 if strict:
-                    log.debug("Found %r inside <%s>", cnod, node.tagName)
+                    _log.debug("Found %r inside <%s>", cnod, node.tagName)
                 continue
             if namespaces and (cnod.namespaceURI not in namespaces):
-                log.debug("Ignoring <%s> in <%s>", cnod.tagName, node.localName)
+                _log.debug("Ignoring <%s> in <%s>", cnod.tagName, node.localName)
                 continue
             yield cnod

@@ -533,10 +533,10 @@ class DAVClient(object):
                     assert htver == 'HTTP/1.1'
                     rstatus = int(sta)
                 else:
-                    log.debug("What is <%s> inside a <propstat>?", pno.tagName)
+                    _log.debug("What is <%s> inside a <propstat>?", pno.tagName)

             else:
-                log.debug("Unknown node: %s", cno.tagName)
+                _log.debug("Unknown node: %s", cno.tagName)

         res.setdefault(href,[]).append((status, res_nss))

@@ -637,7 +637,7 @@ class DAVClient(object):
                 if lsp[1] in davprops:
                     lsline[lsp[0]] = lsp[2]
                 else:
-                    log.debug("Strange status: %s", st)
+                    _log.debug("Strange status: %s", st)

             res.append(lsline)

@@ -55,6 +55,9 @@ from DAV.propfind import PROPFIND
 from xml.dom import minidom
 from redirect import RedirectHTTPHandler

+_logger_DAV = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
+_log_web = logging.getLogger(__name__)
 khtml_re = re.compile(r' KHTML/([0-9\.]+) ')

 def OpenDAVConfig(**kw):

@@ -73,7 +76,7 @@ def OpenDAVConfig(**kw):

 class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
     verbose = False
-    _logger = logging.getLogger('webdav')
+
     protocol_version = 'HTTP/1.1'
     _HTTP_OPTIONS= { 'DAV' : ['1', '2'],
             'Allow' : [ 'GET', 'HEAD', 'COPY', 'MOVE', 'POST', 'PUT',

@@ -127,10 +130,10 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
         return self.davpath

     def log_message(self, format, *args):
-        self._logger.debug(format % args)
+        _logger.debug(format % args)

     def log_error(self, format, *args):
-        self._logger.warning(format % args)
+        _logger.warning(format % args)

     def _prep_OPTIONS(self, opts):
         ret = opts

@@ -477,7 +480,7 @@ class dummy_dav_interface(object):
         uri2 = uri.split('/')
         if len(uri2) < 3:
             return True
-        logging.getLogger('webdav').debug("Requested uri: %s", uri)
+        _logger_DAV.debug("Requested uri: %s", uri)
         return None # no

     def is_collection(self, uri):

@@ -487,6 +490,7 @@ class dummy_dav_interface(object):
 class DAVStaticHandler(http_server.StaticHTTPHandler):
     """ A variant of the Static handler, which will serve dummy DAV requests
     """
+
     verbose = False
     protocol_version = 'HTTP/1.1'
     _HTTP_OPTIONS= { 'DAV' : ['1', '2'],

@@ -573,7 +577,7 @@ try:
     conf = OpenDAVConfig(**_dc)
     handler._config = conf
     reg_http_service(directory, DAVHandler, DAVAuthProvider)
-    logging.getLogger('webdav').info("WebDAV service registered at path: %s/ "% directory)
+    _logger_DAV.info("WebDAV service registered at path: %s/ "% directory)

     if not (config.get_misc('webdav', 'no_root_hack', False)):
         # Now, replace the static http handler with the dav-enabled one.

@@ -595,7 +599,7 @@ try:
         reg_http_service('/', DAVStaticHandler)

 except Exception, e:
-    logging.getLogger('webdav').error('Cannot launch webdav: %s' % e)
+    _logger_DAV.error('Cannot launch webdav: %s' % e)


 def init_well_known():

@@ -616,6 +620,8 @@ def init_well_known():
 init_well_known()

 class PrincipalsRedirect(RedirectHTTPHandler):
+
+
     redirect_paths = {}

     def _find_redirect(self):

@@ -639,7 +645,7 @@ def init_principals_redirect():
     if dbname:
         PrincipalsRedirect.redirect_paths[''] = '/webdav/%s/principals' % dbname
         reg_http_service('/principals', PrincipalsRedirect)
-        logging.getLogger("web-services").info(
+        _log_web.info(
             "Registered HTTP redirect handler for /principals to the %s db.",
             dbname)