[IMP] rephrase some error and warning messages in document, edi, google_base_account, mrp and mrp_repair

remove exclamation marks at the end of messages
remove unjustified capital letters

bzr revid: abo@openerp.com-20120806170841-cx9vuend1vglmsqk
This commit is contained in:
Antonin Bourguignon 2012-08-06 19:08:41 +02:00
parent d928b07ace
commit a52eeef519
15 changed files with 252 additions and 252 deletions

View File

@ -93,13 +93,13 @@ class indexer(object):
except NhException: except NhException:
pass pass
raise NhException('No appropriate method to index file !') raise NhException('No appropriate method to index file.')
def _doIndexContent(self,content): def _doIndexContent(self,content):
raise NhException("Content cannot be handled here!") raise NhException("Content cannot be handled here.")
def _doIndexFile(self,fpath): def _doIndexFile(self,fpath):
raise NhException("Content cannot be handled here!") raise NhException("Content cannot be handled here.")
def __repr__(self): def __repr__(self):
return "<indexer %s.%s>" %(self.__module__, self.__class__.__name__) return "<indexer %s.%s>" %(self.__module__, self.__class__.__name__)
@ -116,7 +116,7 @@ def mime_match(mime, mdict):
return (None, None) return (None, None)
class contentIndex(object): class contentIndex(object):
def __init__(self): def __init__(self):
self.mimes = {} self.mimes = {}
self.exts = {} self.exts = {}

View File

@ -23,7 +23,7 @@ import base64
from osv import osv, fields from osv import osv, fields
import os import os
# from psycopg2 import Binary #from psycopg2 import Binary
#from tools import config #from tools import config
import tools import tools
from tools.translate import _ from tools.translate import _
@ -37,8 +37,8 @@ DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config['roo
class document_file(osv.osv): class document_file(osv.osv):
_inherit = 'ir.attachment' _inherit = 'ir.attachment'
_rec_name = 'datas_fname' _rec_name = 'datas_fname'
def _attach_parent_id(self, cr, uid, ids=None, context=None): def _attach_parent_id(self, cr, uid, ids=None, context=None):
"""Migrate ir.attachments to the document module. """Migrate ir.attachments to the document module.
@ -61,7 +61,7 @@ class document_file(osv.osv):
return False return False
if ids is not None: if ids is not None:
raise NotImplementedError("Ids is just there by convention,please do not use it.") raise NotImplementedError("Ids are just there by convention, please do not use it.")
cr.execute("UPDATE ir_attachment " \ cr.execute("UPDATE ir_attachment " \
"SET parent_id = %s, db_datas = decode(encode(db_datas,'escape'), 'base64') " \ "SET parent_id = %s, db_datas = decode(encode(db_datas,'escape'), 'base64') " \
@ -294,7 +294,7 @@ class document_file(osv.osv):
('datas_fname', '=', vals['datas_fname']), ('datas_fname', '=', vals['datas_fname']),
] ]
attach_ids = self.search(cr, uid, domain, context=context) attach_ids = self.search(cr, uid, domain, context=context)
super(document_file, self).write(cr, uid, attach_ids, super(document_file, self).write(cr, uid, attach_ids,
{'datas' : vals['datas']}, {'datas' : vals['datas']},
context=context) context=context)
result = attach_ids[0] result = attach_ids[0]

View File

@ -78,7 +78,7 @@ class document_directory(osv.osv):
root_id = objid.read(cr, uid, mid, ['res_id'])['res_id'] root_id = objid.read(cr, uid, mid, ['res_id'])['res_id']
return root_id return root_id
except Exception, e: except Exception, e:
_logger.warning('Cannot set directory root:'+ str(e)) _logger.warning('Cannot set directory root:'+ str(e))
return False return False
return objid.browse(cr, uid, mid, context=context).res_id return objid.browse(cr, uid, mid, context=context).res_id
@ -153,7 +153,7 @@ class document_directory(osv.osv):
return True return True
_constraints = [ _constraints = [
(_check_recursion, 'Error! You cannot create recursive Directories.', ['parent_id']) (_check_recursion, 'Error! You cannot create recursive directories.', ['parent_id'])
] ]
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
@ -179,7 +179,7 @@ class document_directory(osv.osv):
def get_node_class(self, cr, uid, ids, dbro=None, dynamic=False, context=None): def get_node_class(self, cr, uid, ids, dbro=None, dynamic=False, context=None):
"""Retrieve the class of nodes for this directory """Retrieve the class of nodes for this directory
This function can be overriden by inherited classes ;) This function can be overriden by inherited classes ;)
@param dbro The browse object, if caller already has it @param dbro The browse object, if caller already has it
""" """
@ -193,17 +193,17 @@ class document_directory(osv.osv):
elif dbro.type == 'ressource': elif dbro.type == 'ressource':
return nodes.node_res_dir return nodes.node_res_dir
else: else:
raise ValueError("dir node for %s type!", dbro.type) raise ValueError("dir node for %s type.", dbro.type)
def _prepare_context(self, cr, uid, nctx, context=None): def _prepare_context(self, cr, uid, nctx, context=None):
""" Fill nctx with properties for this database """ Fill nctx with properties for this database
@param nctx instance of nodes.node_context, to be filled @param nctx instance of nodes.node_context, to be filled
@param context ORM context (dict) for us @param context ORM context (dict) for us
Note that this function is called *without* a list of ids, Note that this function is called *without* a list of ids,
it should behave the same for the whole database (based on the it should behave the same for the whole database (based on the
ORM instance of document.directory). ORM instance of document.directory).
Some databases may override this and attach properties to the Some databases may override this and attach properties to the
node_context. See WebDAV, CalDAV. node_context. See WebDAV, CalDAV.
""" """

View File

@ -52,7 +52,7 @@ For data /storage/ we have the cases:
Have (ir.attachment, context), we modify the file (save, update, rename etc). Have (ir.attachment, context), we modify the file (save, update, rename etc).
Have (directory, context), we create a file. Have (directory, context), we create a file.
Have (path, context), we create or modify a file. Have (path, context), we create or modify a file.
Note that in all above cases, we don't explicitly choose the storage media, Note that in all above cases, we don't explicitly choose the storage media,
but always require a context to be present. but always require a context to be present.
@ -61,7 +61,7 @@ nodes, for once, won't. Their metadata will be computed by the parent storage
media + directory. media + directory.
The algorithm says that in any of the above cases, our first goal is to locate The algorithm says that in any of the above cases, our first goal is to locate
the node for any combination of search criteria. It would be wise NOT to the node for any combination of search criteria. It would be wise NOT to
represent each node in the path (like node[/] + node[/dir1] + node[/dir1/dir2]) represent each node in the path (like node[/] + node[/dir1] + node[/dir1/dir2])
but directly jump to the end node (like node[/dir1/dir2]) whenever possible. but directly jump to the end node (like node[/dir1/dir2]) whenever possible.
@ -99,13 +99,13 @@ class nodefd_file(nodes.node_descriptor):
mode = mode[:-1] mode = mode[:-1]
self.mode = mode self.mode = mode
self._size = os.stat(path).st_size self._size = os.stat(path).st_size
for attr in ('closed', 'read', 'write', 'seek', 'tell', 'next'): for attr in ('closed', 'read', 'write', 'seek', 'tell', 'next'):
setattr(self,attr, getattr(self.__file, attr)) setattr(self,attr, getattr(self.__file, attr))
def size(self): def size(self):
return self._size return self._size
def __iter__(self): def __iter__(self):
return self return self
@ -122,7 +122,7 @@ class nodefd_file(nodes.node_descriptor):
filename = par.path filename = par.path
if isinstance(filename, (tuple, list)): if isinstance(filename, (tuple, list)):
filename = '/'.join(filename) filename = '/'.join(filename)
try: try:
mime, icont = cntIndex.doIndex(None, filename=filename, mime, icont = cntIndex.doIndex(None, filename=filename,
content_type=None, realfname=fname) content_type=None, realfname=fname)
@ -173,7 +173,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
self._size = 0L self._size = 0L
if mode.endswith('b'): if mode.endswith('b'):
mode = mode[:-1] mode = mode[:-1]
if mode in ('r', 'r+'): if mode in ('r', 'r+'):
cr = ira_browse._cr # reuse the cursor of the browse object, just now cr = ira_browse._cr # reuse the cursor of the browse object, just now
cr.execute('SELECT db_datas FROM ir_attachment WHERE id = %s',(ira_browse.id,)) cr.execute('SELECT db_datas FROM ir_attachment WHERE id = %s',(ira_browse.id,))
@ -189,7 +189,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
StringIO.__init__(self, None) StringIO.__init__(self, None)
else: else:
_logger.error("Incorrect mode %s is specified.", mode) _logger.error("Incorrect mode %s is specified.", mode)
raise IOError(errno.EINVAL, "Invalid file mode!") raise IOError(errno.EINVAL, "Invalid file mode.")
self.mode = mode self.mode = mode
def size(self): def size(self):
@ -209,7 +209,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
filename = par.path filename = par.path
if isinstance(filename, (tuple, list)): if isinstance(filename, (tuple, list)):
filename = '/'.join(filename) filename = '/'.join(filename)
try: try:
mime, icont = cntIndex.doIndex(data, filename=filename, mime, icont = cntIndex.doIndex(data, filename=filename,
content_type=None, realfname=None) content_type=None, realfname=None)
@ -246,7 +246,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
class nodefd_db64(StringIO, nodes.node_descriptor): class nodefd_db64(StringIO, nodes.node_descriptor):
""" A descriptor to db data, base64 (the old way) """ A descriptor to db data, base64 (the old way)
It stores the data in base64 encoding at the db. Not optimal, but It stores the data in base64 encoding at the db. Not optimal, but
the transparent compression of Postgres will save the day. the transparent compression of Postgres will save the day.
""" """
@ -255,7 +255,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
self._size = 0L self._size = 0L
if mode.endswith('b'): if mode.endswith('b'):
mode = mode[:-1] mode = mode[:-1]
if mode in ('r', 'r+'): if mode in ('r', 'r+'):
data = base64.decodestring(ira_browse.db_datas) data = base64.decodestring(ira_browse.db_datas)
if data: if data:
@ -269,7 +269,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
StringIO.__init__(self, None) StringIO.__init__(self, None)
else: else:
_logger.error("Incorrect mode %s is specified.", mode) _logger.error("Incorrect mode %s is specified.", mode)
raise IOError(errno.EINVAL, "Invalid file mode!") raise IOError(errno.EINVAL, "Invalid file mode.")
self.mode = mode self.mode = mode
def size(self): def size(self):
@ -289,7 +289,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
filename = par.path filename = par.path
if isinstance(filename, (tuple, list)): if isinstance(filename, (tuple, list)):
filename = '/'.join(filename) filename = '/'.join(filename)
try: try:
mime, icont = cntIndex.doIndex(data, filename=filename, mime, icont = cntIndex.doIndex(data, filename=filename,
content_type=None, realfname=None) content_type=None, realfname=None)
@ -317,7 +317,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
(base64.encodestring(data), len(data), par.file_id)) (base64.encodestring(data), len(data), par.file_id))
cr.commit() cr.commit()
except Exception: except Exception:
_logger.exception('Cannot update db file #%d for close !', par.file_id) _logger.exception('Cannot update db file #%d for close.', par.file_id)
raise raise
finally: finally:
cr.close() cr.close()
@ -330,7 +330,7 @@ class document_storage(osv.osv):
media. media.
The referring document.directory-ies will control the placement of data The referring document.directory-ies will control the placement of data
into the storage. into the storage.
It is a bad idea to have multiple document.storage objects pointing to It is a bad idea to have multiple document.storage objects pointing to
the same tree of filesystem storage. the same tree of filesystem storage.
""" """
@ -384,12 +384,12 @@ class document_storage(osv.osv):
def __prepare_realpath(self, cr, file_node, ira, store_path, do_create=True): def __prepare_realpath(self, cr, file_node, ira, store_path, do_create=True):
""" Cleanup path for realstore, create dirs if needed """ Cleanup path for realstore, create dirs if needed
@param file_node the node @param file_node the node
@param ira ir.attachment browse of the file_node @param ira ir.attachment browse of the file_node
@param store_path the path of the parent storage object, list @param store_path the path of the parent storage object, list
@param do_create create the directories, if needed @param do_create create the directories, if needed
@return tuple(path "/var/filestore/real/dir/", npath ['dir','fname.ext'] ) @return tuple(path "/var/filestore/real/dir/", npath ['dir','fname.ext'] )
""" """
file_node.fix_ppath(cr, ira) file_node.fix_ppath(cr, ira)
@ -401,10 +401,10 @@ class document_storage(osv.osv):
# self._logger.debug('Npath: %s', npath) # self._logger.debug('Npath: %s', npath)
for n in npath: for n in npath:
if n == '..': if n == '..':
raise ValueError("Invalid '..' element in path!") raise ValueError("Invalid '..' element in path.")
for ch in ('*', '|', "\\", '/', ':', '"', '<', '>', '?',): for ch in ('*', '|', "\\", '/', ':', '"', '<', '>', '?',):
if ch in n: if ch in n:
raise ValueError("Invalid char %s in path %s!" %(ch, n)) raise ValueError("Invalid char %s in path %s." %(ch, n))
dpath = [store_path,] dpath = [store_path,]
dpath += npath[:-1] dpath += npath[:-1]
path = os.path.join(*dpath) path = os.path.join(*dpath)
@ -420,8 +420,8 @@ class document_storage(osv.osv):
""" """
boo = self.browse(cr, uid, id, context=context) boo = self.browse(cr, uid, id, context=context)
if not boo.online: if not boo.online:
raise IOError(errno.EREMOTE, 'Medium offline!') raise IOError(errno.EREMOTE, 'Medium offline.')
if fil_obj: if fil_obj:
ira = fil_obj ira = fil_obj
else: else:
@ -435,11 +435,11 @@ class document_storage(osv.osv):
context = {} context = {}
boo = self.browse(cr, uid, id, context=context) boo = self.browse(cr, uid, id, context=context)
if not boo.online: if not boo.online:
raise IOError(errno.EREMOTE, 'medium offline!') raise IOError(errno.EREMOTE, 'Medium offline.')
if boo.readonly and mode not in ('r', 'rb'): if boo.readonly and mode not in ('r', 'rb'):
raise IOError(errno.EPERM, "Readonly medium!") raise IOError(errno.EPERM, "Readonly medium.")
ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
if boo.type == 'filestore': if boo.type == 'filestore':
if not ira.store_fname: if not ira.store_fname:
@ -447,8 +447,8 @@ class document_storage(osv.osv):
# try to fix their directory. # try to fix their directory.
if mode in ('r','r+'): if mode in ('r','r+'):
if ira.file_size: if ira.file_size:
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) _logger.warning( "ir.attachment #%d does not have a filename, but is at filestore. This should get fixed." % ira.id)
raise IOError(errno.ENOENT, 'No file can be located!') raise IOError(errno.ENOENT, 'No file can be located.')
else: else:
store_fname = self.__get_random_fname(boo.path) store_fname = self.__get_random_fname(boo.path)
cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s', cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s',
@ -479,9 +479,9 @@ class document_storage(osv.osv):
elif boo.type == 'virtual': elif boo.type == 'virtual':
raise ValueError('Virtual storage does not support static file(s).') raise ValueError('Virtual storage does not support static file(s).')
else: else:
raise TypeError("No %s storage !" % boo.type) raise TypeError("No %s storage." % boo.type)
def __get_data_3(self, cr, uid, boo, ira, context): def __get_data_3(self, cr, uid, boo, ira, context):
if boo.type == 'filestore': if boo.type == 'filestore':
@ -489,7 +489,7 @@ class document_storage(osv.osv):
# On a migrated db, some files may have the wrong storage type # On a migrated db, some files may have the wrong storage type
# try to fix their directory. # try to fix their directory.
if ira.file_size: if ira.file_size:
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) _logger.warning( "ir.attachment #%d does not have a filename, but is at filestore. This should get fixed." % ira.id)
return None return None
fpath = os.path.join(boo.path, ira.store_fname) fpath = os.path.join(boo.path, ira.store_fname)
return file(fpath, 'rb').read() return file(fpath, 'rb').read()
@ -541,10 +541,10 @@ class document_storage(osv.osv):
ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
if not boo.online: if not boo.online:
raise IOError(errno.EREMOTE, 'Medium offline!') raise IOError(errno.EREMOTE, 'Medium offline.')
if boo.readonly: if boo.readonly:
raise IOError(errno.EPERM, "Readonly medium!") raise IOError(errno.EPERM, "Readonly medium.")
_logger.debug( "Store data for ir.attachment #%d." % ira.id) _logger.debug( "Store data for ir.attachment #%d." % ira.id)
store_fname = None store_fname = None
@ -557,11 +557,11 @@ class document_storage(osv.osv):
fp = open(fname, 'wb') fp = open(fname, 'wb')
try: try:
fp.write(data) fp.write(data)
finally: finally:
fp.close() fp.close()
_logger.debug( "Saved data to %s." % fname) _logger.debug( "Saved data to %s." % fname)
filesize = len(data) # os.stat(fname).st_size filesize = len(data) # os.stat(fname).st_size
# TODO Here, an old file would be left hanging. # TODO Here, an old file would be left hanging.
except Exception, e: except Exception, e:
@ -586,7 +586,7 @@ class document_storage(osv.osv):
fp = open(fname,'wb') fp = open(fname,'wb')
try: try:
fp.write(data) fp.write(data)
finally: finally:
fp.close() fp.close()
_logger.debug("Saved data to %s.", fname) _logger.debug("Saved data to %s.", fname)
filesize = len(data) # os.stat(fname).st_size filesize = len(data) # os.stat(fname).st_size
@ -639,10 +639,10 @@ class document_storage(osv.osv):
files that have to be removed, too. """ files that have to be removed, too. """
if not storage_bo.online: if not storage_bo.online:
raise IOError(errno.EREMOTE, 'Medium offline!') raise IOError(errno.EREMOTE, 'Medium offline.')
if storage_bo.readonly: if storage_bo.readonly:
raise IOError(errno.EPERM, "Readonly medium!") raise IOError(errno.EPERM, "Readonly medium.")
if storage_bo.type == 'filestore': if storage_bo.type == 'filestore':
fname = fil_bo.store_fname fname = fil_bo.store_fname
@ -677,17 +677,17 @@ class document_storage(osv.osv):
""" A preparation for a file rename. """ A preparation for a file rename.
It will not affect the database, but merely check and perhaps It will not affect the database, but merely check and perhaps
rename the realstore file. rename the realstore file.
@return the dict of values that can safely be be stored in the db. @return the dict of values that can safely be be stored in the db.
""" """
sbro = self.browse(cr, uid, file_node.storage_id, context=context) sbro = self.browse(cr, uid, file_node.storage_id, context=context)
assert sbro, "The file #%d didn't provide storage" % file_node.file_id assert sbro, "The file #%d didn't provide storage" % file_node.file_id
if not sbro.online: if not sbro.online:
raise IOError(errno.EREMOTE, 'Medium offline!') raise IOError(errno.EREMOTE, 'Medium offline.')
if sbro.readonly: if sbro.readonly:
raise IOError(errno.EPERM, "Readonly medium!") raise IOError(errno.EPERM, "Readonly medium.")
if sbro.type in ('filestore', 'db', 'db64'): if sbro.type in ('filestore', 'db', 'db64'):
# nothing to do for a rename, allow to change the db field # nothing to do for a rename, allow to change the db field
@ -717,7 +717,7 @@ class document_storage(osv.osv):
""" A preparation for a file move. """ A preparation for a file move.
It will not affect the database, but merely check and perhaps It will not affect the database, but merely check and perhaps
move the realstore file. move the realstore file.
@param ndir_bro a browse object of document.directory, where this @param ndir_bro a browse object of document.directory, where this
file should move to. file should move to.
@return the dict of values that can safely be be stored in the db. @return the dict of values that can safely be be stored in the db.
@ -726,10 +726,10 @@ class document_storage(osv.osv):
assert sbro, "The file #%d didn't provide storage" % file_node.file_id assert sbro, "The file #%d didn't provide storage" % file_node.file_id
if not sbro.online: if not sbro.online:
raise IOError(errno.EREMOTE, 'Medium offline!') raise IOError(errno.EREMOTE, 'Medium offline.')
if sbro.readonly: if sbro.readonly:
raise IOError(errno.EPERM, "Readonly medium!") raise IOError(errno.EPERM, "Readonly medium.")
par = ndir_bro par = ndir_bro
psto = None psto = None
@ -757,7 +757,7 @@ class document_storage(osv.osv):
_logger.warning("Inconsistency to realstore: %s != %s." , fname, repr(opath)) _logger.warning("Inconsistency to realstore: %s != %s." , fname, repr(opath))
oldpath = os.path.join(path, opath[-1]) oldpath = os.path.join(path, opath[-1])
npath = [sbro.path,] + (ndir_bro.get_full_path() or []) npath = [sbro.path,] + (ndir_bro.get_full_path() or [])
npath = filter(lambda x: x is not None, npath) npath = filter(lambda x: x is not None, npath)
newdir = os.path.join(*npath) newdir = os.path.join(*npath)
@ -766,16 +766,16 @@ class document_storage(osv.osv):
os.makedirs(newdir) os.makedirs(newdir)
npath.append(opath[-1]) npath.append(opath[-1])
newpath = os.path.join(*npath) newpath = os.path.join(*npath)
_logger.debug("Going to move %s from %s to %s.", opath[-1], oldpath, newpath) _logger.debug("Going to move %s from %s to %s.", opath[-1], oldpath, newpath)
shutil.move(oldpath, newpath) shutil.move(oldpath, newpath)
store_path = npath[1:] + [opath[-1],] store_path = npath[1:] + [opath[-1],]
store_fname = os.path.join(*store_path) store_fname = os.path.join(*store_path)
return { 'store_fname': store_fname } return { 'store_fname': store_fname }
else: else:
raise TypeError("No %s storage!" % sbro.type) raise TypeError("No %s storage." % sbro.type)
document_storage() document_storage()

View File

@ -45,7 +45,7 @@ _logger = logging.getLogger(__name__)
def _str2time(cre): def _str2time(cre):
""" Convert a string with time representation (from db) into time (float) """ Convert a string with time representation (from db) into time (float)
Note: a place to fix if datetime is used in db. Note: a place to fix if datetime is used in db.
""" """
if not cre: if not cre:
@ -62,7 +62,7 @@ def get_node_context(cr, uid, context):
class node_context(object): class node_context(object):
""" This is the root node, representing access to some particular context """ This is the root node, representing access to some particular context
A context is a set of persistent data, which may influence the structure A context is a set of persistent data, which may influence the structure
of the nodes. All other transient information during a data query should of the nodes. All other transient information during a data query should
be passed down with function arguments. be passed down with function arguments.
@ -99,7 +99,7 @@ class node_context(object):
def __ne__(self, other): def __ne__(self, other):
return not self.__eq__(other) return not self.__eq__(other)
def get(self, name, default=None): def get(self, name, default=None):
return self.context.get(name, default) return self.context.get(name, default)
@ -119,7 +119,7 @@ class node_context(object):
"""Create (or locate) a node for a directory """Create (or locate) a node for a directory
@param dbro a browse object of document.directory @param dbro a browse object of document.directory
""" """
fullpath = dbro.get_full_path(context=self.context) fullpath = dbro.get_full_path(context=self.context)
klass = dbro.get_node_class(dbro, context=self.context) klass = dbro.get_node_class(dbro, context=self.context)
return klass(fullpath, None ,self, dbro) return klass(fullpath, None ,self, dbro)
@ -183,7 +183,7 @@ class node_descriptor(object):
def __nonzero__(self): def __nonzero__(self):
""" Ensure that a node_descriptor will never equal False """ Ensure that a node_descriptor will never equal False
Since we do define __len__ and __iter__ for us, we must avoid Since we do define __len__ and __iter__ for us, we must avoid
being regarded as non-true objects. being regarded as non-true objects.
""" """
@ -244,7 +244,7 @@ class node_class(object):
else: else:
s.append(self.path) s.append(self.path)
return s #map(lambda x: '/' +x, s) return s #map(lambda x: '/' +x, s)
def __repr__(self): def __repr__(self):
return "%s@/%s" % (self.our_type, '/'.join(self.full_path())) return "%s@/%s" % (self.our_type, '/'.join(self.full_path()))
@ -323,7 +323,7 @@ class node_class(object):
def get_dav_eprop(self, cr, ns, prop): def get_dav_eprop(self, cr, ns, prop):
if not self.DAV_M_NS: if not self.DAV_M_NS:
return None return None
if self.DAV_M_NS.has_key(ns): if self.DAV_M_NS.has_key(ns):
prefix = self.DAV_M_NS[ns] prefix = self.DAV_M_NS[ns]
else: else:
@ -340,12 +340,12 @@ class node_class(object):
r = m(cr) r = m(cr)
return r return r
except AttributeError: except AttributeError:
_logger.debug('Property %s not supported.' % prop, exc_info=True) _logger.debug('The property %s is not supported.' % prop, exc_info=True)
return None return None
def get_dav_resourcetype(self, cr): def get_dav_resourcetype(self, cr):
""" Get the DAV resource type. """ Get the DAV resource type.
Is here because some nodes may exhibit special behaviour, like Is here because some nodes may exhibit special behaviour, like
CalDAV/GroupDAV collections CalDAV/GroupDAV collections
""" """
@ -385,7 +385,7 @@ class node_class(object):
""" """
_logger.warning("Attempted to create a file under %r, not possible.", self) _logger.warning("Attempted to create a file under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create file(s) here.") raise IOError(errno.EPERM, "Not allowed to create file(s) here.")
def create_child_collection(self, cr, objname): def create_child_collection(self, cr, objname):
""" Create a child collection (directory) under self """ Create a child collection (directory) under self
""" """
@ -404,7 +404,7 @@ class node_class(object):
def check_perms(self, perms): def check_perms(self, perms):
""" Check the permissions of the current node. """ Check the permissions of the current node.
@param perms either an integers of the bits to check, or @param perms either an integers of the bits to check, or
a string with the permission letters a string with the permission letters
@ -414,7 +414,7 @@ class node_class(object):
4, r : allow read of file, or listing of dir contents 4, r : allow read of file, or listing of dir contents
8, u : allow remove (unlink) 8, u : allow remove (unlink)
""" """
if isinstance(perms, str): if isinstance(perms, str):
pe2 = 0 pe2 = 0
chars = { 'x': 1, 'w': 2, 'r': 4, 'u': 8 } chars = { 'x': 1, 'w': 2, 'r': 4, 'u': 8 }
@ -426,7 +426,7 @@ class node_class(object):
raise ValueError("Invalid permission bits.") raise ValueError("Invalid permission bits.")
else: else:
raise ValueError("Invalid permission attribute.") raise ValueError("Invalid permission attribute.")
return ((self.uidperms & perms) == perms) return ((self.uidperms & perms) == perms)
class node_database(node_class): class node_database(node_class):
@ -463,7 +463,7 @@ class node_database(node_class):
is_allowed = self.check_perms(1) is_allowed = self.check_perms(1)
else: else:
is_allowed = self.check_perms(5) is_allowed = self.check_perms(5)
if not is_allowed: if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied.") raise IOError(errno.EPERM, "Permission into directory denied.")
@ -493,7 +493,7 @@ def mkdosname(company_name, default='noname'):
for c in company_name[:8]: for c in company_name[:8]:
n += (c in badchars and '_') or c n += (c in badchars and '_') or c
return n return n
def _uid2unixperms(perms, has_owner): def _uid2unixperms(perms, has_owner):
""" Convert the uidperms and the owner flag to full unix bits """ Convert the uidperms and the owner flag to full unix bits
@ -566,7 +566,7 @@ class node_dir(node_database):
def _file_get(self, cr, nodename=False): def _file_get(self, cr, nodename=False):
res = super(node_dir,self)._file_get(cr, nodename) res = super(node_dir,self)._file_get(cr, nodename)
is_allowed = self.check_perms(nodename and 1 or 5) is_allowed = self.check_perms(nodename and 1 or 5)
if not is_allowed: if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied.") raise IOError(errno.EPERM, "Permission into directory denied.")
@ -583,7 +583,7 @@ class node_dir(node_database):
res.extend(res3) res.extend(res3)
return res return res
def _child_get(self, cr, name=None, domain=None): def _child_get(self, cr, name=None, domain=None):
dirobj = self.context._dirobj dirobj = self.context._dirobj
uid = self.context.uid uid = self.context.uid
@ -595,7 +595,7 @@ class node_dir(node_database):
is_allowed = self.check_perms(1) is_allowed = self.check_perms(1)
else: else:
is_allowed = self.check_perms(5) is_allowed = self.check_perms(5)
if not is_allowed: if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied.") raise IOError(errno.EPERM, "Permission into directory denied.")
@ -640,7 +640,7 @@ class node_dir(node_database):
raise OSError(39, 'Directory not empty.') raise OSError(39, 'Directory not empty.')
res = self.context._dirobj.unlink(cr, uid, [directory.id]) res = self.context._dirobj.unlink(cr, uid, [directory.id])
else: else:
raise OSError(1, 'Operation is not permited.') raise OSError(1, 'Operation is not permitted.')
return res return res
def create_child_collection(self, cr, objname): def create_child_collection(self, cr, objname):
@ -654,7 +654,7 @@ class node_dir(node_database):
ctx.update(self.dctx) ctx.update(self.dctx)
obj = dirobj.browse(cr, uid, self.dir_id) obj = dirobj.browse(cr, uid, self.dir_id)
if obj and (obj.type == 'ressource') and not object2: if obj and (obj.type == 'ressource') and not object2:
raise OSError(1, 'Operation is not permited.') raise OSError(1, 'Operation is not permitted.')
#objname = uri2[-1] #objname = uri2[-1]
val = { val = {
@ -730,7 +730,7 @@ class node_dir(node_database):
ret = {} ret = {}
if new_name and (new_name != dbro.name): if new_name and (new_name != dbro.name):
if ndir_node.child(cr, new_name): if ndir_node.child(cr, new_name):
raise IOError(errno.EEXIST, "Destination path already exists!") raise IOError(errno.EEXIST, "Destination path already exists.")
ret['name'] = new_name ret['name'] = new_name
del dbro del dbro
@ -864,10 +864,10 @@ class node_res_dir(node_class):
if not res_name: if not res_name:
continue continue
# Yes! we can't do better but skip nameless records. # Yes! we can't do better but skip nameless records.
# Escape the name for characters not supported in filenames # Escape the name for characters not supported in filenames
res_name = res_name.replace('/','_') # any other weird char? res_name = res_name.replace('/','_') # any other weird char?
if name and (res_name != ustr(name)): if name and (res_name != ustr(name)):
# we have matched _ to any character, but we only meant to match # we have matched _ to any character, but we only meant to match
# the special ones. # the special ones.
@ -1057,7 +1057,7 @@ class node_res_obj(node_class):
where2 = where + [('parent_id','=',self.dir_id) ] where2 = where + [('parent_id','=',self.dir_id) ]
ids = dirobj.search(cr, uid, where2, context=ctx) ids = dirobj.search(cr, uid, where2, context=ctx)
bo = obj.browse(cr, uid, self.res_id, context=ctx) bo = obj.browse(cr, uid, self.res_id, context=ctx)
for dirr in dirobj.browse(cr, uid, ids, context=ctx): for dirr in dirobj.browse(cr, uid, ids, context=ctx):
if name and (name != dirr.name): if name and (name != dirr.name):
continue continue
@ -1114,7 +1114,7 @@ class node_res_obj(node_class):
obj = dirobj.browse(cr, uid, self.dir_id) obj = dirobj.browse(cr, uid, self.dir_id)
if obj and (obj.type == 'ressource') and not object2: if obj and (obj.type == 'ressource') and not object2:
raise OSError(1, 'Operation is not permited.') raise OSError(1, 'Operation is not permitted.')
val = { val = {
@ -1177,14 +1177,14 @@ class node_file(node_class):
self.write_date = fil.write_date or fil.create_date self.write_date = fil.write_date or fil.create_date
self.content_length = fil.file_size self.content_length = fil.file_size
self.displayname = fil.name self.displayname = fil.name
self.uidperms = 14 self.uidperms = 14
if parent: if parent:
if not parent.check_perms('x'): if not parent.check_perms('x'):
self.uidperms = 0 self.uidperms = 0
elif not parent.check_perms('w'): elif not parent.check_perms('w'):
self.uidperms = 4 self.uidperms = 4
try: try:
self.uuser = (fil.user_id and fil.user_id.login) or 'nobody' self.uuser = (fil.user_id and fil.user_id.login) or 'nobody'
except Exception: except Exception:
@ -1323,7 +1323,7 @@ class node_file(node_class):
# there *must* be a parent node for this one # there *must* be a parent node for this one
self.parent = self.context.get_dir_node(cr, dbro.parent_id) self.parent = self.context.get_dir_node(cr, dbro.parent_id)
assert self.parent assert self.parent
ret = {} ret = {}
if ndir_node and self.parent != ndir_node: if ndir_node and self.parent != ndir_node:
if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)): if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)):
@ -1373,7 +1373,7 @@ class node_content(node_class):
self.uidperms = parent.uidperms & 14 self.uidperms = parent.uidperms & 14
self.uuser = parent.uuser self.uuser = parent.uuser
self.ugroup = parent.ugroup self.ugroup = parent.ugroup
self.extension = cnt.extension self.extension = cnt.extension
self.report_id = cnt.report_id and cnt.report_id.id self.report_id = cnt.report_id and cnt.report_id.id
#self.mimetype = cnt.extension. #self.mimetype = cnt.extension.
@ -1417,13 +1417,13 @@ class node_content(node_class):
cperms = 'rw' cperms = 'rw'
else: else:
raise IOError(errno.EINVAL, "Cannot open at mode %s." % mode) raise IOError(errno.EINVAL, "Cannot open at mode %s." % mode)
if not self.check_perms(cperms): if not self.check_perms(cperms):
raise IOError(errno.EPERM, "Permission denied.") raise IOError(errno.EPERM, "Permission denied.")
ctx = self.context.context.copy() ctx = self.context.context.copy()
ctx.update(self.dctx) ctx.update(self.dctx)
return nodefd_content(self, cr, mode, ctx) return nodefd_content(self, cr, mode, ctx)
def get_data_len(self, cr, fil_obj = None): def get_data_len(self, cr, fil_obj = None):
@ -1451,7 +1451,7 @@ class node_content(node_class):
return '' return ''
class nodefd_content(StringIO, node_descriptor): class nodefd_content(StringIO, node_descriptor):
""" A descriptor to content nodes """ A descriptor to content nodes
""" """
def __init__(self, parent, cr, mode, ctx): def __init__(self, parent, cr, mode, ctx):
@ -1474,7 +1474,7 @@ class nodefd_content(StringIO, node_descriptor):
StringIO.__init__(self, None) StringIO.__init__(self, None)
else: else:
_logger.error("Incorrect mode %s is specified.", mode) _logger.error("Incorrect mode %s is specified.", mode)
raise IOError(errno.EINVAL, "Invalid file mode!") raise IOError(errno.EINVAL, "Invalid file mode.")
self.mode = mode self.mode = mode
def size(self): def size(self):
@ -1506,7 +1506,7 @@ class nodefd_content(StringIO, node_descriptor):
StringIO.close(self) StringIO.close(self)
class nodefd_static(StringIO, node_descriptor): class nodefd_static(StringIO, node_descriptor):
""" A descriptor to nodes with static data. """ A descriptor to nodes with static data.
""" """
def __init__(self, parent, cr, mode, ctx=None): def __init__(self, parent, cr, mode, ctx=None):
@ -1528,7 +1528,7 @@ class nodefd_static(StringIO, node_descriptor):
StringIO.__init__(self, None) StringIO.__init__(self, None)
else: else:
_logger.error("Incorrect mode %s is specified.", mode) _logger.error("Incorrect mode %s is specified.", mode)
raise IOError(errno.EINVAL, "Invalid file mode!") raise IOError(errno.EINVAL, "Invalid file mode.")
self.mode = mode self.mode = mode
def size(self): def size(self):

View File

@ -303,7 +303,7 @@ class abstracted_fs(object):
raise raise
if not uid: if not uid:
cr.close() cr.close()
raise OSError(2, 'Authentification is Required!') raise OSError(2, 'Authentification required.')
n = get_node_context(cr, uid, {}) n = get_node_context(cr, uid, {})
node = n.get_uri(cr, p_parts[1:]) node = n.get_uri(cr, p_parts[1:])
return (cr, node, rem_path) return (cr, node, rem_path)
@ -318,7 +318,7 @@ class abstracted_fs(object):
node = self.cwd_node node = self.cwd_node
if node is False and mode not in ('???'): if node is False and mode not in ('???'):
cr.close() cr.close()
raise IOError(errno.ENOENT, 'Path does not exist!') raise IOError(errno.ENOENT, 'Path does not exist.')
return (cr, node, rem_path) return (cr, node, rem_path)
def get_node_cr_uid(self, node): def get_node_cr_uid(self, node):

View File

@ -302,13 +302,13 @@ class DummyAuthorizer:
provide customized response strings when user log-in and quit. provide customized response strings when user log-in and quit.
""" """
if self.has_user(username): if self.has_user(username):
raise AuthorizerError('User "%s" already exists!' %username) raise AuthorizerError('User "%s" already exists.' %username)
homedir = os.path.realpath(homedir) homedir = os.path.realpath(homedir)
if not os.path.isdir(homedir): if not os.path.isdir(homedir):
raise AuthorizerError('No such directory: "%s"!' %homedir) raise AuthorizerError('No such directory: "%s".' %homedir)
for p in perm: for p in perm:
if p not in 'elradfmw': if p not in 'elradfmw':
raise AuthorizerError('No such permission: "%s"!' %p) raise AuthorizerError('No such permission: "%s".' %p)
for p in perm: for p in perm:
if (p in self.write_perms) and (username == 'anonymous'): if (p in self.write_perms) and (username == 'anonymous'):
warnings.warn("Write permissions are assigned to anonymous user.", warnings.warn("Write permissions are assigned to anonymous user.",
@ -638,7 +638,7 @@ class DTPHandler(asyncore.dispatcher):
elif type == 'i': elif type == 'i':
self.data_wrapper = lambda x: x self.data_wrapper = lambda x: x
else: else:
raise TypeError, "Unsupported type!" raise TypeError, "Unsupported type."
self.receive = True self.receive = True
def get_transmitted_bytes(self): def get_transmitted_bytes(self):
@ -767,7 +767,7 @@ class DTPHandler(asyncore.dispatcher):
# some other exception occurred; we don't want to provide # some other exception occurred; we don't want to provide
# confidential error messages # confidential error messages
logerror(traceback.format_exc()) logerror(traceback.format_exc())
error = "Internal error!" error = "Internal error."
self.cmd_channel.respond("426 %s; transfer aborted." %error) self.cmd_channel.respond("426 %s; transfer aborted." %error)
self.close() self.close()
@ -823,7 +823,7 @@ class FileProducer:
elif type == 'i': elif type == 'i':
self.data_wrapper = lambda x: x self.data_wrapper = lambda x: x
else: else:
raise TypeError, "Unsupported type!" raise TypeError, "Unsupported type."
def more(self): def more(self):
"""Attempt a chunk of data of size self.buffer_size.""" """Attempt a chunk of data of size self.buffer_size."""
@ -2150,7 +2150,7 @@ class FTPHandler(asynchat.async_chat):
datacr = self.get_crdata2(line, mode='list') datacr = self.get_crdata2(line, mode='list')
# RFC-3659 requires 501 response code if path is not a directory # RFC-3659 requires 501 response code if path is not a directory
if not self.fs.isdir(datacr[1]): if not self.fs.isdir(datacr[1]):
err = 'No such directory!' err = 'No such directory.'
self.log('FAIL MLSD "%s". %s.' %(line, err)) self.log('FAIL MLSD "%s". %s.' %(line, err))
self.respond("501 %s." %err) self.respond("501 %s." %err)
return return
@ -2191,7 +2191,7 @@ class FTPHandler(asynchat.async_chat):
fd.seek(self.restart_position) fd.seek(self.restart_position)
ok = 1 ok = 1
except AssertionError: except AssertionError:
why = "Invalid REST parameter!" why = "Invalid REST parameter."
except IOError, err: except IOError, err:
why = _strerror(err) why = _strerror(err)
self.restart_position = 0 self.restart_position = 0
@ -2240,7 +2240,7 @@ class FTPHandler(asynchat.async_chat):
fd.seek(self.restart_position) fd.seek(self.restart_position)
ok = 1 ok = 1
except AssertionError: except AssertionError:
why = "Invalid REST parameter!" why = "Invalid REST parameter."
except IOError, err: except IOError, err:
why = _strerror(err) why = _strerror(err)
self.restart_position = 0 self.restart_position = 0
@ -2760,7 +2760,7 @@ class FTPHandler(asynchat.async_chat):
def ftp_OPTS(self, line): def ftp_OPTS(self, line):
"""Specify options for FTP commands as specified in RFC-2389.""" """Specify options for FTP commands as specified in RFC-2389."""
try: try:
assert (not line.count(' ') > 1), 'Invalid number of arguments!' assert (not line.count(' ') > 1), 'Invalid number of arguments.'
if ' ' in line: if ' ' in line:
cmd, arg = line.split(' ') cmd, arg = line.split(' ')
assert (';' in arg), 'Invalid argument!' assert (';' in arg), 'Invalid argument!'

View File

@ -78,13 +78,13 @@ def _str2time(cre):
class BoundStream2(object): class BoundStream2(object):
"""Wraps around a seekable buffer, reads a determined range of data """Wraps around a seekable buffer, reads a determined range of data
Note that the supplied stream object MUST support a size() which Note that the supplied stream object MUST support a size() which
should return its data length (in bytes). should return its data length (in bytes).
A variation of the class in websrv_lib.py A variation of the class in websrv_lib.py
""" """
def __init__(self, stream, offset=None, length=None, chunk_size=None): def __init__(self, stream, offset=None, length=None, chunk_size=None):
self._stream = stream self._stream = stream
self._offset = offset or 0 self._offset = offset or 0
@ -99,7 +99,7 @@ class BoundStream2(object):
def read(self, size=-1): def read(self, size=-1):
if not self._stream: if not self._stream:
raise IOError(errno.EBADF, "read() without stream.") raise IOError(errno.EBADF, "read() without stream.")
if self._rem_length == 0: if self._rem_length == 0:
return '' return ''
elif self._rem_length < 0: elif self._rem_length < 0:
@ -110,7 +110,7 @@ class BoundStream2(object):
rsize = size rsize = size
if self._chunk_size and self._chunk_size < rsize: if self._chunk_size and self._chunk_size < rsize:
rsize = self._chunk_size rsize = self._chunk_size
data = self._stream.read(rsize) data = self._stream.read(rsize)
self._rem_length -= len(data) self._rem_length -= len(data)
@ -136,25 +136,25 @@ class BoundStream2(object):
""" """
if whence == os.SEEK_SET: if whence == os.SEEK_SET:
if pos < 0 or pos > self._length: if pos < 0 or pos > self._length:
raise IOError(errno.EINVAL,"Cannot seek!") raise IOError(errno.EINVAL,"Cannot seek.")
self._stream.seek(pos - self._offset) self._stream.seek(pos - self._offset)
self._rem_length = self._length - pos self._rem_length = self._length - pos
elif whence == os.SEEK_CUR: elif whence == os.SEEK_CUR:
if pos > 0: if pos > 0:
if pos > self._rem_length: if pos > self._rem_length:
raise IOError(errno.EINVAL,"Cannot seek past end!") raise IOError(errno.EINVAL,"Cannot seek past end.")
elif pos < 0: elif pos < 0:
oldpos = self.tell() oldpos = self.tell()
if oldpos + pos < 0: if oldpos + pos < 0:
raise IOError(errno.EINVAL,"Cannot seek before start!") raise IOError(errno.EINVAL,"Cannot seek before start.")
self._stream.seek(pos, os.SEEK_CUR) self._stream.seek(pos, os.SEEK_CUR)
self._rem_length -= pos self._rem_length -= pos
elif whence == os.SEEK_END: elif whence == os.SEEK_END:
if pos > 0: if pos > 0:
raise IOError(errno.EINVAL,"Cannot seek past end!") raise IOError(errno.EINVAL,"Cannot seek past end.")
else: else:
if self._length + pos < 0: if self._length + pos < 0:
raise IOError(errno.EINVAL,"Cannot seek before start!") raise IOError(errno.EINVAL,"Cannot seek before start.")
newpos = self._offset + self._length + pos newpos = self._offset + self._length + pos
self._stream.seek(newpos, os.SEEK_SET) self._stream.seek(newpos, os.SEEK_SET)
self._rem_length = 0 - pos self._rem_length = 0 - pos
@ -400,7 +400,7 @@ class openerp_dav_handler(dav_interface):
domain = None domain = None
if filters: if filters:
domain = node.get_domain(cr, filters) domain = node.get_domain(cr, filters)
if hasattr(filters, 'getElementsByTagNameNS'): if hasattr(filters, 'getElementsByTagNameNS'):
hrefs = filters.getElementsByTagNameNS('DAV:', 'href') hrefs = filters.getElementsByTagNameNS('DAV:', 'href')
if hrefs: if hrefs:
@ -434,7 +434,7 @@ class openerp_dav_handler(dav_interface):
except DAV_Error: except DAV_Error:
raise raise
except Exception, e: except Exception, e:
self.parent.log_error("Cannot get_children: "+ str(e)) self.parent.log_error("Cannot get_children: "+str(e)+".")
raise raise
finally: finally:
if cr: cr.close() if cr: cr.close()
@ -488,7 +488,7 @@ class openerp_dav_handler(dav_interface):
if not node: if not node:
raise DAV_NotFound2(uri2) raise DAV_NotFound2(uri2)
# TODO: if node is a collection, for some specific set of # TODO: if node is a collection, for some specific set of
# clients ( web browsers; available in node context), # clients ( web browsers; available in node context),
# we may return a pseydo-html page with the directory listing. # we may return a pseydo-html page with the directory listing.
try: try:
res = node.open_data(cr,'r') res = node.open_data(cr,'r')
@ -508,7 +508,7 @@ class openerp_dav_handler(dav_interface):
else: else:
length = res.size() - start length = res.size() - start
res = BoundStream2(res, offset=start, length=length) res = BoundStream2(res, offset=start, length=length)
except TypeError,e: except TypeError,e:
# for the collections that return this error, the DAV standard # for the collections that return this error, the DAV standard
# says we'd better just return 200 OK with empty data # says we'd better just return 200 OK with empty data
@ -564,10 +564,10 @@ class openerp_dav_handler(dav_interface):
@memoize(CACHE_SIZE) @memoize(CACHE_SIZE)
def _get_dav_getcontentlength(self, uri): def _get_dav_getcontentlength(self, uri):
""" return the content length of an object """ """ return the content length of an object """
self.parent.log_message('get length: %s' % uri) self.parent.log_message('get length: %s' % uri)
result = 0 result = 0
cr, uid, pool, dbname, uri2 = self.get_cr(uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not dbname: if not dbname:
if cr: cr.close() if cr: cr.close()
return str(result) return str(result)
@ -602,7 +602,7 @@ class openerp_dav_handler(dav_interface):
cr, uid, pool, dbname, uri2 = self.get_cr(uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not dbname: if not dbname:
return time.time() return time.time()
try: try:
node = self.uri2object(cr, uid, pool, uri2) node = self.uri2object(cr, uid, pool, uri2)
if not node: if not node:
raise DAV_NotFound2(uri2) raise DAV_NotFound2(uri2)
@ -623,11 +623,11 @@ class openerp_dav_handler(dav_interface):
@memoize(CACHE_SIZE) @memoize(CACHE_SIZE)
def get_creationdate(self, uri): def get_creationdate(self, uri):
""" return the last modified date of the object """ """ return the last modified date of the object """
cr, uid, pool, dbname, uri2 = self.get_cr(uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not dbname: if not dbname:
raise DAV_Error, 409 raise DAV_Error, 409
try: try:
node = self.uri2object(cr, uid, pool, uri2) node = self.uri2object(cr, uid, pool, uri2)
if not node: if not node:
raise DAV_NotFound2(uri2) raise DAV_NotFound2(uri2)
@ -643,7 +643,7 @@ class openerp_dav_handler(dav_interface):
if not dbname: if not dbname:
if cr: cr.close() if cr: cr.close()
return 'httpd/unix-directory' return 'httpd/unix-directory'
try: try:
node = self.uri2object(cr, uid, pool, uri2) node = self.uri2object(cr, uid, pool, uri2)
if not node: if not node:
raise DAV_NotFound2(uri2) raise DAV_NotFound2(uri2)
@ -651,8 +651,8 @@ class openerp_dav_handler(dav_interface):
return result return result
#raise DAV_NotFound, 'Could not find %s' % path #raise DAV_NotFound, 'Could not find %s' % path
finally: finally:
if cr: cr.close() if cr: cr.close()
def mkcol(self,uri): def mkcol(self,uri):
""" create a new collection """ create a new collection
see par. 9.3 of rfc4918 see par. 9.3 of rfc4918
@ -690,9 +690,9 @@ class openerp_dav_handler(dav_interface):
node = self.uri2object(cr, uid, pool, uri2[:]) node = self.uri2object(cr, uid, pool, uri2[:])
except Exception: except Exception:
node = False node = False
objname = misc.ustr(uri2[-1]) objname = misc.ustr(uri2[-1])
ret = None ret = None
if not node: if not node:
dir_node = self.uri2object(cr, uid, pool, uri2[:-1]) dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
@ -706,14 +706,14 @@ class openerp_dav_handler(dav_interface):
cr.commit() cr.commit()
cr.close() cr.close()
raise DAV_Error(400, "Failed to create resource.") raise DAV_Error(400, "Failed to create resource.")
uparts=urlparse.urlparse(uri) uparts=urlparse.urlparse(uri)
fileloc = '/'.join(newchild.full_path()) fileloc = '/'.join(newchild.full_path())
if isinstance(fileloc, unicode): if isinstance(fileloc, unicode):
fileloc = fileloc.encode('utf-8') fileloc = fileloc.encode('utf-8')
# the uri we get is a mangled one, where the davpath has been removed # the uri we get is a mangled one, where the davpath has been removed
davpath = self.parent.get_davpath() davpath = self.parent.get_davpath()
surl = '%s://%s' % (uparts[0], uparts[1]) surl = '%s://%s' % (uparts[0], uparts[1])
uloc = urllib.quote(fileloc) uloc = urllib.quote(fileloc)
hurl = False hurl = False
@ -727,19 +727,19 @@ class openerp_dav_handler(dav_interface):
ret = (str(hurl), etag) ret = (str(hurl), etag)
else: else:
self._try_function(node.set_data, (cr, data), "save %s" % objname, cr=cr) self._try_function(node.set_data, (cr, data), "save %s" % objname, cr=cr)
cr.commit() cr.commit()
cr.close() cr.close()
return ret return ret
def rmcol(self,uri): def rmcol(self,uri):
""" delete a collection """ """ delete a collection """
cr, uid, pool, dbname, uri2 = self.get_cr(uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not dbname: if not dbname:
if cr: cr.close() if cr: cr.close()
raise DAV_Error, 409 raise DAV_Error, 409
node = self.uri2object(cr, uid, pool, uri2) node = self.uri2object(cr, uid, pool, uri2)
self._try_function(node.rmcol, (cr,), "rmcol %s" % uri, cr=cr) self._try_function(node.rmcol, (cr,), "rmcol %s" % uri, cr=cr)
cr.commit() cr.commit()
@ -748,14 +748,14 @@ class openerp_dav_handler(dav_interface):
def rm(self,uri): def rm(self,uri):
cr, uid, pool,dbname, uri2 = self.get_cr(uri) cr, uid, pool,dbname, uri2 = self.get_cr(uri)
if not dbname: if not dbname:
if cr: cr.close() if cr: cr.close()
raise DAV_Error, 409 raise DAV_Error, 409
node = self.uri2object(cr, uid, pool, uri2) node = self.uri2object(cr, uid, pool, uri2)
res = self._try_function(node.rm, (cr,), "rm %s" % uri, cr=cr) res = self._try_function(node.rm, (cr,), "rm %s" % uri, cr=cr)
if not res: if not res:
if cr: cr.close() if cr: cr.close()
raise OSError(1, 'Operation not permited.') raise OSError(1, 'Operation not permitted.')
cr.commit() cr.commit()
cr.close() cr.close()
return 204 return 204
@ -922,8 +922,8 @@ class openerp_dav_handler(dav_interface):
return result return result
def unlock(self, uri, token): def unlock(self, uri, token):
""" Unlock a resource from that token """ Unlock a resource from that token
@return True if unlocked, False if no lock existed, Exceptions @return True if unlocked, False if no lock existed, Exceptions
""" """
cr, uid, pool, dbname, uri2 = self.get_cr(uri) cr, uid, pool, dbname, uri2 = self.get_cr(uri)
@ -959,9 +959,9 @@ class openerp_dav_handler(dav_interface):
node = self.uri2object(cr, uid, pool, uri2[:]) node = self.uri2object(cr, uid, pool, uri2[:])
except Exception: except Exception:
node = False node = False
objname = misc.ustr(uri2[-1]) objname = misc.ustr(uri2[-1])
if not node: if not node:
dir_node = self.uri2object(cr, uid, pool, uri2[:-1]) dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
if not dir_node: if not dir_node:
@ -976,7 +976,7 @@ class openerp_dav_handler(dav_interface):
cr.commit() cr.commit()
cr.close() cr.close()
raise DAV_Error(400, "Failed to create resource.") raise DAV_Error(400, "Failed to create resource.")
created = True created = True
try: try:
@ -993,9 +993,9 @@ class openerp_dav_handler(dav_interface):
cr.commit() cr.commit()
cr.close() cr.close()
raise DAV_Error(423, "Resource already locked.") raise DAV_Error(423, "Resource already locked.")
assert isinstance(lres, list), 'lres: %s' % repr(lres) assert isinstance(lres, list), 'lres: %s' % repr(lres)
try: try:
data = mk_lock_response(self, uri, lres) data = mk_lock_response(self, uri, lres)
cr.commit() cr.commit()

View File

@ -43,7 +43,7 @@ class document_davdir(osv.osv):
elif dbro.type == 'ressource': elif dbro.type == 'ressource':
return nodes.node_res_dir return nodes.node_res_dir
else: else:
raise ValueError("Directory node for %s type", dbro.type) raise ValueError("Directory node for %s type.", dbro.type)
def _prepare_context(self, cr, uid, nctx, context=None): def _prepare_context(self, cr, uid, nctx, context=None):
nctx.node_file_class = nodes.node_file nctx.node_file_class = nodes.node_file
@ -67,18 +67,18 @@ document_davdir()
class dav_dir_property(osv.osv): class dav_dir_property(osv.osv):
""" Arbitrary WebDAV properties, attached to document.directories. """ Arbitrary WebDAV properties, attached to document.directories.
Some DAV properties have to be settable at directories, depending Some DAV properties have to be settable at directories, depending
on the database directory structure. on the database directory structure.
Example would be the principal-URL. Example would be the principal-URL.
There _can_ be properties without a directory, which means that they There _can_ be properties without a directory, which means that they
globally apply to all the directories (aka. collections) of the globally apply to all the directories (aka. collections) of the
present database. present database.
""" """
_name = 'document.webdav.dir.property' _name = 'document.webdav.dir.property'
_columns = { _columns = {
'create_date': fields.datetime('Date Created', readonly=True), 'create_date': fields.datetime('Date Created', readonly=True),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True), 'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
@ -90,25 +90,25 @@ class dav_dir_property(osv.osv):
'value': fields.text('Value'), 'value': fields.text('Value'),
'do_subst': fields.boolean('Substitute', required=True), 'do_subst': fields.boolean('Substitute', required=True),
} }
_defaults = { _defaults = {
'do_subst': False, 'do_subst': False,
} }
dav_dir_property() dav_dir_property()
class dav_file_property(osv.osv): class dav_file_property(osv.osv):
""" Arbitrary WebDAV properties, attached to ir.attachments. """ Arbitrary WebDAV properties, attached to ir.attachments.
A special case is the locks that can be applied on file nodes. A special case is the locks that can be applied on file nodes.
There _can_ be properties without a file (RFC?), which means that they There _can_ be properties without a file (RFC?), which means that they
globally apply to all the attachments of the present database. globally apply to all the attachments of the present database.
TODO access permissions, per property. TODO access permissions, per property.
""" """
_name = 'document.webdav.file.property' _name = 'document.webdav.file.property'
_columns = { _columns = {
'create_date': fields.datetime('Date Created', readonly=True), 'create_date': fields.datetime('Date Created', readonly=True),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True), 'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
@ -120,11 +120,11 @@ class dav_file_property(osv.osv):
'value': fields.text('Value'), 'value': fields.text('Value'),
'do_subst': fields.boolean('Substitute', required=True), 'do_subst': fields.boolean('Substitute', required=True),
} }
_defaults = { _defaults = {
'do_subst': False, 'do_subst': False,
} }
dav_file_property() dav_file_property()
#eof #eof

View File

@ -69,9 +69,9 @@ class PersistentTransport(Transport):
host, extra_headers, x509 = Transport.get_host_info(self,host) host, extra_headers, x509 = Transport.get_host_info(self,host)
if extra_headers == None: if extra_headers == None:
extra_headers = [] extra_headers = []
extra_headers.append( ( 'Connection', 'keep-alive' )) extra_headers.append( ( 'Connection', 'keep-alive' ))
return host, extra_headers, x509 return host, extra_headers, x509
def _parse_response(self, file, sock, response): def _parse_response(self, file, sock, response):
@ -122,9 +122,9 @@ class PersistentTransport(Transport):
resp = h._conn.getresponse() resp = h._conn.getresponse()
# TODO: except BadStatusLine, e: # TODO: except BadStatusLine, e:
errcode, errmsg, headers = resp.status, resp.reason, resp.msg errcode, errmsg, headers = resp.status, resp.reason, resp.msg
if errcode != 200: if errcode != 200:
raise ProtocolError( raise ProtocolError(
@ -145,7 +145,7 @@ class PersistentTransport(Transport):
class CompressedTransport(PersistentTransport): class CompressedTransport(PersistentTransport):
def send_content(self, connection, request_body): def send_content(self, connection, request_body):
connection.putheader("Content-Type", "text/xml") connection.putheader("Content-Type", "text/xml")
if len(request_body) > 512 or True: if len(request_body) > 512 or True:
buffer = StringIO.StringIO() buffer = StringIO.StringIO()
output = gzip.GzipFile(mode='wb', fileobj=buffer) output = gzip.GzipFile(mode='wb', fileobj=buffer)
@ -176,7 +176,7 @@ class SafePersistentTransport(PersistentTransport):
class AuthClient(object): class AuthClient(object):
def getAuth(self, atype, realm): def getAuth(self, atype, realm):
raise NotImplementedError("Cannot authenticate for %s" % atype) raise NotImplementedError("Cannot authenticate for %s" % atype)
def resolveFailedRealm(self, realm): def resolveFailedRealm(self, realm):
""" Called when, using a known auth type, the realm is not in cache """ Called when, using a known auth type, the realm is not in cache
""" """
@ -195,7 +195,7 @@ class BasicAuthClient(AuthClient):
_logger.debug("missing key: \"%s\"" % realm) _logger.debug("missing key: \"%s\"" % realm)
self.resolveFailedRealm(realm) self.resolveFailedRealm(realm)
return 'Basic '+ self._realm_dict[realm] return 'Basic '+ self._realm_dict[realm]
def addLogin(self, realm, username, passwd): def addLogin(self, realm, username, passwd):
""" Add some known username/password for a specific login. """ Add some known username/password for a specific login.
This function should be called once, for each realm This function should be called once, for each realm
@ -210,7 +210,7 @@ class BasicAuthClient(AuthClient):
class addAuthTransport: class addAuthTransport:
""" Intermediate class that authentication algorithm to http transport """ Intermediate class that authentication algorithm to http transport
""" """
def setAuthClient(self, authobj): def setAuthClient(self, authobj):
""" Set the authentication client object. """ Set the authentication client object.
This method must be called before any request is issued, that This method must be called before any request is issued, that
@ -218,7 +218,7 @@ class addAuthTransport:
""" """
assert isinstance(authobj, AuthClient) assert isinstance(authobj, AuthClient)
self._auth_client = authobj self._auth_client = authobj
def request(self, host, handler, request_body, verbose=0): def request(self, host, handler, request_body, verbose=0):
# issue XML-RPC request # issue XML-RPC request
@ -226,7 +226,7 @@ class addAuthTransport:
h = self.make_connection(host) h = self.make_connection(host)
if verbose: if verbose:
h.set_debuglevel(1) h.set_debuglevel(1)
tries = 0 tries = 0
atype = None atype = None
realm = None realm = None
@ -246,7 +246,7 @@ class addAuthTransport:
resp = h._conn.getresponse() resp = h._conn.getresponse()
# except BadStatusLine, e: # except BadStatusLine, e:
tries += 1 tries += 1
if resp.status == 401: if resp.status == 401:
if 'www-authenticate' in resp.msg: if 'www-authenticate' in resp.msg:
(atype,realm) = resp.msg.getheader('www-authenticate').split(' ',1) (atype,realm) = resp.msg.getheader('www-authenticate').split(' ',1)
@ -258,7 +258,7 @@ class addAuthTransport:
_logger.debug("Resp: %r %r", resp.version,resp.isclosed(), resp.will_close) _logger.debug("Resp: %r %r", resp.version,resp.isclosed(), resp.will_close)
_logger.debug("Want to do auth %s for realm %s", atype, realm) _logger.debug("Want to do auth %s for realm %s", atype, realm)
if atype != 'Basic': if atype != 'Basic':
raise ProtocolError(host+handler, 403, raise ProtocolError(host+handler, 403,
"Unknown authentication method: %s" % atype, resp.msg) "Unknown authentication method: %s" % atype, resp.msg)
continue # with the outer while loop continue # with the outer while loop
else: else:
@ -268,14 +268,14 @@ class addAuthTransport:
if resp.status != 200: if resp.status != 200:
raise ProtocolError( host + handler, raise ProtocolError( host + handler,
resp.status, resp.reason, resp.msg ) resp.status, resp.reason, resp.msg )
self.verbose = verbose self.verbose = verbose
try: try:
sock = h._conn.sock sock = h._conn.sock
except AttributeError: except AttributeError:
sock = None sock = None
return self._parse_response(h.getfile(), sock, resp) return self._parse_response(h.getfile(), sock, resp)
raise ProtocolError(host+handler, 403, "No authentication.",'') raise ProtocolError(host+handler, 403, "No authentication.",'')
@ -302,7 +302,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
ca_certs=ca_certs, ca_certs=ca_certs,
cert_reqs=cert_reqs) cert_reqs=cert_reqs)
def getpeercert(self): def getpeercert(self):
import ssl import ssl
@ -316,14 +316,14 @@ class HTTPSConnection(httplib.HTTPSConnection):
if cert[0-lf] != '\n': if cert[0-lf] != '\n':
cert = cert[:0-lf]+'\n'+cert[0-lf:] cert = cert[:0-lf]+'\n'+cert[0-lf:]
_logger.debug("len-footer: %s cert: %r", lf, cert[0-lf]) _logger.debug("len-footer: %s cert: %r", lf, cert[0-lf])
return cert return cert
class DAVClient(object): class DAVClient(object):
"""An instance of a WebDAV client, connected to the OpenERP server """An instance of a WebDAV client, connected to the OpenERP server
""" """
def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None): def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None):
if use_ssl: if use_ssl:
self.host = config.get_misc('httpsd', 'interface', False) self.host = config.get_misc('httpsd', 'interface', False)
@ -353,10 +353,10 @@ class DAVClient(object):
def get_creds(self, obj, cr, uid): def get_creds(self, obj, cr, uid):
"""Read back the user credentials from cr, uid """Read back the user credentials from cr, uid
@param obj is any orm object, in order to use its pool @param obj is any orm object, in order to use its pool
@param uid is the numeric id, which we will try to reverse resolve @param uid is the numeric id, which we will try to reverse resolve
note: this is a hackish way to get the credentials. It is expected note: this is a hackish way to get the credentials. It is expected
to break if "base_crypt" is used. to break if "base_crypt" is used.
""" """
@ -366,7 +366,7 @@ class DAVClient(object):
self.user = res[0]['login'] self.user = res[0]['login']
self.passwd = res[0]['password'] self.passwd = res[0]['password']
if self.passwd.startswith('$1$'): if self.passwd.startswith('$1$'):
# md5 by base crypt. We cannot decode, wild guess # md5 by base crypt. We cannot decode, wild guess
# that passwd = login # that passwd = login
self.passwd = self.user self.passwd = self.user
return True return True
@ -415,7 +415,7 @@ class DAVClient(object):
auths = base64.encodestring(self.user + ':' + self.passwd) auths = base64.encodestring(self.user + ':' + self.passwd)
if auths[-1] == "\n": if auths[-1] == "\n":
auths = auths[:-1] auths = auths[:-1]
hdrs['Authorization']= 'Basic '+ auths hdrs['Authorization']= 'Basic '+ auths
#sleep(1) #sleep(1)
conn.request(method, path, body, hdrs ) conn.request(method, path, body, hdrs )
r1 = conn.getresponse() r1 = conn.getresponse()
@ -437,7 +437,7 @@ class DAVClient(object):
doc = xml.dom.minidom.parseString(data1) doc = xml.dom.minidom.parseString(data1)
_logger.debug("XML Body:\n %s", doc.toprettyxml(indent="\t")) _logger.debug("XML Body:\n %s", doc.toprettyxml(indent="\t"))
except Exception: except Exception:
_logger.warning("cannot print xml", exc_info=True) _logger.warning("Cannot print XML.", exc_info=True)
pass pass
conn.close() conn.close()
return r1.status, r1.msg, data1 return r1.status, r1.msg, data1
@ -475,10 +475,10 @@ class DAVClient(object):
assert s == 200, "Status: %r" % s assert s == 200, "Status: %r" % s
assert 'OPTIONS' in m.getheader('Allow') assert 'OPTIONS' in m.getheader('Allow')
_logger.debug('Options: %r', m.getheader('Allow')) _logger.debug('Options: %r', m.getheader('Allow'))
if expect: if expect:
self._assert_headers(expect, m) self._assert_headers(expect, m)
def _parse_prop_response(self, data): def _parse_prop_response(self, data):
""" Parse a propfind/propname response """ Parse a propfind/propname response
""" """
@ -488,7 +488,7 @@ class DAVClient(object):
if node.nodeType == node.TEXT_NODE: if node.nodeType == node.TEXT_NODE:
rc.append(node.data) rc.append(node.data)
return ''.join(rc) return ''.join(rc)
def getElements(node, namespaces=None, strict=False): def getElements(node, namespaces=None, strict=False):
for cnod in node.childNodes: for cnod in node.childNodes:
if cnod.nodeType != node.ELEMENT_NODE: if cnod.nodeType != node.ELEMENT_NODE:
@ -534,10 +534,10 @@ class DAVClient(object):
rstatus = int(sta) rstatus = int(sta)
else: else:
_logger.debug("What is <%s> inside a <propstat>?", pno.tagName) _logger.debug("What is <%s> inside a <propstat>?", pno.tagName)
else: else:
_logger.debug("Unknown node: %s", cno.tagName) _logger.debug("Unknown node: %s", cno.tagName)
res.setdefault(href,[]).append((status, res_nss)) res.setdefault(href,[]).append((status, res_nss))
return res return res
@ -558,7 +558,7 @@ class DAVClient(object):
propstr += '<ns%d:%s xmlns:ns%d="%s" />' %(nscount, p, nscount, ns) propstr += '<ns%d:%s xmlns:ns%d="%s" />' %(nscount, p, nscount, ns)
nscount += 1 nscount += 1
propstr += '</prop>' propstr += '</prop>'
body="""<?xml version="1.0" encoding="utf-8"?> body="""<?xml version="1.0" encoding="utf-8"?>
<propfind xmlns="DAV:">%s</propfind>""" % propstr <propfind xmlns="DAV:">%s</propfind>""" % propstr
hdrs = { 'Content-Type': 'text/xml; charset=utf-8', hdrs = { 'Content-Type': 'text/xml; charset=utf-8',
@ -566,7 +566,7 @@ class DAVClient(object):
'Depth': depth, 'Depth': depth,
} }
s, m, d = self._http_request(self.davpath + path, method='PROPFIND', s, m, d = self._http_request(self.davpath + path, method='PROPFIND',
hdrs=hdrs, body=body) hdrs=hdrs, body=body)
assert s == 207, "Bad status: %s" % s assert s == 207, "Bad status: %s" % s
ctype = m.getheader('Content-Type').split(';',1)[0] ctype = m.getheader('Content-Type').split(';',1)[0]
@ -578,7 +578,7 @@ class DAVClient(object):
else: else:
assert len(res) >= 1 assert len(res) >= 1
return res return res
def gd_propname(self, path, depth=0): def gd_propname(self, path, depth=0):
body="""<?xml version="1.0" encoding="utf-8"?> body="""<?xml version="1.0" encoding="utf-8"?>
@ -587,7 +587,7 @@ class DAVClient(object):
'Accept': 'text/xml', 'Accept': 'text/xml',
'Depth': depth 'Depth': depth
} }
s, m, d = self._http_request(self.davpath + path, method='PROPFIND', s, m, d = self._http_request(self.davpath + path, method='PROPFIND',
hdrs=hdrs, body=body) hdrs=hdrs, body=body)
assert s == 207, "Bad status: %s" % s assert s == 207, "Bad status: %s" % s
ctype = m.getheader('Content-Type').split(';',1)[0] ctype = m.getheader('Content-Type').split(';',1)[0]
@ -605,7 +605,7 @@ class DAVClient(object):
def gd_lsl(self, path): def gd_lsl(self, path):
""" Return a list of 'ls -l' kind of data for a folder """ Return a list of 'ls -l' kind of data for a folder
This is based on propfind. This is based on propfind.
""" """
@ -616,7 +616,7 @@ class DAVClient(object):
propnames = [ l[1] for l in lspairs] propnames = [ l[1] for l in lspairs]
propres = self.gd_propfind(path, props=propnames, depth=1) propres = self.gd_propfind(path, props=propnames, depth=1)
res = [] res = []
for href, pr in propres.items(): for href, pr in propres.items():
lsline = {} lsline = {}
@ -638,9 +638,9 @@ class DAVClient(object):
lsline[lsp[0]] = lsp[2] lsline[lsp[0]] = lsp[2]
else: else:
_logger.debug("Strange status: %s", st) _logger.debug("Strange status: %s", st)
res.append(lsline) res.append(lsline)
return res return res
def gd_get(self, path, crange=None, mime=None, compare=None): def gd_get(self, path, crange=None, mime=None, compare=None):
@ -683,7 +683,7 @@ class DAVClient(object):
return ctype, rrange, d return ctype, rrange, d
def gd_put(self, path, body=None, srcpath=None, mime=None, noclobber=False, ): def gd_put(self, path, body=None, srcpath=None, mime=None, noclobber=False, ):
""" HTTP PUT """ HTTP PUT
@param noclobber will prevent overwriting a resource (If-None-Match) @param noclobber will prevent overwriting a resource (If-None-Match)
@param mime will set the content-type @param mime will set the content-type
""" """
@ -698,7 +698,7 @@ class DAVClient(object):
hdrs['Content-Type'] = mime hdrs['Content-Type'] = mime
if noclobber: if noclobber:
hdrs['If-None-Match'] = '*' hdrs['If-None-Match'] = '*'
s, m, d = self._http_request(self.davpath + path, method='PUT', s, m, d = self._http_request(self.davpath + path, method='PUT',
hdrs=hdrs, body=body) hdrs=hdrs, body=body)
assert s == (201), "Bad status: %s" % s assert s == (201), "Bad status: %s" % s
etag = m.getheader('ETag') etag = m.getheader('ETag')

View File

@ -73,7 +73,7 @@ def OpenDAVConfig(**kw):
class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler): class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
verbose = False verbose = False
protocol_version = 'HTTP/1.1' protocol_version = 'HTTP/1.1'
_HTTP_OPTIONS= { 'DAV' : ['1', '2'], _HTTP_OPTIONS= { 'DAV' : ['1', '2'],
'Allow' : [ 'GET', 'HEAD', 'COPY', 'MOVE', 'POST', 'PUT', 'Allow' : [ 'GET', 'HEAD', 'COPY', 'MOVE', 'POST', 'PUT',
@ -304,7 +304,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
res = dc.unlock(uri, token) res = dc.unlock(uri, token)
except DAV_Error, (ec, dd): except DAV_Error, (ec, dd):
return self.send_status(ec, dd) return self.send_status(ec, dd)
if res == True: if res == True:
self.send_body(None, '204', 'OK', 'Resource unlocked.') self.send_body(None, '204', 'OK', 'Resource unlocked.')
else: else:
@ -338,7 +338,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
if isinstance(ldif, list): if isinstance(ldif, list):
if len(ldif) !=1 or (not isinstance(ldif[0], TagList)) \ if len(ldif) !=1 or (not isinstance(ldif[0], TagList)) \
or len(ldif[0].list) != 1: or len(ldif[0].list) != 1:
raise DAV_Error(400, "Cannot accept multiple tokens!") raise DAV_Error(400, "Cannot accept multiple tokens.")
ldif = ldif[0].list[0] ldif = ldif[0].list[0]
if ldif[0] == '<' and ldif[-1] == '>': if ldif[0] == '<' and ldif[-1] == '>':
ldif = ldif[1:-1] ldif = ldif[1:-1]
@ -352,7 +352,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
lock_data.update(self._lock_unlock_parse(body)) lock_data.update(self._lock_unlock_parse(body))
if lock_data['refresh'] and not lock_data.get('token', False): if lock_data['refresh'] and not lock_data.get('token', False):
raise DAV_Error(400, 'Lock refresh must specify token!') raise DAV_Error(400, 'Lock refresh must specify token.')
lock_data['depth'] = depth lock_data['depth'] = depth
@ -487,7 +487,7 @@ class dummy_dav_interface(object):
class DAVStaticHandler(http_server.StaticHTTPHandler): class DAVStaticHandler(http_server.StaticHTTPHandler):
""" A variant of the Static handler, which will serve dummy DAV requests """ A variant of the Static handler, which will serve dummy DAV requests
""" """
verbose = False verbose = False
protocol_version = 'HTTP/1.1' protocol_version = 'HTTP/1.1'
_HTTP_OPTIONS= { 'DAV' : ['1', '2'], _HTTP_OPTIONS= { 'DAV' : ['1', '2'],
@ -503,13 +503,13 @@ class DAVStaticHandler(http_server.StaticHTTPHandler):
self.end_headers() self.end_headers()
if hasattr(self, '_flush'): if hasattr(self, '_flush'):
self._flush() self._flush()
if self.command != 'HEAD': if self.command != 'HEAD':
self.wfile.write(content) self.wfile.write(content)
def do_PROPFIND(self): def do_PROPFIND(self):
"""Answer to PROPFIND with generic data. """Answer to PROPFIND with generic data.
A rough copy of python-webdav's do_PROPFIND, but hacked to work A rough copy of python-webdav's do_PROPFIND, but hacked to work
statically. statically.
""" """
@ -575,7 +575,7 @@ try:
handler._config = conf handler._config = conf
reg_http_service(directory, DAVHandler, DAVAuthProvider) reg_http_service(directory, DAVHandler, DAVAuthProvider)
_logger.info("WebDAV service registered at path: %s/ "% directory) _logger.info("WebDAV service registered at path: %s/ "% directory)
if not (config.get_misc('webdav', 'no_root_hack', False)): if not (config.get_misc('webdav', 'no_root_hack', False)):
# Now, replace the static http handler with the dav-enabled one. # Now, replace the static http handler with the dav-enabled one.
# If a static-http service has been specified for our server, then # If a static-http service has been specified for our server, then
@ -592,7 +592,7 @@ try:
# an _ugly_ hack: we put that dir back in tools.config.misc, so that # an _ugly_ hack: we put that dir back in tools.config.misc, so that
# the StaticHttpHandler can find its dir_path. # the StaticHttpHandler can find its dir_path.
config.misc.setdefault('static-http',{})['dir_path'] = dir_path config.misc.setdefault('static-http',{})['dir_path'] = dir_path
reg_http_service('/', DAVStaticHandler) reg_http_service('/', DAVStaticHandler)
except Exception, e: except Exception, e:
@ -617,10 +617,10 @@ def init_well_known():
init_well_known() init_well_known()
class PrincipalsRedirect(RedirectHTTPHandler): class PrincipalsRedirect(RedirectHTTPHandler):
redirect_paths = {} redirect_paths = {}
def _find_redirect(self): def _find_redirect(self):
for b, r in self.redirect_paths.items(): for b, r in self.redirect_paths.items():
if self.path.startswith(b): if self.path.startswith(b):
@ -628,7 +628,7 @@ class PrincipalsRedirect(RedirectHTTPHandler):
return False return False
def init_principals_redirect(): def init_principals_redirect():
""" Some devices like the iPhone will look under /principals/users/xxx for """ Some devices like the iPhone will look under /principals/users/xxx for
the user's properties. In OpenERP we _cannot_ have a stray /principals/... the user's properties. In OpenERP we _cannot_ have a stray /principals/...
working path, since we have a database path and the /webdav/ component. So, working path, since we have a database path and the /webdav/ component. So,
the best solution is to redirect the url with 301. Luckily, it does work in the best solution is to redirect the url with 301. Luckily, it does work in

View File

@ -149,7 +149,7 @@ class edi_document(osv.osv):
module = edi_document.get('__import_module') or edi_document.get('__module') module = edi_document.get('__import_module') or edi_document.get('__module')
assert module, 'a `__module` or `__import_module` attribute is required in each EDI document.' assert module, 'a `__module` or `__import_module` attribute is required in each EDI document.'
if module != 'base' and not ir_module.search(cr, uid, [('name','=',module),('state','=','installed')]): if module != 'base' and not ir_module.search(cr, uid, [('name','=',module),('state','=','installed')]):
raise osv.except_osv(_('Missing Application !'), raise osv.except_osv(_('Missing application.'),
_("The document you are trying to import requires the OpenERP `%s` application. " _("The document you are trying to import requires the OpenERP `%s` application. "
"You can install it by connecting as the administrator and opening the configuration assistant.")%(module,)) "You can install it by connecting as the administrator and opening the configuration assistant.")%(module,))
model = edi_document.get('__import_model') or edi_document.get('__model') model = edi_document.get('__import_model') or edi_document.get('__model')
@ -276,7 +276,7 @@ class EDIMixin(object):
# this could happen for data records defined in a module that depends # this could happen for data records defined in a module that depends
# on the module that owns the model, e.g. purchase defines # on the module that owns the model, e.g. purchase defines
# product.pricelist records. # product.pricelist records.
_logger.debug('Mismatching module! expected %s, got %s, for %s.', _logger.debug('Mismatching module: expected %s, got %s, for %s.',
module, record._original_module, record) module, record._original_module, record)
# ID is unique cross-db thanks to db_uuid # ID is unique cross-db thanks to db_uuid
module = "%s:%s" % (module, db_uuid) module = "%s:%s" % (module, db_uuid)
@ -515,7 +515,7 @@ class EDIMixin(object):
file_name = record.name_get()[0][1] file_name = record.name_get()[0][1]
file_name = re.sub(r'[^a-zA-Z0-9_-]', '_', file_name) file_name = re.sub(r'[^a-zA-Z0-9_-]', '_', file_name)
file_name += ".pdf" file_name += ".pdf"
ir_attachment = self.pool.get('ir.attachment').create(cr, uid, ir_attachment = self.pool.get('ir.attachment').create(cr, uid,
{'name': file_name, {'name': file_name,
'datas': result, 'datas': result,
'datas_fname': file_name, 'datas_fname': file_name,
@ -593,22 +593,22 @@ class EDIMixin(object):
target = self._edi_get_object_by_external_id(cr, uid, external_id, model, context=context) target = self._edi_get_object_by_external_id(cr, uid, external_id, model, context=context)
need_new_ext_id = False need_new_ext_id = False
if not target: if not target:
_logger.debug("%s: Importing EDI relationship [%r,%r] - ID is not found, trying name_get.", _logger.debug("%s: Importing EDI relationship [%r,%r] - ID not found, trying name_get.",
self._name, external_id, value) self._name, external_id, value)
target = self._edi_get_object_by_name(cr, uid, value, model, context=context) target = self._edi_get_object_by_name(cr, uid, value, model, context=context)
need_new_ext_id = True need_new_ext_id = True
if not target: if not target:
_logger.debug("%s: Importing EDI relationship [%r,%r] - name is not found, creating it!", _logger.debug("%s: Importing EDI relationship [%r,%r] - name not found, creating it.",
self._name, external_id, value) self._name, external_id, value)
# also need_new_ext_id here, but already been set above # also need_new_ext_id here, but already been set above
model = self.pool.get(model) model = self.pool.get(model)
# should use name_create() but e.g. res.partner won't allow it at the moment # should use name_create() but e.g. res.partner won't allow it at the moment
res_id = model.create(cr, uid, {model._rec_name: value}, context=context) res_id = model.create(cr, uid, {model._rec_name: value}, context=context)
target = model.browse(cr, uid, res_id, context=context) target = model.browse(cr, uid, res_id, context=context)
if need_new_ext_id: if need_new_ext_id:
ext_id_members = split_external_id(external_id) ext_id_members = split_external_id(external_id)
# module name is never used bare when creating ir.model.data entries, in order # module name is never used bare when creating ir.model.data entries, in order
# to avoid being taken as part of the module's data, and cleaned up at next update # to avoid being taken as part of the module's data, and cleaned up at next update
module = "%s:%s" % (ext_id_members['module'], ext_id_members['db_uuid']) module = "%s:%s" % (ext_id_members['module'], ext_id_members['db_uuid'])
# create a new ir.model.data entry for this value # create a new ir.model.data entry for this value
self._edi_external_id(cr, uid, target, existing_id=ext_id_members['id'], existing_module=module, context=context) self._edi_external_id(cr, uid, target, existing_id=ext_id_members['id'], existing_module=module, context=context)

View File

@ -74,7 +74,7 @@ class google_login(osv.osv_memory):
} }
self.pool.get('res.users').write(cr, uid, uid, res, context=context) self.pool.get('res.users').write(cr, uid, uid, res, context=context)
else: else:
raise osv.except_osv(_('Error'), _("Authentication failed. Check the user and password !")) raise osv.except_osv(_('Error'), _("Authentication failed. Check the user and password."))
return self._get_next_action(cr, uid, context=context) return self._get_next_action(cr, uid, context=context)

View File

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
############################################################################## ##############################################################################
# #
# OpenERP, Open Source Management Solution # OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# #
@ -15,7 +15,7 @@
# GNU Affero General Public License for more details. # GNU Affero General Public License for more details.
# #
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
############################################################################## ##############################################################################
@ -26,7 +26,7 @@ import decimal_precision as dp
class change_production_qty(osv.osv_memory): class change_production_qty(osv.osv_memory):
_name = 'change.production.qty' _name = 'change.production.qty'
_description = 'Change Quantity of Products' _description = 'Change Quantity of Products'
_columns = { _columns = {
'product_qty': fields.float('Product Qty', digits_compute=dp.get_precision('Product Unit of Measure'), required=True), 'product_qty': fields.float('Product Qty', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
} }
@ -36,17 +36,17 @@ class change_production_qty(osv.osv_memory):
@param self: The object pointer. @param self: The object pointer.
@param cr: A database cursor @param cr: A database cursor
@param uid: ID of the user currently logged in @param uid: ID of the user currently logged in
@param fields: List of fields for which we want default values @param fields: List of fields for which we want default values
@param context: A standard dictionary @param context: A standard dictionary
@return: A dictionary which of fields with values. @return: A dictionary which of fields with values.
""" """
if context is None: if context is None:
context = {} context = {}
res = super(change_production_qty, self).default_get(cr, uid, fields, context=context) res = super(change_production_qty, self).default_get(cr, uid, fields, context=context)
prod_obj = self.pool.get('mrp.production') prod_obj = self.pool.get('mrp.production')
prod = prod_obj.browse(cr, uid, context.get('active_id'), context=context) prod = prod_obj.browse(cr, uid, context.get('active_id'), context=context)
if 'product_qty' in fields: if 'product_qty' in fields:
res.update({'product_qty': prod.product_qty}) res.update({'product_qty': prod.product_qty})
return res return res
def _update_product_to_produce(self, cr, uid, prod, qty, context=None): def _update_product_to_produce(self, cr, uid, prod, qty, context=None):
@ -55,17 +55,17 @@ class change_production_qty(osv.osv_memory):
move_lines_obj.write(cr, uid, [m.id], {'product_qty': qty}) move_lines_obj.write(cr, uid, [m.id], {'product_qty': qty})
def change_prod_qty(self, cr, uid, ids, context=None): def change_prod_qty(self, cr, uid, ids, context=None):
""" """
Changes the Quantity of Product. Changes the Quantity of Product.
@param self: The object pointer. @param self: The object pointer.
@param cr: A database cursor @param cr: A database cursor
@param uid: ID of the user currently logged in @param uid: ID of the user currently logged in
@param ids: List of IDs selected @param ids: List of IDs selected
@param context: A standard dictionary @param context: A standard dictionary
@return: @return:
""" """
record_id = context and context.get('active_id',False) record_id = context and context.get('active_id',False)
assert record_id, _('Active Id is not found') assert record_id, _('Active ID not found')
prod_obj = self.pool.get('mrp.production') prod_obj = self.pool.get('mrp.production')
bom_obj = self.pool.get('mrp.bom') bom_obj = self.pool.get('mrp.bom')
for wiz_qty in self.browse(cr, uid, ids, context=context): for wiz_qty in self.browse(cr, uid, ids, context=context):

View File

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
############################################################################## ##############################################################################
# #
# OpenERP, Open Source Management Solution # OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# #
@ -15,7 +15,7 @@
# GNU Affero General Public License for more details. # GNU Affero General Public License for more details.
# #
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
# #
############################################################################## ##############################################################################
@ -31,42 +31,42 @@ class repair_cancel(osv.osv_memory):
@param self: The object pointer. @param self: The object pointer.
@param cr: A database cursor @param cr: A database cursor
@param uid: ID of the user currently logged in @param uid: ID of the user currently logged in
@param ids: List of IDs selected @param ids: List of IDs selected
@param context: A standard dictionary @param context: A standard dictionary
@return: @return:
""" """
if context is None: if context is None:
context = {} context = {}
record_id = context and context.get('active_id', False) or False record_id = context and context.get('active_id', False) or False
assert record_id, _('Active ID is not Found') assert record_id, _('Active ID not found')
repair_order_obj = self.pool.get('mrp.repair') repair_order_obj = self.pool.get('mrp.repair')
repair_line_obj = self.pool.get('mrp.repair.line') repair_line_obj = self.pool.get('mrp.repair.line')
repair_order = repair_order_obj.browse(cr, uid, record_id, context=context) repair_order = repair_order_obj.browse(cr, uid, record_id, context=context)
if repair_order.invoiced or repair_order.invoice_method == 'none': if repair_order.invoiced or repair_order.invoice_method == 'none':
repair_order_obj.action_cancel(cr, uid, [record_id], context=context) repair_order_obj.action_cancel(cr, uid, [record_id], context=context)
else: else:
raise osv.except_osv(_('Warning!'),_('Repair order is not invoiced.')) raise osv.except_osv(_('Warning!'),_('Repair order is not invoiced.'))
return {'type': 'ir.actions.act_window_close'} return {'type': 'ir.actions.act_window_close'}
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
""" Changes the view dynamically """ Changes the view dynamically
@param self: The object pointer. @param self: The object pointer.
@param cr: A database cursor @param cr: A database cursor
@param uid: ID of the user currently logged in @param uid: ID of the user currently logged in
@param context: A standard dictionary @param context: A standard dictionary
@return: New arch of view. @return: New arch of view.
""" """
if context is None: if context is None:
context = {} context = {}
res = super(repair_cancel, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar,submenu=False) res = super(repair_cancel, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar,submenu=False)
record_id = context and context.get('active_id', False) or False record_id = context and context.get('active_id', False) or False
active_model = context.get('active_model') active_model = context.get('active_model')
if not record_id or (active_model and active_model != 'mrp.repair'): if not record_id or (active_model and active_model != 'mrp.repair'):
return res return res
repair_order = self.pool.get('mrp.repair').browse(cr, uid, record_id, context=context) repair_order = self.pool.get('mrp.repair').browse(cr, uid, record_id, context=context)
if not repair_order.invoiced: if not repair_order.invoiced:
res['arch'] = """ res['arch'] = """
@ -77,7 +77,7 @@ class repair_cancel(osv.osv_memory):
<button string="Cancel" class="oe_link" special="cancel"/> <button string="Cancel" class="oe_link" special="cancel"/>
</header> </header>
<label string="Do you want to continue?"/> <label string="Do you want to continue?"/>
</form> </form>
""" """
return res return res