[IMP]: Improved warning messages/exceptions for document modules

bzr revid: pso@tinyerp.com-20120718071412-04obk41a719wqus6
This commit is contained in:
pso (OpenERP) 2012-07-18 12:44:12 +05:30
commit bbffa1c58c
12 changed files with 117 additions and 117 deletions

View File

@ -93,13 +93,13 @@ class indexer(object):
except NhException:
pass
raise NhException('No appropriate method to index file')
raise NhException('No appropriate method to index file!')
def _doIndexContent(self,content):
raise NhException("Content not handled here")
raise NhException("Content not handled here!")
def _doIndexFile(self,fpath):
raise NhException("Content not handled here")
raise NhException("Content not handled here!")
def __repr__(self):
return "<indexer %s.%s>" %(self.__module__, self.__class__.__name__)
@ -134,7 +134,7 @@ class contentIndex(object):
if f:
_logger.debug('Register content indexer: %r', obj)
if not f:
raise Exception("Your indexer should at least suport a mimetype or extension")
raise Exception("Your indexer should at least support a mimetype or extension.")
def doIndex(self, content, filename=None, content_type=None, realfname = None, debug=False):
fobj = None

View File

@ -61,7 +61,7 @@ class document_file(osv.osv):
return False
if ids is not None:
raise NotImplementedError("Ids is just there by convention! Don't use it yet, please.")
raise NotImplementedError("Ids is just there by convention, please do not use it yet.")
cr.execute("UPDATE ir_attachment " \
"SET parent_id = %s, db_datas = decode(encode(db_datas,'escape'), 'base64') " \

View File

@ -189,7 +189,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
StringIO.__init__(self, None)
else:
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
raise IOError(errno.EINVAL, "Invalid file mode!")
self.mode = mode
def size(self):
@ -269,7 +269,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
StringIO.__init__(self, None)
else:
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
raise IOError(errno.EINVAL, "Invalid file mode!")
self.mode = mode
def size(self):
@ -317,7 +317,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
(base64.encodestring(data), len(data), par.file_id))
cr.commit()
except Exception:
_logger.exception('Cannot update db file #%d for close:', par.file_id)
_logger.exception('Cannot update db file #%d for close!', par.file_id)
raise
finally:
cr.close()
@ -401,10 +401,10 @@ class document_storage(osv.osv):
# self._logger.debug('Npath: %s', npath)
for n in npath:
if n == '..':
raise ValueError("Invalid '..' element in path")
raise ValueError("Invalid '..' element in path!")
for ch in ('*', '|', "\\", '/', ':', '"', '<', '>', '?',):
if ch in n:
raise ValueError("Invalid char %s in path %s" %(ch, n))
raise ValueError("Invalid char %s in path %s!" %(ch, n))
dpath = [store_path,]
dpath += npath[:-1]
path = os.path.join(*dpath)
@ -420,7 +420,7 @@ class document_storage(osv.osv):
"""
boo = self.browse(cr, uid, id, context=context)
if not boo.online:
raise IOError(errno.EREMOTE, 'medium offline')
raise IOError(errno.EREMOTE, 'medium offline!')
if fil_obj:
ira = fil_obj
@ -435,10 +435,10 @@ class document_storage(osv.osv):
context = {}
boo = self.browse(cr, uid, id, context=context)
if not boo.online:
raise IOError(errno.EREMOTE, 'medium offline')
raise IOError(errno.EREMOTE, 'medium offline!')
if boo.readonly and mode not in ('r', 'rb'):
raise IOError(errno.EPERM, "Readonly medium")
raise IOError(errno.EPERM, "Readonly medium!")
ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
if boo.type == 'filestore':
@ -448,7 +448,7 @@ class document_storage(osv.osv):
if mode in ('r','r+'):
if ira.file_size:
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
raise IOError(errno.ENOENT, 'No file can be located')
raise IOError(errno.ENOENT, 'No file can be located!')
else:
store_fname = self.__get_random_fname(boo.path)
cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s',
@ -478,10 +478,10 @@ class document_storage(osv.osv):
return nodefd_file(file_node, path=fpath, mode=mode)
elif boo.type == 'virtual':
raise ValueError('Virtual storage does not support static files')
raise ValueError('Virtual storage does not support static file(s).')
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage!" % boo.type)
def __get_data_3(self, cr, uid, boo, ira, context):
if boo.type == 'filestore':
@ -524,10 +524,10 @@ class document_storage(osv.osv):
raise IOError(errno.ENOENT, "File not found: %s" % fpath)
elif boo.type == 'virtual':
raise ValueError('Virtual storage does not support static files')
raise ValueError('Virtual storage does not support static file(s).')
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage!" % boo.type)
def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None):
""" store the data.
@ -541,10 +541,10 @@ class document_storage(osv.osv):
ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
if not boo.online:
raise IOError(errno.EREMOTE, 'medium offline')
raise IOError(errno.EREMOTE, 'Medium offline!')
if boo.readonly:
raise IOError(errno.EPERM, "Readonly medium")
raise IOError(errno.EPERM, "Readonly medium!")
_logger.debug( "Store data for ir.attachment #%d" % ira.id)
store_fname = None
@ -565,7 +565,7 @@ class document_storage(osv.osv):
# TODO Here, an old file would be left hanging.
except Exception, e:
_logger.warning( "Couldn't save data to %s", path, exc_info=True)
_logger.warning( "Cannot save data to %s.", path, exc_info=True)
raise except_orm(_('Error!'), str(e))
elif boo.type == 'db':
filesize = len(data)
@ -593,14 +593,14 @@ class document_storage(osv.osv):
store_fname = os.path.join(*npath)
# TODO Here, an old file would be left hanging.
except Exception,e :
_logger.warning("Couldn't save data:", exc_info=True)
_logger.warning("Cannot save data:", exc_info=True)
raise except_orm(_('Error!'), str(e))
elif boo.type == 'virtual':
raise ValueError('Virtual storage does not support static files')
raise ValueError('Virtual storage does not support static file(s).')
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage!" % boo.type)
# 2nd phase: store the metadata
try:
@ -629,7 +629,7 @@ class document_storage(osv.osv):
file_node.content_type = mime
return True
except Exception, e :
self._logger.warning("Couldn't save data:", exc_info=True)
self._logger.warning("Cannot save data:", exc_info=True)
# should we really rollback once we have written the actual data?
# at the db case (only), that rollback would be safe
raise except_orm(_('Error at doc write!'), str(e))
@ -639,10 +639,10 @@ class document_storage(osv.osv):
files that have to be removed, too. """
if not storage_bo.online:
raise IOError(errno.EREMOTE, 'medium offline')
raise IOError(errno.EREMOTE, 'Medium offline!')
if storage_bo.readonly:
raise IOError(errno.EPERM, "Readonly medium")
raise IOError(errno.EPERM, "Readonly medium!")
if storage_bo.type == 'filestore':
fname = fil_bo.store_fname
@ -659,7 +659,7 @@ class document_storage(osv.osv):
path = storage_bo.path
return ( storage_bo.id, 'file', os.path.join(path, fname))
else:
raise TypeError("No %s storage" % storage_bo.type)
raise TypeError("No %s storage!" % storage_bo.type)
def do_unlink(self, cr, uid, unres):
for id, ktype, fname in unres:
@ -667,9 +667,9 @@ class document_storage(osv.osv):
try:
os.unlink(fname)
except Exception:
_logger.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
_logger.warning("Cannot remove file %s, please remove manually.", fname, exc_info=True)
else:
_logger.warning("Unknown unlink key %s" % ktype)
_logger.warning("Unknown unlink key %s." % ktype)
return True
@ -699,9 +699,9 @@ class document_storage(osv.osv):
fname = ira.store_fname
if not fname:
_logger.warning("Trying to rename a non-stored file")
_logger.warning("Trying to rename a non-stored file.")
if fname != os.path.join(*npath):
_logger.warning("inconsistency in realstore: %s != %s" , fname, repr(npath))
_logger.warning("Inconsistency in realstore: %s != %s." , fname, repr(npath))
oldpath = os.path.join(path, npath[-1])
newpath = os.path.join(path, new_name)
@ -711,7 +711,7 @@ class document_storage(osv.osv):
store_fname = os.path.join(*store_path)
return { 'name': new_name, 'datas_fname': new_name, 'store_fname': store_fname }
else:
raise TypeError("No %s storage" % sbro.type)
raise TypeError("No %s storage!" % sbro.type)
def simple_move(self, cr, uid, file_node, ndir_bro, context=None):
""" A preparation for a file move.
@ -739,8 +739,8 @@ class document_storage(osv.osv):
break
par = par.parent_id
if file_node.storage_id != psto:
_logger.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name)
raise NotImplementedError('Cannot move files between storage media')
_logger.debug('Cannot move file %r from %r to %r.', file_node, file_node.parent, ndir_bro.name)
raise NotImplementedError('Cannot move file(s) between storage media.')
if sbro.type in ('filestore', 'db', 'db64'):
# nothing to do for a rename, allow to change the db field
@ -752,9 +752,9 @@ class document_storage(osv.osv):
fname = ira.store_fname
if not fname:
_logger.warning("Trying to rename a non-stored file")
_logger.warning("Trying to rename a non-stored file.")
if fname != os.path.join(*opath):
_logger.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))
_logger.warning("Inconsistency in realstore: %s != %s." , fname, repr(opath))
oldpath = os.path.join(path, opath[-1])
@ -762,12 +762,12 @@ class document_storage(osv.osv):
npath = filter(lambda x: x is not None, npath)
newdir = os.path.join(*npath)
if not os.path.isdir(newdir):
_logger.debug("Must create dir %s", newdir)
_logger.debug("Must create dir %s.", newdir)
os.makedirs(newdir)
npath.append(opath[-1])
newpath = os.path.join(*npath)
_logger.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
_logger.debug("Going to move %s from %s to %s.", opath[-1], oldpath, newpath)
shutil.move(oldpath, newpath)
store_path = npath[1:] + [opath[-1],]
@ -775,7 +775,7 @@ class document_storage(osv.osv):
return { 'store_fname': store_fname }
else:
raise TypeError("No %s storage" % sbro.type)
raise TypeError("No %s storage!" % sbro.type)
document_storage()

View File

@ -271,7 +271,7 @@ class node_class(object):
return False
def get_data(self,cr):
raise TypeError('no data for %s'% self.type)
raise TypeError('No data for %s.'% self.type)
def open_data(self, cr, mode):
""" Open a node_descriptor object for this node.
@ -285,10 +285,10 @@ class node_class(object):
For this class, there is no data, so no implementation. Each
child class that has data should override this.
"""
raise TypeError('no data for %s' % self.type)
raise TypeError('No data for %s.' % self.type)
def _get_storage(self,cr):
raise RuntimeError("no storage for base class")
raise RuntimeError("No storage for base class.")
def get_etag(self,cr):
""" Get a tag, unique per object + modification.
@ -327,7 +327,7 @@ class node_class(object):
if self.DAV_M_NS.has_key(ns):
prefix = self.DAV_M_NS[ns]
else:
_logger.debug('No namespace: %s ("%s")',ns, prop)
_logger.debug('No namespace: %s ("%s").',ns, prop)
return None
mname = prefix + "_" + prop.replace('-','_')
@ -340,7 +340,7 @@ class node_class(object):
r = m(cr)
return r
except AttributeError:
_logger.debug('Property %s not supported' % prop, exc_info=True)
_logger.debug('Property %s not supported.' % prop, exc_info=True)
return None
def get_dav_resourcetype(self, cr):
@ -384,13 +384,13 @@ class node_class(object):
""" Create a regular file under this node
"""
_logger.warning("Attempted to create a file under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create files here")
raise IOError(errno.EPERM, "Not allowed to create file(s) here.")
def create_child_collection(self, cr, objname):
""" Create a child collection (directory) under self
"""
_logger.warning("Attempted to create a collection under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create folders here")
raise IOError(errno.EPERM, "Not allowed to create folder(s) here.")
def rm(self, cr):
raise NotImplementedError(repr(self))
@ -423,9 +423,9 @@ class node_class(object):
perms = pe2
elif isinstance(perms, int):
if perms < 0 or perms > 15:
raise ValueError("Invalid permission bits")
raise ValueError("Invalid permission bits.")
else:
raise ValueError("Invalid permission attribute")
raise ValueError("Invalid permission attribute.")
return ((self.uidperms & perms) == perms)
@ -465,7 +465,7 @@ class node_database(node_class):
is_allowed = self.check_perms(5)
if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied")
raise IOError(errno.EPERM, "Permission into directory denied.")
if domain:
where = where + domain
@ -569,7 +569,7 @@ class node_dir(node_database):
is_allowed = self.check_perms(nodename and 1 or 5)
if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied")
raise IOError(errno.EPERM, "Permission into directory denied.")
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
@ -597,7 +597,7 @@ class node_dir(node_database):
is_allowed = self.check_perms(5)
if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied")
raise IOError(errno.EPERM, "Permission into directory denied.")
if not domain:
domain = []
@ -633,7 +633,7 @@ class node_dir(node_database):
if not directory:
raise OSError(2, 'Not such file or directory.')
if not self.check_perms('u'):
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
if directory._table_name=='document.directory':
if self.children(cr):
@ -646,7 +646,7 @@ class node_dir(node_database):
def create_child_collection(self, cr, objname):
object2 = False
if not self.check_perms(2):
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
dirobj = self.context._dirobj
uid = self.context.uid
@ -672,7 +672,7 @@ class node_dir(node_database):
Return the node_* created
"""
if not self.check_perms(2):
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
dirobj = self.context._dirobj
uid = self.context.uid
@ -702,10 +702,10 @@ class node_dir(node_database):
Note /may/ be called with ndir_node = None, to rename the document root.
"""
if ndir_node and (ndir_node.context != self.context):
raise NotImplementedError("Cannot move directories between contexts")
raise NotImplementedError("Cannot move directories between contexts.")
if (not self.check_perms('u')) or (not ndir_node.check_perms('w')):
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
dir_obj = self.context._dirobj
if not fil_obj:
@ -725,12 +725,12 @@ class node_dir(node_database):
if self.parent != ndir_node:
_logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move dir to another dir')
raise NotImplementedError('Cannot move dir to another dir.')
ret = {}
if new_name and (new_name != dbro.name):
if ndir_node.child(cr, new_name):
raise IOError(errno.EEXIST, "Destination path already exists")
raise IOError(errno.EEXIST, "Destination path already exists!")
ret['name'] = new_name
del dbro
@ -845,7 +845,7 @@ class node_res_dir(node_class):
is_allowed = self.check_perms(5)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
# print "Where clause for %s" % self.res_model, where
if self.ressource_tree:
@ -962,7 +962,7 @@ class node_res_obj(node_class):
res = []
is_allowed = self.check_perms((nodename and 1) or 5)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
@ -1016,7 +1016,7 @@ class node_res_obj(node_class):
is_allowed = self.check_perms((name and 1) or 5)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
uid = self.context.uid
ctx = self.context.context.copy()
@ -1103,7 +1103,7 @@ class node_res_obj(node_class):
dirobj = self.context._dirobj
is_allowed = self.check_perms(2)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
uid = self.context.uid
ctx = self.context.context.copy()
@ -1135,7 +1135,7 @@ class node_res_obj(node_class):
"""
is_allowed = self.check_perms(2)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
dirobj = self.context._dirobj
uid = self.context.uid
@ -1215,7 +1215,7 @@ class node_file(node_class):
stor = self.storage_id
assert stor, "No storage for file #%s" % self.file_id
if not self.check_perms(4):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
# If storage is not set properly, we are just screwed here, don't
# try to get it from default.
@ -1225,7 +1225,7 @@ class node_file(node_class):
def rm(self, cr):
uid = self.context.uid
if not self.check_perms(8):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
document_obj = self.context._dirobj.pool.get('ir.attachment')
if self.type in ('collection','database'):
return False
@ -1271,7 +1271,7 @@ class node_file(node_class):
stor = self.storage_id
assert stor, "No storage for file #%s" % self.file_id
if not self.check_perms(4):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
# If storage is not set properly, we are just screwed here, don't
# try to get it from default.
@ -1294,7 +1294,7 @@ class node_file(node_class):
stor = self.storage_id
assert stor, "No storage for file #%s" % self.file_id
if not self.check_perms(2):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
stobj = self.context._dirobj.pool.get('document.storage')
return stobj.set_data(cr, self.context.uid,stor, self, data, self.context.context, fil_obj)
@ -1304,10 +1304,10 @@ class node_file(node_class):
def move_to(self, cr, ndir_node, new_name=False, fil_obj=None, ndir_obj=None, in_write=False):
if ndir_node and ndir_node.context != self.context:
raise NotImplementedError("Cannot move files between contexts")
raise NotImplementedError("Cannot move files between contexts.")
if (not self.check_perms(8)) and ndir_node.check_perms(2):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
doc_obj = self.context._dirobj.pool.get('ir.attachment')
if not fil_obj:
@ -1343,7 +1343,7 @@ class node_file(node_class):
if new_name and (new_name != dbro.name):
if len(ret):
raise NotImplementedError("Cannot rename and move") # TODO
raise NotImplementedError("Cannot rename and move.") # TODO
stobj = self.context._dirobj.pool.get('document.storage')
r2 = stobj.simple_rename(cr, self.context.uid, self, new_name, self.context.context)
ret.update(r2)
@ -1399,7 +1399,7 @@ class node_content(node_class):
def get_data(self, cr, fil_obj = None):
cntobj = self.context._dirobj.pool.get('document.directory.content')
if not self.check_perms(4):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
ctx = self.context.context.copy()
ctx.update(self.dctx)
@ -1419,7 +1419,7 @@ class node_content(node_class):
raise IOError(errno.EINVAL, "Cannot open at mode %s" % mode)
if not self.check_perms(cperms):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
ctx = self.context.context.copy()
ctx.update(self.dctx)
@ -1438,7 +1438,7 @@ class node_content(node_class):
def set_data(self, cr, data, fil_obj = None):
cntobj = self.context._dirobj.pool.get('document.directory.content')
if not self.check_perms(2):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
ctx = self.context.context.copy()
ctx.update(self.dctx)
@ -1474,7 +1474,7 @@ class nodefd_content(StringIO, node_descriptor):
StringIO.__init__(self, None)
else:
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
raise IOError(errno.EINVAL, "Invalid file mode!")
self.mode = mode
def size(self):
@ -1528,7 +1528,7 @@ class nodefd_static(StringIO, node_descriptor):
StringIO.__init__(self, None)
else:
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
raise IOError(errno.EINVAL, "Invalid file mode!")
self.mode = mode
def size(self):

View File

@ -176,7 +176,7 @@ class abstracted_fs(object):
res = node.open_data(cr, mode)
cr.commit()
except TypeError:
raise IOError(errno.EINVAL, "No data")
raise IOError(errno.EINVAL, "No data.")
return res
# ok, but need test more
@ -211,9 +211,9 @@ class abstracted_fs(object):
self.cwd_node = None
return None
if not datacr[1]:
raise OSError(1, 'Operation not permitted')
raise OSError(1, 'Operation not permitted.')
if datacr[1].type not in ('collection','database'):
raise OSError(2, 'Path is not a directory')
raise OSError(2, 'Path is not a directory.')
self.cwd = '/'+datacr[1].context.dbname + '/'
self.cwd += '/'.join(datacr[1].full_path())
self.cwd_node = datacr[1]
@ -287,7 +287,7 @@ class abstracted_fs(object):
p_parts = p_parts[1:]
# self._log.debug("Path parts: %r ", p_parts)
if not p_parts:
raise IOError(errno.EPERM, 'Cannot perform operation at root dir')
raise IOError(errno.EPERM, 'Cannot perform operation at root directory.')
dbname = p_parts[0]
if dbname not in self.db_list():
raise IOError(errno.ENOENT,'Invalid database path: %s' % dbname)
@ -318,7 +318,7 @@ class abstracted_fs(object):
node = self.cwd_node
if node is False and mode not in ('???'):
cr.close()
raise IOError(errno.ENOENT, 'Path does not exist')
raise IOError(errno.ENOENT, 'Path does not exist!')
return (cr, node, rem_path)
def get_node_cr_uid(self, node):
@ -429,7 +429,7 @@ class abstracted_fs(object):
def getsize(self, datacr):
"""Return the size of the specified file in bytes."""
if not (datacr and datacr[1]):
raise IOError(errno.ENOENT, "No such file or directory")
raise IOError(errno.ENOENT, "No such file or directory.")
if datacr[1].type in ('file', 'content'):
return datacr[1].get_data_len(datacr[0]) or 0L
return 0L

View File

@ -308,7 +308,7 @@ class DummyAuthorizer:
raise AuthorizerError('No such directory: "%s"' %homedir)
for p in perm:
if p not in 'elradfmw':
raise AuthorizerError('No such permission "%s"' %p)
raise AuthorizerError('No such permission: "%s"' %p)
for p in perm:
if (p in self.write_perms) and (username == 'anonymous'):
warnings.warn("write permissions assigned to anonymous user.",
@ -638,7 +638,7 @@ class DTPHandler(asyncore.dispatcher):
elif type == 'i':
self.data_wrapper = lambda x: x
else:
raise TypeError, "Unsupported type"
raise TypeError, "Unsupported type!"
self.receive = True
def get_transmitted_bytes(self):
@ -823,7 +823,7 @@ class FileProducer:
elif type == 'i':
self.data_wrapper = lambda x: x
else:
raise TypeError, "Unsupported type"
raise TypeError, "Unsupported type!"
def more(self):
"""Attempt a chunk of data of size self.buffer_size."""
@ -2554,7 +2554,7 @@ class FTPHandler(asynchat.async_chat):
else:
datacr = self.get_crdata2(line)
if not datacr:
raise IOError(errno.ENOENT, "%s is not retrievable" %line)
raise IOError(errno.ENOENT, "%s is not retrievable." %line)
lmt = self.try_as_current_user(self.fs.getmtime, (datacr,), line=line)
lmt = time.strftime("%Y%m%d%H%M%S", time.localtime(lmt))

View File

@ -62,7 +62,7 @@ def get_ftp_fulldata(ftp, fname, limit=8192):
data = []
def ffp(data, ndata):
if len(data)+ len(ndata) > limit:
raise IndexError('Data over the limit')
raise IndexError('Data over the limit.')
data.append(ndata)
ftp.retrbinary('RETR %s' % fname, partial(ffp,data))
return ''.join(data)

View File

@ -98,7 +98,7 @@ class BoundStream2(object):
def read(self, size=-1):
if not self._stream:
raise IOError(errno.EBADF, "read() without stream")
raise IOError(errno.EBADF, "read() without stream.")
if self._rem_length == 0:
return ''
@ -136,25 +136,25 @@ class BoundStream2(object):
"""
if whence == os.SEEK_SET:
if pos < 0 or pos > self._length:
raise IOError(errno.EINVAL,"Cannot seek")
raise IOError(errno.EINVAL,"Cannot seek!")
self._stream.seek(pos - self._offset)
self._rem_length = self._length - pos
elif whence == os.SEEK_CUR:
if pos > 0:
if pos > self._rem_length:
raise IOError(errno.EINVAL,"Cannot seek past end")
raise IOError(errno.EINVAL,"Cannot seek past end!")
elif pos < 0:
oldpos = self.tell()
if oldpos + pos < 0:
raise IOError(errno.EINVAL,"Cannot seek before start")
raise IOError(errno.EINVAL,"Cannot seek before start!")
self._stream.seek(pos, os.SEEK_CUR)
self._rem_length -= pos
elif whence == os.SEEK_END:
if pos > 0:
raise IOError(errno.EINVAL,"Cannot seek past end")
raise IOError(errno.EINVAL,"Cannot seek past end!")
else:
if self._length + pos < 0:
raise IOError(errno.EINVAL,"Cannot seek before start")
raise IOError(errno.EINVAL,"Cannot seek before start!")
newpos = self._offset + self._length + pos
self._stream.seek(newpos, os.SEEK_SET)
self._rem_length = 0 - pos
@ -206,7 +206,7 @@ class openerp_dav_handler(dav_interface):
self.parent.log_error("Cannot %s: %s", opname, str(e))
self.parent.log_message("Exc: %s",traceback.format_exc())
# see par 9.3.1 of rfc
raise DAV_Error(403, str(e) or 'Not supported at this path')
raise DAV_Error(403, str(e) or 'Not supported at this path.')
except EnvironmentError, err:
if cr: cr.close()
import traceback
@ -218,7 +218,7 @@ class openerp_dav_handler(dav_interface):
if cr: cr.close()
self.parent.log_error("Cannot %s: %s", opname, str(e))
self.parent.log_message("Exc: %s",traceback.format_exc())
raise default_exc("Operation failed")
raise default_exc("Operation failed.")
def _get_dav_lockdiscovery(self, uri):
""" We raise that so that the node API is used """
@ -434,7 +434,7 @@ class openerp_dav_handler(dav_interface):
except DAV_Error:
raise
except Exception, e:
self.parent.log_error("cannot get_children: "+ str(e))
self.parent.log_error("Cannot get_children: "+ str(e))
raise
finally:
if cr: cr.close()
@ -500,10 +500,10 @@ class openerp_dav_handler(dav_interface):
assert start >= 0
if end and end < start:
self.parent.log_error("Invalid range for data: %s-%s" %(start, end))
raise DAV_Error(416, "Invalid range for data")
raise DAV_Error(416, "Invalid range for data.")
if end:
if end >= res.size():
raise DAV_Error(416, "Requested data exceeds available size")
raise DAV_Error(416, "Requested data exceeds available size.")
length = (end + 1) - start
else:
length = res.size() - start
@ -661,7 +661,7 @@ class openerp_dav_handler(dav_interface):
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not uri2[-1]:
if cr: cr.close()
raise DAV_Error(409, "Cannot create nameless collection")
raise DAV_Error(409, "Cannot create nameless collection.")
if not dbname:
if cr: cr.close()
raise DAV_Error, 409
@ -672,7 +672,7 @@ class openerp_dav_handler(dav_interface):
nc = node.child(cr, uri2[-1])
if nc:
cr.close()
raise DAV_Error(405, "Path already exists")
raise DAV_Error(405, "Path already exists.")
self._try_function(node.create_child_collection, (cr, uri2[-1]),
"create col %s" % uri2[-1], cr=cr)
cr.commit()
@ -698,14 +698,14 @@ class openerp_dav_handler(dav_interface):
dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
if not dir_node:
cr.close()
raise DAV_NotFound('Parent folder not found')
raise DAV_NotFound('Parent folder not found.')
newchild = self._try_function(dir_node.create_child, (cr, objname, data),
"create %s" % objname, cr=cr)
if not newchild:
cr.commit()
cr.close()
raise DAV_Error(400, "Failed to create resource")
raise DAV_Error(400, "Failed to create resource.")
uparts=urlparse.urlparse(uri)
fileloc = '/'.join(newchild.full_path())
@ -937,7 +937,7 @@ class openerp_dav_handler(dav_interface):
except AttributeError:
# perhaps the node doesn't support locks
cr.close()
raise DAV_Error(400, 'No locks for this resource')
raise DAV_Error(400, 'No locks for this resource.')
res = self._try_function(node_fn, (cr, token), "unlock %s" % uri, cr=cr)
cr.commit()
@ -966,7 +966,7 @@ class openerp_dav_handler(dav_interface):
dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
if not dir_node:
cr.close()
raise DAV_NotFound('Parent folder not found')
raise DAV_NotFound('Parent folder not found.')
# We create a new node (file) but with empty data=None,
# as in RFC4918 p. 9.10.4
@ -975,7 +975,7 @@ class openerp_dav_handler(dav_interface):
if not node:
cr.commit()
cr.close()
raise DAV_Error(400, "Failed to create resource")
raise DAV_Error(400, "Failed to create resource.")
created = True
@ -984,7 +984,7 @@ class openerp_dav_handler(dav_interface):
except AttributeError:
# perhaps the node doesn't support locks
cr.close()
raise DAV_Error(400, 'No locks for this resource')
raise DAV_Error(400, 'No locks for this resource.')
# Obtain the lock on the node
lres, pid, token = self._try_function(node_fn, (cr, lock_data), "lock %s" % objname, cr=cr)
@ -992,7 +992,7 @@ class openerp_dav_handler(dav_interface):
if not lres:
cr.commit()
cr.close()
raise DAV_Error(423, "Resource already locked")
raise DAV_Error(423, "Resource already locked.")
assert isinstance(lres, list), 'lres: %s' % repr(lres)

View File

@ -43,7 +43,7 @@ class document_davdir(osv.osv):
elif dbro.type == 'ressource':
return nodes.node_res_dir
else:
raise ValueError("dir node for %s type", dbro.type)
raise ValueError("Directory node for %s type", dbro.type)
def _prepare_context(self, cr, uid, nctx, context=None):
nctx.node_file_class = nodes.node_file

View File

@ -278,7 +278,7 @@ class addAuthTransport:
return self._parse_response(h.getfile(), sock, resp)
raise ProtocolError(host+handler, 403, "No authentication",'')
raise ProtocolError(host+handler, 403, "No authentication.",'')
class PersistentAuthTransport(addAuthTransport,PersistentTransport):
pass
@ -402,7 +402,7 @@ class DAVClient(object):
r1 = conn.getresponse()
except httplib.BadStatusLine, bsl:
log.warning("Bad status line: %s", bsl.line)
raise Exception('Bad status line')
raise Exception('Bad status line.')
if r1.status == 401: # and r1.headers:
if 'www-authenticate' in r1.msg:
(atype,realm) = r1.msg.getheader('www-authenticate').split(' ',1)
@ -437,7 +437,7 @@ class DAVClient(object):
doc = xml.dom.minidom.parseString(data1)
_logger.debug("XML Body:\n %s", doc.toprettyxml(indent="\t"))
except Exception:
_logger.warning("could not print xml", exc_info=True)
_logger.warning("cannot print xml", exc_info=True)
pass
conn.close()
return r1.status, r1.msg, data1
@ -651,7 +651,7 @@ class DAVClient(object):
if isinstance(crange, tuple):
crange = [crange,]
if not isinstance(crange, list):
raise TypeError("Range must be a tuple or list of tuples")
raise TypeError("Range must be a tuple or list of tuples.")
rs = []
for r in crange:
rs.append('%d-%d' % r)
@ -689,7 +689,7 @@ class DAVClient(object):
"""
hdrs = { }
if not (body or srcpath):
raise ValueError("PUT must have something to send")
raise ValueError("PUT must have something to send.")
if (not body) and srcpath:
fd = open(srcpath, 'rb')
body = fd.read()

View File

@ -65,7 +65,7 @@ class Prop2xml(object):
def createText2Node(self, data):
if not isinstance(data, StringTypes):
raise TypeError, "node contents must be a string"
raise TypeError, "Node contents must be a string."
t = Text2()
t.data = data
t.ownerDocument = self.doc

View File

@ -119,7 +119,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
if up.path.startswith(self.davpath):
self.headers['Destination'] = up.path[len(self.davpath):]
else:
raise DAV_Forbidden("Not allowed to copy/move outside webdav path")
raise DAV_Forbidden("Not allowed to copy/move outside webdav path.")
# TODO: locks
DAVRequestHandler.copymove(self, CLASS)
@ -338,7 +338,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
if isinstance(ldif, list):
if len(ldif) !=1 or (not isinstance(ldif[0], TagList)) \
or len(ldif[0].list) != 1:
raise DAV_Error(400, "Cannot accept multiple tokens")
raise DAV_Error(400, "Cannot accept multiple tokens!")
ldif = ldif[0].list[0]
if ldif[0] == '<' and ldif[-1] == '>':
ldif = ldif[1:-1]
@ -352,7 +352,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
lock_data.update(self._lock_unlock_parse(body))
if lock_data['refresh'] and not lock_data.get('token', False):
raise DAV_Error(400, 'Lock refresh must specify token')
raise DAV_Error(400, 'Lock refresh must specify token!')
lock_data['depth'] = depth