2009-12-02 05:36:57 +00:00
|
|
|
# -*- encoding: utf-8 -*-
|
2010-08-10 12:29:57 +00:00
|
|
|
|
2008-09-26 14:23:55 +00:00
|
|
|
import os
|
|
|
|
import time
|
|
|
|
from tarfile import filemode
|
2010-06-29 14:05:15 +00:00
|
|
|
import logging
|
2010-07-02 21:52:49 +00:00
|
|
|
import errno
|
2008-09-26 14:23:55 +00:00
|
|
|
|
|
|
|
import glob
|
|
|
|
import fnmatch
|
|
|
|
|
2012-12-17 15:58:19 +00:00
|
|
|
from openerp import pooler, netsvc, sql_db
|
2012-12-17 15:46:28 +00:00
|
|
|
from openerp.service import security
|
2012-12-06 14:56:32 +00:00
|
|
|
from openerp.osv import osv
|
2010-06-29 14:05:32 +00:00
|
|
|
from document.nodes import get_node_context
|
2008-09-26 14:23:55 +00:00
|
|
|
|
2009-04-13 06:28:27 +00:00
|
|
|
def _get_month_name(month):
|
|
|
|
month=int(month)
|
|
|
|
if month==1:return 'Jan'
|
|
|
|
elif month==2:return 'Feb'
|
|
|
|
elif month==3:return 'Mar'
|
|
|
|
elif month==4:return 'Apr'
|
|
|
|
elif month==5:return 'May'
|
|
|
|
elif month==6:return 'Jun'
|
|
|
|
elif month==7:return 'Jul'
|
|
|
|
elif month==8:return 'Aug'
|
|
|
|
elif month==9:return 'Sep'
|
|
|
|
elif month==10:return 'Oct'
|
|
|
|
elif month==11:return 'Nov'
|
|
|
|
elif month==12:return 'Dec'
|
|
|
|
|
2010-07-08 22:54:33 +00:00
|
|
|
from ftpserver import _to_decode, _to_unicode
|
2009-04-13 06:28:27 +00:00
|
|
|
|
2008-10-28 20:57:46 +00:00
|
|
|
|
2010-06-29 14:05:15 +00:00
|
|
|
class abstracted_fs(object):
    """High-level, cross-platform file-system abstraction.

    Provides utility methods and wrappers around file creation and
    file-system operations (moving files, removing directories, ...)
    with an interface compatible with both Windows- and UNIX-style
    filesystems.

    Instance attributes:
     - (str) root: the user home directory.
     - (str) cwd: the current working directory.
     - (str) rnfr: source file to be renamed.
    """

    def __init__(self):
        # Start at the virtual root; no node has been resolved yet and
        # no rename source is pending.
        self.cwd = '/'
        self.cwd_node = None
        self.root = None
        self.rnfr = None
        self._log = logging.getLogger(__name__)
2008-10-28 07:10:13 +00:00
|
|
|
# Ok
|
|
|
|
    def db_list(self):
        """Get the list of available databases, with FTPd support.

        Asks the core 'db' service for all databases, then keeps only
        those that have the 'document_ftp' module installed (or being
        installed/upgraded).  The result is also cached on
        self.db_name_list.
        """
        s = netsvc.ExportService.getService('db')
        result = s.exp_list(document=True)
        self.db_name_list = []
        for db_name in result:
            db, cr = None, None
            try:
                try:
                    db = sql_db.db_connect(db_name)
                    cr = db.cursor()
                    # Skip databases that are not OpenERP databases at
                    # all (no ir_module_module table yet).
                    cr.execute("SELECT 1 FROM pg_class WHERE relkind = 'r' AND relname = 'ir_module_module'")
                    if not cr.fetchone():
                        continue

                    cr.execute("SELECT id FROM ir_module_module WHERE name = 'document_ftp' AND state IN ('installed', 'to install', 'to upgrade') ")
                    res = cr.fetchone()
                    if res and len(res):
                        self.db_name_list.append(db_name)
                    cr.commit()
                except Exception:
                    # Best effort: an unreachable or broken database is
                    # simply not listed, with a warning in the log.
                    self._log.warning('Cannot use db "%s".', db_name)
            finally:
                if cr is not None:
                    cr.close()
        return self.db_name_list
|
|
|
|
|
|
|
|
def ftpnorm(self, ftppath):
|
|
|
|
"""Normalize a "virtual" ftp pathname (tipically the raw string
|
2010-07-02 21:52:49 +00:00
|
|
|
coming from client).
|
2008-10-28 07:10:13 +00:00
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
Pathname returned is relative!.
|
2008-10-28 07:10:13 +00:00
|
|
|
"""
|
2010-07-02 21:52:49 +00:00
|
|
|
p = os.path.normpath(ftppath)
|
2008-10-28 07:10:13 +00:00
|
|
|
# normalize string in a standard web-path notation having '/'
|
2010-07-02 21:52:49 +00:00
|
|
|
# as separator. xrg: is that really in the spec?
|
2008-10-28 07:10:13 +00:00
|
|
|
p = p.replace("\\", "/")
|
|
|
|
# os.path.normpath supports UNC paths (e.g. "//a/b/c") but we
|
|
|
|
# don't need them. In case we get an UNC path we collapse
|
|
|
|
# redundant separators appearing at the beginning of the string
|
|
|
|
while p[:2] == '//':
|
|
|
|
p = p[1:]
|
2010-07-02 21:52:49 +00:00
|
|
|
if p == '.':
|
|
|
|
return ''
|
2008-10-28 07:10:13 +00:00
|
|
|
return p
|
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def get_cwd(self):
        """ return the cwd, decoded in utf"""
        # self.cwd is kept as a raw string internally; decode it only
        # when handing it to the FTP layer.
        return _to_decode(self.cwd)
|
|
|
|
|
2008-10-28 07:10:13 +00:00
|
|
|
    def ftp2fs(self, path_orig, data):
        # Legacy API: path-to-node translation is done in get_crdata()
        # now.  Raising (rather than warning) makes any stale caller
        # fail loudly.
        raise DeprecationWarning()
|
2008-10-28 07:10:13 +00:00
|
|
|
|
2010-08-19 11:51:57 +00:00
|
|
|
def fs2ftp(self, node):
|
2010-07-02 21:52:49 +00:00
|
|
|
""" Return the string path of a node, in ftp form
|
|
|
|
"""
|
2009-12-08 13:30:41 +00:00
|
|
|
res='/'
|
2009-04-14 10:17:32 +00:00
|
|
|
if node:
|
2009-12-17 08:42:14 +00:00
|
|
|
paths = node.full_path()
|
2010-07-02 21:52:49 +00:00
|
|
|
res = '/' + node.context.dbname + '/' + \
|
|
|
|
_to_decode(os.path.join(*paths))
|
2010-08-19 11:51:57 +00:00
|
|
|
|
2008-10-28 07:10:13 +00:00
|
|
|
return res
|
|
|
|
|
|
|
|
def validpath(self, path):
|
|
|
|
"""Check whether the path belongs to user's home directory.
|
2010-07-02 21:52:49 +00:00
|
|
|
Expected argument is a datacr tuple
|
2008-10-28 07:10:13 +00:00
|
|
|
"""
|
2010-07-02 21:52:49 +00:00
|
|
|
# TODO: are we called for "/" ?
|
|
|
|
return isinstance(path, tuple) and path[1] and True or False
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
# --- Wrapper methods around open() and tempfile.mkstemp
|
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def create(self, datacr, objname, mode):
        """ Create a children file-node under node, open it
            @param datacr (cr, node, rem) tuple; node is the parent dir
            @param objname name of the file to create/open
            @param mode open mode, passed through to open_data()
            @return open node_descriptor of the created node
        """
        objname = _to_unicode(objname)
        cr , node, rem = datacr
        # Phase 1: if a child with that name already exists, reopen it
        # (an FTP STOR on an existing name overwrites the file).
        try:
            child = node.child(cr, objname)
            if child:
                if child.type not in ('file','content'):
                    raise OSError(1, 'Operation is not permitted.')

                ret = child.open_data(cr, mode)
                cr.commit()
                assert ret, "Cannot create descriptor for %r: %r." % (child, ret)
                return ret
        except EnvironmentError:
            # OSError/IOError are part of the contract; let them bubble.
            raise
        except Exception:
            # Any other lookup failure falls through to creation below.
            self._log.exception('Cannot locate item %s at node %s.', objname, repr(node))
            pass

        # Phase 2: no (usable) existing child, create a fresh one.
        try:
            child = node.create_child(cr, objname, data=None)
            ret = child.open_data(cr, mode)
            assert ret, "Cannot create descriptor for %r." % child
            cr.commit()
            return ret
        except EnvironmentError:
            raise
        except Exception:
            self._log.exception('Cannot create item %s at node %s.', objname, repr(node))
            raise OSError(1, 'Operation is not permitted.')
|
2008-10-28 07:10:13 +00:00
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def open(self, datacr, mode):
        """Open the node of a datacr tuple and return its descriptor."""
        # A resolved (cr, node, rem) tuple is required.
        if not (datacr and datacr[1]):
            raise OSError(1, 'Operation is not permitted.')
        # Reading operation
        cr, node, rem = datacr
        try:
            res = node.open_data(cr, mode)
            cr.commit()
        except TypeError:
            # NOTE(review): open_data() apparently raises TypeError when
            # the node carries no content -- confirm against nodes.py.
            raise IOError(errno.EINVAL, "No data.")
        return res
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
# ok, but need test more
|
|
|
|
|
|
|
|
def mkstemp(self, suffix='', prefix='', dir=None, mode='wb'):
|
|
|
|
"""A wrap around tempfile.mkstemp creating a file with a unique
|
|
|
|
name. Unlike mkstemp it returns an object with a file-like
|
|
|
|
interface.
|
|
|
|
"""
|
2010-08-10 12:29:57 +00:00
|
|
|
raise NotImplementedError # TODO
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
text = not 'b' in mode
|
|
|
|
# for unique file , maintain version if duplicate file
|
|
|
|
if dir:
|
|
|
|
cr = dir.cr
|
|
|
|
uid = dir.uid
|
2009-12-08 13:30:41 +00:00
|
|
|
pool = pooler.get_pool(node.context.dbname)
|
2008-10-28 07:10:13 +00:00
|
|
|
object=dir and dir.object or False
|
|
|
|
object2=dir and dir.object2 or False
|
|
|
|
res=pool.get('ir.attachment').search(cr,uid,[('name','like',prefix),('parent_id','=',object and object.type in ('directory','ressource') and object.id or False),('res_id','=',object2 and object2.id or False),('res_model','=',object2 and object2._name or False)])
|
|
|
|
if len(res):
|
|
|
|
pre = prefix.split('.')
|
|
|
|
prefix=pre[0] + '.v'+str(len(res))+'.'+pre[1]
|
|
|
|
return self.create(dir,suffix+prefix,text)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Ok
|
2010-07-02 21:52:49 +00:00
|
|
|
    def chdir(self, datacr):
        """Change the working directory to the node of datacr."""
        # No datacr (or an all-None one) means "go back to the root".
        if (not datacr) or datacr == (None, None, None):
            self.cwd = '/'
            self.cwd_node = None
            return None
        if not datacr[1]:
            raise OSError(1, 'Operation is not permitted.')
        # Only directory-like nodes may become the cwd.
        if datacr[1].type not in ('collection','database'):
            raise OSError(2, 'Path is not a directory.')
        # cwd is '/<dbname>/<node full path>'; the node itself is kept
        # so later relative lookups can start from it (see get_crdata).
        self.cwd = '/'+datacr[1].context.dbname + '/'
        self.cwd += '/'.join(datacr[1].full_path())
        self.cwd_node = datacr[1]
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
# Ok
|
2010-07-02 21:52:49 +00:00
|
|
|
    def mkdir(self, datacr, basename):
        """Create the specified directory.

        @param datacr (cr, node, rem) tuple; node is the parent dir
        @param basename name of the new directory (raw, re-encoded here)
        """
        cr, node, rem = datacr or (None, None, None)
        if not node:
            raise OSError(1, 'Operation is not permitted.')

        try:
            basename =_to_unicode(basename)
            cdir = node.create_child_collection(cr, basename)
            self._log.debug("Created child dir: %r", cdir)
            cr.commit()
        except Exception:
            # Any failure is reported to the FTP client as EPERM; the
            # real cause goes to the log.
            self._log.exception('Cannot create dir "%s" at node %s.', basename, repr(node))
            raise OSError(1, 'Operation is not permitted.')
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
def close_cr(self, data):
|
2010-07-02 21:52:49 +00:00
|
|
|
if data and data[0]:
|
2008-10-28 07:10:13 +00:00
|
|
|
data[0].close()
|
|
|
|
return True
|
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def get_cr(self, pathname):
        # Legacy API, superseded by get_crdata(); fail loudly.
        raise DeprecationWarning()
|
2010-08-19 11:51:57 +00:00
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def get_crdata(self, line, mode='file'):
        """ Get database cursor, node and remainder data, for commands

        This is the helper function that will prepare the arguments for
        any of the subsequent commands.
        It returns a tuple in the form of:
        @code ( cr, node, rem_path=None )

        @param line An absolute or relative ftp path, as passed to the cmd.
        @param mode A word describing the mode of operation, so that this
        function behaves properly in the different commands.
        """
        path = self.ftpnorm(line)
        # With no cwd node yet, relative paths are anchored at root.
        if self.cwd_node is None:
            if not os.path.isabs(path):
                path = os.path.join(self.root, path)

        # The virtual root itself: listable/enterable, nothing else.
        if path == '/' and mode in ('list', 'cwd'):
            return (None, None, None )

        path = _to_unicode(os.path.normpath(path)) # again, for '/db/../ss'
        if path == '.': path = ''

        if os.path.isabs(path) and self.cwd_node is not None \
                and path.startswith(self.cwd):
            # make relative, so that cwd_node is used again
            path = path[len(self.cwd):]
            if path.startswith('/'):
                path = path[1:]

        p_parts = path.split(os.sep)

        # '..' components were collapsed by normpath above; any left
        # over would escape the tree, so refuse them outright.
        assert '..' not in p_parts

        rem_path = None
        if mode in ('create',):
            # For creation the last component does not exist yet: hand
            # it back as the remainder instead of resolving it.
            rem_path = p_parts[-1]
            p_parts = p_parts[:-1]

        if os.path.isabs(path):
            # we have to start from root, again
            while p_parts and p_parts[0] == '':
                p_parts = p_parts[1:]
            # self._log.debug("Path parts: %r ", p_parts)
            if not p_parts:
                raise IOError(errno.EPERM, 'Cannot perform operation at root directory.')
            # First component of an absolute path is the database name.
            dbname = p_parts[0]
            if dbname not in self.db_list():
                raise IOError(errno.ENOENT,'Invalid database path: %s.' % dbname)
            try:
                db = pooler.get_db(dbname)
            except Exception:
                raise OSError(1, 'Database cannot be used.')
            cr = db.cursor()
            # NOTE(review): self.username/self.password are expected to
            # be set by the FTP authentication layer -- not visible here.
            try:
                uid = security.login(dbname, self.username, self.password)
            except Exception:
                cr.close()
                raise
            if not uid:
                cr.close()
                raise OSError(2, 'Authentification required.')
            n = get_node_context(cr, uid, {})
            node = n.get_uri(cr, p_parts[1:])
            return (cr, node, rem_path)
        else:
            # we never reach here if cwd_node is not set
            if p_parts and p_parts[-1] == '':
                p_parts = p_parts[:-1]
            cr, uid = self.get_node_cr_uid(self.cwd_node)
            if p_parts:
                node = self.cwd_node.get_uri(cr, p_parts)
            else:
                node = self.cwd_node
            if node is False and mode not in ('???'):
                cr.close()
                raise IOError(errno.ENOENT, 'Path does not exist.')
            return (cr, node, rem_path)
|
2008-10-28 07:10:13 +00:00
|
|
|
|
2010-06-29 14:05:32 +00:00
|
|
|
    def get_node_cr_uid(self, node):
        """ Get cr, uid, pool from a node
        @return (cursor, uid) for the node's database
        """
        assert node
        # A fresh cursor per call: nodes never store cursors.
        db = pooler.get_db(node.context.dbname)
        return db.cursor(), node.context.uid
|
2010-08-19 11:51:57 +00:00
|
|
|
|
2010-06-29 14:05:32 +00:00
|
|
|
    def get_node_cr(self, node):
        """ Get the cursor for the database of a node

        The cursor is the only thing that a node will not store
        persistently, so we have to obtain a new one for each call.
        """
        return self.get_node_cr_uid(node)[0]
|
2010-08-19 11:51:57 +00:00
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def listdir(self, datacr):
        """List the content of a directory."""
        # Placeholder node type used to present each database as a
        # read-only, root-owned directory entry at the virtual root.
        class false_node(object):
            write_date = 0.0
            create_date = 0.0
            unixperms = 040550
            content_length = 0L
            uuser = 'root'
            ugroup = 'root'
            type = 'database'

            def __init__(self, db):
                self.path = db

        if datacr[1] is None:
            # No node resolved: we are at the root, list the databases.
            result = []
            for db in self.db_list():
                try:
                    result.append(false_node(db))
                except osv.except_osv:
                    pass
            return result
        cr, node, rem = datacr
        res = node.children(cr)
        return res
|
2008-10-28 07:10:13 +00:00
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
def rmdir(self, datacr):
|
2008-10-28 07:10:13 +00:00
|
|
|
"""Remove the specified directory."""
|
2010-07-02 21:52:49 +00:00
|
|
|
cr, node, rem = datacr
|
2009-12-08 13:30:41 +00:00
|
|
|
assert node
|
2010-07-02 21:52:49 +00:00
|
|
|
node.rmcol(cr)
|
|
|
|
cr.commit()
|
|
|
|
|
|
|
|
def remove(self, datacr):
|
|
|
|
assert datacr[1]
|
|
|
|
if datacr[1].type == 'collection':
|
|
|
|
return self.rmdir(datacr)
|
|
|
|
elif datacr[1].type == 'file':
|
|
|
|
return self.rmfile(datacr)
|
2012-07-25 10:33:34 +00:00
|
|
|
raise OSError(1, 'Operation is not permitted.')
|
2009-12-08 13:30:41 +00:00
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def rmfile(self, datacr):
        """Remove the specified file."""
        assert datacr[1]
        cr = datacr[0]
        # Delegate to the node's own removal, then persist.
        datacr[1].rm(cr)
        cr.commit()
|
2008-10-28 07:10:13 +00:00
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def rename(self, src, datacr):
        """ Renaming operation, the effect depends on the src:
            * A file: read, create and remove
            * A directory: change the parent and reassign children to ressource

        @param src the source node
        @param datacr (cr, destination-node, new-name) tuple
        """
        cr = datacr[0]
        try:
            nname = _to_unicode(datacr[2])
            ret = src.move_to(cr, datacr[1], new_name=nname)
            # API shouldn't wait for us to write the object
            assert (ret is True) or (ret is False)
            cr.commit()
        except EnvironmentError:
            raise
        except Exception:
            self._log.exception('Cannot rename "%s" to "%s" at "%s".', src, datacr[2], datacr[1])
            raise OSError(1,'Operation is not permitted.')
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
    def stat(self, node):
        # Not supported; listings are produced by format_list()/
        # format_mlsx() directly from node attributes.
        raise NotImplementedError()
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
# --- Wrapper methods around os.path.*
|
|
|
|
|
|
|
|
# Ok
|
|
|
|
def isfile(self, node):
|
2010-07-02 21:52:49 +00:00
|
|
|
if node and (node.type in ('file','content')):
|
2008-10-28 07:10:13 +00:00
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
# Ok
|
|
|
|
    def islink(self, path):
        """Return True if path is a symbolic link."""
        # Symbolic links are never exposed by the document storage.
        return False
|
|
|
|
|
|
|
|
def isdir(self, node):
|
|
|
|
"""Return True if path is a directory."""
|
|
|
|
if node is None:
|
|
|
|
return True
|
|
|
|
if node and (node.type in ('collection','database')):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def getsize(self, datacr):
        """Return the size of the specified file in bytes."""
        if not (datacr and datacr[1]):
            raise IOError(errno.ENOENT, "No such file or directory.")
        if datacr[1].type in ('file', 'content'):
            # Missing or empty content is reported as 0 bytes.
            return datacr[1].get_data_len(datacr[0]) or 0L
        # Directory-like nodes have no meaningful size.
        return 0L
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
# Ok
|
2010-07-02 21:52:49 +00:00
|
|
|
def getmtime(self, datacr):
|
2008-10-28 07:10:13 +00:00
|
|
|
"""Return the last modified time as a number of seconds since
|
|
|
|
the epoch."""
|
2010-08-19 11:51:57 +00:00
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
node = datacr[1]
|
2009-12-08 13:30:41 +00:00
|
|
|
if node.write_date or node.create_date:
|
|
|
|
dt = (node.write_date or node.create_date)[:19]
|
2008-10-28 07:10:13 +00:00
|
|
|
result = time.mktime(time.strptime(dt, '%Y-%m-%d %H:%M:%S'))
|
|
|
|
else:
|
|
|
|
result = time.mktime(time.localtime())
|
|
|
|
return result
|
|
|
|
|
|
|
|
# Ok
|
|
|
|
    def realpath(self, path):
        """Return the canonical version of path eliminating any
        symbolic links encountered in the path (if they are
        supported by the operating system).
        """
        # The virtual fs has no symlinks: every path is canonical.
        return path
|
|
|
|
|
|
|
|
# Ok
|
|
|
|
def lexists(self, path):
|
|
|
|
"""Return True if path refers to an existing path, including
|
|
|
|
a broken or circular symbolic link.
|
|
|
|
"""
|
2010-07-02 21:52:49 +00:00
|
|
|
raise DeprecationWarning()
|
2008-10-28 07:10:13 +00:00
|
|
|
return path and True or False
|
2010-07-02 21:52:49 +00:00
|
|
|
|
2008-10-28 07:10:13 +00:00
|
|
|
exists = lexists
|
|
|
|
|
|
|
|
# Ok, can be improved
|
|
|
|
    def glob1(self, dirname, pattern):
        """Return a list of files matching a dirname pattern
        non-recursively.

        Unlike glob.glob1 raises exception if os.listdir() fails.
        """
        names = self.listdir(dirname)
        if pattern[0] != '.':
            # Hide dot-entries unless the pattern explicitly wants them.
            names = filter(lambda x: x.path[0] != '.', names)
        # NOTE(review): fnmatch.filter() is applied to the node objects
        # themselves rather than their .path strings -- verify whether
        # this path is ever exercised (see get_stat_dir).
        return fnmatch.filter(names, pattern)
|
|
|
|
|
|
|
|
# --- Listing utilities
|
|
|
|
|
|
|
|
# note: the following operations are no more blocking
|
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def get_list_dir(self, datacr):
        """"Return an iterator object that yields a directory listing
        in a form suitable for LIST command.
        """
        if not datacr:
            return None
        elif self.isdir(datacr[1]):
            listing = self.listdir(datacr)
            return self.format_list(datacr[0], datacr[1], listing)
        # if path is a file or a symlink we return information about it
        elif self.isfile(datacr[1]):
            par = datacr[1].parent
            return self.format_list(datacr[0], par, [datacr[1]])
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
    def get_stat_dir(self, rawline, datacr):
        """Return an iterator object that yields a list of files
        matching a dirname pattern non-recursively in a form
        suitable for STAT command.

         - (str) rawline: the raw string passed by client as command
           argument.
        """
        # NOTE(review): self.ftp2fs() unconditionally raises
        # DeprecationWarning, so both branches below look dead --
        # confirm before relying on STAT with a path argument.
        ftppath = self.ftpnorm(rawline)
        if not glob.has_magic(ftppath):
            return self.get_list_dir(self.ftp2fs(rawline, datacr))
        else:
            basedir, basename = os.path.split(ftppath)
            if glob.has_magic(basedir):
                return iter(['Directory recursion not supported.\r\n'])
            else:
                basedir = self.ftp2fs(basedir, datacr)
                listing = self.glob1(basedir, basename)
                if listing:
                    listing.sort()
                return self.format_list(basedir, listing)
|
|
|
|
|
2010-07-02 21:52:49 +00:00
|
|
|
    def format_list(self, cr, parent_node, listing, ignore_err=True):
        """Return an iterator object that yields the entries of given
        directory emulating the "/bin/ls -lA" UNIX command output.

         - (str) basedir: the parent directory node. Can be None
         - (list) listing: a list of nodes
         - (bool) ignore_err: when False raise exception if os.lstat()
           call fails.

        On platforms which do not support the pwd and grp modules (such
        as Windows), ownership is printed as "owner" and "group" as a
        default, and number of hard links is always "1". On UNIX
        systems, the actual owner, group, and number of links are
        printed.

        This is how output appears to client:

        -rw-rw-rw-   1 owner   group    7045120 Sep 02  3:47 music.mp3
        drwxrwxrwx   1 owner   group          0 Aug 31 18:50 e-books
        -rw-rw-rw-   1 owner   group        380 Sep 02  3:40 module.py
        """
        for node in listing:
            perms = filemode(node.unixperms)  # permissions
            nlinks = 1
            size = node.content_length or 0L
            uname = _to_decode(node.uuser)
            gname = _to_decode(node.ugroup)
            # stat.st_mtime could fail (-1) if last mtime is too old
            # in which case we return the local time as last mtime
            try:
                st_mtime = node.write_date or 0.0
                if isinstance(st_mtime, basestring):
                    st_mtime = time.strptime(st_mtime, '%Y-%m-%d %H:%M:%S')
                elif isinstance(st_mtime, float):
                    st_mtime = time.localtime(st_mtime)
                mname=_get_month_name(time.strftime("%m", st_mtime ))
                mtime = mname+' '+time.strftime("%d %H:%M", st_mtime)
            except ValueError:
                mname=_get_month_name(time.strftime("%m"))
                mtime = mname+' '+time.strftime("%d %H:%M")
            # node.path may be a full path list; show the last component.
            fpath = node.path
            if isinstance(fpath, (list, tuple)):
                fpath = fpath[-1]
            # formatting is matched with proftpd ls output
            path=_to_decode(fpath)
            yield "%s %3s %-8s %-8s %8s %s %s\r\n" %(perms, nlinks, uname, gname,
                                                     size, mtime, path)
|
2008-10-28 07:10:13 +00:00
|
|
|
|
|
|
|
# Ok
|
2010-07-02 21:52:49 +00:00
|
|
|
def format_mlsx(self, cr, basedir, listing, perms, facts, ignore_err=True):
|
2008-10-28 07:10:13 +00:00
|
|
|
"""Return an iterator object that yields the entries of a given
|
|
|
|
directory or of a single file in a form suitable with MLSD and
|
|
|
|
MLST commands.
|
|
|
|
|
|
|
|
Every entry includes a list of "facts" referring the listed
|
|
|
|
element. See RFC-3659, chapter 7, to see what every single
|
|
|
|
fact stands for.
|
|
|
|
|
|
|
|
- (str) basedir: the absolute dirname.
|
|
|
|
- (list) listing: the names of the entries in basedir
|
|
|
|
- (str) perms: the string referencing the user permissions.
|
|
|
|
- (str) facts: the list of "facts" to be returned.
|
|
|
|
- (bool) ignore_err: when False raise exception if os.stat()
|
|
|
|
call fails.
|
|
|
|
|
|
|
|
Note that "facts" returned may change depending on the platform
|
|
|
|
and on what user specified by using the OPTS command.
|
|
|
|
|
|
|
|
This is how output could appear to the client issuing
|
|
|
|
a MLSD request:
|
|
|
|
|
|
|
|
type=file;size=156;perm=r;modify=20071029155301;unique=801cd2; music.mp3
|
|
|
|
type=dir;size=0;perm=el;modify=20071127230206;unique=801e33; ebooks
|
|
|
|
type=file;size=211;perm=r;modify=20071103093626;unique=801e32; module.py
|
|
|
|
"""
|
|
|
|
permdir = ''.join([x for x in perms if x not in 'arw'])
|
|
|
|
permfile = ''.join([x for x in perms if x not in 'celmp'])
|
|
|
|
if ('w' in perms) or ('a' in perms) or ('f' in perms):
|
|
|
|
permdir += 'c'
|
|
|
|
if 'd' in perms:
|
|
|
|
permdir += 'p'
|
|
|
|
type = size = perm = modify = create = unique = mode = uid = gid = ""
|
2010-07-02 21:52:49 +00:00
|
|
|
for node in listing:
|
2008-10-28 07:10:13 +00:00
|
|
|
# type + perm
|
2010-07-02 21:52:49 +00:00
|
|
|
if self.isdir(node):
|
2008-10-28 07:10:13 +00:00
|
|
|
if 'type' in facts:
|
2010-08-19 11:51:57 +00:00
|
|
|
type = 'type=dir;'
|
2008-10-28 07:10:13 +00:00
|
|
|
if 'perm' in facts:
|
|
|
|
perm = 'perm=%s;' %permdir
|
|
|
|
else:
|
|
|
|
if 'type' in facts:
|
|
|
|
type = 'type=file;'
|
|
|
|
if 'perm' in facts:
|
|
|
|
perm = 'perm=%s;' %permfile
|
|
|
|
if 'size' in facts:
|
2010-07-02 21:52:49 +00:00
|
|
|
size = 'size=%s;' % (node.content_length or 0L)
|
2008-10-28 07:10:13 +00:00
|
|
|
# last modification time
|
|
|
|
if 'modify' in facts:
|
|
|
|
try:
|
2010-07-02 21:52:49 +00:00
|
|
|
st_mtime = node.write_date or 0.0
|
|
|
|
if isinstance(st_mtime, basestring):
|
|
|
|
st_mtime = time.strptime(st_mtime, '%Y-%m-%d %H:%M:%S')
|
|
|
|
elif isinstance(st_mtime, float):
|
|
|
|
st_mtime = time.localtime(st_mtime)
|
|
|
|
modify = 'modify=%s;' %time.strftime("%Y%m%d%H%M%S", st_mtime)
|
2008-10-28 07:10:13 +00:00
|
|
|
except ValueError:
|
|
|
|
# stat.st_mtime could fail (-1) if last mtime is too old
|
|
|
|
modify = ""
|
|
|
|
if 'create' in facts:
|
|
|
|
# on Windows we can provide also the creation time
|
|
|
|
try:
|
2010-07-02 21:52:49 +00:00
|
|
|
st_ctime = node.create_date or 0.0
|
|
|
|
if isinstance(st_ctime, basestring):
|
|
|
|
st_ctime = time.strptime(st_ctime, '%Y-%m-%d %H:%M:%S')
|
|
|
|
elif isinstance(st_mtime, float):
|
|
|
|
st_ctime = time.localtime(st_ctime)
|
|
|
|
create = 'create=%s;' %time.strftime("%Y%m%d%H%M%S",st_ctime)
|
2008-10-28 07:10:13 +00:00
|
|
|
except ValueError:
|
|
|
|
create = ""
|
|
|
|
# UNIX only
|
|
|
|
if 'unix.mode' in facts:
|
2010-07-02 21:52:49 +00:00
|
|
|
mode = 'unix.mode=%s;' %oct(node.unixperms & 0777)
|
2008-10-28 07:10:13 +00:00
|
|
|
if 'unix.uid' in facts:
|
2010-07-11 14:25:48 +00:00
|
|
|
uid = 'unix.uid=%s;' % _to_decode(node.uuser)
|
2008-10-28 07:10:13 +00:00
|
|
|
if 'unix.gid' in facts:
|
2010-07-11 14:25:48 +00:00
|
|
|
gid = 'unix.gid=%s;' % _to_decode(node.ugroup)
|
2008-10-28 07:10:13 +00:00
|
|
|
# We provide unique fact (see RFC-3659, chapter 7.5.2) on
|
|
|
|
# posix platforms only; we get it by mixing st_dev and
|
|
|
|
# st_ino values which should be enough for granting an
|
|
|
|
# uniqueness for the file listed.
|
|
|
|
# The same approach is used by pure-ftpd.
|
|
|
|
# Implementors who want to provide unique fact on other
|
|
|
|
# platforms should use some platform-specific method (e.g.
|
|
|
|
# on Windows NTFS filesystems MTF records could be used).
|
2010-07-02 21:52:49 +00:00
|
|
|
# if 'unique' in facts: todo
|
|
|
|
# unique = "unique=%x%x;" %(st.st_dev, st.st_ino)
|
|
|
|
path = node.path
|
|
|
|
if isinstance (path, (list, tuple)):
|
|
|
|
path = path[-1]
|
|
|
|
path=_to_decode(path)
|
2008-10-28 07:10:13 +00:00
|
|
|
yield "%s%s%s%s%s%s%s%s%s %s\r\n" %(type, size, perm, modify, create,
|
2009-07-31 12:24:54 +00:00
|
|
|
mode, uid, gid, unique, path)
|
2008-09-26 14:23:55 +00:00
|
|
|
|
2009-03-03 09:51:57 +00:00
|
|
|
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
2009-04-13 06:28:27 +00:00
|
|
|
|