Document Management System

bzr revid: hmo@tinyerp.com-20080926142355-5nxy223jb4rad5lh
This commit is contained in:
Harshad Modi 2008-09-26 19:53:55 +05:30
parent fdd7c80f61
commit 4cc0e3dc98
15 changed files with 4919 additions and 0 deletions

View File

@ -0,0 +1,8 @@
To be done:
-----------
* Add a test ensuring the same file or directory is never created twice
  -> maybe add a unique SQL constraint spanning several columns
  -> exercise this through both remove and put
* Retest everything

View File

@ -0,0 +1,30 @@
##############################################################################
#
# Copyright (c) 2004 TINY SPRL. (http://tiny.be) All Rights Reserved.
# Fabien Pinckaers <fp@tiny.Be>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import document
import ftpserver

View File

@ -0,0 +1,21 @@
#
# Use the custom module to put your specific code in a separate module.
#
# OpenERP/TinyERP module manifest for the integrated document management
# module: metadata, dependencies and the XML files loaded on install/update.
{
    "name" : "Integrated Document Management System",
    "version" : "1.0",
    "author" : "Tiny",
    "category" : "Generic Modules/Others",
    "website": "http://www.tinyerp.com",
    # NOTE(review): "Authentification"/"Indexation" are typos in this
    # user-visible description ("Authentication"/"Indexing") -- left as-is
    # here because the string is runtime data.
    "description": """This is a complete document management system:
* FTP Interface
* User Authentification
* Document Indexation
""",
    "depends" : ["base"],
    # Data loaded once at module installation.
    "init_xml" : ["document_data.xml"],
    # Data reloaded at every module update.
    "update_xml" : ["document_view.xml"],
    "demo_xml" : ["document_demo.xml"],
    "active": False,
    "installable": True
}

View File

@ -0,0 +1,34 @@
import time
import os
import StringIO
import odt2txt
#
# This should be the indexer
#
def content_index(content, filename=None, content_type=None):
    """Extract a plain-text index from a document's raw content.

    content: raw file content (byte string).
    filename: used only for its extension, which selects the converter.
    content_type: currently unused (reserved for MIME-based dispatch).

    Returns the extracted text; formats without a converter fall back to
    returning the raw content unchanged.
    """
    fname, ext = os.path.splitext(filename)
    result = ''
    # Bug fix: the original tested `ext in ('.doc')` -- ('.doc') is a plain
    # string, not a tuple, so any substring of ".doc" (e.g. ".d", "c")
    # wrongly matched. An equality test is what was intended.
    if ext == '.doc':  # or content_type ?
        # Pipe the document through the external `antiword` converter.
        (stdin, stdout) = os.popen2('antiword -', 'b')
        stdin.write(content)
        stdin.close()
        result = stdout.read().decode('latin1','replace').encode('utf-8','replace')
    elif ext == '.pdf':
        # pdftotext reads only from a file, so spill to a temp path first.
        fname = os.tempnam(filename)+'.pdf'
        fp = file(fname,'wb')
        fp.write(content)
        fp.close()
        fp = os.popen('pdftotext -enc UTF-8 -nopgbrk '+fname+' -', 'r')
        result = fp.read()
        fp.close()
    elif ext in ('.xls','.ods','.odt','.odp'):
        # Spreadsheet / OpenDocument formats: use the bundled odt2txt.
        s = StringIO.StringIO(content)
        o = odt2txt.OpenDocumentTextFile(s)
        result = o.toString().encode('ascii','replace')
        s.close()
    elif ext in ('.txt','.py','.patch','.html','.csv','.xml'):
        # Already plain text: index as-is.
        result = content
    else:
        # Unknown extension: fall back to the raw content.
        result = content
    return result

View File

@ -0,0 +1,6 @@
<?xml version="1.0"?>
<terp>
<data>
</data>
</terp>

View File

@ -0,0 +1,12 @@
<terp>
<data>
<record model="res.groups" id="group_compta_user">
<field name="name">grcompta</field>
</record>
<record model="res.groups" id="group_compta_admin">
<field name="name">grcomptaadmin</field>
</record>
<menuitem name="Administration" groups="admin,grcomptaadmin" icon="terp-stock" id="menu_admin_compta"/>
</data>
</terp>

568
addons/document/document.py Normal file
View File

@ -0,0 +1,568 @@
#############################################################################
#
# Copyright (c) 2004 TINY SPRL. (http://tiny.be) All Rights Reserved.
# Fabien Pinckaers <fp@tiny.Be>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import base64
from osv import osv, fields
from osv.orm import except_orm
import urlparse
import os
import pooler
from content_index import content_index
# Unsupported WebDAV Commands:
#     label
#     search
#     checkin
#     checkout
#     propget
#     propset
#
# An object that represents a URI
#     path: the URI of the object
#     content: the Content it belongs to (_print.pdf)
#     type: content or collection
#         content: object = res.partner
#         collection: object = directory, object2 = res.partner
#         file: object = ir.attachment
#     root: True if we are at the first directory of a resource
#
# Characters forbidden in FTP/WebDAV node names, mapped to replacement
# tokens so the substitution can be reversed when resolving a displayed
# name back to a record: '/' becomes '__', every other character becomes
# str(hash(char)).
# NOTE(review): hash() is implementation/process dependent in modern
# Pythons (hash randomization), so these tokens are not stable across
# interpreter runs -- confirm this is acceptable for persisted names.
INVALID_CHARS={'*':str(hash('*')), '|':str(hash('|')) , "\\":str(hash("\\")), '/':'__', ':':str(hash(':')), '"':str(hash('"')), '<':str(hash('<')) , '>':str(hash('>')) , '?':str(hash('?'))}
class node_class(object):
    """One node of the virtual document tree served over FTP/WebDAV.

    type is one of:
      'database'   -- the root of a database
      'collection' -- a document.directory record
      'content'    -- a virtual file generated from a directory content
      'file'       -- an ir.attachment record
    object/object2 are the browse records backing the node; root holds the
    id (or flag) of the first resource ancestor.
    """
    def __init__(self, cr, uid, path,object,object2=False, context={}, content=False, type='collection', root=False):
        # NOTE(review): mutable default `context={}` is shared across calls.
        self.cr = cr
        self.uid = uid
        self.path = path
        self.object = object
        self.object2 = object2
        self.context = context
        self.content = content
        self.type=type
        self.root=root

    def _file_get(self, nodename=False):
        """Return child 'file'/'content' nodes, optionally filtered by name."""
        if not self.object:
            return []
        pool = pooler.get_pool(self.cr.dbname)
        fobj = pool.get('ir.attachment')
        res2 = []
        where = []
        if self.object2:
            # Node is bound to a concrete resource record: list that
            # record's attachments plus the directory's virtual files.
            where.append( ('res_model','=',self.object2._name) )
            where.append( ('res_id','=',self.object2.id) )
            for content in self.object.content_ids:
                test_nodename = self.object2.name + (content.suffix or '') + (content.extension or '')
                # NOTE(review): str.find() used as a boolean -- this is True
                # unless '/' sits at position 0, not "contains '/'"; confirm
                # intended semantics.
                if test_nodename.find('/'):
                    test_nodename=test_nodename.replace('/', '_')
                path = self.path+'/'+test_nodename
                #path = self.path+'/'+self.object2.name + (content.suffix or '') + (content.extension or '')
                if not nodename:
                    n = node_class(self.cr, self.uid,path, self.object2, False, content=content, type='content', root=False)
                    res2.append( n)
                else:
                    if nodename == test_nodename:
                        n = node_class(self.cr, self.uid, path, self.object2, False, content=content, type='content', root=False)
                        res2.append(n)
        else:
            where.append( ('parent_id','=',self.object.id) )
            where.append( ('res_id','=',False) )
        if nodename:
            where.append( (fobj._rec_name,'=',nodename) )
        ids = fobj.search(self.cr, self.uid, where+[ ('parent_id','=',self.object and self.object.id or False) ], context=self.context)
        if self.object and self.root and (self.object.type=='ressource'):
            # At the root of a resource, also expose unfiled attachments.
            ids += fobj.search(self.cr, self.uid, where+[ ('parent_id','=',False) ], context=self.context)
        res = fobj.browse(self.cr, self.uid, ids, context=self.context)
        return map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name, x, False, type='file', root=False), res) + res2

    def directory_list_for_child(self,nodename,parent=False):
        """Browse the document.directory records that are children of this node."""
        pool = pooler.get_pool(self.cr.dbname)
        where = []
        if nodename:
            where.append(('name','=',nodename))
        if (self.object and self.object.type=='directory') or not self.object2:
            where.append(('parent_id','=',self.object and self.object.id or False))
        else:
            where.append(('parent_id','=',False))
        if self.object:
            where.append(('ressource_parent_type_id','=',self.object.ressource_type_id.id))
        else:
            where.append(('ressource_parent_type_id','=',False))
        # Generic directories (ressource_id = 0) ...
        ids = pool.get('document.directory').search(self.cr, self.uid, where+[('ressource_id','=',0)], self.context)
        if self.object2:
            # ... plus directories attached to this specific record.
            ids += pool.get('document.directory').search(self.cr, self.uid, where+[('ressource_id','=',self.object2.id)], self.context)
        res = pool.get('document.directory').browse(self.cr, self.uid, ids,self.context)
        return res

    def _child_get(self, nodename=False):
        """Return child collection nodes (sub-directories and, for resource
        directories, one node per matching record of the linked model)."""
        if self.type not in ('collection','database'):
            return []
        res = self.directory_list_for_child(nodename)
        result= map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name, x, x.type=='directory' and self.object2 or False, root=self.root), res)
        if self.type=='database':
            # At the database root also expose unfiled attachments as files.
            pool = pooler.get_pool(self.cr.dbname)
            fobj = pool.get('ir.attachment')
            vargs = [('parent_id','=',False),('res_id','=',False)]
            if nodename:
                vargs.append(('name','=',nodename))
            file_ids=fobj.search(self.cr,self.uid,vargs)
            res = fobj.browse(self.cr, self.uid, file_ids, context=self.context)
            result +=map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name, x, False, type='file', root=self.root), res)
        if self.type=='collection' and self.object.type=="ressource":
            # Resource directory: expand into one node per linked record.
            # NOTE(review): eval() of the user-editable domain string --
            # arbitrary code execution risk; confirm it is admin-only input.
            where = self.object.domain and eval(self.object.domain, {'active_id':self.root}) or []
            pool = pooler.get_pool(self.cr.dbname)
            obj = pool.get(self.object.ressource_type_id.model)
            if self.object.ressource_tree:
                if obj._parent_name in obj.fields_get(self.cr,self.uid):
                    where.append((obj._parent_name,'=',self.object2 and self.object2.id or False))
                else :
                    if self.object2:
                        return result
            else:
                if self.object2:
                    return result
            name_for = obj._name.split('.')[-1]
            if nodename and nodename.find(name_for) == 0 :
                # Names like "partner23" address a record directly by id.
                id = int(nodename.replace(name_for,''))
                where.append(('id','=',id))
            elif nodename:
                # Decode the escaped characters before matching by name.
                # NOTE(review): str.find() used as a boolean here too --
                # a match at position 0 is treated as "not found".
                if nodename.find('__') :
                    nodename=nodename.replace('__','/')
                for invalid in INVALID_CHARS:
                    if nodename.find(INVALID_CHARS[invalid]) :
                        nodename=nodename.replace(INVALID_CHARS[invalid],invalid)
                where.append(('name','=',nodename))
            ids = obj.search(self.cr, self.uid, where, self.context)
            res = obj.browse(self.cr, self.uid, ids,self.context)
            for r in res:
                if not r.name:
                    r.name = name_for+'%d'%r.id
                for invalid in INVALID_CHARS:
                    if r.name.find(invalid) :
                        r.name=r.name.replace(invalid,INVALID_CHARS[invalid])
            # NOTE(review): root=r.id captures the *last* record of the loop
            # above for every generated node -- x.id per node was probably
            # intended; confirm.
            result2 = map(lambda x: node_class(self.cr, self.uid, self.path+'/'+x.name.replace('/','__'), self.object, x, root=r.id), res)
            if result2:
                result = result2
        return result

    def children(self):
        """All child nodes: sub-collections first, then files/contents."""
        return self._child_get() + self._file_get()

    def child(self, name):
        """Return the single child node called `name`, or None."""
        res = self._child_get(name)
        if res:
            return res[0]
        res = self._file_get(name)
        if res:
            return res[0]
        return None

    def path_get(self):
        """Return the node path without its leading '/'.

        NOTE(review): raises IndexError when self.path is an empty string.
        """
        path = self.path
        if self.path[0]=='/':
            path = self.path[1:]
        return path
class document_directory(osv.osv):
    """A folder of the document management system.

    A directory is either a plain static folder (type 'directory') or a
    virtual folder expanded per record of another model (type 'ressource').
    Directories are exposed as collections by the FTP/WebDAV layer
    (see node_class).
    """
    _name = 'document.directory'
    _description = 'Document directory'
    _columns = {
        'name': fields.char('Name', size=64, required=True, select=1),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'file_type': fields.char('Content Type', size=32),
        # Domain (evaluated with 'active_id' in scope) restricting the
        # records listed inside a resource directory.
        'domain': fields.char('Domain', size=128),
        'user_id': fields.many2one('res.users', 'Owner'),
        'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
        'parent_id': fields.many2one('document.directory', 'Parent Item'),
        'child_ids': fields.one2many('document.directory', 'parent_id', 'Childs'),
        'file_ids': fields.one2many('ir.attachment', 'parent_id', 'Files'),
        'content_ids': fields.one2many('document.directory.content', 'directory_id', 'Virtual Files'),
        'type': fields.selection([('directory','Static Directory'),('ressource','Other Ressources')], 'Type', required=True),
        'ressource_type_id': fields.many2one('ir.model', 'Childs Model'),
        'ressource_parent_type_id': fields.many2one('ir.model', 'Linked Model'),
        'ressource_id': fields.integer('Ressource ID'),
        'ressource_tree': fields.boolean('Tree Structure'),
    }
    _defaults = {
        'user_id': lambda self,cr,uid,ctx: uid,
        'domain': lambda self,cr,uid,ctx: '[]',
        'type': lambda *args: 'directory',
    }
    _sql_constraints = [
        ('dirname_uniq', 'unique (name,parent_id,ressource_id,ressource_parent_type_id)', 'The directory name must be unique !')
    ]

    def _check_duplication(self, cr, uid, vals):
        """Return False when another directory with the same name exists
        under the same parent / linked model, True otherwise.

        Fixed: the original interpolated vals['name'] straight into the
        SQL string (SQL injection, and broken for names containing a
        quote); this version uses query parameters throughout.
        """
        if 'name' not in vals:
            return True
        where = "name = %s"
        params = [vals['name']]
        if not vals.get('parent_id'):
            where += " and parent_id is null"
        else:
            where += " and parent_id = %s"
            params.append(vals['parent_id'])
        if not vals.get('ressource_parent_type_id'):
            where += " and ressource_parent_type_id is null"
        else:
            where += " and ressource_parent_type_id = %s"
            params.append(vals['ressource_parent_type_id'])
        cr.execute("select id from document_directory where " + where, tuple(params))
        return not cr.fetchall()

    def _check_recursion(self, cr, uid, ids):
        """Constraint helper: False when the parent chain contains a cycle
        (walk bounded at 100 levels; ids are integers, safe to join)."""
        level = 100
        while len(ids):
            cr.execute('select distinct parent_id from document_directory where id in ('+','.join(map(str,ids))+')')
            ids = filter(None, map(lambda x:x[0], cr.fetchall()))
            if not level:
                return False
            level -= 1
        return True

    _constraints = [
        (_check_recursion, 'Error! You can not create recursive Directories.', ['parent_id'])
    ]

    def __init__(self, *args, **kwargs):
        res = super(document_directory, self).__init__(*args, **kwargs)
        # uri -> (path, object ref, object2 ref, content, type, root) cache
        # filled by get_object() (its lookup is currently disabled there).
        self._cache = {}
        return res

    def onchange_content_id(self, cr, uid, ids, ressource_type_id):
        # Placeholder referenced by the form view; no client-side update yet.
        return {}

    def _get_childs(self, cr, uid, node, nodename=False, context={}):
        """Return (browse records of the child directories of `node`, False).

        Fixed: the original tested and dereferenced the builtin ``object``
        (always truthy, has no .id) instead of the ``node`` argument, so it
        raised AttributeError whenever that branch was reached.
        """
        where = []
        if nodename:
            where.append(('name','=',nodename))
        if node:
            where.append(('parent_id','=',node.id))
        ids = self.search(cr, uid, where, context)
        return self.browse(cr, uid, ids, context), False

    def get_object(self, cr, uid, uri, context={}):
        """Resolve a uri (sequence of path components, e.g.
        ['Sales Order', 'SO001']) into a node_class instance.

        Returns the node -- whose .object is the document.directory or
        content, and .object2 the linked record if any -- or False when a
        component cannot be resolved.
        """
        if not uri:
            return node_class(cr, uid, '', False, type='database')
        turi = tuple(uri)
        # Cache lookup deliberately disabled ('if False and ...'); the
        # cache is still *written* below for a future re-activation.
        if False and (turi in self._cache):
            (path, oo, oo2, content,type,root) = self._cache[turi]
            if oo:
                object = self.pool.get(oo[0]).browse(cr, uid, oo[1], context)
            else:
                object = False
            if oo2:
                object2 = self.pool.get(oo2[0]).browse(cr, uid, oo2[1], context)
            else:
                object2 = False
            node = node_class(cr, uid, path, object,object2, context, content, type, root)
            return node
        # Walk the tree one component at a time from the database root.
        node = node_class(cr, uid, '/', False, type='database')
        for path in uri[:]:
            if path:
                node = node.child(path)
                if not node:
                    return False
        oo = node.object and (node.object._name, node.object.id) or False
        oo2 = node.object2 and (node.object2._name, node.object2.id) or False
        self._cache[turi] = (node.path, oo, oo2, node.content,node.type,node.root)
        return node

    def get_childs(self, cr, uid, uri, context={}):
        """Return the virtual paths of the children of `uri` (or the root
        node itself when uri is empty)."""
        node = self.get_object(cr, uid, uri, context)
        if uri:
            children = node.children()
        else:
            children = [node]
        result = map(lambda node: node.path_get(), children)
        return result

    def write(self, cr, uid, ids, vals, context=None):
        if not self._check_duplication(cr, uid, vals):
            raise except_orm('ValidateError', 'Directory name must be unique!')
        return super(document_directory,self).write(cr,uid,ids,vals,context=context)

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a directory, suffixing the name so the uniqueness
        constraint is satisfied."""
        if not default:
            default = {}
        name = self.read(cr, uid, [id])[0]['name']
        default.update({'name': name + " (copy)"})
        return super(document_directory,self).copy(cr,uid,id,default,context)

    def create(self, cr, uid, vals, context=None):
        if not self._check_duplication(cr, uid, vals):
            raise except_orm('ValidateError', 'Directory name must be unique!')
        name = vals.get('name', False)
        if name and ('/' in name or '@' in name or '$' in name or '#' in name):
            # Fixed: the original did `raise 'Error'` -- string exceptions
            # are deprecated/invalid; raise a real exception instead.
            raise except_orm('ValidateError', 'Directory name may not contain /, @, $ or # characters!')
        return super(document_directory,self).create(cr, uid, vals, context)
document_directory()
class document_directory_content(osv.osv):
    """A virtual file inside a directory, rendered on demand from a report."""
    _name = 'document.directory.content'
    _description = 'Directory Content'
    _order = "sequence"
    _columns = {
        'name': fields.char('Content Name', size=64, required=True),
        'sequence': fields.integer('Sequence', size=16),
        # Suffix appended to the resource name to build the file name.
        'suffix': fields.char('Suffix', size=16),
        'versioning': fields.boolean('Versioning'),
        # Report used to render the virtual file's content.
        'report_id': fields.many2one('ir.actions.report.xml', 'Report', required=True),
        'extension': fields.selection([('.pdf','.pdf'),('','None')], 'Extension', required=True),
        'directory_id': fields.many2one('document.directory', 'Directory')
    }
    _defaults = {
        'extension': lambda *args: '',
        'sequence': lambda *args: 1
    }
document_directory_content()
class ir_action_report_xml(osv.osv):
    """Extend ir.actions.report.xml with a searchable `model_id` function
    field mapping each report to its ir.model record."""
    _name="ir.actions.report.xml"
    _inherit ="ir.actions.report.xml"

    def _model_get(self, cr, uid, ids, name, arg, context):
        """Function-field getter: ir.model id of each report's model.

        NOTE(review): reports whose 'model' is empty get no entry in the
        result dict at all (not even False) -- confirm callers tolerate it.
        """
        res = {}
        model_pool = self.pool.get('ir.model')
        for data in self.read(cr,uid,ids,['model']):
            model = data.get('model',False)
            if model:
                model_id =model_pool.search(cr,uid,[('model','=',model)])
                if model_id:
                    res[data.get('id')] = model_id[0]
                else:
                    res[data.get('id')] = False
        return res

    def _model_search(self, cr, uid, obj, name, args):
        """Function-field search: translate a model_id criterion into a
        domain over report ids (only the first criterion is honoured)."""
        if not len(args):
            return []
        model_id= args[0][2]
        if not model_id:
            return []
        model = self.pool.get('ir.model').read(cr,uid,[model_id])[0]['model']
        report_id = self.search(cr,uid,[('model','=',model)])
        if not report_id:
            # NOTE(review): '0' is a string here; the integer 0 was
            # probably intended (empty match either way).
            return [('id','=','0')]
        return [('id','in',report_id)]

    _columns={
        'model_id' : fields.function(_model_get,fnct_search=_model_search,method=True,string='Model Id'),
    }
ir_action_report_xml()
import random
import string
def random_name():
    """Return a random 10-character ASCII-letter name (filestore entries).

    Fixed: the original called random.seed() on every invocation, which
    re-seeds from the clock -- two calls within the same timer tick could
    yield identical names, and per-call reseeding weakens the generator
    state. It also used the locale-dependent string.letters; the stable
    string.ascii_letters is used instead.
    """
    d = [random.choice(string.ascii_letters) for x in range(10)]
    name = "".join(d)
    return name
def create_directory(path):
    """Create a randomly-named sub-directory under `path` and return its name."""
    name = random_name()
    os.mkdir(os.path.join(path, name))
    return name
class document_file(osv.osv):
    """Extend ir.attachment with directory placement, access groups and
    pluggable storage: database ('db'), filestore on disk ('fs') or
    external link ('link')."""
    _inherit = 'ir.attachment'

    def _data_get(self, cr, uid, ids, name, arg, context):
        """Function-field getter: base64 content per attachment id, read
        from the db column or from the on-disk filestore."""
        result = {}
        # ids come from the ORM as integers, so the join is safe.
        cr.execute('select id,store_method,datas,store_fname,link from ir_attachment where id in ('+','.join(map(str,ids))+')')
        for id,m,d,r,l in cr.fetchall():
            if m=='db':
                result[id] = d
            elif m=='fs':
                try:
                    path = os.path.join(os.getcwd(),'filestore')
                    value = file(os.path.join(path,r), 'rb').read()
                    result[id] = base64.encodestring(value)
                except:
                    # Best-effort: a missing/unreadable file degrades to
                    # empty content instead of breaking the whole read.
                    result[id]=''
            else:
                # 'link' (or unknown) storage has no inline content.
                result[id] = ''
        return result
    #
    # This code can be improved
    #
    def _data_set(self, cr, obj, id, name, value, uid=None, context={}):
        """Function-field setter: store base64 `value` on disk (default)
        or in the database when context['store_method'] == 'db'."""
        if not value:
            return True
        if (not context) or context.get('store_method','fs')=='fs':
            path = os.path.join(os.getcwd(), "filestore")
            if not os.path.isdir(path):
                os.mkdir(path)
            flag = None
            # This can be improved
            # Reuse the first sub-directory holding fewer than 4000 files.
            for dirs in os.listdir(path):
                if os.path.isdir(os.path.join(path,dirs)) and len(os.listdir(os.path.join(path,dirs)))<4000:
                    flag = dirs
                    break
            flag = flag or create_directory(path)
            filename = random_name()
            fname = os.path.join(path, flag, filename)
            fp = file(fname,'wb')
            v = base64.decodestring(value)
            fp.write(v)
            # NOTE(review): fp is never closed, and `filesize` (the actual
            # on-disk size) is computed but len(v) is what gets stored.
            filesize = os.stat(fname).st_size
            cr.execute('update ir_attachment set store_fname=%s,store_method=%s,file_size=%d where id=%d', (os.path.join(flag,filename),'fs',len(v),id))
        else:
            # NOTE(review): `psycopg` is never imported in this file, so
            # this branch raises NameError as written -- confirm the
            # missing `import psycopg`.
            cr.execute('update ir_attachment set datas=%s,store_method=%s where id=%d', (psycopg.Binary(value),'db',id))
        return True

    _columns = {
        'user_id': fields.many2one('res.users', 'Owner', select=1),
        'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
        'parent_id': fields.many2one('document.directory', 'Directory', select=1),
        'file_size': fields.integer('File Size', required=True),
        'file_type': fields.char('Content Type', size=32),
        'index_content': fields.text('Indexed Content'),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'store_method': fields.selection([('db','Database'),('fs','Filesystem'),('link','Link')], "Storing Method"),
        'datas': fields.function(_data_get,method=True,store=True,fnct_inv=_data_set,string='File Content',type="binary"),
        'store_fname': fields.char('Stored Filename', size=200),
        'res_model': fields.char('Attached Model', size=64), #res_model
        'res_id': fields.integer('Attached ID'), #res_id
        'partner_id':fields.many2one('res.partner', 'Partner', select=1),
        'title': fields.char('Resource Title',size=64),
    }
    _defaults = {
        'user_id': lambda self,cr,uid,ctx:uid,
        'file_size': lambda self,cr,uid,ctx:0,
        'store_method': lambda *args: 'db'
    }
    _sql_constraints = [
        ('filename_uniq', 'unique (name,parent_id,res_id,res_model)', 'The file name must be unique !')
    ]

    def _check_duplication(self, cr, uid,vals):
        """Return False when a file with the same name already exists in the
        same directory / attached record (ORM-level mirror of filename_uniq).

        NOTE(review): the trailing `or False` on the res_model term is
        misplaced -- it applies to the whole tuple, not the value; harmless
        but confirm the intended domain.
        """
        if 'name' in vals:
            res=self.search(cr,uid,[('name','=',vals['name']),('parent_id','=','parent_id' in vals and vals['parent_id'] or False),('res_id','=','res_id' in vals and vals['res_id'] or False),('res_model','=','res_model' in vals and vals['res_model']) or False])
            if len(res):
                return False
        return True

    def write(self, cr, uid, ids, vals, context=None):
        """Write, then best-effort re-extract the searchable index text."""
        if not self._check_duplication(cr,uid,vals):
            raise except_orm('ValidateError', 'File name must be unique!')
        result = super(document_file,self).write(cr,uid,ids,vals,context=context)
        try:
            for f in self.browse(cr, uid, ids, context=context):
                # NOTE(review): this mutates the caller's vals dict and
                # re-writes index_content for all ids on each iteration.
                if 'datas' not in vals:
                    vals['datas']=f.datas
                res = content_index(base64.decodestring(vals['datas']), f.datas_fname, f.file_type or None)
                super(document_file,self).write(cr, uid, ids, {
                    'index_content': res
                })
        except:
            # Indexing is best-effort: never fail the write because of it.
            pass
        return result

    def create(self, cr, uid, vals, context={}):
        """Create an attachment: derive title/partner from the attached
        record, compute file_size, then best-effort index the content."""
        vals['title']=vals['name']
        if vals.get('res_id', False) and vals.get('res_model',False):
            obj_model=self.pool.get(vals['res_model'])
            result = obj_model.read(cr, uid, [vals['res_id']], context=context)
            if len(result):
                obj=result[0]
                vals['title'] = (obj['name'] or '')[:60]
                if obj_model._name=='res.partner':
                    vals['partner_id']=obj['id']
                elif 'address_id' in obj:
                    # presumably obj['address_id'] is a bare id here, not a
                    # (id, name) pair -- confirm against read() semantics.
                    address=self.pool.get('res.partner.address').read(cr,uid,[obj['address_id']],context=context)
                    if len(address):
                        vals['partner_id']=address[0]['partner_id'] or False
                elif 'partner_id' in obj:
                    if isinstance(obj['partner_id'],tuple) or isinstance(obj['partner_id'],list):
                        vals['partner_id']=obj['partner_id'][0]
                    else:
                        vals['partner_id']=obj['partner_id']
        datas=None
        if 'datas' not in vals:
            # No inline content: fetch it from the 'link' URL.
            # NOTE(review): raises KeyError when neither datas nor link is
            # given -- confirm callers always supply one of them.
            import urllib
            datas=base64.encodestring(urllib.urlopen(vals['link']).read())
        else:
            datas=vals['datas']
        # file_size is the base64-encoded length, not the decoded size.
        vals['file_size']= len(datas)
        if not self._check_duplication(cr,uid,vals):
            raise except_orm('ValidateError', 'File name must be unique!')
        result = super(document_file,self).create(cr, uid, vals, context)
        # NOTE(review): mid-transaction commit -- the record persists even
        # if the caller's surrounding transaction later rolls back.
        cr.commit()
        try:
            res = content_index(base64.decodestring(datas), vals['datas_fname'], vals.get('content_type', None))
            super(document_file,self).write(cr, uid, [result], {
                'index_content': res,
            })
            cr.commit()
        except:
            # Indexing is best-effort: never fail the create because of it.
            pass
        return result

    def unlink(self,cr, uid, ids, context={}):
        """Delete attachments, removing filesystem-stored payloads first."""
        for f in self.browse(cr, uid, ids, context):
            if f.store_method=='fs':
                try:
                    path = os.path.join(os.getcwd(),'filestore',f.store_fname)
                    os.unlink(path)
                except:
                    # A missing file must not block the record deletion.
                    pass
        return super(document_file, self).unlink(cr, uid, ids, context)
document_file()

View File

@ -0,0 +1,6 @@
<?xml version="1.0"?>
<terp>
<data noupdate="1">
</data>
</terp>

View File

@ -0,0 +1,93 @@
<?xml version="1.0"?>
<terp>
<data noupdate="1">
<record model="document.directory" id="dir_root">
<field name="name">Main Repository</field>
<field name="user_id" ref="base.user_admin"/>
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_my_folder">
<field name="name">My Folder</field>
<field name="parent_id" ref="dir_root"/>
<field name="user_id" ref="base.user_admin"/>
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_partner_category">
<field name="name">Partner Category</field>
<field name="parent_id" ref="dir_root"/>
<field name="type">ressource</field>
<field name="ressource_tree">1</field>
<field name="ressource_type_id" search="[('model','=','res.partner.category')]" />
<field name="user_id" ref="base.user_admin"/>
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_partner">
<field name="name">Partner</field>
<field name="type">ressource</field>
<field name="domain">[('category_id','in',[active_id])]</field>
<field name="ressource_type_id" search="[('model','=','res.partner')]" />
<field name="ressource_parent_type_id" search="[('model','=','res.partner.category')]" />
<field name="user_id" ref="base.user_admin"/>
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_partner_contacts">
<field name="name">Contacts</field>
<field name="type">ressource</field>
<field name="domain">[('partner_id','=',active_id)]</field>
<field name="ressource_type_id" search="[('model','=','res.partner.address')]" />
<field name="ressource_parent_type_id" search="[('model','=','res.partner')]" />
<field name="user_id" ref="base.user_admin"/>
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_my_partner">
<field name="name">My Folder</field>
<field name="user_id" ref="base.user_admin"/>
<field name="ressource_parent_type_id" search="[('model','=','res.partner')]" />
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_personnal_folder">
<field name="name">Personnal Folders</field>
<field name="parent_id" ref="dir_root"/>
<field name="user_id" ref="base.user_admin"/>
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_sale_order">
<field name="name">Sales Order</field>
<field name="user_id" ref="base.user_admin"/>
<field name="parent_id" ref="dir_root"/>
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_sale_order_all">
<field name="name">All Sales Order</field>
<field name="user_id" ref="base.user_admin"/>
<field name="parent_id" ref="dir_sale_order"/>
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_sale_order_salesman">
<field name="name">Sales by Salesman</field>
<field name="user_id" ref="base.user_admin"/>
<field name="parent_id" ref="dir_sale_order"/>
<field name="ressource_id" eval="0"/>
</record>
<record model="document.directory" id="dir_project">
<field name="name">Projects</field>
<field name="user_id" ref="base.user_admin"/>
<field name="parent_id" ref="dir_root"/>
<field name="ressource_id" eval="0"/>
</record>
</data>
</terp>

View File

@ -0,0 +1,175 @@
<terp>
<data>
<menuitem name="Document Management" icon="terp-stock" id="menu_document"/>
<record model="ir.ui.view" id="view_document_directory_form">
<field name="name">document.directory</field>
<field name="model">document.directory</field>
<field name="type">form</field>
<field name="arch" type="xml">
<form string="Directories">
<notebook>
<page string="Definition">
<field name="name" select="1" colspan="4"/>
<field name="user_id"/>
<field name="parent_id"/>
<separator string="Directory Type" colspan="4"/>
<field name="type"/>
<field name="ressource_type_id" on_change="onchange_content_id(ressource_type_id)"/>
<newline/>
<field name="domain"/>
<field name="ressource_tree"/>
<field name="ressource_id" select="2"/>
<field name="ressource_parent_type_id"/>
<separator string="Contents" colspan="4"/>
<field name="content_ids" nolabel="1" colspan="4">
<form string="Contents">
<field name="name"/>
<field name="sequence"/>
<field name="suffix"/>
<field name="extension"/>
<field name="report_id" domain="[('model_id','=',parent.ressource_type_id)]"/>
</form>
<tree string="Contents">
<field name="sequence"/>
<field name="name"/>
<field name="suffix"/>
<field name="extension"/>
<field name="report_id"/>
</tree>
</field>
</page><page string="Security">
<field name="group_ids" colspan="4" nolabel="1"/>
</page>
</notebook>
</form>
</field>
</record>
<record model="ir.ui.view" id="view_document_directory_tree">
<field name="name">document.directory</field>
<field name="model">document.directory</field>
<field name="type">tree</field>
<field name="field_parent">child_ids</field>
<field name="arch" type="xml">
<tree string="Directories">
<field name="name"/>
<field name="type"/>
<field name="user_id"/>
<field name="create_date"/>
<field name="write_date"/>
</tree>
</field>
</record>
<record model="ir.actions.act_window" id="action_document_directory_form">
<field name="type">ir.actions.act_window</field>
<field name="res_model">document.directory</field>
<field name="view_type">form</field>
<field name="view_mode">tree,form</field>
</record>
<menuitem
name="Document Management/Configuration/Directories"
action="action_document_directory_form"
id="menu_document_directories"/>
<record model="ir.actions.act_window" id="action_document_directory_tree">
<field name="type">ir.actions.act_window</field>
<field name="res_model">document.directory</field>
<field name="view_type">tree</field>
<field name="domain">[('ressource_parent_type_id','=',False),('parent_id','=',False)]</field>
</record>
<menuitem
name="Document Management/Directories' Structure"
action="action_document_directory_tree"
id="menu_document_directories_tree"/>
<record model="ir.ui.view" id="view_document_file_form">
<field name="name">ir.attachment</field>
<field name="model">ir.attachment</field>
<field name="priority" eval="1"/>
<field name="type">form</field>
<field name="arch" type="xml">
<form string="Files">
<notebook>
<page string="Definition">
<field name="name" select="1" colspan="4"/>
<field name="title" select="1" colspan="4"/>
<newline/>
<field name="datas"/>
<field name="file_size"/>
<newline/>
<field name="parent_id"/>
<newline/>
<field name="user_id"/>
<field name="partner_id"/>
<newline/>
<field name="create_date"/>
<field name="create_uid"/>
<field name="write_date"/>
<field name="write_uid"/>
</page><page string="Indexed Content">
<field name="index_content" nolabel="1" colspan="4" select="1"/>
</page><page string="Security">
<field name="group_ids" colspan="4" nolabel="1"/>
</page>
</notebook>
</form>
</field>
</record>
<record model="ir.ui.view" id="view_document_file_tree">
<field name="name">ir.attachment</field>
<field name="model">ir.attachment</field>
<field name="type">tree</field>
<field name="priority" eval="1"/>
<field name="arch" type="xml">
<tree string="Files">
<field name="name"/>
<field name="title" />
<field name="partner_id"/>
<field name="file_size"/>
<field name="user_id"/>
<field name="write_date"/>
</tree>
</field>
</record>
<record model="ir.actions.act_window" id="action_document_file_form">
<field name="type">ir.actions.act_window</field>
<field name="res_model">ir.attachment</field>
<field name="view_type">form</field>
</record>
<menuitem
name="Document Management/Search a File"
action="action_document_file_form"
id="menu_document_files"/>
<record model="ir.actions.act_window" id="action_document_file_directory_form">
<field name="type">ir.actions.act_window</field>
<field name="res_model">ir.attachment</field>
<field name="view_type">form</field>
<field name="domain">[('parent_id','=',active_id)]</field>
</record>
<record model="ir.values" id="ir_action_document_file_directory_form">
<field name="key2" eval="'tree_but_open'"/>
<field name="model" eval="'document.directory'"/>
<field name="name">Browse Files</field>
<field name="value" eval="'ir.actions.act_window,%d'%action_document_file_directory_form"/>
<field name="object" eval="True"/>
</record>
<record model="ir.ui.view" id="view_attachment_form_inherit">
<field name="name">ir.attachment.view.inherit</field>
<field name="model">ir.attachment</field>
<field name="inherit_id" ref="base.view_attachment_form"/>
<field name="arch" type="xml">
<field name="datas_fname" position = "replace" >
</field>
</field>
</record>
</data>
</terp>

View File

@ -0,0 +1,22 @@
import threading
import ftpserver
import authorizer
import abstracted_fs
PORT = 8021
HOST = ''
class ftp_server(threading.Thread):
    """Background thread running the document-management FTP server."""

    def run(self):
        # Authenticate all FTP sessions through our permissive authorizer.
        autho = authorizer.authorizer()
        ftpserver.FTPHandler.authorizer = autho
        # NOTE(review): these are set on the *module*, not on FTPHandler
        # or the server instance -- confirm against the bundled ftpserver
        # (pyftpdlib-style) API that this actually limits connections.
        ftpserver.max_cons = 300
        ftpserver.max_cons_per_ip = 50
        # Serve files out of the ORM-backed virtual filesystem.
        ftpserver.FTPHandler.abstracted_fs = abstracted_fs.abstracted_fs
        address = (HOST, PORT)
        ftpd = ftpserver.FTPServer(address, ftpserver.FTPHandler)
        ftpd.serve_forever()
# Start the FTP server thread as a side effect of importing this module.
ds = ftp_server()
ds.start()

View File

@ -0,0 +1,751 @@
import os
import time
from tarfile import filemode
import StringIO
import base64
import glob
import fnmatch
import pooler
import netsvc
import posix
from service import security
class abstracted_fs:
"""A class used to interact with the file system, providing a high
level, cross-platform interface compatible with both Windows and
UNIX style filesystems.
It provides some utility methods and some wraps around operations
involved in file creation and file system operations like moving
files or removing directories.
Instance attributes:
- (str) root: the user home directory.
- (str) cwd: the current working directory.
- (str) rnfr: source file to be renamed.
"""
# Ok
def db_list(self):
    """Return the names of the databases that should be exposed over
    FTP: only those with a 'document%' module installed."""
    s = netsvc.LocalService('db')
    result = s.list()
    self.db_name_list=[]
    for db_name in result:
        db = pooler.get_db_only(db_name)
        cr = db.cursor()
        # Expose the database only if a document* module is installed.
        cr.execute("select id from ir_module_module where name like 'document%' and state='installed' ")
        res=cr.fetchone()
        if res and len(res):
            self.db_name_list.append(db_name)
        cr.close()
    return self.db_name_list
# Ok
def __init__(self):
    self.root = None   # (str) the user home directory
    self.cwd = '/'     # (str) current working directory (virtual path)
    self.rnfr = None   # (str) source file of a pending RNFR/RNTO rename
# --- Pathname / conversion utilities
# Ok
def ftpnorm(self, ftppath):
"""Normalize a "virtual" ftp pathname (tipically the raw string
coming from client) depending on the current working directory.
Example (having "/foo" as current working directory):
'x' -> '/foo/x'
Note: directory separators are system independent ("/").
Pathname returned is always absolutized.
"""
if os.path.isabs(ftppath):
p = os.path.normpath(ftppath)
else:
p = os.path.normpath(os.path.join(self.cwd, ftppath))
# normalize string in a standard web-path notation having '/'
# as separator.
p = p.replace("\\", "/")
# os.path.normpath supports UNC paths (e.g. "//a/b/c") but we
# don't need them. In case we get an UNC path we collapse
# redundant separators appearing at the beginning of the string
while p[:2] == '//':
p = p[1:]
# Anti path traversal: don't trust user input, in the event
# that self.cwd is not absolute, return "/" as a safety measure.
# This is for extra protection, maybe not really necessary.
if not os.path.isabs(p):
p = "/"
return p
# Ok
def ftp2fs(self, path_orig, data):
path = self.ftpnorm(path_orig)
if path and path=='/':
return None
path2 = filter(None,path.split('/'))[1:]
(cr, uid, pool) = data
res = pool.get('document.directory').get_object(cr, uid, path2[:])
if not res:
raise OSError(2, 'Not such file or directory.')
return res
# Ok
def fs2ftp(self, node):
res = node and ('/' + node.cr.dbname + '/' + node.path) or '/'
return res
# Ok
def validpath(self, path):
"""Check whether the path belongs to user's home directory.
Expected argument is a "real" filesystem pathname.
If path is a symbolic link it is resolved to check its real
destination.
Pathnames escaping from user's root directory are considered
not valid.
"""
return path and True or False
# --- Wrapper methods around open() and tempfile.mkstemp
# Ok
def create(self, node, objname, mode):
    """Create (or reuse) the ir.attachment named *objname* under *node*
    and return a writable file-like object for it.

    The returned wrapper buffers all writes in memory and pushes the
    base64-encoded payload into the attachment record on close().
    """
    try:
        class file_wrapper(StringIO.StringIO):
            # In-memory buffer bound to an ir.attachment id; the record
            # is only updated when the FTP client closes the transfer.
            def __init__(self, sstr='', ressource_id=False, dbname=None, uid=1, name=''):
                StringIO.StringIO.__init__(self, sstr)
                self.ressource_id = ressource_id
                self.name = name
                self.dbname = dbname
                self.uid = uid
            def close(self, *args, **kwargs):
                # Open a fresh cursor: the one used at create() time may
                # be long gone by the time the upload finishes.
                db,pool = pooler.get_db_and_pool(self.dbname)
                cr = db.cursor()
                uid =self.uid
                val = self.getvalue()
                val2 = {
                    'datas': base64.encodestring(val),
                    'file_size': len(val),
                }
                pool.get('ir.attachment').write(cr, uid, [self.ressource_id], val2)
                cr.commit()
                StringIO.StringIO.close(self, *args, **kwargs)
        cr = node.cr
        uid = node.uid
        pool = pooler.get_pool(cr.dbname)
        fobj = pool.get('ir.attachment')
        # Naive extension extraction: the text after the first dot.
        ext = objname.find('.') >0 and objname.split('.')[1] or False
        # TODO: test if already exist and modify in this case if node.type=file
        ### checked already exits
        object2=node and node.object2 or False
        object=node and node.object or False
        cid=False
        # Look for an existing attachment with the same name at the same
        # location, so a re-upload updates it instead of duplicating it.
        where=[('name','=',objname)]
        if object and (object.type in ('directory','ressource')) or object2:
            where.append(('parent_id','=',object.id))
        else:
            where.append(('parent_id','=',False))
        if object2:
            where +=[('res_id','=',object2.id),('res_model','=',object2._name)]
        cids = fobj.search(cr, uid,where)
        if len(cids):
            cid=cids[0]
        if not cid:
            # No match: create an empty attachment to be filled on close().
            val = {
                'name': objname,
                'datas_fname': objname,
                'datas': '',
                'file_size': 0L,
                'file_type': ext,
            }
            if object and (object.type in ('directory','ressource')) or not object2:
                val['parent_id']= object and object.id or False
            partner = False
            if object2:
                # Link the attachment to the record's partner when it has
                # one (or to the record itself if it IS a partner).
                if 'partner_id' in object2 and object2.partner_id.id:
                    partner = object2.partner_id.id
                if object2._name == 'res.partner':
                    partner = object2.id
                val.update( {
                    'res_model': object2._name,
                    'partner_id': partner,
                    'res_id': object2.id
                })
            cid = fobj.create(cr, uid, val, context={})
            cr.commit()
        s = file_wrapper('', cid, cr.dbname, uid, )
        return s
    except Exception,e:
        print e
        raise OSError(1, 'Operation not permited.')
# Ok
def open(self, node, mode):
    """Return a read-only file-like object for *node*.

    'file' nodes serve the stored attachment payload; 'content' nodes
    render their configured report on the fly and serve the PDF.
    *mode* is ignored: all reads come from an in-memory buffer.
    """
    if not node:
        raise OSError(1, 'Operation not permited.')
    # Reading operation
    if node.type=='file':
        if not self.isfile(node):
            raise OSError(1, 'Operation not permited.')
        # Attachment payloads are stored base64-encoded in the db.
        s = StringIO.StringIO(base64.decodestring(node.object.datas or ''))
        s.name = node
        return s
    elif node.type=='content':
        cr = node.cr
        uid = node.uid
        pool = pooler.get_pool(cr.dbname)
        report = pool.get('ir.actions.report.xml').browse(cr, uid, node.content['report_id']['id'])
        srv = netsvc.LocalService('report.'+report.report_name)
        # Render the report for the node's record and serve the result.
        pdf,pdftype = srv.create(cr, uid, [node.object.id], {}, {})
        s = StringIO.StringIO(pdf)
        s.name = node
        return s
    else:
        raise OSError(1, 'Operation not permited.')
# ok, but need test more
def mkstemp(self, suffix='', prefix='', dir=None, mode='wb'):
    """A wrap around tempfile.mkstemp creating a file with a unique
    name. Unlike mkstemp it returns an object with a file-like
    interface.
    """
    # NOTE(review): this unconditional raise makes everything below it
    # unreachable dead code; raising a plain string is also invalid on
    # Python 3.  Confirm whether the versioning logic below was meant
    # to be enabled.
    raise 'Not Yet Implemented'
    # class FileWrapper:
    #     def __init__(self, fd, name):
    #         self.file = fd
    #         self.name = name
    #     def __getattr__(self, attr):
    #         return getattr(self.file, attr)
    #
    # text = not 'b' in mode
    # # max number of tries to find out a unique file name
    # tempfile.TMP_MAX = 50
    # fd, name = tempfile.mkstemp(suffix, prefix, dir, text=text)
    # file = os.fdopen(fd, mode)
    # return FileWrapper(file, name)
    text = not 'b' in mode
    # for unique file , maintain version if duplicate file
    if dir:
        cr = dir.cr
        uid = dir.uid
        pool = pooler.get_pool(cr.dbname)
        object=dir and dir.object or False
        object2=dir and dir.object2 or False
        # Count existing attachments with the same prefix to derive a
        # '.vN.' version marker for the new name.
        res=pool.get('ir.attachment').search(cr,uid,[('name','like',prefix),('parent_id','=',object and object.type in ('directory','ressource') and object.id or False),('res_id','=',object2 and object2.id or False),('res_model','=',object2 and object2._name or False)])
        if len(res):
            pre = prefix.split('.')
            prefix=pre[0] + '.v'+str(len(res))+'.'+pre[1]
    #prefix = prefix + '.'
    return self.create(dir,suffix+prefix,text)
# Ok
def chdir(self, path):
if not path:
self.cwd='/'
return None
if path.type in ('collection','database'):
self.cwd = self.fs2ftp(path)
else:
raise OSError(1, 'Operation not permited.')
# Ok
def mkdir(self, node, basename):
    """Create the specified directory."""
    if not node:
        raise OSError(1, 'Operation not permited.')
    try:
        object2=node and node.object2 or False
        object=node and node.object or False
        cr = node.cr
        uid = node.uid
        pool = pooler.get_pool(cr.dbname)
        # A 'ressource' directory with no bound record cannot hold
        # plain subdirectories.
        if node.object and (node.object.type=='ressource') and not node.object2:
            raise OSError(1, 'Operation not permited.')
        val = {
            'name': basename,
            'ressource_parent_type_id': object and object.ressource_type_id.id or False,
            'ressource_id': object2 and object2.id or False
        }
        if (object and (object.type in ('directory'))) or not object2:
            val['parent_id'] = object and object.id or False
        # TODO: check if it already exists before creating it!
        pool.get('document.directory').create(cr, uid, val)
        cr.commit()
    except Exception,e:
        print e
        raise OSError(1, 'Operation not permited.')
# Ok
def close_cr(self, data):
if data:
data[0].close()
return True
def get_cr(self, path):
    """Open a cursor on the database named by the first component of
    *path* and authenticate the FTP user against it.

    Returns a (cr, uid, pool) triple, or None for the virtual root.
    """
    path = self.ftpnorm(path)
    if path=='/':
        return None
    dbname = path.split('/')[1]
    try:
        db,pool = pooler.get_db_and_pool(dbname)
    except:
        # NOTE(review): bare except hides the real error (unknown db,
        # connection failure, ...) behind a generic EPERM.
        raise OSError(1, 'Operation not permited.')
    cr = db.cursor()
    # self.username / self.password are set on us by the FTP handler.
    uid = security.login(dbname, self.username, self.password)
    if not uid:
        raise OSError(1, 'Operation not permited.')
    return cr, uid, pool
# Ok
def listdir(self, path):
    """List the content of a directory."""
    class false_node:
        # Lightweight stand-in presenting each database as a top-level
        # directory of the virtual root.
        object = None
        type = 'database'
        def __init__(self, db):
            self.path = '/'+db
    if path is None:
        # Root listing: one pseudo-directory per eligible database.
        result = []
        for db in self.db_list():
            result.append(false_node(db))
        return result
    return path.children()
# Ok
def rmdir(self, node):
    """Remove the specified directory."""
    cr = node.cr
    uid = node.uid
    pool = pooler.get_pool(cr.dbname)
    object2=node and node.object2 or False
    object=node and node.object or False
    if object._table_name=='document.directory':
        if node.children():
            raise OSError(39, 'Directory not empty.')
        res = pool.get('document.directory').unlink(cr, uid, [object.id])
    else:
        # NOTE(review): a non-directory node reports ENOTEMPTY here;
        # ENOTDIR or EPERM would describe the failure better.
        raise OSError(39, 'Directory not empty.')
    cr.commit()
# Ok
def remove(self, node):
    """Remove the specified file."""
    cr = node.cr
    uid = node.uid
    pool = pooler.get_pool(cr.dbname)
    object2=node and node.object2 or False
    object=node and node.object or False
    if not object:
        raise OSError(2, 'Not such file or directory.')
    # Only plain attachments may be deleted; directories go through
    # rmdir() and dynamic 'content' nodes cannot be removed at all.
    if object._table_name=='ir.attachment':
        res = pool.get('ir.attachment').unlink(cr, uid, [object.id])
    else:
        raise OSError(1, 'Operation not permited.')
    cr.commit()
# Ok
def rename(self, src, dst_basedir,dst_basename):
    """
    Renaming operation, the effect depends on the src:
    * A file: read, create and remove
    * A directory: change the parent and reassign childs to ressource
    """
    try:
        if src.type=='collection':
            if src.object._table_name <> 'document.directory':
                raise OSError(1, 'Operation not permited.')
            result = {
                'directory': [],
                'attachment': []
            }
            # Compute all childs to set the new ressource ID
            child_ids = [src]
            while len(child_ids):
                node = child_ids.pop(0)
                child_ids += node.children()
                if node.type =='collection':
                    result['directory'].append(node.object.id)
                    if (not node.object.ressource_id) and node.object2:
                        raise OSError(1, 'Operation not permited.')
                elif node.type =='file':
                    result['attachment'].append(node.object.id)
            cr = src.cr
            uid = src.uid
            pool = pooler.get_pool(cr.dbname)
            object2=src and src.object2 or False
            object=src and src.object or False
            if object2 and not object.ressource_id:
                raise OSError(1, 'Operation not permited.')
            # Re-parent the moved directory itself.
            val = {
                'name':dst_basename,
            }
            if (dst_basedir.object and (dst_basedir.object.type in ('directory'))) or not dst_basedir.object2:
                val['parent_id'] = dst_basedir.object and dst_basedir.object.id or False
            else:
                val['parent_id'] = False
            res = pool.get('document.directory').write(cr, uid, [object.id],val)
            # Derive the new resource binding from the destination.
            if dst_basedir.object2:
                ressource_type_id = pool.get('ir.model').search(cr,uid,[('model','=',dst_basedir.object2._name)])[0]
                ressource_id = dst_basedir.object2.id
                title = dst_basedir.object2.name
                ressource_model = dst_basedir.object2._name
                if dst_basedir.object2._name=='res.partner':
                    partner_id=dst_basedir.object2.id
                else:
                    partner_id= dst_basedir.object2.partner_id and dst_basedir.object2.partner_id.id or False
            else:
                ressource_type_id = False
                ressource_id=False
                ressource_model = False
                partner_id = False
                title = False
            # Propagate the new binding to every child directory and file.
            pool.get('document.directory').write(cr, uid, result['directory'], {
                'ressource_id': ressource_id,
                'ressource_parent_type_id': ressource_type_id
            })
            val = {
                'res_id': ressource_id,
                'res_model': ressource_model,
                'title': title,
                'partner_id': partner_id
            }
            pool.get('ir.attachment').write(cr, uid, result['attachment'], val)
            if (not val['res_id']) and result['attachment']:
                # ORM writes False as 0 on integer columns; force NULL.
                dst_basedir.cr.execute('update ir_attachment set res_id=NULL where id in ('+','.join(map(str,result['attachment']))+')')
            cr.commit()
        elif src.type=='file':
            pool = pooler.get_pool(src.cr.dbname)
            val = {
                'partner_id':False,
                #'res_id': False,
                'res_model': False,
                'name': dst_basename,
                'datas_fname': dst_basename,
                'title': dst_basename,
            }
            if (dst_basedir.object and (dst_basedir.object.type in ('directory','ressource'))) or not dst_basedir.object2:
                val['parent_id'] = dst_basedir.object and dst_basedir.object.id or False
            else:
                val['parent_id'] = False
            if dst_basedir.object2:
                val['res_model'] = dst_basedir.object2._name
                val['res_id'] = dst_basedir.object2.id
                val['title'] = dst_basedir.object2.name
                if dst_basedir.object2._name=='res.partner':
                    val['partner_id']=dst_basedir.object2.id
                else:
                    val['partner_id']= dst_basedir.object2.partner_id and dst_basedir.object2.partner_id.id or False
            elif src.object.res_id:
                # I had to do that because writing False to an integer writes 0 instead of NULL
                # change if one day we decide to improve osv/fields.py
                dst_basedir.cr.execute('update ir_attachment set res_id=NULL where id=%d', (src.object.id,))
            pool.get('ir.attachment').write(src.cr, src.uid, [src.object.id], val)
            src.cr.commit()
        elif src.type=='content':
            # Dynamic content cannot be moved; copy it into a real file.
            src_file=self.open(src,'r')
            dst_file=self.create(dst_basedir,dst_basename,'w')
            dst_file.write(src_file.getvalue())
            dst_file.close()
            src_file.close()
            src.cr.commit()
        else:
            raise OSError(1, 'Operation not permited.')
    except Exception,err:
        print err
        raise OSError(1,'Operation not permited.')
# Nearly Ok
def stat(self, node):
    """Emulate os.stat() for a virtual node.

    Starts from the real stat of '/' and overrides mode, size and the
    three timestamps for file nodes; directory-like nodes keep the
    root's metadata.
    """
    r = list(os.stat('/'))
    if self.isfile(node):
        r[0] = 33188  # st_mode: regular file, 0644 (S_IFREG | rw-r--r--)
        r[6] = self.getsize(node)
        r[7] = self.getmtime(node)
        r[8] = self.getmtime(node)
        r[9] = self.getmtime(node)
    return posix.stat_result(r)
# No symlinks in the virtual filesystem: lstat is just stat.
lstat = stat
# --- Wrapper methods around os.path.*
# Ok
def isfile(self, node):
if node and (node.type not in ('collection','database')):
return True
return False
# Ok
def islink(self, path):
    """Return True if path is a symbolic link."""
    # Symbolic links do not exist in the ORM-backed filesystem.
    return False
# Ok
def isdir(self, node):
"""Return True if path is a directory."""
if node is None:
return True
if node and (node.type in ('collection','database')):
return True
return False
# Ok
def getsize(self, node):
    """Return the size of the specified file in bytes."""
    result = 0L
    if node.type=='file':
        # file_size may be NULL/False in the database; report 0 then.
        result = node.object.file_size or 0L
    return result
# Ok
def getmtime(self, node):
    """Return the last modified time as a number of seconds since
    the epoch."""
    if node.object and node.type<>'content':
        # write_date/create_date is a 'YYYY-MM-DD HH:MM:SS[.ffffff]'
        # string; keep only the first 19 chars for strptime.
        dt = (node.object.write_date or node.object.create_date)[:19]
        result = time.mktime(time.strptime(dt, '%Y-%m-%d %H:%M:%S'))
    else:
        # Dynamic content: pretend it was modified just now.
        result = time.mktime(time.localtime())
    return result
# Ok
def realpath(self, path):
    """Return the canonical version of path eliminating any
    symbolic links encountered in the path (if they are
    supported by the operating system).
    """
    # No symlinks in the virtual filesystem: every path is canonical.
    return path
# Ok
def lexists(self, path):
"""Return True if path refers to an existing path, including
a broken or circular symbolic link.
"""
return path and True or False
exists = lexists
# Ok, can be improved
def glob1(self, dirname, pattern):
    """Return a list of files matching a dirname pattern
    non-recursively.
    Unlike glob.glob1 raises exception if os.listdir() fails.
    """
    names = self.listdir(dirname)
    if pattern[0] != '.':
        # Hide dot-entries unless the pattern explicitly asks for them.
        names = filter(lambda x: x.path[0] != '.', names)
    # NOTE(review): fnmatch.filter() normcases each entry as a string,
    # but *names* holds node objects, not str -- verify this actually
    # matches anything against the pattern.
    return fnmatch.filter(names, pattern)
# --- Listing utilities
# note: the following operations are no more blocking
# Ok
def get_list_dir(self, path):
    """Return an iterator object that yields a directory listing
    in a form suitable for LIST command.
    """
    if self.isdir(path):
        listing = self.listdir(path)
        #listing.sort()
        return self.format_list(path and path.path or '/', listing)
    # if path is a file or a symlink we return information about it
    elif self.isfile(path):
        basedir, filename = os.path.split(path.path)
        self.lstat(path)  # raise exc in case of problems
        return self.format_list(basedir, [filename])
# Ok
def get_stat_dir(self, rawline, datacr):
    """Return an iterator object that yields a list of files
    matching a dirname pattern non-recursively in a form
    suitable for STAT command.
    - (str) rawline: the raw string passed by client as command
    argument.
    """
    ftppath = self.ftpnorm(rawline)
    if not glob.has_magic(ftppath):
        # No wildcard: list the path directly.
        return self.get_list_dir(self.ftp2fs(rawline, datacr))
    else:
        basedir, basename = os.path.split(ftppath)
        if glob.has_magic(basedir):
            # Wildcards are only supported in the last component.
            return iter(['Directory recursion not supported.\r\n'])
        else:
            basedir = self.ftp2fs(basedir, datacr)
            listing = self.glob1(basedir, basename)
            if listing:
                listing.sort()
            return self.format_list(basedir, listing)
# Ok
def format_list(self, basedir, listing, ignore_err=True):
    """Return an iterator object that yields the entries of given
    directory emulating the "/bin/ls -lA" UNIX command output.
    - (str) basedir: the absolute dirname.
    - (list) listing: the node entries in basedir
    - (bool) ignore_err: when False raise exception if os.lstat()
    call fails.
    On platforms which do not support the pwd and grp modules (such
    as Windows), ownership is printed as "owner" and "group" as a
    default, and number of hard links is always "1". On UNIX
    systems, the actual owner, group, and number of links are
    printed.
    This is how output appears to client:
    -rw-rw-rw- 1 owner group 7045120 Sep 02 3:47 music.mp3
    drwxrwxrwx 1 owner group 0 Aug 31 18:50 e-books
    -rw-rw-rw- 1 owner group 380 Sep 02 3:40 module.py
    """
    for file in listing:
        try:
            st = self.lstat(file)
        except os.error:
            if ignore_err:
                continue
            raise
        perms = filemode(st.st_mode)  # permissions
        nlinks = st.st_nlink  # number of links to inode
        if not nlinks:  # non-posix system, let's use a bogus value
            nlinks = 1
        size = st.st_size  # file size
        # Virtual nodes have no real owner; use fixed placeholders.
        uname = "owner"
        gname = "group"
        # stat.st_mtime could fail (-1) if last mtime is too old
        # in which case we return the local time as last mtime
        try:
            mtime = time.strftime("%b %d %H:%M", time.localtime(st.st_mtime))
        except ValueError:
            mtime = time.strftime("%b %d %H:%M")
        # formatting is matched with proftpd ls output
        yield "%s %3s %-8s %-8s %8s %s %s\r\n" %(perms, nlinks, uname, gname,
            size, mtime, file.path.split('/')[-1])
# Ok
def format_mlsx(self, basedir, listing, perms, facts, ignore_err=True):
    """Return an iterator object that yields the entries of a given
    directory or of a single file in a form suitable with MLSD and
    MLST commands.
    Every entry includes a list of "facts" referring the listed
    element. See RFC-3659, chapter 7, to see what every single
    fact stands for.
    - (str) basedir: the absolute dirname.
    - (list) listing: the names of the entries in basedir
    - (str) perms: the string referencing the user permissions.
    - (str) facts: the list of "facts" to be returned.
    - (bool) ignore_err: when False raise exception if os.stat()
    call fails.
    Note that "facts" returned may change depending on the platform
    and on what user specified by using the OPTS command.
    This is how output could appear to the client issuing
    a MLSD request:
    type=file;size=156;perm=r;modify=20071029155301;unique=801cd2; music.mp3
    type=dir;size=0;perm=el;modify=20071127230206;unique=801e33; ebooks
    type=file;size=211;perm=r;modify=20071103093626;unique=801e32; module.py
    """
    # NOTE(review): this method uses stat.S_ISDIR but this file's
    # imports do not include the 'stat' module -- calling it raises
    # NameError until 'import stat' is added.
    # Translate the user's permission letters into per-kind strings.
    permdir = ''.join([x for x in perms if x not in 'arw'])
    permfile = ''.join([x for x in perms if x not in 'celmp'])
    if ('w' in perms) or ('a' in perms) or ('f' in perms):
        permdir += 'c'
    if 'd' in perms:
        permdir += 'p'
    type = size = perm = modify = create = unique = mode = uid = gid = ""
    for basename in listing:
        file = os.path.join(basedir, basename)
        try:
            st = self.stat(file)
        except OSError:
            if ignore_err:
                continue
            raise
        # type + perm
        if stat.S_ISDIR(st.st_mode):
            if 'type' in facts:
                if basename == '.':
                    type = 'type=cdir;'
                elif basename == '..':
                    type = 'type=pdir;'
                else:
                    type = 'type=dir;'
            if 'perm' in facts:
                perm = 'perm=%s;' %permdir
        else:
            if 'type' in facts:
                type = 'type=file;'
            if 'perm' in facts:
                perm = 'perm=%s;' %permfile
        if 'size' in facts:
            size = 'size=%s;' %st.st_size  # file size
        # last modification time
        if 'modify' in facts:
            try:
                modify = 'modify=%s;' %time.strftime("%Y%m%d%H%M%S",
                    time.localtime(st.st_mtime))
            except ValueError:
                # stat.st_mtime could fail (-1) if last mtime is too old
                modify = ""
        if 'create' in facts:
            # on Windows we can provide also the creation time
            try:
                create = 'create=%s;' %time.strftime("%Y%m%d%H%M%S",
                    time.localtime(st.st_ctime))
            except ValueError:
                create = ""
        # UNIX only
        if 'unix.mode' in facts:
            mode = 'unix.mode=%s;' %oct(st.st_mode & 0777)
        if 'unix.uid' in facts:
            uid = 'unix.uid=%s;' %st.st_uid
        if 'unix.gid' in facts:
            gid = 'unix.gid=%s;' %st.st_gid
        # We provide unique fact (see RFC-3659, chapter 7.5.2) on
        # posix platforms only; we get it by mixing st_dev and
        # st_ino values which should be enough for granting an
        # uniqueness for the file listed.
        # The same approach is used by pure-ftpd.
        # Implementors who want to provide unique fact on other
        # platforms should use some platform-specific method (e.g.
        # on Windows NTFS filesystems MTF records could be used).
        if 'unique' in facts:
            unique = "unique=%x%x;" %(st.st_dev, st.st_ino)
        yield "%s%s%s%s%s%s%s%s%s %s\r\n" %(type, size, perm, modify, create,
            mode, uid, gid, unique, basename)

View File

@ -0,0 +1,70 @@
#import pooler
class authorizer:
    """Pass-through FTP authorizer.

    Accepts any username/password pair at the FTP layer; the real
    authentication happens later, per database, when the filesystem
    layer logs the user in (see abstracted_fs.get_cr).
    """
    read_perms = "elr"      # FTP command letters treated as read access
    write_perms = "adfmw"   # FTP command letters treated as write access
    def __init__(self):
        self.password = ''
    def validate_authentication(self, username, password):
        """Return True if the supplied username and password match the
        stored credentials."""
        # Always succeeds; the password is kept for later database login.
        self.password = password
        return True
    def impersonate_user(self, username, password):
        """Impersonate another user (noop).
        It is always called before accessing the filesystem.
        By default it does nothing. The subclass overriding this
        method is expected to provide a mechanism to change the
        current user.
        """
    def terminate_impersonation(self):
        """Terminate impersonation (noop).
        It is always called after having accessed the filesystem.
        By default it does nothing. The subclass overriding this
        method is expected to provide a mechanism to switch back
        to the original user.
        """
    def has_user(self, username):
        """Whether the username exists in the virtual users table."""
        return True
    def has_perm(self, username, perm, path=None):
        """Whether the user has permission over path (an absolute
        pathname of a file or a directory).
        Expected perm argument is one of the following letters:
        "elradfmw".
        """
        return True
        # NOTE(review): everything below is unreachable dead code; it
        # would also fail at runtime because neither 'pooler' nor
        # 'security' is imported in this module.
        paths = path.split('/')
        if not len(paths)>2:
            return True
        db_name = paths[1]
        db,pool = pooler.get_db_and_pool(db_name)
        res = security.login(db_name, username, self.password)
        return bool(res)
    def get_perms(self, username):
        """Return current user permissions."""
        return 'elr'
    def get_home_dir(self, username):
        """Return the user's home directory."""
        return '/'
    def get_msg_login(self, username):
        """Return the user's login message."""
        return 'Welcome on OpenERP document management system.'
    def get_msg_quit(self, username):
        """Return the user's quitting message."""
        return 'Bye.'

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,31 @@
#!/usr/bin/python
import sys, zipfile, xml.dom.minidom
import StringIO
class OpenDocumentTextFile :
def __init__ (self, filepath) :
zip = zipfile.ZipFile(filepath)
self.content = xml.dom.minidom.parseString(zip.read("content.xml"))
def toString (self) :
""" Converts the document to a string. """
buffer = u""
for val in ["text:p", "text:h", "text:list"]:
for paragraph in self.content.getElementsByTagName(val) :
buffer += self.textToString(paragraph) + "\n"
return buffer
def textToString(self, element) :
buffer = u""
for node in element.childNodes :
if node.nodeType == xml.dom.Node.TEXT_NODE :
buffer += node.nodeValue
elif node.nodeType == xml.dom.Node.ELEMENT_NODE :
buffer += self.textToString(node)
return buffer
# Command-line entry point: print the text of the .odt given as argv[1].
# NOTE: file() and the bare print statement make this Python-2 only.
if __name__ == "__main__" :
    s =StringIO.StringIO(file(sys.argv[1]).read())
    odt = OpenDocumentTextFile(s)
    print odt.toString().encode('ascii','replace')