bzr revid: hmo@tinyerp.com-20101112094836-qy4536d3tjvw04xc
This commit is contained in:
Harry (OpenERP) 2010-11-12 15:18:36 +05:30
commit ca33cf5313
24 changed files with 1331 additions and 127 deletions

View File

@ -21,7 +21,7 @@
import time
from document_webdav import nodes
from document.nodes import _str2time
from document.nodes import _str2time, nodefd_static
import logging
import StringIO
from orm_utils import get_last_modified
@ -381,6 +381,9 @@ class node_calendar(nodes.node_class):
res.append(child._get_caldav_calendar_data(cr))
return res
def open_data(self, cr, mode):
return nodefd_static(self, cr, mode)
def _get_caldav_calendar_description(self, cr):
uid = self.context.uid
calendar_obj = self.context._dirobj.pool.get('basic.calendar')
@ -458,14 +461,10 @@ class res_node_calendar(nodes.node_class):
self.model = res_model
self.res_id = res_id
def open(self, cr, mode=False):
if self.type in ('collection','database'):
return False
s = StringIO.StringIO(self.get_data(cr))
s.name = self
return s
def open_data(self, cr, mode):
return nodefd_static(self, cr, mode)
def get_data(self, cr, fil_obj = None):
def get_data(self, cr, fil_obj=None):
uid = self.context.uid
calendar_obj = self.context._dirobj.pool.get('basic.calendar')
context = self.context.context.copy()

View File

@ -6,7 +6,7 @@
<field name="domain">[]</field>
<field eval="1" name="resource_find_all"/>
<field eval="0" name="ressource_tree"/>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="company_id" ref="base.main_company"/>
<field name="storage_id" ref="document.storage_default"/>
<field name="type">directory</field>
@ -17,6 +17,7 @@
<field name="domain">[]</field>
<field eval="1" name="resource_find_all"/>
<field eval="0" name="ressource_tree"/>
<field name="user_id" eval="False"/>
<field name="company_id" ref="base.main_company"/>
<field model="document.directory" name="parent_id" ref="document_directory_calendars0"/>
<field name="type">directory</field>
@ -24,10 +25,10 @@
<field name="name">resources</field>
</record>
<record id="document_directory_uids0" model="document.directory">
<field name="domain">[]</field>
<field name="domain">[('id','=',uid)]</field>
<field eval="1" name="resource_find_all"/>
<field eval="0" name="ressource_tree"/>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="company_id" ref="base.main_company"/>
<field model="document.directory" name="parent_id" ref="document_directory_calendars0"/>
<field name="type">ressource</field>
@ -36,9 +37,10 @@
<field name="name">__uids__</field>
</record>
<record id="document_directory_users0" model="document.directory">
<field name="domain">[]</field>
<field name="domain">[('id','=',uid)]</field>
<field eval="1" name="resource_find_all"/>
<field eval="0" name="ressource_tree"/>
<field name="user_id" eval="False"/>
<field name="resource_field" ref="base.field_res_users_login"/>
<field name="company_id" ref="base.main_company"/>
<field model="document.directory" name="parent_id" ref="document_directory_calendars0"/>
@ -56,7 +58,7 @@
<record id="document_directory_c0" model="document.directory">
<field name="domain">[]</field>
<field eval="1" name="resource_find_all"/>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="company_id" ref="base.main_company"/>
<field model="document.directory" name="parent_id" ref="document_directory_users0"/>
<field name="storage_id" ref="document.storage_default"/>

View File

@ -4,6 +4,7 @@
<record model="ir.ui.view" id="view_calendar_collection_form">
<field name="name">Calendar Collections : Form</field>
<field name="model">document.directory</field>
<field name="priority">40</field>
<field name="type">form</field>
<field name="arch" type="xml">
<form string="Calendar Collections">
@ -22,9 +23,11 @@
<field name="name">Calendar Collections : Tree</field>
<field name="model">document.directory</field>
<field name="type">tree</field>
<field name="priority">40</field>
<field name="arch" type="xml">
<tree string="Calendar Collections" toolbar="1">
<field name="name"/>
<field name="parent_id"/>
<field name="user_id"/>
<field name="create_date"/>
<field name="write_date"/>
@ -131,12 +134,10 @@
<field name="name"/>
<field name="type"/>
<field name="user_id"/>
<field name="collection_id"/>
<field name="calendar_color" />
<field name="calendar_order"/>
<field name="has_webcal" />
<field name="create_date"/>
<field name="write_date"/>
<field name="collection_id" required="1"/>
<field name="has_webcal" groups="base.group_extended" />
<field name="calendar_color" groups="base.group_extended" />
<field name="calendar_order" groups="base.group_extended" />
</tree>
</field>
</record>

View File

@ -22,7 +22,7 @@
{
'name': 'Integrated Document Management System',
'version': '2.0',
'version': '2.1',
'category': 'Generic Modules/Others',
'description': """This is a complete document management system:
* User Authentication

View File

@ -0,0 +1,65 @@
Access control in the Document Management system
================================================
The purpose is to let the DMS act as a real-life management system for
the file handling of some small business.
The key concept, there, is the separation of access according to users
and groups.
Fact 1: Users, in general, must NOT see each other's documents, not even
their names (because they usually imply sensitive data, like eg. a doc:
"Acme Company's Patent 012356 about fooobars.odf" )
Fact 2: Users, sometimes, fail to comprehend complex ACL schemes, so we
are obliged to keep things simple, a main principle applied all over the place.
Fact 3: our system has both statically placed files and directories, as
well as dynamic (aka "resources" in our terminology) nodes.
We allow/limit the access based on 3 factors (fields):
- The "owner" field, which holds the user that created or "owns" the
file or directory.
- The "group_ids" field, on directories, which specifies group-wise
access
- The "company_id" field, for multi-company access rules [1]
[1] at multi-company setups, we may want the same file hierarchy to apply
to different companies, and also nodes to be company-specific in it.
Principle of "owner"
----------------------
Files or directories that have an empty "owner" field are public. All users
will be able to _read_ them. Only the OpenERP Administrator or specified
groups, however, will be able to modify them!
Files or directories that have an "owner" are private. Only the owning user
will be able to read or modify (including delete) them.
By default, all user's files are created with "owner" field set, thus private.
Principle of "group_ids"
-------------------------
Directories that have any group ids set will only (apart from their owner)
allow members of these groups to read them.
Directories that are created inside the above directories will initially
inherit (that is, copy) the group_ids of their parents, so that they also
allow access to the same users.
Implementation note
---------------------
Most of the principles are applied through record rules (see ir.rule object),
so an administrator can actually readjust them.
In order to have logical "areas" of folders, where different policies apply
(like group folders, personal areas), default values for directories' owners
and group_ids can be tuned (through the 'set default' functionality of
fields).
Summary
--------
Table of permissions and behavior
|| Type | Owner set | Groups set | Description ||
|| Public | - | - | Public-readable folders, admin can write ||
|| Group | - | X | Group can read, write, delete in them ||
|| Group-read | X | X | Group can read[2], owner can write/delete ||
|| Private | X | - | Only owner can read, write, delete in. ||
[2] hint: using a wide group, like "Internal users" at this setup creates the
effect of public-readable folders, with write permission to a non-admin user.

View File

@ -74,7 +74,7 @@ class document_file(osv.osv):
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'write_date': fields.datetime('Date Modified', readonly=True),
'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
'res_model': fields.char('Attached Model', size=64, readonly=True),
'res_model': fields.char('Attached Model', size=64, readonly=True, change_default=True),
'res_id': fields.integer('Attached ID', readonly=True),
# If ir.attachment contained any data before document is installed, preserve
@ -86,7 +86,7 @@ class document_file(osv.osv):
'user_id': fields.many2one('res.users', 'Owner', select=1),
# 'group_ids': fields.many2many('res.groups', 'document_group_rel', 'item_id', 'group_id', 'Groups'),
# the directory id now is mandatory. It can still be computed automatically.
'parent_id': fields.many2one('document.directory', 'Directory', select=1, required=True),
'parent_id': fields.many2one('document.directory', 'Directory', select=1, required=True, change_default=True),
'index_content': fields.text('Indexed Content'),
'partner_id':fields.many2one('res.partner', 'Partner', select=1),
'file_size': fields.integer('File Size', required=True),

View File

@ -16,13 +16,13 @@
<record model="document.directory" id="dir_root">
<field name="name">Documents</field>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="storage_id" ref="storage_default"/>
<field name="ressource_id">0</field>
</record>
<record model="document.directory" id="dir_my_folder">
<field name="name">My Folder</field>
<field name="name">Admin Folder</field>
<field name="parent_id" ref="dir_root"/>
<field name="user_id" ref="base.user_root"/>
<field name="ressource_id">0</field>
@ -37,7 +37,7 @@
<field name="ressource_id">0</field>
<field name="ressource_type_id" search="[('model','=','res.partner.category')]" />
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
</record>
<record model="document.directory" id="dir_partner">
@ -46,7 +46,7 @@
<field name="domain">[('category_id','in',[active_id])]</field>
<field name="ressource_type_id" search="[('model','=','res.partner')]" />
<field name="ressource_parent_type_id" search="[('model','=','res.partner.category')]" />
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="ressource_id">0</field>
</record>
@ -55,14 +55,14 @@
<field name="name">Personal Folders</field>
<field name="parent_id" ref="dir_root"/>
<field name="type">ressource</field>
<field name="user_id" eval="False"/>
<field name="ressource_type_id" ref="base.model_res_users" />
<field name="ressource_id">0</field>
</record>
<record model="document.directory" id="dir_product">
<field name="name">Products</field>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="parent_id" ref="dir_root"/>
<field name="ressource_id">0</field>
@ -70,7 +70,7 @@
<record model="document.directory" id="dir_sale_order">
<field name="name">Sales Order</field>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="parent_id" ref="dir_root"/>
<field name="ressource_id">0</field>
@ -78,7 +78,7 @@
<record model="document.directory" id="dir_sale_order_all">
<field name="name">All Sales Order</field>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="parent_id" ref="dir_sale_order"/>
<field name="ressource_id">0</field>
@ -86,7 +86,7 @@
<record model="document.directory" id="dir_sale_order_quote">
<field name="name">Quotations</field>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="parent_id" ref="dir_sale_order"/>
<field name="ressource_id">0</field>
@ -94,7 +94,7 @@
<record model="document.directory" id="dir_project">
<field name="name">Projects</field>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="parent_id" ref="dir_root"/>
<field name="ressource_id">0</field>

View File

@ -21,6 +21,7 @@
from osv import osv, fields
from osv.orm import except_orm
import os
import nodes
@ -38,9 +39,9 @@ class document_directory(osv.osv):
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
'domain': fields.char('Domain', size=128, help="Use a domain if you want to apply an automatic filter on visible resources."),
'user_id': fields.many2one('res.users', 'Owner'),
'storage_id': fields.many2one('document.storage', 'Storage'),
'storage_id': fields.many2one('document.storage', 'Storage', change_default=True),
'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
'parent_id': fields.many2one('document.directory', 'Parent Directory', select=1),
'parent_id': fields.many2one('document.directory', 'Parent Directory', select=1, change_default=True),
'child_ids': fields.one2many('document.directory', 'parent_id', 'Children'),
'file_ids': fields.one2many('ir.attachment', 'parent_id', 'Files'),
'content_ids': fields.one2many('document.directory.content', 'directory_id', 'Virtual Files'),
@ -48,22 +49,24 @@ class document_directory(osv.osv):
('directory','Static Directory'),
('ressource','Folders per resource'),
],
'Type', required=True, select=1,
'Type', required=True, select=1, change_default=True,
help="Each directory can either have the type Static or be linked to another resource. A static directory, as with Operating Systems, is the classic directory that can contain a set of files. The directories linked to systems resources automatically possess sub-directories for each of resource types defined in the parent directory."),
'ressource_type_id': fields.many2one('ir.model', 'Resource model',
'ressource_type_id': fields.many2one('ir.model', 'Resource model', change_default=True,
help="Select an object here and there will be one folder per record of that resource."),
'resource_field': fields.many2one('ir.model.fields', 'Name field', help='Field to be used as name on resource directories. If empty, the "name" will be used.'),
'resource_find_all': fields.boolean('Find all resources', required=True,
help="If true, all attachments that match this resource will " \
" be located. If false, only ones that have this as parent." ),
'ressource_parent_type_id': fields.many2one('ir.model', 'Parent Model',
'ressource_parent_type_id': fields.many2one('ir.model', 'Parent Model', change_default=True,
help="If you put an object here, this directory template will appear bellow all of these objects. " \
"Such directories are \"attached\" to the specific model or record, just like attachments. " \
"Don't put a parent directory if you select a parent model."),
'ressource_id': fields.integer('Resource ID'),
'ressource_id': fields.integer('Resource ID',
help="Along with Parent Model, this ID attaches this folder to a specific record of Parent Model."),
'ressource_tree': fields.boolean('Tree Structure',
help="Check this if you want to use the same tree structure as the object selected in the system."),
'dctx_ids': fields.one2many('document.directory.dctx', 'dir_id', 'Context fields'),
'company_id': fields.many2one('res.company', 'Company'),
'company_id': fields.many2one('res.company', 'Company', change_default=True),
}
@ -98,10 +101,10 @@ class document_directory(osv.osv):
_defaults = {
'company_id': lambda s,cr,uid,c: s.pool.get('res.company')._company_default_get(cr, uid, 'document.directory', context=c),
'user_id': lambda self,cr,uid,ctx: uid,
'domain': lambda self,cr,uid,ctx: '[]',
'type': lambda *args: 'directory',
'ressource_id': lambda *a: 0,
'storage_id': _get_def_storage,
'domain': '[]',
'type': 'directory',
'ressource_id': 0,
'storage_id': _get_def_storage, # Still, it is bad practice to set it everywhere.
'resource_find_all': True,
}
_sql_constraints = [
@ -188,7 +191,6 @@ class document_directory(osv.osv):
dbro = self.browse(cr, uid, ids, context=context)
if dynamic:
assert dbro.type == 'directory'
return nodes.node_res_obj
elif dbro.type == 'directory':
return nodes.node_dir
@ -211,22 +213,20 @@ class document_directory(osv.osv):
"""
return
def get_dir_permissions(self, cr, uid, ids ):
def get_dir_permissions(self, cr, uid, ids, context=None ):
"""Check what permission user 'uid' has on directory 'id'
"""
assert len(ids) == 1
id = ids[0]
cr.execute( "SELECT count(dg.item_id) AS needs, count(ug.uid) AS has " \
" FROM document_directory_group_rel dg " \
" LEFT OUTER JOIN res_groups_users_rel ug " \
" ON (dg.group_id = ug.gid AND ug.uid = %s) " \
" WHERE dg.item_id = %s ", (uid, id))
needs, has = cr.fetchone()
if needs and not has:
return 1 # still allow to descend into.
else:
return 15
res = 0
for pperms in [('read', 5), ('write', 2), ('unlink', 8)]:
try:
self.check_access_rule(cr, uid, ids, pperms[0], context=context)
res |= pperms[1]
except except_orm:
pass
return res
def _locate_child(self, cr, uid, root_id, uri,nparent, ncontext):
""" try to locate the node in uri,

View File

@ -26,6 +26,7 @@ import tools
import base64
import errno
import logging
import shutil
from StringIO import StringIO
import psycopg2
@ -48,7 +49,7 @@ DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config.get(
We have to consider 3 cases of data /retrieval/:
Given (context,path) we need to access the file (aka. node).
given (directory, context), we need one of its children (for listings, views)
given (ir.attachment, context), we needs its data and metadata (node).
given (ir.attachment, context), we need its data and metadata (node).
For data /storage/ we have the cases:
Have (ir.attachment, context), we modify the file (save, update, rename etc).
@ -100,10 +101,17 @@ class nodefd_file(nodes.node_descriptor):
if mode.endswith('b'):
mode = mode[:-1]
self.mode = mode
self._size = os.stat(path).st_size
for attr in ('closed', 'read', 'write', 'seek', 'tell'):
for attr in ('closed', 'read', 'write', 'seek', 'tell', 'next'):
setattr(self,attr, getattr(self.__file, attr))
def size(self):
return self._size
def __iter__(self):
return self
def close(self):
# TODO: locking in init, close()
fname = self.__file.name
@ -165,6 +173,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
"""
def __init__(self, parent, ira_browse, mode):
nodes.node_descriptor.__init__(self, parent)
self._size = 0L
if mode.endswith('b'):
mode = mode[:-1]
@ -172,6 +181,8 @@ class nodefd_db(StringIO, nodes.node_descriptor):
cr = ira_browse._cr # reuse the cursor of the browse object, just now
cr.execute('SELECT db_datas FROM ir_attachment WHERE id = %s',(ira_browse.id,))
data = cr.fetchone()[0]
if data:
self._size = len(data)
StringIO.__init__(self, data)
elif mode in ('w', 'w+'):
StringIO.__init__(self, None)
@ -184,6 +195,9 @@ class nodefd_db(StringIO, nodes.node_descriptor):
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
def size(self):
return self._size
def close(self):
# we now open a *separate* cursor, to update the data.
# FIXME: this may be improved, for concurrency handling
@ -241,11 +255,15 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
"""
def __init__(self, parent, ira_browse, mode):
nodes.node_descriptor.__init__(self, parent)
self._size = 0L
if mode.endswith('b'):
mode = mode[:-1]
if mode in ('r', 'r+'):
StringIO.__init__(self, base64.decodestring(ira_browse.db_datas))
data = base64.decodestring(ira_browse.db_datas)
if data:
self._size = len(data)
StringIO.__init__(self, data)
elif mode in ('w', 'w+'):
StringIO.__init__(self, None)
# at write, we start at 0 (= overwrite), but have the original
@ -257,6 +275,9 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
def size(self):
return self._size
def close(self):
# we now open a *separate* cursor, to update the data.
# FIXME: this may be improved, for concurrency handling
@ -452,11 +473,11 @@ class document_storage(osv.osv):
elif boo.type == 'realstore':
path, npath = self.__prepare_realpath(cr, file_node, ira, boo.path,
do_create = (mode[1] in ('w','a')) )
do_create = (mode[0] in ('w','a')) )
fpath = os.path.join(path, npath[-1])
if (not os.path.exists(fpath)) and mode[1] == 'r':
if (not os.path.exists(fpath)) and mode[0] == 'r':
raise IOError("File not found: %s" % fpath)
elif mode[1] in ('w', 'a') and not ira.store_fname:
elif mode[0] in ('w', 'a') and not ira.store_fname:
store_fname = os.path.join(*npath)
cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s',
(store_fname, ira.id))
@ -653,7 +674,7 @@ class document_storage(osv.osv):
if ktype == 'file':
try:
os.unlink(fname)
except Exception, e:
except Exception:
self._doclog.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
else:
self._doclog.warning("Unknown unlink key %s" % ktype)
@ -733,26 +754,34 @@ class document_storage(osv.osv):
# nothing to do for a rename, allow to change the db field
return { 'parent_id': ndir_bro.id }
elif sbro.type == 'realstore':
raise NotImplementedError("Cannot move in realstore, yet") # TODO
fname = fil_bo.store_fname
ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
path, opath = self.__prepare_realpath(cr, file_node, ira, sbro.path, do_create=False)
fname = ira.store_fname
if not fname:
return ValueError("Tried to rename a non-stored file")
path = sbro.path
oldpath = os.path.join(path, fname)
self._doclog.warning("Trying to rename a non-stored file")
if fname != os.path.join(*opath):
self._doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))
oldpath = os.path.join(path, opath[-1])
for ch in ('*', '|', "\\", '/', ':', '"', '<', '>', '?', '..'):
if ch in new_name:
raise ValueError("Invalid char %s in name %s" %(ch, new_name))
file_node.fix_ppath(cr, ira)
npath = file_node.full_path() or []
dpath = [path,]
dpath.extend(npath[:-1])
dpath.append(new_name)
newpath = os.path.join(*dpath)
# print "old, new paths:", oldpath, newpath
os.rename(oldpath, newpath)
return { 'name': new_name, 'datas_fname': new_name, 'store_fname': new_name }
npath = [sbro.path,] + (ndir_bro.get_full_path() or [])
npath = filter(lambda x: x is not None, npath)
newdir = os.path.join(*npath)
if not os.path.isdir(newdir):
self._doclog.debug("Must create dir %s", newdir)
os.makedirs(newdir)
npath.append(opath[-1])
newpath = os.path.join(*npath)
self._doclog.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
shutil.move(oldpath, newpath)
store_path = npath[1:] + [opath[-1],]
store_fname = os.path.join(*store_path)
return { 'store_fname': store_fname }
else:
raise TypeError("No %s storage" % sbro.type)

View File

@ -175,6 +175,23 @@ class node_descriptor(object):
def write(self, str):
raise NotImplementedError
def size(self):
raise NotImplementedError
def __len__(self):
return self.size()
def __nonzero__(self):
""" Ensure that a node_descriptor will never equal False
Since we do define __len__ and __iter__ for us, we must avoid
being regarded as non-true objects.
"""
return True
def next(self, str):
raise NotImplementedError
class node_class(object):
""" this is a superclass for our inodes
It is an API for all code that wants to access the document files.
@ -366,12 +383,14 @@ class node_class(object):
def create_child(self, cr, path, data=None):
""" Create a regular file under this node
"""
raise NotImplementedError(repr(self))
logger.warning("Attempted to create a file under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create files here")
def create_child_collection(self, cr, objname):
""" Create a child collection (directory) under self
"""
raise NotImplementedError(repr(self))
logger.warning("Attempted to create a collection under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create folders here")
def rm(self, cr):
raise NotImplementedError(repr(self))
@ -433,12 +452,12 @@ class node_database(node_class):
return res[0]
return None
def _child_get(self, cr, name=False, parent_id=False, domain=None):
def _child_get(self, cr, name=False, domain=None):
dirobj = self.context._dirobj
uid = self.context.uid
ctx = self.context.context.copy()
ctx.update(self.dctx)
where = [('parent_id','=',parent_id)]
where = [('parent_id','=', False), ('ressource_parent_type_id','=',False)]
if name:
where.append(('name','=',name))
is_allowed = self.check_perms(1)
@ -448,23 +467,14 @@ class node_database(node_class):
if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied")
if not domain:
domain = []
where2 = where + domain + ['|', ('type', '=', 'directory'), \
'&', ('type', '=', 'ressource'), ('ressource_parent_type_id','=',False)]
ids = dirobj.search(cr, uid, where2, context=ctx)
if domain:
where = where + domain
ids = dirobj.search(cr, uid, where, context=ctx)
res = []
for dirr in dirobj.browse(cr, uid, ids, context=ctx):
klass = dirr.get_node_class(dirr, context=ctx)
res.append(klass(dirr.name, self, self.context,dirr))
fil_obj = dirobj.pool.get('ir.attachment')
ids = fil_obj.search(cr, uid, where, context=ctx)
if ids:
for fil in fil_obj.browse(cr, uid, ids, context=ctx):
klass = self.context.node_file_class
res.append(klass(fil.name, self, self.context, fil))
return res
def _file_get(self,cr, nodename=False):
@ -485,6 +495,22 @@ def mkdosname(company_name, default='noname'):
return n
def _uid2unixperms(perms, has_owner):
""" Convert the uidperms and the owner flag to full unix bits
"""
res = 0
if has_owner:
res |= (perms & 0x07) << 6
res |= (perms & 0x05) << 3
elif perms & 0x02:
res |= (perms & 0x07) << 6
res |= (perms & 0x07) << 3
else:
res |= (perms & 0x07) << 6
res |= (perms & 0x05) << 3
res |= 0x05
return res
class node_dir(node_database):
our_type = 'collection'
def __init__(self, path, parent, context, dirr, dctx=None):
@ -499,13 +525,13 @@ class node_dir(node_database):
# TODO: the write date should be MAX(file.write)..
self.write_date = dirr and (dirr.write_date or dirr.create_date) or False
self.content_length = 0
self.unixperms = 040750
try:
self.uuser = (dirr.user_id and dirr.user_id.login) or 'nobody'
except Exception:
self.uuser = 'nobody'
self.ugroup = mkdosname(dirr.company_id and dirr.company_id.name, default='nogroup')
self.uidperms = dirr.get_dir_permissions()
self.unixperms = 040000 | _uid2unixperms(self.uidperms, dirr and dirr.user_id)
if dctx:
self.dctx.update(dctx)
dc2 = self.context.context
@ -559,7 +585,46 @@ class node_dir(node_database):
return res
def _child_get(self, cr, name=None, domain=None):
return super(node_dir,self)._child_get(cr, name, self.dir_id, domain=domain)
dirobj = self.context._dirobj
uid = self.context.uid
ctx = self.context.context.copy()
ctx.update(self.dctx)
where = [('parent_id','=',self.dir_id)]
if name:
where.append(('name','=',name))
is_allowed = self.check_perms(1)
else:
is_allowed = self.check_perms(5)
if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied")
if not domain:
domain = []
where2 = where + domain + [('ressource_parent_type_id','=',False)]
ids = dirobj.search(cr, uid, where2, context=ctx)
res = []
for dirr in dirobj.browse(cr, uid, ids, context=ctx):
klass = dirr.get_node_class(dirr, context=ctx)
res.append(klass(dirr.name, self, self.context,dirr))
# Static directories should never return files with res_model/res_id
# because static dirs are /never/ related to a record.
# In fact, files related to some model and parented by the root dir
# (the default), will NOT be accessible in the node system unless
# a resource folder for that model exists (with resource_find_all=True).
# Having resource attachments in a common folder is bad practice,
# because they would be visible to all users, and their names may be
# the same, conflicting.
where += [('res_model', '=', False)]
fil_obj = dirobj.pool.get('ir.attachment')
ids = fil_obj.search(cr, uid, where, context=ctx)
if ids:
for fil in fil_obj.browse(cr, uid, ids, context=ctx):
klass = self.context.node_file_class
res.append(klass(fil.name, self, self.context, fil))
return res
def rmcol(self, cr):
uid = self.context.uid
@ -699,13 +764,13 @@ class node_res_dir(node_class):
# TODO: the write date should be MAX(file.write)..
self.write_date = dirr.write_date or dirr.create_date
self.content_length = 0
self.unixperms = 040750
try:
self.uuser = (dirr.user_id and dirr.user_id.login) or 'nobody'
except Exception:
self.uuser = 'nobody'
self.ugroup = mkdosname(dirr.company_id and dirr.company_id.name, default='nogroup')
self.uidperms = dirr.get_dir_permissions()
self.unixperms = 040000 | _uid2unixperms(self.uidperms, dirr and dirr.user_id)
self.res_model = dirr.ressource_type_id and dirr.ressource_type_id.model or False
self.resm_id = dirr.ressource_id
self.res_find_all = dirr.resource_find_all
@ -759,7 +824,7 @@ class node_res_dir(node_class):
ctx.update(self.dctx)
where = []
if self.domain:
app = safe_eval(self.domain, self.dctx)
app = safe_eval(self.domain, ctx)
if not app:
pass
elif isinstance(app, list):
@ -823,8 +888,8 @@ class node_res_obj(node_class):
# TODO: the write date should be MAX(file.write)..
self.write_date = parent.write_date
self.content_length = 0
self.unixperms = 040750
self.uidperms = parent.uidperms & 15
self.unixperms = 040000 | _uid2unixperms(self.uidperms, True)
self.uuser = parent.uuser
self.ugroup = parent.ugroup
self.res_model = res_model
@ -953,9 +1018,9 @@ class node_res_obj(node_class):
where1 = []
if obj._parent_name in obj.fields_get(cr, uid):
where1 = where + [(obj._parent_name, '=', self.res_id)]
namefield = directory.resource_field.name or 'name'
resids = obj.search(cr, uid, where1, context=ctx)
for bo in obj.browse(cr, uid, resids, context=ctx):
namefield = directory.resource_field.name or 'name'
if not bo:
continue
res_name = getattr(bo, namefield)
@ -963,7 +1028,7 @@ class node_res_obj(node_class):
continue
# TODO Revise
klass = directory.get_node_class(directory, dynamic=True, context=ctx)
res.append(klass(res_name, self.dir_id, self, self.context, self.res_model, res_bo = bo))
res.append(klass(res_name, dir_id=self.dir_id, parent=self, context=self.context, res_model=self.res_model, res_bo=bo))
where2 = where + [('parent_id','=',self.dir_id) ]
@ -1361,11 +1426,13 @@ class nodefd_content(StringIO, node_descriptor):
def __init__(self, parent, cr, mode, ctx):
node_descriptor.__init__(self, parent)
self._context=ctx
self._size = 0L
if mode in ('r', 'r+'):
cntobj = parent.context._dirobj.pool.get('document.directory.content')
data = cntobj.process_read(cr, parent.context.uid, parent, ctx)
if data:
self._size = len(data)
parent.content_length = len(data)
StringIO.__init__(self, data)
elif mode in ('w', 'w+'):
@ -1379,6 +1446,9 @@ class nodefd_content(StringIO, node_descriptor):
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
def size(self):
return self._size
def close(self):
# we now open a *separate* cursor, to update the data.
# FIXME: this may be improved, for concurrency handling
@ -1404,4 +1474,56 @@ class nodefd_content(StringIO, node_descriptor):
cr.close()
StringIO.close(self)
class nodefd_static(StringIO, node_descriptor):
""" A descriptor to nodes with static data.
"""
def __init__(self, parent, cr, mode, ctx=None):
node_descriptor.__init__(self, parent)
self._context=ctx
self._size = 0L
if mode in ('r', 'r+'):
data = parent.get_data(cr)
if data:
self._size = len(data)
parent.content_length = len(data)
StringIO.__init__(self, data)
elif mode in ('w', 'w+'):
StringIO.__init__(self, None)
# at write, we start at 0 (= overwrite), but have the original
# data available, in case of a seek()
elif mode == 'a':
StringIO.__init__(self, None)
else:
logging.getLogger('document.nodes').error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
def size(self):
return self._size
def close(self):
# we now open a *separate* cursor, to update the data.
# FIXME: this may be improved, for concurrency handling
if self.mode == 'r':
StringIO.close(self)
return
par = self._get_parent()
# uid = par.context.uid
cr = pooler.get_db(par.context.dbname).cursor()
try:
if self.mode in ('w', 'w+', 'r+'):
data = self.getvalue()
par.set_data(cr, data)
elif self.mode == 'a':
raise NotImplementedError
cr.commit()
except Exception:
logging.getLogger('document.nodes').exception('Cannot update db content #%d for close:', par.cnt_id)
raise
finally:
cr.close()
StringIO.close(self)
#eof

View File

@ -14,6 +14,28 @@
<field name="groups_id" eval="[(6,0,[ref('base.group_system')])]"/>
</record>
<record id="ir_rule_readpublicdirectories0" model="ir.rule">
<field name="model_id" ref="document.model_document_directory"/>
<field name="domain_force">['|','|',('group_ids','in',[g.id for g in user.groups_id]), ('user_id', '=', user.id), '&amp;', ('user_id', '=', False), ('group_ids','=',False), '|', ('company_id','=',False), ('company_id','child_of',[user.company_id.id])]</field>
<field name="name">Read public directories</field>
<field eval="0" name="global"/>
<field eval="[(6,0,[ref('base.group_user')])]" name="groups"/>
<field eval="0" name="perm_unlink"/>
<field eval="0" name="perm_write"/>
<field eval="1" name="perm_read"/>
<field eval="0" name="perm_create"/>
</record>
<record id="ir_rule_documentmodifyowndirectories0" model="ir.rule">
<field name="model_id" ref="document.model_document_directory"/>
<field name="domain_force">[ '|', ('user_id', '=', user.id), '&amp;', ('group_ids','in',[g.id for g in user.groups_id]), ('user_id','=',False), '|', ('company_id','=',False), ('company_id','child_of',[user.company_id.id])]</field>
<field name="name">Document modify own directories</field>
<field eval="0" name="global"/>
<field eval="[(6,0,[ref('base.group_document_user')])]" name="groups"/>
<field eval="1" name="perm_unlink"/>
<field eval="1" name="perm_write"/>
<field eval="0" name="perm_read"/>
<field eval="1" name="perm_create"/>
</record>
</data>
</openerp>

View File

@ -1,6 +1,7 @@
"id","name","model_id:id","group_id:id","perm_read","perm_write","perm_create","perm_unlink"
"access_document_directory_all","document.directory all","model_document_directory",,1,0,0,0
"access_document_directory_group_document_manager","document.directory document manager","model_document_directory","base.group_system",1,1,1,1
"access_document_directory_group_knowledge","document.directory modify","model_document_directory","base.group_document_user",1,1,1,1
"access_document_directory_group_system","document.directory group system","model_document_directory","base.group_system",1,1,1,1
"access_document_directory_content_all","document.directory.content all","model_document_directory_content",,1,0,0,0
"access_document_directory_content_group_document_manager","document.directory.content document manager","model_document_directory_content","base.group_system",1,1,1,1

1 id name model_id:id group_id:id perm_read perm_write perm_create perm_unlink
2 access_document_directory_all document.directory all model_document_directory 1 0 0 0
3 access_document_directory_group_document_manager document.directory document manager model_document_directory base.group_system 1 1 1 1
4 access_document_directory_group_knowledge document.directory modify model_document_directory base.group_document_user 1 1 1 1
5 access_document_directory_group_system document.directory group system model_document_directory base.group_system 1 1 1 1
6 access_document_directory_content_all document.directory.content all model_document_directory_content 1 0 0 0
7 access_document_directory_content_group_document_manager document.directory.content document manager model_document_directory_content base.group_system 1 1 1 1

View File

@ -146,6 +146,7 @@ class abstracted_fs(object):
ret = child.open_data(cr, mode)
cr.commit()
assert ret, "Cannot create descriptor for %r: %r" % (child, ret)
return ret
except EnvironmentError:
raise
@ -156,6 +157,7 @@ class abstracted_fs(object):
try:
child = node.create_child(cr, objname, data=None)
ret = child.open_data(cr, mode)
assert ret, "cannot create descriptor for %r" % child
cr.commit()
return ret
except EnvironmentError:

View File

@ -2223,6 +2223,7 @@ class FTPHandler(asynchat.async_chat):
if self.restart_position:
mode = 'r+'
fd = self.try_as_current_user(self.fs.create, (datacr, datacr[2], mode + 'b'))
assert fd
except FTPExceptionSent:
self.fs.close_cr(datacr)
return

View File

@ -7,6 +7,7 @@
from document_ftp import test_easyftp as te
ftp = te.get_plain_ftp(timeout=2.0)
assert ftp.sock and (ftp.lastresp == '220'), ftp.lastresp
ftp.close()
-
I read the list of databases at port 8021 and confirm our db is
there
@ -15,6 +16,7 @@
from document_ftp import test_easyftp as te
ftp = te.get_ftp_login(cr, uid, self)
assert cr.dbname in ftp.nlst("/")
ftp.close()
-
I try to locate the default "Documents" folder in the db.
-
@ -22,6 +24,7 @@
from document_ftp import test_easyftp as te
ftp = te.get_ftp_login(cr, uid, self)
ftp.cwd('Documents')
ftp.close()
-
I create a "test.txt" file at the server (directly). The file
should have the "abcd" content
@ -32,6 +35,7 @@
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
fdata = StringIO('abcd')
ftp.storbinary('STOR test.txt', fdata)
ftp.close()
-
I look for the "test.txt" file at the server
-
@ -39,6 +43,7 @@
from document_ftp import test_easyftp as te
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
assert ftp.nlst("test.txt") == ['test.txt']
ftp.close()
-
I check that the content of "test.txt" is "abcd"
-
@ -46,7 +51,9 @@
from document_ftp import test_easyftp as te
from cStringIO import StringIO
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
assert te.get_ftp_fulldata(ftp, "test.txt") == 'abcd'
gotdata = te.get_ftp_fulldata(ftp, "test.txt")
ftp.close()
assert gotdata == 'abcd', 'Data: %r' % gotdata
-
I append the string 'defgh' to "test.txt"
-
@ -56,6 +63,7 @@
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
fdata = StringIO('defgh')
ftp.storbinary('APPE test.txt', fdata)
ftp.close()
-
I check that the content of "text.txt" is 'abcddefgh'
-
@ -63,7 +71,9 @@
from document_ftp import test_easyftp as te
from cStringIO import StringIO
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
assert te.get_ftp_fulldata(ftp, "test.txt") == 'abcddefgh'
gotdata = te.get_ftp_fulldata(ftp, "test.txt")
ftp.close()
assert gotdata == 'abcddefgh', 'Data: %r' % gotdata
-
I try to cd into an non-existing folder 'Not-This'
-
@ -77,7 +87,9 @@
except ftplib.error_perm:
pass
except OSError, err:
ftp.close()
assert err.errno == 2, err.errno
ftp.close()
-
I create a "test2.txt" file through FTP.
-
@ -87,6 +99,7 @@
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
fdata = StringIO('abcd')
ftp.storbinary('STOR test2.txt', fdata)
ftp.close()
-
I look for the "test2.txt" file at the server
-
@ -101,6 +114,7 @@
from cStringIO import StringIO
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
ftp.delete('test2.txt')
ftp.close()
-
I check at the server that test2.txt is deleted
-
@ -116,6 +130,7 @@
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
fdata = StringIO('abcd')
ftp.storbinary('STOR test2.txt', fdata)
ftp.close()
-
I delete the test2.txt from the server (RPC).
-
@ -137,6 +152,7 @@
except ftplib.error_perm: # 550 error: 'path not exists'
nlst_result = []
assert "test2.txt" not in nlst_result, "Files: %r" % nlst_result
ftp.close()
-
I create a "test-name.txt" file
-
@ -146,6 +162,7 @@
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
fdata = StringIO('abcd')
ftp.storbinary('STOR test-name.txt', fdata)
ftp.close()
-
I rename the "test-name.txt" file through ftp.
-
@ -153,6 +170,7 @@
from document_ftp import test_easyftp as te
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
ftp.rename("test-name.txt", "test-renamed.txt")
ftp.close()
-
I check that test-name.txt has been renamed.
-
@ -166,6 +184,7 @@
except error_perm, e:
pass
assert ftp.nlst("test-renamed.txt") == ['test-renamed.txt']
ftp.close()
-
I create a new folder 'Test-Folder2' through FTP
-
@ -173,6 +192,7 @@
from document_ftp import test_easyftp as te
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
ftp.mkd("Test-Folder2")
ftp.close()
-
I create a file 'test3.txt' at the 'Test-Folder2'
-
@ -182,6 +202,7 @@
ftp = te.get_ftp_folder(cr, uid, self, 'Documents/Test-Folder2')
fdata = StringIO('abcd')
ftp.storbinary('STOR test3.txt', fdata)
ftp.close()
-
I try to retrieve test3.txt
-
@ -189,6 +210,7 @@
from document_ftp import test_easyftp as te
ftp = te.get_ftp_folder(cr, uid, self, 'Documents/Test-Folder2')
assert ftp.nlst("test3.txt") == ['test3.txt'], "File test3.txt is not there!"
ftp.close()
-
I create a new folder, 'Test-Folder3', through FTP
I try to move test3.txt to 'Test-Folder3'
@ -197,6 +219,7 @@
from document_ftp import test_easyftp as te
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
ftp.mkd("Test-Folder3")
ftp.close()
# TODO move
-
I remove the 'Test-Folder3'
@ -205,6 +228,7 @@
from document_ftp import test_easyftp as te
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
ftp.rmd("Test-Folder3")
ftp.close()
-
I check that test3.txt is removed.
-
@ -219,6 +243,7 @@
for i in range(0, 200):
fdata.seek(0)
ftp.storbinary('STOR test-name%s.txt' %i, fdata)
ftp.close()
-
I list the 200 files, check speed
-
@ -245,6 +270,7 @@
ftp.delete('test3.txt')
for i in range(0, 200):
ftp.delete('test-name%s.txt' %i)
ftp.close()
-
I remove the 'Test-Folder2'
-
@ -252,3 +278,4 @@
from document_ftp import test_easyftp as te
ftp = te.get_ftp_folder(cr, uid, self, 'Documents')
ftp.rmd("Test-Folder2")
ftp.close()

View File

@ -113,6 +113,35 @@
- parent_id.name == 'Documents'
- res_model == 'res.partner'
- res_id != False
-
I try to create a file directly under the Partners Testing folder
-
!python {model: ir.attachment}: |
from document_ftp import test_easyftp as te
import ftplib
from cStringIO import StringIO
ftp = te.get_ftp_folder(cr, uid, self, 'Documents/Partners Testing')
fdata = StringIO('abcd')
try:
ftp.storbinary('STOR stray.txt', fdata)
assert False, "We should't be able to create files here"
except ftplib.error_perm:
# That's what should happen
pass
-
I try to create a folder directly under the Partners Testing folder
-
!python {model: ir.attachment}: |
from document_ftp import test_easyftp as te
import ftplib
from cStringIO import StringIO
ftp = te.get_ftp_folder(cr, uid, self, 'Documents/Partners Testing')
try:
ftp.mkd('Weird folder')
assert False, "We should't be able to create folders here"
except ftplib.error_perm:
# That's what should happen
pass
-
I check that all/Partner1 also has the file
- |

View File

@ -0,0 +1,30 @@
-
In order to check the permissions setup and functionality of the
document module:
-
I create a testing user for the documents
-
I assign some ... group to the testing user
-
I create a "group testing" user, which also belongs to the same ... group
-
I create a "blocked" user.
-
I create (as root) a testing folder in the document hierarchy, and
assign ownership to the testing user, groups to the ... group.
-
I create a "private" folder inside the testing folder.
-
I try to read the testing folder as the testing user
-
I try to read the folder as the group user, it should fail.
-
I try to read the folder as the blocked user.
-
I create a "group" folder, with the ... group.
-
I try to read the "group" folder as the testing user
-
I try to read the "group" folder as the group user
-
I try to read the "group" folder as the blocked user

View File

@ -57,6 +57,8 @@
'webdav_setup.xml',
],
"demo_xml" : [],
"test": [ #'test/webdav_test1.yml',
],
"active": False,
"installable": True
}

View File

@ -20,18 +20,16 @@
##############################################################################
import pooler
import base64
import sys
import os
import time
from string import joinfields, split, lower
import errno
import netsvc
import urlparse
from DAV.constants import COLLECTION, OBJECT
from DAV.errors import *
from DAV.iface import *
from DAV.constants import COLLECTION #, OBJECT
from DAV.errors import DAV_Error, DAV_Forbidden, DAV_NotFound
from DAV.iface import dav_interface
import urllib
from DAV.davcmd import copyone, copytree, moveone, movetree, delone, deltree
@ -74,6 +72,89 @@ def _str2time(cre):
cre = cre[:fdot]
return time.mktime(time.strptime(cre,'%Y-%m-%d %H:%M:%S')) + frac
class BoundStream2(object):
    """Wraps around a seekable buffer, reads a determined range of data

       Note that the supplied stream object MUST support a size() which
       should return its data length (in bytes).

       A variation of the class in websrv_lib.py
    """

    def __init__(self, stream, offset=None, length=None, chunk_size=None):
        """@param stream      underlying seekable stream (must have .size())
           @param offset      start of the window inside the stream (default 0)
           @param length      length of the window; mandatory and non-negative
           @param chunk_size  optional cap on the size of a single read()
        """
        self._stream = stream
        self._offset = offset or 0
        self._length = length or self._stream.size()
        self._rem_length = length
        assert length and isinstance(length, (int, long))
        assert length and length >= 0, length
        self._chunk_size = chunk_size
        if offset is not None:
            self._stream.seek(offset)

    def read(self, size=-1):
        """Read up to `size` bytes, never crossing the end of the window."""
        if not self._stream:
            raise IOError(errno.EBADF, "read() without stream")
        if self._rem_length == 0:
            return ''
        elif self._rem_length < 0:
            raise EOFError()

        rsize = self._rem_length
        if size > 0 and size < rsize:
            rsize = size
        if self._chunk_size and self._chunk_size < rsize:
            rsize = self._chunk_size

        data = self._stream.read(rsize)
        self._rem_length -= len(data)
        return data

    def __len__(self):
        return self._length

    def tell(self):
        """Current position, relative to the start of the window."""
        res = self._stream.tell()
        if self._offset:
            res -= self._offset
        return res

    def __iter__(self):
        return self

    def next(self):
        # FIX: iterate in 64k chunks but terminate at the end of the
        # window; returning '' forever (as before) made
        # "for chunk in stream" an infinite loop.
        data = self.read(65536)
        if not data:
            raise StopIteration
        return data

    def seek(self, pos, whence=os.SEEK_SET):
        """ Seek, computing our limited range
        """
        if whence == os.SEEK_SET:
            if pos < 0 or pos > self._length:
                raise IOError(errno.EINVAL, "Cannot seek")
            # FIX: `pos` is relative to the window, so the underlying
            # stream must go to offset + pos; the old "pos - self._offset"
            # landed *before* the window whenever offset > 0.
            self._stream.seek(self._offset + pos)
            self._rem_length = self._length - pos
        elif whence == os.SEEK_CUR:
            if pos > 0:
                if pos > self._rem_length:
                    raise IOError(errno.EINVAL, "Cannot seek past end")
            elif pos < 0:
                oldpos = self.tell()
                if oldpos + pos < 0:
                    raise IOError(errno.EINVAL, "Cannot seek before start")
            self._stream.seek(pos, os.SEEK_CUR)
            self._rem_length -= pos
        elif whence == os.SEEK_END:
            if pos > 0:
                raise IOError(errno.EINVAL, "Cannot seek past end")
            else:
                if self._length + pos < 0:
                    raise IOError(errno.EINVAL, "Cannot seek before start")
                newpos = self._offset + self._length + pos
                self._stream.seek(newpos, os.SEEK_SET)
                self._rem_length = 0 - pos
class openerp_dav_handler(dav_interface):
"""
This class models a OpenERP interface for the DAV server
@ -396,11 +477,28 @@ class openerp_dav_handler(dav_interface):
node = self.uri2object(cr, uid, pool, uri2)
if not node:
raise DAV_NotFound2(uri2)
# TODO: if node is a collection, for some specific set of
# clients ( web browsers; available in node context),
# we may return a pseydo-html page with the directory listing.
try:
res = node.open_data(cr,'r')
if rrange:
self.parent.log_error("Doc get_data cannot use range")
raise DAV_Error(409)
datas = node.get_data(cr)
assert isinstance(rrange, (tuple,list))
start, end = map(long, rrange)
if not start:
start = 0
assert start >= 0
if end and end < start:
self.parent.log_error("Invalid range for data: %s-%s" %(start, end))
raise DAV_Error(416, "Invalid range for data")
if end:
if end >= res.size():
raise DAV_Error(416, "Requested data exceeds available size")
length = (end + 1) - start
else:
length = res.size() - start
res = BoundStream2(res, offset=start, length=length)
except TypeError,e:
# for the collections that return this error, the DAV standard
# says we'd better just return 200 OK with empty data
@ -413,7 +511,7 @@ class openerp_dav_handler(dav_interface):
self.parent.log_error("GET exception: %s",str(e))
self.parent.log_message("Exc: %s", traceback.format_exc())
raise DAV_Error, 409
return str(datas) # FIXME!
return res
finally:
if cr: cr.close()
@ -584,8 +682,7 @@ class openerp_dav_handler(dav_interface):
node = False
objname = uri2[-1]
ext = objname.find('.') >0 and objname.split('.')[1] or False
ret = None
if not node:
dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
@ -668,7 +765,7 @@ class openerp_dav_handler(dav_interface):
"""
if uri[-1]=='/':uri=uri[:-1]
res=delone(self,uri)
parent='/'.join(uri.split('/')[:-1])
# parent='/'.join(uri.split('/')[:-1])
return res
def deltree(self, uri):
@ -680,7 +777,7 @@ class openerp_dav_handler(dav_interface):
"""
if uri[-1]=='/':uri=uri[:-1]
res=deltree(self, uri)
parent='/'.join(uri.split('/')[:-1])
# parent='/'.join(uri.split('/')[:-1])
return res

View File

@ -37,7 +37,6 @@ class document_davdir(osv.osv):
dbro = self.browse(cr, uid, ids, context=context)
if dynamic:
assert dbro.type == 'directory'
return nodes.node_res_obj
elif dbro.type == 'directory':
return nodes.node_dir

View File

@ -0,0 +1,59 @@
-
In order to test the document_ftp functionality
-
I open the HTTP port and perform an OPTIONS request to the server
-
!python {model: ir.attachment}: |
from document_webdav import test_davclient as te
reload(te) # reload..
dc = te.DAVClient()
dc.gd_options()
dc.get_creds(self, cr, uid)
dc.gd_options(path=cr.dbname, expect={'DAV': ['1',]})
-
I will test the propnames at the document root
-
!python {model: ir.attachment}: |
from document_webdav import test_davclient as te
dc = te.DAVClient()
dc.get_creds(self, cr, uid)
dc.gd_propname(path=cr.dbname+'/Documents/')
-
I will test the ETags of the document root
-
!python {model: ir.attachment}: |
from document_webdav import test_davclient as te
dc = te.DAVClient()
dc.get_creds(self, cr, uid)
dc.gd_getetag(path=cr.dbname+'/Documents/')
-
I will now ls -l the document root.
-
!python {model: ir.attachment}: |
from document_webdav import test_davclient as te
dc = te.DAVClient()
dc.get_creds(self, cr, uid)
res = dc.gd_lsl(path=cr.dbname+'/Documents/')
for lin in res:
print "%(type)s\t%(uid)s\t%(gid)s\t%(size)s\t%(mtime)s\t%(name)s" % lin
-
I will put a file to the server
-
!python {model: ir.attachment}: |
from document_webdav import test_davclient as te
import addons
dc = te.DAVClient()
dc.get_creds(self, cr, uid)
tdp = addons.get_module_resource('document_webdav', 'test_davclient.py')
res = dc.gd_put(path=cr.dbname+'/Documents/test_davclient.py', srcpath=tdp)
-
I will try to get the file from the root
-
!python {model: ir.attachment}: |
from document_webdav import test_davclient as te
import addons
dc = te.DAVClient()
dc.get_creds(self, cr, uid)
tdp = addons.get_module_resource('document_webdav', 'test_davclient.py')
res = dc.gd_get(path=cr.dbname+'/Documents/test_davclient.py', crange=(4,508), compare=tdp)

View File

@ -0,0 +1,703 @@
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright P. Christeas <p_christ@hol.gr> 2008,2009
# Copyright OpenERP SA. (http://www.openerp.com) 2010
#
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
###############################################################################
""" A trivial HTTP/WebDAV client, used for testing the server
"""
# code taken from the 'http-client.py' script:
# http://git.hellug.gr/?p=xrg/openerp;a=history;f=tests/http-client.py;hb=refs/heads/xrg-60
import re
import gzip
import logging
import xml.dom.minidom
import httplib
from tools import config
from xmlrpclib import Transport, ProtocolError
import StringIO
import base64
log = logging.getLogger('http-client')
class HTTP11(httplib.HTTP):
    # Stock httplib.HTTP speaks HTTP/1.0; override the version fields so
    # that persistent (keep-alive) connections become possible.
    _http_vsn = 11
    _http_vsn_str = 'HTTP/1.1'
class PersistentTransport(Transport):
    """Handles an HTTP transaction to an XML-RPC server, persistently."""

    def __init__(self, use_datetime=0):
        self._use_datetime = use_datetime
        # cache of host -> open HTTP11 connection, reused across requests
        self._http = {}
        log.debug("Using persistent transport")

    def make_connection(self, host):
        # create a HTTP connection object from a host descriptor
        if not self._http.has_key(host):
            host, extra_headers, x509 = self.get_host_info(host)
            self._http[host] = HTTP11(host)
            log.debug("New connection to %s", host)
        return self._http[host]

    def get_host_info(self, host):
        # augment the stock host info with a keep-alive request header
        host, extra_headers, x509 = Transport.get_host_info(self,host)
        if extra_headers == None:
            extra_headers = []
        extra_headers.append( ( 'Connection', 'keep-alive' ))
        return host, extra_headers, x509

    def _parse_response(self, file, sock, response):
        """ read response from input file/socket, and parse it
            We are persistent, so it is important to only parse
            the right amount of input
        """
        p, u = self.getparser()

        if response.msg.get('content-encoding') == 'gzip':
            # buffer the whole compressed body first, then decompress it
            # and feed the plain XML to the parser
            gzdata = StringIO.StringIO()
            while not response.isclosed():
                rdata = response.read(1024)
                if not rdata:
                    break
                gzdata.write(rdata)
            gzdata.seek(0)
            rbuffer = gzip.GzipFile(mode='rb', fileobj=gzdata)
            while True:
                respdata = rbuffer.read()
                if not respdata:
                    break
                p.feed(respdata)
        else:
            while not response.isclosed():
                rdata = response.read(1024)
                if not rdata:
                    break
                p.feed(rdata)
                # a short read means the body is exhausted; stop instead of
                # blocking on the still-open persistent socket
                if len(rdata)<1024:
                    break

        p.close()
        return u.close()

    def request(self, host, handler, request_body, verbose=0):
        # issue XML-RPC request
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        self.send_request(h, handler, request_body)
        self.send_host(h, host)
        self.send_user_agent(h)
        self.send_content(h, request_body)

        resp = h._conn.getresponse()
        # TODO: except BadStatusLine, e:

        errcode, errmsg, headers = resp.status, resp.reason, resp.msg
        if errcode != 200:
            raise ProtocolError(
                host + handler,
                errcode, errmsg,
                headers
                )

        self.verbose = verbose

        try:
            sock = h._conn.sock
        except AttributeError:
            sock = None

        return self._parse_response(h.getfile(), sock, resp)
class CompressedTransport(PersistentTransport):
    # Variant that gzip-compresses the request body and advertises
    # gzip support for the response.

    def send_content(self, connection, request_body):
        connection.putheader("Content-Type", "text/xml")

        # NOTE(review): "or True" makes compression unconditional, so the
        # 512-byte threshold is dead code -- confirm whether small bodies
        # were meant to be sent uncompressed.
        if len(request_body) > 512 or True:
            buffer = StringIO.StringIO()
            output = gzip.GzipFile(mode='wb', fileobj=buffer)
            output.write(request_body)
            output.close()
            buffer.seek(0)
            request_body = buffer.getvalue()
            connection.putheader('Content-Encoding', 'gzip')

        connection.putheader("Content-Length", str(len(request_body)))
        connection.putheader("Accept-Encoding",'gzip')
        connection.endheaders()
        if request_body:
            connection.send(request_body)

    def send_request(self, connection, handler, request_body):
        # skip_accept_encoding: we emit the Accept-Encoding header ourselves
        connection.putrequest("POST", handler, skip_accept_encoding=1)
class SafePersistentTransport(PersistentTransport):
    # Same connection-caching behaviour as PersistentTransport, but over
    # HTTPS (passes any x509 client-cert info through to httplib.HTTPS).
    def make_connection(self, host):
        # create a HTTP connection object from a host descriptor
        if not self._http.has_key(host):
            host, extra_headers, x509 = self.get_host_info(host)
            self._http[host] = httplib.HTTPS(host, None, **(x509 or {}))
            log.debug("New connection to %s", host)
        return self._http[host]
class AuthClient(object):
    """ Base class for HTTP authentication helpers.

        Concrete subclasses implement getAuth() for the scheme(s) they
        support and may keep a per-realm credential cache.
    """

    def getAuth(self, atype, realm):
        """ Return an Authorization header value for atype/realm. """
        msg = "Cannot authenticate for %s" % atype
        raise NotImplementedError(msg)

    def resolveFailedRealm(self, realm):
        """ Called when, using a known auth type, the realm is not in cache
        """
        msg = "Cannot authenticate for realm %s" % realm
        raise NotImplementedError(msg)
class BasicAuthClient(AuthClient):
    """ Holds a cache of pre-encoded Basic credentials, one per realm. """

    def __init__(self):
        self._realm_dict = {}

    def getAuth(self, atype, realm):
        """ Return the 'Basic ...' header value cached for realm. """
        if atype != 'Basic' :
            # not our scheme: defer to the base class (which raises)
            return super(BasicAuthClient,self).getAuth(atype, realm)

        if realm not in self._realm_dict:
            log.debug("realm dict: %r", self._realm_dict)
            log.debug("missing key: \"%s\"" % realm)
            self.resolveFailedRealm(realm)
        return 'Basic '+ self._realm_dict[realm]

    def addLogin(self, realm, username, passwd):
        """ Add some known username/password for a specific login.
            This function should be called once, for each realm
            that we want to authenticate against
        """
        assert realm
        token = base64.encodestring(username + ':' + passwd)
        # encodestring appends a trailing newline; strip it off
        if token[-1] == "\n":
            token = token[:-1]
        self._realm_dict[realm] = token
class addAuthTransport:
    """ Intermediate class that adds an authentication algorithm to an
        http transport (mixed in with one of the Transport classes above).
    """

    def setAuthClient(self, authobj):
        """ Set the authentication client object.
            This method must be called before any request is issued, that
            would require http authentication
        """
        assert isinstance(authobj, AuthClient)
        self._auth_client = authobj

    def request(self, host, handler, request_body, verbose=0):
        # issue XML-RPC request, retrying up to 3 times on 401 responses
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)
        tries = 0
        atype = None
        realm = None

        while(tries < 3):
            self.send_request(h, handler, request_body)
            self.send_host(h, host)
            self.send_user_agent(h)
            if atype:
                # This line will bork if self.setAuthClient has not
                # been issued. That is a programming error, fix your code!
                auths = self._auth_client.getAuth(atype, realm)
                log.debug("sending authorization: %s", auths)
                h.putheader('Authorization', auths)
            self.send_content(h, request_body)

            resp = h._conn.getresponse()
            #  except BadStatusLine, e:
            tries += 1

            if resp.status == 401:
                if 'www-authenticate' in resp.msg:
                    (atype,realm) = resp.msg.getheader('www-authenticate').split(' ',1)
                    # drain the 401 body so the persistent socket can be reused
                    data1 = resp.read()
                    if data1:
                        log.warning("Why have data on a 401 auth. message?")
                    if realm.startswith('realm="') and realm.endswith('"'):
                        realm = realm[7:-1]
                    # FIX: the format string had only two %r for three
                    # arguments, which made logging fail at emit time.
                    log.debug("Resp: %r %r %r", resp.version, resp.isclosed(), resp.will_close)
                    log.debug("Want to do auth %s for realm %s", atype, realm)
                    if atype != 'Basic':
                        raise ProtocolError(host+handler, 403,
                                        "Unknown authentication method: %s" % atype, resp.msg)
                    continue # with the outer while loop
                else:
                    raise ProtocolError(host+handler, 403,
                                'Server-incomplete authentication', resp.msg)

            if resp.status != 200:
                raise ProtocolError( host + handler,
                    resp.status, resp.reason, resp.msg )

            self.verbose = verbose

            try:
                sock = h._conn.sock
            except AttributeError:
                sock = None

            return self._parse_response(h.getfile(), sock, resp)

        raise ProtocolError(host+handler, 403, "No authentication",'')
class PersistentAuthTransport(addAuthTransport,PersistentTransport):
    # Keep-alive XML-RPC transport with HTTP authentication support.
    pass
class PersistentAuthCTransport(addAuthTransport,CompressedTransport):
    # Keep-alive, gzip-compressed XML-RPC transport with authentication.
    pass
class HTTPSConnection(httplib.HTTPSConnection):
    # Optional class-wide path to a CA bundle; when set, peer certificates
    # are verified against it, otherwise any certificate is accepted.
    certs_file = None

    def connect(self):
        "Connect to a host on a given (SSL) port. check the certificate"
        import socket, ssl

        if HTTPSConnection.certs_file:
            ca_certs = HTTPSConnection.certs_file
            cert_reqs = ssl.CERT_REQUIRED
        else:
            ca_certs = None
            cert_reqs = ssl.CERT_NONE

        sock = socket.create_connection((self.host, self.port), self.timeout)
        self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
                ca_certs=ca_certs,
                cert_reqs=cert_reqs)

    def getpeercert(self):
        import ssl
        cert = None
        if self.sock:
            # live connection: ask the SSL socket directly
            cert = self.sock.getpeercert()
        else:
            # no socket yet: fetch the certificate over a throw-away connection
            cert = ssl.get_server_certificate((self.host,self.port),
                    ssl_version=ssl.PROTOCOL_SSLv23 )
        # make sure a newline precedes the PEM footer; the result of
        # get_server_certificate may lack it
        lf = (len(ssl.PEM_FOOTER)+1)
        if cert[0-lf] != '\n':
            cert = cert[:0-lf]+'\n'+cert[0-lf:]
        log.debug("len-footer: %s cert: %r", lf, cert[0-lf])

        return cert
class DAVClient(object):
"""An instance of a WebDAV client, connected to the OpenERP server
"""
    def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False):
        """ Resolve the server host/port from the OpenERP configuration.

            Honours the httpsd/httpd misc sections when present, falling
            back to the plain xmlrpc(s) settings.
        """
        if use_ssl:
            self.host = config.get_misc('httpsd', 'interface', False)
            self.port = config.get_misc('httpsd', 'port', 8071)
            if not self.host:
                self.host = config.get('xmlrpcs_interface')
                self.port = config.get('xmlrpcs_port')
        else:
            self.host = config.get_misc('httpd', 'interface')
            self.port = config.get_misc('httpd', 'port', 8069)
            if not self.host:
                self.host = config.get('xmlrpc_interface')
                self.port = config.get('xmlrpc_port') or self.port
        if self.host == '0.0.0.0' or not self.host:
            # wildcard or unset interface: connect over loopback
            self.host = '127.0.0.1'
        self.port = int(self.port)
        if not config.get_misc('webdav','enable',True):
            raise Exception("WebDAV is disabled, cannot continue")
        self.davpath = '/' + config.get_misc('webdav','vdir','webdav')
        self.user = user
        self.passwd = passwd
        self.dbg = dbg
        self.hdrs = {}
        if useragent:
            self.set_useragent(useragent)
def get_creds(self, obj, cr, uid):
"""Read back the user credentials from cr, uid
@param obj is any orm object, in order to use its pool
@param uid is the numeric id, which we will try to reverse resolve
note: this is a hackish way to get the credentials. It is expected
to break if "base_crypt" is used.
"""
ruob = obj.pool.get('res.users')
res = ruob.read(cr, 1, [uid,], ['login', 'password'])
assert res, "uid %s not found" % uid
self.user = res[0]['login']
self.passwd = res[0]['password']
return True
def set_useragent(self, uastr):
""" Set the user-agent header to something meaningful.
Some shorthand names will be replaced by stock strings.
"""
if uastr in ('KDE4', 'Korganizer'):
self.hdrs['User-Agent'] = "Mozilla/5.0 (compatible; Konqueror/4.4; Linux) KHTML/4.4.3 (like Gecko)"
elif uastr == 'iPhone3':
self.hdrs['User-Agent'] = "DAVKit/5.0 (765); iCalendar/5.0 (79); iPhone/4.1 8B117"
elif uastr == "MacOS":
self.hdrs['User-Agent'] = "WebDAVFS/1.8 (01808000) Darwin/9.8.0 (i386)"
else:
self.hdrs['User-Agent'] = uastr
def _http_request(self, path, method='GET', hdrs=None, body=None):
if not hdrs:
hdrs = {}
import base64
dbg = self.dbg
hdrs.update(self.hdrs)
log.debug("Getting %s http://%s:%d/%s", method, self.host, self.port, path)
conn = httplib.HTTPConnection(self.host, port=self.port)
conn.set_debuglevel(dbg)
if not path:
path = "/index.html"
if not hdrs.has_key('Connection'):
hdrs['Connection']= 'keep-alive'
conn.request(method, path, body, hdrs )
try:
r1 = conn.getresponse()
except httplib.BadStatusLine, bsl:
log.warning("Bad status line: %s", bsl.line)
raise Exception('Bad status line')
if r1.status == 401: # and r1.headers:
if 'www-authenticate' in r1.msg:
(atype,realm) = r1.msg.getheader('www-authenticate').split(' ',1)
data1 = r1.read()
if not self.user:
raise Exception('Must auth, have no user/pass!')
log.debug("Ver: %s, closed: %s, will close: %s", r1.version,r1.isclosed(), r1.will_close)
log.debug("Want to do auth %s for realm %s", atype, realm)
if atype == 'Basic' :
auths = base64.encodestring(self.user + ':' + self.passwd)
if auths[-1] == "\n":
auths = auths[:-1]
hdrs['Authorization']= 'Basic '+ auths
#sleep(1)
conn.request(method, path, body, hdrs )
r1 = conn.getresponse()
else:
raise Exception("Unknown auth type %s" %atype)
else:
log.warning("Got 401, cannot auth")
raise Exception('No auth')
log.debug("Reponse: %s %s",r1.status, r1.reason)
data1 = r1.read()
if method != 'GET':
log.debug("Body:\n%s\nEnd of body", data1)
try:
ctype = r1.msg.getheader('content-type')
if ctype and ';' in ctype:
ctype, encoding = ctype.split(';',1)
if ctype == 'text/xml':
doc = xml.dom.minidom.parseString(data1)
log.debug("XML Body:\n %s", doc.toprettyxml(indent="\t"))
except Exception:
log.warning("could not print xml", exc_info=True)
pass
conn.close()
return r1.status, r1.msg, data1
def _assert_headers(self, expect, msg):
""" Assert that the headers in msg contain the expect values
"""
for k, v in expect.items():
hval = msg.getheader(k)
if not hval:
raise AssertionError("Header %s not defined in http response" % k)
if isinstance(v, (list, tuple)):
delim = ','
hits = map(str.strip, hval.split(delim))
mvits= []
for vit in v:
if vit not in hits:
mvits.append(vit)
if mvits:
raise AssertionError("HTTP header \"%s\" is missing: %s" %(k, ', '.join(mvits)))
else:
if hval.strip() != v.strip():
raise AssertionError("HTTP header \"%s: %s\"" % (k, hval))
def gd_options(self, path='*', expect=None):
""" Test the http options functionality
If a dictionary is defined in expect, those options are
asserted.
"""
if path != '*':
path = self.davpath + path
hdrs = { 'Content-Length': 0
}
s, m, d = self._http_request(path, method='OPTIONS', hdrs=hdrs)
assert s == 200, "Status: %r" % s
assert 'OPTIONS' in m.getheader('Allow')
log.debug('Options: %r', m.getheader('Allow'))
if expect:
self._assert_headers(expect, m)
def _parse_prop_response(self, data):
""" Parse a propfind/propname response
"""
def getText(node):
rc = []
for node in node.childNodes:
if node.nodeType == node.TEXT_NODE:
rc.append(node.data)
return ''.join(rc)
def getElements(node, namespaces=None, strict=False):
for cnod in node.childNodes:
if cnod.nodeType != node.ELEMENT_NODE:
if strict:
log.debug("Found %r inside <%s>", cnod, node.tagName)
continue
if namespaces and (cnod.namespaceURI not in namespaces):
log.debug("Ignoring <%s> in <%s>", cnod.tagName, node.localName)
continue
yield cnod
nod = xml.dom.minidom.parseString(data)
nod_r = nod.documentElement
res = {}
assert nod_r.localName == 'multistatus', nod_r.tagName
for resp in nod_r.getElementsByTagNameNS('DAV:', 'response'):
href = None
status = 200
res_nss = {}
for cno in getElements(resp, namespaces=['DAV:',]):
if cno.localName == 'href':
assert href is None, "Second href in same response"
href = getText(cno)
elif cno.localName == 'propstat':
for pno in getElements(cno, namespaces=['DAV:',]):
rstatus = None
if pno.localName == 'prop':
for prop in getElements(pno):
key = prop.localName
tval = getText(prop).strip()
val = tval or (True, rstatus or status)
if prop.namespaceURI == 'DAV:' and prop.localName == 'resourcetype':
val = 'plain'
for rte in getElements(prop, namespaces=['DAV:',]):
# Note: we only look at DAV:... elements, we
# actually expect only one DAV:collection child
val = rte.localName
res_nss.setdefault(prop.namespaceURI,{})[key] = val
elif pno.localName == 'status':
rstr = getText(pno)
htver, sta, msg = rstr.split(' ', 3)
assert htver == 'HTTP/1.1'
rstatus = int(sta)
else:
log.debug("What is <%s> inside a <propstat>?", pno.tagName)
else:
log.debug("Unknown node: %s", cno.tagName)
res.setdefault(href,[]).append((status, res_nss))
return res
def gd_propfind(self, path, props=None, depth=0):
if not props:
propstr = '<allprop/>'
else:
propstr = '<prop>'
nscount = 0
for p in props:
ns = None
if isinstance(p, tuple):
p, ns = p
if ns is None or ns == 'DAV:':
propstr += '<%s/>' % p
else:
propstr += '<ns%d:%s xmlns:ns%d="%s" />' %(nscount, p, nscount, ns)
nscount += 1
propstr += '</prop>'
body="""<?xml version="1.0" encoding="utf-8"?>
<propfind xmlns="DAV:">%s</propfind>""" % propstr
hdrs = { 'Content-Type': 'text/xml; charset=utf-8',
'Accept': 'text/xml',
'Depth': depth,
}
s, m, d = self._http_request(self.davpath + path, method='PROPFIND',
hdrs=hdrs, body=body)
assert s == 207, "Bad status: %s" % s
ctype = m.getheader('Content-Type').split(';',1)[0]
assert ctype == 'text/xml', m.getheader('Content-Type')
res = self._parse_prop_response(d)
if depth == 0:
assert len(res) == 1
res = res.values()[0]
else:
assert len(res) >= 1
return res
def gd_propname(self, path, depth=0):
body="""<?xml version="1.0" encoding="utf-8"?>
<propfind xmlns="DAV:"><propname/></propfind>"""
hdrs = { 'Content-Type': 'text/xml; charset=utf-8',
'Accept': 'text/xml',
'Depth': depth
}
s, m, d = self._http_request(self.davpath + path, method='PROPFIND',
hdrs=hdrs, body=body)
assert s == 207, "Bad status: %s" % s
ctype = m.getheader('Content-Type').split(';',1)[0]
assert ctype == 'text/xml', m.getheader('Content-Type')
res = self._parse_prop_response(d)
if depth == 0:
assert len(res) == 1
res = res.values()[0]
else:
assert len(res) >= 1
return res
def gd_getetag(self, path, depth=0):
return self.gd_propfind(path, props=['getetag',], depth=depth)
def gd_lsl(self, path):
""" Return a list of 'ls -l' kind of data for a folder
This is based on propfind.
"""
lspairs = [ ('name', 'displayname', 'n/a'), ('size', 'getcontentlength', '0'),
('type', 'resourcetype', '----------'), ('uid', 'owner', 'nobody'),
('gid', 'group', 'nogroup'), ('mtime', 'getlastmodified', 'n/a'),
('mime', 'getcontenttype', 'application/data'), ]
propnames = [ l[1] for l in lspairs]
propres = self.gd_propfind(path, props=propnames, depth=1)
res = []
for href, pr in propres.items():
lsline = {}
for st, nsdic in pr:
davprops = nsdic['DAV:']
if st == 200:
for lsp in lspairs:
if lsp[1] in davprops:
if lsp[1] == 'resourcetype':
if davprops[lsp[1]] == 'collection':
lsline[lsp[0]] = 'dr-xr-x---'
else:
lsline[lsp[0]] = '-r-xr-x---'
else:
lsline[lsp[0]] = davprops[lsp[1]]
elif st in (404, 403):
for lsp in lspairs:
if lsp[1] in davprops:
lsline[lsp[0]] = lsp[2]
else:
log.debug("Strange status: %s", st)
res.append(lsline)
return res
def gd_get(self, path, crange=None, mime=None, compare=None):
""" HTTP GET for path, supporting Partial ranges
"""
hdrs = { 'Accept': mime or '*/*', }
if crange:
if isinstance(crange, tuple):
crange = [crange,]
if not isinstance(crange, list):
raise TypeError("Range must be a tuple or list of tuples")
rs = []
for r in crange:
rs.append('%d-%d' % r)
hdrs['Range'] = 'bytes='+ (','.join(rs))
s, m, d = self._http_request(self.davpath + path, method='GET', hdrs=hdrs)
assert s in (200, 206), "Bad status: %s" % s
ctype = m.getheader('Content-Type')
if ctype and ';' in ctype:
ctype = ctype.split(';',1)[0]
if mime:
assert ctype == mime, m.getheader('Content-Type')
rrange = None
rrh = m.getheader('Content-Range')
if rrh:
assert rrh.startswith('bytes '), rrh
rrh=rrh[6:].split('/',1)[0]
rrange = map(int, rrh.split('-',1))
if compare:
# we need to compare the returned data with that of compare
fd = open(compare, 'rb')
d2 = fd.read()
fd.close()
if crange:
if len(crange) > 1:
raise NotImplementedError
r = crange[0]
d2 = d2[r[0]:r[1]+1]
assert d2 == d, "Data does not match"
return ctype, rrange, d
def gd_put(self, path, body=None, srcpath=None, mime=None, noclobber=False, ):
""" HTTP PUT
@param noclobber will prevent overwritting a resource (If-None-Match)
@param mime will set the content-type
"""
hdrs = { }
if not (body or srcpath):
raise ValueError("PUT must have something to send")
if (not body) and srcpath:
fd = open(srcpath, 'rb')
body = fd.read()
fd.close()
if mime:
hdrs['Content-Type'] = mime
if noclobber:
hdrs['If-None-Match'] = '*'
s, m, d = self._http_request(self.davpath + path, method='PUT',
hdrs=hdrs, body=body)
assert s == (201), "Bad status: %s" % s
etag = m.getheader('ETag')
return etag or True
#eof

View File

@ -36,10 +36,13 @@ from service.websrv_lib import HTTPDir, FixSendError, HttpOptions
from BaseHTTPServer import BaseHTTPRequestHandler
import urlparse
import urllib
import re
from string import atoi,split
from DAV.errors import *
# from DAV.constants import DAV_VERSION_1, DAV_VERSION_2
khtml_re = re.compile(r' KHTML/([0-9\.]+) ')
def OpenDAVConfig(**kw):
class OpenDAV:
def __init__(self, **kw):
@ -131,6 +134,16 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
# the BufferingHttpServer will send Connection: close , while
# the BaseHTTPRequestHandler will only accept int code.
# workaround both of them.
if self.command == 'PROPFIND' and int(code) == 404:
kh = khtml_re.search(self.headers.get('User-Agent',''))
if kh and (kh.group(1) < '4.5'):
# There is an ugly bug in all khtml < 4.5.x, where the 404
# response is treated as an immediate error, which would even
# break the flow of a subsequent PUT request. At the same time,
# the 200 response (rather than 207 with content) is treated
# as "path not exist", so we send this instead
# https://bugs.kde.org/show_bug.cgi?id=166081
code = 200
BaseHTTPRequestHandler.send_response(self, int(code), message)
def send_header(self, key, value):
@ -216,7 +229,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
try:
location = dc.put(uri, body, ct)
except DAV_Error, (ec,dd):
self._logger.warning("Cannot PUT to %s: %s", uri, dd, exc_info=True)
self.log_error("Cannot PUT to %s: %s", uri, dd)
return self.send_status(ec)
headers = {}

View File

@ -12,7 +12,7 @@
<field name="domain">[]</field>
<field eval="1" name="resource_find_all"/>
<field eval="0" name="ressource_tree"/>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="company_id" ref="base.main_company"/>
<field name="storage_id" ref="document.storage_default"/>
<field name="type">directory</field>
@ -24,7 +24,7 @@
<field name="domain">[]</field>
<field eval="1" name="resource_find_all"/>
<field eval="0" name="ressource_tree"/>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="company_id" ref="base.main_company"/>
<field model="document.directory" name="parent_id" ref="document_directory_principals0"/>
<field name="storage_id" ref="document.storage_default"/>
@ -45,9 +45,10 @@
<field name="name">resources</field>
</record>
<record id="document_directory_uids1" model="document.directory">
<field name="domain">[]</field>
<field name="domain">[('id','=',uid)]</field>
<field eval="1" name="resource_find_all"/>
<field eval="0" name="ressource_tree"/>
<field name="user_id" eval="False"/>
<field name="resource_field" ref="base.field_res_users_login"/>
<field name="company_id" ref="base.main_company"/>
<field model="document.directory" name="parent_id" ref="document_directory_principals0"/>
@ -58,10 +59,10 @@
<field name="name">__uids__</field>
</record>
<record id="document_directory_users1" model="document.directory">
<field name="domain">[]</field>
<field name="domain">[('id','=',uid)]</field>
<field eval="1" name="resource_find_all"/>
<field eval="0" name="ressource_tree"/>
<field name="user_id" ref="base.user_root"/>
<field name="user_id" eval="False"/>
<field name="resource_field" ref="base.field_res_users_login"/>
<field name="company_id" ref="base.main_company"/>
<field model="document.directory" name="parent_id" ref="document_directory_principals0"/>