2010-04-12 10:52:00 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2009-11-26 13:54:00 +00:00
|
|
|
##############################################################################
|
|
|
|
#
|
2010-04-12 10:52:00 +00:00
|
|
|
# OpenERP, Open Source Management Solution
|
|
|
|
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
|
2009-11-26 13:54:00 +00:00
|
|
|
#
|
2010-04-12 10:52:00 +00:00
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
2009-11-26 13:54:00 +00:00
|
|
|
#
|
2010-04-12 10:52:00 +00:00
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU Affero General Public License for more details.
|
2009-11-26 13:54:00 +00:00
|
|
|
#
|
2010-04-12 10:52:00 +00:00
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
2009-11-26 13:54:00 +00:00
|
|
|
#
|
|
|
|
##############################################################################
|
|
|
|
import pooler
|
|
|
|
|
|
|
|
import base64
|
|
|
|
import sys
|
|
|
|
import os
|
|
|
|
import time
|
|
|
|
from string import joinfields, split, lower
|
|
|
|
|
|
|
|
from service import security
|
|
|
|
|
|
|
|
import netsvc
|
|
|
|
import urlparse
|
|
|
|
|
|
|
|
from DAV.constants import COLLECTION, OBJECT
|
|
|
|
from DAV.errors import *
|
|
|
|
from DAV.iface import *
|
|
|
|
import urllib
|
|
|
|
|
|
|
|
from DAV.davcmd import copyone, copytree, moveone, movetree, delone, deltree
|
2009-12-17 11:56:04 +00:00
|
|
|
from document.nodes import node_res_dir, node_res_obj
|
2009-11-26 13:54:00 +00:00
|
|
|
from cache import memoize
|
2009-12-17 11:56:04 +00:00
|
|
|
from tools import misc
|
2009-11-26 13:54:00 +00:00
|
|
|
CACHE_SIZE=20000
|
|
|
|
|
|
|
|
#hack for urlparse: add webdav in the net protocols
|
|
|
|
urlparse.uses_netloc.append('webdav')
|
|
|
|
urlparse.uses_netloc.append('webdavs')
|
|
|
|
|
2010-07-29 13:39:37 +00:00
|
|
|
day_names = { 0: 'Mon', 1: 'Tue' , 2: 'Wed', 3: 'Thu', 4: 'Fri', 5: 'Sat', 6: 'Sun' }
|
|
|
|
month_names = { 1: 'Jan', 2: 'Feb', 3: 'Mar', 4: 'Apr', 5: 'May', 6: 'Jun',
|
|
|
|
7: 'Jul', 8: 'Aug', 9: 'Sep', 10: 'Oct', 11: 'Nov', 12: 'Dec' }
|
|
|
|
|
2010-07-12 19:29:30 +00:00
|
|
|
class DAV_NotFound2(DAV_NotFound):
|
|
|
|
"""404 exception, that accepts our list uris
|
|
|
|
"""
|
|
|
|
def __init__(self, *args):
|
|
|
|
if len(args) and isinstance(args[0], (tuple, list)):
|
|
|
|
path = ''.join([ '/' + x for x in args[0]])
|
|
|
|
args = (path, )
|
|
|
|
DAV_NotFound.__init__(self, *args)
|
|
|
|
|
2010-07-29 13:39:37 +00:00
|
|
|
|
|
|
|
def _str2time(cre):
|
|
|
|
""" Convert a string with time representation (from db) into time (float)
|
|
|
|
"""
|
|
|
|
if not cre:
|
|
|
|
return time.time()
|
|
|
|
frac = 0.0
|
|
|
|
if isinstance(cre, basestring) and '.' in cre:
|
|
|
|
fdot = cre.find('.')
|
|
|
|
frac = float(cre[fdot:])
|
|
|
|
cre = cre[:fdot]
|
|
|
|
return time.mktime(time.strptime(cre,'%Y-%m-%d %H:%M:%S')) + frac
|
2010-07-29 13:39:38 +00:00
|
|
|
|
2010-04-12 10:52:00 +00:00
|
|
|
class openerp_dav_handler(dav_interface):
|
2009-12-15 14:31:05 +00:00
|
|
|
"""
|
2010-04-12 10:52:00 +00:00
|
|
|
This class models a OpenERP interface for the DAV server
|
2009-12-15 14:31:05 +00:00
|
|
|
"""
|
2010-04-06 11:11:32 +00:00
|
|
|
PROPS={'DAV:': dav_interface.PROPS['DAV:'],}
|
2009-11-26 13:54:00 +00:00
|
|
|
|
2010-04-06 11:11:32 +00:00
|
|
|
M_NS={ "DAV:" : dav_interface.M_NS['DAV:'],}
|
2009-11-26 13:54:00 +00:00
|
|
|
|
2010-08-19 11:51:57 +00:00
|
|
|
def __init__(self, parent, verbose=False):
|
2009-12-15 14:31:05 +00:00
|
|
|
self.db_name_list=[]
|
|
|
|
self.parent = parent
|
|
|
|
self.baseuri = parent.baseuri
|
2010-04-06 11:11:32 +00:00
|
|
|
self.verbose = verbose
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def get_propnames(self, uri):
|
2010-08-19 11:51:57 +00:00
|
|
|
props = self.PROPS
|
2010-04-14 07:36:16 +00:00
|
|
|
self.parent.log_message('get propnames: %s' % uri)
|
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
|
|
|
if not dbname:
|
2010-04-14 11:49:40 +00:00
|
|
|
if cr: cr.close()
|
2010-06-23 10:54:37 +00:00
|
|
|
# TODO: maybe limit props for databases..?
|
2010-04-14 07:36:16 +00:00
|
|
|
return props
|
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
|
|
|
if node:
|
2010-07-29 13:39:37 +00:00
|
|
|
props = props.copy()
|
2010-04-14 07:36:16 +00:00
|
|
|
props.update(node.get_dav_props(cr))
|
2010-08-19 11:51:57 +00:00
|
|
|
cr.close()
|
2010-04-06 11:11:32 +00:00
|
|
|
return props
|
|
|
|
|
2010-07-29 13:39:38 +00:00
|
|
|
def _try_function(self, funct, args, opname='run function', cr=None,
|
|
|
|
default_exc=DAV_Forbidden):
|
2010-07-12 19:29:30 +00:00
|
|
|
""" Try to run a function, and properly convert exceptions to DAV ones.
|
2010-07-29 13:39:38 +00:00
|
|
|
|
2010-07-12 19:29:30 +00:00
|
|
|
@objname the name of the operation being performed
|
|
|
|
@param cr if given, the cursor to close at exceptions
|
|
|
|
"""
|
2010-07-29 13:39:38 +00:00
|
|
|
|
2010-07-12 19:29:30 +00:00
|
|
|
try:
|
2010-07-29 13:39:38 +00:00
|
|
|
return funct(*args)
|
2010-07-12 19:29:30 +00:00
|
|
|
except DAV_Error:
|
|
|
|
if cr: cr.close()
|
|
|
|
raise
|
|
|
|
except NotImplementedError, e:
|
|
|
|
if cr: cr.close()
|
|
|
|
import traceback
|
|
|
|
self.parent.log_error("Cannot %s: %s", opname, str(e))
|
|
|
|
self.parent.log_message("Exc: %s",traceback.format_exc())
|
2010-07-13 10:29:46 +00:00
|
|
|
# see par 9.3.1 of rfc
|
|
|
|
raise DAV_Error(403, str(e) or 'Not supported at this path')
|
2010-07-12 19:29:30 +00:00
|
|
|
except EnvironmentError, err:
|
|
|
|
if cr: cr.close()
|
|
|
|
import traceback
|
2010-07-17 05:13:00 +00:00
|
|
|
self.parent.log_error("Cannot %s: %s", opname, err.strerror)
|
2010-07-12 19:29:30 +00:00
|
|
|
self.parent.log_message("Exc: %s",traceback.format_exc())
|
|
|
|
raise default_exc(err.strerror)
|
|
|
|
except Exception,e:
|
|
|
|
import traceback
|
2010-08-12 16:21:20 +00:00
|
|
|
if cr: cr.close()
|
2010-07-29 13:39:38 +00:00
|
|
|
self.parent.log_error("Cannot %s: %s", opname, str(e))
|
2010-07-12 19:29:30 +00:00
|
|
|
self.parent.log_message("Exc: %s",traceback.format_exc())
|
|
|
|
raise default_exc("Operation failed")
|
|
|
|
|
2010-04-06 11:11:32 +00:00
|
|
|
def _get_dav_lockdiscovery(self, uri):
|
|
|
|
raise DAV_NotFound
|
|
|
|
|
|
|
|
def _get_dav_supportedlock(self, uri):
|
|
|
|
raise DAV_NotFound
|
2010-04-14 07:36:16 +00:00
|
|
|
|
2010-08-19 11:51:57 +00:00
|
|
|
def match_prop(self, uri, match, ns, propname):
|
2010-07-29 13:39:38 +00:00
|
|
|
if self.M_NS.has_key(ns):
|
|
|
|
return match == dav_interface.get_prop(self, uri, ns, propname)
|
2010-04-21 13:34:02 +00:00
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
|
|
|
if not dbname:
|
|
|
|
if cr: cr.close()
|
|
|
|
raise DAV_NotFound
|
2010-08-19 11:51:57 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2010-04-21 13:34:02 +00:00
|
|
|
if not node:
|
|
|
|
cr.close()
|
|
|
|
raise DAV_NotFound
|
2010-08-19 11:51:57 +00:00
|
|
|
res = node.match_dav_eprop(cr, match, ns, propname)
|
|
|
|
cr.close()
|
|
|
|
return res
|
2010-04-21 13:34:02 +00:00
|
|
|
|
2010-07-29 13:39:36 +00:00
|
|
|
def prep_http_options(self, uri, opts):
|
|
|
|
"""see HttpOptions._prep_OPTIONS """
|
|
|
|
self.parent.log_message('get options: %s' % uri)
|
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri, allow_last=True)
|
|
|
|
|
|
|
|
if not dbname:
|
|
|
|
if cr: cr.close()
|
|
|
|
return opts
|
|
|
|
node = self.uri2object(cr, uid, pool, uri2[:])
|
|
|
|
|
|
|
|
if not node:
|
|
|
|
if cr: cr.close()
|
|
|
|
return opts
|
|
|
|
else:
|
|
|
|
if hasattr(node, 'http_options'):
|
|
|
|
ret = opts.copy()
|
|
|
|
for key, val in node.http_options.items():
|
|
|
|
if isinstance(val, basestring):
|
|
|
|
val = [val, ]
|
2010-07-29 13:39:37 +00:00
|
|
|
if key in ret:
|
|
|
|
ret[key] = ret[key][:] # copy the orig. array
|
|
|
|
else:
|
|
|
|
ret[key] = []
|
|
|
|
ret[key].extend(val)
|
2010-07-29 13:39:38 +00:00
|
|
|
|
2010-07-29 13:39:36 +00:00
|
|
|
self.parent.log_message('options: %s' % ret)
|
|
|
|
else:
|
|
|
|
ret = opts
|
2010-07-29 13:39:38 +00:00
|
|
|
cr.close()
|
2010-07-29 13:39:36 +00:00
|
|
|
return ret
|
|
|
|
|
2010-04-14 07:36:16 +00:00
|
|
|
def get_prop(self, uri, ns, propname):
|
2010-04-06 11:11:32 +00:00
|
|
|
""" return the value of a given property
|
|
|
|
|
|
|
|
uri -- uri of the object to get the property of
|
|
|
|
ns -- namespace of the property
|
|
|
|
pname -- name of the property
|
2010-08-19 11:51:57 +00:00
|
|
|
"""
|
2010-04-06 11:11:32 +00:00
|
|
|
if self.M_NS.has_key(ns):
|
2010-10-12 10:38:30 +00:00
|
|
|
try:
|
|
|
|
# if it's not in the interface class, a "DAV:" property
|
|
|
|
# may be at the node class. So shouldn't give up early.
|
|
|
|
return dav_interface.get_prop(self, uri, ns, propname)
|
|
|
|
except DAV_NotFound:
|
|
|
|
pass
|
2010-04-14 07:36:16 +00:00
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
|
|
|
if not dbname:
|
2010-04-14 11:49:40 +00:00
|
|
|
if cr: cr.close()
|
2010-04-14 07:36:16 +00:00
|
|
|
raise DAV_NotFound
|
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
|
|
|
if not node:
|
|
|
|
cr.close()
|
|
|
|
raise DAV_NotFound
|
2010-06-23 10:54:37 +00:00
|
|
|
res = node.get_dav_eprop(cr, ns, propname)
|
2010-08-19 11:51:57 +00:00
|
|
|
cr.close()
|
|
|
|
return res
|
2010-04-06 11:11:32 +00:00
|
|
|
|
2010-06-23 10:54:37 +00:00
|
|
|
def get_db(self, uri, rest_ret=False, allow_last=False):
|
|
|
|
"""Parse the uri and get the dbname and the rest.
|
|
|
|
Db name should be the first component in the unix-like
|
|
|
|
path supplied in uri.
|
2010-07-29 13:39:38 +00:00
|
|
|
|
2010-06-23 10:54:37 +00:00
|
|
|
@param rest_ret Instead of the db_name, return (db_name, rest),
|
|
|
|
where rest is the remaining path
|
|
|
|
@param allow_last If the dbname is the last component in the
|
|
|
|
path, allow it to be resolved. The default False value means
|
|
|
|
we will not attempt to use the db, unless there is more
|
|
|
|
path.
|
2010-07-29 13:39:38 +00:00
|
|
|
|
2010-06-23 10:54:37 +00:00
|
|
|
@return db_name or (dbname, rest) depending on rest_ret,
|
|
|
|
will return dbname=False when component is not found.
|
|
|
|
"""
|
2010-07-29 13:39:38 +00:00
|
|
|
|
2010-06-23 10:54:37 +00:00
|
|
|
uri2 = self.uri2local(uri)
|
|
|
|
if uri2.startswith('/'):
|
|
|
|
uri2 = uri2[1:]
|
|
|
|
names=uri2.split('/',1)
|
|
|
|
db_name=False
|
|
|
|
rest = None
|
|
|
|
if allow_last:
|
|
|
|
ll = 0
|
|
|
|
else:
|
|
|
|
ll = 1
|
|
|
|
if len(names) > ll and names[0]:
|
|
|
|
db_name = names[0]
|
|
|
|
names = names[1:]
|
|
|
|
|
|
|
|
if rest_ret:
|
|
|
|
if len(names):
|
|
|
|
rest = names[0]
|
|
|
|
return db_name, rest
|
|
|
|
return db_name
|
|
|
|
|
2009-12-15 14:31:05 +00:00
|
|
|
|
|
|
|
def urijoin(self,*ajoin):
|
|
|
|
""" Return the base URI of this request, or even join it with the
|
|
|
|
ajoin path elements
|
|
|
|
"""
|
|
|
|
return self.baseuri+ '/'.join(ajoin)
|
|
|
|
|
|
|
|
@memoize(4)
|
|
|
|
def db_list(self):
|
2010-01-27 05:07:04 +00:00
|
|
|
s = netsvc.ExportService.getService('db')
|
2010-04-06 11:11:32 +00:00
|
|
|
result = s.exp_list()
|
2009-12-15 14:31:05 +00:00
|
|
|
self.db_name_list=[]
|
|
|
|
for db_name in result:
|
2010-06-23 10:49:56 +00:00
|
|
|
cr = None
|
|
|
|
try:
|
|
|
|
db = pooler.get_db_only(db_name)
|
|
|
|
cr = db.cursor()
|
|
|
|
cr.execute("SELECT id FROM ir_module_module WHERE name = 'document' AND state='installed' ")
|
|
|
|
res=cr.fetchone()
|
|
|
|
if res and len(res):
|
|
|
|
self.db_name_list.append(db_name)
|
|
|
|
except Exception, e:
|
|
|
|
self.parent.log_error("Exception in db list: %s" % e)
|
|
|
|
finally:
|
|
|
|
if cr:
|
|
|
|
cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
return self.db_name_list
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def get_childs(self, uri, filters=None):
|
2010-08-19 11:51:57 +00:00
|
|
|
""" return the child objects as self.baseuris for the given URI """
|
2009-12-15 14:31:05 +00:00
|
|
|
self.parent.log_message('get childs: %s' % uri)
|
2010-06-23 10:54:37 +00:00
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri, allow_last=True)
|
2010-08-19 11:51:57 +00:00
|
|
|
|
|
|
|
if not dbname:
|
2010-04-06 11:11:32 +00:00
|
|
|
if cr: cr.close()
|
2010-08-19 11:51:57 +00:00
|
|
|
res = map(lambda x: self.urijoin(x), self.db_list())
|
2010-04-06 11:11:32 +00:00
|
|
|
return res
|
2009-12-15 14:31:05 +00:00
|
|
|
result = []
|
2010-06-16 11:51:39 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2[:])
|
2010-08-19 11:51:57 +00:00
|
|
|
|
2009-12-15 14:31:05 +00:00
|
|
|
if not node:
|
2010-04-06 11:11:32 +00:00
|
|
|
if cr: cr.close()
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
else:
|
|
|
|
fp = node.full_path()
|
|
|
|
if fp and len(fp):
|
|
|
|
self.parent.log_message('childs: @%s' % fp)
|
|
|
|
fp = '/'.join(fp)
|
|
|
|
else:
|
|
|
|
fp = None
|
2010-04-15 07:58:09 +00:00
|
|
|
domain = None
|
|
|
|
if filters:
|
|
|
|
domain = node.get_domain(cr, filters)
|
|
|
|
for d in node.children(cr, domain):
|
2010-08-19 11:51:57 +00:00
|
|
|
self.parent.log_message('child: %s' % d.path)
|
2009-12-15 14:31:05 +00:00
|
|
|
if fp:
|
|
|
|
result.append( self.urijoin(dbname,fp,d.path) )
|
|
|
|
else:
|
|
|
|
result.append( self.urijoin(dbname,d.path) )
|
2010-08-19 11:51:57 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
return result
|
|
|
|
|
|
|
|
def uri2local(self, uri):
|
|
|
|
uparts=urlparse.urlparse(uri)
|
|
|
|
reluri=uparts[2]
|
|
|
|
if reluri and reluri[-1]=="/":
|
|
|
|
reluri=reluri[:-1]
|
|
|
|
return reluri
|
|
|
|
|
|
|
|
#
|
|
|
|
# pos: -1 to get the parent of the uri
|
|
|
|
#
|
2010-06-23 10:54:37 +00:00
|
|
|
def get_cr(self, uri, allow_last=False):
|
|
|
|
""" Split the uri, grab a cursor for that db
|
|
|
|
"""
|
2009-12-15 14:31:05 +00:00
|
|
|
pdb = self.parent.auth_proxy.last_auth
|
2010-06-23 10:54:37 +00:00
|
|
|
dbname, uri2 = self.get_db(uri, rest_ret=True, allow_last=allow_last)
|
|
|
|
uri2 = (uri2 and uri2.split('/')) or []
|
2009-12-15 14:31:05 +00:00
|
|
|
if not dbname:
|
2010-06-23 10:54:37 +00:00
|
|
|
return None, None, None, False, uri2
|
|
|
|
# if dbname was in our uri, we should have authenticated
|
|
|
|
# against that.
|
|
|
|
assert pdb == dbname, " %s != %s" %(pdb, dbname)
|
2010-04-06 11:11:32 +00:00
|
|
|
res = self.parent.auth_proxy.auth_creds.get(dbname, False)
|
2010-06-23 10:54:37 +00:00
|
|
|
if not res:
|
2010-04-06 11:11:32 +00:00
|
|
|
self.parent.auth_proxy.checkRequest(self.parent, uri, dbname)
|
2010-06-23 10:54:37 +00:00
|
|
|
res = self.parent.auth_proxy.auth_creds[dbname]
|
2010-04-06 11:11:32 +00:00
|
|
|
user, passwd, dbn2, uid = res
|
2009-12-15 14:31:05 +00:00
|
|
|
db,pool = pooler.get_db_and_pool(dbname)
|
|
|
|
cr = db.cursor()
|
|
|
|
return cr, uid, pool, dbname, uri2
|
|
|
|
|
2010-06-23 10:54:37 +00:00
|
|
|
def uri2object(self, cr, uid, pool, uri):
|
2009-12-15 14:31:05 +00:00
|
|
|
if not uid:
|
|
|
|
return None
|
|
|
|
return pool.get('document.directory').get_object(cr, uid, uri)
|
|
|
|
|
2010-06-22 15:03:39 +00:00
|
|
|
def get_data(self,uri, rrange=None):
|
2009-12-15 14:31:05 +00:00
|
|
|
self.parent.log_message('GET: %s' % uri)
|
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
|
|
|
try:
|
|
|
|
if not dbname:
|
|
|
|
raise DAV_Error, 409
|
2010-07-29 13:39:38 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
if not node:
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
try:
|
2010-06-22 15:03:39 +00:00
|
|
|
if rrange:
|
|
|
|
self.parent.log_error("Doc get_data cannot use range")
|
|
|
|
raise DAV_Error(409)
|
2009-12-15 14:31:05 +00:00
|
|
|
datas = node.get_data(cr)
|
|
|
|
except TypeError,e:
|
2010-07-13 10:29:46 +00:00
|
|
|
# for the collections that return this error, the DAV standard
|
|
|
|
# says we'd better just return 200 OK with empty data
|
|
|
|
return ''
|
2009-12-15 14:31:05 +00:00
|
|
|
except IndexError,e :
|
|
|
|
self.parent.log_error("GET IndexError: %s", str(e))
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
except Exception,e:
|
|
|
|
import traceback
|
|
|
|
self.parent.log_error("GET exception: %s",str(e))
|
|
|
|
self.parent.log_message("Exc: %s", traceback.format_exc())
|
|
|
|
raise DAV_Error, 409
|
2010-07-12 19:32:36 +00:00
|
|
|
return str(datas) # FIXME!
|
2009-12-15 14:31:05 +00:00
|
|
|
finally:
|
2010-08-19 11:51:57 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
|
|
|
|
@memoize(CACHE_SIZE)
|
2010-08-01 08:37:22 +00:00
|
|
|
def _get_dav_resourcetype(self, uri):
|
2010-08-19 11:51:57 +00:00
|
|
|
""" return type of object """
|
2009-12-15 14:31:05 +00:00
|
|
|
self.parent.log_message('get RT: %s' % uri)
|
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
|
|
|
try:
|
|
|
|
if not dbname:
|
|
|
|
return COLLECTION
|
2010-08-01 08:37:22 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
if not node:
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2010-08-01 08:37:22 +00:00
|
|
|
try:
|
|
|
|
return node.get_dav_resourcetype(cr)
|
|
|
|
except NotImplementedError:
|
|
|
|
if node.type in ('collection','database'):
|
|
|
|
return ('collection', 'DAV:')
|
|
|
|
return ''
|
2009-12-15 14:31:05 +00:00
|
|
|
finally:
|
2010-04-06 11:11:32 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
|
|
|
|
def _get_dav_displayname(self,uri):
|
|
|
|
self.parent.log_message('get DN: %s' % uri)
|
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
|
|
|
if not dbname:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2010-08-18 06:00:48 +00:00
|
|
|
# at root, dbname, just return the last component
|
|
|
|
# of the path.
|
|
|
|
if uri2 and len(uri2) < 2:
|
|
|
|
return uri2[-1]
|
|
|
|
return ''
|
2010-06-16 11:51:39 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
if not node:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
cr.close()
|
|
|
|
return node.displayname
|
|
|
|
|
|
|
|
@memoize(CACHE_SIZE)
|
2010-06-16 11:51:39 +00:00
|
|
|
def _get_dav_getcontentlength(self, uri):
|
2010-04-06 11:11:32 +00:00
|
|
|
""" return the content length of an object """
|
2009-12-15 14:31:05 +00:00
|
|
|
self.parent.log_message('get length: %s' % uri)
|
|
|
|
result = 0
|
2010-04-06 11:11:32 +00:00
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
2009-12-15 14:31:05 +00:00
|
|
|
if not dbname:
|
2010-04-06 11:11:32 +00:00
|
|
|
if cr: cr.close()
|
|
|
|
return str(result)
|
2009-12-15 14:31:05 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
|
|
|
if not node:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
result = node.content_length or 0
|
|
|
|
cr.close()
|
|
|
|
return str(result)
|
|
|
|
|
|
|
|
@memoize(CACHE_SIZE)
|
|
|
|
def _get_dav_getetag(self,uri):
|
|
|
|
""" return the ETag of an object """
|
|
|
|
self.parent.log_message('get etag: %s' % uri)
|
|
|
|
result = 0
|
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
|
|
|
if not dbname:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
return '0'
|
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
|
|
|
if not node:
|
|
|
|
cr.close()
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2010-07-29 13:39:38 +00:00
|
|
|
result = self._try_function(node.get_etag ,(cr,), "etag %s" %uri, cr=cr)
|
2009-12-15 14:31:05 +00:00
|
|
|
cr.close()
|
|
|
|
return str(result)
|
|
|
|
|
|
|
|
@memoize(CACHE_SIZE)
|
2010-06-16 11:51:39 +00:00
|
|
|
def get_lastmodified(self, uri):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" return the last modified date of the object """
|
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
2010-04-06 11:11:32 +00:00
|
|
|
if not dbname:
|
2010-07-29 13:39:37 +00:00
|
|
|
return time.time()
|
2010-04-06 11:11:32 +00:00
|
|
|
try:
|
2010-06-16 11:51:39 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
if not node:
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2010-07-29 13:39:37 +00:00
|
|
|
return _str2time(node.write_date)
|
2009-12-15 14:31:05 +00:00
|
|
|
finally:
|
2010-04-06 11:11:32 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
|
2010-07-29 13:39:37 +00:00
|
|
|
def _get_dav_getlastmodified(self,uri):
|
2010-07-29 13:39:38 +00:00
|
|
|
""" return the last modified date of a resource
|
2010-07-29 13:39:37 +00:00
|
|
|
"""
|
|
|
|
d=self.get_lastmodified(uri)
|
|
|
|
# format it. Note that we explicitly set the day, month names from
|
|
|
|
# an array, so that strftime() doesn't use its own locale-aware
|
|
|
|
# strings.
|
|
|
|
gmt = time.gmtime(d)
|
|
|
|
return time.strftime("%%s, %d %%s %Y %H:%M:%S GMT", gmt ) % \
|
|
|
|
(day_names[gmt.tm_wday], month_names[gmt.tm_mon])
|
|
|
|
|
2009-12-15 14:31:05 +00:00
|
|
|
@memoize(CACHE_SIZE)
|
2010-06-16 11:51:39 +00:00
|
|
|
def get_creationdate(self, uri):
|
2010-04-06 11:11:32 +00:00
|
|
|
""" return the last modified date of the object """
|
2009-12-15 14:31:05 +00:00
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
2010-04-06 11:11:32 +00:00
|
|
|
if not dbname:
|
|
|
|
raise DAV_Error, 409
|
|
|
|
try:
|
2010-06-16 11:51:39 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
if not node:
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2010-07-29 13:39:38 +00:00
|
|
|
|
2010-07-29 13:39:37 +00:00
|
|
|
return _str2time(node.create_date)
|
2009-12-15 14:31:05 +00:00
|
|
|
finally:
|
2010-04-06 11:11:32 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
|
|
|
|
@memoize(CACHE_SIZE)
|
|
|
|
def _get_dav_getcontenttype(self,uri):
|
|
|
|
self.parent.log_message('get contenttype: %s' % uri)
|
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
2010-04-06 11:11:32 +00:00
|
|
|
if not dbname:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2010-04-06 11:11:32 +00:00
|
|
|
return 'httpd/unix-directory'
|
|
|
|
try:
|
2010-06-16 11:51:39 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
if not node:
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_NotFound2(uri2)
|
2010-06-23 14:07:20 +00:00
|
|
|
result = str(node.mimetype)
|
2009-12-15 14:31:05 +00:00
|
|
|
return result
|
|
|
|
#raise DAV_NotFound, 'Could not find %s' % path
|
|
|
|
finally:
|
2010-04-06 11:11:32 +00:00
|
|
|
if cr: cr.close()
|
|
|
|
|
2009-12-15 14:31:05 +00:00
|
|
|
def mkcol(self,uri):
|
2010-07-12 19:32:36 +00:00
|
|
|
""" create a new collection
|
|
|
|
see par. 9.3 of rfc4918
|
|
|
|
"""
|
2009-12-15 14:31:05 +00:00
|
|
|
self.parent.log_message('MKCOL: %s' % uri)
|
2010-07-12 19:32:36 +00:00
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
|
|
|
if not uri2[-1]:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2010-07-12 19:29:30 +00:00
|
|
|
raise DAV_Error(409, "Cannot create nameless collection")
|
2009-12-15 14:31:05 +00:00
|
|
|
if not dbname:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
raise DAV_Error, 409
|
|
|
|
node = self.uri2object(cr,uid,pool, uri2[:-1])
|
2010-07-12 19:32:36 +00:00
|
|
|
if not node:
|
|
|
|
cr.close()
|
|
|
|
raise DAV_Error(409, "Parent path %s does not exist" % uri2[:-1])
|
|
|
|
nc = node.child(cr, uri2[-1])
|
|
|
|
if nc:
|
|
|
|
cr.close()
|
|
|
|
raise DAV_Error(405, "Path already exists")
|
2010-07-13 10:29:46 +00:00
|
|
|
self._try_function(node.create_child_collection, (cr, uri2[-1]),
|
|
|
|
"create col %s" % uri2[-1], cr=cr)
|
2010-07-12 19:32:36 +00:00
|
|
|
cr.commit()
|
2009-12-15 14:31:05 +00:00
|
|
|
cr.close()
|
|
|
|
return True
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def put(self, uri, data, content_type=None):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" put the object into the filesystem """
|
2010-04-06 11:11:32 +00:00
|
|
|
self.parent.log_message('Putting %s (%d), %s'%( misc.ustr(uri), data and len(data) or 0, content_type))
|
2009-12-15 14:31:05 +00:00
|
|
|
cr, uid, pool,dbname, uri2 = self.get_cr(uri)
|
|
|
|
if not dbname:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
raise DAV_Forbidden
|
|
|
|
try:
|
2010-06-16 11:51:39 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2[:])
|
2010-07-29 13:39:38 +00:00
|
|
|
except Exception:
|
2009-12-15 14:31:05 +00:00
|
|
|
node = False
|
2010-04-22 12:50:41 +00:00
|
|
|
|
2009-12-15 14:31:05 +00:00
|
|
|
objname = uri2[-1]
|
|
|
|
ext = objname.find('.') >0 and objname.split('.')[1] or False
|
|
|
|
|
2010-08-12 11:10:23 +00:00
|
|
|
ret = None
|
2009-12-15 14:31:05 +00:00
|
|
|
if not node:
|
2010-06-16 11:51:39 +00:00
|
|
|
dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
|
2009-12-15 14:31:05 +00:00
|
|
|
if not dir_node:
|
2010-07-12 19:29:30 +00:00
|
|
|
cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
raise DAV_NotFound('Parent folder not found')
|
2010-07-12 19:29:30 +00:00
|
|
|
|
2010-08-12 11:10:23 +00:00
|
|
|
newchild = self._try_function(dir_node.create_child, (cr, objname, data),
|
2010-07-12 19:29:30 +00:00
|
|
|
"create %s" % objname, cr=cr)
|
2010-08-12 11:10:23 +00:00
|
|
|
if not newchild:
|
|
|
|
cr.commit()
|
|
|
|
cr.close()
|
|
|
|
raise DAV_Error(400, "Failed to create resource")
|
|
|
|
|
|
|
|
uparts=urlparse.urlparse(uri)
|
|
|
|
fileloc = '/'.join(newchild.full_path())
|
|
|
|
if isinstance(fileloc, unicode):
|
|
|
|
fileloc = fileloc.encode('utf-8')
|
|
|
|
# the uri we get is a mangled one, where the davpath has been removed
|
|
|
|
davpath = self.parent.get_davpath()
|
|
|
|
|
|
|
|
surl = '%s://%s' % (uparts[0], uparts[1])
|
|
|
|
uloc = urllib.quote(fileloc)
|
|
|
|
hurl = False
|
|
|
|
if uri != ('/'+uloc) and uri != (surl + '/' + uloc):
|
|
|
|
hurl = '%s%s/%s/%s' %(surl, davpath, dbname, uloc)
|
|
|
|
etag = False
|
|
|
|
try:
|
|
|
|
etag = str(newchild.get_etag(cr))
|
|
|
|
except Exception, e:
|
|
|
|
self.parent.log_error("Cannot get etag for node: %s" % e)
|
|
|
|
ret = (hurl, etag)
|
2009-12-15 14:31:05 +00:00
|
|
|
else:
|
2010-07-12 19:29:30 +00:00
|
|
|
self._try_function(node.set_data, (cr, data), "save %s" % objname, cr=cr)
|
2009-12-15 14:31:05 +00:00
|
|
|
|
|
|
|
cr.commit()
|
2009-12-17 11:56:04 +00:00
|
|
|
cr.close()
|
2010-08-12 11:10:23 +00:00
|
|
|
return ret
|
2009-12-15 14:31:05 +00:00
|
|
|
|
|
|
|
def rmcol(self,uri):
|
|
|
|
""" delete a collection """
|
2009-12-17 11:56:04 +00:00
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
2010-06-23 10:54:37 +00:00
|
|
|
if not dbname:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
raise DAV_Error, 409
|
2010-07-29 13:39:38 +00:00
|
|
|
|
2009-12-17 11:56:04 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2010-07-29 13:39:38 +00:00
|
|
|
self._try_function(node.rmcol, (cr,), "rmcol %s" % uri, cr=cr)
|
2009-12-15 14:31:05 +00:00
|
|
|
|
|
|
|
cr.commit()
|
|
|
|
cr.close()
|
|
|
|
return 204
|
|
|
|
|
|
|
|
def rm(self,uri):
|
|
|
|
cr, uid, pool,dbname, uri2 = self.get_cr(uri)
|
2009-12-17 11:56:04 +00:00
|
|
|
if not dbname:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
raise DAV_Error, 409
|
2010-06-16 11:51:39 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2010-07-29 13:39:38 +00:00
|
|
|
res = self._try_function(node.rm, (cr,), "rm %s" % uri, cr=cr)
|
2010-04-23 12:05:44 +00:00
|
|
|
if not res:
|
2010-07-29 13:39:38 +00:00
|
|
|
if cr: cr.close()
|
2010-04-23 12:05:44 +00:00
|
|
|
raise OSError(1, 'Operation not permited.')
|
2009-12-15 14:31:05 +00:00
|
|
|
cr.commit()
|
|
|
|
cr.close()
|
|
|
|
return 204
|
|
|
|
|
|
|
|
### DELETE handlers (examples)
|
|
|
|
### (we use the predefined methods in davcmd instead of doing
|
|
|
|
### a rm directly
|
|
|
|
###
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def delone(self, uri):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" delete a single resource
|
|
|
|
|
|
|
|
You have to return a result dict of the form
|
|
|
|
uri:error_code
|
|
|
|
or None if everything's ok
|
|
|
|
|
|
|
|
"""
|
|
|
|
if uri[-1]=='/':uri=uri[:-1]
|
|
|
|
res=delone(self,uri)
|
|
|
|
parent='/'.join(uri.split('/')[:-1])
|
|
|
|
return res
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def deltree(self, uri):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" delete a collection
|
|
|
|
|
|
|
|
You have to return a result dict of the form
|
|
|
|
uri:error_code
|
|
|
|
or None if everything's ok
|
|
|
|
"""
|
|
|
|
if uri[-1]=='/':uri=uri[:-1]
|
2010-06-16 11:51:39 +00:00
|
|
|
res=deltree(self, uri)
|
2009-12-15 14:31:05 +00:00
|
|
|
parent='/'.join(uri.split('/')[:-1])
|
|
|
|
return res
|
|
|
|
|
|
|
|
|
|
|
|
###
|
|
|
|
### MOVE handlers (examples)
|
|
|
|
###
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def moveone(self, src, dst, overwrite):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" move one resource with Depth=0
|
|
|
|
|
|
|
|
an alternative implementation would be
|
|
|
|
|
|
|
|
result_code=201
|
|
|
|
if overwrite:
|
|
|
|
result_code=204
|
|
|
|
r=os.system("rm -f '%s'" %dst)
|
|
|
|
if r: return 412
|
|
|
|
r=os.system("mv '%s' '%s'" %(src,dst))
|
|
|
|
if r: return 412
|
|
|
|
return result_code
|
|
|
|
|
|
|
|
(untested!). This would not use the davcmd functions
|
|
|
|
and thus can only detect errors directly on the root node.
|
|
|
|
"""
|
2010-06-16 11:51:39 +00:00
|
|
|
res=moveone(self, src, dst, overwrite)
|
2009-12-15 14:31:05 +00:00
|
|
|
return res
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def movetree(self, src, dst, overwrite):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" move a collection with Depth=infinity
|
|
|
|
|
|
|
|
an alternative implementation would be
|
|
|
|
|
|
|
|
result_code=201
|
|
|
|
if overwrite:
|
|
|
|
result_code=204
|
|
|
|
r=os.system("rm -rf '%s'" %dst)
|
|
|
|
if r: return 412
|
|
|
|
r=os.system("mv '%s' '%s'" %(src,dst))
|
|
|
|
if r: return 412
|
|
|
|
return result_code
|
|
|
|
|
|
|
|
(untested!). This would not use the davcmd functions
|
|
|
|
and thus can only detect errors directly on the root node"""
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
res=movetree(self, src, dst, overwrite)
|
2009-12-15 14:31:05 +00:00
|
|
|
return res
|
|
|
|
|
|
|
|
###
|
|
|
|
### COPY handlers
|
|
|
|
###
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def copyone(self, src, dst, overwrite):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" copy one resource with Depth=0
|
|
|
|
|
|
|
|
an alternative implementation would be
|
|
|
|
|
|
|
|
result_code=201
|
|
|
|
if overwrite:
|
|
|
|
result_code=204
|
|
|
|
r=os.system("rm -f '%s'" %dst)
|
|
|
|
if r: return 412
|
|
|
|
r=os.system("cp '%s' '%s'" %(src,dst))
|
|
|
|
if r: return 412
|
|
|
|
return result_code
|
|
|
|
|
|
|
|
(untested!). This would not use the davcmd functions
|
|
|
|
and thus can only detect errors directly on the root node.
|
|
|
|
"""
|
2010-06-16 11:51:39 +00:00
|
|
|
res=copyone(self, src, dst, overwrite)
|
2009-12-15 14:31:05 +00:00
|
|
|
return res
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def copytree(self, src, dst, overwrite):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" copy a collection with Depth=infinity
|
|
|
|
|
|
|
|
an alternative implementation would be
|
|
|
|
|
|
|
|
result_code=201
|
|
|
|
if overwrite:
|
|
|
|
result_code=204
|
|
|
|
r=os.system("rm -rf '%s'" %dst)
|
|
|
|
if r: return 412
|
|
|
|
r=os.system("cp -r '%s' '%s'" %(src,dst))
|
|
|
|
if r: return 412
|
|
|
|
return result_code
|
|
|
|
|
|
|
|
(untested!). This would not use the davcmd functions
|
|
|
|
and thus can only detect errors directly on the root node"""
|
2010-06-16 11:51:39 +00:00
|
|
|
res=copytree(self, src, dst, overwrite)
|
2009-12-15 14:31:05 +00:00
|
|
|
return res
|
|
|
|
|
|
|
|
###
|
|
|
|
### copy methods.
|
|
|
|
### This methods actually copy something. low-level
|
|
|
|
### They are called by the davcmd utility functions
|
|
|
|
### copytree and copyone (not the above!)
|
|
|
|
### Look in davcmd.py for further details.
|
|
|
|
###
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def copy(self, src, dst):
|
2009-12-15 14:31:05 +00:00
|
|
|
src=urllib.unquote(src)
|
|
|
|
dst=urllib.unquote(dst)
|
|
|
|
ct = self._get_dav_getcontenttype(src)
|
|
|
|
data = self.get_data(src)
|
2010-06-16 11:51:39 +00:00
|
|
|
self.put(dst, data, ct)
|
2009-12-15 14:31:05 +00:00
|
|
|
return 201
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def copycol(self, src, dst):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" copy a collection.
|
|
|
|
|
|
|
|
As this is not recursive (the davserver recurses itself)
|
|
|
|
we will only create a new directory here. For some more
|
|
|
|
advanced systems we might also have to copy properties from
|
|
|
|
the source to the destination.
|
|
|
|
"""
|
|
|
|
return self.mkcol(dst)
|
|
|
|
|
|
|
|
|
2010-06-16 11:51:39 +00:00
|
|
|
def exists(self, uri):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" test if a resource exists """
|
|
|
|
result = False
|
|
|
|
cr, uid, pool,dbname, uri2 = self.get_cr(uri)
|
|
|
|
if not dbname:
|
2010-04-06 11:11:32 +00:00
|
|
|
if cr: cr.close()
|
2009-12-15 14:31:05 +00:00
|
|
|
return True
|
|
|
|
try:
|
2010-06-16 11:51:39 +00:00
|
|
|
node = self.uri2object(cr, uid, pool, uri2)
|
2009-12-15 14:31:05 +00:00
|
|
|
if node:
|
|
|
|
result = True
|
2010-07-29 13:39:38 +00:00
|
|
|
except Exception:
|
2009-12-15 14:31:05 +00:00
|
|
|
pass
|
|
|
|
cr.close()
|
|
|
|
return result
|
|
|
|
|
|
|
|
@memoize(CACHE_SIZE)
|
2010-06-16 11:51:39 +00:00
|
|
|
def is_collection(self, uri):
|
2009-12-15 14:31:05 +00:00
|
|
|
""" test if the given uri is a collection """
|
2010-08-01 08:37:22 +00:00
|
|
|
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
|
|
|
try:
|
|
|
|
if not dbname:
|
|
|
|
return True
|
|
|
|
node = self.uri2object(cr,uid,pool, uri2)
|
|
|
|
if not node:
|
|
|
|
raise DAV_NotFound2(uri2)
|
|
|
|
if node.type in ('collection','database'):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
finally:
|
|
|
|
if cr: cr.close()
|
2010-06-23 10:54:37 +00:00
|
|
|
|
|
|
|
#eof
|