[MERGE] merge from lp:~xrg/openobject-addons/doc2-5.2
bzr revid: hmo@tinyerp.com-20091126135400-z32iycvadbt9so8n
This commit is contained in:
parent
e65f61d492
commit
8d0b4a7418
|
@ -639,7 +639,7 @@ class account_period(osv.osv):
|
|||
#CHECKME: shouldn't we check the state of the period?
|
||||
ids = self.search(cr, uid, [('date_start','<=',dt),('date_stop','>=',dt)])
|
||||
if not ids:
|
||||
raise osv.except_osv(_('Error !'), _('No period defined for this date !\nPlease create a fiscal year.'))
|
||||
raise osv.except_osv(_('Error !'), _('No period defined for this date: %s !\nPlease create a fiscal year.')%dt)
|
||||
return ids
|
||||
|
||||
def action_draft(self, cr, uid, ids, *args):
|
||||
|
@ -1810,11 +1810,13 @@ class account_account_template(osv.osv):
|
|||
'parent_id': fields.many2one('account.account.template','Parent Account Template', ondelete='cascade'),
|
||||
'child_parent_ids':fields.one2many('account.account.template','parent_id','Children'),
|
||||
'tax_ids': fields.many2many('account.tax.template', 'account_account_template_tax_rel','account_id','tax_id', 'Default Taxes'),
|
||||
'nocreate': fields.boolean('Optional create', help="If checked, the new chart of accounts will not contain this by default."),
|
||||
}
|
||||
|
||||
_defaults = {
|
||||
'reconcile': lambda *a: False,
|
||||
'type' : lambda *a :'view',
|
||||
'nocreate': lambda *a: False,
|
||||
}
|
||||
|
||||
def _check_recursion(self, cr, uid, ids):
|
||||
|
@ -1846,6 +1848,67 @@ class account_account_template(osv.osv):
|
|||
|
||||
account_account_template()
|
||||
|
||||
class account_add_tmpl_wizard(osv.osv_memory):
|
||||
"""Add one more account from the template.
|
||||
|
||||
With the 'nocreate' option, some accounts may not be created. Use this to add them later."""
|
||||
_name = 'account.addtmpl.wizard'
|
||||
|
||||
def _get_def_cparent(self, cr, uid, context):
|
||||
acc_obj=self.pool.get('account.account')
|
||||
tmpl_obj=self.pool.get('account.account.template')
|
||||
#print "Searching for ",context
|
||||
tids=tmpl_obj.read(cr, uid, [context['tmpl_ids']],['parent_id'])
|
||||
if not tids or not tids[0]['parent_id']:
|
||||
return False
|
||||
ptids = tmpl_obj.read(cr, uid, [tids[0]['parent_id'][0]],['code'])
|
||||
if not ptids or not ptids[0]['code']:
|
||||
raise osv.except_osv(_('Error !'), _('Cannot locate parent code for template account!'))
|
||||
res = acc_obj.search(cr,uid,[('code','=',ptids[0]['code'])])
|
||||
if res:
|
||||
return res[0]
|
||||
else:
|
||||
return False
|
||||
|
||||
_columns = {
|
||||
'cparent_id':fields.many2one('account.account', 'Parent target', help="Create an account with the selected template under this existing parent.", required=True),
|
||||
}
|
||||
_defaults = {
|
||||
'cparent_id': _get_def_cparent,
|
||||
}
|
||||
|
||||
def action_create(self,cr,uid,ids,context=None):
|
||||
acc_obj=self.pool.get('account.account')
|
||||
tmpl_obj=self.pool.get('account.account.template')
|
||||
data= self.read(cr,uid,ids)
|
||||
company_id = acc_obj.read(cr,uid,[data[0]['cparent_id']],['company_id'])[0]['company_id'][0]
|
||||
account_template = tmpl_obj.browse(cr,uid,context['tmpl_ids'])
|
||||
#tax_ids = []
|
||||
#for tax in account_template.tax_ids:
|
||||
# tax_ids.append(tax_template_ref[tax.id])
|
||||
vals={
|
||||
'name': account_template.name,
|
||||
#'sign': account_template.sign,
|
||||
'currency_id': account_template.currency_id and account_template.currency_id.id or False,
|
||||
'code': account_template.code,
|
||||
'type': account_template.type,
|
||||
'user_type': account_template.user_type and account_template.user_type.id or False,
|
||||
'reconcile': account_template.reconcile,
|
||||
'shortcut': account_template.shortcut,
|
||||
'note': account_template.note,
|
||||
'parent_id': data[0]['cparent_id'],
|
||||
# 'tax_ids': [(6,0,tax_ids)], todo!!
|
||||
'company_id': company_id,
|
||||
}
|
||||
# print "Creating:", vals
|
||||
new_account = acc_obj.create(cr,uid,vals)
|
||||
return {'type':'state', 'state': 'end' }
|
||||
|
||||
def action_cancel(self,cr,uid,ids,context=None):
|
||||
return { 'type': 'state', 'state': 'end' }
|
||||
|
||||
account_add_tmpl_wizard()
|
||||
|
||||
class account_tax_code_template(osv.osv):
|
||||
|
||||
_name = 'account.tax.code.template'
|
||||
|
@ -2141,7 +2204,7 @@ class wizard_multi_charts_accounts(osv.osv_memory):
|
|||
#deactivate the parent_store functionnality on account_account for rapidity purpose
|
||||
self.pool._init = True
|
||||
|
||||
children_acc_template = obj_acc_template.search(cr, uid, [('parent_id','child_of',[obj_acc_root.id])])
|
||||
children_acc_template = obj_acc_template.search(cr, uid, [('parent_id','child_of',[obj_acc_root.id]),('nocreate','!=',True)])
|
||||
children_acc_template.sort()
|
||||
for account_template in obj_acc_template.browse(cr, uid, children_acc_template):
|
||||
tax_ids = []
|
||||
|
|
|
@ -97,8 +97,10 @@ class account_analytic_line(osv.osv):
|
|||
|
||||
def view_header_get(self, cr, user, view_id, view_type, context):
|
||||
if context.get('account_id', False):
|
||||
# account_id in context may also be pointing to an account.account.id
|
||||
cr.execute('select name from account_analytic_account where id=%s', (context['account_id'],))
|
||||
res = cr.fetchone()
|
||||
if res:
|
||||
res = _('Entries: ')+ (res[0] or '')
|
||||
return res
|
||||
return False
|
||||
|
|
|
@ -797,7 +797,7 @@
|
|||
</record>
|
||||
|
||||
<record id="view_account_move_line_filter" model="ir.ui.view">
|
||||
<field name="name">account.move.line.select</field>
|
||||
<field name="name">Entry Lines</field>
|
||||
<field name="model">account.move.line</field>
|
||||
<field name="type">search</field>
|
||||
<field name="arch" type="xml">
|
||||
|
@ -1484,8 +1484,28 @@
|
|||
</record>
|
||||
|
||||
|
||||
<record id="view_account_addtmpl_wizard_form" model="ir.ui.view">
|
||||
<field name="name">Account Add wizard</field>
|
||||
<field name="model">account.addtmpl.wizard</field>
|
||||
<field name="type">form</field>
|
||||
<field name="arch" type="xml">
|
||||
<form string="Account Add">
|
||||
<separator col="4" colspan="4" string="Select the common parent for the accounts"/>
|
||||
<field name="cparent_id"/>
|
||||
<group col="2" colspan="2">
|
||||
<button icon="gtk-cancel" special="cancel" string="Cancel" name="action_cancel" type="object"/>
|
||||
<button icon="gtk-ok" name="action_create" string="Add" type="object"/>
|
||||
</group>
|
||||
</form>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
|
||||
<act_window domain="[]" id="action_account_addtmpl_wizard_form"
|
||||
name="Add account Wizard"
|
||||
res_model="account.addtmpl.wizard"
|
||||
context="{'tmpl_ids': active_id}"
|
||||
src_model="account.account.template"
|
||||
view_type="form" view_mode="form"/>
|
||||
|
||||
<!-- register configuration wizard -->
|
||||
|
||||
|
|
|
@ -226,7 +226,7 @@ class account_invoice(osv.osv):
|
|||
('in_invoice','Supplier Invoice'),
|
||||
('out_refund','Customer Refund'),
|
||||
('in_refund','Supplier Refund'),
|
||||
],'Type', readonly=True, select=True),
|
||||
],'Type', readonly=True, select=True, change_default=True),
|
||||
|
||||
'number': fields.char('Invoice Number', size=32, readonly=True, help="Unique number of the invoice, computed automatically when the invoice is created."),
|
||||
'reference': fields.char('Invoice Reference', size=64, help="The partner reference of this invoice."),
|
||||
|
@ -284,7 +284,7 @@ class account_invoice(osv.osv):
|
|||
multi='all'),
|
||||
'currency_id': fields.many2one('res.currency', 'Currency', required=True, readonly=True, states={'draft':[('readonly',False)]}),
|
||||
'journal_id': fields.many2one('account.journal', 'Journal', required=True,readonly=True, states={'draft':[('readonly',False)]}),
|
||||
'company_id': fields.many2one('res.company', 'Company', required=True),
|
||||
'company_id': fields.many2one('res.company', 'Company', required=True, change_default=True),
|
||||
'check_total': fields.float('Total', digits=(16, int(config['price_accuracy'])), states={'open':[('readonly',True)],'close':[('readonly',True)]}),
|
||||
'reconciled': fields.function(_reconciled, method=True, string='Paid/Reconciled', type='boolean',
|
||||
store={
|
||||
|
@ -1014,7 +1014,10 @@ class account_invoice(osv.osv):
|
|||
line_ids = []
|
||||
total = 0.0
|
||||
line = self.pool.get('account.move.line')
|
||||
cr.execute('select id from account_move_line where move_id in ('+str(move_id)+','+str(invoice.move_id.id)+')')
|
||||
move_ids = [move_id,]
|
||||
if invoice.move_id:
|
||||
move_ids.append(invoice.move_id.id)
|
||||
cr.execute('SELECT id FROM account_move_line WHERE move_id = ANY(%s)',(move_ids,))
|
||||
lines = line.browse(cr, uid, map(lambda x: x[0], cr.fetchall()) )
|
||||
for l in lines+invoice.payment_ids:
|
||||
if l.account_id.id==src_account_id:
|
||||
|
|
|
@ -53,7 +53,7 @@
|
|||
</group>
|
||||
<notebook colspan="4">
|
||||
<page string="Account Data">
|
||||
<field name="partner_id"/>
|
||||
<field name="partner_id" select="1"/>
|
||||
<newline/>
|
||||
<field name="date_start"/>
|
||||
<field name="date" select="2"/>
|
||||
|
|
|
@ -67,6 +67,8 @@
|
|||
"access_res_currency_rate_account_manager","res.currency.rate account manager","base.model_res_currency_rate","group_account_manager",1,1,1,1
|
||||
"access_account_config_wizard_account_manager","account.config.wizard account manager","model_account_config_wizard","group_account_manager",1,1,1,1
|
||||
"access_account_config_wizard_system_manager","account.config.wizard system manager","model_account_config_wizard","base.group_system",1,1,1,1
|
||||
"access_account_add_tmpl_wizard_account_manager","account.addtmpl.wizard account manager","model_account_addtmpl_wizard","group_account_manager",1,1,1,1
|
||||
"access_account_add_tmpl_wizard_system_manager","account.addtmpl.wizard system manager","model_account_addtmpl_wizard","base.group_system",1,1,1,1
|
||||
"access_account_invoice_user","account.invoice user","model_account_invoice","base.group_user",1,0,0,0
|
||||
"access_account_invoice_user","account.invoice.line user","model_account_invoice_line","base.group_user",1,0,0,0
|
||||
"access_account_invoice_user","account.invoice.tax user","model_account_invoice_tax","base.group_user",1,0,0,0
|
||||
|
|
|
|
@ -70,7 +70,7 @@ account_analytic_default()
|
|||
class account_invoice_line(osv.osv):
|
||||
_inherit = 'account.invoice.line'
|
||||
_description = 'account invoice line'
|
||||
def product_id_change(self, cr, uid, ids, product, uom, qty=0, name='', type='out_invoice', partner_id=False, fposition=False, price_unit=False, address_invoice_id=False, context={}):
|
||||
def product_id_change(self, cr, uid, ids, product, uom, qty=0, name='', type='out_invoice', partner_id=False, fposition=False, price_unit=False, address_invoice_id=False, context=None):
|
||||
res_prod = super(account_invoice_line,self).product_id_change(cr, uid, ids, product, uom, qty, name, type, partner_id, fposition, price_unit, address_invoice_id, context)
|
||||
rec = self.pool.get('account.analytic.default').account_get(cr, uid, product, partner_id, uid, time.strftime('%Y-%m-%d'), context)
|
||||
if rec:
|
||||
|
|
|
@ -253,12 +253,12 @@ class account_invoice_line(osv.osv):
|
|||
vals['analytics_id'] = vals['analytics_id'][0]
|
||||
return super(account_invoice_line, self).create(cr, uid, vals, context)
|
||||
|
||||
def move_line_get_item(self, cr, uid, line, context={}):
|
||||
def move_line_get_item(self, cr, uid, line, context=None):
|
||||
res= super(account_invoice_line,self).move_line_get_item(cr, uid, line, context={})
|
||||
res ['analytics_id']=line.analytics_id and line.analytics_id.id or False
|
||||
return res
|
||||
|
||||
def product_id_change(self, cr, uid, ids, product, uom, qty=0, name='', type='out_invoice', partner_id=False, fposition_id=False, price_unit=False, address_invoice_id=False, context={}):
|
||||
def product_id_change(self, cr, uid, ids, product, uom, qty=0, name='', type='out_invoice', partner_id=False, fposition_id=False, price_unit=False, address_invoice_id=False, context=None):
|
||||
res_prod = super(account_invoice_line,self).product_id_change(cr, uid, ids, product, uom, qty, name, type, partner_id, fposition_id, price_unit, address_invoice_id, context)
|
||||
rec = self.pool.get('account.analytic.default').account_get(cr, uid, product, partner_id, uid, time.strftime('%Y-%m-%d'), context)
|
||||
if rec and rec.analytics_id:
|
||||
|
|
|
@ -46,6 +46,8 @@
|
|||
<field name="type">form</field>
|
||||
<field name="arch" type="xml">
|
||||
<xpath expr="/form/notebook/page/field[@name='invoice_line']" position="replace">
|
||||
<!-- keep the original fields, because other views position on that, too -->
|
||||
<field name="invoice_line" invisible="True"/>
|
||||
<field name="abstract_line_ids" colspan="4" nolabel="1"/>
|
||||
</xpath>
|
||||
</field>
|
||||
|
|
|
@ -141,10 +141,14 @@ class account_invoice_line(osv.osv):
|
|||
|
||||
def product_id_change(self, cr, uid, ids, product, uom, qty=0, name='', type='out_invoice', partner_id=False, fposition_id=False, price_unit=False, address_invoice_id=False, context=None):
|
||||
# note: will call product_id_change_unit_price_inv with context...
|
||||
if context is None:
|
||||
context = {}
|
||||
context.update({'price_type': context.get('price_type','tax_excluded')})
|
||||
return super(account_invoice_line, self).product_id_change(cr, uid, ids, product, uom, qty, name, type, partner_id, fposition_id, price_unit, address_invoice_id, context=context)
|
||||
|
||||
# Temporary trap, for bad context that came from koo:
|
||||
# if isinstance(context, str):
|
||||
# print "str context:", context
|
||||
|
||||
ctx = (context and context.copy()) or {}
|
||||
ctx.update({'price_type': ctx.get('price_type','tax_excluded')})
|
||||
return super(account_invoice_line, self).product_id_change(cr, uid, ids, product, uom, qty, name, type, partner_id, fposition_id, price_unit, address_invoice_id, context=ctx)
|
||||
account_invoice_line()
|
||||
|
||||
class account_invoice_tax(osv.osv):
|
||||
|
|
|
@ -36,6 +36,7 @@
|
|||
<field name="partner_id" invisible="1" select="1"/>
|
||||
<field name="title" select="1"/>
|
||||
<field name="function_id" invisible="1" select="2"/>
|
||||
<field name="email"/>
|
||||
<field name="lang_id"/>
|
||||
<field name="active"/>
|
||||
</group>
|
||||
|
@ -50,6 +51,9 @@
|
|||
<field name="name" colspan="4"/>
|
||||
<field name="address_id" colspan="4"/>
|
||||
<field name="function_id" colspan="4"/>
|
||||
<field name="fax"/>
|
||||
<field name="extension"/>
|
||||
<field name="other"/>
|
||||
<field name="date_start" />
|
||||
<field name="date_stop" />
|
||||
<field name="state" />
|
||||
|
@ -163,6 +167,8 @@
|
|||
<group string="Communication" colspan="2" col="2">
|
||||
<field name="phone"/>
|
||||
<field name="fax"/>
|
||||
<field name="extension"/>
|
||||
<field name="other"/>
|
||||
<field name="email" widget="email"/>
|
||||
<field name="extension"/>
|
||||
<field name="other"/>
|
||||
|
|
|
@ -26,7 +26,7 @@ import pooler
|
|||
import string
|
||||
import tools
|
||||
|
||||
objects_proxy = netsvc.SERVICES['object'].__class__
|
||||
objects_proxy = netsvc.ExportService.getService('object').__class__
|
||||
|
||||
class recording_objects_proxy(objects_proxy):
|
||||
def execute(self, *args, **argv):
|
||||
|
@ -55,6 +55,22 @@ class recording_objects_proxy(objects_proxy):
|
|||
|
||||
recording_objects_proxy()
|
||||
|
||||
class xElement(minidom.Element):
|
||||
"""dom.Element with compact print
|
||||
The Element in minidom has a problem: if printed, adds whitespace
|
||||
around the text nodes. The standard will not ignore that whitespace.
|
||||
This class simply prints the contained nodes in their compact form, w/o
|
||||
added spaces.
|
||||
"""
|
||||
def writexml(self, writer, indent="", addindent="", newl=""):
|
||||
writer.write(indent)
|
||||
minidom.Element.writexml(self, writer, indent='', addindent='', newl='')
|
||||
writer.write(newl)
|
||||
|
||||
def doc_createXElement(xdoc, tagName):
|
||||
e = xElement(tagName)
|
||||
e.ownerDocument = xdoc
|
||||
return e
|
||||
|
||||
class base_module_record(osv.osv):
|
||||
_name = "ir.module.record"
|
||||
|
@ -165,15 +181,11 @@ class base_module_record(osv.osv):
|
|||
field.setAttribute("eval", "[(6,0,["+','.join(map(lambda x: "ref('%s')" % (x,), res))+'])]')
|
||||
record.appendChild(field)
|
||||
else:
|
||||
field = doc.createElement('field')
|
||||
field = doc_createXElement(doc, 'field')
|
||||
field.setAttribute("name", key)
|
||||
|
||||
if not isinstance(val, basestring):
|
||||
val = str(val)
|
||||
|
||||
val = val and ('"""%s"""' % val.replace('\\', '\\\\').replace('"', '\"')) or 'False'
|
||||
field.setAttribute(u"eval", tools.ustr(val))
|
||||
field.appendChild(doc.createTextNode(val))
|
||||
record.appendChild(field)
|
||||
|
||||
return record_list, noupdate
|
||||
|
||||
def get_copy_data(self, cr, uid, model, id, result):
|
||||
|
|
|
@ -21,5 +21,18 @@
|
|||
action="wizard_base_module_record_objects"
|
||||
id="menu_wizard_base_module_record_objects"/>
|
||||
|
||||
<wizard
|
||||
id="wizard_base_module_record_data"
|
||||
string="Export Customizations As Data File"
|
||||
model="ir.module.module"
|
||||
multi="True"
|
||||
name="base_module_record.module_record_data"/>
|
||||
<menuitem
|
||||
parent="menu_wizard_base_mod_rec"
|
||||
name="Export Customizations As Data File"
|
||||
type="wizard"
|
||||
action="wizard_base_module_record_data"
|
||||
id="menu_wizard_base_module_record_data"/>
|
||||
|
||||
</data>
|
||||
</openerp>
|
||||
|
|
|
@ -21,5 +21,6 @@
|
|||
|
||||
import base_module_save
|
||||
import base_module_record_objects
|
||||
import base_module_record_data
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||
|
||||
|
|
|
@ -0,0 +1,148 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# OpenERP, Open Source Management Solution
|
||||
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
|
||||
# $Id$
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
import wizard
|
||||
import osv
|
||||
import pooler
|
||||
import time
|
||||
|
||||
info = '''<?xml version="1.0"?>
|
||||
<form string="Module Recording">
|
||||
<label string="Thanks For using Module Recorder" colspan="4" align="0.0"/>
|
||||
</form>'''
|
||||
|
||||
intro_start_form = '''<?xml version="1.0"?>
|
||||
<form string="Objects Recording">
|
||||
<field name="check_date"/>
|
||||
<newline/>
|
||||
<field name="filter_cond"/>
|
||||
<separator string="Choose objects to record" colspan="4"/>
|
||||
<field name="objects" colspan="4" nolabel="1"/>
|
||||
|
||||
</form>'''
|
||||
|
||||
intro_start_fields = {
|
||||
'check_date': {'string':"Record from Date",'type':'datetime','required':True, 'default': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S')},
|
||||
'objects':{'string': 'Objects', 'type': 'many2many', 'relation': 'ir.model', 'help': 'List of objects to be recorded'},
|
||||
'filter_cond':{'string':'Records only', 'type':'selection','selection':[('created','Created'),('modified','Modified'),('created_modified','Created & Modified')], 'required':True, 'default': lambda *args:'created'},
|
||||
|
||||
}
|
||||
|
||||
exp_form = '''<?xml version="1.0"?>
|
||||
<form string="Objects Recording">
|
||||
<separator string="Result, paste this to your module's xml" colspan="4" />
|
||||
<field name="res_text" nolabel="1" colspan="4"/>
|
||||
</form>'''
|
||||
|
||||
exp_fields = {
|
||||
'res_text': {'string':"Result",'type':'text', },
|
||||
}
|
||||
|
||||
def _info_default(self, cr, uid, data, context):
|
||||
pool = pooler.get_pool(cr.dbname)
|
||||
mod = pool.get('ir.model')
|
||||
list=('ir.ui.view','ir.ui.menu','ir.model','ir.model.fields','ir.model.access',\
|
||||
'res.partner','res.partner.address','res.partner.category','workflow',\
|
||||
'workflow.activity','workflow.transition','ir.actions.server','ir.server.object.lines')
|
||||
data['form']['objects']=mod.search(cr,uid,[('model','in',list)])
|
||||
cr.execute('select max(create_date) from ir_model_data')
|
||||
c=(cr.fetchone())[0].split('.')[0]
|
||||
c = time.strptime(c,"%Y-%m-%d %H:%M:%S")
|
||||
sec=c.tm_sec + 1
|
||||
c=(c[0],c[1],c[2],c[3],c[4],sec,c[6],c[7],c[8])
|
||||
data['form']['check_date']=time.strftime("%Y-%m-%d %H:%M:%S",c)
|
||||
return data['form']
|
||||
|
||||
def _record_objects(self, cr, uid, data, context):
|
||||
check_date=data['form']['check_date']
|
||||
filter=data['form']['filter_cond']
|
||||
pool = pooler.get_pool(cr.dbname)
|
||||
user=(pool.get('res.users').browse(cr,uid,uid)).login
|
||||
mod = pool.get('ir.module.record')
|
||||
mod_obj = pool.get('ir.model')
|
||||
mod.recording_data = []
|
||||
|
||||
for id in data['form']['objects'][0][2]:
|
||||
obj_name=(mod_obj.browse(cr,uid,id)).model
|
||||
obj_pool=pool.get(obj_name)
|
||||
if filter =='created':
|
||||
search_condition =[('create_date','>',check_date)]
|
||||
elif filter =='modified':
|
||||
search_condition =[('write_date','>',check_date)]
|
||||
elif filter =='created_modified':
|
||||
search_condition =['|',('create_date','>',check_date),('write_date','>',check_date)]
|
||||
if '_log_access' in dir(obj_pool):
|
||||
if not (obj_pool._log_access):
|
||||
search_condition=[]
|
||||
if '_auto' in dir(obj_pool):
|
||||
if not obj_pool._auto:
|
||||
continue
|
||||
search_ids=obj_pool.search(cr,uid,search_condition)
|
||||
for s_id in search_ids:
|
||||
args=(cr.dbname,uid,user,obj_name,'copy',s_id,{},context)
|
||||
mod.recording_data.append(('query',args, {}, s_id))
|
||||
return {}
|
||||
|
||||
def _create_xml(self, cr, uid, data, context):
|
||||
pool = pooler.get_pool(cr.dbname)
|
||||
mod = pool.get('ir.module.record')
|
||||
res_xml = mod.generate_xml(cr, uid)
|
||||
return { 'res_text': res_xml }
|
||||
|
||||
class base_module_record_objects(wizard.interface):
|
||||
states = {
|
||||
'init': {
|
||||
'actions': [_info_default],
|
||||
'result': {
|
||||
'type':'form',
|
||||
'arch':intro_start_form,
|
||||
'fields': intro_start_fields,
|
||||
'state':[
|
||||
('end', 'Cancel', 'gtk-cancel'),
|
||||
('record', 'Record', 'gtk-ok'),
|
||||
]
|
||||
}
|
||||
},
|
||||
'record': {
|
||||
'actions': [],
|
||||
'result': {'type':'action','action':_record_objects,'state':'intro'}
|
||||
},
|
||||
'intro': {
|
||||
'actions': [ _create_xml ],
|
||||
'result': {
|
||||
'type':'form',
|
||||
'arch': exp_form,
|
||||
'fields':exp_fields,
|
||||
'state':[
|
||||
('end', 'End', 'gtk-cancel'),
|
||||
]
|
||||
},
|
||||
},
|
||||
'end': {
|
||||
'actions': [],
|
||||
'result': {'type':'form', 'arch':info, 'fields':{}, 'state':[('end','OK')]}
|
||||
},
|
||||
}
|
||||
base_module_record_objects('base_module_record.module_record_data')
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||
|
|
@ -800,6 +800,10 @@ class crm_case(osv.osv):
|
|||
if case.section_id.reply_to and case.email_from:
|
||||
src = case.email_from
|
||||
|
||||
if not src:
|
||||
raise osv.except_osv(_('Error!'),
|
||||
_("No E-Mail ID Found for the Responsible Partner or missing reply address in section!"))
|
||||
|
||||
dest = case.section_id.reply_to
|
||||
body = case.email_last or case.description
|
||||
if not destination:
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# OpenERP, Open Source Management Solution
|
||||
#
|
||||
# Copyright (C) P. Christeas, 2009, all rights reserved
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
import document_protocol
|
|
@ -0,0 +1,38 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# OpenERP, Open Source Management Solution
|
||||
#
|
||||
# Copyright (C) P. Christeas, 2009, all rights reserved
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
{
|
||||
"name" : "Document Protocol",
|
||||
"version" : "0.1",
|
||||
"author" : "P. Christeas" ,
|
||||
"website" : "http://openerp.hellug.gr",
|
||||
"description" : """ Protocol functionality for document management.
|
||||
|
||||
With this, documents can take a protocol number, and be locked down with it.
|
||||
""",
|
||||
"depends" : ["base","document", "document_lock"],
|
||||
"init_xml" : [],
|
||||
"demo_xml" : [],
|
||||
"update_xml" : ["document_protocol.xml"],
|
||||
"active": False,
|
||||
"installable": True
|
||||
}
|
|
@ -0,0 +1,63 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# OpenERP, Open Source Management Solution
|
||||
#
|
||||
# Copyright (C) P. Christeas, 2009, all rights reserved
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
from osv import osv, fields
|
||||
|
||||
class ir_attachment(osv.osv):
|
||||
_inherit = 'ir.attachment'
|
||||
|
||||
_columns = {
|
||||
'proto_num' : fields.char('Protocol Number', size=32),
|
||||
'proto_date': fields.datetime('Protocol Date', readonly=False),
|
||||
'proto_from': fields.char('From',size=100, help="A textual description where the document has come from."),
|
||||
'proto_to': fields.char('To',size=100,help="The main recepient of this document"),
|
||||
'signature_ids': fields.one2many('document.signature', 'file_id', 'Signatures', readonly=True),
|
||||
}
|
||||
|
||||
_defaults = {
|
||||
}
|
||||
|
||||
ir_attachment()
|
||||
|
||||
class document_signature(osv.osv):
|
||||
""" A detached digital signature for a file
|
||||
"""
|
||||
_name = 'document.signature'
|
||||
_columns = {
|
||||
'file_id' :fields.many2one('ir.attachment', 'File', readonly=True, required=True),
|
||||
'write_uid': fields.many2one('res.users', 'User', readonly=True),
|
||||
'write_date': fields.datetime('Date', readonly=True),
|
||||
'sig_type' : fields.selection([('gpg','GPG'),('sha','SHA')], 'Type', required=True),
|
||||
'signature': fields.text('Signature',required=True),
|
||||
'keyid': fields.char('Key ID',size=64,help="The key id used to generate the signature"),
|
||||
'status': fields.selection([('valid','Valid'),('invalid','Invalid'),('unknown','Unknown')], 'Status',
|
||||
help="Last known status of signature. Do NOT trust this, unless you verify the signature yourself"),
|
||||
}
|
||||
|
||||
_defaults = {
|
||||
'sig_type': lambda *a: 'gpg',
|
||||
'status': lambda *a: 'unknown',
|
||||
}
|
||||
|
||||
document_signature()
|
||||
|
||||
#eof
|
|
@ -0,0 +1,121 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<openerp>
|
||||
<data>
|
||||
<record model="ir.ui.view" id="view_document_file_form_proto">
|
||||
<field name="name">ir.attachment.inherited</field>
|
||||
<field name="model">ir.attachment</field>
|
||||
<field name="inherit_id" ref="document.view_document_file_form" />
|
||||
<field name="type">form</field>
|
||||
<field name="arch" type="xml">
|
||||
<xpath expr="/form/notebook/page[@string='Others Info']" position="inside">
|
||||
<group name="protocol" colspan="2" col="2">
|
||||
<separator string="Protocol" colspan="2" />
|
||||
<field name="proto_num" select="1"/>
|
||||
<field name="proto_date" />
|
||||
<field name="proto_from" />
|
||||
<field name="proto_to" />
|
||||
</group>
|
||||
</xpath>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<record model="ir.ui.view" id="view_document_file_form_proto2">
|
||||
<field name="name">ir.attachment.inherited2</field>
|
||||
<field name="model">ir.attachment</field>
|
||||
<field name="inherit_id" ref="view_document_file_form_proto" />
|
||||
<field name="type">form</field>
|
||||
<field name="arch" type="xml">
|
||||
<xpath expr="/form/notebook/page" position="after">
|
||||
<page name='signatures' string='Signatures'>
|
||||
<field name="signature_ids">
|
||||
<tree string="Signatures">
|
||||
<field name="write_uid" />
|
||||
<field name="keyid"/>
|
||||
<field name="write_date"/>
|
||||
<field name="status"/>
|
||||
</tree>
|
||||
</field>
|
||||
</page>
|
||||
</xpath>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<record model="ir.ui.view" id="view_document_signature_tree">
|
||||
<field name="name">document.signature.tree</field>
|
||||
<field name="model">document.signature</field>
|
||||
<field name="type">tree</field>
|
||||
<field name="arch" type="xml">
|
||||
<tree string="Signatures">
|
||||
<field name="file"/>
|
||||
<field name="write_uid" />
|
||||
<field name="keyid"/>
|
||||
<field name="write_date"/>
|
||||
<field name="status"/>
|
||||
</tree>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<record model="ir.ui.view" id="view_document_signature_view">
|
||||
<field name="name">document.signature.form</field>
|
||||
<field name="model">document.signature</field>
|
||||
<field name="type">form</field>
|
||||
<field name="arch" type="xml">
|
||||
<form string="Signatures">
|
||||
<field name="file_id"/>
|
||||
<field name="write_uid" />
|
||||
<field name="keyid"/>
|
||||
<field name="write_date"/>
|
||||
<field name="status"/>
|
||||
<newline />
|
||||
<field name="signature" colspan="4"/>
|
||||
</form>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<!-- A separate menu only for the protocolled documents -->
|
||||
|
||||
<record model="ir.ui.view" id="view_protocol_tree">
|
||||
<field name="name">ir.attachment.protocol.tree</field>
|
||||
<field name="model">ir.attachment</field>
|
||||
<field name="type">tree</field>
|
||||
<field name="arch" type="xml">
|
||||
<tree string="Files">
|
||||
<field name="proto_num" />
|
||||
<field name="proto_date" />
|
||||
<field name="partner_id"/>
|
||||
<field name="title" />
|
||||
<field name="name"/>
|
||||
<field name="datas_fname"/>
|
||||
</tree>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<record model="ir.actions.act_window" id="action_document_proto_tree">
|
||||
<field name="type">ir.actions.act_window</field>
|
||||
<field name="res_model">ir.attachment</field>
|
||||
<field name="view_type">form</field>
|
||||
<field name="view_mode">tree,form</field>
|
||||
<field name="domain">[('proto_num','!=',False)]</field>
|
||||
</record>
|
||||
<record id="action_protocol_view2" model="ir.actions.act_window.view">
|
||||
<field eval="16" name="sequence"/>
|
||||
<field name="view_mode">tree</field>
|
||||
<field name="view_id" ref="view_protocol_tree" />
|
||||
<field name="act_window_id" ref="action_document_proto_tree"/>
|
||||
</record>
|
||||
<record id="action_protocol_view3" model="ir.actions.act_window.view">
|
||||
<field eval="16" name="sequence"/>
|
||||
<field name="view_mode">form</field>
|
||||
<field name="view_id" ref="document.view_document_file_form"/>
|
||||
<field name="act_window_id" ref="action_document_proto_tree"/>
|
||||
</record>
|
||||
|
||||
|
||||
<menuitem
|
||||
name="Documents in protocol"
|
||||
action="action_document_proto_tree"
|
||||
id="menu_document_protocol"
|
||||
parent="document.menu_document"/>
|
||||
|
||||
</data>
|
||||
</openerp>
|
|
@ -0,0 +1,164 @@
|
|||
# Translation of OpenERP Server.
|
||||
# This file contains the translation of the following modules:
|
||||
# * document_protocol
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenERP Server 5.0.4\n"
|
||||
"Report-Msgid-Bugs-To: support@openerp.com\n"
|
||||
"POT-Creation-Date: 2009-09-04 14:37:56+0000\n"
|
||||
"PO-Revision-Date: 2009-09-04 14:37:56+0000\n"
|
||||
"Last-Translator: <>\n"
|
||||
"Language-Team: \n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: \n"
|
||||
"Plural-Forms: \n"
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:ir.attachment,proto_num:0
|
||||
msgid "Protocol Number"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: help:document.signature,keyid:0
|
||||
msgid "The key id used to generate the signature"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: constraint:ir.model:0
|
||||
msgid "The Object name must start with x_ and not contain any special character !"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: constraint:ir.actions.act_window:0
|
||||
msgid "Invalid model name in the action definition."
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: selection:document.signature,status:0
|
||||
msgid "Unknown"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: selection:document.signature,status:0
|
||||
msgid "Invalid"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:ir.attachment,proto_to:0
|
||||
msgid "To"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: selection:document.signature,status:0
|
||||
msgid "Valid"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:ir.attachment,proto_date:0
|
||||
msgid "Protocol Date"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: help:ir.attachment,proto_from:0
|
||||
msgid "A textual description where the document has come from."
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:ir.attachment,proto_from:0
|
||||
msgid "From"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:document.signature,sig_type:0
|
||||
msgid "Type"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:document.signature,keyid:0
|
||||
msgid "Key ID"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: view:ir.attachment:0
|
||||
msgid "Files"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: help:document.signature,status:0
|
||||
msgid "Last known status of signature. Do NOT trust this, unless you verify the signature yourself"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:document.signature,write_uid:0
|
||||
msgid "User"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:document.signature,write_date:0
|
||||
msgid "Date"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: help:ir.attachment,proto_to:0
|
||||
msgid "The main recepient of this document"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: model:ir.ui.menu,name:document_protocol.menu_document_protocol
|
||||
msgid "Documents in protocol"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:document.signature,status:0
|
||||
msgid "Status"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: constraint:ir.ui.view:0
|
||||
msgid "Invalid XML for View Architecture!"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: selection:document.signature,sig_type:0
|
||||
msgid "GPG"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: selection:document.signature,sig_type:0
|
||||
msgid "SHA"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:document.signature,signature:0
|
||||
msgid "Signature"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: model:ir.module.module,shortdesc:document_protocol.module_meta_information
|
||||
msgid "Document Protocol"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: view:document.signature:0
|
||||
#: view:ir.attachment:0
|
||||
#: field:ir.attachment,signature_ids:0
|
||||
msgid "Signatures"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: view:ir.attachment:0
|
||||
msgid "Protocol"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: field:document.signature,file_id:0
|
||||
msgid "File"
|
||||
msgstr ""
|
||||
|
||||
#. module: document_protocol
|
||||
#: model:ir.model,name:document_protocol.model_document_signature
|
||||
msgid "document.signature"
|
||||
msgstr ""
|
||||
|
|
@ -0,0 +1,99 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Buffering HTTP Server
|
||||
Copyright (C) 1999 Christian Scholz (ruebe@aachen.heimat.de)
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Library General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Library General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Library General Public
|
||||
License along with this library; if not, write to the Free
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from utils import VERSION, AUTHOR
|
||||
__version__ = VERSION
|
||||
__author__ = AUTHOR
|
||||
|
||||
from BaseHTTPServer import BaseHTTPRequestHandler
|
||||
import os
|
||||
class BufferedHTTPRequestHandler(BaseHTTPRequestHandler):
|
||||
"""
|
||||
Buffering HTTP Request Handler
|
||||
|
||||
This class is an extension to the BaseHTTPRequestHandler
|
||||
class which buffers the whole output and sends it at once
|
||||
after the processing if the request is finished.
|
||||
|
||||
This makes it possible to work together with some clients
|
||||
which otherwise would break (e.g. cadaver)
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def _init_buffer(self):
|
||||
"""initialize the buffer.
|
||||
|
||||
If you override the handle() method remember to call
|
||||
this (see below)
|
||||
"""
|
||||
self.__buffer=""
|
||||
self.__outfp=os.tmpfile()
|
||||
|
||||
def _append(self,s):
|
||||
""" append a string to the buffer """
|
||||
self.__buffer=self.__buffer+s
|
||||
|
||||
def _flush(self):
|
||||
""" flush the buffer to wfile """
|
||||
self.wfile.write(self.__buffer)
|
||||
self.__outfp.write(self.__buffer)
|
||||
self.__outfp.flush()
|
||||
self.wfile.flush()
|
||||
self.__buffer=""
|
||||
|
||||
def handle(self):
|
||||
""" Handle a HTTP request """
|
||||
self._init_buffer()
|
||||
BaseHTTPRequestHandler.handle(self)
|
||||
self._flush()
|
||||
|
||||
def send_header(self, keyword, value):
|
||||
"""Send a MIME header."""
|
||||
if self.request_version != 'HTTP/0.9':
|
||||
self._append("%s: %s\r\n" % (keyword, value))
|
||||
|
||||
def end_headers(self):
|
||||
"""Send the blank line ending the MIME headers."""
|
||||
if self.request_version != 'HTTP/0.9':
|
||||
self._append("\r\n")
|
||||
|
||||
def send_response(self, code, message=None):
|
||||
self.log_request(code)
|
||||
|
||||
if message is None:
|
||||
if self.responses.has_key(code):
|
||||
message = self.responses[code][0]
|
||||
else:
|
||||
message = ''
|
||||
|
||||
if self.request_version != 'HTTP/0.9':
|
||||
self._append("%s %s %s\r\n" %
|
||||
(self.protocol_version, str(code), message))
|
||||
|
||||
self.send_header('Server', self.version_string())
|
||||
self.send_header('Connection', 'close')
|
||||
self.send_header('Date', self.date_time_string())
|
||||
|
||||
protocol_version="HTTP/1.1"
|
||||
|
|
@ -0,0 +1,379 @@
|
|||
"""
|
||||
Python WebDAV Server.
|
||||
Copyright (C) 1999 Christian Scholz (ruebe@aachen.heimat.de)
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Library General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Library General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Library General Public
|
||||
License along with this library; if not, write to the Free
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
This module builds on AuthServer by implementing the standard DAV
|
||||
methods.
|
||||
|
||||
Subclass this class and specify an IFACE_CLASS. See example.
|
||||
|
||||
"""
|
||||
|
||||
DEBUG=None
|
||||
|
||||
from utils import VERSION, AUTHOR
|
||||
__version__ = VERSION
|
||||
__author__ = AUTHOR
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import socket
|
||||
import string
|
||||
import posixpath
|
||||
import base64
|
||||
import urlparse
|
||||
import urllib
|
||||
|
||||
from propfind import PROPFIND
|
||||
from delete import DELETE
|
||||
from davcopy import COPY
|
||||
from davmove import MOVE
|
||||
|
||||
from string import atoi,split
|
||||
from status import STATUS_CODES
|
||||
from errors import *
|
||||
|
||||
import BaseHTTPServer
|
||||
|
||||
class DAVRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
"""Simple DAV request handler with
|
||||
|
||||
- GET
|
||||
- HEAD
|
||||
- PUT
|
||||
- OPTIONS
|
||||
- PROPFIND
|
||||
- PROPPATCH
|
||||
- MKCOL
|
||||
|
||||
It uses the resource/collection classes for serving and
|
||||
storing content.
|
||||
|
||||
"""
|
||||
|
||||
server_version = "DAV/" + __version__
|
||||
protocol_version = 'HTTP/1.1'
|
||||
|
||||
### utility functions
|
||||
def _log(self, message):
|
||||
pass
|
||||
|
||||
def _append(self,s):
|
||||
""" write the string to wfile """
|
||||
self.wfile.write(s)
|
||||
|
||||
def send_body(self,DATA,code,msg,desc,ctype='application/octet-stream',headers=None):
|
||||
""" send a body in one part """
|
||||
|
||||
if not headers:
|
||||
headers = {}
|
||||
self.send_response(code,message=msg)
|
||||
self.send_header("Connection", "keep-alive")
|
||||
self.send_header("Accept-Ranges", "bytes")
|
||||
|
||||
for a,v in headers.items():
|
||||
self.send_header(a,v)
|
||||
|
||||
if DATA:
|
||||
self.send_header("Content-Length", str(len(DATA)))
|
||||
self.send_header("Content-Type", ctype)
|
||||
else:
|
||||
self.send_header("Content-Length", "0")
|
||||
|
||||
self.end_headers()
|
||||
if DATA:
|
||||
self._append(DATA)
|
||||
|
||||
def send_body_chunks(self,DATA,code,msg,desc,ctype='text/xml; encoding="utf-8"'):
|
||||
""" send a body in chunks """
|
||||
|
||||
self.responses[207]=(msg,desc)
|
||||
self.send_response(code,message=msg)
|
||||
self.send_header("Content-type", ctype)
|
||||
self.send_header("Connection", "keep-alive")
|
||||
self.send_header("Transfer-Encoding", "chunked")
|
||||
self.end_headers()
|
||||
self._append(hex(len(DATA))[2:]+"\r\n")
|
||||
self._append(DATA)
|
||||
self._append("\r\n")
|
||||
self._append("0\r\n")
|
||||
self._append("\r\n")
|
||||
|
||||
### HTTP METHODS
|
||||
|
||||
def do_OPTIONS(self):
|
||||
"""return the list of capabilities """
|
||||
self.send_response(200)
|
||||
self.send_header("Allow", "GET, HEAD, COPY, MOVE, POST, PUT, PROPFIND, PROPPATCH, OPTIONS, MKCOL, DELETE, TRACE")
|
||||
self.send_header("Content-Type", "text/plain")
|
||||
self.send_header("Connection", "keep-alive")
|
||||
self.send_header("DAV", "1")
|
||||
self.end_headers()
|
||||
|
||||
def do_PROPFIND(self):
|
||||
|
||||
dc=self.IFACE_CLASS
|
||||
# read the body
|
||||
body=None
|
||||
if self.headers.has_key("Content-Length"):
|
||||
l=self.headers['Content-Length']
|
||||
body=self.rfile.read(atoi(l))
|
||||
alt_body = """<?xml version="1.0" encoding="utf-8"?>
|
||||
<propfind xmlns="DAV:"><prop>
|
||||
<getcontentlength xmlns="DAV:"/>
|
||||
<getlastmodified xmlns="DAV:"/>
|
||||
<getcreationdate xmlns="DAV:"/>
|
||||
<checked-in xmlns="DAV:"/>
|
||||
<executable xmlns="http://apache.org/dav/props/"/>
|
||||
<displayname xmlns="DAV:"/>
|
||||
<resourcetype xmlns="DAV:"/>
|
||||
<checked-out xmlns="DAV:"/>
|
||||
</prop></propfind>"""
|
||||
#self.wfile.write(body)
|
||||
|
||||
# which Depth?
|
||||
if self.headers.has_key('Depth'):
|
||||
d=self.headers['Depth']
|
||||
else:
|
||||
d="infinity"
|
||||
|
||||
uri=self.geturi()
|
||||
pf=PROPFIND(uri,dc,d)
|
||||
|
||||
if body:
|
||||
pf.read_propfind(body)
|
||||
|
||||
try:
|
||||
DATA=pf.createResponse()
|
||||
DATA=DATA+"\n"
|
||||
# print "Data:", DATA
|
||||
except DAV_NotFound,(ec,dd):
|
||||
return self.send_notFound(dd, uri)
|
||||
except DAV_Error, (ec,dd):
|
||||
return self.send_error(ec,dd)
|
||||
|
||||
self.send_body_chunks(DATA,207,"Multi-Status","Multiple responses")
|
||||
|
||||
def geturi(self):
|
||||
buri = self.IFACE_CLASS.baseuri
|
||||
if buri[-1] == '/':
|
||||
return urllib.unquote(buri[:-1]+self.path)
|
||||
else:
|
||||
return urllib.unquote(buri+self.path)
|
||||
|
||||
def do_GET(self):
|
||||
"""Serve a GET request."""
|
||||
dc=self.IFACE_CLASS
|
||||
uri=self.geturi()
|
||||
|
||||
# get the last modified date
|
||||
try:
|
||||
lm=dc.get_prop(uri,"DAV:","getlastmodified")
|
||||
except:
|
||||
lm="Sun, 01 Dec 2014 00:00:00 GMT" # dummy!
|
||||
headers={"Last-Modified":lm , "Connection": "keep-alive"}
|
||||
|
||||
# get the content type
|
||||
try:
|
||||
ct=dc.get_prop(uri,"DAV:","getcontenttype")
|
||||
except:
|
||||
ct="application/octet-stream"
|
||||
|
||||
# get the data
|
||||
try:
|
||||
data=dc.get_data(uri)
|
||||
except DAV_Error, (ec,dd):
|
||||
self.send_status(ec)
|
||||
return
|
||||
|
||||
# send the data
|
||||
self.send_body(data,200,"OK","OK",ct,headers)
|
||||
|
||||
def do_HEAD(self):
|
||||
""" Send a HEAD response """
|
||||
dc=self.IFACE_CLASS
|
||||
uri=self.geturi()
|
||||
|
||||
# get the last modified date
|
||||
try:
|
||||
lm=dc.get_prop(uri,"DAV:","getlastmodified")
|
||||
except:
|
||||
lm="Sun, 01 Dec 2014 00:00:00 GMT" # dummy!
|
||||
|
||||
headers={"Last-Modified":lm, "Connection": "keep-alive"}
|
||||
|
||||
# get the content type
|
||||
try:
|
||||
ct=dc.get_prop(uri,"DAV:","getcontenttype")
|
||||
except:
|
||||
ct="application/octet-stream"
|
||||
|
||||
try:
|
||||
data=dc.get_data(uri)
|
||||
headers["Content-Length"]=str(len(data))
|
||||
except DAV_NotFound:
|
||||
self.send_body(None,404,"Not Found","")
|
||||
return
|
||||
|
||||
self.send_body(None,200,"OK","OK",ct,headers)
|
||||
|
||||
def do_POST(self):
|
||||
self.send_error(404,"File not found")
|
||||
|
||||
def do_MKCOL(self):
|
||||
""" create a new collection """
|
||||
|
||||
dc=self.IFACE_CLASS
|
||||
uri=self.geturi()
|
||||
try:
|
||||
res = dc.mkcol(uri)
|
||||
if res:
|
||||
self.send_body(None,201,"Created",'')
|
||||
else:
|
||||
self.send_body(None,415,"Cannot create",'')
|
||||
#self.send_header("Connection", "keep-alive")
|
||||
# Todo: some content, too
|
||||
except DAV_Error, (ec,dd):
|
||||
self.send_body(None,int(ec),dd,dd)
|
||||
|
||||
def do_DELETE(self):
|
||||
""" delete an resource """
|
||||
dc=self.IFACE_CLASS
|
||||
uri=self.geturi()
|
||||
dl=DELETE(uri,dc)
|
||||
if dc.is_collection(uri):
|
||||
res=dl.delcol()
|
||||
else:
|
||||
res=dl.delone()
|
||||
|
||||
if res:
|
||||
self.send_status(207,body=res)
|
||||
else:
|
||||
self.send_status(204)
|
||||
|
||||
def do_PUT(self):
|
||||
dc=self.IFACE_CLASS
|
||||
|
||||
# read the body
|
||||
body=None
|
||||
if self.headers.has_key("Content-Length"):
|
||||
l=self.headers['Content-Length']
|
||||
body=self.rfile.read(atoi(l))
|
||||
uri=self.geturi()
|
||||
|
||||
ct=None
|
||||
if self.headers.has_key("Content-Type"):
|
||||
ct=self.headers['Content-Type']
|
||||
try:
|
||||
dc.put(uri,body,ct)
|
||||
except DAV_Error, (ec,dd):
|
||||
self.send_status(ec)
|
||||
return
|
||||
self.send_status(201)
|
||||
|
||||
def do_COPY(self):
|
||||
""" copy one resource to another """
|
||||
try:
|
||||
self.copymove(COPY)
|
||||
except DAV_Error, (ec,dd):
|
||||
self.send_status(ec)
|
||||
|
||||
def do_MOVE(self):
|
||||
""" move one resource to another """
|
||||
try:
|
||||
self.copymove(MOVE)
|
||||
except DAV_Error, (ec,dd):
|
||||
self.send_status(ec)
|
||||
|
||||
def copymove(self,CLASS):
|
||||
""" common method for copying or moving objects """
|
||||
dc=self.IFACE_CLASS
|
||||
|
||||
# get the source URI
|
||||
source_uri=self.geturi()
|
||||
|
||||
# get the destination URI
|
||||
dest_uri=self.headers['Destination']
|
||||
dest_uri=urllib.unquote(dest_uri)
|
||||
|
||||
# Overwrite?
|
||||
overwrite=1
|
||||
result_code=204
|
||||
if self.headers.has_key("Overwrite"):
|
||||
if self.headers['Overwrite']=="F":
|
||||
overwrite=None
|
||||
result_code=201
|
||||
|
||||
# instanciate ACTION class
|
||||
cp=CLASS(dc,source_uri,dest_uri,overwrite)
|
||||
|
||||
# Depth?
|
||||
d="infinity"
|
||||
if self.headers.has_key("Depth"):
|
||||
d=self.headers['Depth']
|
||||
|
||||
if d!="0" and d!="infinity":
|
||||
self.send_status(400)
|
||||
return
|
||||
|
||||
if d=="0":
|
||||
res=cp.single_action()
|
||||
self.send_status(res)
|
||||
return
|
||||
|
||||
# now it only can be "infinity" but we nevertheless check for a collection
|
||||
if dc.is_collection(source_uri):
|
||||
try:
|
||||
res=cp.tree_action()
|
||||
except DAV_Error, (ec,dd):
|
||||
self.send_status(ec)
|
||||
return
|
||||
else:
|
||||
try:
|
||||
res=cp.single_action()
|
||||
except DAV_Error, (ec,dd):
|
||||
self.send_status(ec)
|
||||
return
|
||||
|
||||
if res:
|
||||
self.send_body_chunks(res,207,STATUS_CODES[207],STATUS_CODES[207],
|
||||
ctype='text/xml; charset="utf-8"')
|
||||
else:
|
||||
self.send_status(result_code)
|
||||
|
||||
def get_userinfo(self,user,pw):
|
||||
""" Dummy method which lets all users in """
|
||||
|
||||
return 1
|
||||
|
||||
def send_status(self,code=200,mediatype='text/xml; charset="utf-8"', \
|
||||
msg=None,body=None):
|
||||
|
||||
if not msg: msg=STATUS_CODES[code]
|
||||
self.send_body(body,code,STATUS_CODES[code],msg,mediatype)
|
||||
|
||||
def send_notFound(self,descr,uri):
|
||||
body = """<?xml version="1.0" encoding="utf-8" ?>
|
||||
<D:response xmlns:D="DAV:">
|
||||
<D:href>%s</D:href>
|
||||
<D:error/>
|
||||
<D:responsedescription>%s</D:responsedescription>
|
||||
</D:response>
|
||||
"""
|
||||
return self.send_status(404,descr, body=body % (uri,descr))
|
|
@ -0,0 +1,20 @@
|
|||
"""
|
||||
python davserver
|
||||
Copyright (C) 1999 Christian Scholz (ruebe@aachen.heimat.de)
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Library General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Library General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Library General Public
|
||||
License along with this library; if not, write to the Free
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
"""
|
||||
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
"""
|
||||
|
||||
constants definition
|
||||
|
||||
|
||||
"""
|
||||
|
||||
# definition for resourcetype
|
||||
COLLECTION=1
|
||||
OBJECT=None
|
||||
DAV_PROPS=['creationdate', 'displayname', 'getcontentlanguage', 'getcontentlength', 'getcontenttype', 'getetag', 'getlastmodified', 'lockdiscovery', 'resourcetype', 'source', 'supportedlock']
|
||||
|
||||
|
||||
# Request classes in propfind
|
||||
|
||||
RT_ALLPROP=1
|
||||
RT_PROPNAME=2
|
||||
RT_PROP=3
|
|
@ -0,0 +1,218 @@
|
|||
"""
|
||||
|
||||
davcmd.py
|
||||
---------
|
||||
|
||||
containts commands like copy, move, delete for normal
|
||||
resources and collections
|
||||
|
||||
"""
|
||||
|
||||
from string import split,replace,joinfields
|
||||
import urlparse
|
||||
|
||||
from utils import create_treelist, is_prefix
|
||||
from errors import *
|
||||
|
||||
def deltree(dc,uri,exclude={}):
|
||||
""" delete a tree of resources
|
||||
|
||||
dc -- dataclass to use
|
||||
uri -- root uri to delete
|
||||
exclude -- an optional list of uri:error_code pairs which should not
|
||||
be deleted.
|
||||
|
||||
returns dict of uri:error_code tuples from which
|
||||
another method can create a multistatus xml element.
|
||||
|
||||
Also note that we only know Depth=infinity thus we don't have
|
||||
to test for it.
|
||||
|
||||
"""
|
||||
|
||||
tlist=create_treelist(dc,uri)
|
||||
result={}
|
||||
|
||||
for i in range(len(tlist),0,-1):
|
||||
problem_uris=result.keys()
|
||||
element=tlist[i-1]
|
||||
|
||||
# test here, if an element is a prefix of an uri which
|
||||
# generated an error before.
|
||||
# note that we walk here from childs to parents, thus
|
||||
# we cannot delete a parent if a child made a problem.
|
||||
# (see example in 8.6.2.1)
|
||||
ok=1
|
||||
for p in problem_uris:
|
||||
if is_prefix(element,p):
|
||||
ok=None
|
||||
break
|
||||
|
||||
if not ok: continue
|
||||
|
||||
# here we test for the exclude list which is the other way round!
|
||||
for p in exclude.keys():
|
||||
if is_prefix(p,element):
|
||||
ok=None
|
||||
break
|
||||
|
||||
if not ok: continue
|
||||
|
||||
# now delete stuff
|
||||
try:
|
||||
delone(dc,element)
|
||||
except DAV_Error, (ec,dd):
|
||||
result[element]=ec
|
||||
|
||||
return result
|
||||
|
||||
def delone(dc,uri):
|
||||
""" delete a single object """
|
||||
if dc.is_collection(uri):
|
||||
dc.rmcol(uri) # should be empty
|
||||
else:
|
||||
dc.rm(uri)
|
||||
|
||||
###
|
||||
### COPY
|
||||
###
|
||||
|
||||
# helper function
|
||||
|
||||
def copy(dc,src,dst):
|
||||
""" only copy the element
|
||||
|
||||
This is just a helper method factored out from copy and
|
||||
copytree. It will not handle the overwrite or depth header.
|
||||
|
||||
"""
|
||||
|
||||
# destination should have been deleted before
|
||||
if dc.exists(dst): raise DAV_Error, 412
|
||||
|
||||
# source should exist also
|
||||
if not dc.exists(src): raise DAV_NotFound
|
||||
|
||||
if dc.is_collection(src):
|
||||
dc.copycol(src,dst) # an exception will be passed thru
|
||||
else:
|
||||
dc.copy(src,dst) # an exception will be passed thru
|
||||
|
||||
|
||||
# the main functions
|
||||
|
||||
def copyone(dc,src,dst,overwrite=None):
|
||||
""" copy one resource to a new destination """
|
||||
|
||||
if overwrite and dc.exists(dst):
|
||||
delres=deltree(dc,dst)
|
||||
else:
|
||||
delres={}
|
||||
|
||||
# if we cannot delete everything, then do not copy!
|
||||
if delres: return delres
|
||||
|
||||
try:
|
||||
copy(dc,src,dst) # pass thru exceptions
|
||||
except DAV_Error, (ec,dd):
|
||||
return ec
|
||||
|
||||
def copytree(dc,src,dst,overwrite=None):
|
||||
""" copy a tree of resources to another location
|
||||
|
||||
dc -- dataclass to use
|
||||
src -- src uri from where to copy
|
||||
dst -- dst uri
|
||||
overwrite -- if 1 then delete dst uri before
|
||||
|
||||
returns dict of uri:error_code tuples from which
|
||||
another method can create a multistatus xml element.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
# first delete the destination resource
|
||||
if overwrite and dc.exists(dst):
|
||||
delres=deltree(dc,dst)
|
||||
else:
|
||||
delres={}
|
||||
|
||||
# if we cannot delete everything, then do not copy!
|
||||
if delres: return delres
|
||||
|
||||
# get the tree we have to copy
|
||||
tlist=create_treelist(dc,src)
|
||||
result={}
|
||||
|
||||
# prepare destination URIs (get the prefix)
|
||||
dpath=urlparse.urlparse(dst)[2]
|
||||
|
||||
for element in tlist:
|
||||
problem_uris=result.keys()
|
||||
|
||||
# now URIs get longer and longer thus we have
|
||||
# to test if we had a parent URI which we were not
|
||||
# able to copy in problem_uris which is the prefix
|
||||
# of the actual element. If it is, then we cannot
|
||||
# copy this as well but do not generate another error.
|
||||
ok=1
|
||||
for p in problem_uris:
|
||||
if is_prefix(p,element):
|
||||
ok=None
|
||||
break
|
||||
|
||||
if not ok: continue
|
||||
|
||||
# now create the destination URI which corresponds to
|
||||
# the actual source URI. -> actual_dst
|
||||
# ("subtract" the base src from the URI and prepend the
|
||||
# dst prefix to it.)
|
||||
esrc=replace(element,src,"")
|
||||
actual_dst=dpath+esrc
|
||||
|
||||
# now copy stuff
|
||||
try:
|
||||
copy(dc,element,actual_dst)
|
||||
except DAV_Error, (ec,dd):
|
||||
result[element]=ec
|
||||
|
||||
return result
|
||||
|
||||
|
||||
|
||||
###
|
||||
### MOVE
|
||||
###
|
||||
|
||||
|
||||
def moveone(dc,src,dst,overwrite=None):
|
||||
""" move a single resource
|
||||
|
||||
This is done by first copying it and then deleting
|
||||
the original.
|
||||
"""
|
||||
|
||||
# first copy it
|
||||
copyone(dc,src,dst,overwrite)
|
||||
|
||||
# then delete it
|
||||
dc.rm(src)
|
||||
|
||||
def movetree(dc,src,dst,overwrite=None):
|
||||
""" move a collection
|
||||
|
||||
This is done by first copying it and then deleting
|
||||
the original.
|
||||
|
||||
PROBLEM: if something did not copy then we have a problem
|
||||
when deleting as the original might get deleted!
|
||||
"""
|
||||
|
||||
# first copy it
|
||||
res=copytree(dc,src,dst,overwrite)
|
||||
|
||||
# then delete it
|
||||
res=deltree(dc,src,exclude=res)
|
||||
|
||||
return res
|
||||
|
|
@ -0,0 +1,133 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
python davserver
|
||||
Copyright (C) 1999 Christian Scholz (ruebe@aachen.heimat.de)
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Library General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Library General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Library General Public
|
||||
License along with this library; if not, write to the Free
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from xml.dom import ext
|
||||
from xml.dom.Document import Document
|
||||
|
||||
import sys
|
||||
import string
|
||||
import urlparse
|
||||
import urllib
|
||||
from StringIO import StringIO
|
||||
|
||||
import utils
|
||||
from constants import COLLECTION, OBJECT, DAV_PROPS, RT_ALLPROP, RT_PROPNAME, RT_PROP
|
||||
from errors import *
|
||||
from utils import create_treelist, quote_uri, gen_estring
|
||||
|
||||
class COPY:
|
||||
""" copy resources and eventually create multistatus responses
|
||||
|
||||
This module implements the COPY class which is responsible for
|
||||
copying resources. Usually the normal copy work is done in the
|
||||
interface class. This class only creates error messages if error
|
||||
occur.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def __init__(self,dataclass,src_uri,dst_uri,overwrite):
|
||||
self.__dataclass=dataclass
|
||||
self.__src=src_uri
|
||||
self.__dst=dst_uri
|
||||
self.__overwrite=overwrite
|
||||
|
||||
|
||||
def single_action(self):
|
||||
""" copy a normal resources.
|
||||
|
||||
We try to copy it and return the result code.
|
||||
This is for Depth==0
|
||||
|
||||
"""
|
||||
|
||||
dc=self.__dataclass
|
||||
base=self.__src
|
||||
|
||||
### some basic tests
|
||||
# test if dest exists and overwrite is false
|
||||
if dc.exists(self.__dst) and not self.__overwrite: raise DAV_Error, 412
|
||||
# test if src and dst are the same
|
||||
# (we assume that both uris are on the same server!)
|
||||
ps=urlparse.urlparse(self.__src)[2]
|
||||
pd=urlparse.urlparse(self.__dst)[2]
|
||||
if ps==pd: raise DAV_Error, 403
|
||||
|
||||
return dc.copyone(self.__src,self.__dst,self.__overwrite)
|
||||
|
||||
#return copyone(dc,self.__src,self.__dst,self.__overwrite)
|
||||
|
||||
def tree_action(self):
|
||||
""" copy a tree of resources (a collection)
|
||||
|
||||
Here we return a multistatus xml element.
|
||||
|
||||
"""
|
||||
dc=self.__dataclass
|
||||
base=self.__src
|
||||
|
||||
### some basic tests
|
||||
# test if dest exists and overwrite is false
|
||||
if dc.exists(self.__dst) and not self.__overwrite: raise DAV_Error, 412
|
||||
# test if src and dst are the same
|
||||
# (we assume that both uris are on the same server!)
|
||||
ps=urlparse.urlparse(self.__src)[2]
|
||||
pd=urlparse.urlparse(self.__dst)[2]
|
||||
if ps==pd: raise DAV_Error, 403
|
||||
|
||||
|
||||
result=dc.copytree(self.__src,self.__dst,self.__overwrite)
|
||||
#result=copytree(dc,self.__src,self.__dst,self.__overwrite)
|
||||
|
||||
if not result: return None
|
||||
|
||||
###
|
||||
### create the multistatus XML element
|
||||
### (this is also the same as in delete.py.
|
||||
### we might make a common method out of it)
|
||||
###
|
||||
|
||||
doc = Document(None)
|
||||
ms=doc.createElement("D:multistatus")
|
||||
ms.setAttribute("xmlns:D","DAV:")
|
||||
doc.appendChild(ms)
|
||||
|
||||
for el,ec in result.items():
|
||||
re=doc.createElement("D:response")
|
||||
hr=doc.createElement("D:href")
|
||||
st=doc.createElement("D:status")
|
||||
huri=doc.createTextNode(quote_uri(el))
|
||||
t=doc.createTextNode(gen_estring(ec))
|
||||
st.appendChild(t)
|
||||
hr.appendChild(huri)
|
||||
re.appendChild(hr)
|
||||
re.appendChild(st)
|
||||
ms.appendChild(re)
|
||||
|
||||
sfile=StringIO()
|
||||
ext.PrettyPrint(doc,stream=sfile)
|
||||
s=sfile.getvalue()
|
||||
sfile.close()
|
||||
return s
|
||||
|
|
@ -0,0 +1,102 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
python davserver
|
||||
Copyright (C) 1999 Christian Scholz (ruebe@aachen.heimat.de)
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Library General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Library General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Library General Public
|
||||
License along with this library; if not, write to the Free
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
||||
import sys
|
||||
import string
|
||||
import urlparse
|
||||
import urllib
|
||||
from StringIO import StringIO
|
||||
|
||||
import utils
|
||||
from constants import COLLECTION, OBJECT, DAV_PROPS
|
||||
from constants import RT_ALLPROP, RT_PROPNAME, RT_PROP
|
||||
from errors import *
|
||||
from utils import create_treelist, quote_uri, gen_estring, make_xmlresponse
|
||||
from davcmd import moveone, movetree
|
||||
|
||||
class MOVE:
    """ move resources and eventually create multistatus responses

    This class implements the MOVE method, which is responsible for
    moving resources.

    MOVE is implemented by a COPY followed by a DELETE of the old
    resource.
    """

    def __init__(self, dataclass, src_uri, dst_uri, overwrite):
        # dataclass -- the dav_interface implementation doing the real work
        # overwrite -- truthy when the Overwrite request header allows
        #              clobbering an existing destination
        self.__dataclass = dataclass
        self.__src = src_uri
        self.__dst = dst_uri
        self.__overwrite = overwrite

    def __check_preconditions(self):
        """ sanity checks shared by single_action() and tree_action()

        Raises DAV_Error(412) when the destination already exists and
        overwriting is not allowed, and DAV_Error(403) when source and
        destination resolve to the same path (we assume both uris are
        on the same server, so only the path component is compared).
        """
        # test if dest exists and overwrite is false
        if self.__dataclass.exists(self.__dst) and not self.__overwrite:
            raise DAV_Error(412)
        # test if src and dst are the same
        src_path = urlparse.urlparse(self.__src)[2]
        dst_path = urlparse.urlparse(self.__dst)[2]
        if src_path == dst_path:
            raise DAV_Error(403)

    def single_action(self):
        """ move a normal resource (Depth==0)

        We try to move it and return the result code.
        """
        self.__check_preconditions()
        return self.__dataclass.moveone(self.__src, self.__dst, self.__overwrite)

    def tree_action(self):
        """ move a tree of resources (a collection)

        Returns a multistatus xml string describing partial failures,
        or None when everything succeeded.
        """
        self.__check_preconditions()

        result = self.__dataclass.movetree(self.__src, self.__dst, self.__overwrite)
        if not result:
            return None

        # create the multistatus XML element from the {uri: error_code} dict
        return make_xmlresponse(result)
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
|
||||
python davserver
|
||||
Copyright (C) 1999 Christian Scholz (ruebe@aachen.heimat.de)
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Library General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Library General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Library General Public
|
||||
License along with this library; if not, write to the Free
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
|
||||
"""
|
||||
import os
|
||||
import string
|
||||
import urllib
|
||||
from StringIO import StringIO
|
||||
|
||||
from status import STATUS_CODES
|
||||
from utils import gen_estring, quote_uri, make_xmlresponse
|
||||
from davcmd import deltree
|
||||
|
||||
class DELETE:
    """ delete a single resource or a whole collection and report the outcome """

    def __init__(self, uri, dataclass):
        self.__dataclass = dataclass
        self.__uri = uri

    def delcol(self):
        """ delete a collection """
        outcome = self.__dataclass.deltree(self.__uri)

        if not len(outcome.items()):
            # empty error dict means every child was removed cleanly
            return None

        # build the multistatus answer from the per-uri error codes
        return make_xmlresponse(outcome)

    def delone(self):
        """ delete a resource """
        outcome = self.__dataclass.delone(self.__uri)

        if not outcome:
            return None
        if not len(outcome.items()):
            # everything ok
            return None

        # build the multistatus answer from the per-uri error codes
        return make_xmlresponse(outcome)
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
#!/usr/bin/env python
|
||||
"""
|
||||
|
||||
Exceptions for the DAVserver implementation
|
||||
|
||||
"""
|
||||
|
||||
class DAV_Error(Exception):
    """ base exception for the DAV server

    In general we can have the following arguments:

    1. the error code
    2. the error result element, e.g. a <multistatus> element
    """

    def __init__(self, *args):
        # normalize to a (code, detail) pair so callers can always
        # unpack both positions
        self.args = (args[0], "") if len(args) == 1 else args
|
||||
|
||||
class DAV_Secret(DAV_Error):
    """ the user is not allowed to know anything about it

    returning this for a property value means to exclude it
    from the response xml element.
    """

    def __init__(self):
        # error code 0 marks the property as invisible
        DAV_Error.__init__(self, 0)
|
||||
|
||||
class DAV_NotFound(DAV_Error):
    """ a requested property was not found for a resource """

    def __init__(self, *args):
        if not args:
            DAV_Error.__init__(self, 404)
            return
        detail = args[0]
        if isinstance(detail, list):
            # the missing path was given as a list of its components
            detail = "Path %s not found!" % ('/'.join(detail))
        DAV_Error.__init__(self, 404, detail)
|
||||
|
||||
class DAV_Forbidden(DAV_Error):
    """ a method on a resource is not allowed """

    def __init__(self, *args):
        if args:
            DAV_Error.__init__(self, 403, args[0])
        else:
            DAV_Error.__init__(self, 403)
|
||||
|
|
@ -0,0 +1,263 @@
|
|||
"""
|
||||
|
||||
basic interface class
|
||||
|
||||
use this for subclassing when writing your own interface
|
||||
class.
|
||||
|
||||
"""
|
||||
|
||||
from errors import *
|
||||
|
||||
import time
|
||||
from string import lower
|
||||
|
||||
class dav_interface:
|
||||
""" interface class for implementing DAV servers """
|
||||
|
||||
### defined properties (modify this but let the DAV stuff there!)
|
||||
### the format is namespace: [list of properties]
|
||||
|
||||
PROPS={"DAV:" : ('creationdate',
|
||||
'displayname',
|
||||
'getcontentlanguage',
|
||||
'getcontentlength',
|
||||
'getcontenttype',
|
||||
'getetag',
|
||||
'getlastmodified',
|
||||
'lockdiscovery',
|
||||
'resourcetype',
|
||||
'source',
|
||||
'supportedlock'),
|
||||
"NS2" : ("p1","p2")
|
||||
}
|
||||
|
||||
# here we define which methods handle which namespace
|
||||
# the first item is the namespace URI and the second one
|
||||
# the method prefix
|
||||
# e.g. for DAV:getcontenttype we call dav_getcontenttype()
|
||||
M_NS={"DAV:" : "_get_dav",
|
||||
"NS2" : "ns2" }
|
||||
|
||||
def get_propnames(self,uri):
|
||||
""" return the property names allowed for the given URI
|
||||
|
||||
In this method we simply return the above defined properties
|
||||
assuming that they are valid for any resource.
|
||||
You can override this in order to return a different set
|
||||
of property names for each resource.
|
||||
|
||||
"""
|
||||
return self.PROPS
|
||||
|
||||
def get_prop2(self,uri,ns,pname):
|
||||
""" return the value of a property
|
||||
"""
|
||||
if lower(ns)=="dav:": return self.get_dav(uri,pname)
|
||||
|
||||
raise DAV_NotFound
|
||||
|
||||
def get_prop(self,uri,ns,propname):
|
||||
""" return the value of a given property
|
||||
|
||||
uri -- uri of the object to get the property of
|
||||
ns -- namespace of the property
|
||||
pname -- name of the property
|
||||
"""
|
||||
if self.M_NS.has_key(ns):
|
||||
prefix=self.M_NS[ns]
|
||||
else:
|
||||
print "No namespace:",ns
|
||||
raise DAV_NotFound
|
||||
mname=prefix+"_"+propname
|
||||
if not hasattr(self,mname):
|
||||
raise DAV_NotFound
|
||||
|
||||
try:
|
||||
m=getattr(self,mname)
|
||||
r=m(uri)
|
||||
return r
|
||||
except AttributeError, e:
|
||||
print 'Property %s not supported' % propname
|
||||
print "Exception:", e
|
||||
raise DAV_NotFound
|
||||
|
||||
###
|
||||
### DATA methods (for GET and PUT)
|
||||
###
|
||||
|
||||
def get_data(self,uri):
|
||||
""" return the content of an object
|
||||
|
||||
return data or raise an exception
|
||||
|
||||
"""
|
||||
raise DAV_NotFound
|
||||
|
||||
def put(self,uri,data):
|
||||
""" write an object to the repository
|
||||
|
||||
return a result code or raise an exception
|
||||
"""
|
||||
|
||||
raise DAV_Forbidden
|
||||
|
||||
###
|
||||
### Methods for DAV properties
|
||||
###
|
||||
|
||||
def _get_dav_creationdate(self,uri):
|
||||
""" return the creationdate of a resource """
|
||||
d=self.get_creationdate(uri)
|
||||
# format it
|
||||
if isinstance(d, int) or isinstance(d, float):
|
||||
d = time.localtimetime(d)
|
||||
return time.strftime("%Y-%m-%dT%H:%M:%S%Z",d)
|
||||
|
||||
def _get_dav_getlastmodified(self,uri):
|
||||
""" return the last modified date of a resource """
|
||||
d=self.get_lastmodified(uri)
|
||||
if isinstance(d, int) or isinstance(d, float):
|
||||
d = time.localtime(d)
|
||||
# format it
|
||||
return time.asctime(d)
|
||||
|
||||
|
||||
###
|
||||
### OVERRIDE THESE!
|
||||
###
|
||||
|
||||
def get_creationdate(self,uri):
|
||||
""" return the creationdate of the resource """
|
||||
return time.time()
|
||||
|
||||
def get_lastmodified(self,uri):
|
||||
""" return the last modification date of the resource """
|
||||
return time.time()
|
||||
|
||||
|
||||
###
|
||||
### COPY MOVE DELETE
|
||||
###
|
||||
|
||||
### methods for deleting a resource
|
||||
|
||||
def rmcol(self,uri):
|
||||
""" delete a collection
|
||||
|
||||
This should not delete any children! This is automatically done
|
||||
before by the DELETE class in DAV/delete.py
|
||||
|
||||
return a success code or raise an exception
|
||||
|
||||
"""
|
||||
raise DAV_NotFound
|
||||
|
||||
def rm(self,uri):
|
||||
""" delete a single resource
|
||||
|
||||
return a success code or raise an exception
|
||||
|
||||
"""
|
||||
raise DAV_NotFound
|
||||
|
||||
"""
|
||||
|
||||
COPY/MOVE HANDLER
|
||||
|
||||
These handler are called when a COPY or MOVE method is invoked by
|
||||
a client. In the default implementation it works as follows:
|
||||
|
||||
- the davserver receives a COPY/MOVE method
|
||||
- the davcopy or davmove module will be loaded and the corresponding
|
||||
class will be initialized
|
||||
- this class parses the query and decides which method of the interface class
|
||||
to call:
|
||||
|
||||
copyone for a single resource to copy
|
||||
copytree for a tree to copy (collection)
|
||||
(the same goes for move of course).
|
||||
|
||||
- the interface class has now two options:
|
||||
1. to handle the action directly (e.g. cp or mv on filesystems)
|
||||
2. to let it handle via the copy/move methods in davcmd.
|
||||
|
||||
ad 1) The first approach can be used when we know that no error can
|
||||
happen inside a tree or when the action can exactly tell which
|
||||
element made which error. We have to collect these and return
|
||||
it in a dict of the form {uri: error_code, ...}
|
||||
|
||||
ad 2) The copytree/movetree/... methods of davcmd.py will do the recursion
|
||||
themselves and call for each resource the copy/move method of the
|
||||
interface class. Thus method will then only act on a single resource.
|
||||
(Thus a copycol on a normal unix filesystem actually only needs to do
|
||||
an mkdir as the content will be copied by the davcmd.py function.
|
||||
The davcmd.py method will also automatically collect all errors and
|
||||
return the dictionary described above.
|
||||
When you use 2) you also have to implement the copy() and copycol()
|
||||
methods in your interface class. See the example for details.
|
||||
|
||||
To decide which approach is the best you have to decide if your application
|
||||
is able to generate errors inside a tree. E.g. a function which completely
|
||||
fails on a tree if one of the tree's childs fail is not what we need. Then
|
||||
2) would be your way of doing it.
|
||||
Actually usually 2) is the better solution and should only be replaced by
|
||||
1) if you really need it.
|
||||
|
||||
The remaining question is if we should do the same for the DELETE method.
|
||||
|
||||
"""
|
||||
|
||||
### MOVE handlers
|
||||
|
||||
def moveone(self,src,dst,overwrite):
|
||||
""" move one resource with Depth=0 """
|
||||
return moveone(self,src,dst,overwrite)
|
||||
|
||||
def movetree(self,src,dst,overwrite):
|
||||
""" move a collection with Depth=infinity """
|
||||
return movetree(self,src,dst,overwrite)
|
||||
|
||||
### COPY handlers
|
||||
|
||||
def copyone(self,src,dst,overwrite):
|
||||
""" copy one resource with Depth=0 """
|
||||
return copyone(self,src,dst,overwrite)
|
||||
|
||||
def copytree(self,src,dst,overwrite):
|
||||
""" copy a collection with Depth=infinity """
|
||||
return copytree(self,src,dst,overwrite)
|
||||
|
||||
|
||||
### low level copy methods (you only need these for method 2)
|
||||
def copy(self,src,dst):
|
||||
""" copy a resource with depth==0
|
||||
|
||||
You don't need to bother about overwrite or not.
|
||||
This has been done already.
|
||||
|
||||
return a success code or raise an exception if something fails
|
||||
"""
|
||||
return 201
|
||||
|
||||
|
||||
def copycol(self,src,dst):
|
||||
""" copy a resource with depth==infinity
|
||||
|
||||
You don't need to bother about overwrite or not.
|
||||
This has been done already.
|
||||
|
||||
return a success code or raise an exception if something fails
|
||||
"""
|
||||
return 201
|
||||
|
||||
### some utility functions you need to implement
|
||||
|
||||
def exists(self,uri):
|
||||
""" return 1 or None depending on if a resource exists """
|
||||
return None # no
|
||||
|
||||
def is_collection(self,uri):
|
||||
""" return 1 or None depending on if a resource is a collection """
|
||||
return None # no
|
||||
|
|
@ -0,0 +1,372 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
python davserver
|
||||
Copyright (C) 1999 Christian Scholz (ruebe@aachen.heimat.de)
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Library General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Library General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Library General Public
|
||||
License along with this library; if not, write to the Free
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from xml.dom import ext
|
||||
from xml.dom.Document import Document
|
||||
|
||||
import sys
|
||||
import string
|
||||
import urlparse
|
||||
import urllib
|
||||
from StringIO import StringIO
|
||||
|
||||
import utils
|
||||
from constants import COLLECTION, OBJECT, DAV_PROPS, RT_ALLPROP, RT_PROPNAME, RT_PROP
|
||||
from errors import *
|
||||
|
||||
def utf8str(st):
    """ encode unicode objects as utf8 bytes, stringify everything else """
    if isinstance(st, unicode):
        return st.encode('utf8')
    return str(st)
|
||||
|
||||
class PROPFIND:
|
||||
""" parse a propfind xml element and extract props
|
||||
|
||||
It will set the following instance vars:
|
||||
|
||||
request_class : ALLPROP | PROPNAME | PROP
|
||||
proplist : list of properties
|
||||
nsmap : map of namespaces
|
||||
|
||||
The list of properties will contain tuples of the form
|
||||
(element name, ns_prefix, ns_uri)
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def __init__(self,uri,dataclass,depth):
|
||||
self.request_type=None
|
||||
self.nsmap={}
|
||||
self.proplist={}
|
||||
self.default_ns=None
|
||||
self.__dataclass=dataclass
|
||||
self.__depth=str(depth)
|
||||
self.__uri=uri
|
||||
self.use_full_urls=True
|
||||
self.__has_body=None # did we parse a body?
|
||||
|
||||
def read_propfind(self,xml_doc):
|
||||
self.request_type,self.proplist,self.namespaces=utils.parse_propfind(xml_doc)
|
||||
|
||||
# a violation of the expected logic: client (korganizer) will ask for DAV:resourcetype
|
||||
# but we also have to return the http://groupdav.org/:resourcetype property!
|
||||
if self.proplist.has_key('DAV:') and 'resourcetype' in self.proplist['DAV:']:
|
||||
if not self.proplist.has_key('http://groupdav.org/'):
|
||||
self.proplist['http://groupdav.org/'] = []
|
||||
self.proplist['http://groupdav.org/'].append('resourcetype')
|
||||
if 'DAV:' in self.namespaces: #TMP
|
||||
self.namespaces.append('http://groupdav.org/')
|
||||
|
||||
def createResponse(self):
|
||||
""" create the multistatus response
|
||||
|
||||
This will be delegated to the specific method
|
||||
depending on which request (allprop, propname, prop)
|
||||
was found.
|
||||
|
||||
If we get a PROPNAME then we simply return the list with empty
|
||||
values which we get from the interface class
|
||||
|
||||
If we get an ALLPROP we first get the list of properties and then
|
||||
we do the same as with a PROP method.
|
||||
|
||||
If the uri doesn't exist, return an xml response with a 404 status
|
||||
|
||||
"""
|
||||
|
||||
if not self.__dataclass.exists(self.__uri):
|
||||
raise DAV_NotFound("Path %s doesn't exist" % self.__uri)
|
||||
|
||||
if self.request_type==RT_ALLPROP:
|
||||
return self.create_allprop()
|
||||
|
||||
if self.request_type==RT_PROPNAME:
|
||||
return self.create_propname()
|
||||
|
||||
if self.request_type==RT_PROP:
|
||||
return self.create_prop()
|
||||
|
||||
# no body means ALLPROP!
|
||||
return self.create_allprop()
|
||||
|
||||
def create_propname(self):
|
||||
""" create a multistatus response for the prop names """
|
||||
|
||||
dc=self.__dataclass
|
||||
# create the document generator
|
||||
doc = Document(None)
|
||||
ms=doc.createElement("D:multistatus")
|
||||
ms.setAttribute("xmlns:D","DAV:")
|
||||
doc.appendChild(ms)
|
||||
|
||||
if self.__depth=="0":
|
||||
pnames=dc.get_propnames(self.__uri)
|
||||
re=self.mk_propname_response(self.__uri,pnames,doc)
|
||||
ms.appendChild(re)
|
||||
|
||||
elif self.__depth=="1":
|
||||
pnames=dc.get_propnames(self.__uri)
|
||||
re=self.mk_propname_response(self.__uri,pnames,doc)
|
||||
ms.appendChild(re)
|
||||
|
||||
for newuri in dc.get_childs(self.__uri):
|
||||
pnames=dc.get_propnames(newuri)
|
||||
re=self.mk_propname_response(newuri,pnames,doc)
|
||||
ms.appendChild(re)
|
||||
# *** depth=="infinity"
|
||||
|
||||
sfile=StringIO()
|
||||
ext.PrettyPrint(doc,stream=sfile)
|
||||
s=sfile.getvalue()
|
||||
sfile.close()
|
||||
return s
|
||||
|
||||
def create_allprop(self):
|
||||
""" return a list of all properties """
|
||||
self.proplist={}
|
||||
self.namespaces=[]
|
||||
for ns,plist in self.__dataclass.get_propnames(self.__uri).items():
|
||||
self.proplist[ns]=plist
|
||||
self.namespaces.append(ns)
|
||||
|
||||
return self.create_prop()
|
||||
|
||||
def create_prop(self):
|
||||
""" handle a <prop> request
|
||||
|
||||
This will
|
||||
|
||||
1. set up the <multistatus>-Framework
|
||||
|
||||
2. read the property values for each URI
|
||||
(which is dependant on the Depth header)
|
||||
This is done by the get_propvalues() method.
|
||||
|
||||
3. For each URI call the append_result() method
|
||||
to append the actual <result>-Tag to the result
|
||||
document.
|
||||
|
||||
We differ between "good" properties, which have been
|
||||
assigned a value by the interface class and "bad"
|
||||
properties, which resulted in an error, either 404
|
||||
(Not Found) or 403 (Forbidden).
|
||||
|
||||
"""
|
||||
|
||||
|
||||
# create the document generator
|
||||
doc = Document(None)
|
||||
ms=doc.createElement("D:multistatus")
|
||||
ms.setAttribute("xmlns:D","DAV:")
|
||||
doc.appendChild(ms)
|
||||
|
||||
if self.__depth=="0":
|
||||
gp,bp=self.get_propvalues(self.__uri)
|
||||
res=self.mk_prop_response(self.__uri,gp,bp,doc)
|
||||
ms.appendChild(res)
|
||||
|
||||
elif self.__depth=="1":
|
||||
gp,bp=self.get_propvalues(self.__uri)
|
||||
res=self.mk_prop_response(self.__uri,gp,bp,doc)
|
||||
ms.appendChild(res)
|
||||
|
||||
try:
|
||||
for newuri in self.__dataclass.get_childs(self.__uri):
|
||||
gp,bp=self.get_propvalues(newuri)
|
||||
res=self.mk_prop_response(newuri,gp,bp,doc)
|
||||
ms.appendChild(res)
|
||||
except DAV_NotFound:
|
||||
# If no children, never mind.
|
||||
pass
|
||||
|
||||
sfile=StringIO()
|
||||
ext.PrettyPrint(doc,stream=sfile)
|
||||
s=sfile.getvalue()
|
||||
sfile.close()
|
||||
return s
|
||||
|
||||
|
||||
def mk_propname_response(self,uri,propnames,doc):
|
||||
""" make a new <prop> result element for a PROPNAME request
|
||||
|
||||
This will simply format the propnames list.
|
||||
propnames should have the format {NS1 : [prop1, prop2, ...], NS2: ...}
|
||||
|
||||
"""
|
||||
re=doc.createElement("D:response")
|
||||
|
||||
# write href information
|
||||
href=doc.createElement("D:href")
|
||||
if self.use_full_urls:
|
||||
huri=doc.createTextNode(uri)
|
||||
else:
|
||||
uparts=urlparse.urlparse(uri)
|
||||
fileloc=uparts[2]
|
||||
huri=doc.createTextNode(urllib.quote(fileloc.encode('utf8')))
|
||||
href.appendChild(huri)
|
||||
re.appendChild(href)
|
||||
|
||||
ps=doc.createElement("D:propstat")
|
||||
nsnum=0
|
||||
|
||||
for ns,plist in propnames.items():
|
||||
# write prop element
|
||||
pr=doc.createElement("D:prop")
|
||||
nsp="ns"+str(nsnum)
|
||||
pr.setAttribute("xmlns:"+nsp,ns)
|
||||
nsnum=nsnum+1
|
||||
|
||||
# write propertynames
|
||||
for p in plist:
|
||||
pe=doc.createElement(nsp+":"+p)
|
||||
pr.appendChild(pe)
|
||||
|
||||
ps.appendChild(pr)
|
||||
|
||||
re.appendChild(ps)
|
||||
|
||||
return re
|
||||
|
||||
def mk_prop_response(self,uri,good_props,bad_props,doc):
|
||||
""" make a new <prop> result element
|
||||
|
||||
We differ between the good props and the bad ones for
|
||||
each generating an extra <propstat>-Node (for each error
|
||||
one, that means).
|
||||
|
||||
"""
|
||||
re=doc.createElement("D:response")
|
||||
# append namespaces to response
|
||||
nsnum=0
|
||||
for nsname in self.namespaces:
|
||||
re.setAttribute("xmlns:ns"+str(nsnum),nsname)
|
||||
nsnum=nsnum+1
|
||||
|
||||
# write href information
|
||||
href=doc.createElement("D:href")
|
||||
if self.use_full_urls:
|
||||
huri=doc.createTextNode(uri)
|
||||
else:
|
||||
uparts=urlparse.urlparse(uri)
|
||||
fileloc=uparts[2]
|
||||
huri=doc.createTextNode(urllib.quote(fileloc.encode('utf8')))
|
||||
href.appendChild(huri)
|
||||
re.appendChild(href)
|
||||
|
||||
# write good properties
|
||||
if good_props and len(good_props.items()):
|
||||
ps=doc.createElement("D:propstat")
|
||||
|
||||
gp=doc.createElement("D:prop")
|
||||
for ns in good_props.keys():
|
||||
ns_prefix="ns"+str(self.namespaces.index(ns))+":"
|
||||
for p,v in good_props[ns].items():
|
||||
pe=doc.createElement(ns_prefix+str(p))
|
||||
if v == None:
|
||||
pass
|
||||
elif ns=='DAV:' and p=="resourcetype":
|
||||
if v == 1:
|
||||
ve=doc.createElement("D:collection")
|
||||
pe.appendChild(ve)
|
||||
elif isinstance(v,tuple) and v[1] == ns:
|
||||
ve=doc.createElement(ns_prefix+v[0])
|
||||
pe.appendChild(ve)
|
||||
else:
|
||||
ve=doc.createTextNode(utf8str(v))
|
||||
pe.appendChild(ve)
|
||||
|
||||
gp.appendChild(pe)
|
||||
if gp.hasChildNodes():
|
||||
re.appendChild(ps)
|
||||
ps.appendChild(gp)
|
||||
s=doc.createElement("D:status")
|
||||
t=doc.createTextNode("HTTP/1.1 200 OK")
|
||||
s.appendChild(t)
|
||||
ps.appendChild(s)
|
||||
re.appendChild(ps)
|
||||
|
||||
# now write the errors!
|
||||
if len(bad_props.items()):
|
||||
|
||||
# write a propstat for each error code
|
||||
for ecode in bad_props.keys():
|
||||
ps=doc.createElement("D:propstat")
|
||||
re.appendChild(ps)
|
||||
bp=doc.createElement("D:prop")
|
||||
ps.appendChild(bp)
|
||||
|
||||
for ns in bad_props[ecode].keys():
|
||||
ns_prefix="ns"+str(self.namespaces.index(ns))+":"
|
||||
|
||||
for p in bad_props[ecode][ns]:
|
||||
pe=doc.createElement(ns_prefix+str(p))
|
||||
bp.appendChild(pe)
|
||||
|
||||
s=doc.createElement("D:status")
|
||||
t=doc.createTextNode(utils.gen_estring(ecode))
|
||||
s.appendChild(t)
|
||||
ps.appendChild(s)
|
||||
re.appendChild(ps)
|
||||
|
||||
# return the new response element
|
||||
return re
|
||||
|
||||
def get_propvalues(self,uri):
|
||||
""" create lists of property values for an URI
|
||||
|
||||
We create two lists for an URI: the properties for
|
||||
which we found a value and the ones for which we
|
||||
only got an error, either because they haven't been
|
||||
found or the user is not allowed to read them.
|
||||
|
||||
"""
|
||||
good_props={}
|
||||
bad_props={}
|
||||
|
||||
for (ns,plist) in self.proplist.items():
|
||||
good_props[ns]={}
|
||||
bad_props={}
|
||||
ec = 0
|
||||
for prop in plist:
|
||||
try:
|
||||
ec = 0
|
||||
r=self.__dataclass.get_prop(uri,ns,prop)
|
||||
good_props[ns][prop]=r
|
||||
except DAV_Error, error_code:
|
||||
ec=error_code[0]
|
||||
|
||||
# ignore props with error_code if 0 (invisible)
|
||||
if ec==0: continue
|
||||
|
||||
if bad_props.has_key(ec):
|
||||
if bad_props[ec].has_key(ns):
|
||||
bad_props[ec][ns].append(prop)
|
||||
else:
|
||||
bad_props[ec][ns]=[prop]
|
||||
else:
|
||||
bad_props[ec]={ns:[prop]}
|
||||
|
||||
return good_props, bad_props
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
"""
|
||||
|
||||
status codes for DAV services
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
||||
# HTTP / WebDAV status codes used when rendering "HTTP/1.1 <code> <text>"
# status lines.  Duplicate keys (201, 423, 507 appeared twice with the same
# values) have been removed -- in a dict literal later entries silently
# overwrite earlier ones, so runtime behavior is unchanged.
# NOTE(review): 999 is a project-private pseudo status, not a real HTTP code.
STATUS_CODES={
    102: "Processing",
    200: "Ok",
    201: "Created",
    204: "No Content",
    207: "Multi-Status",
    400: "Bad Request",
    403: "Forbidden",
    404: "Not Found",
    405: "Method Not Allowed",
    409: "Conflict",
    412: "Precondition failed",
    415: "Unsupported Media Type",
    422: "Unprocessable Entity",
    423: "Locked",
    424: "Failed Dependency",
    502: "Bad Gateway",
    507: "Insufficient Storage",
    999: "Some error in Create Method please check the data of create method"
}
|
|
@ -0,0 +1,160 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
|
||||
UTILITIES
|
||||
|
||||
- parse a propfind request body into a list of props
|
||||
|
||||
"""
|
||||
|
||||
from xml.dom import ext
|
||||
from xml.dom.Document import Document
|
||||
from xml.dom.ext.reader import PyExpat
|
||||
from xml.dom import Node
|
||||
from xml.dom import NodeIterator, NodeFilter
|
||||
|
||||
from string import lower, split, atoi, joinfields
|
||||
import urlparse
|
||||
from StringIO import StringIO
|
||||
|
||||
from constants import RT_ALLPROP, RT_PROPNAME, RT_PROP
|
||||
from status import STATUS_CODES
|
||||
|
||||
VERSION = '0.6'
|
||||
AUTHOR = 'Simon Pamies <s.pamies@banality.de>'
|
||||
|
||||
|
||||
def gen_estring(ecode):
    """ generate a full HTTP status line from a numeric (or numeric-string)
    error code

    e.g. 404 -> "HTTP/1.1 404 Not Found"; codes missing from STATUS_CODES
    yield the bare "HTTP/1.1 <code>".

    Uses the builtin int() and the "in" operator instead of the
    Python-2-only string.atoi()/dict.has_key() -- identical behavior,
    portable to Python 3.
    """
    ec = int(str(ecode))
    if ec in STATUS_CODES:
        return "HTTP/1.1 %s %s" % (ec, STATUS_CODES[ec])
    return "HTTP/1.1 %s" % (ec)
|
||||
|
||||
def parse_propfind(xml_doc):
    """ parse an propfind xml file and return a list of props

    returns:

    request_type -- ALLPROP, PROPNAME, PROP
    proplist     -- list of properties found
    namespaces   -- list of namespaces found
    """
    # Parse with PyXML's expat reader and walk every element node.
    doc = PyExpat.Reader().fromString(xml_doc)
    snit = doc.createNodeIterator(doc, NodeFilter.NodeFilter.SHOW_ELEMENT, None, None)

    request_type=None
    props={}
    namespaces=[]

    while 1:
        curr_elem = snit.nextNode()
        if not curr_elem: break
        # strip any "prefix:" from the element name and lowercase it
        ename=fname=lower(curr_elem.nodeName)
        if ":" in fname:
            ename=split(fname,":")[1]
        # structural elements set the request type; anything else is a
        # property name request
        if ename=="prop": request_type=RT_PROP; continue
        if ename=="propfind": continue
        if ename=="allprop": request_type=RT_ALLPROP; continue
        if ename=="propname": request_type=RT_PROPNAME; continue

        # rest should be names of attributes

        # group the requested property under its namespace URI; record
        # each namespace only the first time it is seen
        ns = curr_elem.namespaceURI
        if props.has_key(ns):
            props[ns].append(ename)
        else:
            props[ns]=[ename]
            namespaces.append(ns)

    return request_type,props,namespaces
|
||||
|
||||
|
||||
def create_treelist(dataclass, uri):
    """ create a list of resources out of a tree

    This function is used for the COPY, MOVE and DELETE methods.

    uri -- the root of the subtree to flatten

    Returns the flattened tree as a list, root first.
    """
    pending = [uri]
    flattened = [uri]
    while len(pending):
        node = pending[-1]
        if dataclass.is_collection(node):
            children = dataclass.get_childs(node)
        else:
            children = []
        if len(children):
            flattened = flattened + children
        # this node is processed; queue its children instead
        del pending[-1]
        if len(children):
            pending = pending + children
    return flattened
||||
|
||||
def is_prefix(uri1, uri2):
    """ returns 1 if uri1 is a prefix of uri2, otherwise None """
    if uri2.startswith(uri1):
        return 1
    return None
|
||||
|
||||
def quote_uri(uri):
    """ quote an URL but not the protocol part """
    import urlparse
    import urllib

    parts = urlparse.urlparse(uri)
    # only the path component (index 2) gets percent-quoted
    quoted_path = urllib.quote(parts[2])
    return urlparse.urlunparse(
        (parts[0], parts[1], quoted_path, parts[3], parts[4], parts[5]))
|
||||
|
||||
def get_uriparentpath(uri):
    """ extract the uri path and remove the last element """
    path = urlparse.urlparse(uri)[2]
    return joinfields(split(path, "/")[:-1], "/")
|
||||
|
||||
def get_urifilename(uri):
    """ extract the uri path and return the last element """
    path = urlparse.urlparse(uri)[2]
    return split(path, "/")[-1]
|
||||
|
||||
def get_parenturi(uri):
    """ return the parent of the given resource """
    parts = urlparse.urlparse(uri)
    # drop the last path segment, keep everything else untouched
    parent_path = joinfields(split(parts[2], "/")[:-1], "/")
    return urlparse.urlunparse(
        (parts[0], parts[1], parent_path, parts[3], parts[4], parts[5]))
|
||||
|
||||
### XML utilities
|
||||
|
||||
def make_xmlresponse(result):
    """ construct a multistatus response from a dict of uri: error_code elements """
    # FIX: this module does "from xml.dom.Document import Document", so the
    # class itself is in scope; the previous "Document.Document(None)" would
    # raise AttributeError (propfind.py already calls Document(None)).
    doc = Document(None)
    ms=doc.createElement("D:multistatus")
    ms.setAttribute("xmlns:D","DAV:")
    doc.appendChild(ms)

    # one <D:response> with href + status per (uri, error_code) pair
    for el,ec in result.items():
        re=doc.createElement("D:response")
        hr=doc.createElement("D:href")
        st=doc.createElement("D:status")
        huri=doc.createTextNode(quote_uri(el))
        t=doc.createTextNode(gen_estring(ec))
        st.appendChild(t)
        hr.appendChild(huri)
        re.appendChild(hr)
        re.appendChild(st)
        ms.appendChild(re)

    sfile=StringIO()
    ext.PrettyPrint(doc,stream=sfile)
    s=sfile.getvalue()
    sfile.close()
    return s
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
##############################################################################
|
||||
#
|
||||
# Copyright (c) 2004 TINY SPRL. (http://tiny.be) All Rights Reserved.
|
||||
# Fabien Pinckaers <fp@tiny.Be>
|
||||
#
|
||||
# WARNING: This program as such is intended to be used by professional
|
||||
# programmers who take the whole responsability of assessing all potential
|
||||
# consequences resulting from its eventual inadequacies and bugs
|
||||
# End users who are looking for a ready-to-use solution with commercial
|
||||
# garantees and support are strongly adviced to contract a Free Software
|
||||
# Service Company
|
||||
#
|
||||
# This program is Free Software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
import webdav_server
|
|
@ -0,0 +1,47 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# Copyright (c) 2004 TINY SPRL. (http://tiny.be), 2009 P. Christeas
|
||||
# All Rights Reserved.
|
||||
# Fabien Pinckaers <fp@tiny.Be>
|
||||
#
|
||||
# WARNING: This program as such is intended to be used by professional
|
||||
# programmers who take the whole responsability of assessing all potential
|
||||
# consequences resulting from its eventual inadequacies and bugs
|
||||
# End users who are looking for a ready-to-use solution with commercial
|
||||
# garantees and support are strongly adviced to contract a Free Software
|
||||
# Service Company
|
||||
#
|
||||
# This program is Free Software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
{
|
||||
"name" : "WebDAV server for Document Management",
|
||||
"version" : "2.0",
|
||||
"author" : "Tiny",
|
||||
"category" : "Generic Modules/Others",
|
||||
"website": "http://www.openerp.com",
|
||||
"description": """ With this module, the WebDAV server for the documents is activated. You can then use any compatible browser to remotely see the attachments of OpenObject.
|
||||
|
||||
After installation, the webDAV server can be controlled by a [webdav] section in the server's config.
|
||||
""",
|
||||
"depends" : ["base", "document"],
|
||||
"init_xml" : [],
|
||||
"update_xml" : [],
|
||||
"demo_xml" : [],
|
||||
"active": False,
|
||||
"installable": True
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
import time
|
||||
import heapq
|
||||
|
||||
def memoize(maxsize):
|
||||
"""decorator to 'memoize' a function - caching its results"""
|
||||
def decorating_function(f):
|
||||
cache = {} # map from key to value
|
||||
heap = [] # list of keys, in LRU heap
|
||||
cursize = 0 # because len() is slow
|
||||
def wrapper(*args):
|
||||
key = repr(args)
|
||||
# performance crap
|
||||
_cache=cache
|
||||
_heap=heap
|
||||
_heappop = heapq.heappop
|
||||
_heappush = heapq.heappush
|
||||
_time = time.time
|
||||
_cursize = cursize
|
||||
_maxsize = maxsize
|
||||
if not _cache.has_key(key):
|
||||
if _cursize == _maxsize:
|
||||
# pop oldest element
|
||||
(_,oldkey) = _heappop(_heap)
|
||||
_cache.pop(oldkey)
|
||||
else:
|
||||
_cursize += 1
|
||||
# insert this element
|
||||
_cache[key] = f(*args)
|
||||
_heappush(_heap,(_time(),key))
|
||||
wrapper.misses += 1
|
||||
else:
|
||||
wrapper.hits += 1
|
||||
return cache[key]
|
||||
wrapper.__doc__ = f.__doc__
|
||||
wrapper.__name__ = f.__name__
|
||||
wrapper.hits = wrapper.misses = 0
|
||||
return wrapper
|
||||
return decorating_function
|
||||
|
|
@ -0,0 +1,653 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# Copyright (c) 2004 TINY SPRL. (http://tiny.be) All Rights Reserved.
|
||||
# Fabien Pinckaers <fp@tiny.Be>
|
||||
#
|
||||
# WARNING: This program as such is intended to be used by professional
|
||||
# programmers who take the whole responsability of assessing all potential
|
||||
# consequences resulting from its eventual inadequacies and bugs
|
||||
# End users who are looking for a ready-to-use solution with commercial
|
||||
# garantees and support are strongly adviced to contract a Free Software
|
||||
# Service Company
|
||||
#
|
||||
# This program is Free Software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
import pooler
|
||||
|
||||
import base64
|
||||
import sys
|
||||
import os
|
||||
import time
|
||||
from string import joinfields, split, lower
|
||||
|
||||
from service import security
|
||||
|
||||
import netsvc
|
||||
import urlparse
|
||||
|
||||
from DAV.constants import COLLECTION, OBJECT
|
||||
from DAV.errors import *
|
||||
from DAV.iface import *
|
||||
import urllib
|
||||
|
||||
from DAV.davcmd import copyone, copytree, moveone, movetree, delone, deltree
|
||||
|
||||
from cache import memoize
|
||||
|
||||
CACHE_SIZE=20000
|
||||
|
||||
#hack for urlparse: add webdav in the net protocols
|
||||
urlparse.uses_netloc.append('webdav')
|
||||
urlparse.uses_netloc.append('webdavs')
|
||||
|
||||
class tinydav_handler(dav_interface):
|
||||
"""
|
||||
This class models a Tiny ERP interface for the DAV server
|
||||
"""
|
||||
PROPS={'DAV:': dav_interface.PROPS['DAV:'], }
|
||||
|
||||
M_NS={ "DAV:" : dav_interface.M_NS['DAV:'], }
|
||||
|
||||
def __init__(self, parent, verbose=False):
|
||||
self.db_name = False
|
||||
self.directory_id=False
|
||||
self.db_name_list=[]
|
||||
self.parent = parent
|
||||
self.baseuri = parent.baseuri
|
||||
|
||||
def get_propnames(self,uri):
|
||||
props = self.PROPS
|
||||
self.parent.log_message('get propnames: %s' % uri)
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
if not dbname:
|
||||
cr.close()
|
||||
return props
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
if node:
|
||||
props.update(node.get_dav_props(cr))
|
||||
cr.close()
|
||||
return props
|
||||
|
||||
def get_prop(self,uri,ns,propname):
|
||||
""" return the value of a given property
|
||||
|
||||
uri -- uri of the object to get the property of
|
||||
ns -- namespace of the property
|
||||
pname -- name of the property
|
||||
"""
|
||||
if self.M_NS.has_key(ns):
|
||||
return dav_interface.get_prop(self,uri,ns,propname)
|
||||
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
if not dbname:
|
||||
cr.close()
|
||||
raise DAV_NotFound
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
if not node:
|
||||
cr.close()
|
||||
raise DAV_NotFound
|
||||
res = node.get_dav_eprop(cr,ns,propname)
|
||||
cr.close()
|
||||
return res
|
||||
|
||||
#
|
||||
# def get_db(self,uri):
|
||||
# names=self.uri2local(uri).split('/')
|
||||
# self.db_name=False
|
||||
# if len(names) > 1:
|
||||
# self.db_name=self.uri2local(uri).split('/')[1]
|
||||
# if self.db_name=='':
|
||||
# raise Exception,'Plese specify Database name in folder'
|
||||
# return self.db_name
|
||||
#
|
||||
|
||||
def later_get_db_from_path(self,path):
|
||||
return "aaa"
|
||||
|
||||
def urijoin(self,*ajoin):
|
||||
""" Return the base URI of this request, or even join it with the
|
||||
ajoin path elements
|
||||
"""
|
||||
return self.baseuri+ '/'.join(ajoin)
|
||||
|
||||
@memoize(4)
|
||||
def db_list(self):
|
||||
s = netsvc.LocalService('db')
|
||||
result = s.list()
|
||||
self.db_name_list=[]
|
||||
for db_name in result:
|
||||
db = pooler.get_db_only(db_name)
|
||||
cr = db.cursor()
|
||||
cr.execute("select id from ir_module_module where name = 'document' and state='installed' ")
|
||||
res=cr.fetchone()
|
||||
if res and len(res):
|
||||
self.db_name_list.append(db_name)
|
||||
cr.close()
|
||||
return self.db_name_list
|
||||
|
||||
def get_childs(self,uri):
|
||||
""" return the child objects as self.baseuris for the given URI """
|
||||
self.parent.log_message('get childs: %s' % uri)
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
|
||||
if not dbname:
|
||||
s = netsvc.LocalService('db')
|
||||
return map(lambda x: self.urijoin(x), self.db_list())
|
||||
result = []
|
||||
node = self.uri2object(cr,uid,pool, uri2[:])
|
||||
if not node:
|
||||
cr.close()
|
||||
raise DAV_NotFound(uri2)
|
||||
else:
|
||||
fp = node.full_path()
|
||||
if fp and len(fp):
|
||||
self.parent.log_message('childs: @%s' % fp)
|
||||
fp = '/'.join(fp)
|
||||
else:
|
||||
fp = None
|
||||
for d in node.children(cr):
|
||||
self.parent.log_message('child: %s' % d.path)
|
||||
if fp:
|
||||
result.append( self.urijoin(dbname,fp,d.path) )
|
||||
else:
|
||||
result.append( self.urijoin(dbname,d.path) )
|
||||
cr.close()
|
||||
return result
|
||||
|
||||
def uri2local(self, uri):
|
||||
uparts=urlparse.urlparse(uri)
|
||||
reluri=uparts[2]
|
||||
if reluri and reluri[-1]=="/":
|
||||
reluri=reluri[:-1]
|
||||
return reluri
|
||||
|
||||
#
|
||||
# pos: -1 to get the parent of the uri
|
||||
#
|
||||
def get_cr(self, uri):
|
||||
pdb = self.parent.auth_proxy.last_auth
|
||||
reluri = self.uri2local(uri)
|
||||
try:
|
||||
dbname = reluri.split('/')[2]
|
||||
except:
|
||||
dbname = False
|
||||
if not dbname:
|
||||
return None, None, None, False, None
|
||||
if not pdb and dbname:
|
||||
# if dbname was in our uri, we should have authenticated
|
||||
# against that.
|
||||
raise Exception("Programming error")
|
||||
assert pdb == dbname, " %s != %s" %(pdb, dbname)
|
||||
user, passwd, dbn2, uid = self.parent.auth_proxy.auth_creds[pdb]
|
||||
db,pool = pooler.get_db_and_pool(dbname)
|
||||
cr = db.cursor()
|
||||
uri2 = reluri.split('/')[3:]
|
||||
return cr, uid, pool, dbname, uri2
|
||||
|
||||
def uri2object(self, cr,uid, pool,uri):
|
||||
if not uid:
|
||||
return None
|
||||
return pool.get('document.directory').get_object(cr, uid, uri)
|
||||
|
||||
def get_data(self,uri):
|
||||
self.parent.log_message('GET: %s' % uri)
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
try:
|
||||
if not dbname:
|
||||
raise DAV_Error, 409
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
if not node:
|
||||
raise DAV_NotFound(uri2)
|
||||
try:
|
||||
datas = node.get_data(cr)
|
||||
except TypeError,e:
|
||||
import traceback
|
||||
self.parent.log_error("GET typeError: %s", str(e))
|
||||
self.parent.log_message("Exc: %s",traceback.format_exc())
|
||||
raise DAV_Forbidden
|
||||
except IndexError,e :
|
||||
self.parent.log_error("GET IndexError: %s", str(e))
|
||||
raise DAV_NotFound(uri2)
|
||||
except Exception,e:
|
||||
import traceback
|
||||
self.parent.log_error("GET exception: %s",str(e))
|
||||
self.parent.log_message("Exc: %s", traceback.format_exc())
|
||||
raise DAV_Error, 409
|
||||
return datas
|
||||
finally:
|
||||
cr.close()
|
||||
|
||||
@memoize(CACHE_SIZE)
|
||||
def _get_dav_resourcetype(self,uri):
|
||||
""" return type of object """
|
||||
self.parent.log_message('get RT: %s' % uri)
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
try:
|
||||
if not dbname:
|
||||
return COLLECTION
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
if not node:
|
||||
raise DAV_NotFound(uri2)
|
||||
if node.type in ('collection','database'):
|
||||
return COLLECTION
|
||||
return OBJECT
|
||||
finally:
|
||||
cr.close()
|
||||
|
||||
def _get_dav_displayname(self,uri):
|
||||
self.parent.log_message('get DN: %s' % uri)
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
if not dbname:
|
||||
cr.close()
|
||||
return COLLECTION
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
if not node:
|
||||
cr.close()
|
||||
raise DAV_NotFound(uri2)
|
||||
cr.close()
|
||||
return node.displayname
|
||||
|
||||
@memoize(CACHE_SIZE)
|
||||
def _get_dav_getcontentlength(self,uri):
|
||||
""" return the content length of an object """
|
||||
self.parent.log_message('get length: %s' % uri)
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
result = 0
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
if not dbname:
|
||||
cr.close()
|
||||
return '0'
|
||||
node = self.uri2object(cr, uid, pool, uri2)
|
||||
if not node:
|
||||
cr.close()
|
||||
raise DAV_NotFound(uri2)
|
||||
result = node.content_length or 0
|
||||
cr.close()
|
||||
return str(result)
|
||||
|
||||
@memoize(CACHE_SIZE)
|
||||
def _get_dav_getetag(self,uri):
|
||||
""" return the ETag of an object """
|
||||
self.parent.log_message('get etag: %s' % uri)
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
result = 0
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
if not dbname:
|
||||
cr.close()
|
||||
return '0'
|
||||
node = self.uri2object(cr, uid, pool, uri2)
|
||||
if not node:
|
||||
cr.close()
|
||||
raise DAV_NotFound(uri2)
|
||||
result = node.get_etag(cr)
|
||||
cr.close()
|
||||
return str(result)
|
||||
|
||||
@memoize(CACHE_SIZE)
|
||||
def get_lastmodified(self,uri):
|
||||
""" return the last modified date of the object """
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
today = time.time()
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
try:
|
||||
if not dbname:
|
||||
return today
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
if not node:
|
||||
raise DAV_NotFound(uri2)
|
||||
if node.write_date:
|
||||
return time.mktime(time.strptime(node.write_date,'%Y-%m-%d %H:%M:%S'))
|
||||
else:
|
||||
return today
|
||||
finally:
|
||||
cr.close()
|
||||
|
||||
@memoize(CACHE_SIZE)
|
||||
def get_creationdate(self,uri):
|
||||
""" return the last modified date of the object """
|
||||
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
try:
|
||||
if not dbname:
|
||||
raise DAV_Error, 409
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
if not node:
|
||||
raise DAV_NotFound(uri2)
|
||||
if node.create_date:
|
||||
result = time.strptime(node.create_date,'%Y-%m-%d %H:%M:%S')
|
||||
else:
|
||||
result = time.gmtime()
|
||||
return result
|
||||
finally:
|
||||
cr.close()
|
||||
|
||||
@memoize(CACHE_SIZE)
|
||||
def _get_dav_getcontenttype(self,uri):
|
||||
self.parent.log_message('get contenttype: %s' % uri)
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
try:
|
||||
if not dbname:
|
||||
return 'httpd/unix-directory'
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
if not node:
|
||||
raise DAV_NotFound(uri2)
|
||||
|
||||
result = 'application/octet-stream'
|
||||
#if node.type=='collection':
|
||||
#result ='httpd/unix-directory'
|
||||
#else:
|
||||
result = node.mimetype
|
||||
return result
|
||||
#raise DAV_NotFound, 'Could not find %s' % path
|
||||
finally:
|
||||
cr.close()
|
||||
|
||||
def mkcol(self,uri):
|
||||
""" create a new collection """
|
||||
self.parent.log_message('MKCOL: %s' % uri)
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
parent='/'.join(uri.split('/')[:-1])
|
||||
if not parent.startswith(self.baseuri):
|
||||
parent=self.baseuri + ''.join(parent[1:])
|
||||
if not uri.startswith(self.baseuri):
|
||||
uri=self.baseuri + ''.join(uri[1:])
|
||||
|
||||
|
||||
cr, uid, pool,dbname, uri2 = self.get_cr(uri)
|
||||
if not dbname:
|
||||
raise DAV_Error, 409
|
||||
node = self.uri2object(cr,uid,pool, uri2[:-1])
|
||||
object2=node and node.object2 or False
|
||||
object=node and node.object or False
|
||||
|
||||
objname = uri2[-1]
|
||||
if not object:
|
||||
pool.get('document.directory').create(cr, uid, {
|
||||
'name': objname,
|
||||
'parent_id': False,
|
||||
'ressource_type_id': False,
|
||||
'ressource_id': False
|
||||
})
|
||||
else:
|
||||
pool.get('document.directory').create(cr, uid, {
|
||||
'name': objname,
|
||||
'parent_id': object.id,
|
||||
'ressource_type_id': object.ressource_type_id.id,
|
||||
'ressource_id': object2 and object2.id or False
|
||||
})
|
||||
|
||||
cr.commit()
|
||||
cr.close()
|
||||
return True
|
||||
|
||||
def put(self,uri,data,content_type=None):
|
||||
""" put the object into the filesystem """
|
||||
self.parent.log_message('Putting %s (%d), %s'%( unicode(uri,'utf8'), len(data), content_type))
|
||||
parent='/'.join(uri.split('/')[:-1])
|
||||
cr, uid, pool,dbname, uri2 = self.get_cr(uri)
|
||||
if not dbname:
|
||||
raise DAV_Forbidden
|
||||
try:
|
||||
node = self.uri2object(cr,uid,pool, uri2[:])
|
||||
except:
|
||||
node = False
|
||||
objname = uri2[-1]
|
||||
ext = objname.find('.') >0 and objname.split('.')[1] or False
|
||||
|
||||
if not node:
|
||||
dir_node = self.uri2object(cr,uid,pool, uri2[:-1])
|
||||
if not dir_node:
|
||||
raise DAV_NotFound('Parent folder not found')
|
||||
try:
|
||||
dir_node.create_child(cr,objname,data)
|
||||
except Exception,e:
|
||||
import traceback
|
||||
self.parent.log_error("Cannot create %s: %s", objname, str(e))
|
||||
self.parent.log_message("Exc: %s",traceback.format_exc())
|
||||
raise DAV_Forbidden
|
||||
else:
|
||||
try:
|
||||
node.set_data(cr,data)
|
||||
except Exception,e:
|
||||
import traceback
|
||||
self.parent.log_error("Cannot save %s: %s", objname, str(e))
|
||||
self.parent.log_message("Exc: %s",traceback.format_exc())
|
||||
raise DAV_Forbidden
|
||||
|
||||
cr.commit()
|
||||
|
||||
return 201
|
||||
|
||||
def rmcol(self,uri):
|
||||
""" delete a collection """
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
|
||||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
if True or not dbname: # *-*
|
||||
raise DAV_Error, 409
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
object2=node and node.object2 or False
|
||||
object=node and node.object or False
|
||||
if object._table_name=='document.directory':
|
||||
if object.child_ids:
|
||||
raise DAV_Forbidden # forbidden
|
||||
if object.file_ids:
|
||||
raise DAV_Forbidden # forbidden
|
||||
res = pool.get('document.directory').unlink(cr, uid, [object.id])
|
||||
|
||||
cr.commit()
|
||||
cr.close()
|
||||
return 204
|
||||
|
||||
def rm(self,uri):
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
|
||||
object=False
|
||||
cr, uid, pool,dbname, uri2 = self.get_cr(uri)
|
||||
#if not dbname:
|
||||
if True:
|
||||
raise DAV_Error, 409
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
object2=node and node.object2 or False
|
||||
object=node and node.object or False
|
||||
if not object:
|
||||
raise DAV_NotFound
|
||||
|
||||
self.parent.log_message(' rm %s "%s"'%(object._table_name,uri))
|
||||
if object._table_name=='ir.attachment':
|
||||
res = pool.get('ir.attachment').unlink(cr, uid, [object.id])
|
||||
else:
|
||||
raise DAV_Forbidden # forbidden
|
||||
parent='/'.join(uri.split('/')[:-1])
|
||||
cr.commit()
|
||||
cr.close()
|
||||
return 204
|
||||
|
||||
### DELETE handlers (examples)
|
||||
### (we use the predefined methods in davcmd instead of doing
|
||||
### a rm directly
|
||||
###
|
||||
|
||||
def delone(self,uri):
|
||||
""" delete a single resource
|
||||
|
||||
You have to return a result dict of the form
|
||||
uri:error_code
|
||||
or None if everything's ok
|
||||
|
||||
"""
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
res=delone(self,uri)
|
||||
parent='/'.join(uri.split('/')[:-1])
|
||||
return res
|
||||
|
||||
def deltree(self,uri):
|
||||
""" delete a collection
|
||||
|
||||
You have to return a result dict of the form
|
||||
uri:error_code
|
||||
or None if everything's ok
|
||||
"""
|
||||
if uri[-1]=='/':uri=uri[:-1]
|
||||
res=deltree(self,uri)
|
||||
parent='/'.join(uri.split('/')[:-1])
|
||||
return res
|
||||
|
||||
|
||||
###
|
||||
### MOVE handlers (examples)
|
||||
###
|
||||
|
||||
def moveone(self,src,dst,overwrite):
|
||||
""" move one resource with Depth=0
|
||||
|
||||
an alternative implementation would be
|
||||
|
||||
result_code=201
|
||||
if overwrite:
|
||||
result_code=204
|
||||
r=os.system("rm -f '%s'" %dst)
|
||||
if r: return 412
|
||||
r=os.system("mv '%s' '%s'" %(src,dst))
|
||||
if r: return 412
|
||||
return result_code
|
||||
|
||||
(untested!). This would not use the davcmd functions
|
||||
and thus can only detect errors directly on the root node.
|
||||
"""
|
||||
res=moveone(self,src,dst,overwrite)
|
||||
return res
|
||||
|
||||
def movetree(self,src,dst,overwrite):
|
||||
""" move a collection with Depth=infinity
|
||||
|
||||
an alternative implementation would be
|
||||
|
||||
result_code=201
|
||||
if overwrite:
|
||||
result_code=204
|
||||
r=os.system("rm -rf '%s'" %dst)
|
||||
if r: return 412
|
||||
r=os.system("mv '%s' '%s'" %(src,dst))
|
||||
if r: return 412
|
||||
return result_code
|
||||
|
||||
(untested!). This would not use the davcmd functions
|
||||
and thus can only detect errors directly on the root node"""
|
||||
|
||||
res=movetree(self,src,dst,overwrite)
|
||||
return res
|
||||
|
||||
###
|
||||
### COPY handlers
|
||||
###
|
||||
|
||||
def copyone(self,src,dst,overwrite):
|
||||
""" copy one resource with Depth=0
|
||||
|
||||
an alternative implementation would be
|
||||
|
||||
result_code=201
|
||||
if overwrite:
|
||||
result_code=204
|
||||
r=os.system("rm -f '%s'" %dst)
|
||||
if r: return 412
|
||||
r=os.system("cp '%s' '%s'" %(src,dst))
|
||||
if r: return 412
|
||||
return result_code
|
||||
|
||||
(untested!). This would not use the davcmd functions
|
||||
and thus can only detect errors directly on the root node.
|
||||
"""
|
||||
res=copyone(self,src,dst,overwrite)
|
||||
return res
|
||||
|
||||
def copytree(self,src,dst,overwrite):
|
||||
""" copy a collection with Depth=infinity
|
||||
|
||||
an alternative implementation would be
|
||||
|
||||
result_code=201
|
||||
if overwrite:
|
||||
result_code=204
|
||||
r=os.system("rm -rf '%s'" %dst)
|
||||
if r: return 412
|
||||
r=os.system("cp -r '%s' '%s'" %(src,dst))
|
||||
if r: return 412
|
||||
return result_code
|
||||
|
||||
(untested!). This would not use the davcmd functions
|
||||
and thus can only detect errors directly on the root node"""
|
||||
res=copytree(self,src,dst,overwrite)
|
||||
return res
|
||||
|
||||
###
|
||||
### copy methods.
|
||||
### This methods actually copy something. low-level
|
||||
### They are called by the davcmd utility functions
|
||||
### copytree and copyone (not the above!)
|
||||
### Look in davcmd.py for further details.
|
||||
###
|
||||
|
||||
def copy(self,src,dst):
|
||||
src=urllib.unquote(src)
|
||||
dst=urllib.unquote(dst)
|
||||
ct = self._get_dav_getcontenttype(src)
|
||||
data = self.get_data(src)
|
||||
self.put(dst,data,ct)
|
||||
return 201
|
||||
|
||||
def copycol(self,src,dst):
|
||||
""" copy a collection.
|
||||
|
||||
As this is not recursive (the davserver recurses itself)
|
||||
we will only create a new directory here. For some more
|
||||
advanced systems we might also have to copy properties from
|
||||
the source to the destination.
|
||||
"""
|
||||
print " copy a collection."
|
||||
return self.mkcol(dst)
|
||||
|
||||
|
||||
def exists(self,uri):
|
||||
""" test if a resource exists """
|
||||
result = False
|
||||
cr, uid, pool,dbname, uri2 = self.get_cr(uri)
|
||||
if not dbname:
|
||||
return True
|
||||
try:
|
||||
node = self.uri2object(cr,uid,pool, uri2)
|
||||
if node:
|
||||
result = True
|
||||
except:
|
||||
pass
|
||||
cr.close()
|
||||
return result
|
||||
|
||||
@memoize(CACHE_SIZE)
|
||||
def is_collection(self,uri):
|
||||
""" test if the given uri is a collection """
|
||||
return self._get_dav_resourcetype(uri)==COLLECTION
|
|
@ -0,0 +1,26 @@
|
|||
# Translation of OpenERP Server.
|
||||
# This file contains the translation of the following modules:
|
||||
# * document_webdav_old
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: OpenERP Server 5.0.0\n"
|
||||
"Report-Msgid-Bugs-To: support@openerp.com\n"
|
||||
"POT-Creation-Date: 2009-05-30 15:14:08+0000\n"
|
||||
"PO-Revision-Date: 2009-05-30 15:14:08+0000\n"
|
||||
"Last-Translator: <>\n"
|
||||
"Language-Team: \n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: \n"
|
||||
"Plural-Forms: \n"
|
||||
|
||||
#. module: document_webdav_old
|
||||
#: model:ir.module.module,description:document_webdav_old.module_meta_information
|
||||
msgid "This is a complete document management system:\n"
|
||||
" * WebDav Interface\n"
|
||||
" * User Authentification\n"
|
||||
" * Document Indexation\n"
|
||||
""
|
||||
msgstr ""
|
||||
|
|
@ -0,0 +1,80 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
|
||||
#
|
||||
# Copyright P. Christeas <p_christ@hol.gr> 2008,2009
|
||||
#
|
||||
#
|
||||
# WARNING: This program as such is intended to be used by professional
|
||||
# programmers who take the whole responsability of assessing all potential
|
||||
# consequences resulting from its eventual inadequacies and bugs
|
||||
# End users who are looking for a ready-to-use solution with commercial
|
||||
# garantees and support are strongly adviced to contract a Free Software
|
||||
# Service Company
|
||||
#
|
||||
# This program is Free Software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
###############################################################################
|
||||
|
||||
|
||||
import netsvc
|
||||
from dav_fs import tinydav_handler
|
||||
from tools.config import config
|
||||
from DAV.WebDAVServer import DAVRequestHandler
|
||||
from service.websrv_lib import HTTPDir,FixSendError
|
||||
|
||||
class DAVHandler(FixSendError,DAVRequestHandler):
|
||||
verbose = False
|
||||
|
||||
def get_userinfo(self,user,pw):
|
||||
print "get_userinfo"
|
||||
return False
|
||||
def _log(self, message):
|
||||
netsvc.Logger().notifyChannel("webdav",netsvc.LOG_DEBUG,message)
|
||||
|
||||
def handle(self):
|
||||
pass
|
||||
|
||||
def finish(self):
|
||||
pass
|
||||
|
||||
def setup(self):
|
||||
davpath = '/'+config.get_misc('webdav','vdir','webdav')+'/'
|
||||
self.baseuri = "http://%s:%d%s"% (self.server.server_name,self.server.server_port,davpath)
|
||||
self.IFACE_CLASS = tinydav_handler(self)
|
||||
pass
|
||||
|
||||
def log_message(self, format, *args):
|
||||
netsvc.Logger().notifyChannel('webdav',netsvc.LOG_DEBUG_RPC,format % args)
|
||||
|
||||
def log_error(self, format, *args):
|
||||
netsvc.Logger().notifyChannel('xmlrpc',netsvc.LOG_WARNING,format % args)
|
||||
|
||||
|
||||
try:
|
||||
from service.http_server import reg_http_service,OpenERPAuthProvider
|
||||
if (config.get_misc('webdav','enable',False)):
|
||||
davpath = '/'+config.get_misc('webdav','vdir','webdav')+'/'
|
||||
handler = DAVHandler
|
||||
handler.verbose = config.get_misc('webdav','verbose',True)
|
||||
handler.debug = config.get_misc('webdav','debug',True)
|
||||
reg_http_service(HTTPDir(davpath,DAVHandler,OpenERPAuthProvider()))
|
||||
netsvc.Logger().notifyChannel('webdav',netsvc.LOG_INFO,"WebDAV service registered at path: %s/ "% davpath)
|
||||
except Exception, e:
|
||||
logger = netsvc.Logger()
|
||||
logger.notifyChannel('webdav', netsvc.LOG_ERROR, 'Cannot launch webdav: %s' % e)
|
||||
|
||||
#eof
|
||||
|
||||
|
||||
|
|
@ -32,17 +32,17 @@
|
|||
|
||||
Note that:
|
||||
- A synchronisation with an internal agenda (use of the CRM module) is possible: in order to automatically create a case when an holiday request is accepted, you have to link the holidays status to a case section. You can set up this info and your colour preferences in
|
||||
HR \ Configuration \ Holidays Status
|
||||
HR / Configuration / Holidays Status
|
||||
- An employee can make an ask for more off-days by making a new Allocation It will increase his total of that leave type available (if the request is accepted).
|
||||
- There are two ways to print the employee's holidays:
|
||||
* The first will allow to choose employees by department and is used by clicking the menu item located in
|
||||
HR \ Holidays Request \ Print Summary of Holidays
|
||||
HR / Holidays Request / Print Summary of Holidays
|
||||
* The second will allow you to choose the holidays report for specific employees. Go on the list
|
||||
HR \ Employees \ Employees
|
||||
HR / Employees / Employees
|
||||
then select the ones you want to choose, click on the print icon and select the option
|
||||
'Print Summary of Employee's Holidays'
|
||||
- The wizard allows you to choose if you want to print either the Confirmed & Validated holidays or only the Validated ones. These states must be set up by a user from the group 'HR' and with the role 'holidays'. You can define these features in the security tab from the user data in
|
||||
Administration \ Users \ Users
|
||||
Administration / Users / Users
|
||||
for example, you maybe will do it for the user 'admin'.
|
||||
""",
|
||||
'author': 'Tiny & Axelor',
|
||||
|
|
|
@ -32,7 +32,6 @@ from tools.translate import _
|
|||
class hr_holidays_status(osv.osv):
|
||||
_name = "hr.holidays.status"
|
||||
_description = "Leave Types"
|
||||
|
||||
def get_days(self, cr, uid, ids, employee_id, return_false, context={}):
|
||||
res = {}
|
||||
for record in self.browse(cr, uid, ids, context):
|
||||
|
@ -83,6 +82,48 @@ class hr_holidays_status(osv.osv):
|
|||
}
|
||||
hr_holidays_status()
|
||||
|
||||
class hr_holidays_per_user(osv.osv):
|
||||
_name = "hr.holidays.per.user"
|
||||
_description = "Holidays Per User"
|
||||
_rec_name = "user_id"
|
||||
|
||||
def _get_remaining_leaves(self, cr, uid, ids, field_name, arg=None, context={}):
|
||||
obj_holiday = self.pool.get('hr.holidays')
|
||||
result = {}
|
||||
for holiday_user in self.browse(cr, uid, ids):
|
||||
days = 0.0
|
||||
ids_request = obj_holiday.search(cr, uid, [('employee_id', '=', holiday_user.employee_id.id),('state', '=', 'validate'),('holiday_status', '=', holiday_user.holiday_status.id)])
|
||||
if ids_request:
|
||||
holidays = obj_holiday.browse(cr, uid, ids_request)
|
||||
for holiday in holidays:
|
||||
days += holiday.number_of_days
|
||||
days = holiday_user.max_leaves - days
|
||||
result[holiday_user.id] = days
|
||||
return result
|
||||
|
||||
_columns = {
|
||||
'employee_id': fields.many2one('hr.employee', 'Employee',required=True),
|
||||
'user_id' : fields.many2one('res.users','User'),
|
||||
'holiday_status' : fields.many2one("hr.holidays.status", "Holiday's Status", required=True),
|
||||
'max_leaves' : fields.float('Maximum Leaves Allowed',required=True),
|
||||
'leaves_taken' : fields.float('Leaves Already Taken',readonly=True),
|
||||
'active' : fields.boolean('Active'),
|
||||
'notes' : fields.text('Notes'),
|
||||
'remaining_leaves': fields.function(_get_remaining_leaves, method=True, string='Remaining Leaves', type='float'),
|
||||
'holiday_ids': fields.one2many('hr.holidays', 'holiday_user_id', 'Holidays')
|
||||
}
|
||||
_defaults = {
|
||||
'active' : lambda *a: True,
|
||||
}
|
||||
|
||||
def create(self, cr, uid, vals, *args, **kwargs):
|
||||
if vals['employee_id']:
|
||||
obj_emp=self.pool.get('hr.employee').browse(cr,uid,vals['employee_id'])
|
||||
vals.update({'user_id': obj_emp.user_id.id})
|
||||
return super(osv.osv,self).create(cr, uid, vals, *args, **kwargs)
|
||||
|
||||
hr_holidays_per_user()
|
||||
|
||||
class hr_holidays(osv.osv):
|
||||
_name = "hr.holidays"
|
||||
_description = "Holidays"
|
||||
|
@ -311,6 +352,3 @@ class hr_holidays(osv.osv):
|
|||
return True
|
||||
hr_holidays()
|
||||
|
||||
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||
|
|
|
@ -152,7 +152,7 @@ class report_custom(report_rml):
|
|||
|
||||
while day_diff1>0:
|
||||
if month+i<=12:
|
||||
if day_diff1>30:
|
||||
if day_diff1 > lengthmonth(year,i+month): # Not on 30 else you have problems when entering 01-01-2009 for example
|
||||
som1=datetime.date(year,month+i,1)
|
||||
date_xml += ['<dayy number="%d" name="%s" cell="%d"/>' % (x, som1.replace(day=x).strftime('%a'),cell+x) for x in range(1, lengthmonth(year,i+month)+1)]
|
||||
i=i+1
|
||||
|
|
|
@ -82,14 +82,15 @@ class account_analytic_line(osv.osv):
|
|||
context=context)
|
||||
|
||||
def write(self, cr, uid, ids, vals, context=None):
|
||||
self._check(cr, uid, ids)
|
||||
self._check_inv(cr, uid, ids,vals)
|
||||
return super(account_analytic_line,self).write(cr, uid, ids, vals,
|
||||
context=context)
|
||||
|
||||
def _check(self, cr, uid, ids):
|
||||
def _check_inv(self, cr, uid, ids,vals):
|
||||
select = ids
|
||||
if isinstance(select, (int, long)):
|
||||
select = [ids]
|
||||
if ( not vals.has_key('invoice_id')) or vals['invoice_id' ] == False:
|
||||
for line in self.browse(cr, uid, select):
|
||||
if line.invoice_id:
|
||||
raise osv.except_osv(_('Error !'),
|
||||
|
|
|
@ -137,13 +137,7 @@ class invoice_create(wizard.interface):
|
|||
#
|
||||
# Compute for lines
|
||||
#
|
||||
cr.execute("SELECT * " # TODO optimize this
|
||||
" FROM account_analytic_line"
|
||||
" WHERE account_id=%%s"
|
||||
" AND id IN (%s)"
|
||||
" AND product_id=%%s"
|
||||
" AND to_invoice=%%s" % ','.join(['%s']*len(data['ids'])),
|
||||
tuple([account.id]+ data['ids']+[ product_id, factor_id]))
|
||||
cr.execute("SELECT * FROM account_analytic_line WHERE account_id = %s and id = ANY (%s) AND product_id=%s and to_invoice=%s", (account.id, data['ids'], product_id, factor_id))
|
||||
line_ids = cr.dictfetchall()
|
||||
note = []
|
||||
for line in line_ids:
|
||||
|
@ -160,9 +154,9 @@ class invoice_create(wizard.interface):
|
|||
details.append(line['name'])
|
||||
#if data['form']['price']:
|
||||
# details.append(abs(line['amount']))
|
||||
note.append(' - '.join(map(lambda x: x or '',details)))
|
||||
note.append(u' - '.join(map(lambda x: unicode(x) or '',details)))
|
||||
|
||||
curr_line['note'] = "\n".join(map(lambda x: x or '',note))
|
||||
curr_line['note'] = "\n".join(map(lambda x: unicode(x) or '',note))
|
||||
pool.get('account.invoice.line').create(cr, uid, curr_line)
|
||||
strids = ','.join(map(str, data['ids']))
|
||||
cr.execute("update account_analytic_line set invoice_id=%%s WHERE account_id = %%s and id IN (%s)" % strids, (last_invoice,account.id,))
|
||||
|
|
|
@ -454,8 +454,8 @@ class hr_attendance(osv.osv):
|
|||
LEFT JOIN (hr_attendance a \
|
||||
LEFT JOIN hr_employee e \
|
||||
ON (a.employee_id = e.id)) \
|
||||
ON (s.date_to >= to_date(to_char(a.name, 'YYYY-MM-dd'),'YYYY-MM-dd') \
|
||||
AND s.date_from <= to_date(to_char(a.name, 'YYYY-MM-dd'),'YYYY-MM-dd') \
|
||||
ON (s.date_to >= date_trunc('day',a.name) \
|
||||
AND s.date_from <= a.name \
|
||||
AND s.user_id = e.user_id) \
|
||||
WHERE a.id in (" + ",".join([str(x) for x in ids]) + ") \
|
||||
GROUP BY a.id")
|
||||
|
@ -515,8 +515,8 @@ class hr_attendance(osv.osv):
|
|||
LEFT JOIN (hr_attendance a \
|
||||
LEFT JOIN hr_employee e \
|
||||
ON (a.employee_id = e.id)) \
|
||||
ON (s.date_to >= a.name::date \
|
||||
AND s.date_from <= a.name::date \
|
||||
ON (s.date_to >= date_trunc(\'day\',a.name) \
|
||||
AND s.date_from <= a.name \
|
||||
AND s.user_id = e.user_id) ' + \
|
||||
qu1, qu2)
|
||||
res = cursor.fetchall()
|
||||
|
@ -635,8 +635,8 @@ class hr_timesheet_sheet_sheet_day(osv.osv):
|
|||
LEFT JOIN hr_employee e
|
||||
ON (s.user_id = e.user_id))
|
||||
ON (a.employee_id = e.id
|
||||
AND s.date_to >= a.name::date
|
||||
AND s.date_from <= a.name::date)
|
||||
AND s.date_to >= date_trunc('day',a.name)
|
||||
AND s.date_from <= a.name)
|
||||
WHERE action in ('sign_in', 'sign_out')
|
||||
group by a.name::date, s.id
|
||||
)) AS foo
|
||||
|
|
|
@ -47,40 +47,40 @@ class idea_idea(osv.osv):
|
|||
if not len(ids):
|
||||
return {}
|
||||
|
||||
sql = """select i.id, avg(v.score::integer)
|
||||
from idea_idea i left outer join idea_vote v on i.id = v.idea_id
|
||||
where i.id in (%s)
|
||||
group by i.id
|
||||
""" % ','.join(['%s']*len(ids))
|
||||
sql = """SELECT i.id, avg(v.score::integer)
|
||||
FROM idea_idea i LEFT OUTER JOIN idea_vote v ON i.id = v.idea_id
|
||||
WHERE i.id = ANY(%s)
|
||||
GROUP BY i.id
|
||||
"""
|
||||
|
||||
cr.execute(sql, ids)
|
||||
cr.execute(sql, (ids,))
|
||||
return dict(cr.fetchall())
|
||||
|
||||
def _vote_count(self,cr,uid,ids,name,arg,context=None):
|
||||
if not len(ids):
|
||||
return {}
|
||||
|
||||
sql = """select i.id, count(1)
|
||||
from idea_idea i left outer join idea_vote v on i.id = v.idea_id
|
||||
where i.id in (%s)
|
||||
group by i.id
|
||||
""" % ','.join(['%s']*len(ids))
|
||||
sql = """SELECT i.id, COUNT(1)
|
||||
FROM idea_idea i LEFT OUTER JOIN idea_vote v ON i.id = v.idea_id
|
||||
WHERE i.id = ANY(%s)
|
||||
GROUP BY i.id
|
||||
"""
|
||||
|
||||
cr.execute(sql, ids)
|
||||
cr.execute(sql, (ids,))
|
||||
return dict(cr.fetchall())
|
||||
|
||||
def _comment_count(self,cr,uid,ids,name,arg,context=None):
|
||||
if not len(ids):
|
||||
return {}
|
||||
|
||||
sql = """select i.id, count(1)
|
||||
from idea_idea i left outer join idea_comment c on i.id = c.idea_id
|
||||
where i.id in (%s)
|
||||
group by i.id
|
||||
""" % ','.join(['%s']*len(ids))
|
||||
sql = """SELECT i.id, COUNT(1)
|
||||
FROM idea_idea i LEFT OUTER JOIN idea_comment c ON i.id = c.idea_id
|
||||
WHERE i.id = ANY(%s)
|
||||
GROUP BY i.id
|
||||
"""
|
||||
|
||||
|
||||
cr.execute(sql,ids)
|
||||
cr.execute(sql,(ids,))
|
||||
return dict(cr.fetchall())
|
||||
|
||||
def _vote_read(self, cr, uid, ids, name, arg, context = None):
|
||||
|
@ -193,19 +193,18 @@ class idea_vote_stat(osv.osv):
|
|||
cr -- the cursor
|
||||
"""
|
||||
cr.execute("""
|
||||
create or replace view idea_vote_stat as (
|
||||
select
|
||||
min(v.id) as id,
|
||||
i.id as idea_id,
|
||||
CREATE OR REPLACE VIEW idea_vote_stat AS (
|
||||
SELECT
|
||||
MIN(v.id) AS id,
|
||||
i.id AS idea_id,
|
||||
v.score,
|
||||
count(1) as nbr
|
||||
from
|
||||
COUNT(1) AS nbr
|
||||
FROM
|
||||
idea_vote v
|
||||
left join
|
||||
idea_idea i on (v.idea_id=i.id)
|
||||
group by
|
||||
i.id, v.score, i.id
|
||||
)""")
|
||||
LEFT JOIN idea_idea i ON (v.idea_id = i.id)
|
||||
GROUP BY
|
||||
i.id, v.score, i.id )
|
||||
""")
|
||||
idea_vote_stat()
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||
|
|
|
@ -21,6 +21,8 @@
|
|||
import time
|
||||
import datetime
|
||||
import base64
|
||||
from tools.translate import _
|
||||
import tools
|
||||
|
||||
import wizard
|
||||
import pooler
|
||||
|
@ -101,6 +103,7 @@ class wizard_vat(wizard.interface):
|
|||
partners = []
|
||||
records = []
|
||||
for obj_partner in pool.get('res.partner').browse(cr, uid, p_id_list):
|
||||
|
||||
record = {} # this holds record per partner
|
||||
|
||||
#This listing is only for customers located in belgium, that's the
|
||||
|
|
|
@ -747,6 +747,10 @@
|
|||
<separator colspan="4" string="Note" />
|
||||
<field name="note" colspan="4" nolabel="1"/>
|
||||
</page>
|
||||
<page string="Notes">
|
||||
<separator colspan="4" string="Note" />
|
||||
<field name="note" colspan="4" nolabel="1"/>
|
||||
</page>
|
||||
</notebook>
|
||||
</form>
|
||||
</field>
|
||||
|
|
|
@ -67,10 +67,11 @@ class mrp_production_workcenter_line(osv.osv):
|
|||
res[op.id] = op.date_planned
|
||||
return res
|
||||
_inherit = 'mrp.production.workcenter.line'
|
||||
_order = "sequence, date_planned"
|
||||
_columns = {
|
||||
'state': fields.selection([('draft','Draft'),('startworking', 'In Progress'),('pause','Pause'),('cancel','Canceled'),('done','Finished')],'Status', readonly=True),
|
||||
'date_start_date': fields.function(_get_date_date, method=True, string='Start Date', type='date'),
|
||||
'date_planned': fields.related('production_id', 'date_planned', type='datetime', string='Date Planned'),
|
||||
'date_planned': fields.datetime('Scheduled Date'),
|
||||
'date_planned_end': fields.function(_get_date_end, method=True, string='End Date', type='datetime'),
|
||||
'date_start': fields.datetime('Start Date'),
|
||||
'date_finnished': fields.datetime('End Date'),
|
||||
|
|
|
@ -159,6 +159,22 @@ def _test_no_line(self, cr, uid, data, context):
|
|||
if not order.lines:
|
||||
raise wizard.except_wizard(_('Error'), _('No order lines defined for this sale.'))
|
||||
|
||||
if not order.lines:
|
||||
raise wizard.except_wizard(_('Error'), _('No order lines defined for this sale.'))
|
||||
|
||||
return {}
|
||||
|
||||
|
||||
def create_invoice(self, cr, uid, data, context):
|
||||
pool = pooler.get_pool(cr.dbname)
|
||||
order_obj = pool.get('pos.order')
|
||||
order = order_obj.browse(cr, uid, data['id'], context)
|
||||
if not order.invoice_id:
|
||||
inv_id = order_obj.action_invoice(cr, uid, [data['id']])
|
||||
#raise wizard.except_wizard(_('Error !'), _('Please create an invoice for this sale.'))
|
||||
# wf_service = netsvc.LocalService("workflow")
|
||||
# for i in data['ids']:
|
||||
# wf_service.trg_validate(uid, 'pos.order', i, 'invoice', cr)
|
||||
return {}
|
||||
|
||||
|
||||
|
|
|
@ -432,6 +432,7 @@ class project_work(osv.osv):
|
|||
'task_id': fields.many2one('project.task', 'Task', ondelete='cascade', required=True),
|
||||
'hours': fields.float('Time Spent'),
|
||||
'user_id': fields.many2one('res.users', 'Done by', required=True),
|
||||
'project_id': fields.related('task_id','project_id',type='many2one',relation='project.project', string='Project'),
|
||||
}
|
||||
_defaults = {
|
||||
'user_id': lambda obj,cr,uid,context: uid,
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
#
|
||||
##############################################################################
|
||||
|
||||
from sets import Set
|
||||
from mx.DateTime import *
|
||||
|
||||
import StringIO
|
||||
|
|
|
@ -51,6 +51,39 @@
|
|||
<field name="name">Specific adaptation to MRP</field>
|
||||
</record>
|
||||
|
||||
<record id="project.project_task_201" model="project.task">
|
||||
<field name="sequence">40</field>
|
||||
<field name="planned_hours">32.0</field>
|
||||
<field name="remaining_hours">32.0</field>
|
||||
<field name="timebox_id" ref="timebox_monthly"/>
|
||||
<field name="context_id" ref="context_home"/>
|
||||
<field name="user_id" ref="base.user_root"/>
|
||||
<field name="project_id" ref="project.project_project_23"/>
|
||||
<field name="name">In house training</field>
|
||||
</record>
|
||||
|
||||
<record id="project.project_task_202" model="project.task">
|
||||
<field name="sequence">50</field>
|
||||
<field name="planned_hours">24.0</field>
|
||||
<field name="remaining_hours">24.0</field>
|
||||
<field name="timebox_id" ref="timebox_weekly"/>
|
||||
<field name="context_id" ref="context_office"/>
|
||||
<field name="user_id" ref="base.user_root"/>
|
||||
<field name="project_id" ref="project.project_project_22"/>
|
||||
<field name="name">Validation + latest modifications.</field>
|
||||
</record>
|
||||
|
||||
<record id="project.project_task_116" model="project.task">
|
||||
<field name="planned_hours">38.0</field>
|
||||
<field name="remaining_hours">38.0</field>
|
||||
<field name="timebox_id" ref="timebox_daily"/>
|
||||
<field name="context_id" ref="context_office"/>
|
||||
<field name="type" ref="project.project_tt_feature"/>
|
||||
<field name="user_id" ref="base.user_root"/>
|
||||
<field name="project_id" ref="project.project_project_22"/>
|
||||
<field name="name">Specific adaptation to MRP</field>
|
||||
</record>
|
||||
|
||||
<record id="project.project_task_201" model="project.task">
|
||||
<field name="sequence">40</field>
|
||||
<field name="planned_hours">32.0</field>
|
||||
|
|
|
@ -78,7 +78,7 @@ class project_work(osv.osv):
|
|||
vals_line['amount'] = 00.0
|
||||
timeline_id = obj.create(cr, uid, vals_line, {})
|
||||
|
||||
vals_line['amount'] = (-1) * vals['hours'] * obj.browse(cr, uid, timeline_id).product_id.standard_price
|
||||
vals_line['amount'] = (-1) * vals['hours']* ( obj.browse(cr,uid,timeline_id).product_id.standard_price or 0.0)
|
||||
obj.write(cr, uid,[timeline_id], vals_line, {})
|
||||
vals['hr_analytic_timesheet_id'] = timeline_id
|
||||
return super(project_work,self).create(cr, uid, vals, *args, **kwargs)
|
||||
|
@ -104,7 +104,7 @@ class project_work(osv.osv):
|
|||
vals_line['date'] = vals['date'][:10]
|
||||
if 'hours' in vals:
|
||||
vals_line['unit_amount'] = vals['hours']
|
||||
vals_line['amount'] = (-1) * vals['hours'] * obj.browse(cr, uid, line_id).product_id.standard_price
|
||||
vals_line['amount'] = (-1) * vals['hours'] * (obj.browse(cr,uid,line_id).product_id.standard_price or 0.0)
|
||||
obj.write(cr, uid, [line_id], vals_line, {})
|
||||
|
||||
return super(project_work,self).write(cr, uid, ids, vals, context)
|
||||
|
|
|
@ -464,13 +464,15 @@ class purchase_order_line(osv.osv):
|
|||
return super(purchase_order_line, self).copy_data(cr, uid, id, default, context)
|
||||
|
||||
def product_id_change(self, cr, uid, ids, pricelist, product, qty, uom,
|
||||
partner_id, date_order=False, fiscal_position=False):
|
||||
partner_id, date_order=False, fiscal_position=False, date_planned=False,
|
||||
name=False, price_unit=False, notes=False):
|
||||
if not pricelist:
|
||||
raise osv.except_osv(_('No Pricelist !'), _('You have to select a pricelist in the purchase form !\nPlease set one before choosing a product.'))
|
||||
if not partner_id:
|
||||
raise osv.except_osv(_('No Partner!'), _('You have to select a partner in the purchase form !\nPlease set one partner before choosing a product.'))
|
||||
if not product:
|
||||
return {'value': {'price_unit': 0.0, 'name':'','notes':'', 'product_uom' : False}, 'domain':{'product_uom':[]}}
|
||||
return {'value': {'price_unit': price_unit or 0.0, 'name': name or '',
|
||||
'notes': notes or'', 'product_uom' : uom or False}, 'domain':{'product_uom':[]}}
|
||||
prod= self.pool.get('product.product').browse(cr, uid,product)
|
||||
lang=False
|
||||
if partner_id:
|
||||
|
@ -484,6 +486,9 @@ class purchase_order_line(osv.osv):
|
|||
uom = prod_uom_po
|
||||
if not date_order:
|
||||
date_order = time.strftime('%Y-%m-%d')
|
||||
if price_unit:
|
||||
price = price_unit
|
||||
else:
|
||||
price = self.pool.get('product.pricelist').price_get(cr,uid,[pricelist],
|
||||
product, qty or 1.0, partner_id, {
|
||||
'uom': uom,
|
||||
|
@ -503,8 +508,9 @@ class purchase_order_line(osv.osv):
|
|||
prod_name = self.pool.get('product.product').name_get(cr, uid, [prod.id])[0][1]
|
||||
|
||||
|
||||
res = {'value': {'price_unit': price, 'name':prod_name, 'taxes_id':map(lambda x: x.id, prod.supplier_taxes_id),
|
||||
'date_planned': dt,'notes':prod.description_purchase,
|
||||
res = {'value': {'price_unit': price, 'name': name or prod_name,
|
||||
'taxes_id':map(lambda x: x.id, prod.supplier_taxes_id),
|
||||
'date_planned': date_planned or dt,'notes': notes or prod.description_purchase,
|
||||
'product_qty': qty,
|
||||
'product_uom': uom}}
|
||||
domain = {}
|
||||
|
|
|
@ -190,8 +190,8 @@
|
|||
<form string="Purchase Order Line">
|
||||
<notebook colspan="4">
|
||||
<page string="Order Line">
|
||||
<field colspan="4" context="partner_id=parent.partner_id,quantity=product_qty,pricelist=parent.pricelist_id,uom=product_uom,warehouse=parent.warehouse_id" name="product_id" on_change="product_id_change(parent.pricelist_id,product_id,product_qty,product_uom,parent.partner_id, parent.date_order, parent.fiscal_position)"/>
|
||||
<field context="partner_id=parent.partner_id,quantity=product_qty,pricelist=parent.pricelist_id,uom=product_uom,warehouse=parent.warehouse_id" name="product_qty" on_change="product_id_change(parent.pricelist_id,product_id,product_qty,product_uom,parent.partner_id, parent.date_order, parent.fiscal_position)"/>
|
||||
<field colspan="4" context="partner_id=parent.partner_id,quantity=product_qty,pricelist=parent.pricelist_id,uom=product_uom,warehouse=parent.warehouse_id" name="product_id" on_change="product_id_change(parent.pricelist_id,product_id,product_qty,product_uom,parent.partner_id, parent.date_order,parent.fiscal_position,date_planned,name,price_unit,notes)"/>
|
||||
<field context="partner_id=parent.partner_id,quantity=product_qty,pricelist=parent.pricelist_id,uom=product_uom,warehouse=parent.warehouse_id" name="product_qty" on_change="product_id_change(parent.pricelist_id,product_id,product_qty,product_uom,parent.partner_id,parent.date_order,parent.fiscal_position,date_planned,name,price_unit,notes)"/>
|
||||
<field name="product_uom" on_change="product_uom_change(parent.pricelist_id,product_id,product_qty,product_uom,parent.partner_id, parent.date_order)"/>
|
||||
<field colspan="4" name="name"/>
|
||||
<field name="date_planned"/>
|
||||
|
|
|
@ -101,8 +101,9 @@ class product_product(osv.osv):
|
|||
cr.execute(
|
||||
'select sum(product_qty), product_id, product_uom '\
|
||||
'from stock_move '\
|
||||
'where location_id not in ('+location_ids_str+') '\
|
||||
'and location_dest_id in ('+location_ids_str+') '\
|
||||
'where ' +\
|
||||
(location_ids_str and 'location_id not in ('+location_ids_str+') ' \
|
||||
'and location_dest_id in ('+location_ids_str+') ' or 'true ') +\
|
||||
'and product_id in ('+prod_ids_str+') '\
|
||||
'and state in ('+states_str+') '+ (date_str and 'and '+date_str+' ' or ' ') +''\
|
||||
'group by product_id,product_uom'
|
||||
|
@ -113,8 +114,9 @@ class product_product(osv.osv):
|
|||
cr.execute(
|
||||
'select sum(product_qty), product_id, product_uom '\
|
||||
'from stock_move '\
|
||||
'where location_id in ('+location_ids_str+') '\
|
||||
'and location_dest_id not in ('+location_ids_str+') '\
|
||||
'where ' +\
|
||||
(location_ids_str and 'location_id in ('+location_ids_str+') '\
|
||||
'and location_dest_id not in ('+location_ids_str+') ' or 'true ') +\
|
||||
'and product_id in ('+prod_ids_str+') '\
|
||||
'and state in ('+states_str+') '+ (date_str and 'and '+date_str+' ' or '') + ''\
|
||||
'group by product_id,product_uom'
|
||||
|
|
|
@ -330,8 +330,9 @@ class stock_tracking(osv.osv):
|
|||
return (10 - (sum % 10)) % 10
|
||||
checksum = staticmethod(checksum)
|
||||
|
||||
def make_sscc(self, cr, uid, context={}):
|
||||
sequence = self.pool.get('ir.sequence').get(cr, uid, 'stock.lot.tracking')
|
||||
def make_sscc(self, cr, uid, context=None):
|
||||
context = context or {}
|
||||
sequence = self.pool.get('ir.sequence').get(cr, uid, 'stock.lot.tracking',context)
|
||||
return sequence + str(self.checksum(sequence))
|
||||
|
||||
_columns = {
|
||||
|
@ -428,8 +429,10 @@ class stock_picking(osv.osv):
|
|||
return res
|
||||
|
||||
def create(self, cr, user, vals, context=None):
|
||||
context = (context or {}).copy()
|
||||
context.update({'object': vals})
|
||||
if ('name' not in vals) or (vals.get('name')=='/'):
|
||||
vals['name'] = self.pool.get('ir.sequence').get(cr, user, 'stock.picking')
|
||||
vals['name'] = self.pool.get('ir.sequence').get(cr, user, 'stock.picking',context)
|
||||
|
||||
return super(stock_picking, self).create(cr, user, vals, context)
|
||||
|
||||
|
@ -484,7 +487,7 @@ class stock_picking(osv.osv):
|
|||
default = {}
|
||||
default = default.copy()
|
||||
if not default.get('name',False):
|
||||
default['name'] = self.pool.get('ir.sequence').get(cr, uid, 'stock.picking')
|
||||
default['name'] = self.pool.get('ir.sequence').get(cr, uid, 'stock.picking', context)
|
||||
return super(stock_picking, self).copy(cr, uid, id, default, context)
|
||||
|
||||
def onchange_partner_in(self, cr, uid, context, partner_id=None):
|
||||
|
@ -877,7 +880,7 @@ class stock_production_lot(osv.osv):
|
|||
}
|
||||
_defaults = {
|
||||
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
|
||||
'name': lambda x, y, z, c: x.pool.get('ir.sequence').get(y, z, 'stock.lot.serial'),
|
||||
'name': lambda x, y, z, c: x.pool.get('ir.sequence').get(y, z, 'stock.lot.serial', c),
|
||||
'product_id': lambda x, y, z, c: c.get('product_id', False),
|
||||
}
|
||||
_sql_constraints = [
|
||||
|
@ -1034,20 +1037,25 @@ class stock_move(osv.osv):
|
|||
cursor.commit()
|
||||
return res
|
||||
|
||||
def onchange_lot_id(self, cr, uid, ids, prodlot_id=False, product_qty=False, loc_id=False, context=None):
|
||||
if not prodlot_id or not loc_id:
|
||||
def onchange_lot_id(self, cr, uid, ids, prodlot_id=False, product_qty=False, loc_id=False,product_id=True, context=None):
|
||||
if not prodlot_id:
|
||||
return {}
|
||||
ctx = context and context.copy() or {}
|
||||
ctx['location_id'] = loc_id
|
||||
prodlot = self.pool.get('stock.production.lot').browse(cr, uid, prodlot_id, ctx)
|
||||
ret = {}
|
||||
if not product_id:
|
||||
ret['value']= { 'product_id': prodlot.product_id.id }
|
||||
if not loc_id:
|
||||
return ret
|
||||
location = self.pool.get('stock.location').browse(cr, uid, loc_id)
|
||||
warning = {}
|
||||
if (location.usage == 'internal') and (product_qty > (prodlot.stock_available or 0.0)):
|
||||
warning = {
|
||||
ret['warning']={
|
||||
'title': 'Bad Lot Assignation !',
|
||||
'message': 'You are moving %.2f products but only %.2f available in this lot.' % (product_qty, prodlot.stock_available or 0.0)
|
||||
}
|
||||
return {'warning': warning}
|
||||
|
||||
return ret
|
||||
|
||||
def onchange_product_id(self, cr, uid, ids, prod_id=False, loc_id=False, loc_dest_id=False):
|
||||
if not prod_id:
|
||||
|
|
|
@ -551,8 +551,8 @@
|
|||
<field groups="base.group_extended" name="product_packaging"/>
|
||||
<field name="prodlot_id" select="2"
|
||||
context="{'location_id':location_id, 'product_id':product_id}"
|
||||
domain="[('product_id','=',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id)"/>
|
||||
domain="[('product_id','=?',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id, product_id)"/>
|
||||
<field groups="base.group_extended" name="tracking_id" select="2"/>
|
||||
<newline/>
|
||||
<label/>
|
||||
|
@ -649,8 +649,8 @@
|
|||
<field groups="base.group_extended" name="product_packaging"/>
|
||||
<field name="prodlot_id" select="2"
|
||||
context="{'location_id':location_id, 'product_id':product_id}"
|
||||
domain="[('product_id','=',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id)"/>
|
||||
domain="[('product_id','=?',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id, product_id)"/>
|
||||
<field groups="base.group_extended" name="tracking_id" select="1"/>
|
||||
<separator colspan="4" string="Move State"/>
|
||||
<field name="state" select="1"/>
|
||||
|
@ -864,8 +864,8 @@
|
|||
<field groups="base.group_extended" name="product_packaging"/>
|
||||
<field name="prodlot_id" select="1"
|
||||
context="{'location_id':location_id, 'product_id':product_id}"
|
||||
domain="[('product_id','=',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id)"/>
|
||||
domain="[('product_id','=?',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id, product_id)"/>
|
||||
<field groups="base.group_extended" name="tracking_id" select="1"/>
|
||||
<label/>
|
||||
<button name="%(track_line)d" string="Split in production lots" type="action" icon="gtk-justify-fill"/>
|
||||
|
@ -1061,8 +1061,8 @@
|
|||
<newline/>
|
||||
<field name="prodlot_id" select="1"
|
||||
context="{'location_id':location_id, 'product_id':product_id}"
|
||||
domain="[('product_id','=',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id)"/>
|
||||
domain="[('product_id','=?',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id, product_id)"/>
|
||||
<field groups="base.group_extended" name="tracking_id" select="1"/>
|
||||
<newline/>
|
||||
<label/>
|
||||
|
@ -1292,8 +1292,8 @@
|
|||
<newline/>
|
||||
<field name="prodlot_id" select="2"
|
||||
context="{'location_id':location_id, 'product_id':product_id}"
|
||||
domain="[('product_id','=',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id)"/>
|
||||
domain="[('product_id','=?',product_id)]"
|
||||
on_change="onchange_lot_id(prodlot_id,product_qty, location_id, product_id)"/>
|
||||
<field name="tracking_id" select="2"/>
|
||||
<newline/>
|
||||
<label/>
|
||||
|
|
|
@ -119,5 +119,15 @@
|
|||
<field name="signal">button_cancel</field>
|
||||
</record>
|
||||
|
||||
<record id="trans_assigned_cancel_auto" model="workflow.transition">
|
||||
<field name="act_from" ref="act_assigned"/>
|
||||
<field name="act_to" ref="act_cancel"/>
|
||||
<field name="condition">test_cancel()</field>
|
||||
</record>
|
||||
<record id="trans_confirmed_cancel_auto" model="workflow.transition">
|
||||
<field name="act_from" ref="act_confirmed"/>
|
||||
<field name="act_to" ref="act_cancel"/>
|
||||
<field name="condition">test_cancel()</field>
|
||||
</record>
|
||||
</data>
|
||||
</openerp>
|
||||
|
|
|
@ -56,6 +56,7 @@ invoice_fields = {
|
|||
'invoice_date': {'string': 'Invoiced date', 'type':'date' }
|
||||
}
|
||||
|
||||
|
||||
def _get_type(obj, cr, uid, data, context):
|
||||
picking_obj = pooler.get_pool(cr.dbname).get('stock.picking')
|
||||
usage = 'customer'
|
||||
|
|
Loading…
Reference in New Issue