bzr revid: hmo@tinyerp.com-20100813061333-p1rjbpgi09lg434y
This commit is contained in:
Harry (OpenERP) 2010-08-13 11:43:33 +05:30
commit 2dfb4b6625
25 changed files with 386 additions and 331 deletions

View File

@ -378,7 +378,6 @@ property or property parameter."),
}
_defaults = {
'state': 'needs-action',
'user_id': lambda self, cr, uid, ctx: uid,
'role': 'req-participant',
'rsvp': True,
'cutype': 'individual',
@ -419,7 +418,11 @@ property or property parameter."),
event.add('location').value = event_obj.location
if event_obj.rrule:
event.add('rrule').value = event_obj.rrule
if event_obj.user_id or event_obj.organizer_id:
if event_obj.organizer:
event_org = event.add('organizer')
event_org.params['CN'] = [event_obj.organizer]
event_org.value = 'MAILTO:' + (event_obj.organizer)
elif event_obj.user_id or event_obj.organizer_id:
event_org = event.add('organizer')
organizer = event_obj.organizer_id
if not organizer:
@ -1111,6 +1114,7 @@ e.g.: Every other month on the last Sunday of the month for 10 occurrences:\
'vtimezone': fields.related('user_id', 'context_tz', type='char', size=24, \
string='Timezone', store=True),
'user_id': fields.many2one('res.users', 'Responsible', states={'done': [('readonly', True)]}),
'organizer': fields.char("Organizer", size=256, states={'done': [('readonly', True)]}), # Maps to the ORGANIZER attribute of VEvent.
'organizer_id': fields.many2one('res.users', 'Organizer', states={'done': [('readonly', True)]}),
'freq': fields.selection([('None', 'No Repeat'), \
('secondly', 'Secondly'), \
@ -1147,6 +1151,14 @@ e.g.: Every other month on the last Sunday of the month for 10 occurrences:\
'active': fields.boolean('Active', help="If the active field is set to \
true, it will allow you to hide the event alarm information without removing it.")
}
def default_organizer(self, cr, uid, context=None):
    """Build the default Organizer value for a new event.

    Renders the current user as ``Name <email>`` (the ``<email>`` part
    is omitted when the user has no address configured), matching the
    string format expected by the iCalendar ORGANIZER export.
    """
    users = self.pool.get('res.users')
    current = users.browse(cr, uid, uid, context=context)
    label = current.name
    if current.user_email:
        label += " <%s>" % current.user_email
    return label
_defaults = {
'state': 'tentative',
'class': 'public',
@ -1156,7 +1168,7 @@ true, it will allow you to hide the event alarm information without removing it.
'interval': 1,
'active': 1,
'user_id': lambda self, cr, uid, ctx: uid,
'organizer_id': lambda self, cr, uid, ctx: uid,
'organizer': default_organizer,
}
def open_event(self, cr, uid, ids, context=None):
@ -1199,33 +1211,6 @@ true, it will allow you to hide the event alarm information without removing it.
return value
def modify_this(self, cr, uid, event_id, defaults, real_date, context=None, *args):
"""Detach one occurrence of a recurrent event as a real record.

The virtual occurrence identified by ``event_id`` is excluded from the
recurring series (by appending to the parent's ``exdate`` list) and
copied into a stand-alone, non-recurring event carrying ``defaults``.

@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user's ID for security checks,
@param event_id: id of the recurring event (may be a virtual "id-date" id)
@param defaults: field values to apply to the detached copy
@param real_date: date of the event recurrence that is being modified
@param context: a standard dictionary for contextual values
@param *args: extra positional arguments (unused)
@return: id of the newly created stand-alone event
"""
# Resolve a possibly virtual "id-date" identifier to the real db id.
event_id = base_calendar_id2real_id(event_id)
datas = self.read(cr, uid, event_id, context=context)
# The copy must point back at its parent series and carry no rrule of
# its own, so it is treated as a plain (non-recurring) event.
defaults.update({
'recurrent_uid': base_calendar_id2real_id(datas['id']),
'recurrent_id': defaults.get('date') or real_date,
'rrule_type': 'none',
'rrule': ''
})
# Exclude the modified occurrence from the parent's recurrence set.
exdate = datas['exdate'] and datas['exdate'].split(',') or []
if real_date and defaults.get('date'):
exdate.append(real_date)
self.write(cr, uid, event_id, {'exdate': ','.join(exdate)}, context=context)
# The detached occurrence is a copy of the parent with our defaults.
new_id = self.copy(cr, uid, event_id, default=defaults, context=context)
return new_id
def modify_all(self, cr, uid, event_ids, defaults, context=None, *args):
"""
Modifies the recurring event
@ -1235,7 +1220,6 @@ true, it will allow you to hide the event alarm information without removing it.
@param context: A standard dictionary for contextual values
@return: True
"""
for event_id in event_ids:
event_id = base_calendar_id2real_id(event_id)
@ -1270,9 +1254,10 @@ true, it will allow you to hide the event alarm information without removing it.
else:
ids = select
result = []
recur_dict = []
if ids and (base_start_date or base_until_date):
cr.execute("select m.id, m.rrule, m.date, m.date_deadline, \
m.exdate, m.exrule from " + self._table + \
cr.execute("select m.id, m.rrule, m.date, m.date_deadline, m.duration, \
m.exdate, m.exrule, m.recurrent_id, m.recurrent_uid from " + self._table + \
" m where m.id in ("\
+ ','.join(map(lambda x: str(x), ids))+")")
@ -1293,8 +1278,16 @@ true, it will allow you to hide the event alarm information without removing it.
if until_date and (event_date > until_date):
continue
idval = real_id2base_calendar_id(data['id'], data['date'])
result.append(idval)
count += 1
if not data['recurrent_id']:
result.append(idval)
count += 1
else:
ex_id = real_id2base_calendar_id(data['recurrent_uid'], data['recurrent_id'])
ls = base_calendar_id2real_id(ex_id, with_date=data and data.get('duration', 0) or 0)
if not isinstance(ls, (str, int, long)) and len(ls) >= 2:
if ls[1] == data['recurrent_id']:
result.append(idval)
recur_dict.append(ex_id)
else:
exdate = data['exdate'] and data['exdate'].split(',') or []
rrule_str = data['rrule']
@ -1329,7 +1322,7 @@ true, it will allow you to hide the event alarm information without removing it.
result.append(idval)
count += 1
if result:
ids = result
ids = list(set(result)-set(recur_dict))
if isinstance(select, (str, int, long)):
return ids and ids[0] or False
return ids
@ -1390,7 +1383,6 @@ true, it will allow you to hide the event alarm information without removing it.
return rrule_string
def search(self, cr, uid, args, offset=0, limit=100, order=None,
context=None, count=False):
"""
@ -1424,8 +1416,6 @@ true, it will allow you to hide the event alarm information without removing it.
return self.get_recurrent_ids(cr, uid, res, start_date, until_date, limit)
def write(self, cr, uid, ids, vals, context=None, check=True, update_check=True):
"""
Overrides orm write method.
@ -1444,22 +1434,27 @@ true, it will allow you to hide the event alarm information without removing it.
else:
select = ids
new_ids = []
res = False
for event_id in select:
real_event_id = base_calendar_id2real_id(event_id)
if len(str(event_id).split('-')) > 1:
data = self.read(cr, uid, event_id, ['date', 'date_deadline', \
'rrule', 'duration'])
if data.get('rrule'):
real_date = data.get('date')
data.update(vals)
new_id = self.modify_this(cr, uid, event_id, data, \
real_date, context)
vals.update({
'recurrent_uid': real_event_id,
'recurrent_id': data.get('date'),
'rrule_type': 'none',
'rrule': ''
})
new_id = self.copy(cr, uid, real_event_id, default=vals, context=context)
context.update({'active_id': new_id, 'active_ids': [new_id]})
continue
event_id = base_calendar_id2real_id(event_id)
if not event_id in new_ids:
new_ids.append(event_id)
if not real_event_id in new_ids:
new_ids.append(real_event_id)
res = super(calendar_event, self).write(cr, uid, new_ids, vals, context=context)
if new_ids:
res = super(calendar_event, self).write(cr, uid, new_ids, vals, context=context)
if (vals.has_key('alarm_id') or vals.has_key('base_calendar_alarm_id'))\
or (vals.has_key('date') or vals.has_key('duration') or vals.has_key('date_deadline')):
# change alarm details
@ -1559,13 +1554,12 @@ true, it will allow you to hide the event alarm information without removing it.
continue
event_id, date_new = event_id.split('-')
event_id = [int(event_id)]
for record in self.read(cr, uid, event_id, ['date', 'rrule', 'exdate']):
for record in self.read(cr, uid, event_id, ['date', 'rrule', 'exdate'], context=context):
if record['rrule']:
# Remove one of the recurrent event
date_new = time.strftime("%Y-%m-%d %H:%M:%S", \
date_new = time.strftime("%Y%m%dT%H%M%S", \
time.strptime(date_new, "%Y%m%d%H%M%S"))
exdate = (record['exdate'] and (record['exdate'] + ',') or '') + ''.join((re.compile('\d')).findall(date_new)) + 'Z'
res = self.write(cr, uid, event_id, {'exdate': exdate})
res = self.write(cr, uid, event_id, {'exdate': date_new})
else:
res = super(calendar_event, self).unlink(cr, uid, event_id)
self.pool.get('res.alarm').do_alarm_unlink(cr, uid, event_id, self._name)

View File

@ -10,6 +10,7 @@
<field name="type">form</field>
<field name="arch" type="xml">
<form string="Invite People">
<separator string="Invite People" colspan="4" />
<field name="type" />
<field name="send_mail" />
<newline />

View File

@ -71,8 +71,8 @@ class node_calendar_collection(nodes.node_dir):
where = [('collection_id','=',self.dir_id)]
ext = False
if name and name.endswith('.ics'):
name = name[-4]
ext = '.ics'
name = name[:-4]
ext = True
if name:
where.append(('name','=',name))
if not domain:
@ -81,15 +81,16 @@ class node_calendar_collection(nodes.node_dir):
fil_obj = dirobj.pool.get('basic.calendar')
ids = fil_obj.search(cr,uid,where,context=ctx)
res = []
for calender in fil_obj.browse(cr, uid, ids, context=ctx):
if not ext:
res.append(node_calendar(calender.name, self, self.context, calender))
else:
res.append(res_node_calendar(name, self, self.context, calender))
for cal in fil_obj.browse(cr, uid, ids, context=ctx):
if (not name) or not ext:
res.append(node_calendar(cal.name, self, self.context, cal))
if (not name) or ext:
res.append(res_node_calendar(cal.name+'.ics', self, self.context, cal))
# When no specific name was requested, both node flavours are returned.
return res
def _get_dav_owner(self, cr):
# Todo?
# Todo?
return False
def _get_ttag(self, cr):
@ -205,7 +206,15 @@ class node_calendar(nodes.node_class):
if name:
if name.endswith('.ics'):
name = name[:-4]
where.append(('id','=',int(name)))
try:
where.append(('id','=',int(name)))
except ValueError:
# if somebody requests any other name than the ones we
# generate (non-numeric), it just won't exist
# FIXME: however, this confuses Evolution (at least), which
# thinks the .ics node hadn't been saved.
return []
if not domain:
domain = []
#for opr1, opt, opr2 in domain:
@ -224,9 +233,21 @@ class node_calendar(nodes.node_class):
Return the node_* created
"""
# we ignore the path, it will be re-generated automatically
fil_obj = self.context._dirobj.pool.get('basic.calendar')
ctx = self.context.context.copy()
ctx.update(self.dctx)
uid = self.context.uid
res = self.set_data(cr, data)
# TODO: use the res to create at least one node
if res and len(res):
# We arbitrarily construct only the first node of the data
# that have been imported. ICS may have had more elements,
# but only one node can be returned here.
assert isinstance(res[0], (int, long))
fnodes = fil_obj.get_calendar_objects(cr, uid, [self.calendar_id], self,
domain=[('id','=',res[0])], context=ctx)
return fnodes[0]
return None

View File

@ -74,37 +74,45 @@
<field name="type"/>
<field name="user_id"/>
<field name="collection_id" required="1"/>
<field name="line_ids" mode="form,tree" colspan="4" nolabel="1">
<form string="Calendar Lines">
<field name="name" required="1" select="1" />
<field name="object_id" required="1" select="1" />
<field name="domain" select="1" />
<field name="mapping_ids" select="1" colspan="4" nolabel="1">
<tree string="Attributes Mapping" editable="bottom">
<field name="name" required="1" domain="[('type', '=', parent.name)]"/>
<field name="fn" select="1" />
<field name="field_id" select="1" required="1" domain="[('model_id', '=', parent.object_id)]" />
<field name="expr" />
</tree>
<form string="Attributes Mapping">
<field name="name" select="1" required="1" domain="[('type', '=', parent.name)]"/>
<field name="field_id" select="1" domain="[('model_id', '=', parent.object_id)]"
required="1" />
<field name="fn" select="1" required="1" />
<field name="expr" />
<separator string="Value Mapping" colspan="4" />
<field name="mapping" select="1" colspan="4" nolabel="1" />
<notebook colspan="4">
<page string="Calendar Lines">
<field name="line_ids" mode="form,tree" colspan="4" nolabel="1">
<form string="Calendar Lines">
<field name="name" required="1" select="1" />
<field name="object_id" required="1" select="1" />
<field name="domain" select="1" />
<field name="mapping_ids" select="1" colspan="4" nolabel="1">
<tree string="Attributes Mapping" editable="bottom">
<field name="name" required="1" domain="[('type', '=', parent.name)]"/>
<field name="fn" select="1" />
<field name="field_id" select="1" required="1" domain="[('model_id', '=', parent.object_id)]" />
<field name="expr" />
</tree>
<form string="Attributes Mapping">
<field name="name" select="1" required="1" domain="[('type', '=', parent.name)]"/>
<field name="field_id" select="1" domain="[('model_id', '=', parent.object_id)]"
required="1" />
<field name="fn" select="1" required="1" />
<field name="expr" />
<separator string="Value Mapping" colspan="4" />
<field name="mapping" select="1" colspan="4" nolabel="1" />
</form>
</field>
</form>
</field>
</form>
<tree string="Calendar Lines" editable="bottom">
<field name="name" select="1" />
<field name="object_id" select="1" />
</tree>
</field>
</form>
</field>
</record>
<tree string="Calendar Lines" editable="bottom">
<field name="name" select="1" />
<field name="object_id" select="1" />
</tree>
</field>
</page>
<page string="Other Info">
<separator string="Description" colspan="4" />
<field name="description" colspan="4" nolabel="1" />
</page>
</notebook>
</form>
</field>
</record>
<record model="ir.ui.view" id="view_caldav_tree">

View File

@ -83,7 +83,61 @@ def openobjectid2uid(cr, uidval, oomodel):
value = 'OpenObject-%s_%s@%s' % (oomodel, uidval, cr.dbname)
return value
def get_attribute_mapping(cr, uid, calname, context={}):
def mailto2str(arg):
    """Render one or several address dicts as a mail-header-like string.

    Each dict may carry ``name``, ``company`` and ``email`` keys; the
    result looks like ``Name (Company) <email>``.  A list of dicts is
    joined with ", ".
    """
    entries = [arg] if isinstance(arg, dict) else arg
    pieces = []
    for entry in entries:
        text = entry.get('name', '')
        company = entry.get('company', False)
        if company:
            text += ' (%s)' % company
        email = entry.get('email')
        if email:
            text += ' <%s>' % email
        pieces.append(text)
    return ', '.join(pieces)
def str2mailto(emailstr, multi=False):
    """Parse ``Name (Company) <email>`` strings into address dicts.

    Returns a dict with ``name`` and ``email`` keys (plus ``company``
    when present).  With ``multi=True`` the input is split on commas
    and a list of such dicts is returned.
    Raises ValueError when a part does not look like an address.
    """
    # TODO: move to tools or sth.
    pattern = re.compile(r'([^\(\<]+) *(\((.*?)\))? *(\< ?(.*?) ?\>)? ?(\((.*?)\))? *$')
    parts = emailstr.split(',') if multi else [emailstr]
    parsed = []
    for part in parts:
        match = pattern.match(part.strip())
        if match is None:
            raise ValueError("Invalid email address %r" % part)
        entry = {'name': match.group(1).strip(),
                 'email': match.group(5)}
        # The company may appear either before or after the <email> part.
        if match.group(2):
            entry['company'] = match.group(3).strip()
        elif match.group(6):
            entry['company'] = match.group(7).strip()
        # Strip surrounding double quotes from the display name.
        name = entry['name']
        if name.startswith('"') and name.endswith('"'):
            entry['name'] = name[1:-1]
        parsed.append(entry)
    return parsed if multi else parsed[0]
def get_attribute_mapping(cr, uid, calname, context=None):
""" Attribute Mapping with Basic calendar fields and lines
@param cr: the current row, from the database cursor,
@param uid: the current users ID for security checks,
@ -220,6 +274,10 @@ class CalDAV(object):
self.__attribute__[name][type] = None
return True
def format_date_tz(self, date, tz=None):
    """Convert a server-format timestamp string into the given timezone.

    Both the input and the returned string use the server's default
    datetime format.
    """
    fmt = tools.DEFAULT_SERVER_DATETIME_FORMAT
    return tools.server_to_local_timestamp(date, fmt, fmt, tz)
def parse_ics(self, cr, uid, child, cal_children=None, context=None):
""" parse calendaring and scheduling information
@param self: The object pointer
@ -228,9 +286,16 @@ class CalDAV(object):
@param context: A standard dictionary for contextual values """
att_data = []
exdates = []
_server_tzinfo = pytz.timezone(tools.get_server_timezone())
for cal_data in child.getChildren():
if cal_data.name.lower() == 'organizer':
self.ical_set(cal_data.name.lower(), {'name': cal_data.params.get('CN') and cal_data.params.get('CN')[0]}, 'value')
dmail = { 'name': cal_data.params.get('CN', ['',])[0],
'email': cal_data.value.replace('MAILTO:',''),
# TODO: company?
}
self.ical_set(cal_data.name.lower(), mailto2str(dmail), 'value')
continue
if cal_data.name.lower() == 'attendee':
ctx = context.copy()
@ -249,13 +314,20 @@ class CalDAV(object):
self.ical_set(cal_data.name.lower(), vals, 'value')
continue
if cal_data.name.lower() == 'exdate':
exval = map(lambda x: str(x), cal_data.value)
self.ical_set(cal_data.name.lower(), ','.join(exval), 'value')
exdates += cal_data.value
exvals = []
for exdate in exdates:
exvals.append(datetime.fromtimestamp(time.mktime(exdate.utctimetuple())).strftime('%Y%m%dT%H%M%S'))
self.ical_set(cal_data.name.lower(), ','.join(exvals), 'value')
continue
if cal_data.name.lower() in self.__attribute__:
if cal_data.params.get('X-VOBJ-ORIGINAL-TZID'):
self.ical_set('vtimezone', cal_data.params.get('X-VOBJ-ORIGINAL-TZID'), 'value')
# since we do convert, do we also need to save the original tzid?
# self.ical_set('vtimezone', cal_data.params.get('X-VOBJ-ORIGINAL-TZID'), 'value')
date_local = cal_data.value.astimezone(_server_tzinfo)
self.ical_set(cal_data.name.lower(), date_local, 'value')
continue
self.ical_set(cal_data.name.lower(), cal_data.value, 'value')
vals = map_data(cr, uid, self, context=context)
return vals
@ -272,6 +344,8 @@ class CalDAV(object):
timezones = []
for data in datas:
tzval = None
exfield = None
exdates = []
vevent = ical.add(name)
for field in self.__attribute__.keys():
map_field = self.ical_get(field, 'field')
@ -305,30 +379,53 @@ class CalDAV(object):
alarm_obj = self.pool.get('basic.calendar.alarm')
vevent = alarm_obj.export_cal(cr, uid, model, \
data[map_field][0], vevent, context=ctx)
elif field == 'vtimezone' and data[map_field] and data[map_field] not in timezones:
elif field == 'vtimezone' and data[map_field]:
tzval = data[map_field]
tz_obj = self.pool.get('basic.calendar.timezone')
ical = tz_obj.export_cal(cr, uid, None, \
data[map_field], ical, context=context)
timezones.append(data[map_field])
if tzval not in timezones:
tz_obj = self.pool.get('basic.calendar.timezone')
ical = tz_obj.export_cal(cr, uid, None, \
data[map_field], ical, context=context)
timezones.append(data[map_field])
if exfield:
exfield.params['TZID'] = [tzval.title()]
exdates_updated = []
for exdate in exdates:
date1 = (datetime.strptime(exdate, "%Y%m%dT%H%M%S")).strftime('%Y-%m-%d %H:%M:%S')
dest_date = self.format_date_tz(date1, tzval.title())
ex_date = (datetime.strptime(dest_date, "%Y-%m-%d %H:%M:%S")).strftime('%Y%m%dT%H%M%S')
exdates_updated.append(ex_date)
exfield.value = map(parser.parse, exdates_updated)
elif field == 'organizer' and data[map_field]:
organizer = str2mailto(data[map_field])
event_org = vevent.add('organizer')
organizer_id = data[map_field][0]
user_obj = self.pool.get('res.users')
organizer = user_obj.browse(cr, uid, organizer_id, context=context)
event_org.params['CN'] = [organizer.name]
event_org.value = 'MAILTO:' + (organizer.user_email or organizer.name)
event_org.params['CN'] = [organizer['name']]
event_org.value = 'MAILTO:' + (organizer.get('email') or '')
# TODO: company?
elif data[map_field]:
if map_type in ("char", "text"):
if field in ('exdate'):
vevent.add(field).value = map(parser.parse, (data[map_field]).split(','))
exfield = vevent.add(field)
exdates = (data[map_field]).split(',')
if tzval:
exfield.params['TZID'] = [tzval.title()]
exdates_updated = []
for exdate in exdates:
date1 = (datetime.strptime(exdate, "%Y%m%dT%H%M%S")).strftime('%Y-%m-%d %H:%M:%S')
dest_date = self.format_date_tz(date1, tzval.title())
ex_date = (datetime.strptime(dest_date, "%Y-%m-%d %H:%M:%S")).strftime('%Y%m%dT%H%M%S')
exdates_updated.append(ex_date)
exdates = exdates_updated
exfield.value = map(parser.parse, exdates)
else:
vevent.add(field).value = tools.ustr(data[map_field])
elif map_type in ('datetime', 'date') and data[map_field]:
dtfield = vevent.add(field)
dtfield.value = parser.parse(data[map_field])
if tzval:
dest_date = self.format_date_tz(data[map_field], tzval.title())
dtfield.params['TZID'] = [tzval.title()]
dtfield.value = parser.parse(dest_date)
else:
dtfield.value = parser.parse(data[map_field])
elif map_type == "timedelta":
vevent.add(field).value = timedelta(hours=data[map_field])
elif map_type == "many2one":
@ -344,7 +441,7 @@ class CalDAV(object):
vevent.add(field).value = key1
return vevent
def check_import(self, cr, uid, vals, context={}):
def check_import(self, cr, uid, vals, context=None):
"""
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -352,7 +449,8 @@ class CalDAV(object):
@param vals: Get Values
@param context: A standard dictionary for contextual values
"""
if not context:
context = {}
ids = []
model_obj = self.pool.get(context.get('model'))
recur_pool = {}
@ -379,8 +477,8 @@ class CalDAV(object):
event_id = model_obj.create(cr, uid, val)
recur_pool[u_id] = event_id
ids.append(event_id)
except Exception, e:
raise osv.except_osv(('Error !'), (str(e)))
except Exception:
raise
return ids
def export_cal(self, cr, uid, datas, vobj=None, context=None):
@ -451,6 +549,7 @@ class Calendar(CalDAV, osv.osv):
'line_ids': fields.one2many('basic.calendar.lines', 'calendar_id', 'Calendar Lines'),
'create_date': fields.datetime('Created Date', readonly=True),
'write_date': fields.datetime('Modifided Date', readonly=True),
'description': fields.text("description"),
}
def get_calendar_objects(self, cr, uid, ids, parent=None, domain=None, context=None):
@ -467,7 +566,7 @@ class Calendar(CalDAV, osv.osv):
continue
if line.name in ('valarm', 'attendee'):
continue
line_domain = eval(line.domain)
line_domain = eval(line.domain or '[]')
line_domain += domain
if ctx_res_id:
line_domain += [('id','=',ctx_res_id)]
@ -497,7 +596,7 @@ class Calendar(CalDAV, osv.osv):
continue
if line.name in ('valarm', 'attendee'):
continue
domain = eval(line.domain)
domain = eval(line.domain or '[]')
if ctx_res_id:
domain += [('id','=',ctx_res_id)]
mod_obj = self.pool.get(line.object_id.model)
@ -580,7 +679,7 @@ class basic_calendar_line(osv.osv):
'domain': lambda *a: '[]',
}
def create(self, cr, uid, vals, context={}):
def create(self, cr, uid, vals, context=None):
""" create calendar's line
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -662,7 +761,7 @@ class basic_calendar_fields(osv.osv):
raise osv.except_osv(_('Warning !'), _('Please provide proper configuration of "%s" in Calendar Lines' % (name)))
return True
def create(self, cr, uid, vals, context={}):
def create(self, cr, uid, vals, context=None):
""" Create Calendar's fields
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -739,7 +838,7 @@ class Event(CalDAV, osv.osv_memory):
'dtend': None, # Use: O-1, Type: DATE-TIME, Specifies the date and time that a calendar component ends.
}
def export_cal(self, cr, uid, datas, vobj='vevent', context={}):
def export_cal(self, cr, uid, datas, vobj='vevent', context=None):
""" Export calendar
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -792,7 +891,7 @@ class ToDo(CalDAV, osv.osv_memory):
'rrule': None,
}
def export_cal(self, cr, uid, datas, vobj='vevent', context={}):
def export_cal(self, cr, uid, datas, vobj='vevent', context=None):
""" Export Calendar
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -842,7 +941,7 @@ class Timezone(CalDAV, osv.osv_memory):
'x-prop': None, # Use: O-n, Type: Text,
}
def get_name_offset(self, cr, uid, tzid, context={}):
def get_name_offset(self, cr, uid, tzid, context=None):
""" Get Name Offset value
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -859,7 +958,7 @@ class Timezone(CalDAV, osv.osv_memory):
realoffset = (val < 0 and ('-' + realoffset) or ('+' + realoffset))
return (mydt.tzname(), realoffset)
def export_cal(self, cr, uid, model, tzid, ical, context={}):
def export_cal(self, cr, uid, model, tzid, ical, context=None):
""" Export Calendar
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -867,7 +966,8 @@ class Timezone(CalDAV, osv.osv_memory):
@param model: Get Model's name
@param context: A standard dictionary for contextual values
"""
if not context:
context = {}
ctx = context.copy()
ctx.update({'model': model})
cal_tz = ical.add('vtimezone')
@ -876,7 +976,8 @@ class Timezone(CalDAV, osv.osv_memory):
tzname, offset = self.get_name_offset(cr, uid, tzid)
tz_std.add("TZOFFSETFROM").value = offset
tz_std.add("TZOFFSETTO").value = offset
tz_std.add("DTSTART").value = datetime.now() # TODO
#TODO: Get start date for timezone
tz_std.add("DTSTART").value = datetime.strptime('1970-01-01 00:00:00', '%Y-%m-%d %H:%M:%S')
tz_std.add("TZNAME").value = tzname
return ical
@ -915,7 +1016,7 @@ class Alarm(CalDAV, osv.osv_memory):
'x-prop': None,
}
def export_cal(self, cr, uid, model, alarm_id, vevent, context={}):
def export_cal(self, cr, uid, model, alarm_id, vevent, context=None):
""" Export Calendar
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -924,7 +1025,8 @@ class Alarm(CalDAV, osv.osv_memory):
@param alarm_id: Get Alarm's Id
@param context: A standard dictionary for contextual values
"""
if not context:
context = {}
valarm = vevent.add('valarm')
alarm_object = self.pool.get(model)
alarm_data = alarm_object.read(cr, uid, alarm_id, [])
@ -1035,7 +1137,7 @@ class Attendee(CalDAV, osv.osv_memory):
vals = map_data(cr, uid, self, context=context)
return vals
def export_cal(self, cr, uid, model, attendee_ids, vevent, context={}):
def export_cal(self, cr, uid, model, attendee_ids, vevent, context=None):
""" Export Calendar
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -1044,7 +1146,8 @@ class Attendee(CalDAV, osv.osv_memory):
@param attendee_ids: Get Attendee's Id
@param context: A standard dictionary for contextual values
"""
if not context:
context = {}
attendee_object = self.pool.get(model)
ctx = context.copy()
ctx.update({'model': model})

View File

@ -33,19 +33,20 @@ class calendar_event_export(osv.osv_memory):
Export Calendar Event.
"""
def default_get(self, cr, uid, fields, context):
def default_get(self, cr, uid, fields, context=None):
"""
Get Default value for Name field.
"""
if not context:
context = {}
model = context.get('model', 'basic.calendar')
model_obj = self.pool.get(model)
calendar = model_obj.export_cal(cr, uid, context['active_ids'], context)
res = super(calendar_event_export, self).default_get( cr, uid, fields, context=context)
name = 'OpenERP %s.ics' % (model_obj._description)
if 'name' in fields:
res.update({'name': name})
if 'file_path' in fields:
calendar = model_obj.export_cal(cr, uid, context['active_ids'], context)
calendar = model_obj.export_cal(cr, uid, context['active_ids'], context=context)
res.update({'file_path': base64.encodestring(calendar)})
return res

View File

@ -107,7 +107,7 @@
<group col="2" colspan="2">
<separator colspan="2" string="Assignment"/>
<field name="user_id" />
<field name="organizer_id" groups="base.group_extended"/>
<field name="organizer" groups="base.group_extended"/>
<field name="section_id" widget="selection"
groups="base.group_extended"/>
</group><group col="2" colspan="2">

View File

@ -27,7 +27,7 @@ from datetime import datetime
class crm_meeting(osv.osv):
_inherit = 'crm.meeting'
def export_cal(self, cr, uid, ids, context={}):
def export_cal(self, cr, uid, ids, context=None):
"""
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -35,14 +35,16 @@ class crm_meeting(osv.osv):
@param ids: List of CRM Meetings IDs
@param context: A standard dictionary for contextual values
"""
if not context:
context = {}
ids = map(lambda x: base_calendar.base_calendar_id2real_id(x), ids)
event_data = self.read(cr, uid, ids)
event_data = self.read(cr, uid, ids, context=context)
event_obj = self.pool.get('basic.calendar.event')
ical = event_obj.export_cal(cr, uid, event_data, context={'model': self._name})
context.update({'model': self._name})
ical = event_obj.export_cal(cr, uid, event_data, context=context)
return ical.serialize()
def import_cal(self, cr, uid, data, data_id=None, context={}):
def import_cal(self, cr, uid, data, data_id=None, context=None):
"""
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -51,12 +53,13 @@ class crm_meeting(osv.osv):
@param data_id: calendar's Id
@param context: A standard dictionary for contextual values
"""
if not context:
context = {}
event_obj = self.pool.get('basic.calendar.event')
vals = event_obj.import_cal(cr, uid, data, context=context)
return self.check_import(cr, uid, vals, context=context)
def check_import(self, cr, uid, vals, context={}):
def check_import(self, cr, uid, vals, context=None):
"""
@param self: The object pointer
@param cr: the current row, from the database cursor,
@ -64,6 +67,8 @@ class crm_meeting(osv.osv):
@param vals: Get Values
@param context: A standard dictionary for contextual values
"""
if not context:
context = {}
ids = []
model_obj = self.pool.get(context.get('model'))
recur_pool = {}
@ -96,8 +101,8 @@ class crm_meeting(osv.osv):
event_id = model_obj.create(cr, uid, val)
recur_pool[u_id] = event_id
ids.append(event_id)
except Exception, e:
raise osv.except_osv(('Error !'), (str(e)))
except Exception:
raise
return ids
crm_meeting()

View File

@ -32,7 +32,7 @@
<record model="basic.calendar.fields" id="map_event_0">
<field name="name" ref="caldav.field_event_organizer"/>
<field name="type_id" ref="base_calendar.calendar_lines_event" />
<field name="field_id" search="[('name','=','organizer_id'),('model_id.model','=','calendar.event')]" />
<field name="field_id" search="[('name','=','organizer'),('model_id.model','=','calendar.event')]" />
<field name="fn">field</field>
</record>

View File

@ -98,7 +98,9 @@ class indexer(object):
def _doIndexFile(self,fpath):
raise NhException("Content not handled here")
def __repr__(self):
return "<indexer %s.%s>" %(self.__module__, self.__class__.__name__)
def mime_match(mime, mdict):
@ -128,7 +130,7 @@ class contentIndex(object):
f = True
if f:
self.__logger.debug('Register content indexer: %s', obj)
self.__logger.debug('Register content indexer: %r', obj)
if not f:
raise Exception("Your indexer should at least suport a mimetype or extension")

View File

@ -18,16 +18,13 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
from osv import osv, fields
from osv.orm import except_orm
import urlparse
import netsvc
import os
# import os
import nodes
import StringIO
# import StringIO
class document_directory_content_type(osv.osv):
_name = 'document.directory.content.type'
@ -83,7 +80,7 @@ class document_directory_content(osv.osv):
tname = ''
if content.include_name:
content_name = node.displayname or ''
obj = node.context._dirobj.pool.get(model)
# obj = node.context._dirobj.pool.get(model)
if content_name:
tname = (content.prefix or '') + content_name + (content.suffix or '') + (content.extension or '')
else:

View File

@ -20,13 +20,7 @@
#
##############################################################################
import base64
from osv import osv, fields
from osv.orm import except_orm
import urlparse
import os
class ir_action_report_xml(osv.osv):
_name="ir.actions.report.xml"

View File

@ -22,16 +22,10 @@
import base64
from osv import osv, fields
import urlparse
import os
import pooler
import netsvc
from osv.orm import except_orm
#import StringIO
from psycopg2 import Binary
# from psycopg2 import Binary
#from tools import config
import tools
from tools.translate import _

View File

@ -19,11 +19,8 @@
#
##############################################################################
import base64
from osv import osv, fields
from osv.orm import except_orm
import urlparse
import os
import nodes
@ -141,30 +138,6 @@ class document_directory(osv.osv):
_parent(dir_id, path)
return path
def ol_get_resource_path(self, cr, uid, dir_id, res_model, res_id):
# this method will be used in process module
# to be need test and Improvement if resource dir has parent resource (link resource)
path=[]
def _parent(dir_id,path):
parent=self.browse(cr, uid, dir_id)
if parent.parent_id and not parent.ressource_parent_type_id:
_parent(parent.parent_id.id,path)
path.append(parent.name)
else:
path.append(parent.name)
return path
directory=self.browse(cr,uid,dir_id)
model_ids=self.pool.get('ir.model').search(cr, uid, [('model','=',res_model)])
if directory:
_parent(dir_id,path)
path.append(self.pool.get(directory.ressource_type_id.model).browse(cr, uid, res_id).name)
#user=self.pool.get('res.users').browse(cr,uid,uid)
#return "ftp://%s:%s@localhost:%s/%s/%s"%(user.login,user.password,config.get('ftp_server_port',8021),cr.dbname,'/'.join(path))
# No way we will return the password!
return "ftp://user:pass@host:port/test/this"
return False
def _check_recursion(self, cr, uid, ids):
level = 100
while len(ids):
@ -178,8 +151,9 @@ class document_directory(osv.osv):
_constraints = [
(_check_recursion, 'Error! You can not create recursive Directories.', ['parent_id'])
]
def __init__(self, *args, **kwargs):
res = super(document_directory, self).__init__(*args, **kwargs)
super(document_directory, self).__init__(*args, **kwargs)
#self._cache = {}
def onchange_content_id(self, cr, uid, ids, ressource_type_id):
@ -223,31 +197,8 @@ class document_directory(osv.osv):
""" try to locate the node in uri,
Return a tuple (node_dir, remaining_path)
"""
return (nodes.node_database(context=ncontext), uri)
return (nodes.node_database(context=ncontext), uri)
def old_code():
if not uri:
return node_database(cr, uid, context=context)
turi = tuple(uri)
node = node_class(cr, uid, '/', False, context=context, type='database')
for path in uri[:]:
if path:
node = node.child(path)
if not node:
return False
oo = node.object and (node.object._name, node.object.id) or False
oo2 = node.object2 and (node.object2._name, node.object2.id) or False
return node
def ol_get_childs(self, cr, uid, uri, context={}):
node = self.get_object(cr, uid, uri, context)
if uri:
children = node.children()
else:
children= [node]
result = map(lambda node: node.path_get(), children)
return result
def copy(self, cr, uid, id, default=None, context=None):
if not default:
default ={}

View File

@ -37,7 +37,6 @@ from osv.orm import except_orm
import random
import string
import pooler
import netsvc
import nodes
from content_index import cntIndex
@ -189,7 +188,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
# we now open a *separate* cursor, to update the data.
# FIXME: this may be improved, for concurrency handling
par = self._get_parent()
uid = par.context.uid
# uid = par.context.uid
cr = pooler.get_db(par.context.dbname).cursor()
try:
if self.mode in ('w', 'w+', 'r+'):
@ -227,7 +226,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
" WHERE id = %s",
(out, len(data), par.file_id))
cr.commit()
except Exception, e:
except Exception:
logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
raise
finally:
@ -262,7 +261,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
# we now open a *separate* cursor, to update the data.
# FIXME: this may be improved, for concurrency handling
par = self._get_parent()
uid = par.context.uid
# uid = par.context.uid
cr = pooler.get_db(par.context.dbname).cursor()
try:
if self.mode in ('w', 'w+', 'r+'):
@ -288,18 +287,18 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
cr.execute('UPDATE ir_attachment SET db_datas = %s::bytea, file_size=%s, ' \
'index_content = %s, file_type = %s ' \
'WHERE id = %s',
(base64.encodestring(out), len(out), icont_u, mime, par.file_id))
(base64.encodestring(data), len(data), icont_u, mime, par.file_id))
elif self.mode == 'a':
out = self.getvalue()
data = self.getvalue()
# Yes, we're obviously using the wrong representation for storing our
# data as base64-in-bytea
cr.execute("UPDATE ir_attachment " \
"SET db_datas = encode( (COALESCE(decode(encode(db_datas,'escape'),'base64'),'') || decode(%s, 'base64')),'base64')::bytea , " \
" file_size = COALESCE(file_size, 0) + %s " \
" WHERE id = %s",
(base64.encodestring(out), len(out), par.file_id))
(base64.encodestring(data), len(data), par.file_id))
cr.commit()
except Exception, e:
except Exception:
logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
raise
finally:
@ -500,7 +499,7 @@ class document_storage(osv.osv):
# try to fix their directory.
if ira.file_size:
self._doclog.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id)
sfname = ira.name
# sfname = ira.name
fpath = os.path.join(boo.path,ira.store_fname or ira.name)
if os.path.exists(fpath):
return file(fpath,'rb').read()
@ -695,7 +694,7 @@ class document_storage(osv.osv):
store_fname = os.path.join(*store_path)
return { 'name': new_name, 'datas_fname': new_name, 'store_fname': store_fname }
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage" % sbro.type)
def simple_move(self, cr, uid, file_node, ndir_bro, context=None):
""" A preparation for a file move.
@ -734,7 +733,7 @@ class document_storage(osv.osv):
fname = fil_bo.store_fname
if not fname:
return ValueError("Tried to rename a non-stored file")
path = storage_bo.path
path = sbro.path
oldpath = os.path.join(path, fname)
for ch in ('*', '|', "\\", '/', ':', '"', '<', '>', '?', '..'):
@ -751,7 +750,7 @@ class document_storage(osv.osv):
os.rename(oldpath, newpath)
return { 'name': new_name, 'datas_fname': new_name, 'store_fname': new_name }
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage" % sbro.type)
document_storage()

View File

@ -19,17 +19,14 @@
#
##############################################################################
# import base64
# import StringIO
from osv import osv, fields
from osv.orm import except_orm
# import urlparse
import pooler
from tools.safe_eval import safe_eval
import errno
import os
# import os
import time
import logging
from StringIO import StringIO
@ -44,6 +41,8 @@ from StringIO import StringIO
# root: if we are at the first directory of a ressource
#
logger = logging.getLogger('doc2.nodes')
def _str2time(cre):
""" Convert a string with time representation (from db) into time (float)
@ -902,9 +901,8 @@ class node_res_obj(node_class):
def get_dav_eprop(self, cr, ns, prop):
if ns != 'http://groupdav.org/' or prop != 'resourcetype':
print "Who asked for %s:%s?" % (ns, prop)
logger.warning("Who asked for %s:%s?" % (ns, prop))
return None
res = {}
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
ctx = self.context.context.copy()
@ -1374,11 +1372,11 @@ class nodefd_content(StringIO, node_descriptor):
if self.mode in ('w', 'w+', 'r+'):
data = self.getvalue()
cntobj = par.context._dirobj.pool.get('document.directory.content')
cntobj.process_write(cr, uid, parent, data, ctx)
cntobj.process_write(cr, uid, par, data, par.context.context)
elif self.mode == 'a':
raise NotImplementedError
cr.commit()
except Exception, e:
except Exception:
logging.getLogger('document.content').exception('Cannot update db content #%d for close:', par.cnt_id)
raise
finally:

View File

@ -35,7 +35,7 @@ for fname in args:
for line in res[:5]:
print line
except Exception,e:
import traceback,sys
import traceback
tb_s = reduce(lambda x, y: x+y, traceback.format_exception( sys.exc_type, sys.exc_value, sys.exc_traceback))

View File

@ -1,9 +1,8 @@
# -*- encoding: utf-8 -*-
import os
import time
from tarfile import filemode
import StringIO
import base64
import logging
import errno
@ -12,12 +11,11 @@ import fnmatch
import pooler
import netsvc
import os
from service import security
from osv import osv
#from document.nodes import node_res_dir, node_res_obj
from document.nodes import get_node_context
import stat
def _get_month_name(month):
month=int(month)
@ -165,7 +163,7 @@ class abstracted_fs(object):
return ret
except EnvironmentError:
raise
except Exception,e:
except Exception:
self._log.exception('Cannot create item %s at node %s', objname, repr(node))
raise OSError(1, 'Operation not permited.')
@ -177,7 +175,7 @@ class abstracted_fs(object):
try:
res = node.open_data(cr, mode)
cr.commit()
except TypeError, e:
except TypeError:
raise IOError(errno.EINVAL, "No data")
return res
@ -188,12 +186,11 @@ class abstracted_fs(object):
name. Unlike mkstemp it returns an object with a file-like
interface.
"""
raise NotImplementedError
raise NotImplementedError # TODO
text = not 'b' in mode
# for unique file , maintain version if duplicate file
if dir:
# TODO
cr = dir.cr
uid = dir.uid
pool = pooler.get_pool(node.context.dbname)
@ -234,7 +231,7 @@ class abstracted_fs(object):
cdir = node.create_child_collection(cr, basename)
self._log.debug("Created child dir: %r", cdir)
cr.commit()
except Exception,e:
except Exception:
self._log.exception('Cannot create dir "%s" at node %s', basename, repr(node))
raise OSError(1, 'Operation not permited.')
@ -405,7 +402,7 @@ class abstracted_fs(object):
cr.commit()
except EnvironmentError:
raise
except Exception, err:
except Exception:
self._log.exception('Cannot rename "%s" to "%s" at "%s"', src, datacr[2], datacr[1])
raise OSError(1,'Operation not permited.')

View File

@ -121,15 +121,9 @@ import tempfile
import warnings
import random
import stat
from collections import deque
from tarfile import filemode
try:
import pwd
import grp
except ImportError:
pwd = grp = None
LOG_ACTIVE = True
__all__ = ['proto_cmds', 'Error', 'log', 'logline', 'logerror', 'DummyAuthorizer',
@ -192,24 +186,6 @@ proto_cmds = {
}
# hack around format_exc function of traceback module to grant
# backward compatibility with python < 2.4
if not hasattr(traceback, 'format_exc'):
try:
import cStringIO as StringIO
except ImportError:
import StringIO
def _format_exc():
f = StringIO.StringIO()
traceback.print_exc(file=f)
data = f.getvalue()
f.close()
return data
traceback.format_exc = _format_exc
def _strerror(err):
"""A wrap around os.strerror() which may be not available on all
platforms (e.g. pythonCE).
@ -590,16 +566,6 @@ class ActiveDTP(asyncore.dispatcher):
self.cmd_channel.respond("425 Can't connect to specified address.")
self.close()
try:
from collections import deque
except ImportError:
# backward compatibility with Python < 2.4 by replacing deque with a list
class deque(list):
def appendleft(self, obj):
list.insert(self, 0, obj)
class DTPHandler(asyncore.dispatcher):
"""Class handling server-data-transfer-process (server-DTP, see
RFC-959) managing data-transfer operations involving sending
@ -1227,21 +1193,9 @@ class AbstractedFS:
if not nlinks: # non-posix system, let's use a bogus value
nlinks = 1
size = st.st_size # file size
if pwd and grp:
# get user and group name, else just use the raw uid/gid
try:
uname = pwd.getpwuid(st.st_uid).pw_name
except KeyError:
uname = st.st_uid
try:
gname = grp.getgrgid(st.st_gid).gr_name
except KeyError:
gname = st.st_gid
else:
# on non-posix systems the only chance we use default
# bogus values for owner and group
uname = "owner"
gname = "group"
uname = st.st_uid or "owner"
gname = st.st_gid or "group"
# stat.st_mtime could fail (-1) if last mtime is too old
# in which case we return the local time as last mtime
try:
@ -1438,7 +1392,7 @@ class FTPHandler(asynchat.async_chat):
"""
try:
asynchat.async_chat.__init__(self, conn=conn) # python2.5
except TypeError, e:
except TypeError:
asynchat.async_chat.__init__(self, sock=conn) # python2.6
self.server = server
self.remote_ip, self.remote_port = self.socket.getpeername()[:2]
@ -1469,13 +1423,10 @@ class FTPHandler(asynchat.async_chat):
# mlsx facts attributes
self.current_facts = ['type', 'perm', 'size', 'modify']
if os.name == 'posix':
self.current_facts.append('unique')
self.current_facts.append('unique')
self.available_facts = self.current_facts[:]
if pwd and grp:
self.available_facts += ['unix.mode', 'unix.uid', 'unix.gid']
if os.name == 'nt':
self.available_facts.append('create')
self.available_facts += ['unix.mode', 'unix.uid', 'unix.gid']
self.available_facts.append('create')
# dtp attributes
self.data_server = None
@ -1866,7 +1817,7 @@ class FTPHandler(asynchat.async_chat):
self.respond('%s %s.' % (str(ret_code), why))
raise FTPExceptionSent(why)
except Exception, e:
except Exception, err:
cmdname = function.__name__
try:
logerror(traceback.format_exc())
@ -2003,7 +1954,7 @@ class FTPHandler(asynchat.async_chat):
assert len(octs) == 4
for x in octs:
assert 0 <= x <= 255
except (AssertionError, ValueError, OverflowError), err:
except (AssertionError, ValueError, OverflowError):
self.respond("501 Invalid EPRT format.")
else:
self._make_eport(ip, port)

View File

@ -20,8 +20,9 @@
##############################################################################
from osv import osv, fields
from tools.translate import _
# from tools.translate import _
from document_ftp import ftpserver
class document_ftp_browse(osv.osv_memory):
_name = 'document.ftp.browse'
_description = 'Document FTP Browse'
@ -35,8 +36,8 @@ class document_ftp_browse(osv.osv_memory):
if 'url' in fields:
user_pool = self.pool.get('res.users')
current_user = user_pool.browse(cr, uid, uid, context=context)
dir_pool = self.pool.get('document.directory')
data_pool = self.pool.get('ir.model.data')
# dir_pool = self.pool.get('document.directory')
data_pool = self.pool.get('ir.model.data')
aid = data_pool._get_id(cr, uid, 'document_ftp', 'action_document_browse')
aid = data_pool.browse(cr, uid, aid, context=context).res_id
ftp_url = self.pool.get('ir.actions.url').browse(cr, uid, aid, context=context)
@ -50,12 +51,12 @@ class document_ftp_browse(osv.osv_memory):
def browse_ftp(self, cr, uid, ids, context):
data_id = ids and ids[0] or False
data = self.browse(cr, uid, data_id, context)
data = self.browse(cr, uid, data_id, context)
final_url = data.url
return {
'type': 'ir.actions.act_url',
'url':final_url,
'target': 'new'
}
document_ftp_browse()

View File

@ -19,14 +19,8 @@
#
##############################################################################
import base64
from osv import osv, fields
from osv.orm import except_orm
from tools import config
import urlparse
import os
class document_ftp_configuration(osv.osv_memory):
@ -46,10 +40,10 @@ class document_ftp_configuration(osv.osv_memory):
def execute(self, cr, uid, ids, context=None):
conf = self.browse(cr, uid, ids[0], context)
dir_pool = self.pool.get('document.directory')
data_pool = self.pool.get('ir.model.data')
data_pool = self.pool.get('ir.model.data')
# Update the action for FTP browse.
aid = data_pool._get_id(cr, uid, 'document_ftp', 'action_document_browse')
aid = data_pool.browse(cr, uid, aid, context=context).res_id
self.pool.get('ir.actions.url').write(cr, uid, [aid], {'url': 'ftp://'+(conf.host or 'localhost:8021')+'/'})
document_ftp_configuration()

View File

@ -129,6 +129,7 @@ class openerp_dav_handler(dav_interface):
raise default_exc(err.strerror)
except Exception,e:
import traceback
if cr: cr.close()
self.parent.log_error("Cannot %s: %s", opname, str(e))
self.parent.log_message("Exc: %s",traceback.format_exc())
raise default_exc("Operation failed")
@ -522,7 +523,6 @@ class openerp_dav_handler(dav_interface):
def put(self, uri, data, content_type=None):
""" put the object into the filesystem """
self.parent.log_message('Putting %s (%d), %s'%( misc.ustr(uri), data and len(data) or 0, content_type))
parent='/'.join(uri.split('/')[:-1])
cr, uid, pool,dbname, uri2 = self.get_cr(uri)
if not dbname:
if cr: cr.close()
@ -535,20 +535,44 @@ class openerp_dav_handler(dav_interface):
objname = uri2[-1]
ext = objname.find('.') >0 and objname.split('.')[1] or False
ret = None
if not node:
dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
if not dir_node:
cr.close()
raise DAV_NotFound('Parent folder not found')
self._try_function(dir_node.create_child, (cr, objname, data),
newchild = self._try_function(dir_node.create_child, (cr, objname, data),
"create %s" % objname, cr=cr)
if not newchild:
cr.commit()
cr.close()
raise DAV_Error(400, "Failed to create resource")
uparts=urlparse.urlparse(uri)
fileloc = '/'.join(newchild.full_path())
if isinstance(fileloc, unicode):
fileloc = fileloc.encode('utf-8')
# the uri we get is a mangled one, where the davpath has been removed
davpath = self.parent.get_davpath()
surl = '%s://%s' % (uparts[0], uparts[1])
uloc = urllib.quote(fileloc)
hurl = False
if uri != ('/'+uloc) and uri != (surl + '/' + uloc):
hurl = '%s%s/%s/%s' %(surl, davpath, dbname, uloc)
etag = False
try:
etag = str(newchild.get_etag(cr))
except Exception, e:
self.parent.log_error("Cannot get etag for node: %s" % e)
ret = (hurl, etag)
else:
self._try_function(node.set_data, (cr, data), "save %s" % objname, cr=cr)
cr.commit()
cr.close()
return 201
return ret
def rmcol(self,uri):
""" delete a collection """

View File

@ -24,8 +24,15 @@ import xml.dom.minidom
domimpl = xml.dom.minidom.getDOMImplementation()
import urlparse
import urllib
from DAV import utils
from DAV.propfind import PROPFIND
from osv import osv
try:
from DAV import utils
from DAV.propfind import PROPFIND
except ImportError:
raise osv.except_osv('PyWebDAV Import Error!','Please install PyWebDAV \
from http://code.google.com/p/pywebdav/downloads/detail?name=PyWebDAV-0.9.4.tar.gz&can=2&q=/')
import tools

View File

@ -214,14 +214,21 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
return self.send_status(ec)
headers = {}
etag = None
if location and isinstance(location, tuple):
etag = location[1]
location = location[0]
# note that we have allowed for > 2 elems
if location:
headers['Location'] = location
try:
etag = dc.get_prop(location or uri, "DAV:", "getetag")
headers['ETag'] = etag
except:
pass
else:
try:
if not etag:
etag = dc.get_prop(location or uri, "DAV:", "getetag")
if etag:
headers['ETag'] = str(etag)
except Exception:
pass
self.send_body(None, '201', 'Created', '', headers=headers)

View File

@ -21,13 +21,19 @@
from osv import fields, osv
from caldav import calendar
from datetime import datetime
from project.project import task as base_project_task
class project_task(osv.osv):
_name = "project.task"
_inherit = ["calendar.todo", "project.task"]
_columns = {
# force inherit from project.project_task so that
# calendar.todo.active is masked oute
'active': base_project_task._columns['active'],
'write_date': fields.datetime('Write Date'),
'create_date': fields.datetime('Create Date'),
'create_date': fields.datetime('Create Date', readonly=True),
'attendee_ids': fields.many2many('calendar.attendee', \
'task_attendee_rel', 'task_id', 'attendee_id', 'Attendees'),
'state': fields.selection([('draft', 'Draft'),('open', 'In Progress'),('pending', 'Pending'), ('cancelled', 'Cancelled'), ('done', 'Done')], 'State', readonly=True, required=True,