[BACKPORT] Stable fixes and refactorings to Trunk

bzr revid: jvo@tinyerp.com-20100520063044-sviy1e9pybirhpsw
Jay (Open ERP) 2010-05-20 12:00:44 +05:30
commit e3ccef27d3
33 changed files with 1331 additions and 384 deletions

View File

@@ -9,7 +9,8 @@ include bin/server.pkey
 include bin/gpl.txt
 include man/openerp-server.1
 include man/openerp_serverrc.5
-recursive-include pixmaps
+recursive-include pixmaps *
+recursive-include win32 *
 recursive-include doc *
 recursive-include bin *xml *xsl *sql *rml *sxw *csv *rng
 graft bin/addons/

View File

@@ -1302,14 +1302,14 @@
 <field name="subject" colspan="4" attrs="{'required':[('state','=','email')]}"/>
 <field name="message" select="2" colspan="4" attrs="{'required':[('state','=','email')]}"/>
 <newline/>
-<label colspan="4" string="Access all the fields related to the current object using expression in double brackets, i.e.[[ object.partner_id.name ]]" align="0.0"/>
+<label colspan="4" string="Access all the fields related to the current object using expressions, i.e. object.partner_id.name " align="0.0"/>
 </page>
 <page string="SMS Configuration" attrs="{'invisible':[('state','!=','sms')]}">
 <separator colspan="4" string="SMS Configuration"/>
 <field name="mobile" domain="[('model_id','=',model_id)]" attrs="{'required':[('state','=','sms')]}"/>
 <field name="sms" colspan="4" attrs="{'required':[('state','=','sms')]}"/>
 <newline/>
-<label colspan="4" string="Access all the fields related to the current object using expression in double brackets, i.e. [[ object.partner_id.name ]]" align="0.0"/>
+<label colspan="4" string="Access all the fields related to the current object using expressions, i.e. object.partner_id.name " align="0.0"/>
 </page>
 <page string="Create / Write / Copy" attrs="{'invisible':[('state','!=','object_create'), ('state','!=','object_write'), ('state','!=','object_copy')]}">
 <separator colspan="4" string="Fields Mapping"/>

View File

@ -455,7 +455,7 @@ class actions_server(osv.osv):
'type': lambda *a: 'ir.actions.server', 'type': lambda *a: 'ir.actions.server',
'sequence': lambda *a: 5, 'sequence': lambda *a: 5,
'code': lambda *a: """# You can use the following variables 'code': lambda *a: """# You can use the following variables
# - object # - object or obj
# - time # - time
# - cr # - cr
# - uid # - uid
@ -532,7 +532,6 @@ class actions_server(osv.osv):
def run(self, cr, uid, ids, context={}): def run(self, cr, uid, ids, context={}):
logger = netsvc.Logger() logger = netsvc.Logger()
for action in self.browse(cr, uid, ids, context): for action in self.browse(cr, uid, ids, context):
obj_pool = self.pool.get(action.model_id.model) obj_pool = self.pool.get(action.model_id.model)
obj = obj_pool.browse(cr, uid, context['active_id'], context=context) obj = obj_pool.browse(cr, uid, context['active_id'], context=context)
@@ -554,19 +553,26 @@ class actions_server(osv.osv):
                 return self.pool.get(action.action_id.type)\
                         .read(cr, uid, action.action_id.id, context=context)
-            if action.state == 'code':
-                localdict = {
-                    'self': self.pool.get(action.model_id.model),
-                    'context': context,
-                    'time': time,
-                    'ids': ids,
-                    'cr': cr,
-                    'uid': uid,
-                    'object':obj
-                }
-                exec action.code in localdict
-                if 'action' in localdict:
-                    return localdict['action']
+            if config['server_actions_allow_code']:
+                localdict = {
+                    'self': self.pool.get(action.model_id.model),
+                    'context': context,
+                    'time': time,
+                    'ids': ids,
+                    'cr': cr,
+                    'uid': uid,
+                    'object':obj,
+                    'obj': obj,
+                }
+                exec action.code in localdict
+                if 'action' in localdict:
+                    return localdict['action']
+            else:
+                netsvc.Logger().notifyChannel(
+                    self._name, netsvc.LOG_ERROR,
+                    "%s is a `code` server action, but "
+                    "it isn't allowed in this configuration.\n\n"
+                    "See server options to enable it"%action)
             if action.state == 'email':
                 user = config['email_from']
@ -580,7 +586,7 @@ class actions_server(osv.osv):
logger.notifyChannel('email', netsvc.LOG_INFO, 'Partner Email address not Specified!') logger.notifyChannel('email', netsvc.LOG_INFO, 'Partner Email address not Specified!')
continue continue
if not user: if not user:
raise osv.except_osv(_('Error'), _("Please specify server option --smtp-from !")) raise osv.except_osv(_('Error'), _("Please specify server option --email-from !"))
subject = self.merge_message(cr, uid, action.subject, action, context) subject = self.merge_message(cr, uid, action.subject, action, context)
body = self.merge_message(cr, uid, action.message, action, context) body = self.merge_message(cr, uid, action.message, action, context)
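Note: the `code`-action hunk above stops exec'ing user-supplied code unconditionally and gates it on a server option (added further below as --enable-code-actions). A minimal standalone sketch of the same gate, using a plain dict for the configuration and the standard logging module; names here are illustrative, not the OpenERP API:

    import logging

    config = {"server_actions_allow_code": False}   # assumed config store, off by default
    log = logging.getLogger("actions_server")

    def run_code_action(action_code, localdict):
        """Execute a 'code' server action only when the server allows it."""
        if config["server_actions_allow_code"]:
            exec(action_code, localdict)            # user-supplied code, opt-in only
            # a 'code' action may hand a follow-up action back to the client
            return localdict.get("action")
        log.error("code server actions are disabled; "
                  "start the server with --enable-code-actions to allow them")
        return None

    # usage: the action reads and returns data through localdict
    result = run_code_action("action = {'total': sum(ids)}", {"ids": [1, 2, 3]})
    print(result)   # None, because the option is off by default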

View File

@ -146,16 +146,19 @@ class ir_cron(osv.osv, netsvc.Agent):
def create(self, cr, uid, vals, context=None): def create(self, cr, uid, vals, context=None):
res = super(ir_cron, self).create(cr, uid, vals, context=context) res = super(ir_cron, self).create(cr, uid, vals, context=context)
cr.commit() cr.commit()
self.cancel(cr.dbname)
self._poolJobs(cr.dbname) self._poolJobs(cr.dbname)
return res return res
def write(self, cr, user, ids, vals, context=None): def write(self, cr, user, ids, vals, context=None):
res = super(ir_cron, self).write(cr, user, ids, vals, context=context) res = super(ir_cron, self).write(cr, user, ids, vals, context=context)
cr.commit() cr.commit()
self.cancel(cr.dbname)
self._poolJobs(cr.dbname) self._poolJobs(cr.dbname)
return res return res
def unlink(self, cr, uid, ids, context=None): def unlink(self, cr, uid, ids, context=None):
res = super(ir_cron, self).unlink(cr, uid, ids, context=context) res = super(ir_cron, self).unlink(cr, uid, ids, context=context)
cr.commit() cr.commit()
self.cancel(cr.dbname)
self._poolJobs(cr.dbname) self._poolJobs(cr.dbname)
return res return res
ir_cron() ir_cron()
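Note: the ir_cron hunk adds a self.cancel(cr.dbname) before every _poolJobs() call, so that a create/write/unlink on a cron record drops the stale wake-up timer before rescheduling. A small sketch of that cancel-then-reschedule pattern with threading.Timer, assuming one pending timer per database (not the actual netsvc.Agent implementation):

    import threading, time

    _timers = {}   # dbname -> pending threading.Timer

    def cancel(dbname):
        """Drop the pending wake-up for this database, if any."""
        timer = _timers.pop(dbname, None)
        if timer:
            timer.cancel()

    def pool_jobs(dbname, delay=60.0):
        """(Re)schedule the next cron poll; cancel first so edits to cron
        records take effect now instead of after the old delay."""
        cancel(dbname)
        timer = threading.Timer(delay, lambda: print("polling", dbname))
        timer.daemon = True
        _timers[dbname] = timer
        timer.start()

    pool_jobs("demo", delay=0.1)
    pool_jobs("demo", delay=0.1)   # cancels the first timer, reschedules
    time.sleep(0.2)                # "polling demo" is printed exactly once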

View File

@ -82,6 +82,8 @@ class ir_model(osv.osv):
return super(ir_model,self).write(cr, user, ids, vals, context) return super(ir_model,self).write(cr, user, ids, vals, context)
def create(self, cr, user, vals, context=None): def create(self, cr, user, vals, context=None):
if context is None:
context = {}
if context and context.get('manual',False): if context and context.get('manual',False):
vals['state']='manual' vals['state']='manual'
res = super(ir_model,self).create(cr, user, vals, context) res = super(ir_model,self).create(cr, user, vals, context)
@ -260,6 +262,8 @@ class ir_model_fields(osv.osv):
if 'model_id' in vals: if 'model_id' in vals:
model_data = self.pool.get('ir.model').browse(cr, user, vals['model_id']) model_data = self.pool.get('ir.model').browse(cr, user, vals['model_id'])
vals['model'] = model_data.model vals['model'] = model_data.model
if context is None:
context = {}
if context and context.get('manual',False): if context and context.get('manual',False):
vals['state'] = 'manual' vals['state'] = 'manual'
res = super(ir_model_fields,self).create(cr, user, vals, context) res = super(ir_model_fields,self).create(cr, user, vals, context)
@ -267,7 +271,7 @@ class ir_model_fields(osv.osv):
if not vals['name'].startswith('x_'): if not vals['name'].startswith('x_'):
raise except_orm(_('Error'), _("Custom fields must have a name that starts with 'x_' !")) raise except_orm(_('Error'), _("Custom fields must have a name that starts with 'x_' !"))
if 'relation' in vals and not self.pool.get('ir.model').search(cr, user, [('model','=',vals['relation'])]): if vals.get('relation',False) and not self.pool.get('ir.model').search(cr, user, [('model','=',vals['relation'])]):
raise except_orm(_('Error'), _("Model %s Does not Exist !" % vals['relation'])) raise except_orm(_('Error'), _("Model %s Does not Exist !" % vals['relation']))
if self.pool.get(vals['model']): if self.pool.get(vals['model']):
@ -286,7 +290,7 @@ class ir_model_access(osv.osv):
_columns = { _columns = {
'name': fields.char('Name', size=64, required=True), 'name': fields.char('Name', size=64, required=True),
'model_id': fields.many2one('ir.model', 'Object', required=True), 'model_id': fields.many2one('ir.model', 'Object', required=True),
'group_id': fields.many2one('res.groups', 'Group'), 'group_id': fields.many2one('res.groups', 'Group', ondelete='cascade'),
'perm_read': fields.boolean('Read Access'), 'perm_read': fields.boolean('Read Access'),
'perm_write': fields.boolean('Write Access'), 'perm_write': fields.boolean('Write Access'),
'perm_create': fields.boolean('Create Access'), 'perm_create': fields.boolean('Create Access'),
@ -453,7 +457,7 @@ class ir_model_data(osv.osv):
def _get_id(self, cr, uid, module, xml_id): def _get_id(self, cr, uid, module, xml_id):
ids = self.search(cr, uid, [('module','=',module),('name','=', xml_id)]) ids = self.search(cr, uid, [('module','=',module),('name','=', xml_id)])
if not ids: if not ids:
raise Exception('No references to %s.%s' % (module, xml_id)) raise ValueError('No references to %s.%s' % (module, xml_id))
# the sql constraints ensure us we have only one result # the sql constraints ensure us we have only one result
return ids[0] return ids[0]
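Note: _get_id now raises ValueError instead of a bare Exception when an external id cannot be resolved, which lets callers (such as the CSV import code changed later in this commit) catch the lookup failure specifically. A sketch of that lookup contract, with a plain dict standing in for the ir.model.data table (the data is illustrative):

    # module.xml_id -> record id; a stand-in for the ir.model.data table
    _XML_IDS = {("base", "res_partner_agrolait"): 7}

    def get_id(module, xml_id):
        """Return the record id registered for module.xml_id, or raise ValueError."""
        try:
            return _XML_IDS[(module, xml_id)]
        except KeyError:
            raise ValueError('No references to %s.%s' % (module, xml_id))

    print(get_id("base", "res_partner_agrolait"))   # 7
    try:
        get_id("base", "missing_record")
    except ValueError as exc:
        print("lookup failed:", exc)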

View File

@ -150,8 +150,8 @@ class ir_translation(osv.osv):
if not context: if not context:
context = {} context = {}
ids = super(ir_translation, self).create(cursor, user, vals, context=context) ids = super(ir_translation, self).create(cursor, user, vals, context=context)
for trans_obj in self.read(cursor, user, [ids], ['name','type','res_id'], context=context): for trans_obj in self.read(cursor, user, [ids], ['name','type','res_id','src','lang'], context=context):
self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], lang=context.get('lang','en_US')) self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']]) self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']])
return ids return ids
@ -159,16 +159,16 @@ class ir_translation(osv.osv):
if not context: if not context:
context = {} context = {}
result = super(ir_translation, self).write(cursor, user, ids, vals, context=context) result = super(ir_translation, self).write(cursor, user, ids, vals, context=context)
for trans_obj in self.read(cursor, user, ids, ['name','type','res_id'], context=context): for trans_obj in self.read(cursor, user, ids, ['name','type','res_id','src','lang'], context=context):
self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], lang=context.get('lang','en_US')) self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']]) self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']])
return result return result
def unlink(self, cursor, user, ids, context=None): def unlink(self, cursor, user, ids, context=None):
if not context: if not context:
context = {} context = {}
for trans_obj in self.read(cursor, user, ids, ['name','type','res_id'], context=context): for trans_obj in self.read(cursor, user, ids, ['name','type','res_id','src','lang'], context=context):
self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], lang=context.get('lang','en_US')) self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']]) self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']])
result = super(ir_translation, self).unlink(cursor, user, ids, context=context) result = super(ir_translation, self).unlink(cursor, user, ids, context=context)
return result return result

View File

@@ -31,6 +31,10 @@ def one_in(setA, setB):
             return True
     return False

+def cond(C, X, Y):
+    if C: return X
+    return Y
+
 class many2many_unique(fields.many2many):
     def set(self, cr, obj, id, name, values, user=None, context=None):
         if not values:
@@ -63,19 +67,24 @@ class ir_ui_menu(osv.osv):
         # radical but this doesn't frequently happen
         self._cache = {}

-    def search(self, cr, uid, args, offset=0, limit=2000, order=None,
+    def search(self, cr, uid, args, offset=0, limit=None, order=None,
             context=None, count=False):
-        if context is None:
-            context = {}
-        ids = osv.orm.orm.search(self, cr, uid, args, offset, limit, order, context=context, count=(count and uid==1))
+        super_offset = cond(uid == 1, offset, 0)
+        super_limit = cond(uid == 1, limit, None)
+        super_count = cond(uid == 1, count, False)
+        ids = super(ir_ui_menu, self).search(cr, uid, args, super_offset,
+                                             super_limit, order,
+                                             context=context, count=super_count)
+        if uid == 1 or count:
+            return ids
         if not ids:
            if count:
                return 0
            return []
-        if count and ids:
-            return ids

         modelaccess = self.pool.get('ir.model.access')
         user_groups = set(self.pool.get('res.users').read(cr, 1, uid, ['groups_id'])['groups_id'])
@@ -123,6 +132,11 @@ class ir_ui_menu(osv.osv):
                 result.append(menu.id)
             self._cache[key] = True

+        if offset:
+            result = result[long(offset):]
+        if limit:
+            result = result[:long(limit)]
+
         if count:
             return len(result)
         return result
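Note: the rewritten ir.ui.menu search only passes offset, limit and count straight through for the superuser; for other users it fetches the full candidate list, filters it by access rights, and only then applies offset and limit, so paging no longer silently drops visible menus (the new cond() helper just picks the argument per case). A sketch of that filter-then-slice idea; the can_see predicate stands in for the access check and is illustrative:

    def search_menus(all_ids, uid, can_see, offset=0, limit=None, count=False):
        if uid == 1:                        # superuser: no access filtering needed
            ids = all_ids[offset:]
            if limit:
                ids = ids[:limit]
            return len(ids) if count else ids
        visible = [i for i in all_ids if can_see(uid, i)]
        visible = visible[offset:]          # slice only after filtering
        if limit:
            visible = visible[:limit]
        return len(visible) if count else visible

    print(search_menus(list(range(10)), uid=2,
                       can_see=lambda uid, i: i % 2 == 0,
                       offset=1, limit=3))   # [2, 4, 6]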

View File

@ -200,6 +200,10 @@ class view_sc(osv.osv):
'resource': lambda *a: 'ir.ui.menu', 'resource': lambda *a: 'ir.ui.menu',
'user_id': lambda obj, cr, uid, context: uid, 'user_id': lambda obj, cr, uid, context: uid,
} }
_sql_constraints = [
('shortcut_unique', 'unique(res_id, user_id)', 'Shortcut for this menu already exists!'),
]
view_sc() view_sc()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -146,11 +146,13 @@ class module(osv.osv):
], string='State', readonly=True), ], string='State', readonly=True),
'demo': fields.boolean('Demo data'), 'demo': fields.boolean('Demo data'),
'license': fields.selection([ 'license': fields.selection([
('GPL-2', 'GPL-2'), ('GPL-2', 'GPL Version 2'),
('GPL-2 or any later version', 'GPL-2 or later version'), ('GPL-2 or any later version', 'GPL-2 or later version'),
('GPL-3', 'GPL-3'), ('GPL-3', 'GPL Version 3'),
('GPL-3 or any later version', 'GPL-3 or later version'), ('GPL-3 or any later version', 'GPL-3 or later version'),
('Other proprietary', 'Other proprietary') ('AGPL-3', 'Affero GPL-3'),
('Other OSI approved licence', 'Other OSI Approved Licence'),
('Other proprietary', 'Other Proprietary')
], string='License', readonly=True), ], string='License', readonly=True),
'menus_by_module': fields.function(_get_views, method=True, string='Menus', type='text', multi="meta", store=True), 'menus_by_module': fields.function(_get_views, method=True, string='Menus', type='text', multi="meta", store=True),
'reports_by_module': fields.function(_get_views, method=True, string='Reports', type='text', multi="meta", store=True), 'reports_by_module': fields.function(_get_views, method=True, string='Reports', type='text', multi="meta", store=True),
@ -161,7 +163,7 @@ class module(osv.osv):
_defaults = { _defaults = {
'state': lambda *a: 'uninstalled', 'state': lambda *a: 'uninstalled',
'demo': lambda *a: False, 'demo': lambda *a: False,
'license': lambda *a: 'GPL-2', 'license': lambda *a: 'AGPL-3',
} }
_order = 'name' _order = 'name'

View File

@ -275,6 +275,26 @@
<field name="type">default</field> <field name="type">default</field>
<field name="partner_id" ref="res_partner_agrolait"/> <field name="partner_id" ref="res_partner_agrolait"/>
</record> </record>
<record id="res_partner_address_8delivery" model="res.partner.address">
<field name="city">Wavre</field>
<field name="name">Paul Lelitre</field>
<field name="zip">5478</field>
<field name="title">M.</field>
<field model="res.country" name="country_id" search="[('name','=','Belgium')]"/>
<field name="street">71 rue de Chimay</field>
<field name="type">delivery</field>
<field name="partner_id" ref="res_partner_agrolait"/>
</record>
<record id="res_partner_address_8invoice" model="res.partner.address">
<field name="city">Wavre</field>
<field name="name">Serge Lelitre</field>
<field name="zip">5478</field>
<field name="title">M.</field>
<field model="res.country" name="country_id" search="[('name','=','Belgium')]"/>
<field name="street">69 rue de Chimay</field>
<field name="type">invoice</field>
<field name="partner_id" ref="res_partner_agrolait"/>
</record>
<record id="res_partner_address_9" model="res.partner.address"> <record id="res_partner_address_9" model="res.partner.address">
<field name="city">Paris</field> <field name="city">Paris</field>
<field name="name">Arthur Grosbonnet</field> <field name="name">Arthur Grosbonnet</field>

View File

@ -64,7 +64,7 @@ class lang(osv.osv):
@tools.cache(skiparg=3) @tools.cache(skiparg=3)
def _lang_data_get(self, cr, uid, lang_id, monetary=False): def _lang_data_get(self, cr, uid, lang_id, monetary=False):
conv = localeconv() conv = localeconv()
lang_obj=self.browse(cr,uid,lang_id) lang_obj = self.browse(cr, uid, lang_id)
thousands_sep = lang_obj.thousands_sep or conv[monetary and 'mon_thousands_sep' or 'thousands_sep'] thousands_sep = lang_obj.thousands_sep or conv[monetary and 'mon_thousands_sep' or 'thousands_sep']
decimal_point = lang_obj.decimal_point decimal_point = lang_obj.decimal_point
grouping = lang_obj.grouping grouping = lang_obj.grouping
@ -80,6 +80,7 @@ class lang(osv.osv):
if not grouping: if not grouping:
return (s, 0) return (s, 0)
result = "" result = ""
seps = 0 seps = 0
spaces = "" spaces = ""
@ -129,7 +130,7 @@ class lang(osv.osv):
parts = formatted.split('.') parts = formatted.split('.')
if grouping: if grouping:
parts[0], seps = self._group(cr,uid,ids,parts[0], monetary=monetary, grouping=lang_grouping, thousands_sep=thousands_sep) parts[0], seps = self._group(cr,uid,ids,parts[0], monetary=monetary, grouping=lang_grouping, thousands_sep=thousands_sep)
formatted = decimal_point.join(parts) formatted = decimal_point.join(parts)
while seps: while seps:

View File

@ -6,9 +6,13 @@
<field name="name">Partner Manager</field> <field name="name">Partner Manager</field>
</record> </record>
<record model="ir.ui.menu" id="menu_base_config"> <record model="ir.ui.menu" id="menu_base_partner">
<field eval="[(6,0,[ref('group_system'), ref('group_partner_manager')])]" name="groups_id"/> <field eval="[(6,0,[ref('group_system'), ref('group_partner_manager')])]" name="groups_id"/>
</record> </record>
<record model="ir.ui.menu" id="menu_base_config">
<field eval="[(6,0,[ref('group_system'), ref('group_partner_manager')])]" name="groups_id"/>
</record>
</data> </data>
</openerp> </openerp>

View File

@ -106,10 +106,11 @@ roles()
def _lang_get(self, cr, uid, context={}): def _lang_get(self, cr, uid, context={}):
obj = self.pool.get('res.lang') obj = self.pool.get('res.lang')
ids = obj.search(cr, uid, []) ids = obj.search(cr, uid, [('translatable','=',True)])
res = obj.read(cr, uid, ids, ['code', 'name'], context) res = obj.read(cr, uid, ids, ['code', 'name'], context)
res = [(r['code'], r['name']) for r in res] res = [(r['code'], r['name']) for r in res]
return res return res
def _tz_get(self,cr,uid, context={}): def _tz_get(self,cr,uid, context={}):
return [(x, x) for x in pytz.all_timezones] return [(x, x) for x in pytz.all_timezones]
@ -245,6 +246,15 @@ class users(osv.osv):
result = override_password(result) result = override_password(result)
else: else:
result = map(override_password, result) result = map(override_password, result)
if isinstance(result, list):
for rec in result:
if not rec.get('action_id',True):
rec['action_id'] = (self._get_menu(cr, uid),'Menu')
else:
if not result.get('action_id',True):
result['action_id'] = (self._get_menu(cr, uid),'Menu')
return result return result
@ -467,6 +477,13 @@ class groups2(osv.osv): ##FIXME: Is there a reason to inherit this object ?
_columns = { _columns = {
'users': fields.many2many('res.users', 'res_groups_users_rel', 'gid', 'uid', 'Users'), 'users': fields.many2many('res.users', 'res_groups_users_rel', 'gid', 'uid', 'Users'),
} }
def unlink(self, cr, uid, ids, context=None):
for record in self.read(cr, uid, ids, ['users'], context=context):
if record['users']:
raise osv.except_osv(_('Warning !'), _('Make sure you have no users linked with the group(s)!'))
return super(groups2, self).unlink(cr, uid, ids, context=context)
groups2() groups2()
class res_config_view(osv.osv_memory): class res_config_view(osv.osv_memory):

View File

@ -19,7 +19,7 @@
"access_ir_module_module_group_user","ir_module_module group_user","model_ir_module_module","group_system",1,1,1,1 "access_ir_module_module_group_user","ir_module_module group_user","model_ir_module_module","group_system",1,1,1,1
"access_ir_module_module_dependency_group_system","ir_module_module_dependency group_system","model_ir_module_module_dependency","group_system",1,1,1,1 "access_ir_module_module_dependency_group_system","ir_module_module_dependency group_system","model_ir_module_module_dependency","group_system",1,1,1,1
"access_ir_property_group_user","ir_property group_user","model_ir_property",,1,0,0,0 "access_ir_property_group_user","ir_property group_user","model_ir_property",,1,0,0,0
"access_ir_property_group_user_manager","ir_property group_manager","model_ir_property","base.group_partner_manager",1,1,1,1 "access_ir_property_group_user_manager","ir_property group_manager","model_ir_property","base.group_partner_manager",1,1,1,0
"access_ir_report_custom_group_system","ir_report_custom group_system","model_ir_report_custom",,1,0,0,0 "access_ir_report_custom_group_system","ir_report_custom group_system","model_ir_report_custom",,1,0,0,0
"access_ir_report_custom_fields_group_system","ir_report_custom_fields group_system","model_ir_report_custom_fields",,1,0,0,0 "access_ir_report_custom_fields_group_system","ir_report_custom_fields group_system","model_ir_report_custom_fields",,1,0,0,0
"access_ir_rule_group_user","ir_rule group_user","model_ir_rule",,1,0,0,0 "access_ir_rule_group_user","ir_rule group_user","model_ir_rule",,1,0,0,0


View File

@ -47,11 +47,12 @@
<rng:define name="assert"> <rng:define name="assert">
<rng:element name="assert"> <rng:element name="assert">
<rng:attribute name="model" /> <rng:attribute name="model" />
<rng:optional><rng:attribute name="search" /> </rng:optional> <rng:optional><rng:attribute name="search" /></rng:optional>
<rng:optional><rng:attribute name="count" /></rng:optional>
<rng:optional><rng:attribute name="string" /></rng:optional> <rng:optional><rng:attribute name="string" /></rng:optional>
<rng:optional><rng:attribute name="id" /></rng:optional> <rng:optional><rng:attribute name="id" /></rng:optional>
<rng:optional><rng:attribute name="severity" /></rng:optional> <rng:optional><rng:attribute name="severity" /></rng:optional>
<rng:oneOrMore> <rng:zeroOrMore>
<rng:element name="test"> <rng:element name="test">
<rng:attribute name="expr"/> <rng:attribute name="expr"/>
<rng:choice> <rng:choice>
@ -59,7 +60,7 @@
<rng:empty /> <rng:empty />
</rng:choice> </rng:choice>
</rng:element> </rng:element>
</rng:oneOrMore> </rng:zeroOrMore>
</rng:element> </rng:element>
</rng:define> </rng:define>

View File

@ -382,7 +382,8 @@ class OpenERPDispatcherException(Exception):
class OpenERPDispatcher: class OpenERPDispatcher:
def log(self, title, msg): def log(self, title, msg):
Logger().notifyChannel('%s' % title, LOG_DEBUG_RPC, pformat(msg)) if tools.config['log_level'] == logging.DEBUG_RPC:
Logger().notifyChannel('%s' % title, LOG_DEBUG_RPC, pformat(msg))
def dispatch(self, service_name, method, params): def dispatch(self, service_name, method, params):
try: try:
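Note: the dispatcher change above skips the pformat/log call entirely unless the configured log level is the RPC debug level, so the pretty-printing cost is not paid on every request. A minimal sketch of a similar guard using the standard logging module; the DEBUG_RPC level number is an assumption:

    import logging
    from pprint import pformat

    DEBUG_RPC = 8                      # assumed custom level below DEBUG
    logging.addLevelName(DEBUG_RPC, "DEBUG_RPC")
    log = logging.getLogger("rpc")

    def log_rpc(title, payload):
        # only format the payload when the logger would actually emit it
        if log.isEnabledFor(DEBUG_RPC):
            log.log(DEBUG_RPC, "%s: %s", title, pformat(payload))

    logging.basicConfig(level=logging.INFO)
    log_rpc("service call", {"service": "object", "method": "read"})  # skipped: INFO > DEBUG_RPC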

View File

@ -280,8 +280,11 @@ class expression(object):
c = context.copy() c = context.copy()
c['active_test'] = False c['active_test'] = False
res_ids = field_obj.name_search(cr, uid, right, [], operator, limit=None, context=c) res_ids = field_obj.name_search(cr, uid, right, [], operator, limit=None, context=c)
right = map(lambda x: x[0], res_ids) if not res_ids:
self.__exp[i] = (left, 'in', right) self.__exp[i] = ('id','=',0)
else:
right = map(lambda x: x[0], res_ids)
self.__exp[i] = (left, 'in', right)
else: else:
# other field type # other field type
# add the time part to datetime field when it's not there: # add the time part to datetime field when it's not there:
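Note: in the domain expression parser, a many2one term whose name_search() finds nothing used to become (left, 'in', []); the fix above turns it into the always-false leaf ('id', '=', 0), so the generated SQL stays valid and simply matches no rows. A small sketch of that rewrite step; the leaf shapes mimic the domain format but the helper itself is illustrative:

    def rewrite_name_search_leaf(left, matches):
        """matches: list of (id, display_name) pairs from name_search()."""
        if not matches:
            return ('id', '=', 0)               # always-false leaf, keeps the SQL valid
        return (left, 'in', [m[0] for m in matches])

    print(rewrite_name_search_leaf('partner_id', []))                             # ('id', '=', 0)
    print(rewrite_name_search_leaf('partner_id', [(7, 'Agrolait'), (9, 'ASUS')])) # ('partner_id', 'in', [7, 9])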

View File

@ -39,7 +39,7 @@ from psycopg2 import Binary
import warnings import warnings
import tools import tools
from tools.translate import _
def _symbol_set(symb): def _symbol_set(symb):
if symb == None or symb == False: if symb == None or symb == False:
@ -748,7 +748,7 @@ class related(function):
ids=[ids] ids=[ids]
objlst = obj.browse(cr, uid, ids) objlst = obj.browse(cr, uid, ids)
for data in objlst: for data in objlst:
t_id=None t_id = None
t_data = data t_data = data
relation = obj._name relation = obj._name
for i in range(len(self.arg)): for i in range(len(self.arg)):

View File

@ -49,6 +49,7 @@ import string
import sys import sys
import time import time
import traceback import traceback
import datetime
import types import types
import fields import fields
@ -58,6 +59,7 @@ from tools.translate import _
import copy import copy
import sys import sys
import operator
try: try:
from lxml import etree from lxml import etree
@ -86,7 +88,6 @@ def last_day_of_current_month():
def intersect(la, lb): def intersect(la, lb):
return filter(lambda x: x in lb, la) return filter(lambda x: x in lb, la)
class except_orm(Exception): class except_orm(Exception):
def __init__(self, name, value): def __init__(self, name, value):
self.name = name self.name = name
@ -447,7 +448,7 @@ class orm_template(object):
'name': k, 'name': k,
'field_description': f.string.replace("'", " "), 'field_description': f.string.replace("'", " "),
'ttype': f._type, 'ttype': f._type,
'relation': f._obj or 'NULL', 'relation': f._obj or '',
'view_load': (f.view_load and 1) or 0, 'view_load': (f.view_load and 1) or 0,
'select_level': tools.ustr(f.select or 0), 'select_level': tools.ustr(f.select or 0),
'readonly':(f.readonly and 1) or 0, 'readonly':(f.readonly and 1) or 0,
@ -544,6 +545,8 @@ class orm_template(object):
return browse_null() return browse_null()
def __export_row(self, cr, uid, row, fields, context=None): def __export_row(self, cr, uid, row, fields, context=None):
if context is None:
context = {}
def check_type(field_type): def check_type(field_type):
if field_type == 'float': if field_type == 'float':
@ -597,9 +600,9 @@ class orm_template(object):
cols = selection_field(self._inherits) cols = selection_field(self._inherits)
if cols and cols._type == 'selection': if cols and cols._type == 'selection':
sel_list = cols.selection sel_list = cols.selection
if type(sel_list) == type([]): if r and type(sel_list) == type([]):
r = [x[1] for x in sel_list if r==x[0]][0] r = [x[1] for x in sel_list if r==x[0]]
r = r and r[0] or False
if not r: if not r:
if f[i] in self._columns: if f[i] in self._columns:
r = check_type(self._columns[f[i]]._type) r = check_type(self._columns[f[i]]._type)
@ -626,7 +629,7 @@ class orm_template(object):
for rr in r : for rr in r :
if isinstance(rr.name, browse_record): if isinstance(rr.name, browse_record):
rr = rr.name rr = rr.name
rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id]) rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
rr_name = rr_name and rr_name[0] and rr_name[0][1] or '' rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
dt += tools.ustr(rr_name or '') + ',' dt += tools.ustr(rr_name or '') + ','
data[fpos] = dt[:-1] data[fpos] = dt[:-1]
@ -639,7 +642,7 @@ class orm_template(object):
i += 1 i += 1
if i == len(f): if i == len(f):
if isinstance(r, browse_record): if isinstance(r, browse_record):
r = self.pool.get(r._table_name).name_get(cr, uid, [r.id]) r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
r = r and r[0] and r[0][1] or '' r = r and r[0] and r[0][1] or ''
data[fpos] = tools.ustr(r or '') data[fpos] = tools.ustr(r or '')
return [data] + lines return [data] + lines
@@ -795,12 +798,12 @@ class orm_template(object):
                         module, xml_id = line[i].rsplit('.', 1)
                     else:
                         module, xml_id = current_module, line[i]
-                    id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
-                    res_res_id = ir_model_data_obj.read(cr, uid, [id],
-                            ['res_id'])
-                    if res_res_id:
-                        res_id = res_res_id[0]['res_id']
+                    record_id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
+                    ir_model_data = ir_model_data_obj.read(cr, uid, [record_id], ['res_id'])
+                    if ir_model_data:
+                        res_id = ir_model_data[0]['res_id']
+                    else:
+                        raise ValueError('No references to %s.%s' % (module, xml_id))
                     row[field[-1][:-3]] = res_id or False
                     continue
                 if (len(field) == len(prefix)+1) and \
@ -994,7 +997,8 @@ class orm_template(object):
msg = _('Insertion Failed! ' + e[1]) msg = _('Insertion Failed! ' + e[1])
return (-1, res, 'Line ' + str(counter) +' : ' + msg, '' ) return (-1, res, 'Line ' + str(counter) +' : ' + msg, '' )
#Raising Uncaught exception #Raising Uncaught exception
raise return (-1, res, 'Line ' + str(counter) +' : ' + str(e), '' )
for lang in translate: for lang in translate:
context2 = context.copy() context2 = context.copy()
context2['lang'] = lang context2['lang'] = lang
@ -1003,6 +1007,8 @@ class orm_template(object):
data = pickle.load(file(config.get('import_partial'))) data = pickle.load(file(config.get('import_partial')))
data[filename] = initial_size - len(datas) + original_value data[filename] = initial_size - len(datas) + original_value
pickle.dump(data, file(config.get('import_partial'),'wb')) pickle.dump(data, file(config.get('import_partial'),'wb'))
if context.get('defer_parent_store_computation'):
self._parent_store_compute(cr)
cr.commit() cr.commit()
#except Exception, e: #except Exception, e:
@ -1016,6 +1022,8 @@ class orm_template(object):
# #
# TODO: Send a request with the result and multi-thread ! # TODO: Send a request with the result and multi-thread !
# #
if context.get('defer_parent_store_computation'):
self._parent_store_compute(cr)
return (done, 0, 0, 0) return (done, 0, 0, 0)
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'): def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
@ -1108,8 +1116,7 @@ class orm_template(object):
if getattr(self._columns[f], arg, None): if getattr(self._columns[f], arg, None):
res[f][arg] = getattr(self._columns[f], arg) res[f][arg] = getattr(self._columns[f], arg)
#TODO: optimize res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US', self._columns[f].string)
res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
if res_trans: if res_trans:
res[f]['string'] = res_trans res[f]['string'] = res_trans
help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US') help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
@ -1724,7 +1731,8 @@ class orm_template(object):
for lang in langs: for lang in langs:
for field in vals: for field in vals:
if field in self._columns: if field in self._columns:
self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field]) src = self._columns[field].string
self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
for table in self._inherits: for table in self._inherits:
cols = intersect(self._inherit_fields.keys(), vals) cols = intersect(self._inherit_fields.keys(), vals)
if cols: if cols:
@ -2137,6 +2145,8 @@ class orm(orm_template):
return (tables, where_clause) return (tables, where_clause)
def _parent_store_compute(self, cr): def _parent_store_compute(self, cr):
if not self._parent_store:
return
logger = netsvc.Logger() logger = netsvc.Logger()
logger.notifyChannel('orm', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, )) logger.notifyChannel('orm', netsvc.LOG_INFO, 'Computing parent left and right for table %s...' % (self._table, ))
def browse_rec(root, pos=0): def browse_rec(root, pos=0):
@ -2225,8 +2235,8 @@ class orm(orm_template):
logger.notifyChannel('orm', netsvc.LOG_ERROR, 'create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)' % (self._table, )) logger.notifyChannel('orm', netsvc.LOG_ERROR, 'create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)' % (self._table, ))
if 'parent_right' not in self._columns: if 'parent_right' not in self._columns:
logger.notifyChannel('orm', netsvc.LOG_ERROR, 'create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)' % (self._table, )) logger.notifyChannel('orm', netsvc.LOG_ERROR, 'create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)' % (self._table, ))
if self._columns[self._parent_name].ondelete<>'cascade': if self._columns[self._parent_name].ondelete != 'cascade':
logger.notifyChannel('orm', netsvc.LOG_ERROR, "the columns %s on object must be set as ondelete='cascasde'" % (self._name, self._parent_name)) logger.notifyChannel('orm', netsvc.LOG_ERROR, "The column %s on object %s must be set as ondelete='cascade'" % (self._parent_name, self._name))
cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,)) cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,)) cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
cr.commit() cr.commit()
@ -2472,6 +2482,8 @@ class orm(orm_template):
cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (self._table,)) cr.execute("SELECT relname FROM pg_class WHERE relkind in ('r','v') AND relname=%s", (self._table,))
create = not bool(cr.fetchone()) create = not bool(cr.fetchone())
cr.commit() # start a new transaction
for (key, con, _) in self._sql_constraints: for (key, con, _) in self._sql_constraints:
conname = '%s_%s' % (self._table, key) conname = '%s_%s' % (self._table, key)
cr.execute("SELECT conname FROM pg_constraint where conname=%s", (conname,)) cr.execute("SELECT conname FROM pg_constraint where conname=%s", (conname,))
@ -2481,6 +2493,7 @@ class orm(orm_template):
cr.commit() cr.commit()
except: except:
logger.notifyChannel('orm', netsvc.LOG_WARNING, 'unable to add \'%s\' constraint on table %s !\n If you want to have it, you should update the records and execute manually:\nALTER table %s ADD CONSTRAINT %s_%s %s' % (con, self._table, self._table, self._table, key, con,)) logger.notifyChannel('orm', netsvc.LOG_WARNING, 'unable to add \'%s\' constraint on table %s !\n If you want to have it, you should update the records and execute manually:\nALTER table %s ADD CONSTRAINT %s_%s %s' % (con, self._table, self._table, self._table, key, con,))
cr.rollback()
if create: if create:
if hasattr(self, "_sql"): if hasattr(self, "_sql"):
@ -2491,6 +2504,7 @@ class orm(orm_template):
cr.commit() cr.commit()
if store_compute: if store_compute:
self._parent_store_compute(cr) self._parent_store_compute(cr)
cr.commit()
return todo_end return todo_end
def __init__(self, cr): def __init__(self, cr):
@ -2646,11 +2660,16 @@ class orm(orm_template):
for i in range(len(field_value)): for i in range(len(field_value)):
field_value2.append({}) field_value2.append({})
for field2 in field_value[i]: for field2 in field_value[i]:
if obj._columns[field2]._type in ('many2one', 'one2one'): if field2 in obj._columns.keys() and obj._columns[field2]._type in ('many2one', 'one2one'):
obj2 = self.pool.get(obj._columns[field2]._obj) obj2 = self.pool.get(obj._columns[field2]._obj)
if not obj2.search(cr, uid, if not obj2.search(cr, uid,
[('id', '=', field_value[i][field2])]): [('id', '=', field_value[i][field2])]):
continue continue
elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type in ('many2one', 'one2one'):
obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
if not obj2.search(cr, uid,
[('id', '=', field_value[i][field2])]):
continue
# TODO add test for many2many and one2many # TODO add test for many2many and one2many
field_value2[i][field2] = field_value[i][field2] field_value2[i][field2] = field_value[i][field2]
field_value = field_value2 field_value = field_value2
@ -3107,6 +3126,24 @@ class orm(orm_template):
self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context) self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
# No direct update of parent_left/right
vals.pop('parent_left', None)
vals.pop('parent_right', None)
parents_changed = []
if self._parent_store and (self._parent_name in vals):
# The parent_left/right computation may take up to
# 5 seconds. No need to recompute the values if the
# parent is the same. Get the current value of the parent
base_query = 'SELECT id FROM %s WHERE id IN %%s AND %s' % \
(self._table, self._parent_name)
params = (tuple(ids),)
parent_val = vals[self._parent_name]
if parent_val:
cr.execute(base_query + " != %s", params + (parent_val,))
else:
cr.execute(base_query + " IS NULL", params)
parents_changed = map(operator.itemgetter(0), cr.fetchall())
upd0 = [] upd0 = []
upd1 = [] upd1 = []
@@ -3196,41 +3233,48 @@ class orm(orm_template):
                 self.pool.get(table).write(cr, user, nids, v, context)
         self._validate(cr, user, ids, context)
-        # TODO: use _order to set dest at the right position and not first node of parent
-        if self._parent_store and (self._parent_name in vals):
+        # TODO: use _order to set dest at the right position and not first node of parent
+        # We can't defer parent_store computation because the stored function
+        # fields that are computer may refer (directly or indirectly) to
+        # parent_left/right (via a child_of domain)
+        if parents_changed:
             if self.pool._init:
                 self.pool._init_parent[self._name]=True
             else:
-                for id in ids:
-                    # Find Position of the element
-                    if vals[self._parent_name]:
-                        cr.execute('select parent_left,parent_right,id from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (vals[self._parent_name],))
-                    else:
-                        cr.execute('select parent_left,parent_right,id from '+self._table+' where '+self._parent_name+' is null order by '+(self._parent_order or self._order))
-                    result_p = cr.fetchall()
-                    position = None
-                    for (pleft,pright,pid) in result_p:
-                        if pid == id:
-                            break
-                        position = pright+1
-                    # It's the first node of the parent: position = parent_left+1
-                    if not position:
-                        if not vals[self._parent_name]:
-                            position = 1
-                        else:
-                            cr.execute('select parent_left from '+self._table+' where id=%s', (vals[self._parent_name],))
-                            position = cr.fetchone()[0]+1
-                    # We have the new position !
-                    cr.execute('select parent_left,parent_right from '+self._table+' where id=%s', (id,))
-                    pleft,pright = cr.fetchone()
+                order = self._parent_order or self._order
+                parent_val = vals[self._parent_name]
+                if parent_val:
+                    clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
+                else:
+                    clause, params = '%s IS NULL' % (self._parent_name,), ()
+                cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, order), params)
+                parents = cr.fetchall()
+                for id in parents_changed:
+                    cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
+                    pleft, pright = cr.fetchone()
                     distance = pright - pleft + 1
-                    if position>pleft and position<=pright:
+                    # Find Position of the element
+                    position = None
+                    for (parent_pright, parent_id) in parents:
+                        if parent_id == id:
+                            break
+                        position = parent_pright+1
+                    # It's the first node of the parent
+                    if not position:
+                        if not parent_val:
+                            position = 1
+                        else:
+                            cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
+                            position = cr.fetchone()[0]+1
+                    if pleft < position <= pright:
                         raise except_orm(_('UserError'), _('Recursivity Detected.'))
-                    if pleft<position:
+                    if pleft < position:
                         cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
                         cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
                         cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft,position-pleft, pleft, pright))
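Note: the rewritten write() block above only recomputes the nested-set index (parent_left/parent_right) for records whose parent actually changed (parents_changed), instead of for every written id. The idea behind that index: each node stores an interval that encloses the intervals of all of its descendants, which is what lets child_of domains become a simple range test. A compact sketch of assigning such intervals, independent of the ORM (the tree data is illustrative):

    def compute_parent_store(children, root, left=1):
        """Assign (parent_left, parent_right) so that a node's interval
        encloses the intervals of all of its descendants."""
        result = {}
        right = left
        for child in children.get(root, []):
            sub, right = compute_parent_store(children, child, right + 1)
            result.update(sub)
        result[root] = (left, right + 1)
        return result, right + 1

    # toy category tree: 1 -> (2, 3), 2 -> (4)
    tree = {1: [2, 3], 2: [4]}
    intervals, _ = compute_parent_store(tree, 1)
    print(intervals)        # {4: (3, 4), 2: (2, 5), 3: (6, 7), 1: (1, 8)}
    left, right = intervals[1]
    print(left < intervals[4][0] <= intervals[4][1] <= right)   # descendant test: True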
@ -3411,7 +3455,7 @@ class orm(orm_template):
self.check_access_rule(cr, user, [id_new], 'create', context=context) self.check_access_rule(cr, user, [id_new], 'create', context=context)
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority) upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
if self._parent_store: if self._parent_store and not context.get('defer_parent_store_computation'):
if self.pool._init: if self.pool._init:
self.pool._init_parent[self._name]=True self.pool._init_parent[self._name]=True
else: else:
@ -3639,8 +3683,9 @@ class orm(orm_template):
* '&' (default), '|', '!' * '&' (default), '|', '!'
""" """
if not context: if context is None:
context = {} context = {}
self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
# compute the where, order by, limit and offset clauses # compute the where, order by, limit and offset clauses
(qu1, qu2, tables) = self._where_calc(cr, user, args, context=context) (qu1, qu2, tables) = self._where_calc(cr, user, args, context=context)
dom = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context) dom = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)
@ -3750,9 +3795,9 @@ class orm(orm_template):
:return: dictionary containing all the field values :return: dictionary containing all the field values
""" """
if not context: if context is None:
context = {} context = {}
if not default: if default is None:
default = {} default = {}
if 'state' not in default: if 'state' not in default:
if 'state' in self._defaults: if 'state' in self._defaults:
@ -3761,7 +3806,11 @@ class orm(orm_template):
else: else:
default['state'] = self._defaults['state'] default['state'] = self._defaults['state']
data = self.read(cr, uid, [id], context=context)[0] context_wo_lang = context
if 'lang' in context:
del context_wo_lang['lang']
data = self.read(cr, uid, [id], context=context_wo_lang)[0]
fields = self.fields_get(cr, uid, context=context) fields = self.fields_get(cr, uid, context=context)
trans_data=[] trans_data=[]
for f in fields: for f in fields:
@ -3796,17 +3845,14 @@ class orm(orm_template):
trans_obj = self.pool.get('ir.translation') trans_obj = self.pool.get('ir.translation')
#TODO: optimize translations #TODO: optimize translations
trans_name=''
for f in fields: for f in fields:
trans_flag=True trans_name = ''
if f in self._columns and self._columns[f].translate: if f in self._columns and self._columns[f].translate:
trans_name=self._name+","+f trans_name = self._name+","+f
elif f in self._inherit_fields and self._inherit_fields[f][2].translate: elif f in self._inherit_fields and self._inherit_fields[f][2].translate:
trans_name=self._inherit_fields[f][0]+","+f trans_name = self._inherit_fields[f][0] + "," + f
else:
trans_flag=False
if trans_flag: if trans_name:
trans_ids = trans_obj.search(cr, uid, [ trans_ids = trans_obj.search(cr, uid, [
('name', '=', trans_name), ('name', '=', trans_name),
('res_id','=',data['id']) ('res_id','=',data['id'])

View File

@ -253,7 +253,8 @@ class osv(osv_base, orm.orm):
for c in cls.__dict__.get(s, []): for c in cls.__dict__.get(s, []):
exist = False exist = False
for c2 in range(len(new)): for c2 in range(len(new)):
if new[c2][2]==c[2]: #For _constraints, we should check field and methods as well
if new[c2][2]==c[2] and new[c2][0]==c[0]:
new[c2] = c new[c2] = c
exist = True exist = True
break break
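Note: the inheritance fix above stops a subclass constraint from overwriting an unrelated parent constraint that merely shares the same field list: entries are now matched on both the checking method (c[0]) and the fields (c[2]). A sketch of that merge rule on plain (function, message, fields) tuples, outside the ORM:

    def merge_constraints(parent, child):
        """Merge _constraints-style lists; a child entry replaces a parent entry
        only when both the function and the field list match."""
        merged = list(parent)
        for entry in child:
            for i, existing in enumerate(merged):
                if existing[2] == entry[2] and existing[0] == entry[0]:
                    merged[i] = entry
                    break
            else:
                merged.append(entry)
        return merged

    def check_a(*args): return True
    def check_b(*args): return True

    parent = [(check_a, 'bad A', ['name'])]
    child = [(check_b, 'bad B', ['name'])]          # same fields, different method
    print(len(merge_constraints(parent, child)))    # 2: both constraints are kept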

View File

@ -21,8 +21,8 @@
############################################################################## ##############################################################################
name = 'openerp-server' name = 'openerp-server'
version = '5.2dev' version = '6.0dev'
major_version = '5.2' major_version = '6.0'
description = 'OpenERP Server' description = 'OpenERP Server'
long_desc = '''\ long_desc = '''\
OpenERP is a complete ERP and CRM. The main features are accounting (analytic OpenERP is a complete ERP and CRM. The main features are accounting (analytic

View File

@ -36,6 +36,7 @@ from pychart import *
import misc import misc
import cStringIO import cStringIO
from lxml import etree from lxml import etree
from tools.translate import _
class external_pdf(render.render): class external_pdf(render.render):
def __init__(self, pdf): def __init__(self, pdf):

View File

@ -35,7 +35,7 @@ class report(object):
for node in root_node: for node in root_node:
if node.tag == etree.Comment: if node.tag == etree.Comment:
continue continue
if node.text: if node.text or node.tail:
def _sub3(txt): def _sub3(txt):
n = node n = node
while n.tag != txt.group(2): while n.tag != txt.group(2):
@ -71,9 +71,9 @@ class report(object):
n = n.getparent() n = n.getparent()
n.set('rml_loop', txt.group(2)) n.set('rml_loop', txt.group(2))
return '[['+txt.group(1)+"''"+txt.group(4)+']]' return '[['+txt.group(1)+"''"+txt.group(4)+']]'
t = _regex1.sub(_sub1, node.text) t = _regex1.sub(_sub1, node.text or node.tail)
if t == " ": if t == " ":
t = _regex11.sub(_sub1, node.text) t = _regex11.sub(_sub1, node.text or node.tail)
t = _regex3.sub(_sub3, t) t = _regex3.sub(_sub3, t)
node.text = _regex2.sub(_sub2, t) node.text = _regex2.sub(_sub2, t)
self.preprocess_rml(node,type) self.preprocess_rml(node,type)

View File

@ -52,13 +52,20 @@ class _rml_styles(object,):
def __init__(self, nodes, localcontext): def __init__(self, nodes, localcontext):
self.localcontext = localcontext self.localcontext = localcontext
self.styles = {} self.styles = {}
self.styles_obj = {}
self.names = {} self.names = {}
self.table_styles = {} self.table_styles = {}
self.default_style = reportlab.lib.styles.getSampleStyleSheet()
for node in nodes: for node in nodes:
for style in node.findall('blockTableStyle'): for style in node.findall('blockTableStyle'):
self.table_styles[style.get('id')] = self._table_style_get(style) self.table_styles[style.get('id')] = self._table_style_get(style)
for style in node.findall('paraStyle'): for style in node.findall('paraStyle'):
self.styles[style.get('name')] = self._para_style_update(style) sname = style.get('name')
self.styles[sname] = self._para_style_update(style)
self.styles_obj[sname] = reportlab.lib.styles.ParagraphStyle(sname, self.default_style["Normal"], **self.styles[sname])
for variable in node.findall('initialize'): for variable in node.findall('initialize'):
for name in variable.findall('name'): for name in variable.findall('name'):
self.names[ name.get('id')] = name.get('value') self.names[ name.get('id')] = name.get('value')
@ -126,17 +133,19 @@ class _rml_styles(object,):
def para_style_get(self, node): def para_style_get(self, node):
style = False style = False
if node.get('style'): sname = node.get('style')
if node.get('style') in self.styles: if sname:
styles = reportlab.lib.styles.getSampleStyleSheet() if sname in self.styles_obj:
sname = node.get('style') style = self.styles_obj[sname]
style = reportlab.lib.styles.ParagraphStyle(sname, styles["Normal"], **self.styles[sname])
else: else:
sys.stderr.write('Warning: style not found, %s - setting default!\n' % (node.get('style'),) ) sys.stderr.write('Warning: style not found, %s - setting default!\n' % (node.get('style'),) )
if not style: if not style:
styles = reportlab.lib.styles.getSampleStyleSheet() style = self.default_style['Normal']
style = styles['Normal'] para_update = self._para_style_update(node)
style.__dict__.update(self._para_style_update(node)) if para_update:
# update style only is necessary
style = copy.deepcopy(style)
style.__dict__.update(para_update)
return style return style
class _rml_doc(object): class _rml_doc(object):
@ -565,7 +574,7 @@ class _rml_flowable(object):
def _illustration(self, node): def _illustration(self, node):
class Illustration(platypus.flowables.Flowable): class Illustration(platypus.flowables.Flowable):
def __init__(self, node, localcontext, styles, self2): def __init__(self, node, localcontext, styles, self2):
self.localcontext = localcontext self.localcontext = localcontext.copy()
self.node = node self.node = node
self.styles = styles self.styles = styles
self.width = utils.unit_get(node.get('width')) self.width = utils.unit_get(node.get('width'))

View File

@ -441,7 +441,7 @@ class report_sxw(report_rml, preprocess.report):
rml_parser.set_context(objs, data, ids, report_xml.report_type) rml_parser.set_context(objs, data, ids, report_xml.report_type)
processed_rml = etree.XML(rml) processed_rml = etree.XML(rml)
if report_xml.header: if report_xml.header:
rml_parser._add_header(processed_rml) rml_parser._add_header(processed_rml, self.header)
processed_rml = self.preprocess_rml(processed_rml,report_xml.report_type) processed_rml = self.preprocess_rml(processed_rml,report_xml.report_type)
if rml_parser.logo: if rml_parser.logo:
logo = base64.decodestring(rml_parser.logo) logo = base64.decodestring(rml_parser.logo)

View File

@ -283,8 +283,8 @@ class db(netsvc.ExportService):
## Not True: in fact, check if connection to database is possible. The database may exists ## Not True: in fact, check if connection to database is possible. The database may exists
return bool(sql_db.db_connect(db_name)) return bool(sql_db.db_connect(db_name))
def exp_list(self): def exp_list(self, document=False):
if not tools.config['list_db']: if not tools.config['list_db'] and not document:
raise Exception('AccessDenied') raise Exception('AccessDenied')
db = sql_db.db_connect('template1') db = sql_db.db_connect('template1')

View File

@ -123,7 +123,6 @@ class configmanager(object):
parser.add_option("--assert-exit-level", dest='assert_exit_level', type="choice", choices=self._LOGLEVELS.keys(), parser.add_option("--assert-exit-level", dest='assert_exit_level', type="choice", choices=self._LOGLEVELS.keys(),
help="specify the level at which a failed assertion will stop the server. Accepted values: %s" % (self._LOGLEVELS.keys(),)) help="specify the level at which a failed assertion will stop the server. Accepted values: %s" % (self._LOGLEVELS.keys(),))
parser.add_option('--price_accuracy', dest='price_accuracy', default='2', help='deprecated since v6.0, replaced by module decimal_precision') parser.add_option('--price_accuracy', dest='price_accuracy', default='2', help='deprecated since v6.0, replaced by module decimal_precision')
parser.add_option('--no-database-list', action="store_false", dest='list_db', default=True, help="disable the ability to return the list of databases")
if self.has_ssl: if self.has_ssl:
group = optparse.OptionGroup(parser, "SSL Configuration") group = optparse.OptionGroup(parser, "SSL Configuration")
@ -161,8 +160,7 @@ class configmanager(object):
group.add_option('--email-from', dest='email_from', default='', help='specify the SMTP email address for sending email') group.add_option('--email-from', dest='email_from', default='', help='specify the SMTP email address for sending email')
group.add_option('--smtp', dest='smtp_server', default='', help='specify the SMTP server for sending email') group.add_option('--smtp', dest='smtp_server', default='', help='specify the SMTP server for sending email')
group.add_option('--smtp-port', dest='smtp_port', default='25', help='specify the SMTP port', type="int") group.add_option('--smtp-port', dest='smtp_port', default='25', help='specify the SMTP port', type="int")
if self.has_ssl: group.add_option('--smtp-ssl', dest='smtp_ssl', default='', help='specify the SMTP server support SSL or not')
group.add_option('--smtp-ssl', dest='smtp_ssl', default='', help='specify the SMTP server support SSL or not')
group.add_option('--smtp-user', dest='smtp_user', default='', help='specify the SMTP username for sending email') group.add_option('--smtp-user', dest='smtp_user', default='', help='specify the SMTP username for sending email')
group.add_option('--smtp-password', dest='smtp_password', default='', help='specify the SMTP password for sending email') group.add_option('--smtp-password', dest='smtp_password', default='', help='specify the SMTP password for sending email')
parser.add_option_group(group) parser.add_option_group(group)
@ -199,6 +197,13 @@ class configmanager(object):
action="callback", callback=self._check_addons_path, nargs=1, type="string") action="callback", callback=self._check_addons_path, nargs=1, type="string")
parser.add_option_group(group) parser.add_option_group(group)
security = optparse.OptionGroup(parser, 'Security-related options')
security.add_option('--no-database-list', action="store_false", dest='list_db', default=True, help="disable the ability to return the list of databases")
security.add_option('--enable-code-actions', action='store_true',
dest='server_actions_allow_code', default=False,
help='Enables server actions of state "code". Warning, this is a security risk.')
parser.add_option_group(security)
def parse_config(self): def parse_config(self):
(opt, args) = self.parser.parse_args() (opt, args) = self.parser.parse_args()
@ -244,13 +249,13 @@ class configmanager(object):
self.options['pidfile'] = False self.options['pidfile'] = False
keys = ['interface', 'port', 'db_name', 'db_user', 'db_password', 'db_host', keys = ['interface', 'port', 'db_name', 'db_user', 'db_password', 'db_host',
'db_port', 'list_db', 'logfile', 'pidfile', 'smtp_port', 'cache_timeout', 'db_port', 'list_db', 'logfile', 'pidfile', 'smtp_port', 'cache_timeout','smtp_ssl',
'email_from', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy', 'email_from', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy',
'netinterface', 'netport', 'db_maxconn', 'import_partial', 'addons_path', 'netinterface', 'netport', 'db_maxconn', 'import_partial', 'addons_path',
'netrpc', 'xmlrpc', 'syslog', 'without_demo', 'timezone',] 'netrpc', 'xmlrpc', 'syslog', 'without_demo', 'timezone',]
if self.has_ssl: if self.has_ssl:
keys.extend(['smtp_ssl', 'secure_cert_file', 'secure_pkey_file']) keys.extend(['secure_cert_file', 'secure_pkey_file'])
keys.append('secure') keys.append('secure')
for arg in keys: for arg in keys:
@ -258,7 +263,8 @@ class configmanager(object):
self.options[arg] = getattr(opt, arg) self.options[arg] = getattr(opt, arg)
keys = ['language', 'translate_out', 'translate_in', 'debug_mode', keys = ['language', 'translate_out', 'translate_in', 'debug_mode',
'stop_after_init', 'logrotate', 'without_demo', 'netrpc', 'xmlrpc', 'syslog', 'list_db'] 'stop_after_init', 'logrotate', 'without_demo', 'netrpc', 'xmlrpc', 'syslog',
'list_db', 'server_actions_allow_code']
if self.has_ssl and not self.options['secure']: if self.has_ssl and not self.options['secure']:
keys.append('secure') keys.append('secure')


@ -26,11 +26,9 @@ import os.path
import pickle import pickle
import re import re
import sys import sys
import time
from datetime import datetime from datetime import datetime
from lxml import etree from lxml import etree
import ir import ir
import misc import misc
import netsvc import netsvc
@ -79,6 +77,7 @@ def _eval_xml(self,node, pool, cr, uid, idref, context=None):
return f_val return f_val
a_eval = node.get('eval','') a_eval = node.get('eval','')
if a_eval: if a_eval:
import time
idref2 = idref.copy() idref2 = idref.copy()
idref2['time'] = time idref2['time'] = time
idref2['DateTime'] = datetime idref2['DateTime'] = datetime
@ -253,6 +252,14 @@ form: module.record_id""" % (xml_id,)
self.pool.get(d_model).unlink(cr, self.uid, ids) self.pool.get(d_model).unlink(cr, self.uid, ids)
self.pool.get('ir.model.data')._unlink(cr, self.uid, d_model, ids) self.pool.get('ir.model.data')._unlink(cr, self.uid, d_model, ids)
def _remove_ir_values(self, cr, name, value, model):
ir_value_ids = self.pool.get('ir.values').search(cr, self.uid, [('name','=',name),('value','=',value),('model','=',model)])
if ir_value_ids:
self.pool.get('ir.values').unlink(cr, self.uid, ir_value_ids)
self.pool.get('ir.model.data')._unlink(cr, self.uid, 'ir.values', ir_value_ids)
return True
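
As a rough illustration of what the new _remove_ir_values helper is for (the real method works through self.pool on the ir.values and ir.model.data tables; the list below is a hypothetical in-memory stand-in):

    # Hypothetical stand-in for ir.values rows, to show the matching logic only.
    ir_values = [
        {'id': 1, 'name': 'Invoice report', 'value': 'ir.actions.report.xml,42', 'model': 'account.invoice'},
        {'id': 2, 'name': 'Other action',   'value': 'ir.actions.wizard,7',      'model': 'res.partner'},
    ]

    def remove_ir_values(rows, name, value, model):
        # drop every row matching the (name, value, model) triple, keep the rest
        return [r for r in rows if (r['name'], r['value'], r['model']) != (name, value, model)]

    ir_values = remove_ir_values(ir_values, 'Invoice report', 'ir.actions.report.xml,42', 'account.invoice')
    assert [r['id'] for r in ir_values] == [2]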
def _tag_report(self, cr, rec, data_node=None): def _tag_report(self, cr, rec, data_node=None):
res = {} res = {}
for dest,f in (('name','string'),('model','model'),('report_name','name')): for dest,f in (('name','string'),('model','model'),('report_name','name')):
@ -262,16 +269,17 @@ form: module.record_id""" % (xml_id,)
if rec.get(field): if rec.get(field):
res[dest] = rec.get(field).encode('utf8') res[dest] = rec.get(field).encode('utf8')
if rec.get('auto'): if rec.get('auto'):
res['auto'] = eval(rec.get('auto')) res['auto'] = eval(rec.get('auto','False'))
if rec.get('sxw'): if rec.get('sxw'):
sxw_content = misc.file_open(rec.get('sxw')).read() sxw_content = misc.file_open(rec.get('sxw')).read()
res['report_sxw_content'] = sxw_content res['report_sxw_content'] = sxw_content
if rec.get('header'): if rec.get('header'):
res['header'] = eval(rec.get('header')) res['header'] = eval(rec.get('header','False'))
if rec.get('report_type'): if rec.get('report_type'):
res['report_type'] = rec.get('report_type') res['report_type'] = rec.get('report_type')
res['multi'] = rec.get('multi') and eval(rec.get('multi')) res['multi'] = rec.get('multi') and eval(rec.get('multi','False'))
xml_id = rec.get('id','').encode('utf8') xml_id = rec.get('id','').encode('utf8')
self._test_xml_id(xml_id) self._test_xml_id(xml_id)
@ -291,12 +299,16 @@ form: module.record_id""" % (xml_id,)
id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.report.xml", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode) id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.report.xml", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id) self.idref[xml_id] = int(id)
if not rec.get('menu') or eval(rec.get('menu','')): if not rec.get('menu') or eval(rec.get('menu','False')):
keyword = str(rec.get('keyword', 'client_print_multi')) keyword = str(rec.get('keyword', 'client_print_multi'))
keys = [('action',keyword),('res_model',res['model'])] keys = [('action',keyword),('res_model',res['model'])]
value = 'ir.actions.report.xml,'+str(id) value = 'ir.actions.report.xml,'+str(id)
replace = rec.get('replace', True) replace = rec.get('replace', True)
self.pool.get('ir.model.data').ir_set(cr, self.uid, 'action', keyword, res['name'], [res['model']], value, replace=replace, isobject=True, xml_id=xml_id) self.pool.get('ir.model.data').ir_set(cr, self.uid, 'action', keyword, res['name'], [res['model']], value, replace=replace, isobject=True, xml_id=xml_id)
elif self.mode=='update' and eval(rec.get('menu','False'))==False:
# Special check for report having attribute menu=False on update
value = 'ir.actions.report.xml,'+str(id)
self._remove_ir_values(cr, res['name'], value, res['model'])
return False return False
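
The eval(rec.get(attr, 'False')) pattern introduced above matters because eval('') is a SyntaxError; a minimal sketch with a hypothetical attribute dict:

    rec = {}                                         # hypothetical: report tag with no 'auto' attribute
    assert eval(rec.get('auto', 'False')) is False   # textual 'False' default evaluates cleanly
    # eval(rec.get('auto', '')) would raise SyntaxError instead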
def _tag_function(self, cr, rec, data_node=None): def _tag_function(self, cr, rec, data_node=None):
@ -313,7 +325,7 @@ form: module.record_id""" % (xml_id,)
name = rec.get("name",'').encode('utf8') name = rec.get("name",'').encode('utf8')
xml_id = rec.get('id','').encode('utf8') xml_id = rec.get('id','').encode('utf8')
self._test_xml_id(xml_id) self._test_xml_id(xml_id)
multi = rec.get('multi','') and eval(rec.get('multi','')) multi = rec.get('multi','') and eval(rec.get('multi','False'))
res = {'name': string, 'wiz_name': name, 'multi': multi, 'model': model} res = {'name': string, 'wiz_name': name, 'multi': multi, 'model': model}
if rec.get('groups'): if rec.get('groups'):
@ -332,12 +344,16 @@ form: module.record_id""" % (xml_id,)
id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.wizard", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode) id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.wizard", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id) self.idref[xml_id] = int(id)
# ir_set # ir_set
if (not rec.get('menu') or eval(rec.get('menu',''))) and id: if (not rec.get('menu') or eval(rec.get('menu','False'))) and id:
keyword = str(rec.get('keyword','') or 'client_action_multi') keyword = str(rec.get('keyword','') or 'client_action_multi')
keys = [('action',keyword),('res_model',model)] keys = [('action',keyword),('res_model',model)]
value = 'ir.actions.wizard,'+str(id) value = 'ir.actions.wizard,'+str(id)
replace = rec.get("replace",'') or True replace = rec.get("replace",'') or True
self.pool.get('ir.model.data').ir_set(cr, self.uid, 'action', keyword, string, [model], value, replace=replace, isobject=True, xml_id=xml_id) self.pool.get('ir.model.data').ir_set(cr, self.uid, 'action', keyword, string, [model], value, replace=replace, isobject=True, xml_id=xml_id)
elif self.mode=='update' and (rec.get('menu') and eval(rec.get('menu','False'))==False):
# Special check for wizard having attribute menu=False on update
value = 'ir.actions.wizard,'+str(id)
self._remove_ir_values(cr, string, value, model)
def _tag_url(self, cr, rec, data_node=None): def _tag_url(self, cr, rec, data_node=None):
url = rec.get("string",'').encode('utf8') url = rec.get("string",'').encode('utf8')
@ -351,12 +367,16 @@ form: module.record_id""" % (xml_id,)
id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.url", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode) id = self.pool.get('ir.model.data')._update(cr, self.uid, "ir.actions.url", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id) self.idref[xml_id] = int(id)
# ir_set # ir_set
if (not rec.get('menu') or eval(rec.get('menu',''))) and id: if (not rec.get('menu') or eval(rec.get('menu','False'))) and id:
keyword = str(rec.get('keyword','') or 'client_action_multi') keyword = str(rec.get('keyword','') or 'client_action_multi')
keys = [('action',keyword)] keys = [('action',keyword)]
value = 'ir.actions.url,'+str(id) value = 'ir.actions.url,'+str(id)
replace = rec.get("replace",'') or True replace = rec.get("replace",'') or True
self.pool.get('ir.model.data').ir_set(cr, self.uid, 'action', keyword, url, ["ir.actions.url"], value, replace=replace, isobject=True, xml_id=xml_id) self.pool.get('ir.model.data').ir_set(cr, self.uid, 'action', keyword, url, ["ir.actions.url"], value, replace=replace, isobject=True, xml_id=xml_id)
elif self.mode=='update' and (rec.get('menu') and eval(rec.get('menu','False'))==False):
# Special check for URL having attribute menu=False on update
value = 'ir.actions.url,'+str(id)
self._remove_ir_values(cr, url, value, "ir.actions.url")
def _tag_act_window(self, cr, rec, data_node=None): def _tag_act_window(self, cr, rec, data_node=None):
name = rec.get('name','').encode('utf-8') name = rec.get('name','').encode('utf-8')
@ -379,11 +399,12 @@ form: module.record_id""" % (xml_id,)
uid = self.uid uid = self.uid
# def ref() added because, if context has ref('id'), eval will use this ref # def ref() added because, if context has ref('id'), eval will use this ref
active_id=str("active_id") # for further reference in client/bin/tools/__init__.py active_id = str("active_id") # for further reference in client/bin/tools/__init__.py
def ref(str_id): def ref(str_id):
return self.id_get(cr, None, str_id) return self.id_get(cr, None, str_id)
context = eval(context) context = eval(context)
# domain = eval(domain) # XXX need to test this line -> uid, active_id, active_ids, ...
res = { res = {
'name': name, 'name': name,


@ -820,6 +820,22 @@ class cache(object):
def to_xml(s): def to_xml(s):
return s.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;') return s.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')
def get_encodings():
yield 'utf8'
from locale import getpreferredencoding
prefenc = getpreferredencoding()
if prefenc:
yield prefenc
prefenc = {
'latin1': 'latin9',
'iso-8859-1': 'iso8859-15',
'cp1252': '1252',
}.get(prefenc.lower())
if prefenc:
yield prefenc
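
A quick usage sketch of the generator above (the exact output depends entirely on the machine's locale):

    # On a UTF-8 locale this typically yields ['utf8', 'UTF-8']; on a latin-1 locale
    # it also yields the wider 'latin9' / 'iso8859-15' alias as a last resort.
    candidates = list(get_encodings())
    assert candidates[0] == 'utf8'     # utf-8 is always tried first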
def ustr(value): def ustr(value):
"""This method is similar to the builtin `str` method, except """This method is similar to the builtin `str` method, except
it will return Unicode string. it will return Unicode string.
@ -829,29 +845,25 @@ def ustr(value):
@rtype: unicode @rtype: unicode
@return: unicode string @return: unicode string
""" """
orig = value
if isinstance(value, Exception):
return exception_to_unicode(value)
if isinstance(value, unicode): if isinstance(value, unicode):
return value return value
if hasattr(value, '__unicode__'): try:
return unicode(value) return unicode(value)
if not isinstance(value, str):
value = str(value)
try: # first try utf-8
return unicode(value, 'utf-8')
except: except:
pass pass
try: # then extened iso-8858 for ln in get_encodings():
return unicode(value, 'iso-8859-15') try:
except: return unicode(value, ln)
pass except:
pass
raise UnicodeError('unable to convert %r' % (orig,))
# else use default system locale
from locale import getlocale
return unicode(value, getlocale()[1])
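
Roughly, the rewritten ustr() now behaves as follows (Python 2 semantics, sketch only; ustr, get_encodings and exception_to_unicode from this module are assumed to be in scope):

    assert ustr(u'\xe9t\xe9') == u'\xe9t\xe9'        # unicode objects pass straight through
    assert ustr('\xc3\xa9t\xc3\xa9') == u'\xe9t\xe9' # UTF-8 byte strings decode via get_encodings()
    ustr(ValueError('boom'))                         # exceptions are routed to exception_to_unicode()
    # Byte strings in the locale's own 8-bit encoding fall back to the remaining
    # candidates from get_encodings(); if none of them work, UnicodeError is raised
    # instead of silently picking a wrong codec.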
def exception_to_unicode(e): def exception_to_unicode(e):
if (sys.version_info[:2] < (2,6)) and hasattr(e, 'message'): if (sys.version_info[:2] < (2,6)) and hasattr(e, 'message'):
@ -914,7 +926,7 @@ def get_languages():
'fr_BE': u'French (BE) / Français (BE)', 'fr_BE': u'French (BE) / Français (BE)',
'fr_CH': u'French (CH) / Français (CH)', 'fr_CH': u'French (CH) / Français (CH)',
'fr_FR': u'French / Français', 'fr_FR': u'French / Français',
'gl_ES': u'Galician / Spain', 'gl_ES': u'Galician / Galego',
'gu_IN': u'Gujarati / India', 'gu_IN': u'Gujarati / India',
'hi_IN': u'Hindi / India', 'hi_IN': u'Hindi / India',
'hr_HR': u'Croatian / hrvatski jezik', 'hr_HR': u'Croatian / hrvatski jezik',
@ -939,8 +951,8 @@ def get_languages():
'ro_RO': u'Romanian / limba română', 'ro_RO': u'Romanian / limba română',
'ru_RU': u'Russian / русский язык', 'ru_RU': u'Russian / русский язык',
'si_LK': u'Sinhalese / Sri Lanka', 'si_LK': u'Sinhalese / Sri Lanka',
'sk_SK': u'Slovak / Slovakia', 'sl_SI': u'Slovenian / slovenščina',
'sl_SL': u'Slovenian / slovenščina', 'sk_SK': u'Slovak / Slovenský jazyk',
'sq_AL': u'Albanian / Shqipëri', 'sq_AL': u'Albanian / Shqipëri',
'sr_RS': u'Serbian / Serbia', 'sr_RS': u'Serbian / Serbia',
'sv_SE': u'Swedish / svenska', 'sv_SE': u'Swedish / svenska',


@ -596,23 +596,30 @@ def trans_generate(lang, modules, dbname=None):
push_translation(module, 'model', name, xml_name, encode(trad)) push_translation(module, 'model', name, xml_name, encode(trad))
# parse source code for _() calls # parse source code for _() calls
def get_module_from_path(path,mod_paths=None): def get_module_from_path(path, mod_paths=None):
if not mod_paths: # if not mod_paths:
# First, construct a list of possible paths ## First, construct a list of possible paths
def_path = os.path.abspath(os.path.join(tools.config['root_path'], 'addons')) # default addons path (base) # def_path = os.path.abspath(os.path.join(tools.config['root_path'], 'addons')) # default addons path (base)
ad_paths= map(lambda m: os.path.abspath(m.strip()),tools.config['addons_path'].split(',')) # ad_paths= map(lambda m: os.path.abspath(m.strip()),tools.config['addons_path'].split(','))
mod_paths=[def_path] # mod_paths=[def_path]
for adp in ad_paths: # for adp in ad_paths:
mod_paths.append(adp) # mod_paths.append(adp)
if not adp.startswith('/'): # if not adp.startswith('/'):
mod_paths.append(os.path.join(def_path,adp)) # mod_paths.append(os.path.join(def_path,adp))
elif adp.startswith(def_path): # elif adp.startswith(def_path):
mod_paths.append(adp[len(def_path)+1:]) # mod_paths.append(adp[len(def_path)+1:])
# for mp in mod_paths:
for mp in mod_paths: # if path.startswith(mp) and (os.path.dirname(path) != mp):
if path.startswith(mp) and (os.path.dirname(path) != mp): # path = path[len(mp)+1:]
path = path[len(mp)+1:] # return path.split(os.path.sep)[0]
return path.split(os.path.sep)[0] path_dir = os.path.dirname(path[1:])
if path_dir:
if os.path.exists(os.path.join(tools.config['addons_path'],path[1:])):
return path.split(os.path.sep)[1]
else:
root_addons = os.path.join(tools.config['root_path'], 'addons')
if os.path.exists(os.path.join(root_addons,path[1:])):
return path.split(os.path.sep)[1]
return 'base' # files that are not in a module are considered as being in 'base' module return 'base' # files that are not in a module are considered as being in 'base' module
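
A hypothetical illustration of the mapping the new implementation performs: the paths handed in look like '/<module>/<file>', relative to one of the addons directories, and the first path component is taken as the module name.

    import os

    def module_of(path, addons_dirs):
        # mirrors the logic above, with an explicit list of candidate addons directories
        rel = path[1:]                           # strip the leading path separator
        if os.path.dirname(rel):
            for addons in addons_dirs:
                if os.path.exists(os.path.join(addons, rel)):
                    return path.split(os.path.sep)[1]
        return 'base'                            # anything unresolved is counted as 'base'

    # e.g. module_of('/sale/sale.py', ['/opt/openerp/addons']) would return 'sale'
    # when that file exists on disk, and 'base' otherwise.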
modobj = pool.get('ir.module.module') modobj = pool.get('ir.module.module')
@ -699,6 +706,11 @@ def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=
if not lang_name: if not lang_name:
lang_name = tools.get_languages().get(lang, lang) lang_name = tools.get_languages().get(lang, lang)
def fix_xa0(s):
if s == '\xa0':
return '\xc2\xa0'
return s
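
What fix_xa0 is guarding against, in a nutshell: some locales report the thousands separator as a single latin-1 non-breaking-space byte, which is not valid UTF-8 on its own (sketch assumes Python 2 byte strings and the fix_xa0 above in scope):

    assert fix_xa0('\xa0') == '\xc2\xa0'   # lone NBSP byte re-encoded as two-byte UTF-8
    assert fix_xa0(',') == ','             # ordinary separators pass through unchanged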
lang_info = { lang_info = {
'code': lang, 'code': lang,
'iso_code': iso_lang, 'iso_code': iso_lang,
@ -706,9 +718,10 @@ def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=
'translatable': 1, 'translatable': 1,
'date_format' : str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y')), 'date_format' : str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y')),
'time_format' : str(locale.nl_langinfo(locale.T_FMT)), 'time_format' : str(locale.nl_langinfo(locale.T_FMT)),
'decimal_point' : str(locale.localeconv()['decimal_point']).replace('\xa0', '\xc2\xa0'), 'decimal_point' : fix_xa0(str(locale.localeconv()['decimal_point'])),
'thousands_sep' : str(locale.localeconv()['thousands_sep']).replace('\xa0', '\xc2\xa0'), 'thousands_sep' : fix_xa0(str(locale.localeconv()['thousands_sep'])),
} }
try: try:
lang_obj.create(cr, uid, lang_info) lang_obj.create(cr, uid, lang_info)
finally: finally:
@ -767,7 +780,7 @@ def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=
# the same source # the same source
obj = pool.get(model) obj = pool.get(model)
if obj: if obj:
if not field in obj._columns: if field not in obj.fields_get_keys(cr, uid):
continue continue
ids = obj.search(cr, uid, [(field, '=', dic['src'])]) ids = obj.search(cr, uid, [(field, '=', dic['src'])])

File diff suppressed because it is too large


@ -6,7 +6,7 @@ formats=rpm
[bdist_rpm] [bdist_rpm]
# release must exactly match 'release' as set in bin/release.py # release must exactly match 'release' as set in bin/release.py
release=3 release=10
requires=python >= 2.4 requires=python >= 2.4
#build-requires=python-devel >= 2.3 #build-requires=python-devel >= 2.3


@ -44,7 +44,7 @@ sys.path.append(join(os.path.abspath(os.path.dirname(__file__)), "bin"))
execfile(join('bin', 'release.py')) execfile(join('bin', 'release.py'))
if sys.argv[1] == 'bdist_rpm': if 'bdist_rpm' in sys.argv:
version = version.split('-')[0] version = version.split('-')[0]
# get python short version # get python short version
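
The switch from sys.argv[1] == 'bdist_rpm' to a membership test is because setup.py can be invoked with global options or several commands at once, so the command is not always the first argument; a small standalone sketch:

    argv = ['setup.py', '--quiet', 'bdist_rpm']   # hypothetical invocation
    assert argv[1] != 'bdist_rpm'                 # the old positional check misses this case
    assert 'bdist_rpm' in argv                    # the membership test still matches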
@ -177,11 +177,14 @@ options = {
"compressed": 1, "compressed": 1,
"optimize": 2, "optimize": 2,
"dist_dir": 'dist', "dist_dir": 'dist',
"packages": ["lxml", "lxml.builder", "lxml._elementpath", "lxml.etree", "packages": [
"lxml.objectify", "decimal", "xml", "encodings", "lxml", "lxml.builder", "lxml._elementpath", "lxml.etree",
"dateutil", "wizard", "pychart", "PIL", "pyparsing", "lxml.objectify", "decimal", "xml", "xml", "xml.dom", "xml.xpath",
"pydot", "asyncore","asynchat", "reportlab", "vobject", "encodings", "dateutil", "wizard", "pychart", "PIL", "pyparsing",
"HTMLParser", "select", "yaml", "pywebdav"], "pydot", "asyncore","asynchat", "reportlab", "vobject",
"HTMLParser", "select", "mako", "poplib",
"imaplib", "smtplib", "email", "yaml","pywebdav",
],
"excludes" : ["Tkconstants","Tkinter","tcl"], "excludes" : ["Tkconstants","Tkinter","tcl"],
} }
} }