nvi-openerp 2010-12-08 11:52:49 +01:00
commit 38569b7754
52 changed files with 46159 additions and 30157 deletions

View File

@ -649,8 +649,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, **kwargs):
try:
_load_data(cr, module_name, id_map, mode, 'test')
except Exception, e:
logger.notifyChannel('ERROR', netsvc.LOG_TEST, e)
pass
logging.getLogger('test').exception('Tests failed to execute in module %s', module_name)
finally:
if tools.config.options['test_commit']:
cr.commit()
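The hunk above swaps the old netsvc channel logging for the stdlib logging module when a module's test data fails to load. A minimal sketch of that load-and-log pattern; load_module_tests, load_data and test_files are illustrative stand-ins rather than names from this commit, and cr is assumed to be a database cursor:

    import logging

    def load_module_tests(cr, module_name, test_files, load_data, test_commit=False):
        """Run each test file via load_data(); log failures without aborting the load."""
        logger = logging.getLogger('test')
        for test_file in test_files:
            try:
                load_data(cr, module_name, test_file)
            except Exception:
                # logger.exception() records the message plus the current traceback
                logger.exception('Tests failed to execute in module %s', module_name)
            finally:
                if test_commit:
                    cr.commit()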

View File

@ -86,7 +86,7 @@
],
'test': [
'test/base_test.xml',
#'test/base_test.yml'
'test/base_test.yml',
'test/test_context.xml',
'test/bug_lp541545.xml',
],

View File

@ -288,6 +288,7 @@ CREATE TABLE ir_module_module (
description text,
demo boolean default False,
web boolean DEFAULT FALSE,
license character varying(32),
primary key(id)
);
ALTER TABLE ir_module_module add constraint name_uniq unique (name);

20 file diffs suppressed because they are too large

View File

@ -401,6 +401,12 @@ class ir_model_data(osv.osv):
self.doinit = True
self.unlink_mark = {}
def _auto_init(self, cr, context=None):
super(ir_model_data, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_model_data_module_name_index\'')
if not cr.fetchone():
cr.execute('CREATE INDEX ir_model_data_module_name_index ON ir_model_data (module, name)')
@tools.cache()
def _get_id(self, cr, uid, module, xml_id):
"""Returns the id of the ir.model.data record corresponding to a given module and xml_id (cached) or raise a ValueError if not found"""
@ -448,18 +454,19 @@ class ir_model_data(osv.osv):
action_id = False
if xml_id:
cr.execute('select id,res_id from ir_model_data where module=%s and name=%s', (module,xml_id))
cr.execute('''SELECT imd.id, imd.res_id, md.id
FROM ir_model_data imd LEFT JOIN %s md ON (imd.res_id = md.id)
WHERE imd.module=%%s AND imd.name=%%s''' % model_obj._table,
(module, xml_id))
results = cr.fetchall()
for action_id2,res_id2 in results:
cr.execute('select id from '+model_obj._table+' where id=%s', (res_id2,))
result3 = cr.fetchone()
if not result3:
for imd_id2,res_id2,real_id2 in results:
if not real_id2:
self._get_id.clear_cache(cr.dbname, uid, module, xml_id)
self.get_object_reference.clear_cache(cr.dbname, uid, module, xml_id)
cr.execute('delete from ir_model_data where id=%s', (action_id2,))
cr.execute('delete from ir_model_data where id=%s', (imd_id2,))
res_id = False
else:
res_id,action_id = res_id2,action_id2
res_id,action_id = res_id2,imd_id2
if action_id and res_id:
model_obj.write(cr, uid, [res_id], values, context=context)
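The _auto_init override at the top of this hunk uses a pattern that recurs in this commit (ir_values and wkf_instance below): query pg_indexes and create the index only if it is missing, so repeated module updates stay idempotent. A standalone sketch of that pattern, assuming a DB-API cursor on PostgreSQL; ensure_index and its arguments are illustrative, not part of the commit:

    def ensure_index(cr, index_name, table, columns):
        """Create a PostgreSQL index only if it does not exist yet."""
        cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s",
                   (index_name,))
        if not cr.fetchone():
            # identifiers cannot be passed as query parameters, hence the %-format
            cr.execute('CREATE INDEX "%s" ON "%s" (%s)'
                       % (index_name, table, ', '.join(columns)))

    # e.g. ensure_index(cr, 'ir_model_data_module_name_index',
    #                   'ir_model_data', ['module', 'name'])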

View File

@ -74,10 +74,18 @@ class ir_sequence(osv.osv):
def get_id(self, cr, uid, sequence_id, test='id', context=None):
assert test in ('code','id')
cr.execute('SELECT id, number_next, prefix, suffix, padding FROM ir_sequence WHERE '+test+'=%s AND active=%s FOR UPDATE NOWAIT', (sequence_id, True))
company_id = self.pool.get('res.users').read(cr, uid, uid, ['company_id'], context=context)['company_id'][0] or None
cr.execute('''SELECT id, number_next, prefix, suffix, padding
FROM ir_sequence
WHERE %s=%%s
AND active=true
AND (company_id = %%s or company_id is NULL)
ORDER BY company_id, id
FOR UPDATE NOWAIT''' % test,
(sequence_id, company_id))
res = cr.dictfetchone()
if res:
cr.execute('UPDATE ir_sequence SET number_next=number_next+number_increment WHERE id=%s AND active=%s', (res['id'], True))
cr.execute('UPDATE ir_sequence SET number_next=number_next+number_increment WHERE id=%s AND active=true', (res['id'],))
if res['number_next']:
return self._process(res['prefix']) + '%%0%sd' % res['padding'] % res['number_next'] + self._process(res['suffix'])
else:
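The context line above composes the sequence value as prefix + zero-padded counter + suffix through chained %-formatting ('%%0%sd' % padding % number_next). A worked example with made-up values; format_sequence is an illustrative helper, not code from this commit:

    def format_sequence(prefix, padding, number_next, suffix):
        """Return e.g. 'SO00042' for prefix='SO', padding=5, number_next=42."""
        # '%%0%sd' % 5 -> '%05d';  '%05d' % 42 -> '00042'
        return (prefix or '') + '%%0%sd' % padding % number_next + (suffix or '')

    assert format_sequence('SO', 5, 42, '') == 'SO00042'
    assert format_sequence('INV/', 3, 7, '/2010') == 'INV/007/2010'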

View File

@ -57,7 +57,7 @@ class ir_translation(osv.osv):
_columns = {
'name': fields.char('Field Name', size=128, required=True),
'res_id': fields.integer('Resource ID', select=True),
'lang': fields.selection(_get_language, string='Language', size=5),
'lang': fields.selection(_get_language, string='Language', size=16),
'type': fields.selection(TRANSLATION_TYPE, string='Type', size=16, select=True),
'src': fields.text('Source'),
'value': fields.text('Translation Value'),

View File

@ -26,8 +26,7 @@ from tools.translate import _
EXCLUDED_FIELDS = set((
'report_sxw_content', 'report_rml_content', 'report_sxw', 'report_rml',
'report_sxw_content_data', 'report_rml_content_data', 'search_view',
'search_view_id'))
'report_sxw_content_data', 'report_rml_content_data', 'search_view', ))
class ir_values(osv.osv):
_name = 'ir.values'
@ -72,21 +71,21 @@ class ir_values(osv.osv):
'name': fields.char('Name', size=128),
'model_id': fields.many2one('ir.model', 'Object', size=128,
help="This field is not used, it only helps you to select a good model."),
'model': fields.char('Object Name', size=128),
'model': fields.char('Object Name', size=128, select=True),
'action_id': fields.many2one('ir.actions.actions', 'Action',
help="This field is not used, it only helps you to select the right action."),
'value': fields.text('Value'),
'value_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
method=True, type='text', string='Value'),
'object': fields.boolean('Is Object'),
'key': fields.selection([('action','Action'),('default','Default')], 'Type', size=128),
'key2' : fields.char('Event Type',help="The kind of action or button in the client side that will trigger the action.", size=128),
'key': fields.selection([('action','Action'),('default','Default')], 'Type', size=128, select=True),
'key2' : fields.char('Event Type',help="The kind of action or button in the client side that will trigger the action.", size=128, select=True),
'meta': fields.text('Meta Datas'),
'meta_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
method=True, type='text', string='Metadata'),
'res_id': fields.integer('Object ID', help="Keep 0 if the action must appear on all resources."),
'user_id': fields.many2one('res.users', 'User', ondelete='cascade'),
'company_id': fields.many2one('res.company', 'Company')
'res_id': fields.integer('Object ID', help="Keep 0 if the action must appear on all resources.", select=True),
'user_id': fields.many2one('res.users', 'User', ondelete='cascade', select=True),
'company_id': fields.many2one('res.company', 'Company', select=True)
}
_defaults = {
'key': lambda *a: 'action',
@ -94,11 +93,11 @@ class ir_values(osv.osv):
'company_id': lambda *a: False
}
def _auto_init(self, cr, context={}):
def _auto_init(self, cr, context=None):
super(ir_values, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_values_key_model_key2_index\'')
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_values_key_model_key2_res_id_user_id_idx\'')
if not cr.fetchone():
cr.execute('CREATE INDEX ir_values_key_model_key2_index ON ir_values (key, model, key2)')
cr.execute('CREATE INDEX ir_values_key_model_key2_res_id_user_id_idx ON ir_values (key, model, key2, res_id, user_id)')
def set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=False, preserve_user=False, company=False):
if isinstance(value, unicode):

View File

@ -175,7 +175,7 @@ class wkf_instance(osv.osv):
'res_type': fields.char('Resource Object', size=64, select=True),
'state': fields.char('State', size=32, select=True),
}
def _auto_init(self, cr, context={}):
def _auto_init(self, cr, context=None):
super(wkf_instance, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_instance_res_id_res_type_state_index\'')
if not cr.fetchone():

View File

@ -57,7 +57,7 @@ class module_category(osv.osv):
return result
_columns = {
'name': fields.char("Name", size=128, required=True),
'name': fields.char("Name", size=128, required=True, select=True),
'parent_id': fields.many2one('ir.module.category', 'Parent Category', select=True),
'child_ids': fields.one2many('ir.module.category', 'parent_id', 'Child Categories'),
'module_nr': fields.function(_module_nbr, method=True, string='Number of Modules', type='integer')
@ -70,14 +70,15 @@ class module(osv.osv):
_description = "Module"
__logger = logging.getLogger('base.' + _name)
def get_module_info(self, name):
@classmethod
def get_module_info(cls, name):
info = {}
try:
info = addons.load_information_from_description_file(name)
if 'version' in info:
info['version'] = release.major_version + '.' + info['version']
except Exception:
self.__logger.debug('Error when trying to fetch informations for '
cls.__logger.debug('Error when trying to fetch information for '
'module %s', name, exc_info=True)
return info
@ -96,26 +97,34 @@ class module(osv.osv):
mlist = self.browse(cr, uid, ids, context=context)
mnames = {}
for m in mlist:
mnames[m.name] = m.id
# skip uninstalled modules below,
# no data to find anyway
if m.state in ('installed', 'to upgrade', 'to remove'):
mnames[m.name] = m.id
res[m.id] = {
'menus_by_module':[],
'reports_by_module':[],
'views_by_module': []
}
if not mnames:
return res
view_id = model_data_obj.search(cr,uid,[('module','in', mnames.keys()),
('model','in',('ir.ui.view','ir.actions.report.xml','ir.ui.menu'))])
for data_id in model_data_obj.browse(cr,uid,view_id,context):
# We use try except, because views or menus may not exist
try:
key = data_id.model
res_mod_dic = res[mnames[data_id.module]]
if key=='ir.ui.view':
v = view_obj.browse(cr,uid,data_id.res_id)
aa = v.inherit_id and '* INHERIT ' or ''
res[mnames[data_id.module]]['views_by_module'].append(aa + v.name + '('+v.type+')')
res_mod_dic['views_by_module'].append(aa + v.name + '('+v.type+')')
elif key=='ir.actions.report.xml':
res[mnames[data_id.module]]['reports_by_module'].append(report_obj.browse(cr,uid,data_id.res_id).name)
res_mod_dic['reports_by_module'].append(report_obj.browse(cr,uid,data_id.res_id).name)
elif key=='ir.ui.menu':
res[mnames[data_id.module]]['menus_by_module'].append(menu_obj.browse(cr,uid,data_id.res_id).complete_name)
res_mod_dic['menus_by_module'].append(menu_obj.browse(cr,uid,data_id.res_id).complete_name)
except KeyError, e:
self.__logger.warning(
'Data not found for reference %s[%s:%s.%s]', data_id.model,
@ -234,6 +243,19 @@ class module(osv.osv):
if tools.find_in_path(binary) is None:
raise Exception('Unable to find %r in path' % (binary,))
@classmethod
def check_external_dependencies(cls, module_name, newstate='to install'):
terp = cls.get_module_info(module_name)
try:
cls._check_external_dependencies(terp)
except Exception, e:
if newstate == 'to install':
msg = _('Unable to install module "%s" because an external dependency is not met: %s')
elif newstate == 'to upgrade':
msg = _('Unable to upgrade module "%s" because an external dependency is not met: %s')
else:
msg = _('Unable to process module "%s" because an external dependency is not met: %s')
raise orm.except_orm(_('Error'), msg % (module_name, e.args[0]))
def state_update(self, cr, uid, ids, newstate, states_to_update, context=None, level=100):
if level<1:
@ -251,17 +273,7 @@ class module(osv.osv):
od = self.browse(cr, uid, ids2)[0]
mdemo = od.demo or mdemo
terp = self.get_module_info(module.name)
try:
self._check_external_dependencies(terp)
except Exception, e:
if newstate == 'to install':
msg = _('Unable to install module "%s" because an external dependency is not met: %s')
elif newstate == 'to upgrade':
msg = _('Unable to upgrade module "%s" because an external dependency is not met: %s')
else:
msg = _('Unable to process module "%s" because an external dependency is not met: %s')
raise orm.except_orm(_('Error'), msg % (module.name, e.args[0]))
self.check_external_dependencies(module.name, newstate)
if not module.dependencies_id:
mdemo = module.demo
if module.state in states_to_update:
@ -308,6 +320,7 @@ class module(osv.osv):
if mod.state not in ('installed','to upgrade'):
raise orm.except_orm(_('Error'),
_("Can not upgrade module '%s'. It is not installed.") % (mod.name,))
self.check_external_dependencies(mod.name, 'to upgrade')
iids = depobj.search(cr, uid, [('name', '=', mod.name)], context=context)
for dep in depobj.browse(cr, uid, iids, context=context):
if dep.module_id.state=='installed' and dep.module_id not in todo:
@ -344,8 +357,8 @@ class module(osv.osv):
'maintainer': terp.get('maintainer', False),
'contributors': ', '.join(terp.get('contributors', [])) or False,
'website': terp.get('website', ''),
'license': terp.get('license', 'GPL-2'),
'certificate': terp.get('certificate') or None,
'license': terp.get('license', 'AGPL-3'),
'certificate': terp.get('certificate') or False,
'web': terp.get('web') or False,
}
@ -353,34 +366,40 @@ class module(osv.osv):
def update_list(self, cr, uid, context={}):
res = [0, 0] # [update, add]
# iterate through installed modules and mark them as being so
known_mods = self.browse(cr, uid, self.search(cr, uid, []))
known_mods_names = dict([(m.name, m) for m in known_mods])
# iterate through detected modules and update/create them in db
for mod_name in addons.get_modules():
ids = self.search(cr, uid, [('name','=',mod_name)])
mod = known_mods_names.get(mod_name)
terp = self.get_module_info(mod_name)
values = self.get_values_from_terp(terp)
if ids:
id = ids[0]
mod = self.browse(cr, uid, id)
if mod:
updated_values = {}
for key in values:
old = getattr(mod, key)
updated = isinstance(values[key], basestring) and tools.ustr(values[key]) or values[key]
if not old == updated:
updated_values[key] = values[key]
if terp.get('installable', True) and mod.state == 'uninstallable':
self.write(cr, uid, id, {'state': 'uninstalled'})
updated_values['state'] = 'uninstalled'
if parse_version(terp.get('version', '')) > parse_version(mod.latest_version or ''):
self.write(cr, uid, id, {'url': ''})
res[0] += 1
self.write(cr, uid, id, values)
cr.execute('DELETE FROM ir_module_module_dependency WHERE module_id = %s', (id,))
if updated_values:
self.write(cr, uid, mod.id, updated_values)
else:
mod_path = addons.get_module_path(mod_name)
if not mod_path:
continue
if not terp or not terp.get('installable', True):
continue
ids = self.search(cr, uid, [('name','=',mod_name)])
id = self.create(cr, uid, dict(name=mod_name, state='uninstalled', **values))
mod = self.browse(cr, uid, id)
res[1] += 1
self._update_dependencies(cr, uid, id, terp.get('depends', []))
self._update_category(cr, uid, id, terp.get('category', 'Uncategorized'))
self._update_dependencies(cr, uid, mod, terp.get('depends', []))
self._update_category(cr, uid, mod, terp.get('category', 'Uncategorized'))
return res
@ -412,39 +431,49 @@ class module(osv.osv):
self.write(cr, uid, mod.id, self.get_values_from_terp(terp))
cr.execute('DELETE FROM ir_module_module_dependency ' \
'WHERE module_id = %s', (mod.id,))
self._update_dependencies(cr, uid, mod.id, terp.get('depends',
self._update_dependencies(cr, uid, mod, terp.get('depends',
[]))
self._update_category(cr, uid, mod.id, terp.get('category',
self._update_category(cr, uid, mod, terp.get('category',
'Uncategorized'))
# Import module
zimp = zipimport.zipimporter(fname)
zimp.load_module(mod.name)
return res
def _update_dependencies(self, cr, uid, id, depends=None):
def _update_dependencies(self, cr, uid, mod_browse, depends=None):
if depends is None:
depends = []
for d in depends:
cr.execute('INSERT INTO ir_module_module_dependency (module_id, name) values (%s, %s)', (id, d))
existing = set(x.name for x in mod_browse.dependencies_id)
needed = set(depends)
for dep in (needed - existing):
cr.execute('INSERT INTO ir_module_module_dependency (module_id, name) values (%s, %s)', (mod_browse.id, dep))
for dep in (existing - needed):
cr.execute('DELETE FROM ir_module_module_dependency WHERE module_id = %s and name = %s', (mod_browse.id, dep))
def _update_category(self, cr, uid, mod_browse, category='Uncategorized'):
current_category = mod_browse.category_id
current_category_path = []
while current_category:
current_category_path.insert(0, current_category.name)
current_category = current_category.parent_id
def _update_category(self, cr, uid, id, category='Uncategorized'):
categs = category.split('/')
p_id = None
while categs:
if p_id is not None:
cr.execute('select id from ir_module_category where name=%s and parent_id=%s', (categs[0], p_id))
else:
cr.execute('select id from ir_module_category where name=%s and parent_id is NULL', (categs[0],))
c_id = cr.fetchone()
if not c_id:
cr.execute('select nextval(\'ir_module_category_id_seq\')')
c_id = cr.fetchone()[0]
cr.execute('insert into ir_module_category (id, name, parent_id) values (%s, %s, %s)', (c_id, categs[0], p_id))
else:
c_id = c_id[0]
p_id = c_id
categs = categs[1:]
self.write(cr, uid, [id], {'category_id': p_id})
if categs != current_category_path:
p_id = None
while categs:
if p_id is not None:
cr.execute('SELECT id FROM ir_module_category WHERE name=%s AND parent_id=%s', (categs[0], p_id))
else:
cr.execute('SELECT id FROM ir_module_category WHERE name=%s AND parent_id is NULL', (categs[0],))
c_id = cr.fetchone()
if not c_id:
cr.execute('INSERT INTO ir_module_category (name, parent_id) VALUES (%s, %s) RETURNING id', (categs[0], p_id))
c_id = cr.fetchone()[0]
else:
c_id = c_id[0]
p_id = c_id
categs = categs[1:]
self.write(cr, uid, [mod_browse.id], {'category_id': p_id})
def update_translations(self, cr, uid, ids, filter_lang=None, context=None):
logger = logging.getLogger('i18n')
@ -464,8 +493,6 @@ class module(osv.osv):
# unable to find the module. we skip
continue
for lang in filter_lang:
if len(lang) > 5:
raise osv.except_osv(_('Error'), _('You Can Not Load Translation For language Due To Invalid Language/Country Code'))
iso_lang = tools.get_iso_codes(lang)
f = addons.get_module_resource(mod.name, 'i18n', iso_lang + '.po')
# Implementation notice: we must first search for the full name of
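The _update_dependencies rewrite earlier in this file replaces the old delete-and-reinsert of every dependency row with two set differences, inserting only new dependencies and deleting only dropped ones. A standalone sketch of that sync pattern; sync_dependencies is an illustrative wrapper, cr is assumed to be a DB-API cursor, and the SQL mirrors the hunk:

    def sync_dependencies(cr, module_id, existing_names, wanted_names):
        """Insert missing dependency rows and delete obsolete ones."""
        existing = set(existing_names)
        wanted = set(wanted_names)
        for name in wanted - existing:      # newly declared dependencies
            cr.execute('INSERT INTO ir_module_module_dependency (module_id, name) '
                       'VALUES (%s, %s)', (module_id, name))
        for name in existing - wanted:      # dependencies that were dropped
            cr.execute('DELETE FROM ir_module_module_dependency '
                       'WHERE module_id = %s AND name = %s', (module_id, name))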

View File

@ -20,8 +20,8 @@
##############################################################################
import pooler
import wizard
from osv import osv, fields
from tools.translate import _
class base_module_upgrade(osv.osv_memory):
""" Module Upgrade """

View File

@ -7,8 +7,8 @@
<field name="arch" type="xml">
<form>
<group colspan="4" col="8">
<group colspan="3" width="220" height="250">
<field name="config_logo" widget="image" width="220" height="130" nolabel="1" colspan="1"/>
<group colspan="1" width="220" height="250">
<field name="config_logo" widget="image" width="220" height="130" nolabel="1"/>
<newline/>
<label align="0.0" string="description" width="200" colspan="2"/>
</group>

View File

@ -36,8 +36,8 @@ class lang(osv.osv):
_columns = {
'name': fields.char('Name', size=64, required=True),
'code': fields.char('Locale Code', size=5, required=True, help='This field is used to set/get locales for user'),
'iso_code': fields.char('ISO code', size=5, required=False, help='This ISO code is the name of po files to use for translations'),
'code': fields.char('Locale Code', size=16, required=True, help='This field is used to set/get locales for user'),
'iso_code': fields.char('ISO code', size=16, required=False, help='This ISO code is the name of po files to use for translations'),
'translatable': fields.boolean('Translatable'),
'active': fields.boolean('Active'),
'direction': fields.selection([('ltr', 'Left-to-Right'), ('rtl', 'Right-to-Left')], 'Direction',required=True),

View File

@ -20,6 +20,8 @@
##############################################################################
from osv import fields,osv
class res_widget(osv.osv):
_name = "res.widget"
_rec_name = "title"
@ -27,8 +29,10 @@ class res_widget(osv.osv):
'title' : fields.char('Title', size=64, required=True, translate=True),
'content': fields.text('Content', required=True),
}
res_widget()
class res_widget_user(osv.osv):
_name="res.widget.user"
_order = "sequence"
@ -37,8 +41,18 @@ class res_widget_user(osv.osv):
'user_id': fields.many2one('res.users','User', select=1),
'widget_id': fields.many2one('res.widget','Widget',required=True),
}
def create(self, cr, uid, vals, context=None):
existing = self.search(cr, uid, [('user_id', '=', vals.get('user_id')), ('widget_id', '=', vals.get('widget_id'))], context=context)
if existing:
res = existing[0]
else:
res = super(res_widget_user, self).create(cr, uid, vals, context=context)
return res
res_widget_user()
class res_widget_wizard(osv.osv_memory):
_name = "res.widget.wizard"
_description = "Add a widget for User"
@ -58,4 +72,6 @@ class res_widget_wizard(osv.osv_memory):
self.pool.get('res.widget.user').create(
cr, uid, {'user_id':uid, 'widget_id':wiz_id}, context=context)
return {'type': 'ir.actions.act_window_close'}
res_widget_wizard()

View File

@ -248,6 +248,7 @@
</rng:optional>
<rng:optional><rng:attribute name="color"/></rng:optional>
<rng:optional><rng:attribute name="date_delay"/></rng:optional>
<rng:optional><rng:attribute name="day_length"/></rng:optional>
<rng:optional><rng:attribute name="date_start"/></rng:optional>
<rng:optional><rng:attribute name="date_stop"/></rng:optional>
<rng:optional><rng:attribute name="date_string"/></rng:optional>

View File

@ -1,9 +1,6 @@
- |
Safe_Eval Scenario:
In order to check that common dangerous operations are
not allowed by the safe_eval mechanism, attempt to
evaluate some bad expressions, and verify that it triggers
an error.
To check that common dangerous operations are not allowed by the safe_eval mechanism, attempt to
evaluate unauthorized expressions, and verify that they trigger an error.
-
1. Try a few common expressions to verify they work with safe_eval
-
@ -47,11 +44,11 @@
from tools.safe_eval import safe_eval
try:
safe_eval('open("/etc/passwd","r")')
assert False, "safe_eval should not allow arbitrary expressions"
assert False, "safe_eval should not allow calling open() builtin"
except NameError:
pass
except:
# NameError should be raised because open() builtin is not found,
# but other exceptions probably indicate that open() was executed!
assert False, "safe_eval should not allow arbitrary expressions"
assert False, "safe_eval should not allow calling open() builtin"

View File

@ -0,0 +1,46 @@
-
Testing that some domain expressions work
-
!python {model: res.partner.address }: |
ids = self.search(cr, uid, [('partner_id','=','Agrolait')])
assert len(ids) >= 1, ids
-
Trying the "in" operator, for scalar value
-
!python {model: res.partner.address }: |
ids = self.search(cr, uid, [('partner_id','in','Agrolait')])
assert len(ids) >= 1, ids
-
Trying the "in" operator for list value
-
!python {model: res.partner.address }: |
ids = self.search(cr, uid, [('partner_id','in',['Agrolait','ASUStek'])])
assert len(ids) >= 1, ids
-
Check we can use "in" operator for plain fields.
-
!python {model: ir.ui.menu }: |
ids = self.search(cr, uid, [('sequence','in',[1, 2, 10, 20])])
assert len(ids) >= 1, ids
-
Test one2many operator with empty search list
-
!assert {model: res.partner, search: "[('address', 'in', [])]", count: 0, string: "Ids should be empty"}
-
Test one2many operator with False
-
!assert {model: res.partner, search: "[('address', '=', False)]"}:
- address in (False, None, [])
-
Test many2many operator with empty search list
-
!assert {model: res.partner, search: "[('category_id', 'in', [])]", count: 0, string: "Ids should be empty"}
-
Test many2many operator with False
-
!assert {model: res.partner, search: "[('category_id', '=', False)]"}:
- category_id in (False, None, [])
-
Filtering on invalid value across x2many relationship should return an empty set
-
!assert {model: res.partner, search: "[('address.city','=','foo')]", count: 0, string: "Searching for address.city = foo should give empty results"}

View File

@ -196,7 +196,6 @@
<rng:optional><rng:attribute name="domain" /> </rng:optional>
<rng:optional><rng:attribute name="src_model" /></rng:optional>
<rng:optional><rng:attribute name="context" /></rng:optional>
<rng:optional> <rng:attribute name="view"/> </rng:optional>
<rng:optional> <rng:attribute name="view_id"/> </rng:optional>
<rng:optional> <rng:attribute name="view_type"/> </rng:optional>
<rng:optional> <rng:attribute name="view_mode"/> </rng:optional>

View File

@ -132,8 +132,8 @@ class ExportService(object):
LOG_NOTSET = 'notset'
LOG_DEBUG_SQL = 'debug_sql'
LOG_DEBUG_RPC = 'debug_rpc'
LOG_DEBUG_RPC_ANSWER = 'debug_rpc_answer'
LOG_DEBUG_RPC = 'debug_rpc'
LOG_DEBUG = 'debug'
LOG_TEST = 'test'
LOG_INFO = 'info'

View File

@ -185,7 +185,7 @@ class expression(object):
else:
call_null = True
if right:
if right is not False:
if isinstance(right, basestring):
ids2 = [x[0] for x in field_obj.name_search(cr, uid, right, [], operator, context=context, limit=None)]
if ids2:
@ -234,7 +234,7 @@ class expression(object):
self.__exp[i] = ('id', 'in', _rec_convert(ids2))
else:
call_null_m2m = True
if right:
if right is not False:
if isinstance(right, basestring):
res_ids = [x[0] for x in field_obj.name_search(cr, uid, right, [], operator, context=context)]
if res_ids:
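Both hunks above change "if right:" to "if right is not False:": the practical effect is that falsy but meaningful operands such as 0 or the empty string are still looked up, while an explicit False keeps its special "not set" meaning. A tiny illustration; should_lookup is an illustrative helper:

    def should_lookup(right):
        """Only an explicit False means 'no value'; 0 and '' are real operands."""
        return right is not False

    assert should_lookup(0)          # 0 is a valid value to search for
    assert should_lookup('')         # so is the empty string
    assert not should_lookup(False)  # False still means 'not set'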

View File

@ -403,7 +403,7 @@ class orm_template(object):
"""Override this method to do specific things when a view on the object is opened."""
pass
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None):
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
raise NotImplementedError(_('The read_group method is not implemented on this object !'))
def _field_create(self, cr, context=None):
@ -1205,46 +1205,49 @@ class orm_template(object):
if self._columns.keys():
for f in self._columns.keys():
field_col = self._columns[f]
if allfields and f not in allfields:
continue
res[f] = {'type': self._columns[f]._type}
res[f] = {'type': field_col._type}
# These additional attributes for M2M and function fields are added
# because we need to display a tooltip with this additional information
# when the client is started in debug mode.
if isinstance(self._columns[f], fields.function):
res[f]['function'] = self._columns[f]._fnct and self._columns[f]._fnct.func_name or False
res[f]['store'] = self._columns[f].store
if isinstance(self._columns[f].store, dict):
res[f]['store'] = str(self._columns[f].store)
res[f]['fnct_search'] = self._columns[f]._fnct_search and self._columns[f]._fnct_search.func_name or False
res[f]['fnct_inv'] = self._columns[f]._fnct_inv and self._columns[f]._fnct_inv.func_name or False
res[f]['fnct_inv_arg'] = self._columns[f]._fnct_inv_arg or False
res[f]['func_obj'] = self._columns[f]._obj or False
res[f]['func_method'] = self._columns[f]._method
if isinstance(self._columns[f], fields.many2many):
res[f]['related_columns'] = list((self._columns[f]._id1, self._columns[f]._id2))
res[f]['third_table'] = self._columns[f]._rel
if isinstance(field_col, fields.function):
res[f]['function'] = field_col._fnct and field_col._fnct.func_name or False
res[f]['store'] = field_col.store
if isinstance(field_col.store, dict):
res[f]['store'] = str(field_col.store)
res[f]['fnct_search'] = field_col._fnct_search and field_col._fnct_search.func_name or False
res[f]['fnct_inv'] = field_col._fnct_inv and field_col._fnct_inv.func_name or False
res[f]['fnct_inv_arg'] = field_col._fnct_inv_arg or False
res[f]['func_obj'] = field_col._obj or False
res[f]['func_method'] = field_col._method
if isinstance(field_col, fields.many2many):
res[f]['related_columns'] = list((field_col._id1, field_col._id2))
res[f]['third_table'] = field_col._rel
for arg in ('string', 'readonly', 'states', 'size', 'required', 'group_operator',
'change_default', 'translate', 'help', 'select', 'selectable'):
if getattr(self._columns[f], arg):
res[f][arg] = getattr(self._columns[f], arg)
if getattr(field_col, arg):
res[f][arg] = getattr(field_col, arg)
if not write_access:
res[f]['readonly'] = True
res[f]['states'] = {}
for arg in ('digits', 'invisible', 'filters'):
if getattr(self._columns[f], arg, None):
res[f][arg] = getattr(self._columns[f], arg)
if getattr(field_col, arg, None):
res[f][arg] = getattr(field_col, arg)
res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US', self._columns[f].string)
if res_trans:
res[f]['string'] = res_trans
help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
if help_trans:
res[f]['help'] = help_trans
if field_col.string:
res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context.get('lang', False) or 'en_US', field_col.string)
if res_trans:
res[f]['string'] = res_trans
if field_col.help:
help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context.get('lang', False) or 'en_US')
if help_trans:
res[f]['help'] = help_trans
if hasattr(self._columns[f], 'selection'):
if isinstance(self._columns[f].selection, (tuple, list)):
sel = self._columns[f].selection
if hasattr(field_col, 'selection'):
if isinstance(field_col.selection, (tuple, list)):
sel = field_col.selection
# translate each selection option
sel2 = []
for (key, val) in sel:
@ -1256,12 +1259,11 @@ class orm_template(object):
res[f]['selection'] = sel
else:
# call the 'dynamic selection' function
res[f]['selection'] = self._columns[f].selection(self, cr,
user, context)
res[f]['selection'] = field_col.selection(self, cr, user, context)
if res[f]['type'] in ('one2many', 'many2many', 'many2one', 'one2one'):
res[f]['relation'] = self._columns[f]._obj
res[f]['domain'] = self._columns[f]._domain
res[f]['context'] = self._columns[f]._context
res[f]['relation'] = field_col._obj
res[f]['domain'] = field_col._domain
res[f]['context'] = field_col._context
else:
#TODO : read the fields from the database
pass
@ -1318,13 +1320,13 @@ class orm_template(object):
new_xml = etree.fromstring(encode(xml))
ctx = context.copy()
ctx['base_model_name'] = self._name
xarch, xfields = self.pool.get(node.get('object', False)).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
views[str(f.tag)] = {
xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
views['form'] = {
'arch': xarch,
'fields': xfields
}
attrs = {'views': views}
fields = views.get('field', False) and views['field'].get('fields', False)
fields = xfields
if node.get('name'):
attrs = {}
try:
@ -1332,7 +1334,7 @@ class orm_template(object):
column = self._columns[node.get('name')]
else:
column = self._inherit_fields[node.get('name')][2]
except:
except Exception:
column = False
if column:
@ -1394,6 +1396,10 @@ class orm_template(object):
trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
if trans:
node.set('string', trans)
if node.get('confirm'):
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('confirm'))
if trans:
node.set('confirm', trans)
if node.get('sum'):
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum'))
if trans:
@ -2207,7 +2213,7 @@ class orm(orm_template):
_protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
__logger = logging.getLogger('orm')
__schema = logging.getLogger('orm.schema')
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None):
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
"""
Get the list of records in list view grouped by the given ``groupby`` fields
@ -2220,6 +2226,9 @@ class orm(orm_template):
:param offset: optional number of records to skip
:param limit: optional max number of records to return
:param context: context arguments, like lang, time zone
:param orderby: optional ``order by`` specification, for overriding the natural
sort ordering of the groups, see also :py:meth:`~osv.osv.osv.search`
(supported only for many2one fields currently)
:return: list of dictionaries (one dictionary for each record) containing:
* the values of fields grouped by the fields in ``groupby`` argument
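The docstring above documents the new orderby override (per the note, honoured only when grouping on a many2one field for now). A hedged usage sketch written as an osv method; 'res.partner' and 'company_id' are illustrative names, not taken from this commit, and only the '__domain' key is relied on, as set in the hunk below:

    def partners_grouped_by_company(self, cr, uid, context=None):
        # Group active partners by company and sort the groups with ``orderby``.
        partner_obj = self.pool.get('res.partner')
        groups = partner_obj.read_group(cr, uid,
                                        [('active', '=', True)],  # domain
                                        ['company_id'],           # fields
                                        'company_id',             # groupby
                                        orderby='company_id',
                                        context=context)
        result = {}
        for group in groups:
            # '__domain' selects the records belonging to this group
            result[group.get('company_id')] = partner_obj.search(
                cr, uid, group['__domain'], context=context)
        return result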
@ -2290,18 +2299,14 @@ class orm(orm_template):
if val == None: r[fld] = False
alldata[r['id']] = r
del r['id']
if groupby and fget[groupby]['type'] == 'many2one':
data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=groupby, context=context)
# the IDS of the records that has groupby field value = False or ''
# should be added too
data_ids += filter(lambda x:x not in data_ids, alldata.keys())
data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
# restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))
else:
data = self.read(cr, uid, alldata.keys(), groupby and [groupby] or ['id'], context=context)
if groupby:
data.sort(lambda x,y:cmp(x[groupby],y[groupby]))
data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=orderby or groupby, context=context)
# the IDS of records that have groupby field value = False or '' should be sorted too
data_ids += filter(lambda x:x not in data_ids, alldata.keys())
data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
# restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read should be small):
data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))
for d in data:
if groupby:
d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
@ -4083,6 +4088,13 @@ class orm(orm_template):
if context is None:
context = {}
# avoid recursion through already copied records in case of circular relationship
seen_map = context.setdefault('__copy_data_seen',{})
if id in seen_map.setdefault(self._name,[]):
return
seen_map[self._name].append(id)
if default is None:
default = {}
if 'state' not in default:
@ -4110,7 +4122,7 @@ class orm(orm_template):
if f in default:
data[f] = default[f]
elif ftype == 'function':
elif 'function' in fields[f]:
del data[f]
elif ftype == 'many2one':
try:
@ -4130,7 +4142,8 @@ class orm(orm_template):
# parent but then are reassigned to the correct one thanks
# to the (0, 0, ...)
d = rel.copy_data(cr, uid, rel_id, context=context)
res.append((0, 0, d))
if d:
res.append((0, 0, d))
data[f] = res
elif ftype == 'many2many':
data[f] = [(6, 0, data[f])]
@ -4147,6 +4160,15 @@ class orm(orm_template):
return data
def copy_translations(self, cr, uid, old_id, new_id, context=None):
if context is None:
context = {}
# avoid recursion through already copied records in case of circular relationship
seen_map = context.setdefault('__copy_translations_seen',{})
if old_id in seen_map.setdefault(self._name,[]):
return
seen_map[self._name].append(old_id)
trans_obj = self.pool.get('ir.translation')
fields = self.fields_get(cr, uid, context=context)
@ -4161,7 +4183,6 @@ class orm(orm_template):
old_children = sorted(old_record[field_name])
new_children = sorted(new_record[field_name])
for (old_child, new_child) in zip(old_children, new_children):
# recursive copy of translations here
target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
# and for translatable fields we keep them for copy
elif field_def.get('translate'):
@ -4197,6 +4218,9 @@ class orm(orm_template):
:return: True
"""
if context is None:
context = {}
context = context.copy()
data = self.copy_data(cr, uid, id, default, context)
new_id = self.create(cr, uid, data, context)
self.copy_translations(cr, uid, id, new_id, context)
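copy_data() and copy_translations() above both guard against infinite recursion on circular relationships by recording already-visited record ids per model in the context. A standalone sketch of that guard, independent of the ORM; copy_record and model_name are illustrative, and the seen map mirrors the '__copy_data_seen' key used in the hunk:

    def copy_record(record_id, model_name, context=None):
        """Return copied values, or None if this record was already visited."""
        if context is None:
            context = {}
        # one visited-id list per model, shared through the context dict
        seen_map = context.setdefault('__copy_data_seen', {})
        if record_id in seen_map.setdefault(model_name, []):
            return None
        seen_map[model_name].append(record_id)
        # ... build and return the copied values here (placeholder) ...
        return {'id': record_id}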

View File

@ -186,7 +186,7 @@ class report_rml(report_int):
else:
if 'logo' in self.bin_datas:
del self.bin_datas['logo']
obj = render.rml(rml, localcontext, self.bin_datas, tools.config['root_path'],title)
obj = render.rml(rml, localcontext, self.bin_datas, self._get_path(), title)
obj.render()
return obj.get()
@ -226,5 +226,10 @@ class report_rml(report_int):
obj.render()
return obj.get()
def _get_path(self):
ret = []
ret.append(self.tmpl.replace(os.path.sep, '/').rsplit('/',1)[0]) # Same dir as the report rml
ret.append(tools.config['root_path'])
return ret
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -36,9 +36,23 @@ import threading
# _render
#
class render(object):
def __init__(self, bin_datas={}, path='.'):
""" Represents a report job being rendered.
@param bin_datas a dictionary of name:<binary content> of images etc.
@param path the path in which binary files can be discovered, useful
for components (images) of the report. It can be:
- a string, relative or absolute path to images
- a list, containing strings of paths.
If a string is an absolute path, it will be opened as such, else
it will be passed to tools.file_open() which also considers zip
addons.
"""
def __init__(self, bin_datas=None, path='.'):
self.done = False
self.bin_datas = bin_datas
if bin_datas is None:
self.bin_datas = {}
else:
self.bin_datas = bin_datas
self.path = path
def _render(self):
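Besides the docstring, the __init__ change above replaces the mutable default argument bin_datas={} with None. A short illustration of why that matters: a dict default is created once and shared by every call that omits the argument. The function names are illustrative only:

    def risky(datas={}):          # single dict shared across all calls
        datas['x'] = datas.get('x', 0) + 1
        return datas

    def safe(datas=None):         # fresh dict per call unless one is passed in
        if datas is None:
            datas = {}
        datas['x'] = datas.get('x', 0) + 1
        return datas

    assert risky() == {'x': 1}
    assert risky() == {'x': 2}    # state leaked from the previous call
    assert safe() == {'x': 1}
    assert safe() == {'x': 1}     # no leakage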

View File

@ -3,6 +3,7 @@
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 P. Christeas, Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010 OpenERP SA. (http://www.openerp.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
@ -19,15 +20,22 @@
#
##############################################################################
from reportlab import rl_config
import os
import glob
import logging
import os
import platform
from reportlab import rl_config
from tools import config
"""This module allows the mapping of some system-available TTF fonts to
the reportlab engine.
This file could be customized per distro, although most Linux/Unix ones
should have the same filenames and only need the code below.
Due to an awful configuration that ships with reportlab on many Linux
and Ubuntu distros, we have to override the search path, too.
"""
CustomTTFonts = [ ('Helvetica',"DejaVu Sans", "DejaVuSans.ttf", 'normal'),
@ -42,16 +50,41 @@ CustomTTFonts = [ ('Helvetica',"DejaVu Sans", "DejaVuSans.ttf", 'normal'),
('Times-Roman',"Liberation Serif Bold", "LiberationSerif-Bold.ttf", 'bold'),
('Times-Roman',"Liberation Serif Italic", "LiberationSerif-Italic.ttf", 'italic'),
('Times-Roman',"Liberation Serif BoldItalic", "LiberationSerif-BoldItalic.ttf", 'bolditalic'),
('ZapfDingbats',"DejaVu Serif", "DejaVuSerif.ttf", 'normal'),
('ZapfDingbats',"DejaVu Serif Bold", "DejaVuSerif-Bold.ttf", 'bold'),
('ZapfDingbats',"DejaVu Serif Italic", "DejaVuSerif-Italic.ttf", 'italic'),
('ZapfDingbats',"DejaVu Serif BoldItalic", "DejaVuSerif-BoldItalic.ttf", 'bolditalic'),
('Courier',"FreeMono", "FreeMono.ttf", 'normal'),
('Courier',"FreeMono Bold", "FreeMonoBold.ttf", 'bold'),
('Courier',"FreeMono Oblique", "FreeMonoOblique.ttf", 'italic'),
('Courier',"FreeMono BoldOblique", "FreeMonoBoldOblique.ttf", 'bolditalic'),]
__foundFonts = []
TTFSearchPath_Linux = (
'/usr/share/fonts/truetype', # SuSE
'/usr/share/fonts/dejavu', '/usr/share/fonts/liberation', # Fedora, RHEL
'/usr/share/fonts/truetype/*', # Ubuntu,
'/usr/share/fonts/TTF/*', # at Mandriva/Mageia
)
TTFSearchPath_Windows = (
'c:/winnt/fonts',
'c:/windows/fonts'
)
TTFSearchPath_Darwin = (
#mac os X - from
#http://developer.apple.com/technotes/tn/tn2024.html
'~/Library/Fonts',
'/Library/Fonts',
'/Network/Library/Fonts',
'/System/Library/Fonts',
)
TTFSearchPathMap = {
'Darwin': TTFSearchPath_Darwin,
'Windows': TTFSearchPath_Windows,
'Linux': TTFSearchPath_Linux,
}
# ----- The code below is less distro-specific, please avoid editing! -------
__foundFonts = None
def FindCustomFonts():
"""Fill the __foundFonts list with those filenames, whose fonts
@ -64,22 +97,37 @@ def FindCustomFonts():
dirpath = []
log = logging.getLogger('report.fonts')
global __foundFonts
for dirname in rl_config.TTFSearchPath:
abp = os.path.abspath(dirname)
if os.path.isdir(abp):
dirpath.append(abp)
for k, (name, font, fname, mode) in enumerate(CustomTTFonts):
if fname in __foundFonts:
__foundFonts = {}
searchpath = []
if config.get('fonts_search_path'):
searchpath += map(str.strip, config.get('fonts_search_path').split(','))
local_platform = platform.system()
if local_platform in TTFSearchPathMap:
searchpath += TTFSearchPathMap[local_platform]
# Append the original search path of reportlab (at the end)
searchpath += rl_config.TTFSearchPath
# Perform the search for font files ourselves, as reportlab's
# TTFOpenFile is not very good at it.
for dirglob in searchpath:
dirglob = os.path.expanduser(dirglob)
for dirname in glob.iglob(dirglob):
abp = os.path.abspath(dirname)
if os.path.isdir(abp):
dirpath.append(abp)
for k, (name, font, filename, mode) in enumerate(CustomTTFonts):
if filename in __foundFonts:
continue
for d in dirpath:
if os.path.exists(os.path.join(d, fname)):
log.debug("Found font %s in %s as %s", fname, d, name)
__foundFonts.append(fname)
abs_filename = os.path.join(d, filename)
if os.path.exists(abs_filename):
log.debug("Found font %s at %s", filename, abs_filename)
__foundFonts[filename] = abs_filename
break
# print "Found fonts:", __foundFonts
def SetCustomFonts(rmldoc):
""" Map some font names to the corresponding TTF fonts
@ -90,11 +138,13 @@ def SetCustomFonts(rmldoc):
avoid system-wide processing (cache it, instead).
"""
global __foundFonts
if not len(__foundFonts):
if __foundFonts is None:
FindCustomFonts()
for name, font, fname, mode in CustomTTFonts:
if os.path.isabs(fname) or fname in __foundFonts:
rmldoc.setTTFontMapping(name, font, fname, mode)
for name, font, filename, mode in CustomTTFonts:
if os.path.isabs(filename) and os.path.exists(filename):
rmldoc.setTTFontMapping(name, font, filename, mode)
elif filename in __foundFonts:
rmldoc.setTTFontMapping(name, font, __foundFonts[filename], mode)
return True
#eof
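The font discovery rewrite above builds a search path from an optional config value, a per-OS table keyed by platform.system() and reportlab's own TTFSearchPath, then globs each entry and records the first absolute filename found per font file. A condensed sketch of that lookup using only the standard library; find_fonts is an illustrative helper and the path table is a small subset of the one in the hunk:

    import glob
    import os
    import platform

    SEARCH_PATHS = {
        'Linux': ['/usr/share/fonts/truetype/*', '/usr/share/fonts/TTF/*'],
        'Windows': ['c:/windows/fonts'],
        'Darwin': ['~/Library/Fonts', '/Library/Fonts'],
    }

    def find_fonts(wanted_filenames, extra_paths=()):
        """Map font file names (e.g. 'DejaVuSans.ttf') to absolute paths."""
        searchpath = list(extra_paths) + SEARCH_PATHS.get(platform.system(), [])
        dirpath = []
        for dirglob in searchpath:
            for dirname in glob.iglob(os.path.expanduser(dirglob)):
                abp = os.path.abspath(dirname)
                if os.path.isdir(abp):
                    dirpath.append(abp)
        found = {}
        for filename in wanted_filenames:
            for d in dirpath:
                candidate = os.path.join(d, filename)
                if os.path.exists(candidate):
                    found[filename] = candidate
                    break
        return found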

View File

@ -35,6 +35,8 @@ import base64
from reportlab.platypus.doctemplate import ActionFlowable
from tools.safe_eval import safe_eval as eval
from reportlab.lib.units import inch,cm,mm
from tools.misc import file_open
from reportlab.pdfbase import pdfmetrics
try:
from cStringIO import StringIO
@ -44,6 +46,26 @@ except ImportError:
encoding = 'utf-8'
def _open_image(filename, path=None):
"""Attempt to open a binary file and return the descriptor
"""
if os.path.isfile(filename):
return open(filename, 'rb')
for p in (path or []):
if p and os.path.isabs(p):
fullpath = os.path.join(p, filename)
if os.path.isfile(fullpath):
return open(fullpath, 'rb')
try:
if p:
fullpath = os.path.join(p, filename)
else:
fullpath = filename
return file_open(fullpath)
except IOError:
pass
raise IOError("File %s cannot be found in image path" % filename)
class NumberedCanvas(canvas.Canvas):
def __init__(self, *args, **kwargs):
canvas.Canvas.__init__(self, *args, **kwargs)
@ -255,23 +277,24 @@ class _rml_doc(object):
def _images(self, el):
result = {}
for node in el.findall('image'):
for node in el.findall('.//image'):
rc =( node.text or '')
result[node.get('name')] = base64.decodestring(rc)
return result
def render(self, out):
el = self.etree.findall('docinit')
el = self.etree.findall('.//docinit')
if el:
self.docinit(el)
el = self.etree.findall('stylesheet')
el = self.etree.findall('.//stylesheet')
self.styles = _rml_styles(el,self.localcontext)
el = self.etree.findall('images')
el = self.etree.findall('.//images')
if el:
self.images.update( self._images(el[0]) )
el = self.etree.findall('template')
el = self.etree.findall('.//template')
if len(el):
pt_obj = _rml_template(self.localcontext, out, el[0], self, images=self.images, path=self.path, title=self.title)
el = utils._child_get(self.etree, self, 'story')
@ -295,6 +318,7 @@ class _rml_canvas(object):
self.images = images
self.path = path
self.title = title
self._logger = logging.getLogger('report.rml.canvas')
if self.title:
self.canvas.setTitle(self.title)
@ -412,10 +436,13 @@ class _rml_canvas(object):
def _image(self, node):
import urllib
import urlparse
from reportlab.lib.utils import ImageReader
if not node.get('file') :
nfile = node.get('file')
if not nfile:
if node.get('name'):
image_data = self.images[node.get('name')]
self._logger.debug("Image %s used", node.get('name'))
s = StringIO(image_data)
else:
if self.localcontext:
@ -429,21 +456,30 @@ class _rml_canvas(object):
if image_data:
s = StringIO(image_data)
else:
self._logger.debug("No image data!")
return False
else:
if node.get('file') in self.images:
s = StringIO(self.images[node.get('file')])
if nfile in self.images:
s = StringIO(self.images[nfile])
else:
try:
u = urllib.urlopen(str(node.get('file')))
s = StringIO(u.read())
except Exception:
u = file(os.path.join(self.path,str(node.get('file'))), 'rb')
up = urlparse.urlparse(str(nfile))
except ValueError:
up = False
if up and up.scheme:
# RFC: do we really want to open external URLs?
# Are we safe from cross-site scripting or attacks?
self._logger.debug("Retrieve image from %s", nfile)
u = urllib.urlopen(str(nfile))
s = StringIO(u.read())
else:
self._logger.debug("Open image file %s ", nfile)
s = _open_image(nfile, path=self.path)
img = ImageReader(s)
(sx,sy) = img.getSize()
self._logger.debug("Image is %dx%d", sx, sy)
args = {}
args = { 'x': 0.0, 'y': 0.0 }
for tag in ('width','height','x','y'):
if node.get(tag):
args[tag] = utils.unit_get(node.get(tag))
@ -485,11 +521,18 @@ class _rml_canvas(object):
self.canvas.drawPath(self.path, **utils.attr_get(node, [], {'fill':'bool','stroke':'bool'}))
def setFont(self, node):
fname = node.get('name')
try:
return self.canvas.setFont(fname, utils.unit_get(node.get('size')))
except KeyError, e:
raise KeyError('Font "%s" is not registered in the engine' % fname)
fontname = node.get('name')
if fontname not in pdfmetrics.getRegisteredFontNames()\
or fontname not in pdfmetrics.standardFonts:
# let reportlab attempt to find it
try:
pdfmetrics.getFont(fontname)
except Exception:
logging.getLogger('report.fonts').debug('Could not locate font %s, substituting default: %s',
fontname,
self.canvas._fontname)
fontname = self.canvas._fontname
return self.canvas.setFont(fontname, utils.unit_get(node.get('size')))
def render(self, node):
tags = {
@ -532,14 +575,29 @@ class _rml_draw(object):
cnv.render(self.node)
canvas.restoreState()
class _rml_Illustration(platypus.flowables.Flowable):
def __init__(self, node, localcontext, styles, self2):
self.localcontext = (localcontext or {}).copy()
self.node = node
self.styles = styles
self.width = utils.unit_get(node.get('width'))
self.height = utils.unit_get(node.get('height'))
self.self2 = self2
def wrap(self, *args):
return (self.width, self.height)
def draw(self):
drw = _rml_draw(self.localcontext ,self.node,self.styles, images=self.self2.images, path=self.self2.path, title=self.self2.title)
drw.render(self.canv, None)
class _rml_flowable(object):
def __init__(self, doc, localcontext, images={}, path='.', title=None):
def __init__(self, doc, localcontext, images=None, path='.', title=None):
self.localcontext = localcontext
self.doc = doc
self.styles = doc.styles
self.images = images
self.images = images or {}
self.path = path
self.title = title
self._logger = logging.getLogger('report.rml.flowable')
def _textual(self, node):
rc1 = utils._process_text(self, node.text or '')
@ -630,21 +688,7 @@ class _rml_flowable(object):
return table
def _illustration(self, node):
class Illustration(platypus.flowables.Flowable):
def __init__(self, node, localcontext, styles, self2):
self.localcontext = (localcontext or {}).copy()
self.node = node
self.styles = styles
self.width = utils.unit_get(node.get('width'))
self.height = utils.unit_get(node.get('height'))
self.self2 = self2
def wrap(self, *args):
return (self.width, self.height)
def draw(self):
canvas = self.canv
drw = _rml_draw(self.localcontext ,self.node,self.styles, images=self.self2.images, path=self.self2.path, title=self.self2.title)
drw.render(self.canv, None)
return Illustration(node, self.localcontext, self.styles, self)
return _rml_Illustration(node, self.localcontext, self.styles, self)
def _textual_image(self, node):
return base64.decodestring(node.text)
@ -690,6 +734,7 @@ class _rml_flowable(object):
from reportlab.graphics.barcode import fourstate
from reportlab.graphics.barcode import usps
except Exception, e:
self._logger.warning("Cannot use barcode renderers:", exc_info=True)
return None
args = utils.attr_get(node, [], {'ratio':'float','xdim':'unit','height':'unit','checksum':'int','quiet':'int','width':'unit','stop':'bool','bearers':'int','barWidth':'float','barHeight':'float'})
codes = {
@ -731,38 +776,29 @@ class _rml_flowable(object):
style = styles['Heading'+str(node.tag[1:])]
return platypus.Paragraph(self._textual(node), style, **(utils.attr_get(node, [], {'bulletText':'str'})))
elif node.tag=='image':
image_data = False
if not node.get('file'):
if node.get('name'):
image_data = self.doc.images[node.get('name')].read()
if node.get('name') in self.doc.images:
self._logger.debug("Image %s read ", node.get('name'))
image_data = self.doc.images[node.get('name')].read()
else:
self._logger.warning("Image %s not defined", node.get('name'))
return False
else:
import base64
if self.localcontext:
newtext = utils._process_text(self, node.text or '')
node.text = newtext
image_data = base64.decodestring(node.text)
if not image_data: return False
if not image_data:
self._logger.debug("No inline image data")
return False
image = StringIO(image_data)
return platypus.Image(image, mask=(250,255,250,255,250,255), **(utils.attr_get(node, ['width','height'])))
else:
return platypus.Image(node.get('file'), mask=(250,255,250,255,250,255), **(utils.attr_get(node, ['width','height'])))
from reportlab.lib.utils import ImageReader
name = str(node.get('file'))
img = ImageReader(name)
(sx,sy) = img.getSize()
args = {}
for tag in ('width','height'):
if node.get(tag):
args[tag] = utils.unit_get(node.get(tag))
if ('width' in args) and (not 'height' in args):
args['height'] = sy * args['width'] / sx
elif ('height' in args) and (not 'width' in args):
args['width'] = sx * args['height'] / sy
elif ('width' in args) and ('height' in args):
if (float(args['width'])/args['height'])>(float(sx)>sy):
args['width'] = sx * args['height'] / sy
else:
args['height'] = sy * args['width'] / sx
return platypus.Image(name, mask=(250,255,250,255,250,255), **args)
self._logger.debug("Image get from file %s", node.get('file'))
image = _open_image(node.get('file'), path=self.doc.path)
return platypus.Image(image, mask=(250,255,250,255,250,255), **(utils.attr_get(node, ['width','height'])))
elif node.tag=='spacer':
if node.get('width'):
width = utils.unit_get(node.get('width'))
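The _open_image() helper added at the top of this file resolves an image reference by trying it as a direct path, then joining it with each entry of the report's path list, and finally falling back to tools.file_open() so images inside zipped addons still resolve. A simplified sketch of that fallback chain; open_image is illustrative and deliberately drops the addon-aware file_open() step, which is specific to the OpenERP tree:

    import os

    def open_image(filename, path=None):
        """Return a binary file object for the first location that matches."""
        if os.path.isfile(filename):
            return open(filename, 'rb')
        for p in (path or []):
            candidate = os.path.join(p, filename) if p else filename
            if os.path.isfile(candidate):
                return open(candidate, 'rb')
        raise IOError("File %s cannot be found in image path" % filename)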

View File

@ -34,6 +34,7 @@ import tools
import zipfile
import common
from osv.fields import float as float_class, function as function_class
from osv.orm import browse_record
DT_FORMAT = '%Y-%m-%d'
DHM_FORMAT = '%Y-%m-%d %H:%M:%S'
@ -153,25 +154,24 @@ class rml_parse(object):
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
self.localcontext = {
'user': user,
'company': user.company_id,
'setCompany': self.setCompany,
'repeatIn': self.repeatIn,
'setLang': self.setLang,
'setTag': self.setTag,
'removeParentNode': self.removeParentNode,
'format': self.format,
'formatLang': self.formatLang,
'logo' : user.company_id.logo,
'lang' : user.company_id.partner_id.lang,
'translate' : self._translate,
'setHtmlImage' : self.set_html_image,
'strip_name' : self._strip_name,
'time' : time
'time' : time,
# more context members are set up in setCompany() below:
# - company_id
# - logo
}
self.setCompany(user.company_id)
self.localcontext.update(context)
self.rml_header = user.company_id.rml_header
self.rml_header2 = user.company_id.rml_header2
self.rml_header3 = user.company_id.rml_header3
self.logo = user.company_id.logo
self.name = name
self._node = None
self.parents = parents
@ -190,6 +190,15 @@ class rml_parse(object):
return char
return char[:size-len(truncation_str)] + truncation_str
def setCompany(self, company_id):
if company_id:
self.localcontext['company'] = company_id
self.localcontext['logo'] = company_id.logo
self.rml_header = company_id.rml_header
self.rml_header2 = company_id.rml_header2
self.rml_header3 = company_id.rml_header3
self.logo = company_id.logo
def _strip_name(self, name, maxlen=50):
return self._ellipsis(name, maxlen)
@ -357,6 +366,13 @@ class rml_parse(object):
else:
self.localcontext.update({'name_space' :common.sxw_namespace})
if objects and len(objects) == 1 and \
'company_id' in objects[0] and objects[0].company_id:
# When we print only one record, we can auto-set the correct
# company in the localcontext. For other cases the report
# will have to call setCompany() inside the main repeatIn loop.
self.setCompany(objects[0].company_id)
class report_sxw(report_rml, preprocess.report):
def __init__(self, name, table, rml=False, parser=rml_parse, header='external', store=False):
report_rml.__init__(self, name, table, rml, '')

View File

@ -297,7 +297,11 @@ class ConnectionPool(object):
# note: this code is called only if the for loop has completed (no break)
raise PoolError('The Connection Pool Is Full')
result = psycopg2.connect(dsn=dsn, connection_factory=PsycoConnection)
try:
result = psycopg2.connect(dsn=dsn, connection_factory=PsycoConnection)
except psycopg2.Error, e:
self.__logger.exception('Connection to the database failed')
raise
self._connections.append((result, True))
self._debug('Create new connection')
return result

View File

@ -62,7 +62,7 @@ class ConvertError(Exception):
return 'Exception:\n\t%s\nUsing file:\n%s' % (self.orig, self.d)
def _ref(self, cr):
return lambda x: self.id_get(cr, False, x)
return lambda x: self.id_get(cr, x)
def _obj(pool, cr, uid, model_str, context=None):
model = pool.get(model_str)
@ -139,7 +139,7 @@ def _eval_xml(self, node, pool, cr, uid, idref, context=None):
m = re.findall('[^%]%\((.*?)\)[ds]', s)
for id in m:
if not id in idref:
idref[id]=self.id_get(cr, False, id)
idref[id]=self.id_get(cr, id)
return s % idref
_fix_multiple_roots(node)
return '<?xml version="1.0"?>\n'\
@ -177,7 +177,7 @@ def _eval_xml(self, node, pool, cr, uid, idref, context=None):
args = []
a_eval = node.get('eval','')
if a_eval:
idref['ref'] = lambda x: self.id_get(cr, False, x)
idref['ref'] = lambda x: self.id_get(cr, x)
args = unsafe_eval(a_eval, idref)
for n in node:
return_val = _eval_xml(self,n, pool, cr, uid, idref, context)
@ -262,7 +262,7 @@ class xml_import(object):
def get_uid(self, cr, uid, data_node, node):
node_uid = node.get('uid','') or (len(data_node) and data_node.get('uid',''))
if node_uid:
return self.id_get(cr, None, node_uid)
return self.id_get(cr, node_uid)
return uid
def _test_xml_id(self, xml_id):
@ -290,7 +290,7 @@ form: module.record_id""" % (xml_id,)
ids = self.pool.get(d_model).search(cr, self.uid, unsafe_eval(d_search, idref))
if d_id:
try:
ids.append(self.id_get(cr, d_model, d_id))
ids.append(self.id_get(cr, d_id))
except:
# d_id cannot be found. doesn't matter in this case
pass
@ -334,10 +334,10 @@ form: module.record_id""" % (xml_id,)
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, 'res.groups', group[1:])
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, 'res.groups', group)
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
res['groups_id'] = groups_value
@ -377,10 +377,10 @@ form: module.record_id""" % (xml_id,)
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, 'res.groups', group[1:])
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, 'res.groups', group)
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
res['groups_id'] = groups_value
@ -425,8 +425,8 @@ form: module.record_id""" % (xml_id,)
self._test_xml_id(xml_id)
type = rec.get('type','').encode('utf-8') or 'ir.actions.act_window'
view_id = False
if rec.get('view'):
view_id = self.id_get(cr, 'ir.actions.act_window', rec.get('view','').encode('utf-8'))
if rec.get('view_id'):
view_id = self.id_get(cr, rec.get('view_id','').encode('utf-8'))
domain = rec.get('domain','').encode('utf-8') or '{}'
res_model = rec.get('res_model','').encode('utf-8')
src_model = rec.get('src_model','').encode('utf-8')
@ -438,7 +438,7 @@ form: module.record_id""" % (xml_id,)
uid = self.uid
active_id = str("active_id") # for further reference in client/bin/tools/__init__.py
def ref(str_id):
return self.id_get(cr, None, str_id)
return self.id_get(cr, str_id)
# Include all locals() in eval_context, for backwards compatibility
eval_context = {
@ -489,10 +489,10 @@ form: module.record_id""" % (xml_id,)
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, 'res.groups', group[1:])
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, 'res.groups', group)
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
res['groups_id'] = groups_value
@ -527,7 +527,7 @@ form: module.record_id""" % (xml_id,)
model = str(rec.get('model',''))
w_ref = rec.get('ref','')
if w_ref:
id = self.id_get(cr, model, w_ref)
id = self.id_get(cr, w_ref)
else:
number_children = len(rec)
assert number_children > 0,\
@ -579,7 +579,7 @@ form: module.record_id""" % (xml_id,)
# The parent attribute was specified, if non-empty determine its ID, otherwise
# explicitly make a top-level menu
if rec.get('parent'):
menu_parent_id = self.id_get(cr, 'ir.ui.menu', rec.get('parent',''))
menu_parent_id = self.id_get(cr, rec.get('parent',''))
else:
# we get here with <menuitem parent="">, explicit clear of parent, or
# if no parent attribute at all but menu name is not a menu path
@ -588,7 +588,7 @@ form: module.record_id""" % (xml_id,)
if rec.get('name'):
values['name'] = rec.get('name')
try:
res = [ self.id_get(cr, 'ir.ui.menu', rec.get('id','')) ]
res = [ self.id_get(cr, rec.get('id','')) ]
except:
res = None
@ -603,7 +603,7 @@ form: module.record_id""" % (xml_id,)
}
values['icon'] = icons.get(a_type,'STOCK_NEW')
if a_type=='act_window':
a_id = self.id_get(cr, 'ir.actions.%s'% a_type, a_action)
a_id = self.id_get(cr, a_action)
cr.execute('select view_type,view_mode,name,view_id,target from ir_act_window where id=%s', (int(a_id),))
rrres = cr.fetchone()
assert rrres, "No window action defined for this id %s !\n" \
@ -628,7 +628,7 @@ form: module.record_id""" % (xml_id,)
if not values.get('name', False):
values['name'] = action_name
elif a_type=='wizard':
a_id = self.id_get(cr, 'ir.actions.%s'% a_type, a_action)
a_id = self.id_get(cr, a_action)
cr.execute('select name from ir_act_wizard where id=%s', (int(a_id),))
resw = cr.fetchone()
if (not values.get('name', False)) and resw:
@ -647,10 +647,10 @@ form: module.record_id""" % (xml_id,)
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, 'res.groups', group[1:])
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, 'res.groups', group)
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
values['groups_id'] = groups_value
@ -664,7 +664,7 @@ form: module.record_id""" % (xml_id,)
if rec.get('action') and pid:
a_action = rec.get('action').encode('utf8')
a_type = rec.get('type','').encode('utf8') or 'act_window'
a_id = self.id_get(cr, 'ir.actions.%s' % a_type, a_action)
a_id = self.id_get(cr, a_action)
action = "ir.actions.%s,%d" % (a_type, a_id)
self.pool.get('ir.model.data').ir_set(cr, self.uid, 'action', 'tree_but_open', 'Menuitem', [('ir.ui.menu', int(pid))], action, True, True, xml_id=rec_id)
return ('ir.ui.menu', pid)
@ -692,7 +692,7 @@ form: module.record_id""" % (xml_id,)
context = self.get_context(data_node, rec, eval_dict)
uid = self.get_uid(cr, self.uid, data_node, rec)
if rec_id:
ids = [self.id_get(cr, rec_model, rec_id)]
ids = [self.id_get(cr, rec_id)]
elif rec_src:
q = unsafe_eval(rec_src, eval_dict)
ids = self.pool.get(rec_model).search(cr, uid, q, context=context)
@ -814,10 +814,10 @@ form: module.record_id""" % (xml_id,)
else:
if f_name in model._columns \
and model._columns[f_name]._type == 'reference':
val = self.model_id_get(cr, f_model, f_ref)
val = self.model_id_get(cr, f_ref)
f_val = val[0] + ',' + str(val[1])
else:
f_val = self.id_get(cr, f_model, f_ref)
f_val = self.id_get(cr, f_ref)
else:
f_val = _eval_xml(self,field, self.pool, cr, self.uid, self.idref)
if model._columns.has_key(f_name):
@ -832,23 +832,19 @@ form: module.record_id""" % (xml_id,)
cr.commit()
return rec_model, id
def id_get(self, cr, model, id_str):
def id_get(self, cr, id_str):
if id_str in self.idref:
return self.idref[id_str]
res = self.model_id_get(cr, model, id_str)
res = self.model_id_get(cr, id_str)
if res and len(res)>1: res = res[1]
return res
def model_id_get(self, cr, model, id_str):
def model_id_get(self, cr, id_str):
model_data_obj = self.pool.get('ir.model.data')
mod = self.module
if '.' in id_str:
mod,id_str = id_str.split('.')
result = model_data_obj._get_id(cr, self.uid, mod, id_str)
res = model_data_obj.read(cr, self.uid, [result], ['model', 'res_id'])
if res and res[0] and res[0]['res_id']:
return res[0]['model'], int(res[0]['res_id'])
return False
return model_data_obj.get_object_reference(cr, self.uid, mod, id_str)
def parse(self, de):
if not de.tag in ['terp', 'openerp']:
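
After this refactoring, model_id_get no longer needs the model name: it splits an optional "module." prefix off the external id and defers to ir.model.data.get_object_reference, which returns a (model, res_id) pair. A hedged standalone sketch of that split-and-delegate logic, with a lookup table standing in for the ORM call:

    def model_id_get(id_str, current_module, get_object_reference):
        # "module.xml_id" overrides the current module; a bare "xml_id"
        # is resolved against the module being loaded, as in convert.py above.
        module = current_module
        if '.' in id_str:
            module, id_str = id_str.split('.')
        return get_object_reference(module, id_str)

    # Stub standing in for ir.model.data.get_object_reference:
    refs = {('base', 'main_company'): ('res.company', 1)}
    print(model_id_get('base.main_company', 'my_module', lambda m, x: refs[(m, x)]))
    # -> ('res.company', 1)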

View File

@ -76,20 +76,19 @@ def init_db(cr):
p_id = None
while categs:
if p_id is not None:
cr.execute('select id \
from ir_module_category \
where name=%s and parent_id=%s', (categs[0], p_id))
cr.execute('SELECT id \
FROM ir_module_category \
WHERE name=%s AND parent_id=%s', (categs[0], p_id))
else:
cr.execute('select id \
from ir_module_category \
where name=%s and parent_id is NULL', (categs[0],))
cr.execute('SELECT id \
FROM ir_module_category \
WHERE name=%s AND parent_id IS NULL', (categs[0],))
c_id = cr.fetchone()
if not c_id:
cr.execute('select nextval(\'ir_module_category_id_seq\')')
cr.execute('INSERT INTO ir_module_category \
(name, parent_id) \
VALUES (%s, %s) RETURNING id', (categs[0], p_id))
c_id = cr.fetchone()[0]
cr.execute('insert into ir_module_category \
(id, name, parent_id) \
values (%s, %s, %s)', (c_id, categs[0], p_id))
else:
c_id = c_id[0]
p_id = c_id
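
The category bootstrap now lets PostgreSQL allocate the primary key and hand it back in the same statement via RETURNING, instead of calling nextval() first and inserting the id explicitly. A minimal sketch of the pattern with a DB-API cursor on PostgreSQL (table and columns as in the hunk above):

    def insert_category(cr, name, parent_id):
        # One round trip: insert the row and fetch the generated id.
        cr.execute("INSERT INTO ir_module_category (name, parent_id) "
                   "VALUES (%s, %s) RETURNING id", (name, parent_id))
        return cr.fetchone()[0]
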
@ -104,23 +103,23 @@ def init_db(cr):
state = 'uninstalled'
else:
state = 'uninstallable'
cr.execute('select nextval(\'ir_module_module_id_seq\')')
id = cr.fetchone()[0]
cr.execute('insert into ir_module_module \
(id, author, website, name, shortdesc, description, \
category_id, state, certificate, web) \
values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', (
id, info.get('author', ''),
cr.execute('INSERT INTO ir_module_module \
(author, website, name, shortdesc, description, \
category_id, state, certificate, web, license) \
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING id', (
info.get('author', ''),
info.get('website', ''), i, info.get('name', False),
info.get('description', ''), p_id, state, info.get('certificate') or None,
info.get('web') or False))
cr.execute('insert into ir_model_data \
(name,model,module, res_id, noupdate) values (%s,%s,%s,%s,%s)', (
info.get('web') or False,
info.get('license') or 'AGPL-3'))
id = cr.fetchone()[0]
cr.execute('INSERT INTO ir_model_data \
(name,model,module, res_id, noupdate) VALUES (%s,%s,%s,%s,%s)', (
'module_meta_information', 'ir.module.module', i, id, True))
dependencies = info.get('depends', [])
for d in dependencies:
cr.execute('insert into ir_module_module_dependency \
(module_id,name) values (%s, %s)', (id, d))
cr.execute('INSERT INTO ir_module_module_dependency \
(module_id,name) VALUES (%s, %s)', (id, d))
cr.commit()
def find_in_path(name):
@ -142,15 +141,15 @@ def exec_pg_command(name, *args):
prog = find_pg_tool(name)
if not prog:
raise Exception('Couldn\'t find %s' % name)
args2 = (os.path.basename(prog),) + args
args2 = (prog,) + args
return subprocess.call(args2, executable=prog)
return subprocess.call(args2)
def exec_pg_command_pipe(name, *args):
prog = find_pg_tool(name)
if not prog:
raise Exception('Couldn\'t find %s' % name)
pop = subprocess.Popen(args, executable=prog, shell=True, bufsize= -1,
pop = subprocess.Popen((prog,) + args, bufsize= -1,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
return (pop.stdin, pop.stdout)
@ -158,7 +157,7 @@ def exec_command_pipe(name, *args):
prog = find_in_path(name)
if not prog:
raise Exception('Couldn\'t find %s' % name)
pop = subprocess.Popen(args, executable=prog, shell=True, bufsize= -1,
pop = subprocess.Popen((prog,) + args, bufsize= -1,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
return (pop.stdin, pop.stdout)
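
Both helpers now build the argv tuple explicitly, pass the resolved program path as argv[0], and drop shell=True, so arguments are no longer re-parsed by a shell. A standalone sketch of the corrected invocation pattern (pg_dump is only an example binary):

    import subprocess

    def exec_pipe(prog, *args):
        # No shell: the argument tuple is passed to the program verbatim.
        pop = subprocess.Popen((prog,) + args, bufsize=-1,
                               stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                               close_fds=True)
        return pop.stdin, pop.stdout

    # e.g. stdin, stdout = exec_pipe('/usr/bin/pg_dump', '--no-owner', 'mydb')
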
@ -913,6 +912,8 @@ def get_iso_codes(lang):
return lang
def get_languages():
# The codes below are those from Launchpad's Rosetta, with the exception
# of some trivial codes where the Launchpad code is xx and we have xx_XX.
languages={
'ab_RU': u'Abkhazian / аҧсуа',
'ar_AR': u'Arabic / الْعَرَبيّة',
@ -979,7 +980,8 @@ def get_languages():
'sl_SI': u'Slovenian / slovenščina',
'sk_SK': u'Slovak / Slovenský jazyk',
'sq_AL': u'Albanian / Shqip',
'sr_RS': u'Serbian / српски језик',
'sr_RS': u'Serbian (Cyrillic) / српски',
'sr@latin': u'Serbian (Latin) / srpski',
'sv_SE': u'Swedish / svenska',
'te_IN': u'Telugu / తెలుగు',
'tr_TR': u'Turkish / Türkçe',

View File

@ -167,7 +167,7 @@ class GettextAlias(object):
cr = pooler.get_db(dbs[0]).cursor()
is_new_cr = True
return cr, is_new_cr
def _get_lang(self, frame):
lang = frame.f_locals.get('context', {}).get('lang', False)
if not lang:
@ -179,7 +179,7 @@ class GettextAlias(object):
c = getattr(s, 'localcontext', {})
lang = c.get('lang', False)
return lang
def __call__(self, source):
is_new_cr = False
res = source
@ -469,6 +469,8 @@ def trans_parse_view(de):
res.append(de.get('string').encode("utf8"))
if de.get("sum"):
res.append(de.get('sum').encode("utf8"))
if de.get("confirm"):
res.append(de.get('confirm').encode("utf8"))
for n in de:
res.extend(trans_parse_view(n))
return res
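
trans_parse_view walks a view's XML recursively and collects every attribute exported for translation; this commit adds "confirm" alongside "string" and "sum". A standalone sketch of the same walk using lxml (the attribute tuple below only covers the three attributes visible in this hunk):

    from lxml import etree

    TRANSLATED_ATTRS = ('string', 'sum', 'confirm')

    def collect_translatable(node):
        # Depth-first walk, keeping the translatable attributes of each node.
        res = [node.get(attr) for attr in TRANSLATED_ATTRS if node.get(attr)]
        for child in node:
            res.extend(collect_translatable(child))
        return res

    view = etree.fromstring('<form string="Partner">'
                            '<button string="Validate" confirm="Are you sure?"/>'
                            '</form>')
    print(collect_translatable(view))
    # -> ['Partner', 'Validate', 'Are you sure?']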

View File

@ -34,7 +34,8 @@ def is_comment(node):
return isinstance(node, types.StringTypes)
def is_assert(node):
return _is_yaml_mapping(node, yaml_tag.Assert)
return isinstance(node, yaml_tag.Assert) \
or _is_yaml_mapping(node, yaml_tag.Assert)
def is_record(node):
return _is_yaml_mapping(node, yaml_tag.Record)
@ -218,19 +219,22 @@ class YamlInterpreter(object):
elif assertion.search:
q = eval(assertion.search, self.eval_context)
ids = self.pool.get(assertion.model).search(self.cr, self.uid, q, context=assertion.context)
if not ids:
else:
raise YamlImportException('Nothing to assert: you must give either an id or a search criteria.')
return ids
def process_assert(self, node):
assertion, expressions = node.items()[0]
if isinstance(node, dict):
assertion, expressions = node.items()[0]
else:
assertion, expressions = node, []
if self.isnoupdate(assertion) and self.mode != 'init':
self.logger.warn('This assertion was not evaluated ("%s").' % assertion.string)
return
model = self.get_model(assertion.model)
ids = self._get_assertion_id(assertion)
if assertion.count and len(ids) != assertion.count:
if assertion.count is not None and len(ids) != assertion.count:
msg = 'assertion "%s" failed!\n' \
' Incorrect search count:\n' \
' expected count: %d\n' \
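
Comparing assertion.count with "is not None" instead of plain truthiness lets a test assert that a search matches exactly zero records, because count: 0 is falsy and was previously skipped. A small standalone illustration of the difference:

    def check_count(expected_count, found_ids):
        # Distinguish "no count constraint" (None) from "expect zero matches" (0),
        # which is what the `is not None` change above restores.
        if expected_count is not None and len(found_ids) != expected_count:
            raise AssertionError('expected %d records, found %d'
                                 % (expected_count, len(found_ids)))

    check_count(0, [])         # passes: zero matches were expected and found
    check_count(None, [1, 2])  # passes: no count constraint was given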

View File

@ -16,7 +16,7 @@ class YamlTag(object):
return "<%s %s>" % (self.__class__.__name__, sorted(self.__dict__.items()))
class Assert(YamlTag):
def __init__(self, model, id, severity=logging.WARNING, string="NONAME", **kwargs):
def __init__(self, model, id=None, severity=logging.WARNING, string="NONAME", **kwargs):
self.model = model
self.id = id
self.severity = severity

debian/po/ro.po
View File

@ -8,13 +8,13 @@ msgstr ""
"Project-Id-Version: openobject-server\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2009-08-24 22:41+0300\n"
"PO-Revision-Date: 2010-09-27 23:36+0000\n"
"Last-Translator: Valentin Caragea <carageav@gmail.com>\n"
"PO-Revision-Date: 2010-12-07 07:37+0000\n"
"Last-Translator: OpenERP Administrators <Unknown>\n"
"Language-Team: Romanian <ro@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2010-09-29 04:47+0000\n"
"X-Launchpad-Export-Date: 2010-12-08 04:48+0000\n"
"X-Generator: Launchpad (build Unknown)\n"
#. Type: string
@ -31,12 +31,11 @@ msgid ""
"the system's security is not compromised by running it with superuser "
"privileges."
msgstr ""
"Serverul Open ERP trebuie sa utilizeze un cont dedicat operatiilor sale "
"astfel incat sa nu fie compromisa securitatea sistemului prin rularea cu "
"privilegii de root"
"Serverul Open ERP trebuie sa utilizeze un cont dedicat, deoarece din motive "
"de securitate este contraindicat să ruleze cu privilegii de superuser."
#. Type: string
#. Description
#: ../openerp-server.templates:1001
msgid "Please choose that account's username."
msgstr "Alege numele utilizatorului (username)"
msgstr "Alegeţi numele utilizatorului (username)"