[MERGE] many improvements regarding translations, logging, reports, http, speed, ...

bzr revid: odo@openerp.com-20101125175328-pmu07gj694r5n2es
This commit is contained in:
P. Christeas, O. Dony 2010-11-25 18:53:28 +01:00 committed by Olivier Dony
commit c14af86177
36 changed files with 674 additions and 330 deletions

View File

@ -857,7 +857,7 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
cr.execute("""select model,name from ir_model where id NOT IN (select distinct model_id from ir_model_access)""")
for (model, name) in cr.fetchall():
model_obj = pool.get(model)
if not isinstance(model_obj, osv.osv.osv_memory):
if model_obj and not isinstance(model_obj, osv.osv.osv_memory):
logger.notifyChannel('init', netsvc.LOG_WARNING, 'object %s (%s) has no access rules!' % (model, name))
# Temporary warning while we remove access rights on osv_memory objects, as they have
@ -873,6 +873,8 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
obj = pool.get(model)
if obj:
obj._check_removed_columns(cr, log=True)
else:
logger.notifyChannel('init', netsvc.LOG_WARNING, "Model %s is referenced but not present in the orm pool!" % model)
if report.get_report():
logger.notifyChannel('init', netsvc.LOG_INFO, report)

View File

@ -23,7 +23,7 @@
{
'name': 'Base',
'version': '1.2',
'version': '1.3',
'category': 'Generic Modules/Base',
'description': """The kernel of OpenERP, needed for all installation.""",
'author': 'OpenERP SA',

View File

@ -989,7 +989,7 @@
<field name="name">Zambia</field>
<field name="code">zm</field>
</record>
<!-- DEPRECATED, News name of Zaire is Democratic Republic of the Congo ! -->
<!-- DEPRECATED, New name of Zaire is Democratic Republic of the Congo ! -->
<record id="zr" model="res.country">
<field name="name">Zaire</field>
<field name="code">zr</field>

View File

@ -26,6 +26,7 @@ import time
from tools.config import config
from tools.translate import _
import netsvc
import logging
import re
import copy
import os
@ -144,18 +145,22 @@ class act_window(osv.osv):
_table = 'ir_act_window'
_sequence = 'ir_actions_id_seq'
def _check_model(self, cr, uid, ids, context={}):
def _check_model(self, cr, uid, ids, context=None):
for action in self.browse(cr, uid, ids, context):
if not self.pool.get(action.res_model):
return False
if action.src_model and not self.pool.get(action.src_model):
return False
return True
def _invalid_model_msg(self, cr, uid, ids, context=None):
return _('Invalid model name in the action definition.')
_constraints = [
(_check_model, 'Invalid model name in the action definition.', ['res_model','src_model'])
(_check_model, _invalid_model_msg, ['res_model','src_model'])
]
def _views_get_fnc(self, cr, uid, ids, name, arg, context={}):
def _views_get_fnc(self, cr, uid, ids, name, arg, context=None):
res={}
for act in self.browse(cr, uid, ids):
res[act.id]=[(view.view_id.id, view.view_mode) for view in act.view_ids]
@ -171,30 +176,34 @@ class act_window(osv.osv):
res[act.id].append((False, t))
return res
def _search_view(self, cr, uid, ids, name, arg, context={}):
def _search_view(self, cr, uid, ids, name, arg, context=None):
res = {}
def encode(s):
if isinstance(s, unicode):
return s.encode('utf8')
return s
for act in self.browse(cr, uid, ids):
for act in self.browse(cr, uid, ids, context=context):
fields_from_fields_get = self.pool.get(act.res_model).fields_get(cr, uid, context=context)
search_view_id = False
if act.search_view_id:
search_view_id = act.search_view_id.id
else:
res_view = self.pool.get('ir.ui.view').search(cr, uid, [('model','=',act.res_model),('type','=','search'),('inherit_id','=',False)])
res_view = self.pool.get('ir.ui.view').search(cr, uid,
[('model','=',act.res_model),('type','=','search'),
('inherit_id','=',False)], context=context)
if res_view:
search_view_id = res_view[0]
if search_view_id:
field_get = self.pool.get(act.res_model).fields_view_get(cr, uid, search_view_id, 'search', context)
field_get = self.pool.get(act.res_model).fields_view_get(cr, uid, search_view_id,
'search', context)
fields_from_fields_get.update(field_get['fields'])
field_get['fields'] = fields_from_fields_get
res[act.id] = str(field_get)
else:
def process_child(node, new_node, doc):
for child in node.childNodes:
if child.localName=='field' and child.hasAttribute('select') and child.getAttribute('select')=='1':
if child.localName=='field' and child.hasAttribute('select') \
and child.getAttribute('select')=='1':
if child.childNodes:
fld = doc.createElement('field')
for attr in child.attributes.keys():
@ -218,7 +227,7 @@ class act_window(osv.osv):
res[act.id] = str(form_arch)
return res
def _get_help_status(self, cr, uid, ids, name, arg, context={}):
def _get_help_status(self, cr, uid, ids, name, arg, context=None):
activate_tips = self.pool.get('res.users').browse(cr, uid, uid).menu_tips
return dict([(id, activate_tips) for id in ids])
@ -349,7 +358,7 @@ class act_url(osv.osv):
}
act_url()
def model_get(self, cr, uid, context={}):
def model_get(self, cr, uid, context=None):
wkf_pool = self.pool.get('workflow')
ids = wkf_pool.search(cr, uid, [])
osvs = wkf_pool.read(cr, uid, ids, ['osv'])
@ -394,7 +403,7 @@ server_object_lines()
#
class actions_server(osv.osv):
def _select_signals(self, cr, uid, context={}):
def _select_signals(self, cr, uid, context=None):
cr.execute("SELECT distinct w.osv, t.signal FROM wkf w, wkf_activity a, wkf_transition t \
WHERE w.id = a.wkf_id AND t.act_from = a.id OR t.act_to = a.id AND t.signal!='' \
AND t.signal NOT IN (null, NULL)")
@ -406,13 +415,13 @@ class actions_server(osv.osv):
res.append(line)
return res
def _select_objects(self, cr, uid, context={}):
def _select_objects(self, cr, uid, context=None):
model_pool = self.pool.get('ir.model')
ids = model_pool.search(cr, uid, [('name','not ilike','.')])
res = model_pool.read(cr, uid, ids, ['model', 'name'])
return [(r['model'], r['name']) for r in res] + [('','')]
def change_object(self, cr, uid, ids, copy_object, state, context={}):
def change_object(self, cr, uid, ids, copy_object, state, context=None):
if state == 'object_copy':
model_pool = self.pool.get('ir.model')
model = copy_object.split(',')[0]
@ -483,7 +492,7 @@ class actions_server(osv.osv):
}
def get_email(self, cr, uid, action, context):
logger = netsvc.Logger()
logger = logging.getLogger('Workflow')
obj_pool = self.pool.get(action.model_id.model)
id = context.get('active_id')
obj = obj_pool.browse(cr, uid, id)
@ -499,12 +508,12 @@ class actions_server(osv.osv):
try:
obj = getattr(obj, field)
except Exception:
logger.notifyChannel('Workflow', netsvc.LOG_ERROR, 'Failed to parse : %s' % (field))
logger.exception('Failed to parse: %s', field)
return obj
def get_mobile(self, cr, uid, action, context):
logger = netsvc.Logger()
logger = logging.getLogger('Workflow')
obj_pool = self.pool.get(action.model_id.model)
id = context.get('active_id')
obj = obj_pool.browse(cr, uid, id)
@ -520,7 +529,7 @@ class actions_server(osv.osv):
try:
obj = getattr(obj, field)
except Exception:
logger.notifyChannel('Workflow', netsvc.LOG_ERROR, 'Failed to parse : %s' % (field))
logger.exception('Failed to parse: %s', field)
return obj
@ -556,7 +565,7 @@ class actions_server(osv.osv):
# FIXME: refactor all the eval() calls in run()!
def run(self, cr, uid, ids, context=None):
logger = netsvc.Logger()
logger = logging.getLogger(self._name)
if context is None:
context = {}
for action in self.browse(cr, uid, ids, context):
@ -604,18 +613,19 @@ class actions_server(osv.osv):
pass
if not address:
logger.notifyChannel('email', netsvc.LOG_INFO, 'Partner Email address not Specified!')
logger.info('Partner Email address not Specified!')
continue
if not user:
logger.info('Email-From address not Specified at server!')
raise osv.except_osv(_('Error'), _("Please specify server option --email-from !"))
subject = self.merge_message(cr, uid, action.subject, action, context)
body = self.merge_message(cr, uid, action.message, action, context)
if tools.email_send(user, [address], subject, body, debug=False, subtype='html') == True:
logger.notifyChannel('email', netsvc.LOG_INFO, 'Email successfully send to : %s' % (address))
logger.info('Email successfully sent to: %s', address)
else:
logger.notifyChannel('email', netsvc.LOG_ERROR, 'Failed to send email to : %s' % (address))
logger.warning('Failed to send email to: %s', address)
if action.state == 'trigger':
wf_service = netsvc.LocalService("workflow")
@ -629,7 +639,7 @@ class actions_server(osv.osv):
#TODO: set the user and password from the system
# for the sms gateway user / password
# USE smsclient module from extra-addons
logger.notifyChannel('sms', netsvc.LOG_ERROR, 'SMS Facility has not been implemented yet. Use smsclient module!')
logger.warning('SMS Facility has not been implemented yet. Use smsclient module!')
if action.state == 'other':
res = []

View File

@ -24,16 +24,29 @@ from osv.orm import except_orm
import tools
class ir_attachment(osv.osv):
def check(self, cr, uid, ids, mode, context=None):
def check(self, cr, uid, ids, mode, context=None, values=None):
"""Restricts the access to an ir.attachment, according to referred model
In the 'document' module, it is overriden to relax this hard rule, since
more complex ones apply there.
"""
if not ids:
return
ima = self.pool.get('ir.model.access')
if isinstance(ids, (int, long)):
ids = [ids]
cr.execute('select distinct res_model from ir_attachment where id IN %s', (tuple(ids),))
for obj in cr.fetchall():
if obj[0]:
ima.check(cr, uid, obj[0], mode, context=context)
res_ids = {}
if ids:
if isinstance(ids, (int, long)):
ids = [ids]
cr.execute('SELECT DISTINCT res_model, res_id FROM ir_attachment WHERE id = ANY (%s)', (ids,))
for rmod, rid in cr.fetchall():
if not (rmod and rid):
continue
res_ids.setdefault(rmod,[]).append(rid)
if values:
if 'res_model' in values and 'res_id' in values:
res_ids.setdefault(values['res_model'],[]).append(values['res_id'])
for model, mids in res_ids.items():
self.pool.get(model).check_access_rule(cr, uid, mids, mode, context=context)
def search(self, cr, uid, args, offset=0, limit=None, order=None,
context=None, count=False):
@ -64,7 +77,7 @@ class ir_attachment(osv.osv):
return super(ir_attachment, self).read(cr, uid, ids, fields_to_read, context, load)
def write(self, cr, uid, ids, vals, context=None):
self.check(cr, uid, ids, 'write', context=context)
self.check(cr, uid, ids, 'write', context=context, values=vals)
return super(ir_attachment, self).write(cr, uid, ids, vals, context)
def copy(self, cr, uid, id, default=None, context=None):
@ -76,8 +89,7 @@ class ir_attachment(osv.osv):
return super(ir_attachment, self).unlink(cr, uid, ids, context)
def create(self, cr, uid, values, context=None):
if 'res_model' in values and values['res_model'] != '':
self.pool.get('ir.model.access').check(cr, uid, values['res_model'], 'create', context=context)
self.check(cr, uid, [], mode='create', context=context, values=values)
return super(ir_attachment, self).create(cr, uid, values, context)
def action_get(self, cr, uid, context=None):
@ -112,11 +124,11 @@ class ir_attachment(osv.osv):
'url': fields.char('Url', size=512, oldname="link"),
'type': fields.selection(
[ ('url','URL'), ('binary','Binary'), ],
'Type', help="Binary File or external URL", required=True),
'Type', help="Binary File or external URL", required=True, change_default=True),
'create_date': fields.datetime('Date Created', readonly=True),
'create_uid': fields.many2one('res.users', 'Owner', readonly=True),
'company_id': fields.many2one('res.company', 'Company'),
'company_id': fields.many2one('res.company', 'Company', change_default=True),
}
_defaults = {

View File

@ -75,10 +75,12 @@ class ir_model(osv.osv):
fnct_search=_search_osv_memory,
help="Indicates whether this object model lives in memory only, i.e. is not persisted (osv.osv_memory)")
}
_defaults = {
'model': lambda *a: 'x_',
'state': lambda self,cr,uid,ctx={}: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
'state': lambda self,cr,uid,ctx=None: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
}
def _check_model_name(self, cr, uid, ids):
for model in self.browse(cr, uid, ids):
if model.state=='manual':
@ -88,8 +90,10 @@ class ir_model(osv.osv):
return False
return True
def _model_name_msg(self, cr, uid, ids, context=None):
return _('The Object name must start with x_ and not contain any special character !')
_constraints = [
(_check_model_name, 'The Object name must start with x_ and not contain any special character !', ['model']),
(_check_model_name, _model_name_msg, ['model']),
]
# overridden to allow searching both on model name (model field)
@ -181,8 +185,11 @@ class ir_model_fields(osv.osv):
'selectable': lambda *a: 1,
}
_order = "id"
def _size_gt_zero_msg(self, cr, user, ids, context=None):
return _('Size of the field can never be less than 1 !')
_sql_constraints = [
('size_gt_zero', 'CHECK (size>0)', 'Size of the field can never be less than 1 !'),
('size_gt_zero', 'CHECK (size>0)',_size_gt_zero_msg ),
]
def unlink(self, cr, user, ids, context=None):
for field in self.browse(cr, user, ids, context):
@ -381,8 +388,8 @@ class ir_model_data(osv.osv):
_defaults = {
'date_init': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'date_update': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'noupdate': lambda *a: False,
'module': lambda *a: ''
'noupdate': False,
'module': ''
}
_sql_constraints = [
('module_name_uniq', 'unique(name, module)', 'You cannot have multiple records with the same id for the same module !'),

View File

@ -19,11 +19,13 @@
#
##############################################################################
from osv import fields, osv
import re
import tools
import base64
import re
import tools
import addons
from osv import fields, osv
from tools.translate import _
def one_in(setA, setB):
"""Check the presence of an element of setA in setB
@ -69,8 +71,10 @@ class ir_ui_menu(osv.osv):
# radical but this doesn't frequently happen
self._cache = {}
def create_shortcut(self, cr, uid, values, context={}):
def create_shortcut(self, cr, uid, values, context=None):
dataobj = self.pool.get('ir.model.data')
if context is None:
context = {}
new_context = context.copy()
for key in context:
if key.startswith('default_'):
@ -309,13 +313,17 @@ class ir_ui_menu(osv.osv):
('ir.actions.server', 'ir.actions.server'),
]),
}
def _rec_message(self, cr, uid, ids, context=None):
return _('Error ! You can not create recursive Menu.')
_constraints = [
(_check_recursion, 'Error ! You can not create recursive Menu.', ['parent_id'])
(_check_recursion, _rec_message , ['parent_id'])
]
_defaults = {
'icon' : lambda *a: 'STOCK_OPEN',
'icon_pict': lambda *a: ('stock', ('STOCK_OPEN','ICON_SIZE_MENU')),
'sequence' : lambda *a: 10,
'icon' : 'STOCK_OPEN',
'icon_pict': ('stock', ('STOCK_OPEN','ICON_SIZE_MENU')),
'sequence' : 10,
}
_order = "sequence,id"
ir_ui_menu()

View File

@ -27,7 +27,6 @@ import urllib
import zipimport
import addons
import netsvc
import pooler
import release
import tools
@ -41,7 +40,7 @@ class module_category(osv.osv):
_name = "ir.module.category"
_description = "Module Category"
def _module_nbr(self,cr,uid, ids, prop, unknow_none,context):
def _module_nbr(self,cr,uid, ids, prop, unknow_none, context):
cr.execute('SELECT category_id, COUNT(*) \
FROM ir_module_module \
WHERE category_id IN %(ids)s \
@ -82,13 +81,13 @@ class module(osv.osv):
'module %s', name, exc_info=True)
return info
def _get_latest_version(self, cr, uid, ids, field_name=None, arg=None, context={}):
def _get_latest_version(self, cr, uid, ids, field_name=None, arg=None, context=None):
res = dict.fromkeys(ids, '')
for m in self.browse(cr, uid, ids):
res[m.id] = self.get_module_info(m.name).get('version', '')
return res
def _get_views(self, cr, uid, ids, field_name=None, arg=None, context={}):
def _get_views(self, cr, uid, ids, field_name=None, arg=None, context=None):
res = {}
model_data_obj = self.pool.get('ir.model.data')
view_obj = self.pool.get('ir.ui.view')
@ -99,37 +98,36 @@ class module(osv.osv):
for m in mlist:
mnames[m.name] = m.id
res[m.id] = {
'menus_by_module':'',
'reports_by_module':'',
'views_by_module': ''
'menus_by_module':[],
'reports_by_module':[],
'views_by_module': []
}
view_id = model_data_obj.search(cr,uid,[('module','in', mnames.keys()),
('model','in',('ir.ui.view','ir.actions.report.xml','ir.ui.menu'))])
for data_id in model_data_obj.browse(cr,uid,view_id,context):
# We use try except, because views or menus may not exist
try:
key = data_id['model']
key = data_id.model
if key=='ir.ui.view':
try:
v = view_obj.browse(cr,uid,data_id.res_id)
aa = v.inherit_id and '* INHERIT ' or ''
res[mnames[data_id.module]]['views_by_module'] += aa + v.name + ' ('+v.type+')\n'
except Exception:
self.__logger.debug(
'Unknown error while browsing ir.ui.view[%s]',
data_id.res_id, exc_info=True)
v = view_obj.browse(cr,uid,data_id.res_id)
aa = v.inherit_id and '* INHERIT ' or ''
res[mnames[data_id.module]]['views_by_module'].append(aa + v.name + '('+v.type+')')
elif key=='ir.actions.report.xml':
res[mnames[data_id.module]]['reports_by_module'] += report_obj.browse(cr,uid,data_id.res_id).name + '\n'
res[mnames[data_id.module]]['reports_by_module'].append(report_obj.browse(cr,uid,data_id.res_id).name)
elif key=='ir.ui.menu':
try:
m = menu_obj.browse(cr,uid,data_id.res_id)
res[mnames[data_id.module]]['menus_by_module'] += m.complete_name + '\n'
except Exception:
self.__logger.debug(
'Unknown error while browsing ir.ui.menu[%s]',
data_id.res_id, exc_info=True)
except KeyError:
res[mnames[data_id.module]]['menus_by_module'].append(menu_obj.browse(cr,uid,data_id.res_id).complete_name)
except KeyError, e:
self.__logger.warning(
'Data not found for reference %s[%s:%s.%s]', data_id.model,
data_id.res_id, data_id.model, data_id.name, exc_info=True)
pass
except Exception, e:
self.__logger.warning('Unknown error while browsing %s[%s]',
data_id.model, data_id.res_id, exc_info=True)
pass
for key, value in res.iteritems():
for k, v in res[key].iteritems() :
res[key][k] = "\n".join(sorted(v))
return res
_columns = {
@ -180,16 +178,21 @@ class module(osv.osv):
}
_defaults = {
'state': lambda *a: 'uninstalled',
'demo': lambda *a: False,
'license': lambda *a: 'AGPL-3',
'state': 'uninstalled',
'demo': False,
'license': 'AGPL-3',
'web': False,
}
_order = 'name'
def _name_uniq_msg(self, cr, uid, ids, context=None):
return _('The name of the module must be unique !')
def _certificate_uniq_msg(self, cr, uid, ids, context=None):
return _('The certificate ID of the module must be unique !')
_sql_constraints = [
('name_uniq', 'unique (name)', 'The name of the module must be unique !'),
('certificate_uniq', 'unique (certificate)', 'The certificate ID of the module must be unique !')
('name_uniq', 'UNIQUE (name)',_name_uniq_msg ),
('certificate_uniq', 'UNIQUE (certificate)',_certificate_uniq_msg )
]
def unlink(self, cr, uid, ids, context=None):
@ -266,14 +269,14 @@ class module(osv.osv):
demo = demo or mdemo
return demo
def button_install(self, cr, uid, ids, context={}):
def button_install(self, cr, uid, ids, context=None):
return self.state_update(cr, uid, ids, 'to install', ['uninstalled'], context)
def button_install_cancel(self, cr, uid, ids, context={}):
def button_install_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'uninstalled', 'demo':False})
return True
def button_uninstall(self, cr, uid, ids, context={}):
def button_uninstall(self, cr, uid, ids, context=None):
for module in self.browse(cr, uid, ids):
cr.execute('''select m.state,m.name
from
@ -289,7 +292,7 @@ class module(osv.osv):
self.write(cr, uid, ids, {'state': 'to remove'})
return True
def button_uninstall_cancel(self, cr, uid, ids, context={}):
def button_uninstall_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'installed'})
return True
@ -325,7 +328,7 @@ class module(osv.osv):
self.button_install(cr, uid, to_install, context=context)
return True
def button_upgrade_cancel(self, cr, uid, ids, context={}):
def button_upgrade_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'installed'})
return True
def button_update_translations(self, cr, uid, ids, context=None):
@ -418,7 +421,9 @@ class module(osv.osv):
zimp.load_module(mod.name)
return res
def _update_dependencies(self, cr, uid, id, depends=[]):
def _update_dependencies(self, cr, uid, id, depends=None):
if depends is None:
depends = []
for d in depends:
cr.execute('INSERT INTO ir_module_module_dependency (module_id, name) values (%s, %s)', (id, d))
@ -441,8 +446,8 @@ class module(osv.osv):
categs = categs[1:]
self.write(cr, uid, [id], {'category_id': p_id})
def update_translations(self, cr, uid, ids, filter_lang=None, context={}):
logger = netsvc.Logger()
def update_translations(self, cr, uid, ids, filter_lang=None, context=None):
logger = logging.getLogger('i18n')
if not filter_lang:
pool = pooler.get_pool(cr.dbname)
lang_obj = pool.get('res.lang')
@ -462,13 +467,18 @@ class module(osv.osv):
if len(lang) > 5:
raise osv.except_osv(_('Error'), _('You Can Not Load Translation For language Due To Invalid Language/Country Code'))
iso_lang = tools.get_iso_codes(lang)
f = os.path.join(modpath, 'i18n', iso_lang + '.po')
if not os.path.exists(f) and iso_lang.find('_') != -1:
f = os.path.join(modpath, 'i18n', iso_lang.split('_')[0] + '.po')
f = addons.get_module_resource(mod.name, 'i18n', iso_lang + '.po')
# Implementation notice: we must first search for the full name of
# the language derivative, like "en_UK", and then the generic,
# like "en".
if (not f) and '_' in iso_lang:
f = addons.get_module_resource(mod.name, 'i18n', iso_lang.split('_')[0] + '.po')
iso_lang = iso_lang.split('_')[0]
if os.path.exists(f):
logger.notifyChannel("i18n", netsvc.LOG_INFO, 'module %s: loading translation file for language %s' % (mod.name, iso_lang))
if f:
logger.info('module %s: loading translation file for language %s', mod.name, iso_lang)
tools.trans_load(cr.dbname, f, lang, verbose=False, context=context)
elif iso_lang != 'en':
logger.warning('module %s: no translation for language %s', mod.name, iso_lang)
def check(self, cr, uid, ids, context=None):
logger = logging.getLogger('init')
@ -537,7 +547,7 @@ class module_dependency(osv.osv):
_name = "ir.module.module.dependency"
_description = "Module dependency"
def _state(self, cr, uid, ids, name, args, context={}):
def _state(self, cr, uid, ids, name, args, context=None):
result = {}
mod_obj = self.pool.get('ir.module.module')
for md in self.browse(cr, uid, ids):

View File

@ -38,7 +38,9 @@ class base_language_install(osv.osv_memory):
'state': 'init',
'overwrite': False
}
def lang_install(self, cr, uid, ids, context):
def lang_install(self, cr, uid, ids, context=None):
if context is None:
context = {}
language_obj = self.browse(cr, uid, ids)[0]
lang = language_obj.lang
if lang:

View File

@ -20,6 +20,7 @@
##############################################################################
import pooler
import wizard
from osv import osv, fields
class base_module_upgrade(osv.osv_memory):
@ -80,7 +81,7 @@ class base_module_upgrade(osv.osv_memory):
res = mod_obj.read(cr, uid, ids, ['name','state'], context)
return {'module_info': '\n'.join(map(lambda x: x['name']+' : '+x['state'], res))}
def upgrade_module(self, cr, uid, ids, context):
def upgrade_module(self, cr, uid, ids, context=None):
pool = pooler.get_pool(cr.dbname)
mod_obj = self.pool.get('ir.module.module')
data_obj = self.pool.get('ir.model.data')

View File

@ -35,7 +35,7 @@ class base_update_translations(osv.osv_memory):
lang_obj=pooler.get_pool(cr.dbname).get('res.lang')
ids=lang_obj.search(cr, uid, [('code', '=', lang_code)])
if not ids:
raise osv.except_osv(_('No language with code "%s" exists') % lang_code)
raise osv.except_osv(_('Error!'), _('No language with code "%s" exists') % lang_code)
lang = lang_obj.browse(cr, uid, ids[0])
return lang.name
def act_cancel(self, cr, uid, ids, context=None):
@ -51,12 +51,14 @@ class base_update_translations(osv.osv_memory):
return {'type': 'ir.actions.act_window_close'}
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
res = super(base_update_translations, self).default_get(cr, uid, fields, context=context)
if context.get('active_model') != "res.lang":
return res
record_id = context and context.get('active_id', False) or False
record_id = context.get('active_id', False) or False
if record_id:
lang = self.pool.get('res.lang').browse(cr, uid, record_id).code
res.update(lang=lang)

View File

@ -46,7 +46,7 @@ class groups(osv.osv):
('name_uniq', 'unique (name)', 'The name of the group must be unique !')
]
def copy(self, cr, uid, id, default=None, context={}):
def copy(self, cr, uid, id, default=None, context=None):
group_name = self.read(cr, uid, [id], ['name'])[0]['name']
default.update({'name': _('%s (copy)')%group_name})
return super(groups, self).copy(cr, uid, id, default, context)
@ -83,14 +83,14 @@ class groups(osv.osv):
groups()
def _lang_get(self, cr, uid, context={}):
def _lang_get(self, cr, uid, context=None):
obj = self.pool.get('res.lang')
ids = obj.search(cr, uid, [('translatable','=',True)])
res = obj.read(cr, uid, ids, ['code', 'name'], context)
res = obj.read(cr, uid, ids, ['code', 'name'], context=context)
res = [(r['code'], r['name']) for r in res]
return res
def _tz_get(self,cr,uid, context={}):
def _tz_get(self,cr,uid, context=None):
return [(x, x) for x in pytz.all_timezones]
class users(osv.osv):
@ -315,9 +315,9 @@ class users(osv.osv):
return result
_defaults = {
'password' : lambda *a : '',
'context_lang': lambda *args: 'en_US',
'active' : lambda *a: True,
'password' : '',
'context_lang': 'en_US',
'active' : True,
'menu_id': _get_menu,
'company_id': _get_company,
'company_ids': _get_companies,
@ -373,7 +373,7 @@ class users(osv.osv):
ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit)
return self.name_get(cr, user, ids)
def copy(self, cr, uid, id, default=None, context={}):
def copy(self, cr, uid, id, default=None, context=None):
user2copy = self.read(cr, uid, [id], ['login','name'])[0]
if default is None:
default = {}
@ -395,25 +395,28 @@ class users(osv.osv):
result[k[8:]] = res or False
return result
def action_get(self, cr, uid, context={}):
def action_get(self, cr, uid, context=None):
dataobj = self.pool.get('ir.model.data')
data_id = dataobj._get_id(cr, 1, 'base', 'action_res_users_my')
return dataobj.browse(cr, uid, data_id, context).res_id
return dataobj.browse(cr, uid, data_id, context=context).res_id
def login(self, db, login, password):
if not password:
return False
cr = pooler.get_db(db).cursor()
cr.execute('select id from res_users where login=%s and password=%s and active', (tools.ustr(login), tools.ustr(password)))
res = cr.fetchone()
result = False
if res:
cr.execute("update res_users set date=%s where id=%s", (time.strftime('%Y-%m-%d %H:%M:%S'),res[0]))
try:
cr.execute('UPDATE res_users SET date=now() WHERE login=%s AND password=%s AND active RETURNING id',
(tools.ustr(login), tools.ustr(password)))
res = cr.fetchone()
cr.commit()
result = res[0]
cr.close()
return result
if res:
return res[0]
else:
return False
finally:
cr.close()
def check_super(self, passwd):
if passwd == tools.config['admin_passwd']:
return True
@ -427,29 +430,34 @@ class users(osv.osv):
if (cached_pass is not None) and cached_pass == passwd:
return True
cr = pooler.get_db(db).cursor()
cr.execute('select count(1) from res_users where id=%s and password=%s and active=%s', (int(uid), passwd, True))
res = cr.fetchone()[0]
cr.close()
if not bool(res):
raise security.ExceptionNoTb('AccessDenied')
if res:
if self._uid_cache.has_key(db):
ulist = self._uid_cache[db]
ulist[uid] = passwd
else:
self._uid_cache[db] = {uid:passwd}
return bool(res)
try:
cr.execute('SELECT COUNT(1) FROM res_users WHERE id=%s AND password=%s AND active=%s',
(int(uid), passwd, True))
res = cr.fetchone()[0]
if not bool(res):
raise security.ExceptionNoTb('AccessDenied')
if res:
if self._uid_cache.has_key(db):
ulist = self._uid_cache[db]
ulist[uid] = passwd
else:
self._uid_cache[db] = {uid:passwd}
return bool(res)
finally:
cr.close()
def access(self, db, uid, passwd, sec_level, ids):
if not passwd:
return False
cr = pooler.get_db(db).cursor()
cr.execute('select id from res_users where id=%s and password=%s', (uid, passwd))
res = cr.fetchone()
cr.close()
if not res:
raise security.ExceptionNoTb('Bad username or password')
return res[0]
try:
cr.execute('SELECT id FROM res_users WHERE id=%s AND password=%s', (uid, passwd))
res = cr.fetchone()
if not res:
raise security.ExceptionNoTb('Bad username or password')
return res[0]
finally:
cr.close()
users()
@ -512,9 +520,19 @@ class groups2(osv.osv): ##FIXME: Is there a reason to inherit this object ?
}
def unlink(self, cr, uid, ids, context=None):
group_users = []
for record in self.read(cr, uid, ids, ['users'], context=context):
if record['users']:
raise osv.except_osv(_('Warning !'), _('Make sure you have no users linked with the group(s)!'))
group_users.extend(record['users'])
if group_users:
user_names = [user.name for user in self.pool.get('res.users').browse(cr, uid, group_users, context=context)]
if len(user_names) >= 5:
user_names = user_names[:5]
user_names += '...'
raise osv.except_osv(_('Warning !'),
_('Group(s) cannot be deleted, because some user(s) still belong to them: %s !') % \
', '.join(user_names))
return super(groups2, self).unlink(cr, uid, ids, context=context)
groups2()

View File

@ -341,8 +341,10 @@ class many2one(_column):
return result
def get(self, cr, obj, ids, name, user=None, context=None, values=None):
context = context or {}
values = values or {}
if context is None:
context = {}
if values is None:
values = {}
res = {}
for r in values:
@ -454,12 +456,12 @@ class one2many(_column):
raise _('Not Implemented')
def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None):
if not context:
if context is None:
context = {}
if self._context:
context = context.copy()
context.update(self._context)
if not values:
if values is None:
values = {}
res = {}
@ -740,9 +742,9 @@ class function(_column):
return self._fnct_search(obj, cr, uid, obj, name, args, context=context)
def get(self, cr, obj, ids, name, user=None, context=None, values=None):
if not context:
if context is None:
context = {}
if not values:
if values is None:
values = {}
res = {}
if self._method:

View File

@ -41,6 +41,7 @@ import calendar
import copy
import datetime
import logging
import warnings
import operator
import pickle
import re
@ -978,6 +979,10 @@ class orm_template(object):
for key in self.pool._sql_error.keys():
if key in e[0]:
msg = self.pool._sql_error[key]
if hasattr(msg, '__call__'):
msg = msg(cr, uid, [res_id,], context=context)
else:
msg = _(msg)
break
return (-1, res, 'Line ' + str(counter) +' : ' + msg, '')
if isinstance(e, osv.orm.except_orm):
@ -1051,9 +1056,12 @@ class orm_template(object):
# Check presence of __call__ directly instead of using
# callable() because it will be deprecated as of Python 3.0
if hasattr(msg, '__call__'):
txt_msg, params = msg(self, cr, uid, ids)
tmp_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=txt_msg) or txt_msg
translated_msg = tmp_msg % params
tmp_msg = msg(self, cr, uid, ids, context=context)
if isinstance(tmp_msg, tuple):
tmp_msg, params = tmp_msg
translated_msg = tmp_msg % params
else:
translated_msg = tmp_msg
else:
translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
error_msgs.append(
@ -2597,7 +2605,7 @@ class orm(orm_template):
if not ok:
i = 0
while True:
newname = self._table + '_moved' + str(i)
newname = k + '_moved' + str(i)
cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
"WHERE c.relname=%s " \
"AND a.attname=%s " \
@ -3048,21 +3056,6 @@ class orm(orm_template):
else:
res = map(lambda x: {'id': x}, ids)
# if not res:
# res = map(lambda x: {'id': x}, ids)
# for record in res:
# for f in fields_to_read:
# field_val = False
# if f in self._columns.keys():
# ftype = self._columns[f]._type
# elif f in self._inherit_fields.keys():
# ftype = self._inherit_fields[f][2]._type
# else:
# continue
# if ftype in ('one2many', 'many2many'):
# field_val = []
# record.update({f:field_val})
for f in fields_pre:
if f == self.CONCURRENCY_CHECK_FIELD:
continue
@ -3199,9 +3192,11 @@ class orm(orm_template):
for r in res:
for key in r:
r[key] = r[key] or False
if details and key in ('write_uid', 'create_uid'):
if r[key]:
if details and key in ('write_uid', 'create_uid') and r[key]:
try:
r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
except Exception:
pass # Leave the numeric uid there
r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
del r['name'], r['module']
if uniq:
@ -3355,7 +3350,7 @@ class orm(orm_template):
fobj = None
if field in self._columns:
fobj = self._columns[field]
else:
elif field in self._inherit_fields:
fobj = self._inherit_fields[field][2]
if not fobj:
continue
@ -4097,10 +4092,14 @@ class orm(orm_template):
else:
default['state'] = self._defaults['state']
context_wo_lang = context
context_wo_lang = context.copy()
if 'lang' in context:
del context_wo_lang['lang']
data = self.read(cr, uid, [id], context=context_wo_lang)[0]
data = self.read(cr, uid, [id,], context=context_wo_lang)
if data:
data = data[0]
else:
raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))
fields = self.fields_get(cr, uid, context=context)
for f in fields:
@ -4211,6 +4210,13 @@ class orm(orm_template):
return cr.fetchone()[0] == len(ids)
def check_recursion(self, cr, uid, ids, parent=None):
warnings.warn("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
self._name, DeprecationWarning, stacklevel=3)
assert parent is None or parent in self._columns or parent in self._inherit_fields,\
"The 'parent' parameter passed to check_recursion() must be None or a valid field name"
return self._check_recursion(cr, uid, ids, parent)
def _check_recursion(self, cr, uid, ids, parent=None):
"""
Verifies that there is no loop in a hierarchical structure of records,
by following the parent relationship using the **parent** field until a loop

View File

@ -32,7 +32,7 @@ import traceback
import logging
from psycopg2 import IntegrityError, errorcodes
from tools.func import wraps
from tools.translate import _
from tools.translate import translate
module_list = []
module_class_list = {}
@ -51,6 +51,68 @@ class osv_pool(netsvc.Service):
def check(f):
@wraps(f)
def wrapper(self, dbname, *args, **kwargs):
""" Wraps around OSV functions and normalises a few exceptions
"""
def tr(src, ttype):
# We try to do the same as the _(), but without the frame
# inspection, since we aready are wrapping an osv function
# trans_obj = self.get('ir.translation') cannot work yet :(
ctx = {}
if not kwargs:
if args and isinstance(args[-1], dict):
ctx = args[-1]
elif isinstance(kwargs, dict):
ctx = kwargs.get('context', {})
uid = 1
if args and isinstance(args[0], (long, int)):
uid = args[0]
lang = ctx and ctx.get('lang')
if not (lang or hasattr(src, '__call__')):
return src
# We open a *new* cursor here, one reason is that failed SQL
# queries (as in IntegrityError) will invalidate the current one.
cr = False
if hasattr(src, '__call__'):
# callable. We need to find the right parameters to call
# the orm._sql_message(self, cr, uid, ids, context) function,
# or we skip..
# our signature is f(osv_pool, dbname [,uid, obj, method, args])
try:
if args and len(args) > 1:
obj = self.get(args[1])
if len(args) > 3 and isinstance(args[3], (long, int, list)):
ids = args[3]
else:
ids = []
cr = pooler.get_db_only(dbname).cursor()
return src(obj, cr, uid, ids, context=(ctx or {}))
except Exception:
pass
finally:
if cr: cr.close()
return False # so that the original SQL error will
# be returned, it is the best we have.
try:
cr = pooler.get_db_only(dbname).cursor()
res = translate(cr, name=False, source_type=ttype,
lang=lang, source=src)
if res:
return res
else:
return src
finally:
if cr: cr.close()
def _(src):
return tr(src, 'code')
try:
if not pooler.get_pool(dbname)._ready:
raise except_osv('Database not ready', 'Currently, this database is not fully loaded and can not be used.')
@ -64,7 +126,8 @@ class osv_pool(netsvc.Service):
except IntegrityError, inst:
for key in self._sql_error.keys():
if key in inst[0]:
self.abortResponse(1, _('Constraint Error'), 'warning', _(self._sql_error[key]))
self.abortResponse(1, _('Constraint Error'), 'warning',
tr(self._sql_error[key], 'sql_constraint') or inst[0])
if inst.pgcode in (errorcodes.NOT_NULL_VIOLATION, errorcodes.FOREIGN_KEY_VIOLATION, errorcodes.RESTRICT_VIOLATION):
msg = _('The operation cannot be completed, probably due to the following:\n- deletion: you may be trying to delete a record while other records still reference it\n- creation/update: a mandatory field is not correctly set')
self.logger.debug("IntegrityError", exc_info=True)

View File

@ -55,7 +55,7 @@ class rml2txt(render.render):
def __init__(self, rml, localcontext= None, datas={}):
super(rml2txt, self).__init__(datas)
self.rml = rml
self.localcontext = localcontext
self.localcontext = localcontext
self.output_type = 'txt'
def _render(self):

View File

@ -21,6 +21,14 @@
from reportlab import rl_config
import os
import logging
"""This module allows the mapping of some system-available TTF fonts to
the reportlab engine.
This file could be customized per distro (although most Linux/Unix ones
should have the same filenames, so only the code below may need changes).
"""
CustomTTFonts = [ ('Helvetica',"DejaVu Sans", "DejaVuSans.ttf", 'normal'),
('Helvetica',"DejaVu Sans Bold", "DejaVuSans-Bold.ttf", 'bold'),
@ -43,23 +51,50 @@ CustomTTFonts = [ ('Helvetica',"DejaVu Sans", "DejaVuSans.ttf", 'normal'),
('Courier',"FreeMono Oblique", "FreeMonoOblique.ttf", 'italic'),
('Courier',"FreeMono BoldOblique", "FreeMonoBoldOblique.ttf", 'bolditalic'),]
def SearchFontPath(font_file):
__foundFonts = []
def FindCustomFonts():
"""Fill the __foundFonts list with those filenames, whose fonts
can be found in the reportlab ttf font path.
This process needs only be done once per loading of this module,
it is cached. But, if the system admin adds some font in the
meanwhile, the server must be restarted eventually.
"""
dirpath = []
log = logging.getLogger('report.fonts')
global __foundFonts
for dirname in rl_config.TTFSearchPath:
for root, dirs, files in os.walk(os.path.abspath(dirname)):
for file_name in files:
filename = os.path.join(root, file_name)
extension = os.path.splitext(filename)[1]
if extension.lower() in ['.ttf']:
if file_name==font_file:
return True
return False
abp = os.path.abspath(dirname)
if os.path.isdir(abp):
dirpath.append(abp)
for k, (name, font, fname, mode) in enumerate(CustomTTFonts):
if fname in __foundFonts:
continue
for d in dirpath:
if os.path.exists(os.path.join(d, fname)):
log.debug("Found font %s in %s as %s", fname, d, name)
__foundFonts.append(fname)
break
# print "Found fonts:", __foundFonts
def SetCustomFonts(rmldoc):
""" Map some font names to the corresponding TTF fonts
The ttf font may not even have the same name, as in
Times -> Liberation Serif.
This function is called once per report, so it should
avoid system-wide processing (cache it, instead).
"""
global __foundFonts
if not len(__foundFonts):
FindCustomFonts()
for name, font, fname, mode in CustomTTFonts:
if SearchFontPath(fname):
rmldoc.setTTFontMapping(name, font,filename, mode)
if os.path.isabs(fname) or fname in __foundFonts:
rmldoc.setTTFontMapping(name, font, fname, mode)
return True
#eof

View File

@ -21,22 +21,27 @@
import sys
from StringIO import StringIO
import copy
import reportlab
import re
from reportlab.pdfgen import canvas
from reportlab import platypus
import cStringIO
import utils
import color
import os
import logging
from lxml import etree
import base64
from reportlab.platypus.doctemplate import ActionFlowable
from tools.safe_eval import safe_eval as eval
from reportlab.lib.units import inch,cm,mm
try:
from cStringIO import StringIO
_hush_pyflakes = [ StringIO ]
except ImportError:
from StringIO import StringIO
encoding = 'utf-8'
class NumberedCanvas(canvas.Canvas):
@ -222,7 +227,7 @@ class _rml_doc(object):
addMapping(name, 1, 0, name) #bold
addMapping(name, 1, 1, name) #italic and bold
def setTTFontMapping(self,face, fontname,filename, mode='all'):
def setTTFontMapping(self,face, fontname, filename, mode='all'):
from reportlab.lib.fonts import addMapping
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
@ -411,7 +416,7 @@ class _rml_canvas(object):
if not node.get('file') :
if node.get('name'):
image_data = self.images[node.get('name')]
s = cStringIO.StringIO(image_data)
s = StringIO(image_data)
else:
if self.localcontext:
res = utils._regex.findall(node.text)
@ -422,19 +427,19 @@ class _rml_canvas(object):
if node.text:
image_data = base64.decodestring(node.text)
if image_data:
s = cStringIO.StringIO(image_data)
s = StringIO(image_data)
else:
return False
else:
if node.get('file') in self.images:
s = cStringIO.StringIO(self.images[node.get('file')])
s = StringIO(self.images[node.get('file')])
else:
try:
u = urllib.urlopen(str(node.get('file')))
s = cStringIO.StringIO(u.read())
s = StringIO(u.read())
except Exception:
u = file(os.path.join(self.path,str(node.get('file'))), 'rb')
s = cStringIO.StringIO(u.read())
s = StringIO(u.read())
img = ImageReader(s)
(sx,sy) = img.getSize()
@ -480,12 +485,11 @@ class _rml_canvas(object):
self.canvas.drawPath(self.path, **utils.attr_get(node, [], {'fill':'bool','stroke':'bool'}))
def setFont(self, node):
from reportlab.pdfbase import pdfmetrics
fname = node.get('name')
#TODO : other fonts should be supported
if fname not in pdfmetrics.standardFonts:
fname = self.canvas._fontname
return self.canvas.setFont(fname, utils.unit_get(node.get('size')))
try:
return self.canvas.setFont(fname, utils.unit_get(node.get('size')))
except KeyError, e:
raise KeyError('Font "%s" is not registered in the engine' % fname)
def render(self, node):
tags = {
@ -737,7 +741,7 @@ class _rml_flowable(object):
node.text = newtext
image_data = base64.decodestring(node.text)
if not image_data: return False
image = cStringIO.StringIO(image_data)
image = StringIO(image_data)
return platypus.Image(image, mask=(250,255,250,255,250,255), **(utils.attr_get(node, ['width','height'])))
else:
return platypus.Image(node.get('file'), mask=(250,255,250,255,250,255), **(utils.attr_get(node, ['width','height'])))
@ -909,9 +913,13 @@ def parseNode(rml, localcontext = {},fout=None, images={}, path='.',title=None):
try:
from customfonts import SetCustomFonts
SetCustomFonts(r)
except Exception:
except ImportError:
# means there is no custom fonts mapping in this system.
pass
fp = cStringIO.StringIO()
except Exception:
logging.getLogger('report').warning('Cannot set font mapping', exc_info=True)
pass
fp = StringIO()
r.render(fp)
return fp.getvalue()
@ -932,7 +940,7 @@ def parseString(rml, localcontext = {},fout=None, images={}, path='.',title=None
fp.close()
return fout
else:
fp = cStringIO.StringIO()
fp = StringIO()
r.render(fp)
return fp.getvalue()

View File

@ -38,12 +38,14 @@
import copy
import locale
import logging
import re
import reportlab
import tools
from tools.safe_eval import safe_eval as eval
from tools import ustr
_regex = re.compile('\[\[(.+?)\]\]')
@ -66,7 +68,7 @@ def _child_get(node, self=None, tagname=None):
except GeneratorExit:
continue
except Exception, e:
logging.getLogger('report').exception(e)
logging.getLogger('report').warning('rml_except: "%s"',n.get('rml_except',''), exc_info=True)
continue
if n.get('rml_tag'):
try:
@ -78,7 +80,7 @@ def _child_get(node, self=None, tagname=None):
except GeneratorExit:
yield n
except Exception, e:
logging.getLogger('report').exception(e)
logging.getLogger('report').warning('rml_tag: "%s"',n.get('rml_tag',''), exc_info=True)
yield n
else:
yield n
@ -89,7 +91,7 @@ def _child_get(node, self=None, tagname=None):
except GeneratorExit:
continue
except Exception, e:
logging.getLogger('report').exception(e)
logging.getLogger('report').warning('rml_except: "%s"',n.get('rml_except',''), exc_info=True)
continue
if self and self.localcontext and n.get('rml_tag'):
try:
@ -102,7 +104,7 @@ def _child_get(node, self=None, tagname=None):
except GeneratorExit:
pass
except Exception, e:
logging.getLogger('report').exception(e)
logging.getLogger('report').warning('rml_tag: "%s"',n.get('rml_tag',''), exc_info=True)
pass
if (tagname is None) or (n.tag==tagname):
yield n
@ -130,11 +132,11 @@ def _process_text(self, txt):
if isinstance(txt, basestring):
result += str2xml(txt)
elif (txt is not None) and (txt is not False):
result += unicode(txt)
result += ustr(txt)
return result
def text_get(node):
return ''.join([unicode(n.text) for n in node])
return ''.join([ustr(n.text) for n in node])
units = [
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),

View File

@ -1,14 +0,0 @@
-----BEGIN CERTIFICATE-----
MIICNDCCAZ0CAQEwDQYJKoZIhvcNAQEEBQAweTEQMA4GA1UEChMHVGlueUVSUDEM
MAoGA1UECxMDRVJQMRkwFwYJKoZIhvcNAQkBFgpmcEB0aW55LmJlMRAwDgYDVQQH
EwdXYWxoYWluMQswCQYDVQQIEwJCVzELMAkGA1UEBhMCQkUxEDAOBgNVBAMTB1Rp
bnlFUlAwHhcNMDYwNTI0MDgzODUxWhcNMDcwNTI0MDgzODUxWjBMMQswCQYDVQQG
EwJCRTELMAkGA1UECBMCQlcxEDAOBgNVBAoTB1RpbnlFUlAxDDAKBgNVBAsTA0VS
UDEQMA4GA1UEAxMHVGlueUVSUDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA
xzIDlU2PrczPsgXtxCskYxuwMPgNCNSCBfWsUZ9nJzlZfRAEXEq4LxaTPIgkzkIF
82bmJLgFz6/CyCFid4mkBLQBj30Opp2Vco39WRncNKHKxbk+/wZpZtQ0bSpvf+F4
MBqCLldYIqsoyenombVCb8X62IUu0ENF1wR22owvyKcCAwEAATANBgkqhkiG9w0B
AQQFAAOBgQB2yUqJ3gbQ8I6rcmaVJlcLDHfC5w1Jr1cUzcJevOPh3wygSZYYoUoe
yeYlzEag/DpPSHyRiJJVOKdiwU0yfmZPhfDNtDiBr47bz8qzIsYq5VeMmSeXrq/f
AA3iI4xE8YFzJHWtiBCqqyUok+j9pVad7iV7+UVIePHZLEkGGWIjDA==
-----END CERTIFICATE-----

View File

@ -1,15 +0,0 @@
-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQDHMgOVTY+tzM+yBe3EKyRjG7Aw+A0I1IIF9axRn2cnOVl9EARc
SrgvFpM8iCTOQgXzZuYkuAXPr8LIIWJ3iaQEtAGPfQ6mnZVyjf1ZGdw0ocrFuT7/
Bmlm1DRtKm9/4XgwGoIuV1giqyjJ6eiZtUJvxfrYhS7QQ0XXBHbajC/IpwIDAQAB
AoGAVwAxMHS/3FkoHckZICT3r5HYUosEpmaqo4+5w6yrkSYrP8RPI0A/UdG6XSXZ
bXzIvJakzkTRxPQvTtnF+A/V4rF9hxwB8cGXSywv5eDGmZ91qIsxY7Sv99VqSKNH
dNr9aZHloTvI51e/oramIJ/O3A+TbAS5i+u1DJC2IIFJcAECQQD8iRPTlPIqzjYD
Lg7KYGvwW9TE4ONAhC86kJbzV5o3amlV5duJgnkl/mNlfN1ihA7f3Gx9dfCjfRKp
V1rcjtCBAkEAye2aMw2v1m+MEqcPxyTUzVf5Y8BIXWbk15T43czXec9YclZSOBCX
Dgv4a3Fk+yxQUE0cZUH0U4FJq6mTgpuFJwJASFqZ9KATNlJ4xTZ4BGHV6zrUXkg0
tDJrObNdnID37XKulW7TFLXuMgWNwvEgmO5POLJ13whglubp5tzhapn8gQJAJz9Z
U0b7wFAaB54VAP31ppvMy0iaSB0xqX05CdNAplpYtJB2lpMS6RYGiMuXdwJb8d+q
/ztcg8aDTSw+kYoszQJBAPBrt694VkGT1k9Be6e5wyVDrE05bkHhFxPk/HMeWMDX
sZqHPs9vVaLBqu/uU84FdwRMOV71RG90g6eUEl7HWsg=
-----END RSA PRIVATE KEY-----

View File

@ -67,7 +67,7 @@ class ThreadedHTTPServer(ConnThreadingMixIn, SimpleXMLRPCDispatcher, HTTPServer)
_send_traceback_header = False
i = 0
def __init__(self, addr, requestHandler,
def __init__(self, addr, requestHandler, proto='http',
logRequests=True, allow_none=False, encoding=None, bind_and_activate=True):
self.logRequests = logRequests
@ -75,6 +75,7 @@ class ThreadedHTTPServer(ConnThreadingMixIn, SimpleXMLRPCDispatcher, HTTPServer)
HTTPServer.__init__(self, addr, requestHandler)
self.numThreads = 0
self.proto = proto
self.__threadno = 0
# [Bug #1222790] If possible, set close-on-exec flag; if a
@ -143,7 +144,7 @@ class BaseHttpDaemon(threading.Thread, netsvc.Server):
self.__interface = interface
try:
self.server = ThreadedHTTPServer((interface, port), handler)
self.server = ThreadedHTTPServer((interface, port), handler, proto=self._RealProto)
self.server.vdirs = []
self.server.logRequests = True
self.server.timeout = self._busywait_timeout
@ -178,7 +179,7 @@ class BaseHttpDaemon(threading.Thread, netsvc.Server):
def stats(self):
res = "%sd: " % self._RealProto + ((self.running and "running") or "stopped")
if self.server:
res += ", %d threads" % (self.server.numThreads,)
res += ", %d threads" % (self.server.numThreads,)
return res
def append_svc(self, service):

View File

@ -36,6 +36,7 @@ import release
import sql_db
import tools
import locale
import logging
from cStringIO import StringIO
class db(netsvc.ExportService):
@ -385,7 +386,7 @@ class common(_ObjectService):
'login_message','get_stats', 'check_connectivity',
'list_http_services']:
pass
elif method in ['get_available_updates', 'get_migration_scripts', 'set_loglevel']:
elif method in ['get_available_updates', 'get_migration_scripts', 'set_loglevel', 'get_os_time', 'get_sqlcount']:
passwd = params[0]
params = params[1:]
security.check_super(passwd)
@ -567,6 +568,15 @@ GNU Public Licence.
def exp_check_connectivity(self):
return bool(sql_db.db_connect('template1'))
def exp_get_os_time(self):
return os.times()
def exp_get_sqlcount(self):
logger = logging.getLogger('db.cursor')
if not logger.isEnabledFor(logging.DEBUG_SQL):
logger.warning("Counters of SQL will not be reliable unless DEBUG_SQL is set at the server's config.")
return sql_db.sql_counter
common()

View File

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
#
# Copyright P. Christeas <p_christ@hol.gr> 2008,2009
# A part of the code comes from the ganeti project: http://www.mail-archive.com/ganeti-devel@googlegroups.com/msg00713.html#
# Copyright P. Christeas <p_christ@hol.gr> 2008-2010
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
@ -136,15 +135,25 @@ class HTTPDir:
return self.path
return False
class noconnection:
class noconnection(object):
""" a class to use instead of the real connection
"""
def __init__(self, realsocket=None):
self.__hidden_socket = realsocket
def makefile(self, mode, bufsize):
return None
def close(self):
pass
def getsockname(self):
""" We need to return info about the real socket that is used for the request
"""
if not self.__hidden_socket:
raise AttributeError("No-connection class cannot tell real socket")
return self.__hidden_socket.getsockname()
class dummyconn:
def shutdown(self, tru):
pass
@ -191,6 +200,12 @@ class HttpOptions:
self.send_response(200)
self.send_header("Content-Length", 0)
if 'Microsoft' in self.headers.get('User-Agent', ''):
self.send_header('MS-Author-Via', 'DAV')
# Microsoft's webdav lib ass-umes that the server would
# be a FrontPage(tm) one, unless we send a non-standard
# header that we are not an elephant.
# http://www.ibm.com/developerworks/rational/library/2089.html
for key, value in opts.items():
if isinstance(value, basestring):
@ -239,13 +254,21 @@ class MultiHTTPHandler(FixSendError, HttpOptions, BaseHTTPRequestHandler):
fore.raw_requestline = "%s %s %s\n" % (self.command, path, self.version)
if not fore.parse_request(): # An error code has been sent, just exit
return
if fore.headers.status:
self.log_error("Parse error at headers: %s", fore.headers.status)
self.close_connection = 1
self.send_error(400,"Parse error at HTTP headers")
return
self.request_version = fore.request_version
if auth_provider and auth_provider.realm:
try:
self.sec_realms[auth_provider.realm].checkRequest(fore,path)
except AuthRequiredExc,ae:
if self.request_version != 'HTTP/1.1':
self.log_error("Cannot require auth at %s",self.request_version)
# Darwin 9.x.x webdav clients will report "HTTP/1.0" to us, while they support (and need) the
# authorisation features of HTTP/1.1
if self.request_version != 'HTTP/1.1' and ('Darwin/9.' not in fore.headers.get('User-Agent', '')):
self.log_error("Cannot require auth at %s", self.request_version)
self.send_error(403)
return
self._get_ignore_body(fore) # consume any body that came, not loose sync with input
@ -390,15 +413,21 @@ class MultiHTTPHandler(FixSendError, HttpOptions, BaseHTTPRequestHandler):
npath = '/' + npath
if not self.in_handlers.has_key(p):
self.in_handlers[p] = vdir.handler(noconnection(),self.client_address,self.server)
self.in_handlers[p] = vdir.handler(noconnection(self.request),self.client_address,self.server)
if vdir.auth_provider:
vdir.auth_provider.setupAuth(self, self.in_handlers[p])
hnd = self.in_handlers[p]
hnd.rfile = self.rfile
hnd.wfile = self.wfile
self.rlpath = self.raw_requestline
self._handle_one_foreign(hnd,npath, vdir.auth_provider)
# print "Handled, closing = ", self.close_connection
try:
self._handle_one_foreign(hnd,npath, vdir.auth_provider)
except IOError, e:
if e.errno == errno.EPIPE:
self.log_message("Could not complete request %s," \
"client closed connection", self.rlpath.rstrip())
else:
raise
return
# if no match:
self.send_error(404, "Path not found: %s" % self.path)

View File

@ -28,6 +28,7 @@ from psycopg2.psycopg1 import cursor as psycopg1cursor
from psycopg2.pool import PoolError
import psycopg2.extensions
import warnings
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
@ -51,11 +52,11 @@ psycopg2.extensions.register_type(psycopg2.extensions.new_type((700, 701, 1700,)
import tools
from tools.func import wraps
from tools.func import wraps, frame_codeinfo
from datetime import datetime as mdt
from datetime import timedelta
import threading
from inspect import stack
from inspect import currentframe
import re
re_from = re.compile('.* from "?([a-zA-Z_0-9]+)"? .*$');
@ -71,7 +72,7 @@ class Cursor(object):
@wraps(f)
def wrapper(self, *args, **kwargs):
if self.__closed:
raise psycopg2.ProgrammingError('Unable to use the cursor after having closed it')
raise psycopg2.OperationalError('Unable to use the cursor after having closed it')
return f(self, *args, **kwargs)
return wrapper
@ -93,7 +94,10 @@ class Cursor(object):
self._obj = self._cnx.cursor(cursor_factory=psycopg1cursor)
self.__closed = False # real initialisation value
self.autocommit(False)
self.__caller = tuple(stack()[2][1:3])
if self.sql_log:
self.__caller = frame_codeinfo(currentframe(),2)
else:
self.__caller = False
def __del__(self):
if not self.__closed:
@ -102,9 +106,12 @@ class Cursor(object):
# but the database connection is not put back into the connection
# pool, preventing some operation on the database like dropping it.
# This can also lead to a server overload.
msg = "Cursor not closed explicitly\n" \
"Cursor was created at %s:%s"
self.__logger.warn(msg, *self.__caller)
msg = "Cursor not closed explicitly\n"
if self.__caller:
msg += "Cursor was created at %s:%s" % self.__caller
else:
msg += "Please enable sql debugging to trace the caller."
self.__logger.warn(msg)
self._close(True)
@check
@ -337,10 +344,12 @@ class Connection(object):
def __nonzero__(self):
"""Check if connection is possible"""
try:
warnings.warn("You use an expensive function to test a connection.",
DeprecationWarning, stacklevel=1)
cr = self.cursor()
cr.close()
return True
except:
except Exception:
return False

View File

@ -277,7 +277,7 @@ form: module.record_id""" % (xml_id,)
assert modcnt == 1, """The ID "%s" refers to an uninstalled module""" % (xml_id,)
if len(id) > 64:
self.logger.notifyChannel('init', netsvc.LOG_ERROR, 'id: %s is to long (max: 64)'% (id,))
self.logger.error('id: %s is to long (max: 64)', id)
def _tag_delete(self, cr, rec, data_node=None):
d_model = rec.get("model",'')
@ -572,7 +572,7 @@ form: module.record_id""" % (xml_id,)
pid = res[0]
else:
# the menuitem does't exist but we are in branch (not a leaf)
self.logger.notifyChannel("init", netsvc.LOG_WARNING, 'Warning no ID for submenu %s of menu %s !' % (menu_elem, str(m_l)))
self.logger.warning('Warning no ID for submenu %s of menu %s !', menu_elem, str(m_l))
pid = self.pool.get('ir.ui.menu').create(cr, self.uid, {'parent_id' : pid, 'name' : menu_elem})
values['parent_id'] = pid
else:
@ -705,7 +705,7 @@ form: module.record_id""" % (xml_id,)
' expected count: %d\n' \
' obtained count: %d\n' \
% (rec_string, count, len(ids))
self.logger.notifyChannel('init', severity, msg)
self.logger.log(severity, msg)
sevval = getattr(logging, severity.upper())
if sevval >= config['assert_exit_level']:
# TODO: define a dedicated exception
@ -737,7 +737,7 @@ form: module.record_id""" % (xml_id,)
' expected value: %r\n' \
' obtained value: %r\n' \
% (rec_string, etree.tostring(test), expected_value, expression_value)
self.logger.notifyChannel('init', severity, msg)
self.logger.log(severity, msg)
sevval = getattr(logging, severity.upper())
if sevval >= config['assert_exit_level']:
# TODO: define a dedicated exception
@ -852,11 +852,11 @@ form: module.record_id""" % (xml_id,)
def parse(self, de):
if not de.tag in ['terp', 'openerp']:
self.logger.notifyChannel("init", netsvc.LOG_ERROR, "Mismatch xml format" )
self.logger.error("Mismatch xml format")
raise Exception( "Mismatch xml format: only terp or openerp as root tag" )
if de.tag == 'terp':
self.logger.notifyChannel("init", netsvc.LOG_WARNING, "The tag <terp/> is deprecated, use <openerp/>")
self.logger.warning("The tag <terp/> is deprecated, use <openerp/>")
for n in de.findall('./data'):
for rec in n:
@ -874,7 +874,7 @@ form: module.record_id""" % (xml_id,)
def __init__(self, cr, module, idref, mode, report=None, noupdate=False):
self.logger = netsvc.Logger()
self.logger = logging.getLogger('init')
self.mode = mode
self.module = module
self.cr = cr
@ -907,13 +907,14 @@ def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init',
encoding: utf-8'''
if not idref:
idref={}
logger = logging.getLogger('init')
model = ('.'.join(fname.split('.')[:-1]).split('-'))[0]
#remove folder path from model
head, model = os.path.split(model)
pool = pooler.get_pool(cr.dbname)
input = cStringIO.StringIO(csvcontent)
input = cStringIO.StringIO(csvcontent) #FIXME
reader = csv.reader(input, quotechar='"', delimiter=',')
fields = reader.next()
fname_partial = ""
@ -931,9 +932,7 @@ def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init',
reader.next()
if not (mode == 'init' or 'id' in fields):
logger = netsvc.Logger()
logger.notifyChannel("init", netsvc.LOG_ERROR,
"Import specification does not contain 'id' and we are in init mode, Cannot continue.")
logger.error("Import specification does not contain 'id' and we are in init mode, Cannot continue.")
return
uid = 1
@ -944,8 +943,7 @@ def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init',
try:
datas.append(map(lambda x: misc.ustr(x), line))
except:
logger = netsvc.Logger()
logger.notifyChannel("init", netsvc.LOG_ERROR, "Cannot import the line: %s" % line)
logger.error("Cannot import the line: %s", line)
pool.get(model).import_data(cr, uid, fields, datas,mode, module, noupdate, filename=fname_partial)
if config.get('import_partial'):
data = pickle.load(file(config.get('import_partial')))

View File

@ -93,3 +93,24 @@ def synchronized(lock_attr='_lock'):
return decorator
from inspect import getsourcefile
def frame_codeinfo(fframe, back=0):
    """Locate the (filename, line number) of a frame *back* levels up.

    @param fframe: the starting frame object (may be None or False)
    @param back: how many caller levels to walk up from fframe
    @return (filename, lineno) where lineno is either int or string==''
    """
    try:
        if not fframe:
            return ("<unknown>", '')
        # Walk up the requested number of caller frames.
        for _unused in range(back):
            fframe = fframe.f_back
        try:
            src_file = getsourcefile(fframe)
        except TypeError:
            # Frames of built-in calls have no source file.
            src_file = '<builtin>'
        return (src_file, fframe.f_lineno or '')
    except Exception:
        # Never let introspection failures propagate to the caller.
        return ("<unknown>", '')

View File

@ -25,6 +25,7 @@ Miscelleanous tools used by OpenERP.
"""
import inspect
import subprocess
import logging
import os
import re
@ -33,6 +34,7 @@ import socket
import sys
import threading
import time
import warnings
import zipfile
from datetime import datetime
from email.MIMEText import MIMEText
@ -141,27 +143,24 @@ def exec_pg_command(name, *args):
if not prog:
raise Exception('Couldn\'t find %s' % name)
args2 = (os.path.basename(prog),) + args
return os.spawnv(os.P_WAIT, prog, args2)
return subprocess.call(args2, executable=prog)
def exec_pg_command_pipe(name, *args):
prog = find_pg_tool(name)
if not prog:
raise Exception('Couldn\'t find %s' % name)
if os.name == "nt":
cmd = '"' + prog + '" ' + ' '.join(args)
else:
cmd = prog + ' ' + ' '.join(args)
return os.popen2(cmd, 'b')
pop = subprocess.Popen(args, executable=prog, shell=True, bufsize= -1,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
return (pop.stdin, pop.stdout)
def exec_command_pipe(name, *args):
prog = find_in_path(name)
if not prog:
raise Exception('Couldn\'t find %s' % name)
if os.name == "nt":
cmd = '"'+prog+'" '+' '.join(args)
else:
cmd = prog+' '+' '.join(args)
return os.popen2(cmd, 'b')
pop = subprocess.Popen(args, executable=prog, shell=True, bufsize= -1,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
return (pop.stdin, pop.stdout)
#----------------------------------------------------------
# File paths
@ -1114,6 +1113,8 @@ def debug(what):
--log-level=debug
"""
warnings.warn("The tools.debug() method is deprecated, please use logging.",
DeprecationWarning, stacklevel=2)
from inspect import stack
from pprint import pformat
st = stack()[1]
@ -1122,10 +1123,10 @@ def debug(what):
what = pformat(what)
if param != what:
what = "%s = %s" % (param, what)
netsvc.Logger().notifyChannel(st[3], netsvc.LOG_DEBUG, what)
logging.getLogger(st[3]).debug(what)
icons = map(lambda x: (x,x), ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_BOLD',
__icons_list = ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_BOLD',
'STOCK_CANCEL', 'STOCK_CDROM', 'STOCK_CLEAR', 'STOCK_CLOSE', 'STOCK_COLOR_PICKER',
'STOCK_CONNECT', 'STOCK_CONVERT', 'STOCK_COPY', 'STOCK_CUT', 'STOCK_DELETE',
'STOCK_DIALOG_AUTHENTICATION', 'STOCK_DIALOG_ERROR', 'STOCK_DIALOG_INFO',
@ -1161,7 +1162,11 @@ icons = map(lambda x: (x,x), ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_
'terp-gdu-smart-failing','terp-go-week','terp-gtk-select-all','terp-locked','terp-mail-forward',
'terp-mail-message-new','terp-mail-replied','terp-rating-rated','terp-stage','terp-stock_format-scientific',
'terp-dolar_ok!','terp-idea','terp-stock_format-default','terp-mail-','terp-mail_delete'
])
]
def icons(*a, **kw):
    """Return the known icon names as a list of (value, label) pairs.

    Positional/keyword arguments are accepted for backward
    compatibility and are ignored.
    """
    # Reading a module-level global needs no 'global' declaration.
    return [(icon, icon) for icon in __icons_list]
def extract_zip_file(zip_file, outdirectory):
zf = zipfile.ZipFile(zip_file, 'r')

91
bin/tools/test_reports.py Normal file
View File

@ -0,0 +1,91 @@
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" Helper functions for reports testing.
Please /do not/ import this file by default, but only explicitly call it
through the code of yaml tests.
"""
import netsvc
import tools
import logging
from subprocess import Popen, PIPE
import os
import tempfile
def try_report(cr, uid, rname, ids, data=None, context=None):
    """ Try to render a report <rname> with contents of ids.

        This function should also check for common pitfalls of reports
        (empty output, bad PDF header, unevaluated "[[ ... ]]" expressions
        left in the rendered text).

        @param cr       database cursor, passed through to the report service
        @param uid      user id the report is rendered as
        @param rname    full service name of the report (e.g. "report.foo")
        @param ids      record ids to render the report for
        @param data     optional datas dict for the report service
        @param context  optional context dict
        @return True on success
        @raise RuntimeError if the service returns something other than a
               (data, format) tuple
        @raise ValueError on empty output or a non-PDF header for a pdf report
    """
    log = logging.getLogger('tools.test_reports')
    if data is None:
        data = {}
    if context is None:
        context = {}
    log.debug("Trying %s.create(%r)", rname, ids)
    res = netsvc.LocalService(rname).create(cr, uid, ids, data, context)
    if not isinstance(res, tuple):
        raise RuntimeError("Result of %s.create() should be a (data,format) tuple, now it is a %s" % \
                (rname, type(res)))
    (res_data, res_format) = res

    if not res_data:
        raise ValueError("Report %s produced an empty result!" % rname)

    if tools.config['test_report_directory']:
        # Persist the rendered report for manual inspection; close the
        # handle explicitly instead of leaking it.
        fobj = open(os.path.join(tools.config['test_report_directory'], rname + '.' + res_format), 'wb+')
        try:
            fobj.write(res_data)
        finally:
            fobj.close()

    log.debug("Have a %s report for %s, will examine it", res_format, rname)
    if res_format == 'pdf':
        if res_data[:5] != '%PDF-':
            raise ValueError("Report %s produced a non-pdf header, %r" % (rname, res_data[:10]))

        res_text = False
        try:
            fd, rfname = tempfile.mkstemp(suffix=res_format)
            try:
                os.write(fd, res_data)
                os.close(fd)
                # Extract the text through pdftotext so we can scan it for
                # tell-tale signs of broken report expressions. Close the
                # pipe and reap the child to avoid fd leaks and zombies.
                proc = Popen(['pdftotext', '-enc', 'UTF-8', '-nopgbrk', rfname, '-'],
                             shell=False, stdout=PIPE)
                res_text = tools.ustr(proc.stdout.read())
                proc.stdout.close()
                proc.wait()
            finally:
                # Remove the temp file even when pdftotext fails, instead of
                # leaking it on the exception path.
                os.unlink(rfname)
        except Exception:
            log.warning("Cannot extract report's text:", exc_info=True)

        if res_text is not False:
            for line in res_text.split('\n'):
                if ('[[' in line) or ('[ [' in line):
                    # An unevaluated "[[ ... ]]" expression leaked into the
                    # output. Show the first 80 chars of the offending line
                    # (line[80:] would be empty for short lines and hide it).
                    log.error("Report %s may have bad expression near: \"%s\".", rname, line[:80])
            # TODO more checks, what else can be a sign of a faulty report?
    elif res_format == 'foobar':
        # TODO
        pass
    else:
        log.warning("Report %s produced a \"%s\" chunk, cannot examine it", rname, res_format)

    return True
#eof

View File

@ -27,6 +27,7 @@ import itertools
import locale
import os
import re
import logging
import tarfile
import tempfile
from os.path import join
@ -220,11 +221,11 @@ def unquote(str):
# class to handle po files
class TinyPoFile(object):
def __init__(self, buffer):
self.logger = netsvc.Logger()
self.logger = logging.getLogger('i18n')
self.buffer = buffer
def warn(self, msg):
self.logger.notifyChannel("i18n", netsvc.LOG_WARNING, msg)
self.logger.warning(msg)
def __iter__(self):
self.buffer.seek(0)
@ -316,7 +317,9 @@ class TinyPoFile(object):
self.first = False
if name is None:
self.warn('Missing "#:" formated comment for the following source:\n\t%s' % (source,))
if not fuzzy:
self.warn('Missing "#:" formated comment at line %d for the following source:\n\t%s',
self.cur_line(), source[:30])
return self.next()
return type, name, res_id, source, trad
@ -483,7 +486,7 @@ def in_modules(object_name, modules):
return module in modules
def trans_generate(lang, modules, dbname=None):
logger = netsvc.Logger()
logger = logging.getLogger('i18n')
if not dbname:
dbname=tools.config['db_name']
if not modules:
@ -499,13 +502,21 @@ def trans_generate(lang, modules, dbname=None):
query = 'SELECT name, model, res_id, module' \
' FROM ir_model_data'
query_models = """SELECT m.id, m.model, imd.module
FROM ir_model AS m, ir_model_data AS imd
WHERE m.id = imd.res_id AND imd.model = 'ir.model' """
if 'all_installed' in modules:
query += ' WHERE module IN ( SELECT name FROM ir_module_module WHERE state = \'installed\') '
query_models += " AND imd.module in ( SELECT name FROM ir_module_module WHERE state = 'installed') "
query_param = None
if 'all' not in modules:
query += ' WHERE module IN %s'
query_models += ' AND imd.module in %s'
query_param = (tuple(modules),)
query += ' ORDER BY module, model, name'
query_models += ' ORDER BY module, model'
cr.execute(query, query_param)
@ -526,12 +537,12 @@ def trans_generate(lang, modules, dbname=None):
xml_name = "%s.%s" % (module, encode(xml_name))
if not pool.get(model):
logger.notifyChannel("db", netsvc.LOG_ERROR, "Unable to find object %r" % (model,))
logger.error("Unable to find object %r", model)
continue
exists = pool.get(model).exists(cr, uid, res_id)
if not exists:
logger.notifyChannel("db", netsvc.LOG_WARNING, "Unable to find object %r with id %d" % (model, res_id))
logger.warning("Unable to find object %r with id %d", model, res_id)
continue
obj = pool.get(model).browse(cr, uid, res_id)
@ -558,7 +569,7 @@ def trans_generate(lang, modules, dbname=None):
# export fields
if not result.has_key('fields'):
logger.notifyChannel("db",netsvc.LOG_WARNING,"res has no fields: %r" % result)
logger.warning("res has no fields: %r", result)
continue
for field_name, field_def in result['fields'].iteritems():
res_name = name + ',' + field_name
@ -587,7 +598,7 @@ def trans_generate(lang, modules, dbname=None):
try:
field_name = encode(obj.name)
except AttributeError, exc:
logger.notifyChannel("db", netsvc.LOG_ERROR, "name error in %s: %s" % (xml_name,str(exc)))
logger.error("name error in %s: %s", xml_name, str(exc))
continue
objmodel = pool.get(obj.model)
if not objmodel or not field_name in objmodel._columns:
@ -635,22 +646,9 @@ def trans_generate(lang, modules, dbname=None):
for t in parse_func(d.iter()):
push_translation(module, report_type, name, 0, t)
except (IOError, etree.XMLSyntaxError):
logging.getLogger("i18n").exception("couldn't export translation for report %s %s %s", name, report_type, fname)
logger.exception("couldn't export translation for report %s %s %s", name, report_type, fname)
model_obj = pool.get(model)
def push_constraint_msg(module, term_type, model, msg):
# Check presence of __call__ directly instead of using
# callable() because it will be deprecated as of Python 3.0
if not hasattr(msg, '__call__'):
push_translation(module, term_type, model, 0, encode(msg))
for constraint in model_obj._constraints:
push_constraint_msg(module, 'constraint', model, constraint[1])
for constraint in model_obj._sql_constraints:
push_constraint_msg(module, 'sql_constraint', model, constraint[2])
for field_name,field_def in model_obj._columns.items():
for field_name,field_def in obj._table._columns.items():
if field_def.translate:
name = model + "," + field_name
try:
@ -659,6 +657,32 @@ def trans_generate(lang, modules, dbname=None):
trad = ''
push_translation(module, 'model', name, xml_name, encode(trad))
# End of data for ir.model.data query results
cr.execute(query_models, query_param)
def push_constraint_msg(module, term_type, model, msg):
# Check presence of __call__ directly instead of using
# callable() because it will be deprecated as of Python 3.0
if not hasattr(msg, '__call__'):
push_translation(module, term_type, model, 0, encode(msg))
for (model_id, model, module) in cr.fetchall():
module = encode(module)
model = encode(model)
model_obj = pool.get(model)
if not model_obj:
logging.getLogger("i18n").error("Unable to find object %r", model)
continue
for constraint in getattr(model_obj, '_constraints', []):
push_constraint_msg(module, 'constraint', model, constraint[1])
for constraint in getattr(model_obj, '_sql_constraints', []):
push_constraint_msg(module, 'sql_constraint', model, constraint[2])
# parse source code for _() calls
def get_module_from_path(path, mod_paths=None):
if not mod_paths:
@ -690,7 +714,7 @@ def trans_generate(lang, modules, dbname=None):
else :
path_list = [root_path,] + apaths
logger.notifyChannel("i18n", netsvc.LOG_DEBUG, "Scanning modules at paths: %s" % (' '.join(path_list),))
logger.debug("Scanning modules at paths: ", path_list)
mod_paths = []
join_dquotes = re.compile(r'([^\\])"[\s\\]*"', re.DOTALL)
@ -704,7 +728,7 @@ def trans_generate(lang, modules, dbname=None):
module = get_module_from_path(fabsolutepath, mod_paths=mod_paths)
is_mod_installed = module in installed_modules
if (('all' in modules) or (module in modules)) and is_mod_installed:
logger.notifyChannel("i18n", netsvc.LOG_DEBUG, "Scanning code of %s at module: %s" % (frelativepath, module))
logger.debug("Scanning code of %s at module: %s", frelativepath, module)
code_string = tools.file_open(fabsolutepath, subdir='').read()
if module in installed_modules:
frelativepath = str("addons" + frelativepath)
@ -732,7 +756,7 @@ def trans_generate(lang, modules, dbname=None):
push_translation(module, terms_type, frelativepath, 0, encode(src))
for path in path_list:
logger.notifyChannel("i18n", netsvc.LOG_DEBUG, "Scanning files of modules at %s" % path)
logger.debug("Scanning files of modules at %s", path)
for root, dummy, files in tools.osutil.walksymlinks(path):
for fname in itertools.chain(fnmatch.filter(files, '*.py')):
export_code_terms_from_file(fname, path, root, 'code')
@ -750,23 +774,26 @@ def trans_generate(lang, modules, dbname=None):
cr.close()
return out
def trans_load(db_name, filename, lang, strict=False, verbose=True, context={}):
logger = netsvc.Logger()
def trans_load(db_name, filename, lang, strict=False, verbose=True, context=None):
logger = logging.getLogger('i18n')
try:
fileobj = open(filename,'r')
logger.info("loading %s", filename)
fileformat = os.path.splitext(filename)[-1][1:].lower()
r = trans_load_data(db_name, fileobj, fileformat, lang, strict=strict, verbose=verbose, context=context)
fileobj.close()
return r
except IOError:
if verbose:
logger.notifyChannel("i18n", netsvc.LOG_ERROR, "couldn't read translation file %s" % (filename,))
logger.error("couldn't read translation file %s", filename)
return None
def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=None, verbose=True, context={}):
logger = netsvc.Logger()
def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=None, verbose=True, context=None):
logger = logging.getLogger('i18n')
if verbose:
logger.notifyChannel("i18n", netsvc.LOG_INFO, 'loading translation file for language %s' % (lang))
logger.info('loading translation file for language %s', lang)
if context is None:
context = {}
pool = pooler.get_pool(db_name)
lang_obj = pool.get('res.lang')
trans_obj = pool.get('ir.translation')
@ -790,7 +817,7 @@ def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=
if fail:
lc = locale.getdefaultlocale()[0]
msg = 'Unable to get information for locale %s. Information from the default locale (%s) have been used.'
logger.notifyChannel('i18n', netsvc.LOG_WARNING, msg % (lang, lc))
logger.warning(msg, lang, lc)
if not lang_name:
lang_name = tools.get_languages().get(lang, lang)
@ -829,6 +856,7 @@ def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=
reader = TinyPoFile(fileobj)
f = ['type', 'name', 'res_id', 'src', 'value']
else:
logger.error('Bad file format: %s', fileformat)
raise Exception(_('Bad file format'))
# read the rest of the file
@ -849,7 +877,7 @@ def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=
dic[f[i]] = row[i]
try:
dic['res_id'] = int(dic['res_id'])
dic['res_id'] = dic['res_id'] and int(dic['res_id']) or 0
except:
model_data_ids = model_data_obj.search(cr, uid, [
('model', '=', dic['name'].split(',')[0]),
@ -907,11 +935,10 @@ def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=
cr.commit()
cr.close()
if verbose:
logger.notifyChannel("i18n", netsvc.LOG_INFO,
"translation file loaded succesfully")
logger.info("translation file loaded succesfully")
except IOError:
filename = '[lang: %s][format: %s]' % (iso_lang or 'new', fileformat)
logger.notifyChannel("i18n", netsvc.LOG_ERROR, "couldn't read translation file %s" % (filename,))
logger.exception("couldn't read translation file %s", filename)
def get_locales(lang=None):
if lang is None:

View File

@ -174,9 +174,7 @@ class YamlInterpreter(object):
else:
module = self.module
checked_xml_id = xml_id
ir_id = self.pool.get('ir.model.data')._get_id(self.cr, self.uid, module, checked_xml_id)
obj = self.pool.get('ir.model.data').read(self.cr, self.uid, ir_id, ['res_id'])
id = int(obj['res_id'])
_, id = self.pool.get('ir.model.data').get_object_reference(self.cr, self.uid, module, checked_xml_id)
self.id_map[xml_id] = id
return id

View File

@ -103,9 +103,7 @@ def data_files():
files.append((root, [join(root, name) for name in names]))
#for root, _, names in os.walk('pixmaps'):
# files.append((root, [join(root, name) for name in names]))
files.append(('.', [join('bin', 'import_xml.rng'),
join('bin', 'server.pkey'),
join('bin', 'server.cert')]))
files.append(('.', [join('bin', 'import_xml.rng'),]))
else:
man_directory = join('share', 'man')
files.append((join(man_directory, 'man1'), ['man/openerp-server.1']))
@ -120,9 +118,7 @@ def data_files():
openerp_site_packages = join(get_python_lib(prefix=''), 'openerp-server')
files.append((openerp_site_packages, [join('bin', 'import_xml.rng'),
join('bin', 'server.pkey'),
join('bin', 'server.cert')]))
files.append((openerp_site_packages, [join('bin', 'import_xml.rng'),]))
if sys.version_info[0:2] == (2,5):
files.append((openerp_site_packages, [ join('python25-compat','BaseHTTPServer.py'),

View File

@ -3,19 +3,19 @@
# DN options
# The organization of the subject.
organization = "Acme inc."
organization = "Some organization."
# The organizational unit of the subject.
unit = "dept."
unit = "ERP dept."
# The locality of the subject.
# locality =
# The state of the certificate owner.
state = "Attiki"
state = "State"
# The country of the subject. Two letter code.
country = GR
country = BE
# The common name of the certificate owner.
cn = "Some company"