Merged with stable

bzr revid: hda@tinyerp.com-20100216063114-d2crv3zayarq2vk6
This commit is contained in:
HDA (OpenERP) 2010-02-16 12:01:14 +05:30
commit a0ba43d800
19 changed files with 231 additions and 238 deletions

View File

@@ -44,6 +44,7 @@ CREATE TABLE ir_model_fields (
     state varchar(64) default 'base',
     view_load boolean,
     relate boolean default False,
+    relation_field varchar(128),
     primary key(id)
 );
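Note: the new relation_field column records, for one2many fields, the name of the inverse many2one column on the co-model (what the ORM exposes as _fields_id; the orm.py hunks below fill and persist it). A small illustration of the pair being stored, using field names that are only examples:

    # Illustrative only: map a one2many field to what ends up in ir_model_fields.
    # relation = the co-model, relation_field = the inverse many2one column.
    one2many_examples = {
        'address':    {'relation': 'res.partner.address', 'relation_field': 'partner_id'},
        'order_line': {'relation': 'sale.order.line',     'relation_field': 'order_id'},
    }
    for name, vals in sorted(one2many_examples.items()):
        print "%s: relation=%s relation_field=%s" % (name, vals['relation'], vals['relation_field'])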

View File

@@ -427,7 +427,6 @@ class actions_server(osv.osv):
         'sequence': lambda *a: 5,
         'code': lambda *a: """# You can use the following variables
 #  - object
-#  - object2
 #  - time
 #  - cr
 #  - uid
@@ -488,7 +487,7 @@ class actions_server(osv.osv):
             result = eval(exp, {'object':obj, 'context': context,'time':time})
             if result in (None, False):
                 return str("--------")
-            return str(result)
+            return tools.ustr(result)

         com = re.compile('(\[\[.+?\]\])')
         message = com.sub(merge, keystr)
@@ -534,7 +533,7 @@ class actions_server(osv.osv):
                     'ids': ids,
                     'cr': cr,
                     'uid': uid,
-                    'obj':obj
+                    'object':obj
                 }
                 exec action.code in localdict
                 if 'action' in localdict:
@@ -553,10 +552,10 @@ class actions_server(osv.osv):
                         continue
                     if not user:
                         raise osv.except_osv(_('Error'), _("Please specify server option --smtp-from !"))
-                    subject = self.merge_message(cr, uid, str(action.subject), action, context)
-                    body = self.merge_message(cr, uid, str(action.message), action, context)
+                    subject = self.merge_message(cr, uid, action.subject, action, context)
+                    body = self.merge_message(cr, uid, action.message, action, context)
                     if tools.email_send(user, [address], subject, body, debug=False, subtype='html') == True:
                         logger.notifyChannel('email', netsvc.LOG_INFO, 'Email successfully send to : %s' % (address))
                     else:
@@ -573,14 +572,8 @@ class actions_server(osv.osv):
             if action.state == 'sms':
                 #TODO: set the user and password from the system
                 # for the sms gateway user / password
-                api_id = ''
-                text = action.sms
-                to = self.get_mobile(cr, uid, action, context)
-                #TODO: Apply message mearge with the field
-                if tools.sms_send(user, password, api_id, text, to) == True:
-                    logger.notifyChannel('sms', netsvc.LOG_INFO, 'SMS successfully send to : %s' % (action.address))
-                else:
-                    logger.notifyChannel('sms', netsvc.LOG_ERROR, 'Failed to send SMS to : %s' % (action.address))
+                # USE smsclient module from extra-addons
+                logger.notifyChannel('sms', netsvc.LOG_ERROR, 'SMS Facility has not been implemented yet. Use smsclient module!')
             if action.state == 'other':
                 res = []
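Note: the merge_message() change from str() to tools.ustr() matters as soon as a merged field contains non-ASCII text; in Python 2, str() on a unicode object encodes with the ASCII codec and raises UnicodeEncodeError. A rough sketch of the behaviour, with ustr_like standing in for tools.ustr (whose real fallback chain is more elaborate):

    # -*- coding: utf-8 -*-
    def ustr_like(value, encoding='utf-8'):
        # simplified stand-in for tools.ustr: coerce anything to unicode
        if isinstance(value, unicode):
            return value
        try:
            return unicode(value)
        except UnicodeDecodeError:
            return unicode(str(value), encoding, 'replace')

    subject = u'Relance facture n° 42'
    print ustr_like(subject)          # fine
    try:
        str(subject)                  # what the old merge code effectively did
    except UnicodeEncodeError, e:
        print 'str() failed:', e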

View File

@@ -79,7 +79,7 @@ class ir_model(osv.osv):
         if context:
             context.pop('__last_update', None)
         return super(ir_model,self).write(cr, user, ids, vals, context)

     def create(self, cr, user, vals, context=None):
         if context and context.get('manual',False):
             vals['state']='manual'
@@ -87,7 +87,9 @@ class ir_model(osv.osv):
         if vals.get('state','base')=='manual':
             self.instanciate(cr, user, vals['model'], context)
             self.pool.get(vals['model']).__init__(self.pool, cr)
-            self.pool.get(vals['model'])._auto_init(cr,{'field_name':vals['name'],'field_state':'manual','select':vals.get('select_level','0')})
+            ctx = context.copy()
+            ctx.update({'field_name':vals['name'],'field_state':'manual','select':vals.get('select_level','0')})
+            self.pool.get(vals['model'])._auto_init(cr, ctx)
             #pooler.restart_pool(cr.dbname)
         return res
@@ -241,10 +243,12 @@ class ir_model_fields(osv.osv):
     def unlink(self, cr, user, ids, context=None):
         for field in self.browse(cr, user, ids, context):
             if field.state <> 'manual':
-                raise except_orm(_('Error'), _("You can not remove the field '%s' !") %(field.name,))
+                raise except_orm(_('Error'), _("You cannot remove the field '%s' !") %(field.name,))
             #
             # MAY BE ADD A ALTER TABLE DROP ?
             #
+            #Removing _columns entry for that table
+            self.pool.get(field.model)._columns.pop(field.name,None)
         return super(ir_model_fields, self).unlink(cr, user, ids, context)

     def create(self, cr, user, vals, context=None):
@@ -264,9 +268,12 @@ class ir_model_fields(osv.osv):
             if self.pool.get(vals['model']):
                 self.pool.get(vals['model']).__init__(self.pool, cr)
                 #Added context to _auto_init for special treatment to custom field for select_level
-                self.pool.get(vals['model'])._auto_init(cr, {'field_name':vals['name'],'field_state':'manual','select':vals.get('select_level','0')})
+                ctx = context.copy()
+                ctx.update({'field_name':vals['name'],'field_state':'manual','select':vals.get('select_level','0'),'update_custom_fields':True})
+                self.pool.get(vals['model'])._auto_init(cr, ctx)
         return res
 ir_model_fields()

 class ir_model_access(osv.osv):
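Note: both create() paths now copy the caller's context before injecting field_name / field_state / select (plus update_custom_fields for ir.model.fields) instead of handing _auto_init() a brand-new dict, so keys such as lang or module survive. A minimal sketch of why the copy matters, with plain dicts:

    def with_field_setup(context, extra):
        ctx = context.copy()      # keep the caller's keys (lang, tz, module, ...)
        ctx.update(extra)
        return ctx

    caller_ctx = {'lang': 'fr_FR', 'module': 'base'}
    ctx = with_field_setup(caller_ctx, {'field_name': 'x_note', 'field_state': 'manual', 'select': '0'})
    print ctx          # merged view passed to _auto_init
    print caller_ctx   # the caller's dict is left untouched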

View File

@@ -42,24 +42,6 @@ class ir_rule_group(osv.osv):
         'global': lambda *a: True,
     }

-    def unlink(self, cr, uid, ids, context=None):
-        res = super(ir_rule_group, self).unlink(cr, uid, ids, context=context)
-        # Restart the cache on the domain_get method of ir.rule
-        self.pool.get('ir.rule').domain_get.clear_cache(cr.dbname)
-        return res
-
-    def create(self, cr, user, vals, context=None):
-        res = super(ir_rule_group, self).create(cr, user, vals, context=context)
-        # Restart the cache on the domain_get method of ir.rule
-        self.pool.get('ir.rule').domain_get.clear_cache(cr.dbname)
-        return res
-
-    def write(self, cr, uid, ids, vals, context=None):
-        res = super(ir_rule_group, self).write(cr, uid, ids, vals, context=context)
-        # Restart the cache on the domain_get method of ir.rule
-        self.pool.get('ir.rule').domain_get.clear_cache(cr.dbname)
-        return res
-
 ir_rule_group()

View File

@@ -20,7 +20,6 @@
 ##############################################################################
 from osv import fields, osv
-from osv.orm import browse_null, browse_record
 import re
 import tools
@@ -169,8 +168,8 @@ class ir_ui_menu(osv.osv):
                 ('res_id', '=', id),
                 ])
             for iv in ir_values_obj.browse(cr, uid, ids):
-                new_id = ir_values_obj.copy(cr, uid, iv.id,
-                        default={'res_id': res}, context=context)
+                ir_values_obj.copy(cr, uid, iv.id, default={'res_id': res},
+                        context=context)
         return res

     def _action(self, cursor, user, ids, name, arg, context=None):
@@ -243,6 +242,7 @@ class ir_ui_menu(osv.osv):
             ('ir.actions.act_window', 'ir.actions.act_window'),
             ('ir.actions.wizard', 'ir.actions.wizard'),
             ('ir.actions.url', 'ir.actions.url'),
+            ('ir.actions.server', 'ir.actions.server'),
             ]),
     }
     _defaults = {

View File

@@ -47,7 +47,7 @@ class Country(osv.osv):
             context={}
         ids = False
         if len(name) == 2:
-            ids = self.search(cr, user, [('code', '=', name)] + args,
+            ids = self.search(cr, user, [('code', 'ilike', name)] + args,
                     limit=limit, context=context)
         if not ids:
             ids = self.search(cr, user, [('name', operator, name)] + args,
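Note: switching the two-letter code lookup from '=' to 'ilike' makes name_search case-insensitive, so typing 'be' still finds the country stored with code 'BE' (OpenERP's ilike with no wildcard behaves roughly as a case-insensitive containment test). In isolation:

    def two_letter_code_matches(stored_code, query):
        # rough equivalent of ('code', 'ilike', query) for a 2-letter query;
        # the old ('code', '=', query) required an exact, case-sensitive match
        return query.lower() in stored_code.lower()

    print two_letter_code_matches('BE', 'be')   # True with ilike, False with '='
    print two_letter_code_matches('BE', 'BE')   # True either way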

View File

@@ -161,7 +161,6 @@ class res_company(osv.osv):
     def write(self, cr, *args, **argv):
         self.cache_restart(cr)
         # Restart the cache on the company_get method
-        self.pool.get('ir.rule').domain_get.clear_cache(cr.dbname)
         return super(res_company, self).write(cr, *args, **argv)

     def _get_euro(self, cr, uid, context={}):

View File

@@ -42,11 +42,12 @@ class groups(osv.osv):
     _sql_constraints = [
         ('name_uniq', 'unique (name)', 'The name of the group must be unique !')
     ]
     def copy(self, cr, uid, id, default=None, context={}):
         group_name = self.read(cr, uid, [id], ['name'])[0]['name']
         default.update({'name': group_name +' (copy)'})
         return super(groups, self).copy(cr, uid, id, default, context)
     def write(self, cr, uid, ids, vals, context=None):
         if 'name' in vals:
             if vals['name'].startswith('-'):
@@ -54,7 +55,6 @@ class groups(osv.osv):
                     _('The name of the group can not start with "-"'))
         res = super(groups, self).write(cr, uid, ids, vals, context=context)
         # Restart the cache on the company_get method
-        self.pool.get('ir.rule').domain_get.clear_cache(cr.dbname)
         self.pool.get('ir.model.access').call_cache_clearing_methods(cr)
         return res
@@ -265,7 +265,6 @@ class users(osv.osv):
         res = super(users, self).write(cr, uid, ids, values, *args, **argv)
         self.company_get.clear_cache(cr.dbname)
         # Restart the cache on the company_get method
-        self.pool.get('ir.rule').domain_get.clear_cache(cr.dbname)
         self.pool.get('ir.model.access').call_cache_clearing_methods(cr)
         return res

View File

@@ -148,7 +148,7 @@ class expression(object):
                     self.__exp[i] = ('id', 'in', right1)
                 continue

-            if field._properties and not field.store:
+            if field._properties and ((not field.store) or field._fnct_search):
                 # this is a function field
                 if not field._fnct_search:
                     # the function field doesn't provide a search function and doesn't store
@@ -164,7 +164,6 @@ class expression(object):
                         self.__exp.insert(i + 2 + j, se)
             # else, the value of the field is store in the database, so we search on it

             elif field._type == 'one2many':
-                # Applying recursivity on field(one2many)
                 if operator == 'child_of':
@@ -184,15 +183,21 @@ class expression(object):
                 if right:
                     if isinstance(right, basestring):
                         ids2 = [x[0] for x in field_obj.name_search(cr, uid, right, [], operator, context=context, limit=None)]
-                        operator = 'in'
+                        if ids2:
+                            operator = 'in'
                     else:
                         if not isinstance(right,list):
                             ids2 = [right]
                         else:
                             ids2 = right
                     if not ids2:
-                        call_null = True
-                        operator = 'in' # operator changed because ids are directly related to main object
+                        if operator in ['like','ilike','in','=']:
+                            #no result found with given search criteria
+                            call_null = False
+                            self.__exp[i] = ('id','=',0)
+                        else:
+                            call_null = True
+                            operator = 'in' # operator changed because ids are directly related to main object
                 else:
                     call_null = False
                     o2m_op = 'in'
@@ -227,15 +232,21 @@ class expression(object):
                 if right:
                     if isinstance(right, basestring):
                         res_ids = [x[0] for x in field_obj.name_search(cr, uid, right, [], operator, context=context)]
-                        operator = 'in'
+                        if res_ids:
+                            operator = 'in'
                     else:
                         if not isinstance(right, list):
                             res_ids = [right]
                         else:
                             res_ids = right
                     if not res_ids:
-                        call_null_m2m = True
-                        operator = 'in' # operator changed because ids are directly related to main object
+                        if operator in ['like','ilike','in','=']:
+                            #no result found with given search criteria
+                            call_null_m2m = False
+                            self.__exp[i] = ('id','=',0)
+                        else:
+                            call_null_m2m = True
+                            operator = 'in' # operator changed because ids are directly related to main object
                     else:
                         call_null_m2m = False
                         m2m_op = 'in'
@@ -248,7 +259,7 @@ class expression(object):
                         if operator in ['not like','not ilike','not in','<>','!=']:
                             m2m_op = 'in'
                         self.__exp[i] = ('id', m2m_op, self.__execute_recursive_in(cr, field._id1, field._rel, field._id2, [], operator, field._type) or [0])

             elif field._type == 'many2one':
                 if operator == 'child_of':
                     if isinstance(right, basestring):
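Note: when a many2one or many2many operand is a free-text value and name_search() finds nothing, the leaf is now rewritten to ('id', '=', 0) for the positive operators (like, ilike, in, =), so the search returns no rows instead of silently ignoring the criterion; negative operators keep the old call_null path. A rough sketch of that rewrite rule on its own:

    def rewrite_empty_match(leaf, found_ids):
        # leaf is a (field, operator, value) triple; found_ids is what name_search returned
        field, operator, value = leaf
        if found_ids:
            return (field, 'in', found_ids)
        if operator in ['like', 'ilike', 'in', '=']:
            # nothing matched the text: force an empty result set
            return ('id', '=', 0)
        # negative operators keep their old "related to main object" handling
        return leaf

    print rewrite_empty_match(('partner_id', 'ilike', 'no such partner'), [])
    print rewrite_empty_match(('partner_id', 'ilike', 'agrolait'), [7, 12])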

View File

@@ -307,13 +307,15 @@ class many2one(_column):
         # build a dictionary of the form {'id_of_distant_resource': name_of_distant_resource}
         from orm import except_orm
-        try:
-            names = dict(obj.name_get(cr, user, filter(None, res.values()), context))
-        except except_orm:
-            names = {}
-            iids = filter(None, res.values())
-            for iiid in iids:
-                names[iiid] = '// Access Denied //'
+        names = {}
+        for record in list(set(filter(None, res.values()))):
+            try:
+                record_name = dict(obj.name_get(cr, user, [record], context))
+            except except_orm:
+                record_name = {}
+                record_name[record] = '// Access Denied //'
+            names.update(record_name)

         for r in res.keys():
             if res[r] and res[r] in names:
                 res[r] = (res[r], names[res[r]])
@@ -731,15 +733,18 @@ class related(function):
             field_detail = self._relations[i]
             relation = field_detail['object']
             if not t_data[self.arg[i]]:
+                if self._type not in ('one2many', 'many2many'):
+                    t_id = t_data['id']
                 t_data = False
                 break
             if field_detail['type'] in ('one2many', 'many2many'):
                 if self._type != "many2one":
-                    t_id=t_data.id
+                    t_id = t_data.id
                     t_data = t_data[self.arg[i]][0]
             else:
-                t_id=t_data['id']
+                t_id = t_data['id']
                 t_data = t_data[self.arg[i]]

         if t_id:
             obj.pool.get(field_detail['object']).write(cr,uid,[t_id],{args[-1]:values}, context=context)
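Note: the many2one name lookup now calls name_get() one id at a time over a de-duplicated list, so a single record that raises an access error degrades to '// Access Denied //' without discarding the names of the readable records (the old code dropped them all as soon as one call failed). A standalone sketch of the pattern, with a fake AccessError standing in for orm's except_orm:

    class AccessError(Exception):
        pass

    def fake_name_get(ids):
        # pretend record 2 is not readable by the current user
        if 2 in ids:
            raise AccessError('read access denied')
        return [(i, 'Record %d' % i) for i in ids]

    raw = {10: 1, 11: 2, 12: 1, 13: None}   # field value per row, None meaning "not set"
    names = {}
    for record in list(set(filter(None, raw.values()))):
        try:
            record_name = dict(fake_name_get([record]))
        except AccessError:
            record_name = {record: '// Access Denied //'}
        names.update(record_name)
    print names   # {1: 'Record 1', 2: '// Access Denied //'}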

View File

@@ -49,15 +49,24 @@ import sys
 import time
 import traceback
 import types
-from lxml import etree

 import fields
 import netsvc
 import tools
-from tools.config import config
 from tools.translate import _

-regex_order = re.compile('^([a-z0-9_]+( *desc| *asc)?( *, *|))+$', re.I)
+import copy
+import sys
+
+try:
+    from lxml import etree
+except ImportError:
+    sys.stderr.write("ERROR: Import lxml module\n")
+    sys.stderr.write("ERROR: Try to install the python-lxml package\n")
+
+from tools.config import config
+
+regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
@@ -209,6 +218,7 @@ class browse_record(object):
             for data in datas:
                 if len(str(data['id']).split('-')) > 1:
                     data['id'] = int(str(data['id']).split('-')[0])
+                new_data = {}
                 for n, f in ffields:
                     if f._type in ('many2one', 'one2one'):
                         if data[n]:
@@ -219,14 +229,25 @@ class browse_record(object):
                             else:
                                 ids2 = data[n]
                             if ids2:
-                                data[n] = browse_record(self._cr, self._uid, ids2, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
+                                # FIXME: this happen when a _inherits object
+                                #        overwrite a field of it parent. Need
+                                #        testing to be sure we got the right
+                                #        object and not the parent one.
+                                if not isinstance(ids2, browse_record):
+                                    new_data[n] = browse_record(self._cr,
+                                        self._uid, ids2, obj, self._cache,
+                                        context=self._context,
+                                        list_class=self._list_class,
+                                        fields_process=self._fields_process)
                             else:
-                                data[n] = browse_null()
+                                new_data[n] = browse_null()
                         else:
-                            data[n] = browse_null()
+                            new_data[n] = browse_null()
                     elif f._type in ('one2many', 'many2many') and len(data[n]):
-                        data[n] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(f._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in data[n]], self._context)
-                self._data[data['id']].update(data)
+                        new_data[n] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(f._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in data[n]], self._context)
+                    else:
+                        new_data[n] = data[n]
+                self._data[data['id']].update(new_data)
                 if not name in self._data[self._id]:
                     #how did this happen?
                     self.logger.notifyChannel("browse_record", netsvc.LOG_ERROR,
@@ -386,6 +407,7 @@ class orm_template(object):
                 'readonly':(f.readonly and 1) or 0,
                 'required':(f.required and 1) or 0,
                 'selectable' : (f.selectable and 1) or 0,
+                'relation_field': (f._type=='one2many' and isinstance(f,fields.one2many)) and f._fields_id or '',
             }
             # When its a custom field,it does not contain f.select
             if context.get('field_state','base') == 'manual':
@@ -401,13 +423,13 @@ class orm_template(object):
                     vals['id'] = id
                     cr.execute("""INSERT INTO ir_model_fields (
                         id, model_id, model, name, field_description, ttype,
-                        relation,view_load,state,select_level
+                        relation,view_load,state,select_level,relation_field
                     ) VALUES (
-                        %s,%s,%s,%s,%s,%s,%s,%s,%s,%s
+                        %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                     )""", (
                         id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                         vals['relation'], bool(vals['view_load']), 'base',
-                        vals['select_level']
+                        vals['select_level'],vals['relation_field']
                     ))
                     if 'module' in context:
                         name1 = 'field_' + self._table + '_' + k
@@ -424,12 +446,12 @@ class orm_template(object):
                     cr.commit()
                     cr.execute("""UPDATE ir_model_fields SET
                         model_id=%s, field_description=%s, ttype=%s, relation=%s,
-                        view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s
+                        view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s
                     WHERE
                         model=%s AND name=%s""", (
                             vals['model_id'], vals['field_description'], vals['ttype'],
                             vals['relation'], bool(vals['view_load']),
-                            vals['select_level'], bool(vals['readonly']),bool(vals['required']),bool(vals['selectable']),vals['model'], vals['name']
+                            vals['select_level'], bool(vals['readonly']),bool(vals['required']),bool(vals['selectable']),vals['relation_field'],vals['model'], vals['name']
                         ))
                     continue
             cr.commit()
@@ -883,6 +905,8 @@ class orm_template(object):
                     if isinstance(e, osv.orm.except_orm ):
                         msg = _('Insertion Failed! ' + e[1])
                         return (-1, res, 'Line ' + str(counter) +' : ' + msg, '' )
+                    #Raising Uncaught exception
+                    raise
             for lang in translate:
                 context2 = context.copy()
                 context2['lang'] = lang
@@ -1997,14 +2021,24 @@ class orm(orm_template):
         # iterate on the "object columns"
         todo_update_store = []
+        update_custom_fields = context.get('update_custom_fields', False)
         for k in self._columns:
             if k in ('id', 'write_uid', 'write_date', 'create_uid', 'create_date'):
                 continue
                 #raise _('Can not define a column %s. Reserved keyword !') % (k,)
+            #Not Updating Custom fields
+            if k.startswith('x_') and not update_custom_fields:
+                continue
             f = self._columns[k]

             if isinstance(f, fields.one2many):
                 cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname=%s", (f._obj,))
+
+                if self.pool.get(f._obj):
+                    if f._fields_id not in self.pool.get(f._obj)._columns.keys():
+                        if not self.pool.get(f._obj)._inherits or (f._fields_id not in self.pool.get(f._obj)._inherit_fields.keys()):
+                            raise except_orm('Programming Error', ("There is no reference field '%s' found for '%s'") % (f._fields_id,f._obj,))
+
                 if cr.fetchone():
                     cr.execute("SELECT count(1) as c FROM pg_class c,pg_attribute a WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid", (f._obj, f._fields_id))
                     res = cr.fetchone()[0]
@@ -2098,7 +2132,7 @@ class orm(orm_template):
                         if isinstance(f, fields.function) and not f.store and\
                                 not getattr(f, 'nodrop', False):
                             logger.notifyChannel('orm', netsvc.LOG_INFO, 'column %s (%s) in table %s removed: converted to a function !\n' % (k, f.string, self._table))
-                            cr.execute('ALTER TABLE "%s" DROP COLUMN "%s"'% (self._table, k))
+                            cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE'% (self._table, k))
                             cr.commit()
                             f_obj_type = None
                         else:
@@ -2367,7 +2401,7 @@ class orm(orm_template):
                 fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
                 if fld_def._type in ('many2one', 'one2one'):
                     obj = self.pool.get(fld_def._obj)
-                    if not obj.search(cr, uid, [('id', '=', field_value)]):
+                    if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                         continue
                 if fld_def._type in ('many2many'):
                     obj = self.pool.get(fld_def._obj)
@@ -2394,7 +2428,7 @@ class orm(orm_template):
                     field_value = field_value2
                 value[field] = field_value
         for key in context or {}:
-            if key.startswith('default_'):
+            if key.startswith('default_') and (key[8:] in fields_list):
                 value[key[8:]] = context[key]
         return value
@@ -2529,6 +2563,8 @@ class orm(orm_template):
                 del r['id']

             for record in res:
+                if not record[col]:# if the record is deleted from _inherits table?
+                    continue
                 record.update(res3[record[col]])
                 if col not in fields_to_read:
                     del record[col]
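Note: orm.py now guards the lxml import so a missing python-lxml package prints an installation hint instead of dying with a bare ImportError at import time. The pattern in isolation (the etree = None fallback is an addition here, not part of the patch):

    import sys
    try:
        from lxml import etree
    except ImportError:
        sys.stderr.write("ERROR: Import lxml module\n")
        sys.stderr.write("ERROR: Try to install the python-lxml package\n")
        etree = None   # not in the patch: callers must then cope with the missing dependency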

View File

@@ -28,18 +28,16 @@
 import netsvc
 import pooler
 import copy
 import sys
+import traceback

 from psycopg2 import IntegrityError
-from netsvc import Logger, LOG_ERROR
-from tools.misc import UpdateableDict
-from tools.translate import _
+from tools.func import wraps

 module_list = []
 module_class_list = {}
 class_pool = {}

 class except_osv(Exception):
     def __init__(self, name, value, exc_type='warning'):
         self.name = name
@@ -48,7 +46,6 @@ class except_osv(Exception):
         self.args = (exc_type, name)

-from tools.func import wraps

 class osv_pool(netsvc.Service):
     def check(f):
@@ -59,19 +56,20 @@ class osv_pool(netsvc.Service):
                     raise except_osv('Database not ready', 'Currently, this database is not fully loaded and can not be used.')
                 return f(self, dbname, *args, **kwargs)
             except orm.except_orm, inst:
+                if inst.name == 'AccessError':
+                    tb_s = "AccessError\n" + "".join(traceback.format_exception(*sys.exc_info()))
+                    self.logger.notifyChannel('web-services', netsvc.LOG_DEBUG, tb_s)
                 self.abortResponse(1, inst.name, 'warning', inst.value)
             except except_osv, inst:
                 self.abortResponse(1, inst.name, inst.exc_type, inst.value)
             except IntegrityError, inst:
                 for key in self._sql_error.keys():
                     if key in inst[0]:
-                        self.abortResponse(1, _('Constraint Error'), 'warning', _(self._sql_error[key]))
+                        self.abortResponse(1, 'Constraint Error', 'warning', self._sql_error[key])
                 self.abortResponse(1, 'Integrity Error', 'warning', inst[0])
             except Exception, e:
-                import traceback, sys
                 tb_s = "".join(traceback.format_exception(*sys.exc_info()))
-                logger = Logger()
-                logger.notifyChannel('web-services', LOG_ERROR, tb_s)
+                self.logger.notifyChannel('web-services', netsvc.LOG_ERROR, tb_s)
                 raise
         return wrapper
@@ -86,6 +84,7 @@ class osv_pool(netsvc.Service):
         self._store_function = {}
         self._init = True
         self._init_parent = {}
+        self.logger = netsvc.Logger()
         netsvc.Service.__init__(self, 'object_proxy', audience='')
         self.exportMethod(self.obj_list)
         self.exportMethod(self.exec_workflow)
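Note: the RPC check wrapper now shares one netsvc.Logger() instance (self.logger) and, for AccessError exceptions coming from the ORM, logs the full traceback at DEBUG level before returning the usual warning to the client. A rough standalone sketch of that handler shape; except_orm_like, log_debug and abort are stand-ins, not OpenERP APIs:

    import sys
    import traceback

    class except_orm_like(Exception):
        # stand-in for orm.except_orm
        def __init__(self, name, value):
            Exception.__init__(self, name, value)
            self.name, self.value = name, value

    def log_debug(msg):
        sys.stderr.write('DEBUG web-services: %s\n' % msg)

    def abort(name, value):
        # stand-in for self.abortResponse(1, name, 'warning', value)
        sys.stderr.write('%s: %s\n' % (name, value))

    def dispatch(fn):
        try:
            return fn()
        except except_orm_like, inst:
            if inst.name == 'AccessError':
                tb_s = 'AccessError\n' + ''.join(traceback.format_exception(*sys.exc_info()))
                log_debug(tb_s)
            abort(inst.name, inst.value)

    def failing_call():
        raise except_orm_like('AccessError', 'no read access on this model')

    dispatch(failing_call)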

View File

@@ -106,11 +106,13 @@ def _process_text(self, txt):
         sps = _regex.split(txt)
         while sps:
             # This is a simple text to translate
-            result += self.localcontext.get('translate', lambda x:x)(sps.pop(0))
+            result += tools.ustr(self.localcontext.get('translate', lambda x:x)(sps.pop(0)))
             if sps:
                 try:
                     expr = sps.pop(0)
                     txt = eval(expr,self.localcontext)
+                    if txt and (isinstance(txt, unicode) or isinstance(txt, str)):
+                        txt = tools.ustr(self.localcontext.get('translate', lambda x:x)(txt))
                 except Exception,e:
                     tb_s = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
                     netsvc.Logger().notifyChannel('report', netsvc.LOG_ERROR,'report :\n%s\n%s\nexpr: %s' % (tb_s, str(e),expr.encode('utf-8')))
@@ -118,7 +120,7 @@ def _process_text(self, txt):
                 txt2 = str2xml(txt)
                 result += tools.ustr(txt2)
             elif (txt is not None) and (txt is not False):
-                result += str(txt)
+                result += tools.ustr(txt)
     return result

 def text_get(node):

View File

@@ -23,7 +23,6 @@ import base64
 import logging
 import os
 import security
-import string
 import thread
 import threading
 import time
@@ -38,6 +37,8 @@ import release
 import sql_db
 import tools
 import locale
+from cStringIO import StringIO
+
 logging.basicConfig()

 class db(netsvc.ExportService):
@@ -68,6 +69,14 @@ class db(netsvc.ExportService):
     def new_dispatch(self,method,auth,params):
         pass

+    def _create_empty_database(self, name):
+        db = sql_db.db_connect('template1')
+        cr = db.cursor()
+        try:
+            cr.autocommit(True) # avoid transaction block
+            cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "template0" """ % name)
+        finally:
+            cr.close()
+
     def exp_create(self, db_name, demo, lang, user_password='admin'):
         self.id_protect.acquire()
@@ -77,24 +86,13 @@ class db(netsvc.ExportService):
         self.actions[id] = {'clean': False}

-        db = sql_db.db_connect('template1')
-        db.lock()
-        try:
-            cr = db.cursor()
-            try:
-                cr.autocommit(True) # avoid transaction block
-                cr.execute('CREATE DATABASE "%s" ENCODING \'unicode\'' % db_name)
-            finally:
-                cr.close()
-        finally:
-            db.release()
+        self._create_empty_database(db_name)

         class DBInitialize(object):
             def __call__(self, serv, id, db_name, demo, lang, user_password='admin'):
                 cr = None
                 try:
                     serv.actions[id]['progress'] = 0
-                    clean = False
                     cr = sql_db.db_connect(db_name).cursor()
                     tools.init_db(cr)
                     cr.commit()
@@ -122,7 +120,6 @@ class db(netsvc.ExportService):
             except Exception, e:
                 serv.actions[id]['clean'] = False
                 serv.actions[id]['exception'] = e
-                from cStringIO import StringIO
                 import traceback
                 e_str = StringIO()
                 traceback.print_exc(file=e_str)
@@ -149,11 +146,11 @@ class db(netsvc.ExportService):
         clean = self.actions[id]['clean']
         if clean:
             users = self.actions[id]['users']
-            del self.actions[id]
+            self.actions.pop(id)
             return (1.0, users)
         else:
             e = self.actions[id]['exception']
-            del self.actions[id]
+            self.actions.pop(id)
             raise Exception, e

     def exp_drop(self, db_name):
@@ -161,24 +158,20 @@ class db(netsvc.ExportService):
         logger = netsvc.Logger()

         db = sql_db.db_connect('template1')
-        db.lock()
+        cr = db.cursor()
+        cr.autocommit(True) # avoid transaction block
         try:
-            cr = db.cursor()
-            cr.autocommit(True) # avoid transaction block
             try:
-                try:
-                    cr.execute('DROP DATABASE "%s"' % db_name)
-                except Exception, e:
-                    logger.notifyChannel("web-services", netsvc.LOG_ERROR,
-                            'DROP DB: %s failed:\n%s' % (db_name, e))
-                    raise Exception("Couldn't drop database %s: %s" % (db_name, e))
-                else:
-                    logger.notifyChannel("web-services", netsvc.LOG_INFO,
-                        'DROP DB: %s' % (db_name))
-            finally:
-                cr.close()
+                cr.execute('DROP DATABASE "%s"' % db_name)
+            except Exception, e:
+                logger.notifyChannel("web-services", netsvc.LOG_ERROR,
+                        'DROP DB: %s failed:\n%s' % (db_name, e))
+                raise Exception("Couldn't drop database %s: %s" % (db_name, e))
+            else:
+                logger.notifyChannel("web-services", netsvc.LOG_INFO,
+                    'DROP DB: %s' % (db_name))
         finally:
-            db.release()
+            cr.close()
         return True
@@ -229,17 +222,7 @@ class db(netsvc.ExportService):
                 'RESTORE DB: %s already exists' % (db_name,))
             raise Exception, "Database already exists"

-        db = sql_db.db_connect('template1')
-        db.lock()
-        try:
-            cr = db.cursor()
-            cr.autocommit(True) # avoid transaction block
-            try:
-                cr.execute("""CREATE DATABASE "%s" ENCODING 'unicode' TEMPLATE "template0" """ % db_name)
-            finally:
-                cr.close()
-        finally:
-            db.release()
+        self._create_empty_database(db_name)

         cmd = ['pg_restore', '--no-owner']
         if tools.config['db_user']:
@@ -277,27 +260,23 @@ class db(netsvc.ExportService):
         logger = netsvc.Logger()

         db = sql_db.db_connect('template1')
-        db.lock()
+        cr = db.cursor()
         try:
-            cr = db.cursor()
             try:
-                try:
-                    cr.execute('ALTER DATABASE "%s" RENAME TO "%s"' % (old_name, new_name))
-                except Exception, e:
-                    logger.notifyChannel("web-services", netsvc.LOG_ERROR,
-                        'RENAME DB: %s -> %s failed:\n%s' % (old_name, new_name, e))
-                    raise Exception("Couldn't rename database %s to %s: %s" % (old_name, new_name, e))
-                else:
-                    fs = os.path.join(tools.config['root_path'], 'filestore')
-                    if os.path.exists(os.path.join(fs, old_name)):
-                        os.rename(os.path.join(fs, old_name), os.path.join(fs, new_name))
+                cr.execute('ALTER DATABASE "%s" RENAME TO "%s"' % (old_name, new_name))
+            except Exception, e:
+                logger.notifyChannel("web-services", netsvc.LOG_ERROR,
+                    'RENAME DB: %s -> %s failed:\n%s' % (old_name, new_name, e))
+                raise Exception("Couldn't rename database %s to %s: %s" % (old_name, new_name, e))
+            else:
+                fs = os.path.join(tools.config['root_path'], 'filestore')
+                if os.path.exists(os.path.join(fs, old_name)):
+                    os.rename(os.path.join(fs, old_name), os.path.join(fs, new_name))

                 logger.notifyChannel("web-services", netsvc.LOG_INFO,
                     'RENAME DB: %s -> %s' % (old_name, new_name))
-            finally:
-                cr.close()
         finally:
-            db.release()
+            cr.close()
         return True
@@ -309,30 +288,26 @@ class db(netsvc.ExportService):
             raise Exception('AccessDenied')

         db = sql_db.db_connect('template1')
-        db.lock()
+        cr = db.cursor()
         try:
-            cr = db.cursor()
             try:
-                try:
-                    db_user = tools.config["db_user"]
-                    if not db_user and os.name == 'posix':
-                        import pwd
-                        db_user = pwd.getpwuid(os.getuid())[0]
-                    if not db_user:
-                        cr.execute("select decode(usename, 'escape') from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
-                        res = cr.fetchone()
-                        db_user = res and str(res[0])
-                    if db_user:
-                        cr.execute("select decode(datname, 'escape') from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in ('template0', 'template1', 'postgres') order by datname", (db_user,))
-                    else:
-                        cr.execute("select decode(datname, 'escape') from pg_database where datname not in('template0', 'template1','postgres') order by datname")
-                    res = [str(name) for (name,) in cr.fetchall()]
-                except:
-                    res = []
-            finally:
-                cr.close()
+                db_user = tools.config["db_user"]
+                if not db_user and os.name == 'posix':
+                    import pwd
+                    db_user = pwd.getpwuid(os.getuid())[0]
+                if not db_user:
+                    cr.execute("select decode(usename, 'escape') from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
+                    res = cr.fetchone()
+                    db_user = res and str(res[0])
+                if db_user:
+                    cr.execute("select decode(datname, 'escape') from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in ('template0', 'template1', 'postgres') order by datname", (db_user,))
+                else:
+                    cr.execute("select decode(datname, 'escape') from pg_database where datname not in('template0', 'template1','postgres') order by datname")
+                res = [str(name) for (name,) in cr.fetchall()]
+            except:
+                res = []
         finally:
-            db.release()
+            cr.close()
         res.sort()
         return res
@@ -365,7 +340,7 @@ class db(netsvc.ExportService):
                 self.abortResponse(1, inst.name, 'warning', inst.value)
             except except_osv, inst:
                 self.abortResponse(1, inst.name, inst.exc_type, inst.value)
-            except Exception, e:
+            except Exception:
                 import traceback
                 tb_s = reduce(lambda x, y: x+y, traceback.format_exception( sys.exc_type, sys.exc_value, sys.exc_traceback))
                 l.notifyChannel('web-services', netsvc.LOG_ERROR, tb_s)
@@ -523,7 +498,7 @@ GNU Public Licence.
             l.notifyChannel('migration', netsvc.LOG_ERROR, 'unable to read the module %s' % (module,))
             raise

-        zip_contents = cStringIO.StringIO(base64_decoded)
+        zip_contents = StringIO(base64_decoded)
         zip_contents.seek(0)
         try:
             try:
@@ -575,7 +550,6 @@ GNU Public Licence.
             %(platform.release(), platform.version(), platform.architecture()[0],
               os_lang, platform.python_version(),release.version)
         return environment
-
     def exp_login_message(self):
         return tools.config.get('login_message', False)
@@ -590,6 +564,8 @@ GNU Public Licence.
         res = "OpenERP server: %d threads\n" % threading.active_count()
         res += netsvc.Server.allStats()
         return res
+
+    def check_connectivity(self):
+        return bool(sql_db.db_connect('template1'))
 common()
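Note: exp_create and exp_restore now share _create_empty_database(), which keeps the one subtle requirement in a single place: CREATE DATABASE cannot run inside a transaction block, so the cursor must be in autocommit mode first. Outside the OpenERP cursor wrapper the same idea looks roughly like this with plain psycopg2 (the DSN is a placeholder and assumes a reachable PostgreSQL server):

    import psycopg2
    from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT

    def create_empty_database(name):
        # 'dbname=template1' is a placeholder DSN; add host/user/password as needed
        cnx = psycopg2.connect('dbname=template1')
        try:
            cnx.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)  # CREATE DATABASE refuses to run in a transaction
            cr = cnx.cursor()
            cr.execute('CREATE DATABASE "%s" ENCODING \'unicode\' TEMPLATE "template0"' % name)
            cr.close()
        finally:
            cnx.close()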

View File

@@ -54,6 +54,7 @@ from tools.func import wraps
 from datetime import datetime as mdt
 from datetime import timedelta
 import threading
+from inspect import stack

 import re
 re_from = re.compile('.* from "?([a-zA-Z_0-9]+)"? .*$');
@@ -91,10 +92,7 @@ class Cursor(object):
         self._obj = self._cnx.cursor(cursor_factory=psycopg1cursor)
         self.__closed = False # real initialisation value
         self.autocommit(False)
-
-        if tools.config['log_level'] in (netsvc.LOG_DEBUG, netsvc.LOG_DEBUG_RPC):
-            from inspect import stack
-            self.__caller = tuple(stack()[2][1:3])
+        self.__caller = tuple(stack()[2][1:3])

     def __del__(self):
         if not self.__closed:
@@ -103,10 +101,9 @@ class Cursor(object):
             # but the database connection is not put back into the connection
             # pool, preventing some operation on the database like dropping it.
             # This can also lead to a server overload.
-            if tools.config['log_level'] in (netsvc.LOG_DEBUG, netsvc.LOG_DEBUG_RPC):
-                msg = "Cursor not closed explicitly\n" \
-                      "Cursor was created at %s:%s" % self.__caller
-                log(msg, netsvc.LOG_WARNING)
+            msg = "Cursor not closed explicitly\n" \
+                  "Cursor was created at %s:%s" % self.__caller
+            log(msg, netsvc.LOG_WARNING)
             self.close()

     @check
@@ -119,7 +116,7 @@ class Cursor(object):
         if self.sql_log:
             now = mdt.now()

         try:
             params = params or None
             res = self._obj.execute(query, params)
@@ -200,11 +197,11 @@ class Cursor(object):
     def autocommit(self, on):
         offlevel = [ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE][bool(self._serialized)]
         self._cnx.set_isolation_level([offlevel, ISOLATION_LEVEL_AUTOCOMMIT][bool(on)])

     @check
     def commit(self):
         return self._cnx.commit()

     @check
     def rollback(self):
         return self._cnx.rollback()
@@ -233,16 +230,18 @@ class ConnectionPool(object):
         self._lock = threading.Lock()
         self._logger = netsvc.Logger()

-    def _log(self, msg):
-        #self._logger.notifyChannel('ConnectionPool', netsvc.LOG_INFO, msg)
-        pass
+    def __repr__(self):
+        used = len([1 for c, u in self._connections[:] if u])
+        count = len(self._connections)
+        return "ConnectionPool(used=%d/count=%d/max=%d)" % (used, count, self._maxconn)
+
     def _debug(self, msg):
-        #self._logger.notifyChannel('ConnectionPool', netsvc.LOG_DEBUG, msg)
-        pass
+        self._logger.notifyChannel('ConnectionPool', netsvc.LOG_DEBUG, repr(self))
+        self._logger.notifyChannel('ConnectionPool', netsvc.LOG_DEBUG, msg)

     @locked
     def borrow(self, dsn):
-        self._log('Borrow connection to %s' % (dsn,))
+        self._debug('Borrow connection to %s' % (dsn,))
         result = None
         for i, (cnx, used) in enumerate(self._connections):
@@ -258,7 +257,7 @@ class ConnectionPool(object):
             return result

         if len(self._connections) >= self._maxconn:
-            # try to remove the older connection not used
+            # try to remove the oldest connection not used
             for i, (cnx, used) in enumerate(self._connections):
                 if not used:
                     self._debug('Removing old connection at index %d: %s' % (i, cnx.dsn))
@@ -266,7 +265,7 @@ class ConnectionPool(object):
                     break
             else:
                 # note: this code is called only if the for loop has completed (no break)
-                raise PoolError('Connection Pool Full')
+                raise PoolError('The Connection Pool Is Full')

         self._debug('Create new connection')
         result = psycopg2.connect(dsn=dsn)
@@ -275,7 +274,7 @@ class ConnectionPool(object):
     @locked
     def give_back(self, connection):
-        self._log('Give back connection to %s' % (connection.dsn,))
+        self._debug('Give back connection to %s' % (connection.dsn,))
         for i, (cnx, used) in enumerate(self._connections):
             if cnx is connection:
                 self._connections.pop(i)
@@ -286,6 +285,7 @@ class ConnectionPool(object):
     @locked
     def close_all(self, dsn):
+        self._debug('Close all connections to %s' % (dsn,))
         for i, (cnx, used) in tools.reverse_enumerate(self._connections):
             if dsn_are_equals(cnx.dsn, dsn):
                 cnx.close()
@@ -293,37 +293,17 @@ class ConnectionPool(object):
 class Connection(object):

-    __LOCKS = {}
+    def _debug(self, msg):
+        self._logger.notifyChannel('Connection', netsvc.LOG_DEBUG, msg)

-    def __init__(self, pool, dbname, unique=False):
+    def __init__(self, pool, dbname):
         self.dbname = dbname
         self._pool = pool
-        self._unique = unique
-
-    def __enter__(self):
-        if self._unique:
-            self.lock()
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        if self._unique:
-            self.release()
-
-    def lock(self):
-        if self.dbname not in self.__LOCKS:
-            self.__LOCKS[self.dbname] = threading.Lock()
-        self.__LOCKS[self.dbname].acquire()
-
-    def release(self):
-        close_db(self.dbname)
-        self.__LOCKS[self.dbname].release()
+        self._logger = netsvc.Logger()

     def cursor(self, serialized=False):
-        if self._unique:
-            lock = self.__LOCKS.get(self.dbname, None)
-            if not (lock and lock.locked()):
-                netsvc.Logger().notifyChannel('Connection', netsvc.LOG_WARNING, 'Unprotected connection to %s' % (self.dbname,))
+        cursor_type = serialized and 'serialized ' or ''
+        self._debug('create %scursor to "%s"' % (cursor_type, self.dbname,))
         return Cursor(self._pool, self.dbname, serialized=serialized)

     def serialized_cursor(self):
@@ -359,8 +339,7 @@ def dsn_are_equals(first, second):
 _Pool = ConnectionPool(int(tools.config['db_maxconn']))

 def db_connect(db_name):
-    unique = db_name in ['template1', 'template0']
-    return Connection(_Pool, db_name, unique)
+    return Connection(_Pool, db_name)

 def close_db(db_name):
     _Pool.close_all(dsn(db_name))
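Note: every Cursor now records its creation site unconditionally with inspect.stack(), so the "Cursor not closed explicitly" warning in __del__ can always point at the leaking caller (previously the caller was only captured at debug log levels). The capture itself, reduced to a toy class; the real code uses stack()[2] because of an extra wrapper frame:

    from inspect import stack

    class TrackedCursor(object):
        def __init__(self):
            # stack()[1] is the caller's frame record: slot 1 is the file, slot 2 the line
            self.__caller = tuple(stack()[1][1:3])

        def __del__(self):
            print "Cursor not closed explicitly; created at %s:%s" % self.__caller

    c = TrackedCursor()
    del c   # triggers the warning with the creation site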

View File

@@ -352,7 +352,6 @@
             res = os.path.abspath(os.path.expanduser(value))
             if not os.path.exists(res):
                 raise optparse.OptionValueError("option %s: no such directory: %r" % (opt, value))
-
             contains_addons = False
             for f in os.listdir(res):
                 modpath = os.path.join(res, f)

View File

@@ -763,16 +763,20 @@ if __name__=='__main__':
     import ImageDraw
     img = Image.new("RGB", (800, 600), "#ffffff")
     draw = ImageDraw.Draw(img)
-    for name,node in g.result.items():
+    result = g.result_get()
+    node_res = {}
+    for node in nodes:
+        node_res[node] = result[node]
+    for name,node in node_res.items():
         draw.arc( (int(node['y']-radius), int(node['x']-radius),int(node['y']+radius), int(node['x']+radius) ), 0, 360, (128,128,128))
         draw.text( (int(node['y']), int(node['x'])), name, (128,128,128))

-    for nodefrom in g.transitions:
-        for nodeto in g.transitions[nodefrom]:
-            draw.line( (int(g.result[nodefrom]['y']), int(g.result[nodefrom]['x']),int(g.result[nodeto]['y']),int(g.result[nodeto]['x'])),(128,128,128) )
+    for t in transitions:
+        draw.line( (int(node_res[t[0]]['y']), int(node_res[t[0]]['x']),int(node_res[t[1]]['y']),int(node_res[t[1]]['x'])),(128,128,128) )

     img.save("graph.png", "PNG")

View File

@@ -750,14 +750,14 @@ class cache(object):
         if *args and **kwargs are both empty, clear all the keys related to this database
         """
         if not args and not kwargs:
-            keys_to_del = [key for key in self.cache if key[0][1] == dbname]
+            keys_to_del = [key for key in self.cache.keys() if key[0][1] == dbname]
         else:
             kwargs2 = self._unify_args(*args, **kwargs)
-            keys_to_del = [key for key, _ in self._generate_keys(dbname, kwargs2) if key in self.cache]
+            keys_to_del = [key for key, _ in self._generate_keys(dbname, kwargs2) if key in self.cache.keys()]

         for key in keys_to_del:
-            del self.cache[key]
+            self.cache.pop(key)

     @classmethod
     def clean_caches_for_db(cls, dbname):
         for c in cls.__caches:
@@ -778,9 +778,9 @@ class cache(object):
         if time.time()-int(self.timeout) > self.lasttime:
             self.lasttime = time.time()
             t = time.time()-int(self.timeout)
-            old_keys = [key for key in self.cache if self.cache[key][1] < t]
+            old_keys = [key for key in self.cache.keys() if self.cache[key][1] < t]
             for key in old_keys:
-                del self.cache[key]
+                self.cache.pop(key)

         kwargs2 = self._unify_args(*args, **kwargs)
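Note: the cache cleanup now iterates over self.cache.keys(), which in Python 2 is a copied list, and removes entries with dict.pop(); that avoids mutating a dict while another thread may be resizing it mid-iteration. The expiry loop on a plain dict (the pop() default below is an extra safety not present in the patch):

    import time

    cache = {
        ('res.partner', 1): ('cached value', time.time() - 120),
        ('res.partner', 2): ('cached value', time.time()),
    }
    timeout = 60

    cutoff = time.time() - timeout
    old_keys = [key for key in cache.keys() if cache[key][1] < cutoff]
    for key in old_keys:
        cache.pop(key, None)   # the patch calls pop(key) without a default
    print cache.keys()         # only the fresh entry remains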

View File

@@ -148,6 +148,10 @@ class interface(netsvc.Service):
                     res['arch'] = arch
                 res['state'] = button_list

+            elif result_def['type'] == 'choice':
+                next_state = result_def['next_state'](self, cr, uid, data, context)
+                return self.execute_cr(cr, uid, data, next_state, context)
+
         except Exception, e:
             if isinstance(e, except_wizard) \
                     or isinstance(e, except_osv) \
@@ -162,9 +166,6 @@ class interface(netsvc.Service):
                         'Exception in call: ' + tb_s)
                 raise

-        if result_def['type'] == 'choice':
-            next_state = result_def['next_state'](self, cr, uid, data, context)
-            return self.execute_cr(cr, uid, data, next_state, context)
         return res

     def execute(self, db, uid, data, state='init', context=None):
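Note: handling of the 'choice' result type moves inside the try block, so an exception raised while computing next_state goes through the same except_wizard / except_osv / except_orm handling as the rest of execute_cr instead of escaping unwrapped. The control-flow change in miniature (a heavily simplified stand-in for execute_cr):

    def run(result_def, compute_next_state):
        # compute_next_state stands in for result_def['next_state'](self, cr, uid, data, context)
        try:
            res = {'type': result_def['type']}
            if result_def['type'] == 'choice':
                # resolved inside the try so failures reach the handler below
                return compute_next_state()
        except ValueError, e:
            return 'handled error: %s' % e
        return res

    def broken_next_state():
        raise ValueError('bad state')

    print run({'type': 'choice'}, lambda: 'next_state_ok')
    print run({'type': 'choice'}, broken_next_state)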