bugfix_cache_speed_improvement_store

bzr revid: fp@tinyerp.com-20081213060118-icn54am74m6ntyqw
This commit is contained in:
Fabien Pinckaers 2008-12-13 07:01:18 +01:00
parent aa900102ec
commit cb59d883b8
12 changed files with 107 additions and 255 deletions

View File

@ -21,7 +21,7 @@
##############################################################################
{
"name" : "Base",
"version" : "1.0",
"version" : "1.1",
"author" : "Tiny",
"website" : "http://www.openerp.com",
"category" : "Generic Modules/Base",

View File

@ -36,9 +36,7 @@ class ir_attachment(osv.osv):
for o in objs:
if o and o.res_model:
ima.check(cr, uid, o.res_model, mode)
check = tools.cache()(check)
def search(self, cr, uid, args, offset=0, limit=None, order=None,
context=None, count=False):
ids = super(ir_attachment, self).search(cr, uid, args, offset=offset,

View File

@ -435,7 +435,7 @@ class ir_model_data(osv.osv):
ids = self.search(cr, uid, [('module','=',module),('name','=', xml_id)])
assert len(ids)==1, '%d reference(s) to %s.%s. You should have one and only one !' % (len(ids), module, xml_id)
return ids[0]
_get_id = tools.cache()(_get_id)
_get_id = tools.cache(skiparg=3)(_get_id)
def _update_dummy(self,cr, uid, model, module, xml_id=False, store=True):
if not xml_id:

View File

@ -21,7 +21,6 @@
##############################################################################
from osv import fields, osv
from osv.osv import Cacheable
import tools
TRANSLATION_TYPE = [
@ -39,11 +38,12 @@ TRANSLATION_TYPE = [
('constraint', 'Constraint'),
]
class ir_translation(osv.osv, Cacheable):
class ir_translation(osv.osv):
_name = "ir.translation"
_log_access = False
def _get_language(self, cr, uid, context):
print '***', _get_language
lang_obj = self.pool.get('res.lang')
lang_ids = lang_obj.search(cr, uid, [('translatable', '=', True)],
context=context)
@ -81,27 +81,19 @@ class ir_translation(osv.osv, Cacheable):
cr.commit()
def _get_ids(self, cr, uid, name, tt, lang, ids):
translations, to_fetch = {}, []
for id in ids:
trans = self.get((lang, name, id))
if trans is not None:
translations[id] = trans
else:
to_fetch.append(id)
if to_fetch:
translations = {}
if ids:
cr.execute('select res_id,value ' \
'from ir_translation ' \
'where lang=%s ' \
'and type=%s ' \
'and name=%s ' \
'and res_id in ('+','.join(map(str, to_fetch))+')',
'and res_id in ('+','.join(map(str, ids))+')',
(lang,tt,name))
for res_id, value in cr.fetchall():
self.add((lang, tt, name, res_id), value)
translations[res_id] = value
for res_id in ids:
if res_id not in translations:
self.add((lang, tt, name, res_id), False)
translations[res_id] = False
return translations
@ -122,11 +114,8 @@ class ir_translation(osv.osv, Cacheable):
})
return len(ids)
@tools.cache(skiparg=3)
def _get_source(self, cr, uid, name, tt, lang, source=None):
trans = self.get((lang, tt, name, source))
if trans is not None:
return trans
if source:
#if isinstance(source, unicode):
# source = source.encode('utf8')
@ -145,26 +134,9 @@ class ir_translation(osv.osv, Cacheable):
'and name=%s',
(lang, tt, str(name)))
res = cr.fetchone()
trad = res and res[0] or ''
self.add((lang, tt, name, source), trad)
return trad
def unlink(self, cursor, user, ids, context=None):
self.clear()
return super(ir_translation, self).unlink(cursor, user, ids,
context=context)
def create(self, cursor, user, vals, context=None):
self.clear()
return super(ir_translation, self).create(cursor, user, vals,
context=context)
def write(self, cursor, user, ids, vals, context=None):
self.clear()
return super(ir_translation, self).write(cursor, user, ids, vals,
context=context)
ir_translation()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -169,9 +169,9 @@ class module(osv.osv):
('GPL-3 or any later version', 'GPL-3 or later version'),
('Other proprietary', 'Other proprietary')
], string='License', readonly=True),
'menus_by_module': fields.function(_get_views, method=True, string='Menus', type='text', multi="meta"),
'reports_by_module': fields.function(_get_views, method=True, string='Reports', type='text', multi="meta"),
'views_by_module': fields.function(_get_views, method=True, string='Views', type='text', multi="meta"),
'menus_by_module': fields.function(_get_views, method=True, string='Menus', type='text', multi="meta", store=True),
'reports_by_module': fields.function(_get_views, method=True, string='Reports', type='text', multi="meta", store=True),
'views_by_module': fields.function(_get_views, method=True, string='Views', type='text', multi="meta", store=True),
}
_defaults = {

View File

@ -152,7 +152,7 @@ class users(osv.osv):
'groups_id': _get_group,
}
def company_get(self, cr, uid, uid2):
company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id
company_id = self.pool.get('res.users').browse(cr, uid, uid2).company_id.id
return company_id
company_get = tools.cache()(company_get)

View File

@ -727,7 +727,6 @@ class orm_template(object):
and getattr(self._columns[f], arg):
res[f][arg] = getattr(self._columns[f], arg)
# translate the field label
res_trans = translation_obj._get_source(cr, user,
self._name + ',' + f, 'field', context.get('lang', False) or 'en_US')
if res_trans:
@ -838,13 +837,13 @@ class orm_template(object):
# translate view
if ('lang' in context) and not result:
if node.hasAttribute('string') and node.getAttribute('string'):
trans = tools.translate(cr, self._name, 'view', context['lang'], node.getAttribute('string').encode('utf8'))
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.getAttribute('string').encode('utf8'))
if not trans and ('base_model_name' in context):
trans = tools.translate(cr, context['base_model_name'], 'view', context['lang'], node.getAttribute('string').encode('utf8'))
trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.getAttribute('string').encode('utf8'))
if trans:
node.setAttribute('string', trans)
if node.hasAttribute('sum') and node.getAttribute('sum'):
trans = tools.translate(cr, self._name, 'view', context['lang'], node.getAttribute('sum').encode('utf8'))
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.getAttribute('sum').encode('utf8'))
if trans:
node.setAttribute('sum', trans)
@ -2005,11 +2004,7 @@ class orm(orm_template):
if isinstance(ids, (int, long)):
ids = [ids]
fn_list = []
for fnct in self.pool._store_function.get(self._name, []):
ids2 = filter(None, fnct[2](self,cr, uid, ids, context))
if ids2:
fn_list.append( (fnct[0], fnct[1], ids2) )
result_store = self._store_get_values(cr, user, ids, vals.keys(), context)
delta = context.get('read_delta', False)
if delta and self._log_access:
@ -2057,10 +2052,8 @@ class orm(orm_template):
cr.execute('delete from "'+self._table+'" ' \
'where id in ('+str_d+')', sub_ids)
for object,field,ids in fn_list:
ids = self.pool.get(object).search(cr, uid, [('id','in', ids)], context=context)
if ids:
self.pool.get(object)._store_set_values(cr, uid, ids, field, context)
for order, object, ids, fields in result_store:
self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
return True
#
@ -2268,20 +2261,14 @@ class orm(orm_template):
if 'read_delta' in context:
del context['read_delta']
result = self._store_get_values(cr, user, ids, vals.keys(), context)
for order, object, ids, fields in result:
self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
wf_service = netsvc.LocalService("workflow")
for id in ids:
wf_service.trg_write(user, self._name, id, cr)
for fnct in self.pool._store_function.get(self._name, []):
ok = False
for key in vals.keys():
if (not fnct[3]) or (key in fnct[3]):
ok = True
if ok:
ids2 = fnct[2](self,cr, user, ids, context)
ids2 = filter(None, ids2)
if ids2:
self.pool.get(fnct[0])._store_set_values(cr, user, ids2, fnct[1], context)
return True
#
@ -2386,34 +2373,68 @@ class orm(orm_template):
cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1,pleft+2,id_new))
result = self._store_get_values(cr, user, [id_new], vals.keys(), context)
for order, object, ids, fields in result:
self.pool.get(object)._store_set_values(cr, user, ids, fields, context)
wf_service = netsvc.LocalService("workflow")
wf_service.trg_create(user, self._name, id_new, cr)
for fnct in self.pool._store_function.get(self._name, []):
ids2 = fnct[2](self,cr, user, [id_new], context)
ids2 = filter(None, ids2)
if ids2:
self.pool.get(fnct[0])._store_set_values(cr, user, ids2, fnct[1], context)
return id_new
def _store_set_values(self, cr, uid, ids, field, context):
    # Recompute ONE stored function field for the given record ids and
    # persist each computed value with an UPDATE per record.
    # (This is the pre-refactor, single-field variant of the method.)
    args = {}  # NOTE(review): never used — leftover local
    # Ask the field object to compute its value for every id.
    result = self._columns[field].get(cr, self, ids, field, uid, context=context)
    for id,value in result.items():
        upd0 = []  # SQL SET fragments
        upd1 = []  # corresponding bind parameters
        if self._columns[field]._multi:
            # multi-fields compute a dict {field_name: value}; extract ours
            value = value[field]
        if self._columns[field]._type in ('many2one', 'one2one'):
            try:
                # relational values may arrive as (id, name) pairs; keep the id
                value = value[0]
            except:
                pass
        upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
        upd1.append(self._columns[field]._symbol_set[1](value))
        upd1.append(id)
        # string.join is the Python 2 spelling of ','.join(upd0)
        cr.execute('update "' + self._table + '" set ' + \
            string.join(upd0, ',') + ' where id = %s', upd1)
def _store_get_values(self, cr, uid, ids, fields, context):
result = {}
fncts = self.pool._store_function.get(self._name, [])
for fnct in range(len(fncts)):
result.setdefault(fncts[fnct][0], {})
ids2 = fncts[fnct][2](self,cr, uid, ids, context)
for id in filter(None, ids2):
result[fncts[fnct][0]].setdefault(id, [])
result[fncts[fnct][0]][id].append(fnct)
result2 = []
for object in result:
k2 = {}
for id,fnct in result[object].items():
k2.setdefault(tuple(fnct), [])
k2[tuple(fnct)].append(id)
for fnct,id in k2.items():
result2.append((fncts[fnct[0]][4],object,id,map(lambda x: fncts[x][1], fnct)))
result2.sort()
return result2
def _store_set_values(self, cr, uid, ids, fields, context):
todo = {}
for f in fields:
todo.setdefault(self._columns[f]._multi, [])
todo[self._columns[f]._multi].append(f)
for key,val in todo.items():
if key:
result = self._columns[val[0]].get(cr, self, ids, val, uid, context=context)
for id,value in result.items():
upd0 = []
upd1 = []
for v in value:
if self._columns[v]._type in ('many2one', 'one2one'):
try:
value[v] = value[v][0]
except:
pass
upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
upd1.append(self._columns[v]._symbol_set[1](value[v]))
upd1.append(id)
cr.execute('update "' + self._table + '" set ' + \
string.join(upd0, ',') + ' where id = %s', upd1)
else:
for f in val:
result = self._columns[f].get(cr, self, ids, f, uid, context=context)
for id,value in result.items():
if self._columns[f]._type in ('many2one', 'one2one'):
try:
value = value[0]
except:
pass
cr.execute('update "' + self._table + '" set ' + \
'"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (value,id))
return True
#

View File

@ -251,122 +251,3 @@ class osv(orm.orm):
self.pool = pool
orm.orm.__init__(self, cr)
class Cacheable(object):
    """Mixin giving osv objects a simple shared key/value cache."""

    # Class-level store, shared by every instance that mixes this in.
    _cache = UpdateableDict()

    def add(self, key, value):
        """Remember *value* under *key*."""
        self._cache[key] = value

    def invalidate(self, key):
        """Forget the entry stored under *key* (KeyError when absent)."""
        del self._cache[key]

    def get(self, key):
        """Return the cached value for *key*, or None when not cached."""
        try:
            return self._cache[key]
        except KeyError:
            return None

    def clear(self):
        """Drop every cached entry."""
        self._cache.clear()
        self._items = []
def filter_dict(d, fields):
    """Return a copy of *d* restricted to *fields* plus the 'id' key.

    Keys absent from *d* are silently skipped.
    """
    wanted = fields + ['id']
    return dict((k, d[k]) for k in wanted if k in d)
class cacheable_osv(osv, Cacheable):
    """osv variant that caches read() results per record and per relevant
    context (currently only 'lang')."""

    # Context keys whose values take part in the cache key.
    _relevant = ['lang']

    def __init__(self):
        super(cacheable_osv, self).__init__()

    def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
        # Serve cached rows where possible, fetch the missing ones from the
        # database, then recompute non classic-write (function) fields.
        if not fields:
            fields = []
        if not context:
            context = {}
        fields = fields or self._columns.keys()
        # Cache-key component built from the relevant context entries.
        ctx = [context.get(x, False) for x in self._relevant]
        result, tofetch = [], []
        for id in ids:
            # NOTE(review): Cacheable.get() takes a single key argument, but
            # three are passed here — confirm which get() is intended.
            res = self.get(self._name, id, ctx)
            if not res:
                tofetch.append(id)
            else:
                result.append(filter_dict(res, fields))
        # gen the list of "local" (ie not inherited) fields which are classic or many2one
        nfields = filter(lambda x: x[1]._classic_write, self._columns.items())
        # gen the list of inherited fields
        inherits = map(lambda x: (x[0], x[1][2]), self._inherit_fields.items())
        # complete the field list with the inherited fields which are classic or many2one
        nfields += filter(lambda x: x[1]._classic_write, inherits)
        nfields = [x[0] for x in nfields]
        res = super(cacheable_osv, self).read(cr, user, tofetch, nfields, context, load)
        for r in res:
            # NOTE(review): ctx is a list, so this tuple key is not hashable
            # by a plain dict — verify UpdateableDict accepts it.
            self.add((self._name, r['id'], ctx), r)
            result.append(filter_dict(r, fields))
        # Call the function fields if necessary
        tofetch = []
        for f in fields:
            if f not in nfields:
                tofetch.append(f)
        for f in tofetch:
            fvals = self._columns[f].get(cr, self, ids, f, user, context=context)
            for r in result:
                r[f] = fvals[r['id']]
        # TODO: sort by self._order !!
        return result

    def invalidate(self, key):
        # Drop one record from the two-level cache: key = (model_name, id).
        del self._cache[key[0]][key[1]]

    def write(self, cr, user, ids, values, context=None):
        # Invalidate the cached entries of written records before delegating.
        if not context:
            context = {}
        for id in ids:
            self.invalidate((self._name, id))
        return super(cacheable_osv, self).write(cr, user, ids, values, context)

    def unlink(self, cr, user, ids):
        # Deleting records empties the whole cache (coarse but safe).
        self.clear()
        return super(cacheable_osv, self).unlink(cr, user, ids)
#cacheable_osv = osv
#class FakePool(object):
# def __init__(self, module):
# self.preferred_module = module
# def get(self, name):
# localpool = module_objects_dict.get(self.preferred_module, {'dict': {}})['dict']
# if name in localpool:
# obj = localpool[name]
# else:
# obj = pooler.get_pool(cr.dbname).get(name)
# return obj
# fake_pool = self
# class fake_class(obj.__class__):
# def __init__(self):
# super(fake_class, self).__init__()
# self.pool = fake_pool
# return fake_class()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -35,16 +35,6 @@ def login(db, login, password):
else:
return False
def logout(db, login, password):
    """Return the login name of the user identified by *login* in database
    *db*, or False when no such user exists.

    NOTE(review): despite its name, the ``login`` parameter is used as the
    numeric user id, and ``password`` is never checked here — confirm the
    callers' intent before reuse.
    """
    cr = pooler.get_db(db).cursor()
    try:
        # Parameterized query: never interpolate values into SQL strings
        # (the original used '%d' % login, which is injection-prone and
        # crashes on non-integer input).
        cr.execute('select login from res_users where id=%s', (login,))
        res = cr.fetchone()
    finally:
        # Release the cursor even when the query raises.
        cr.close()
    if res:
        return res[0]
    return False
def check_super(passwd):
if passwd == tools.config['admin_passwd']:
return True

View File

@ -319,29 +319,11 @@ class common(netsvc.Service):
msg = res and 'successful login' or 'bad login or password'
logger.notifyChannel("web-service", netsvc.LOG_INFO, "%s from '%s' using database '%s'" % (msg, login, db.lower()))
return res or False
def logout(self, db, login, password):
# FIXME: this hardcoded, module-specific logout handling (hr attendance / time sheet cleanup) does not belong in the generic web-service layer and should be removed or moved into the relevant module.
res = security.logout(db, login, password)
service = netsvc.LocalService("object_proxy")
fields = service.execute(db, login, 'res.users', 'fields_get', {})
try:
if 'current_status' in fields.keys():
service.execute(db, login, 'res.users', 'write', login, {'current_status':False})
emp_id = service.execute(db, login, 'hr.employee', 'search',[('user_id','=',login)])
emp = emp_id[0]
service.execute(db, login, 'hr.attendance', 'create',{'action':'sign_out','employee_id':emp})
cr = pooler.get_db(db).cursor()
cr.execute("delete from time_sheet_remote_temp where user_id = '%s'"%(res))
cr.commit()
cr.close()
except:
pass
logger = netsvc.Logger()
logger.notifyChannel("web-service", netsvc.LOG_INFO,'Logout=>%s from database %s'%(res,db.lower()))
logger.notifyChannel("web-service", netsvc.LOG_INFO,'Logout %s from database %s'%(login,db))
return True
def about(self, extended=False):
"""Return information about the OpenERP Server.

View File

@ -385,7 +385,6 @@ def sms_send(user, password, api_id, text, to):
#f = urllib.urlopen("http://api.clickatell.com/http/sendmsg", params)
f = urllib.urlopen("http://196.7.150.220/http/sendmsg", params)
# FIXME: Use the logger if there is an error
print f.read()
return True
#---------------------------------------------------------
@ -537,29 +536,36 @@ def is_hashable(h):
# Timeout: 0 = no timeout, otherwise in seconds
#
class cache(object):
def __init__(self, timeout=10000, skiparg=2):
def __init__(self, timeout=10000, skiparg=2, multi=None):
self.timeout = timeout
self.skiparg = skiparg
self.multi = multi
self.cache = {}
def __call__(self, fn):
arg_names = inspect.getargspec(fn)[0][2:]
arg_names = inspect.getargspec(fn)[0][self.skiparg:]
def cached_result(self2, cr=None, *args, **kwargs):
if cr is None:
self.cache = {}
return True
if ('clear_keys' in kwargs):
if (kwargs['clear_keys'] in self.cache):
del self.cache[kwargs['clear_keys']]
return True
# Update named arguments with positional argument values
kwargs.update(dict(zip(arg_names, args)))
for k in kwargs:
if isinstance(kwargs[k], (list, dict, set)):
kwargs[k] = tuple(kwargs[k])
elif not is_hashable(kwargs[k]):
kwargs[k] = repr(kwargs[k])
kwargs = kwargs.items()
kwargs.sort()
kwargs2 = kwargs.copy()
kwargs2.update(dict(zip(arg_names, args)))
for k in kwargs2:
if isinstance(kwargs2[k], (list, dict, set)):
kwargs2[k] = tuple(kwargs2[k])
elif not is_hashable(kwargs2[k]):
kwargs2[k] = repr(kwargs2[k])
kwargs2 = kwargs2.items()
kwargs2.sort()
# Work out key as a tuple of ('argname', value) pairs
key = (('dbname', cr.dbname),) + tuple(kwargs)
key = (('dbname', cr.dbname),) + tuple(kwargs2)
# Check cache and return cached value if possible
if key in self.cache:
@ -571,7 +577,7 @@ class cache(object):
# Work out new value, cache it and return it
# FIXME: should we copy() this value to avoid future modifications of the cached entry?
# FIXME What about exceptions ?
result = fn(self2,cr,**dict(kwargs))
result = fn(self2,cr,*args, **kwargs)
self.cache[key] = (result, time.time())
return result

View File

@ -41,7 +41,7 @@ class UNIX_LINE_TERMINATOR(csv.excel):
csv.register_dialect("UNIX", UNIX_LINE_TERMINATOR)
#
# TODO: a caching method
# Warning: better use self.pool.get('ir.translation')._get_source if you can
#
def translate(cr, name, source_type, lang, source=None):
if source and name:
@ -61,8 +61,10 @@ class GettextAlias(object):
lang = frame.f_locals.get('context', {}).get('lang', False)
if not (lang and cr):
return source
return translate(cr, None, 'code', lang, source) or source
cr.execute('select value from ir_translation where lang=%s and type=%s and src=%s', (lang, 'code', source))
res_trans = cr.fetchone()
return res_trans and res_trans[0] or source
_ = GettextAlias()