[IMP] simplified cache implementation (and stored on the osv), does not support contexts.
bzr revid: vmt@openerp.com-20110830121541-abov2wezdn6kl2dc
This commit is contained in:
commit
bdd0c7cf3f
|
@ -33,7 +33,6 @@ class ir_attachment(osv.osv):
|
|||
"""
|
||||
if not ids:
|
||||
return
|
||||
ima = self.pool.get('ir.model.access')
|
||||
res_ids = {}
|
||||
if ids:
|
||||
if isinstance(ids, (int, long)):
|
||||
|
@ -47,12 +46,13 @@ class ir_attachment(osv.osv):
|
|||
if 'res_model' in values and 'res_id' in values:
|
||||
res_ids.setdefault(values['res_model'],set()).add(values['res_id'])
|
||||
|
||||
ima = self.pool.get('ir.model.access')
|
||||
for model, mids in res_ids.items():
|
||||
# ignore attachments that are not attached to a resource anymore when checking access rights
|
||||
# (resource was deleted but attachment was not)
|
||||
cr.execute('select id from '+self.pool.get(model)._table+' where id in %s', (tuple(mids),))
|
||||
mids = [x[0] for x in cr.fetchall()]
|
||||
ima.check(cr, uid, model, mode, context=context)
|
||||
ima.check(cr, uid, model, mode)
|
||||
self.pool.get(model).check_access_rule(cr, uid, mids, mode, context=context)
|
||||
|
||||
def search(self, cr, uid, args, offset=0, limit=None, order=None,
|
||||
|
@ -80,7 +80,7 @@ class ir_attachment(osv.osv):
|
|||
# performed in batch as much as possible.
|
||||
ima = self.pool.get('ir.model.access')
|
||||
for model, targets in model_attachments.iteritems():
|
||||
if not ima.check(cr, uid, model, 'read', raise_exception=False, context=context):
|
||||
if not ima.check(cr, uid, model, 'read', False):
|
||||
# remove all corresponding attachment ids
|
||||
for attach_id in itertools.chain(*targets.values()):
|
||||
ids.remove(attach_id)
|
||||
|
|
|
@ -466,6 +466,7 @@ class ir_model_access(osv.osv):
|
|||
a.perm_''' + access_mode, (model_name,))
|
||||
return [x[0] for x in cr.fetchall()]
|
||||
|
||||
@tools.ormcache()
|
||||
def check(self, cr, uid, model, mode='read', raise_exception=True, context=None):
|
||||
if uid==1:
|
||||
# User root have all accesses
|
||||
|
@ -520,8 +521,6 @@ class ir_model_access(osv.osv):
|
|||
raise except_orm(_('AccessError'), msgs[mode] % (model_name, groups) )
|
||||
return r
|
||||
|
||||
check = tools.cache()(check)
|
||||
|
||||
__cache_clearing_methods = []
|
||||
|
||||
def register_cache_clearing_method(self, model, method):
|
||||
|
@ -535,7 +534,7 @@ class ir_model_access(osv.osv):
|
|||
pass
|
||||
|
||||
def call_cache_clearing_methods(self, cr):
|
||||
self.check.clear_cache(cr.dbname) # clear the cache of check function
|
||||
self.check.clear_cache(self) # clear the cache of check function
|
||||
for model, method in self.__cache_clearing_methods:
|
||||
object_ = self.pool.get(model)
|
||||
if object_:
|
||||
|
@ -599,7 +598,7 @@ class ir_model_data(osv.osv):
|
|||
if not cr.fetchone():
|
||||
cr.execute('CREATE INDEX ir_model_data_module_name_index ON ir_model_data (module, name)')
|
||||
|
||||
@tools.cache()
|
||||
@tools.ormcache()
|
||||
def _get_id(self, cr, uid, module, xml_id):
|
||||
"""Returns the id of the ir.model.data record corresponding to a given module and xml_id (cached) or raise a ValueError if not found"""
|
||||
ids = self.search(cr, uid, [('module','=',module), ('name','=', xml_id)])
|
||||
|
@ -608,7 +607,7 @@ class ir_model_data(osv.osv):
|
|||
# the sql constraints ensure us we have only one result
|
||||
return ids[0]
|
||||
|
||||
@tools.cache()
|
||||
@tools.ormcache()
|
||||
def get_object_reference(self, cr, uid, module, xml_id):
|
||||
"""Returns (model, res_id) corresponding to a given module and xml_id (cached) or raise ValueError if not found"""
|
||||
data_id = self._get_id(cr, uid, module, xml_id)
|
||||
|
@ -637,10 +636,8 @@ class ir_model_data(osv.osv):
|
|||
|
||||
def unlink(self, cr, uid, ids, context=None):
|
||||
""" Regular unlink method, but make sure to clear the caches. """
|
||||
ref_ids = self.browse(cr, uid, ids, context=context)
|
||||
for ref_id in ref_ids:
|
||||
self._get_id.clear_cache(cr.dbname, uid, ref_id.module, ref_id.name)
|
||||
self.get_object_reference.clear_cache(cr.dbname, uid, ref_id.module, ref_id.name)
|
||||
self._get_id.clear_cache(self)
|
||||
self.get_object_reference.clear_cache(self)
|
||||
return super(ir_model_data,self).unlink(cr, uid, ids, context=context)
|
||||
|
||||
def _update(self,cr, uid, model, module, values, xml_id=False, store=True, noupdate=False, mode='init', res_id=False, context=None):
|
||||
|
@ -666,8 +663,8 @@ class ir_model_data(osv.osv):
|
|||
results = cr.fetchall()
|
||||
for imd_id2,res_id2,real_id2 in results:
|
||||
if not real_id2:
|
||||
self._get_id.clear_cache(cr.dbname, uid, module, xml_id)
|
||||
self.get_object_reference.clear_cache(cr.dbname, uid, module, xml_id)
|
||||
self._get_id.clear_cache(self, uid, module, xml_id)
|
||||
self.get_object_reference.clear_cache(self, uid, module, xml_id)
|
||||
cr.execute('delete from ir_model_data where id=%s', (imd_id2,))
|
||||
res_id = False
|
||||
else:
|
||||
|
|
|
@ -99,7 +99,7 @@ class ir_rule(osv.osv):
|
|||
(_check_model_obj, 'Rules are not supported for osv_memory objects !', ['model_id'])
|
||||
]
|
||||
|
||||
@tools.cache()
|
||||
@tools.ormcache()
|
||||
def _compute_domain(self, cr, uid, model_name, mode="read"):
|
||||
if mode not in self._MODES:
|
||||
raise ValueError('Invalid mode: %r' % (mode,))
|
||||
|
@ -139,25 +139,10 @@ class ir_rule(osv.osv):
|
|||
return []
|
||||
|
||||
def clear_cache(self, cr, uid):
|
||||
cr.execute("""SELECT DISTINCT m.model
|
||||
FROM ir_rule r
|
||||
JOIN ir_model m
|
||||
ON r.model_id = m.id
|
||||
WHERE r.global
|
||||
OR EXISTS (SELECT 1
|
||||
FROM rule_group_rel g_rel
|
||||
JOIN res_groups_users_rel u_rel
|
||||
ON g_rel.group_id = u_rel.gid
|
||||
WHERE g_rel.rule_group_id = r.id
|
||||
AND u_rel.uid = %s)
|
||||
""", (uid,))
|
||||
models = map(itemgetter(0), cr.fetchall())
|
||||
clear = partial(self._compute_domain.clear_cache, cr.dbname, uid)
|
||||
[clear(model, mode) for model in models for mode in self._MODES]
|
||||
|
||||
self._compute_domain.clear_cache(self)
|
||||
|
||||
def domain_get(self, cr, uid, model_name, mode='read', context=None):
|
||||
dom = self._compute_domain(cr, uid, model_name, mode=mode)
|
||||
dom = self._compute_domain(cr, uid, model_name, mode)
|
||||
if dom:
|
||||
# _where_calc is called as superuser. This means that rules can
|
||||
# involve objects on which the real uid has no acces rights.
|
||||
|
@ -169,20 +154,17 @@ class ir_rule(osv.osv):
|
|||
|
||||
def unlink(self, cr, uid, ids, context=None):
|
||||
res = super(ir_rule, self).unlink(cr, uid, ids, context=context)
|
||||
# Restart the cache on the _compute_domain method of ir.rule
|
||||
self._compute_domain.clear_cache(cr.dbname)
|
||||
self.clear_cache(cr, uid)
|
||||
return res
|
||||
|
||||
def create(self, cr, user, vals, context=None):
|
||||
res = super(ir_rule, self).create(cr, user, vals, context=context)
|
||||
# Restart the cache on the _compute_domain method of ir.rule
|
||||
self._compute_domain.clear_cache(cr.dbname)
|
||||
def create(self, cr, uid, vals, context=None):
|
||||
res = super(ir_rule, self).create(cr, uid, vals, context=context)
|
||||
self.clear_cache(cr, uid)
|
||||
return res
|
||||
|
||||
def write(self, cr, uid, ids, vals, context=None):
|
||||
res = super(ir_rule, self).write(cr, uid, ids, vals, context=context)
|
||||
# Restart the cache on the _compute_domain method
|
||||
self._compute_domain.clear_cache(cr.dbname)
|
||||
self.clear_cache(cr,uid)
|
||||
return res
|
||||
|
||||
ir_rule()
|
||||
|
|
|
@ -87,7 +87,7 @@ class ir_translation(osv.osv):
|
|||
cr.execute('CREATE INDEX ir_translation_ltn ON ir_translation (name, lang, type)')
|
||||
cr.commit()
|
||||
|
||||
@tools.cache(skiparg=3, multi='ids')
|
||||
@tools.ormcache_multi(skiparg=3, multi=6)
|
||||
def _get_ids(self, cr, uid, name, tt, lang, ids):
|
||||
translations = dict.fromkeys(ids, False)
|
||||
if ids:
|
||||
|
@ -107,9 +107,9 @@ class ir_translation(osv.osv):
|
|||
tr = self._get_ids(cr, uid, name, tt, lang, ids)
|
||||
for res_id in tr:
|
||||
if tr[res_id]:
|
||||
self._get_source.clear_cache(cr.dbname, uid, name, tt, lang, tr[res_id])
|
||||
self._get_source.clear_cache(cr.dbname, uid, name, tt, lang)
|
||||
self._get_ids.clear_cache(cr.dbname, uid, name, tt, lang, ids)
|
||||
self._get_source.clear_cache(self, uid, name, tt, lang, tr[res_id])
|
||||
self._get_source.clear_cache(self, uid, name, tt, lang)
|
||||
self._get_ids.clear_cache(self, uid, name, tt, lang, ids)
|
||||
|
||||
cr.execute('delete from ir_translation ' \
|
||||
'where lang=%s ' \
|
||||
|
@ -128,7 +128,7 @@ class ir_translation(osv.osv):
|
|||
})
|
||||
return len(ids)
|
||||
|
||||
@tools.cache(skiparg=3)
|
||||
@tools.ormcache(skiparg=3)
|
||||
def _get_source(self, cr, uid, name, types, lang, source=None):
|
||||
"""
|
||||
Returns the translation for the given combination of name, type, language
|
||||
|
@ -173,13 +173,11 @@ class ir_translation(osv.osv):
|
|||
return tools.ustr(source)
|
||||
return trad
|
||||
|
||||
def create(self, cursor, user, vals, context=None):
|
||||
def create(self, cr, uid, vals, context=None):
|
||||
if not context:
|
||||
context = {}
|
||||
ids = super(ir_translation, self).create(cursor, user, vals, context=context)
|
||||
for trans_obj in self.read(cursor, user, [ids], ['name','type','res_id','src','lang'], context=context):
|
||||
self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
|
||||
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
|
||||
ids = super(ir_translation, self).create(cr, uid, vals, context=context)
|
||||
self._get_source.clear_cache(self, vals.get('name',0), vals.get('type',0), vals.get('lang',0), vals.get('src',0))
|
||||
return ids
|
||||
|
||||
def write(self, cursor, user, ids, vals, context=None):
|
||||
|
@ -188,9 +186,9 @@ class ir_translation(osv.osv):
|
|||
if isinstance(ids, (int, long)):
|
||||
ids = [ids]
|
||||
result = super(ir_translation, self).write(cursor, user, ids, vals, context=context)
|
||||
self._get_source.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
|
||||
for trans_obj in self.read(cursor, user, ids, ['name','type','res_id','src','lang'], context=context):
|
||||
self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
|
||||
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
|
||||
self._get_ids.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
|
||||
return result
|
||||
|
||||
def unlink(self, cursor, user, ids, context=None):
|
||||
|
@ -199,8 +197,8 @@ class ir_translation(osv.osv):
|
|||
if isinstance(ids, (int, long)):
|
||||
ids = [ids]
|
||||
for trans_obj in self.read(cursor, user, ids, ['name','type','res_id','src','lang'], context=context):
|
||||
self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
|
||||
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
|
||||
self._get_source.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
|
||||
self._get_ids.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
|
||||
result = super(ir_translation, self).unlink(cursor, user, ids, context=context)
|
||||
return result
|
||||
|
||||
|
|
|
@ -88,7 +88,7 @@ class ir_ui_menu(osv.osv):
|
|||
|
||||
field = model_field.get(menu.action._name)
|
||||
if field and data[field]:
|
||||
if not modelaccess.check(cr, uid, data[field], raise_exception=False):
|
||||
if not modelaccess.check(cr, uid, data[field], False):
|
||||
continue
|
||||
else:
|
||||
# if there is no action, it's a 'folder' menu
|
||||
|
|
|
@ -180,7 +180,7 @@ class res_company(osv.osv):
|
|||
return rule.company_dest_id.id
|
||||
return user.company_id.id
|
||||
|
||||
@tools.cache()
|
||||
@tools.ormcache()
|
||||
def _get_company_children(self, cr, uid=None, company=None):
|
||||
if not company:
|
||||
return []
|
||||
|
@ -207,7 +207,7 @@ class res_company(osv.osv):
|
|||
# This function restart the cache on the _get_company_children method
|
||||
#
|
||||
def cache_restart(self, cr):
|
||||
self._get_company_children.clear_cache(cr.dbname)
|
||||
self._get_company_children.clear_cache(self)
|
||||
|
||||
def create(self, cr, uid, vals, context=None):
|
||||
if not vals.get('name', False) or vals.get('partner_id', False):
|
||||
|
|
|
@ -163,7 +163,7 @@ class lang(osv.osv):
|
|||
(_check_format, 'Invalid date/time format directive specified. Please refer to the list of allowed directives, displayed when you edit a language.', ['time_format', 'date_format'])
|
||||
]
|
||||
|
||||
@tools.cache(skiparg=3)
|
||||
@tools.ormcache(skiparg=3)
|
||||
def _lang_data_get(self, cr, uid, lang_id, monetary=False):
|
||||
conv = localeconv()
|
||||
lang_obj = self.browse(cr, uid, lang_id)
|
||||
|
@ -174,7 +174,7 @@ class lang(osv.osv):
|
|||
|
||||
def write(self, cr, uid, ids, vals, context=None):
|
||||
for lang_id in ids :
|
||||
self._lang_data_get.clear_cache(cr.dbname,lang_id= lang_id)
|
||||
self._lang_data_get.clear_cache(self,lang_id= lang_id)
|
||||
return super(lang, self).write(cr, uid, ids, vals, context)
|
||||
|
||||
def unlink(self, cr, uid, ids, context=None):
|
||||
|
|
|
@ -263,7 +263,7 @@ class users(osv.osv):
|
|||
o['password'] = '********'
|
||||
return o
|
||||
result = super(users, self).read(cr, uid, ids, fields, context, load)
|
||||
canwrite = self.pool.get('ir.model.access').check(cr, uid, 'res.users', 'write', raise_exception=False)
|
||||
canwrite = self.pool.get('ir.model.access').check(cr, uid, 'res.users', 'write', False)
|
||||
if not canwrite:
|
||||
if isinstance(ids, (int, float)):
|
||||
result = override_password(result)
|
||||
|
|
|
@ -77,6 +77,15 @@ class Registry(object):
|
|||
|
||||
return res
|
||||
|
||||
def clear_caches(self):
    """Clear the ORM caches of every model in this registry.

    This clears the caches associated to methods decorated with
    ``tools.ormcache`` or ``tools.ormcache_multi`` for all the models.
    """
    # BUGFIX: the method was declared without ``self`` although the body
    # uses ``self.models`` -- calling it on an instance raised TypeError.
    # Delegate to each model's own clear_caches().
    for model in self.models.itervalues():
        model.clear_caches()
|
||||
|
||||
|
||||
class RegistryManager(object):
|
||||
""" Model registries manager.
|
||||
|
@ -150,4 +159,19 @@ class RegistryManager(object):
|
|||
del cls.registries[db_name]
|
||||
|
||||
|
||||
@classmethod
def clear_caches(cls, db_name):
    """Clear the caches of the registry for ``db_name``, if loaded.

    This clears the caches associated to methods decorated with
    ``tools.ormcache`` or ``tools.ormcache_multi`` for all the models
    of the given database name.

    This method is given to spare you a ``RegistryManager.get(db_name)``
    that would load the given database if it was not already loaded.
    """
    # BUGFIX: the signature was ``(db_name)`` although the method is a
    # classmethod and the body uses ``cls.registries`` -- any call raised
    # a TypeError/NameError. ``cls`` is now properly the first parameter.
    # Only touch registries that are already loaded; loading one here
    # would defeat the purpose of this helper.
    if db_name in cls.registries:
        cls.registries[db_name].clear_caches()
|
||||
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||
|
|
|
@ -1259,7 +1259,7 @@ class orm_template(object):
|
|||
else:
|
||||
translated_msg = tmp_msg
|
||||
else:
|
||||
translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
|
||||
translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg) or msg
|
||||
error_msgs.append(
|
||||
_("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
|
||||
)
|
||||
|
@ -2177,7 +2177,7 @@ class orm_template(object):
|
|||
def read_string(self, cr, uid, id, langs, fields=None, context=None):
|
||||
res = {}
|
||||
res2 = {}
|
||||
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
|
||||
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read')
|
||||
if not fields:
|
||||
fields = self._columns.keys() + self._inherit_fields.keys()
|
||||
#FIXME: collect all calls to _get_source into one SQL call.
|
||||
|
@ -2201,7 +2201,7 @@ class orm_template(object):
|
|||
return res
|
||||
|
||||
def write_string(self, cr, uid, id, langs, vals, context=None):
|
||||
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
|
||||
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write')
|
||||
#FIXME: try to only call the translation in one SQL
|
||||
for lang in langs:
|
||||
for field in vals:
|
||||
|
@ -2246,6 +2246,18 @@ class orm_template(object):
|
|||
values = defaults
|
||||
return values
|
||||
|
||||
def clear_caches(self):
    """Clear this model's ORM caches.

    This clears the caches associated to methods decorated with
    ``tools.ormcache`` or ``tools.ormcache_multi``.
    """
    # The ``_ormcache`` dict is created lazily by the ormcache decorator;
    # if it does not exist yet there is nothing to clear, and we must not
    # create it here.  (Replaces the roundabout
    # ``try: getattr(...); ... except AttributeError: pass`` idiom.)
    if hasattr(self, '_ormcache'):
        self._ormcache = {}
|
||||
|
||||
class orm_memory(orm_template):
|
||||
|
||||
_protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']
|
||||
|
@ -2511,7 +2523,7 @@ class orm(orm_template):
|
|||
|
||||
"""
|
||||
context = context or {}
|
||||
self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
|
||||
self.pool.get('ir.model.access').check(cr, uid, self._name, 'read')
|
||||
if not fields:
|
||||
fields = self._columns.keys()
|
||||
|
||||
|
@ -3393,14 +3405,14 @@ class orm(orm_template):
|
|||
|
||||
"""
|
||||
ira = self.pool.get('ir.model.access')
|
||||
write_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
|
||||
ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
|
||||
write_access = ira.check(cr, user, self._name, 'write', False) or \
|
||||
ira.check(cr, user, self._name, 'create', False)
|
||||
return super(orm, self).fields_get(cr, user, fields, context, write_access)
|
||||
|
||||
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
|
||||
if not context:
|
||||
context = {}
|
||||
self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
|
||||
self.pool.get('ir.model.access').check(cr, user, self._name, 'read')
|
||||
if not fields:
|
||||
fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
|
||||
if isinstance(ids, (int, long)):
|
||||
|
@ -3686,7 +3698,7 @@ class orm(orm_template):
|
|||
|
||||
self._check_concurrency(cr, ids, context)
|
||||
|
||||
self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)
|
||||
self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink')
|
||||
|
||||
properties = self.pool.get('ir.property')
|
||||
domain = [('res_id', '=', False),
|
||||
|
@ -3823,7 +3835,7 @@ class orm(orm_template):
|
|||
ids = [ids]
|
||||
|
||||
self._check_concurrency(cr, ids, context)
|
||||
self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
|
||||
self.pool.get('ir.model.access').check(cr, user, self._name, 'write')
|
||||
|
||||
result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
|
||||
|
||||
|
@ -4032,7 +4044,7 @@ class orm(orm_template):
|
|||
"""
|
||||
if not context:
|
||||
context = {}
|
||||
self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)
|
||||
self.pool.get('ir.model.access').check(cr, user, self._name, 'create')
|
||||
|
||||
vals = self._add_missing_default_values(cr, user, vals, context)
|
||||
|
||||
|
@ -4488,7 +4500,7 @@ class orm(orm_template):
|
|||
"""
|
||||
if context is None:
|
||||
context = {}
|
||||
self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read', context=context)
|
||||
self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read')
|
||||
|
||||
query = self._where_calc(cr, user, args, context=context)
|
||||
self._apply_ir_rules(cr, user, query, 'read', context=context)
|
||||
|
|
|
@ -155,6 +155,7 @@ class db(netsvc.ExportService):
|
|||
|
||||
def exp_drop(self, db_name):
|
||||
sql_db.close_db(db_name)
|
||||
openerp.modules.registry.RegistryManager.clear_caches(db_name)
|
||||
openerp.netsvc.Agent.cancel(db_name)
|
||||
logger = netsvc.Logger()
|
||||
|
||||
|
@ -258,7 +259,8 @@ class db(netsvc.ExportService):
|
|||
|
||||
def exp_rename(self, old_name, new_name):
|
||||
sql_db.close_db(old_name)
|
||||
openerp.netsvc.Agent.cancel(db_name)
|
||||
openerp.modules.registry.RegistryManager.clear_caches(old_name)
|
||||
openerp.netsvc.Agent.cancel(old_name)
|
||||
logger = netsvc.Logger()
|
||||
|
||||
db = sql_db.db_connect('template1')
|
||||
|
|
|
@ -500,7 +500,6 @@ def db_connect(db_name):
|
|||
def close_db(db_name):
|
||||
""" You might want to call openerp.netsvc.Agent.cancel(db_name) along this function."""
|
||||
_Pool.close_all(dsn(db_name))
|
||||
tools.cache.clean_caches_for_db(db_name)
|
||||
ct = currentThread()
|
||||
if hasattr(ct, 'dbname'):
|
||||
delattr(ct, 'dbname')
|
||||
|
|
|
@ -0,0 +1,137 @@
|
|||
import lru
|
||||
|
||||
class ormcache(object):
    """LRU cache decorator for orm methods.

    The cache data lives on the decorated method's instance (in a
    ``_ormcache`` dict keyed by the decorated function), so it disappears
    together with the model/registry.  Positional arguments starting at
    index ``skiparg`` form the cache key; ``self`` and ``cr`` are always
    excluded (hence the recurring ``skiparg - 2`` offset against ``*args``).
    """

    def __init__(self, skiparg=2, size=8192, multi=None, timeout=None):
        # ``multi`` and ``timeout`` are accepted only for signature
        # compatibility with the legacy ``tools.cache`` decorator; they are
        # ignored here (see ormcache_multi for the multi-record variant).
        self.skiparg = skiparg
        self.size = size
        self.method = None
        # Simple hit/miss/error counters, reported by stat().
        self.stat_miss = 0
        self.stat_hit = 0
        self.stat_err = 0

    def __call__(self, m):
        # Decorator entry point: wrap ``m`` and expose ``clear_cache``
        # on the wrapper so callers can invalidate entries.
        self.method = m
        def lookup(self2, cr, *args):
            r = self.lookup(self2, cr, *args)
            return r
        lookup.clear_cache = self.clear
        return lookup

    def stat(self):
        # NOTE(review): raises ZeroDivisionError if called before any
        # lookup (hit + miss == 0) -- kept as-is for behavior parity.
        return "lookup-stats hit=%s miss=%s err=%s ratio=%.1f" % (self.stat_hit, self.stat_miss, self.stat_err, (100*float(self.stat_hit))/(self.stat_miss+self.stat_hit))

    def lru(self, self2):
        """Return the per-instance, per-method LRU, creating it lazily."""
        try:
            ormcache = getattr(self2, '_ormcache')
        except AttributeError:
            ormcache = self2._ormcache = {}
        try:
            d = ormcache[self.method]
        except KeyError:
            d = ormcache[self.method] = lru.LRU(self.size)
        return d

    def lookup(self, self2, cr, *args):
        d = self.lru(self2)
        key = args[self.skiparg-2:]
        try:
            r = d[key]
            self.stat_hit += 1
            return r
        except KeyError:
            self.stat_miss += 1
            # BUGFIX: store the result under ``key`` (the truncated args),
            # not the full ``args`` tuple.  The previous ``d[args] = ...``
            # meant that with skiparg > 2 an entry was stored under a key
            # that lookups never probed, so the cache never hit.
            value = d[key] = self.method(self2, cr, *args)
            return value
        except TypeError:
            # Unhashable argument: skip caching for this call entirely.
            self.stat_err += 1
            return self.method(self2, cr, *args)

    def clear(self, self2, *args):
        """Remove the ``*args`` entry from the cache, or every entry if
        ``*args`` is empty.
        """
        d = self.lru(self2)
        if args:
            try:
                key = args[self.skiparg-2:]
                del d[key]
            except KeyError:
                pass
        else:
            d.clear()
|
||||
|
||||
class ormcache_multi(ormcache):
    """LRU cache decorator for orm methods that return a dict keyed by id.

    The positional argument at index ``multi`` is a list of ids; results
    are cached per-id, and only the ids missing from the cache are passed
    on to the decorated method (in a single batched call).
    """

    def __init__(self, skiparg=2, size=8192, multi=3):
        super(ormcache_multi, self).__init__(skiparg, size)
        # Position of the ids argument relative to ``*args`` (``self``
        # and ``cr`` are not part of ``*args``, hence the -2).
        self.multi = multi - 2

    def lookup(self, self2, cr, *args):
        d = self.lru(self2)
        args = list(args)
        multi = self.multi
        ids = args[multi]
        r = {}
        miss = []

        # Probe the cache one id at a time, substituting each id in place
        # of the ids list to build the per-id key.
        for i in ids:
            args[multi] = i
            key = tuple(args[self.skiparg-2:])
            try:
                r[i] = d[key]
                self.stat_hit += 1
            except KeyError:
                # BUGFIX: catch only KeyError here.  The previous blanket
                # ``except Exception`` silently converted unrelated errors
                # (e.g. unhashable keys) into cache misses.
                self.stat_miss += 1
                miss.append(i)

        if miss:
            # Fetch all missing ids in one call to the decorated method...
            args[multi] = miss
            r.update(self.method(self2, cr, *args))

            # ...then store each fetched entry under its per-id key.
            for i in miss:
                args[multi] = i
                key = tuple(args[self.skiparg-2:])
                d[key] = r[i]

        return r
|
||||
|
||||
class dummy_cache(object):
    """Drop-in replacement for the cache decorators that performs no caching.

    The decorated function is returned unchanged, with a no-op
    ``clear_cache`` attribute attached so that callers invalidating the
    cache keep working.  Useful for benchmarking or tracking down
    cache-related bugs.
    """

    def __init__(self, *args, **kwargs):
        # Accept -- and ignore -- any decorator configuration
        # (skiparg, size, multi, timeout, ...).
        pass

    def __call__(self, fn):
        # Hand the function back untouched, with a no-op clear hook.
        fn.clear_cache = self.clear
        return fn

    def clear(self, *args, **kwargs):
        # Nothing is cached, so there is nothing to clear.
        pass
|
||||
|
||||
# Manual smoke test for the decorators above.
# NOTE: Python 2 only (print statement syntax); run this module directly.
if __name__ == '__main__':

    class A():
        # Cached on (a, b): the first call prints, the second is served
        # from the cache without printing.
        @ormcache()
        def m(self,a,b):
            print "A::m(", self,a,b
            return 1

        # Multi cache: the argument at position 3 (``ids``) is the list
        # of ids; results are cached per-id.
        @ormcache_multi(multi=3)
        def n(self,cr,uid,ids):
            print "m", self,cr,uid,ids
            return dict([(i,i) for i in ids])

    a=A()
    r=a.m(1,2)
    # Second identical call: expected to hit the cache (no print).
    r=a.m(1,2)
    r=a.n("cr",1,[1,2,3,4])
    # Ids 1 and 2 already cached: the method body should not run again.
    r=a.n("cr",1,[1,2])
    print r
    # Dump the raw per-method LRU contents (``d`` is the LRU's dict).
    for i in a._ormcache:
        print a._ormcache[i].d
    # Invalidate the entry for id 1 only, then refetch.
    a.n.clear_cache(a,1,1)
    r=a.n("cr",1,[1,2])
    print r
    r=a.n("cr",1,[1,2])

# For backward compatibility
cache = ormcache
|
|
@ -12,7 +12,7 @@ class LRUNode(object):
|
|||
self.me = me
|
||||
self.next = None
|
||||
|
||||
class LRU:
|
||||
class LRU(object):
|
||||
"""
|
||||
Implementation of a length-limited O(1) LRU queue.
|
||||
Built for and used by PyPE:
|
||||
|
@ -113,3 +113,8 @@ class LRU:
|
|||
del self[key]
|
||||
return v
|
||||
|
||||
@synchronized()
|
||||
def clear(self):
|
||||
self.d = {}
|
||||
self.first = None
|
||||
self.last = None
|
||||
|
|
|
@ -60,7 +60,7 @@ except ImportError:
|
|||
|
||||
import openerp.loglevels as loglevels
|
||||
from config import config
|
||||
from lru import LRU
|
||||
from cache import *
|
||||
|
||||
# get_encodings, ustr and exception_to_unicode were originally from tools.misc.
|
||||
# There are moved to loglevels until we refactor tools.
|
||||
|
@ -628,7 +628,6 @@ class UpdateableDict(local):
|
|||
def __ne__(self, y):
|
||||
return self.dict.__ne__(y)
|
||||
|
||||
|
||||
class currency(float):
|
||||
""" Deprecate
|
||||
|
||||
|
@ -650,163 +649,6 @@ class currency(float):
|
|||
# display_value = int(self*(10**(-self.accuracy))/self.rounding)*self.rounding/(10**(-self.accuracy))
|
||||
# return str(display_value)
|
||||
|
||||
|
||||
def is_hashable(h):
|
||||
try:
|
||||
hash(h)
|
||||
return True
|
||||
except TypeError:
|
||||
return False
|
||||
|
||||
class dummy_cache(object):
|
||||
""" Cache decorator replacement to actually do no caching.
|
||||
|
||||
This can be useful to benchmark and/or track memory leak.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, timeout=None, skiparg=2, multi=None, size=8192):
|
||||
pass
|
||||
|
||||
def clear(self, dbname, *args, **kwargs):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def clean_caches_for_db(cls, dbname):
|
||||
pass
|
||||
|
||||
def __call__(self, fn):
|
||||
fn.clear_cache = self.clear
|
||||
return fn
|
||||
|
||||
class real_cache(object):
|
||||
"""
|
||||
Use it as a decorator of the function you plan to cache
|
||||
Timeout: 0 = no timeout, otherwise in seconds
|
||||
"""
|
||||
|
||||
__caches = []
|
||||
|
||||
def __init__(self, timeout=None, skiparg=2, multi=None, size=8192):
|
||||
assert skiparg >= 2 # at least self and cr
|
||||
if timeout is None:
|
||||
self.timeout = config['cache_timeout']
|
||||
else:
|
||||
self.timeout = timeout
|
||||
self.skiparg = skiparg
|
||||
self.multi = multi
|
||||
self.lasttime = time.time()
|
||||
self.cache = LRU(size) # TODO take size from config
|
||||
self.fun = None
|
||||
cache.__caches.append(self)
|
||||
|
||||
|
||||
def _generate_keys(self, dbname, kwargs2):
|
||||
"""
|
||||
Generate keys depending of the arguments and the self.mutli value
|
||||
"""
|
||||
|
||||
def to_tuple(d):
|
||||
pairs = d.items()
|
||||
pairs.sort(key=lambda (k,v): k)
|
||||
for i, (k, v) in enumerate(pairs):
|
||||
if isinstance(v, dict):
|
||||
pairs[i] = (k, to_tuple(v))
|
||||
if isinstance(v, (list, set)):
|
||||
pairs[i] = (k, tuple(v))
|
||||
elif not is_hashable(v):
|
||||
pairs[i] = (k, repr(v))
|
||||
return tuple(pairs)
|
||||
|
||||
if not self.multi:
|
||||
key = (('dbname', dbname),) + to_tuple(kwargs2)
|
||||
yield key, None
|
||||
else:
|
||||
multis = kwargs2[self.multi][:]
|
||||
for id in multis:
|
||||
kwargs2[self.multi] = (id,)
|
||||
key = (('dbname', dbname),) + to_tuple(kwargs2)
|
||||
yield key, id
|
||||
|
||||
def _unify_args(self, *args, **kwargs):
|
||||
# Update named arguments with positional argument values (without self and cr)
|
||||
kwargs2 = self.fun_default_values.copy()
|
||||
kwargs2.update(kwargs)
|
||||
kwargs2.update(dict(zip(self.fun_arg_names, args[self.skiparg-2:])))
|
||||
return kwargs2
|
||||
|
||||
def clear(self, dbname, *args, **kwargs):
|
||||
"""clear the cache for database dbname
|
||||
if *args and **kwargs are both empty, clear all the keys related to this database
|
||||
"""
|
||||
if not args and not kwargs:
|
||||
keys_to_del = [key for key in self.cache.keys() if key[0][1] == dbname]
|
||||
else:
|
||||
kwargs2 = self._unify_args(*args, **kwargs)
|
||||
keys_to_del = [key for key, _ in self._generate_keys(dbname, kwargs2) if key in self.cache.keys()]
|
||||
|
||||
for key in keys_to_del:
|
||||
self.cache.pop(key)
|
||||
|
||||
@classmethod
|
||||
def clean_caches_for_db(cls, dbname):
|
||||
for c in cls.__caches:
|
||||
c.clear(dbname)
|
||||
|
||||
def __call__(self, fn):
|
||||
if self.fun is not None:
|
||||
raise Exception("Can not use a cache instance on more than one function")
|
||||
self.fun = fn
|
||||
|
||||
argspec = inspect.getargspec(fn)
|
||||
self.fun_arg_names = argspec[0][self.skiparg:]
|
||||
self.fun_default_values = {}
|
||||
if argspec[3]:
|
||||
self.fun_default_values = dict(zip(self.fun_arg_names[-len(argspec[3]):], argspec[3]))
|
||||
|
||||
def cached_result(self2, cr, *args, **kwargs):
|
||||
if time.time()-int(self.timeout) > self.lasttime:
|
||||
self.lasttime = time.time()
|
||||
t = time.time()-int(self.timeout)
|
||||
old_keys = [key for key in self.cache.keys() if self.cache[key][1] < t]
|
||||
for key in old_keys:
|
||||
self.cache.pop(key)
|
||||
|
||||
kwargs2 = self._unify_args(*args, **kwargs)
|
||||
|
||||
result = {}
|
||||
notincache = {}
|
||||
for key, id in self._generate_keys(cr.dbname, kwargs2):
|
||||
if key in self.cache:
|
||||
result[id] = self.cache[key][0]
|
||||
else:
|
||||
notincache[id] = key
|
||||
|
||||
if notincache:
|
||||
if self.multi:
|
||||
kwargs2[self.multi] = notincache.keys()
|
||||
|
||||
result2 = fn(self2, cr, *args[:self.skiparg-2], **kwargs2)
|
||||
if not self.multi:
|
||||
key = notincache[None]
|
||||
self.cache[key] = (result2, time.time())
|
||||
result[None] = result2
|
||||
else:
|
||||
for id in result2:
|
||||
key = notincache[id]
|
||||
self.cache[key] = (result2[id], time.time())
|
||||
result.update(result2)
|
||||
|
||||
if not self.multi:
|
||||
return result[None]
|
||||
return result
|
||||
|
||||
cached_result.clear_cache = self.clear
|
||||
return cached_result
|
||||
|
||||
# TODO make it an option
|
||||
cache = real_cache
|
||||
|
||||
def to_xml(s):
    """Escape the XML special characters ``&``, ``<`` and ``>`` in *s*.

    NOTE(review): the scraped source showed each replacement target equal
    to its replacement (the HTML renderer unescaped the entity strings,
    turning the function into a no-op); restored to the standard XML
    entity escapes.  ``&`` must be escaped first, otherwise the ``&`` of
    ``&lt;``/``&gt;`` would be double-escaped.
    """
    return s.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
|
||||
|
||||
|
|
Loading…
Reference in New Issue