Merge pull request #5676 from odoo-dev/8.0-memory_optimization-rco

Reduce memory footprint of server

The tricks used are:
- use a global LRU for `ormcache` (auto-balancing between registries);
- remove unnecessary data structures (binary class hierarchy of models);
- compute some data structures on demand (`_all_columns`);
- optimize field attributes (empty collections may be shared);
- optimize memory size of fields and columns by using slots.

On a database with the modules sale, purchase and stock installed, the memory footprint of the registry went from 20.3 MB to 7.4 MB (as measured with heapy). In other words, the footprint was reduced to roughly one third; two of the tricks are sketched below.
Raphael Collet 2015-03-23 14:39:21 +01:00
commit 0ed63d73a6
11 changed files with 569 additions and 379 deletions
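To make the first trick concrete: the commit replaces the per-model `_ormcache` dicts with one bounded LRU held by the registry manager, keyed by tuples starting with `(db_name, model_name, ...)`, so a whole model or database can be invalidated by prefix while the single size limit balances memory between busy and idle databases. The sketch below is a deliberately simplified, hypothetical stand-in (not the actual `openerp.tools.lru.LRU`); only the key shape and the `clear_prefix` idea are taken from the commit.

# Hypothetical, simplified stand-in for the global ormcache (not the real
# openerp.tools.lru.LRU); keys are tuples starting with (db_name, model_name).
from collections import OrderedDict

class PrefixLRU(object):
    def __init__(self, maxsize):
        self.maxsize = maxsize
        self._data = OrderedDict()          # ordered from LRU to MRU

    def get(self, key, default=None):
        try:
            value = self._data.pop(key)     # re-insert to mark as most recent
            self._data[key] = value
            return value
        except KeyError:
            return default

    def __setitem__(self, key, value):
        self._data.pop(key, None)
        self._data[key] = value
        while len(self._data) > self.maxsize:
            self._data.popitem(last=False)  # evict the least recently used

    def clear_prefix(self, prefix):
        # drop every entry whose key starts with `prefix`
        n = len(prefix)
        for key in [k for k in self._data if k[:n] == prefix]:
            del self._data[key]

cache = PrefixLRU(8192)
cache[('db1', 'res.partner', 'name_get', (7,))] = ['Agrolait']
cache[('db2', 'res.partner', 'name_get', (7,))] = ['Gemini Furniture']
cache.clear_prefix(('db1', 'res.partner'))  # invalidate one model of one db
assert cache.get(('db1', 'res.partner', 'name_get', (7,))) is None
assert cache.get(('db2', 'res.partner', 'name_get', (7,))) == ['Gemini Furniture']

Because every registry shares the same bounded structure, a heavily used database naturally ends up holding more cache entries than an idle one, which is the "auto-balancing" mentioned in the list of tricks.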

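The last trick, slot-based fields and columns, hinges on a metaclass that merges the `_slots` dicts declared along the class hierarchy and exposes only the newly introduced keys as `__slots__` (see `MetaField.__new__` in the diff below). A condensed sketch of the idea follows; the names `MetaSlots`, `BaseField` and `FloatField` are illustrative only, and the Python 2 `__metaclass__`/`iteritems` style matches the codebase.

# Hypothetical example classes (not the actual Odoo ones), Python 2 style.
class MetaSlots(type):
    def __new__(meta, name, bases, attrs):
        # combine the `_slots` defaults declared by the parent classes
        base_slots = {}
        for base in reversed(bases):
            base_slots.update(getattr(base, '_slots', {}))
        slots = dict(base_slots)
        slots.update(attrs.get('_slots', {}))
        # only keys not already slotted by a parent become __slots__ here
        attrs['__slots__'] = set(slots) - set(base_slots)
        attrs['_slots'] = slots
        return type.__new__(meta, name, bases, attrs)

class BaseField(object):
    __metaclass__ = MetaSlots
    _slots = {'string': None, 'required': False}

    def __init__(self, **kwargs):
        # initialize every slot from its default, then apply overrides
        for attr, default in self._slots.iteritems():
            setattr(self, attr, kwargs.get(attr, default))

class FloatField(BaseField):
    _slots = {'digits': None}       # adds a single slot on top of BaseField

f = FloatField(string='Amount', digits=(16, 2))
assert f.required is False and f.digits == (16, 2)
assert not hasattr(f, '__dict__')   # no per-instance dict, hence the savings

Slotted instances carry no per-instance `__dict__`, so each of the many field and column objects in a registry becomes much smaller; attribute values that are not declared slots go into an overflow dict (`_attrs` for fields, `_args` for columns), which starts out as the shared `frozendict` `EMPTY_DICT` so that empty collections cost nothing per instance.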

@@ -48,11 +48,6 @@ class decimal_precision(orm.Model):
     def clear_cache(self, cr):
         """clear cache and update models. Notify other workers to restart their registry."""
         self.precision_get.clear_cache(self)
-        env = openerp.api.Environment(cr, SUPERUSER_ID, {})
-        for model in self.pool.values():
-            for field in model._fields.values():
-                if field.type == 'float':
-                    field._setup_digits(env)
         RegistryManager.signal_registry_change(cr.dbname)

     def create(self, cr, uid, data, context=None):


@@ -56,11 +56,15 @@ del time
 # The hard-coded super-user id (a.k.a. administrator, or root user).
 SUPERUSER_ID = 1

-def registry(database_name):
+def registry(database_name=None):
     """
-    Return the model registry for the given database. If the registry does not
-    exist yet, it is created on the fly.
+    Return the model registry for the given database, or the database mentioned
+    on the current thread. If the registry does not exist yet, it is created on
+    the fly.
     """
+    if database_name is None:
+        import threading
+        database_name = threading.currentThread().dbname
     return modules.registry.RegistryManager.get(database_name)

 #----------------------------------------------------------


@@ -31,12 +31,10 @@ class TestAPI(common.TransactionCase):
         self.assertTrue(ids)
         self.assertTrue(partners)

-        # partners and its contents are instance of the model, and share its ormcache
+        # partners and its contents are instance of the model
         self.assertIsRecordset(partners, 'res.partner')
-        self.assertIs(partners._ormcache, self.env['res.partner']._ormcache)
         for p in partners:
             self.assertIsRecord(p, 'res.partner')
-            self.assertIs(p._ormcache, self.env['res.partner']._ormcache)

         self.assertEqual([p.id for p in partners], ids)
         self.assertEqual(self.env['res.partner'].browse(ids), partners)


@@ -111,9 +111,6 @@ class TestPropertyField(common.TransactionCase):
         self.partner._columns.update({
             'property_country': fields.property(type='many2one', relation="res.country", string="Country by company"),
         })
-        self.partner._all_columns.update({
-            'property_country': fields.column_info('property_country', self.partner._columns['property_country'], None, None, None),
-        })
         self.partner._field_create(cr)

         partner_id = self.partner.create(cr, alice, {


@ -30,12 +30,13 @@ import logging
import pytz import pytz
import xmlrpclib import xmlrpclib
from openerp.tools import float_round, ustr, html_sanitize from openerp.tools import float_round, frozendict, html_sanitize, ustr
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT
DATE_LENGTH = len(date.today().strftime(DATE_FORMAT)) DATE_LENGTH = len(date.today().strftime(DATE_FORMAT))
DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT)) DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT))
EMPTY_DICT = frozendict()
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
@ -72,10 +73,25 @@ class MetaField(type):
""" Metaclass for field classes. """ """ Metaclass for field classes. """
by_type = {} by_type = {}
def __new__(meta, name, bases, attrs):
""" Combine the ``_slots`` dict from parent classes, and determine
`__slots__` for them on the new class.
"""
base_slots = {}
for base in reversed(bases):
base_slots.update(getattr(base, '_slots', ()))
slots = dict(base_slots)
slots.update(attrs.get('_slots', ()))
attrs['__slots__'] = set(slots) - set(base_slots)
attrs['_slots'] = slots
return type.__new__(meta, name, bases, attrs)
def __init__(cls, name, bases, attrs): def __init__(cls, name, bases, attrs):
super(MetaField, cls).__init__(name, bases, attrs) super(MetaField, cls).__init__(name, bases, attrs)
if cls.type: if cls.type and cls.type not in MetaField.by_type:
cls.by_type[cls.type] = cls MetaField.by_type[cls.type] = cls
# compute class attributes to avoid calling dir() on fields # compute class attributes to avoid calling dir() on fields
cls.column_attrs = [] cls.column_attrs = []
@ -253,56 +269,81 @@ class Field(object):
""" """
__metaclass__ = MetaField __metaclass__ = MetaField
_attrs = None # dictionary with all field attributes type = None # type of the field (string)
_free_attrs = None # list of semantic-free attribute names relational = False # whether the field is a relational one
automatic = False # whether the field is automatically created ("magic" field) _slots = {
inherited = False # whether the field is inherited (_inherits) '_attrs': EMPTY_DICT, # dictionary of field attributes; it contains:
column = None # the column corresponding to the field # - all attributes after __init__()
setup_done = False # whether the field has been set up # - free attributes only after set_class_name()
name = None # name of the field 'automatic': False, # whether the field is automatically created ("magic" field)
type = None # type of the field (string) 'inherited': False, # whether the field is inherited (_inherits)
relational = False # whether the field is a relational one 'column': None, # the column corresponding to the field
model_name = None # name of the model of this field 'setup_done': False, # whether the field has been set up
comodel_name = None # name of the model of values (if relational)
inverse_fields = None # list of inverse fields (objects)
store = True # whether the field is stored in database 'name': None, # name of the field
index = False # whether the field is indexed in database 'model_name': None, # name of the model of this field
manual = False # whether the field is a custom field 'comodel_name': None, # name of the model of values (if relational)
copy = True # whether the field is copied over by BaseModel.copy()
depends = () # collection of field dependencies
recursive = False # whether self depends on itself
compute = None # compute(recs) computes field on recs
compute_sudo = False # whether field should be recomputed as admin
inverse = None # inverse(recs) inverses field on recs
search = None # search(recs, operator, value) searches on self
related = None # sequence of field names, for related fields
related_sudo = True # whether related fields should be read as admin
company_dependent = False # whether `self` is company-dependent (property field)
default = None # default(recs) returns the default value
string = None # field label 'store': True, # whether the field is stored in database
help = None # field tooltip 'index': False, # whether the field is indexed in database
readonly = False 'manual': False, # whether the field is a custom field
required = False 'copy': True, # whether the field is copied over by BaseModel.copy()
states = None 'depends': (), # collection of field dependencies
groups = False # csv list of group xml ids 'recursive': False, # whether self depends on itself
change_default = None # whether the field may trigger a "user-onchange" 'compute': None, # compute(recs) computes field on recs
deprecated = None # whether the field is ... deprecated 'compute_sudo': False, # whether field should be recomputed as admin
'inverse': None, # inverse(recs) inverses field on recs
'search': None, # search(recs, operator, value) searches on self
'related': None, # sequence of field names, for related fields
'related_sudo': True, # whether related fields should be read as admin
'company_dependent': False, # whether `self` is company-dependent (property field)
'default': None, # default(recs) returns the default value
'string': None, # field label
'help': None, # field tooltip
'readonly': False, # whether the field is readonly
'required': False, # whether the field is required
'states': None, # set readonly and required depending on state
'groups': None, # csv list of group xml ids
'change_default': False, # whether the field may trigger a "user-onchange"
'deprecated': None, # whether the field is deprecated
'inverse_fields': (), # collection of inverse fields (objects)
'computed_fields': (), # fields computed with the same method as self
'related_field': None, # corresponding related field
'_triggers': (), # invalidation and recomputation triggers
}
def __init__(self, string=None, **kwargs): def __init__(self, string=None, **kwargs):
kwargs['string'] = string kwargs['string'] = string
self._attrs = {key: val for key, val in kwargs.iteritems() if val is not None} attrs = {key: val for key, val in kwargs.iteritems() if val is not None}
self._free_attrs = [] self._attrs = attrs or EMPTY_DICT
# self._triggers is a set of pairs (field, path) that represents the def __getattr__(self, name):
# computed fields that depend on `self`. When `self` is modified, it """ Access non-slot field attribute. """
# invalidates the cache of each `field`, and registers the records to try:
# recompute based on `path`. See method `modified` below for details. return self._attrs[name]
self._triggers = set() except KeyError:
self.inverse_fields = [] raise AttributeError(name)
def __setattr__(self, name, value):
""" Set slot or non-slot field attribute. """
try:
object.__setattr__(self, name, value)
except AttributeError:
if self._attrs:
self._attrs[name] = value
else:
self._attrs = {name: value} # replace EMPTY_DICT
def __delattr__(self, name):
""" Remove non-slot field attribute. """
try:
del self._attrs[name]
except KeyError:
raise AttributeError(name)
def new(self, **kwargs): def new(self, **kwargs):
""" Return a field of the same type as `self`, with its own parameters. """ """ Return a field of the same type as `self`, with its own parameters. """
@ -310,6 +351,10 @@ class Field(object):
def set_class_name(self, cls, name): def set_class_name(self, cls, name):
""" Assign the model class and field name of `self`. """ """ Assign the model class and field name of `self`. """
self_attrs = self._attrs
for attr, value in self._slots.iteritems():
setattr(self, attr, value)
self.model_name = cls._name self.model_name = cls._name
self.name = name self.name = name
@ -320,7 +365,7 @@ class Field(object):
attrs.update(field._attrs) attrs.update(field._attrs)
else: else:
attrs.clear() attrs.clear()
attrs.update(self._attrs) # necessary in case self is not in cls attrs.update(self_attrs) # necessary in case self is not in cls
# initialize `self` with `attrs` # initialize `self` with `attrs`
if attrs.get('compute'): if attrs.get('compute'):
@ -338,8 +383,6 @@ class Field(object):
attrs.pop('store', None) attrs.pop('store', None)
for attr, value in attrs.iteritems(): for attr, value in attrs.iteritems():
if not hasattr(self, attr):
self._free_attrs.append(attr)
setattr(self, attr, value) setattr(self, attr, value)
if not self.string and not self.related: if not self.string and not self.related:
@ -478,10 +521,9 @@ class Field(object):
if not getattr(self, attr): if not getattr(self, attr):
setattr(self, attr, getattr(field, prop)) setattr(self, attr, getattr(field, prop))
for attr in field._free_attrs: for attr, value in field._attrs.iteritems():
if attr not in self._free_attrs: if attr not in self._attrs:
self._free_attrs.append(attr) setattr(self, attr, value)
setattr(self, attr, getattr(field, attr))
# special case for states: copy it only for inherited fields # special case for states: copy it only for inherited fields
if not self.states and self.inherited: if not self.states and self.inherited:
@ -533,6 +575,16 @@ class Field(object):
# #
# Setup of field triggers # Setup of field triggers
# #
# The triggers is a collection of pairs (field, path) of computed fields
# that depend on `self`. When `self` is modified, it invalidates the cache
# of each `field`, and registers the records to recompute based on `path`.
# See method `modified` below for details.
#
def add_trigger(self, trigger):
""" Add a recomputation trigger on `self`. """
if trigger not in self._triggers:
self._triggers += (trigger,)
def setup_triggers(self, env): def setup_triggers(self, env):
""" Add the necessary triggers to invalidate/recompute `self`. """ """ Add the necessary triggers to invalidate/recompute `self`. """
@ -561,12 +613,12 @@ class Field(object):
continue continue
#_logger.debug("Add trigger on %s to recompute %s", field, self) #_logger.debug("Add trigger on %s to recompute %s", field, self)
field._triggers.add((self, '.'.join(path0 or ['id']))) field.add_trigger((self, '.'.join(path0 or ['id'])))
# add trigger on inverse fields, too # add trigger on inverse fields, too
for invf in field.inverse_fields: for invf in field.inverse_fields:
#_logger.debug("Add trigger on %s to recompute %s", invf, self) #_logger.debug("Add trigger on %s to recompute %s", invf, self)
invf._triggers.add((self, '.'.join(path0 + [head]))) invf.add_trigger((self, '.'.join(path0 + [head])))
# recursively traverse the dependency # recursively traverse the dependency
if tail: if tail:
@ -648,8 +700,8 @@ class Field(object):
args = {} args = {}
for attr, prop in self.column_attrs: for attr, prop in self.column_attrs:
args[attr] = getattr(self, prop) args[attr] = getattr(self, prop)
for attr in self._free_attrs: for attr, value in self._attrs.iteritems():
args[attr] = getattr(self, attr) args[attr] = value
if self.company_dependent: if self.company_dependent:
# company-dependent fields are mapped to former property fields # company-dependent fields are mapped to former property fields
@ -959,10 +1011,11 @@ class Boolean(Field):
class Integer(Field): class Integer(Field):
type = 'integer' type = 'integer'
group_operator = None # operator for aggregating values _slots = {
'group_operator': None, # operator for aggregating values
}
_related_group_operator = property(attrgetter('group_operator')) _related_group_operator = property(attrgetter('group_operator'))
_column_group_operator = property(attrgetter('group_operator')) _column_group_operator = property(attrgetter('group_operator'))
def convert_to_cache(self, value, record, validate=True): def convert_to_cache(self, value, record, validate=True):
@ -990,27 +1043,31 @@ class Float(Field):
cursor and returning a pair (total, decimal) cursor and returning a pair (total, decimal)
""" """
type = 'float' type = 'float'
_digits = None # digits argument passed to class initializer _slots = {
digits = None # digits as computed by setup() '_digits': None, # digits argument passed to class initializer
group_operator = None # operator for aggregating values 'group_operator': None, # operator for aggregating values
}
def __init__(self, string=None, digits=None, **kwargs): def __init__(self, string=None, digits=None, **kwargs):
super(Float, self).__init__(string=string, _digits=digits, **kwargs) super(Float, self).__init__(string=string, _digits=digits, **kwargs)
@property
def digits(self):
if callable(self._digits):
with registry().cursor() as cr:
return self._digits(cr)
else:
return self._digits
def _setup_digits(self, env): def _setup_digits(self, env):
""" Setup the digits for `self` and its corresponding column """ """ Setup the digits for `self` and its corresponding column """
self.digits = self._digits(env.cr) if callable(self._digits) else self._digits pass
if self.digits:
assert isinstance(self.digits, (tuple, list)) and len(self.digits) >= 2, \
"Float field %s with digits %r, expecting (total, decimal)" % (self, self.digits)
if self.column:
self.column.digits_change(env.cr)
def _setup_regular(self, env): def _setup_regular(self, env):
super(Float, self)._setup_regular(env) super(Float, self)._setup_regular(env)
self._setup_digits(env) self._setup_digits(env)
_related_digits = property(attrgetter('digits')) _related__digits = property(attrgetter('_digits'))
_related_group_operator = property(attrgetter('group_operator')) _related_group_operator = property(attrgetter('group_operator'))
_description_digits = property(attrgetter('digits')) _description_digits = property(attrgetter('digits'))
@ -1021,15 +1078,16 @@ class Float(Field):
def convert_to_cache(self, value, record, validate=True): def convert_to_cache(self, value, record, validate=True):
# apply rounding here, otherwise value in cache may be wrong! # apply rounding here, otherwise value in cache may be wrong!
if self.digits: value = float(value or 0.0)
return float_round(float(value or 0.0), precision_digits=self.digits[1]) digits = self.digits
else: return float_round(value, precision_digits=digits[1]) if digits else value
return float(value or 0.0)
class _String(Field): class _String(Field):
""" Abstract class for string fields. """ """ Abstract class for string fields. """
translate = False _slots = {
'translate': False, # whether the field is translated
}
_column_translate = property(attrgetter('translate')) _column_translate = property(attrgetter('translate'))
_related_translate = property(attrgetter('translate')) _related_translate = property(attrgetter('translate'))
@ -1044,17 +1102,19 @@ class Char(_String):
:param bool translate: whether the values of this field can be translated :param bool translate: whether the values of this field can be translated
""" """
type = 'char' type = 'char'
size = None _slots = {
'size': None, # maximum size of values (deprecated)
}
_column_size = property(attrgetter('size'))
_related_size = property(attrgetter('size'))
_description_size = property(attrgetter('size'))
def _setup_regular(self, env): def _setup_regular(self, env):
super(Char, self)._setup_regular(env) super(Char, self)._setup_regular(env)
assert isinstance(self.size, (NoneType, int)), \ assert isinstance(self.size, (NoneType, int)), \
"Char field %s with non-integer size %r" % (self, self.size) "Char field %s with non-integer size %r" % (self, self.size)
_column_size = property(attrgetter('size'))
_related_size = property(attrgetter('size'))
_description_size = property(attrgetter('size'))
def convert_to_cache(self, value, record, validate=True): def convert_to_cache(self, value, record, validate=True):
if value is None or value is False: if value is None or value is False:
return False return False
@ -1075,8 +1135,10 @@ class Text(_String):
class Html(_String): class Html(_String):
type = 'html' type = 'html'
sanitize = True # whether value must be sanitized _slots = {
strip_style = False # whether to strip style attributes 'sanitize': True, # whether value must be sanitized
'strip_style': False, # whether to strip style attributes
}
_column_sanitize = property(attrgetter('sanitize')) _column_sanitize = property(attrgetter('sanitize'))
_related_sanitize = property(attrgetter('sanitize')) _related_sanitize = property(attrgetter('sanitize'))
@ -1245,8 +1307,9 @@ class Selection(Field):
<field-incremental-definition>`. <field-incremental-definition>`.
""" """
type = 'selection' type = 'selection'
selection = None # [(value, string), ...], function or method name _slots = {
selection_add = None # [(value, string), ...] 'selection': None, # [(value, string), ...], function or method name
}
def __init__(self, selection=None, string=None, **kwargs): def __init__(self, selection=None, string=None, **kwargs):
if callable(selection): if callable(selection):
@ -1337,20 +1400,18 @@ class Selection(Field):
class Reference(Selection): class Reference(Selection):
type = 'reference' type = 'reference'
size = None _slots = {
'size': None, # maximum size of values (deprecated)
}
def __init__(self, selection=None, string=None, **kwargs): _related_size = property(attrgetter('size'))
super(Reference, self).__init__(selection=selection, string=string, **kwargs) _column_size = property(attrgetter('size'))
def _setup_regular(self, env): def _setup_regular(self, env):
super(Reference, self)._setup_regular(env) super(Reference, self)._setup_regular(env)
assert isinstance(self.size, (NoneType, int)), \ assert isinstance(self.size, (NoneType, int)), \
"Reference field %s with non-integer size %r" % (self, self.size) "Reference field %s with non-integer size %r" % (self, self.size)
_related_size = property(attrgetter('size'))
_column_size = property(attrgetter('size'))
def convert_to_cache(self, value, record, validate=True): def convert_to_cache(self, value, record, validate=True):
if isinstance(value, BaseModel): if isinstance(value, BaseModel):
if ((not validate or value._name in self.get_values(record.env)) if ((not validate or value._name in self.get_values(record.env))
@ -1376,8 +1437,10 @@ class Reference(Selection):
class _Relational(Field): class _Relational(Field):
""" Abstract class for relational fields. """ """ Abstract class for relational fields. """
relational = True relational = True
domain = None # domain for searching values _slots = {
context = None # context for searching values 'domain': [], # domain for searching values
'context': {}, # context for searching values
}
def _setup_regular(self, env): def _setup_regular(self, env):
super(_Relational, self)._setup_regular(env) super(_Relational, self)._setup_regular(env)
@ -1445,9 +1508,11 @@ class Many2one(_Relational):
fields or field extensions. fields or field extensions.
""" """
type = 'many2one' type = 'many2one'
ondelete = 'set null' # what to do when value is deleted _slots = {
auto_join = False # whether joins are generated upon search 'ondelete': 'set null', # what to do when value is deleted
delegate = False # whether self implements delegation 'auto_join': False, # whether joins are generated upon search
'delegate': False, # whether self implements delegation
}
def __init__(self, comodel_name=None, string=None, **kwargs): def __init__(self, comodel_name=None, string=None, **kwargs):
super(Many2one, self).__init__(comodel_name=comodel_name, string=string, **kwargs) super(Many2one, self).__init__(comodel_name=comodel_name, string=string, **kwargs)
@ -1650,10 +1715,12 @@ class One2many(_RelationalMulti):
the case of related fields or field extensions. the case of related fields or field extensions.
""" """
type = 'one2many' type = 'one2many'
inverse_name = None # name of the inverse field _slots = {
auto_join = False # whether joins are generated upon search 'inverse_name': None, # name of the inverse field
limit = None # optional limit to use upon read 'auto_join': False, # whether joins are generated upon search
copy = False # o2m are not copied by default 'limit': None, # optional limit to use upon read
'copy': False, # o2m are not copied by default
}
def __init__(self, comodel_name=None, inverse_name=None, string=None, **kwargs): def __init__(self, comodel_name=None, inverse_name=None, string=None, **kwargs):
super(One2many, self).__init__( super(One2many, self).__init__(
@ -1674,8 +1741,8 @@ class One2many(_RelationalMulti):
# (res_model/res_id pattern). Only inverse the field if this is # (res_model/res_id pattern). Only inverse the field if this is
# a `Many2one` field. # a `Many2one` field.
if isinstance(invf, Many2one): if isinstance(invf, Many2one):
self.inverse_fields.append(invf) self.inverse_fields += (invf,)
invf.inverse_fields.append(self) invf.inverse_fields += (self,)
_description_relation_field = property(attrgetter('inverse_name')) _description_relation_field = property(attrgetter('inverse_name'))
@ -1715,10 +1782,12 @@ class Many2many(_RelationalMulti):
""" """
type = 'many2many' type = 'many2many'
relation = None # name of table _slots = {
column1 = None # column of table referring to model 'relation': None, # name of table
column2 = None # column of table referring to comodel 'column1': None, # column of table referring to model
limit = None # optional limit to use upon read 'column2': None, # column of table referring to comodel
'limit': None, # optional limit to use upon read
}
def __init__(self, comodel_name=None, relation=None, column1=None, column2=None, def __init__(self, comodel_name=None, relation=None, column1=None, column2=None,
string=None, **kwargs): string=None, **kwargs):
@ -1746,8 +1815,8 @@ class Many2many(_RelationalMulti):
# if inverse field has already been setup, it is present in m2m # if inverse field has already been setup, it is present in m2m
invf = m2m.get((self.relation, self.column2, self.column1)) invf = m2m.get((self.relation, self.column2, self.column1))
if invf: if invf:
self.inverse_fields.append(invf) self.inverse_fields += (invf,)
invf.inverse_fields.append(self) invf.inverse_fields += (self,)
else: else:
# add self in m2m, so that its inverse field can find it # add self in m2m, so that its inverse field can find it
m2m[(self.relation, self.column1, self.column2)] = self m2m[(self.relation, self.column1, self.column2)] = self
@ -1768,15 +1837,15 @@ class Serialized(Field):
class Id(Field): class Id(Field):
""" Special case for field 'id'. """ """ Special case for field 'id'. """
store = True type = 'integer'
#: Can't write this! _slots = {
readonly = True 'string': 'ID',
'store': True,
def __init__(self, string=None, **kwargs): 'readonly': True,
super(Id, self).__init__(type='integer', string=string, **kwargs) }
def to_column(self): def to_column(self):
self.column = fields.integer('ID') self.column = fields.integer(self.string)
return self.column return self.column
def __get__(self, record, owner): def __get__(self, record, owner):
@ -1790,7 +1859,7 @@ class Id(Field):
raise TypeError("field 'id' cannot be assigned") raise TypeError("field 'id' cannot be assigned")
# imported here to avoid dependency cycle issues # imported here to avoid dependency cycle issues
from openerp import SUPERUSER_ID from openerp import SUPERUSER_ID, registry
from .exceptions import Warning, AccessError, MissingError from .exceptions import Warning, AccessError, MissingError
from .models import BaseModel, MAGIC_COLUMNS from .models import BaseModel, MAGIC_COLUMNS
from .osv import fields from .osv import fields


@ -336,13 +336,6 @@ class BaseModel(object):
# field_column_obj, origina_parent_model), ... } # field_column_obj, origina_parent_model), ... }
_inherit_fields = {} _inherit_fields = {}
# Mapping field name/column_info object
# This is similar to _inherit_fields but:
# 1. includes self fields,
# 2. uses column_info instead of a triple.
# Warning: _all_columns is deprecated, use _fields instead
_all_columns = {}
_table = None _table = None
_log_create = False _log_create = False
_sql_constraints = [] _sql_constraints = []
@ -491,7 +484,6 @@ class BaseModel(object):
""" """
field = cls._fields.pop(name) field = cls._fields.pop(name)
cls._columns.pop(name, None) cls._columns.pop(name, None)
cls._all_columns.pop(name, None)
if hasattr(cls, name): if hasattr(cls, name):
delattr(cls, name) delattr(cls, name)
return field return field
@ -570,11 +562,22 @@ class BaseModel(object):
""" """
# IMPORTANT: the registry contains an instance for each model. The class # The model's class inherits from cls and the classes of the inherited
# of each model carries inferred metadata that is shared among the # models. All those classes are combined in a flat hierarchy:
# model's instances for this registry, but not among registries. Hence #
# we cannot use that "registry class" for combining model classes by # Model the base class of all models
# inheritance, since it confuses the metadata inference process. # / | \
# cls c2 c1 the classes defined in modules
# \ | /
# ModelClass the final class of the model
# / | \
# model recordset ... the class' instances
#
# The registry contains the instance `model`. Its class, `ModelClass`,
# carries inferred metadata that is shared between all the model's
# instances for this registry only. When we '_inherit' from another
# model, we do not inherit its `ModelClass`, but this class' parents.
# This is a limitation of the inheritance mechanism.
# Keep links to non-inherited constraints in cls; this is useful for # Keep links to non-inherited constraints in cls; this is useful for
# instance when exporting translations # instance when exporting translations
@ -591,65 +594,54 @@ class BaseModel(object):
# determine the module that introduced the model # determine the module that introduced the model
original_module = pool[name]._original_module if name in parents else cls._module original_module = pool[name]._original_module if name in parents else cls._module
# build the class hierarchy for the model # determine all the classes the model should inherit from
bases = [cls]
hierarchy = cls
for parent in parents: for parent in parents:
if parent not in pool: if parent not in pool:
raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n' raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
'You may need to add a dependency on the parent class\' module.' % (name, parent)) 'You may need to add a dependency on the parent class\' module.' % (name, parent))
parent_model = pool[parent] parent_class = type(pool[parent])
bases += parent_class.__bases__
hierarchy = type(name, (hierarchy, parent_class), {'_register': False})
# do no use the class of parent_model, since that class contains # order bases following the mro of class hierarchy
# inferred metadata; use its ancestor instead bases = [base for base in hierarchy.mro() if base in bases]
parent_class = type(parent_model).__base__
inherits = dict(parent_class._inherits) # determine the attributes of the model's class
inherits.update(cls._inherits) inherits = {}
depends = {}
constraints = {}
sql_constraints = []
depends = dict(parent_class._depends) for base in reversed(bases):
for m, fs in cls._depends.iteritems(): inherits.update(base._inherits)
depends[m] = depends.get(m, []) + fs
old_constraints = parent_class._constraints for mname, fnames in base._depends.iteritems():
new_constraints = cls._constraints depends[mname] = depends.get(mname, []) + fnames
# filter out from old_constraints the ones overridden by a
# constraint with the same function name in new_constraints
constraints = new_constraints + [oldc
for oldc in old_constraints
if not any(newc[2] == oldc[2] and same_name(newc[0], oldc[0])
for newc in new_constraints)
]
sql_constraints = cls._sql_constraints + \ for cons in base._constraints:
parent_class._sql_constraints # cons may override a constraint with the same function name
constraints[getattr(cons[0], '__name__', id(cons[0]))] = cons
attrs = { sql_constraints += base._sql_constraints
'_name': name,
'_register': False,
'_inherits': inherits,
'_depends': depends,
'_constraints': constraints,
'_sql_constraints': sql_constraints,
}
cls = type(name, (cls, parent_class), attrs)
# introduce the "registry class" of the model; # build the actual class of the model
# duplicate some attributes so that the ORM can modify them ModelClass = type(name, tuple(bases), {
attrs = {
'_name': name, '_name': name,
'_register': False, '_register': False,
'_columns': None, # recomputed in _setup_fields() '_columns': None, # recomputed in _setup_fields()
'_defaults': None, # recomputed in _setup_base() '_defaults': None, # recomputed in _setup_base()
'_fields': frozendict(), # idem '_fields': frozendict(), # idem
'_inherits': dict(cls._inherits), '_inherits': inherits,
'_depends': dict(cls._depends), '_depends': depends,
'_constraints': list(cls._constraints), '_constraints': constraints.values(),
'_sql_constraints': list(cls._sql_constraints), '_sql_constraints': sql_constraints,
'_original_module': original_module, '_original_module': original_module,
} })
cls = type(cls._name, (cls,), attrs)
# instantiate the model, and initialize it # instantiate the model, and initialize it
model = object.__new__(cls) model = object.__new__(ModelClass)
model.__init__(pool, cr) model.__init__(pool, cr)
return model return model
@ -660,8 +652,6 @@ class BaseModel(object):
# process store of low-level function fields # process store of low-level function fields
for fname, column in cls._columns.iteritems(): for fname, column in cls._columns.iteritems():
if hasattr(column, 'digits_change'):
column.digits_change(cr)
# filter out existing store about this field # filter out existing store about this field
pool._store_function[cls._name] = [ pool._store_function[cls._name] = [
stored stored
@ -827,9 +817,6 @@ class BaseModel(object):
"TransientModels must have log_access turned on, " \ "TransientModels must have log_access turned on, " \
"in order to implement their access rights policy" "in order to implement their access rights policy"
# prepare ormcache, which must be shared by all instances of the model
cls._ormcache = {}
@api.model @api.model
@ormcache() @ormcache()
def _is_an_ordinary_table(self): def _is_an_ordinary_table(self):
@ -1827,7 +1814,7 @@ class BaseModel(object):
``tools.ormcache`` or ``tools.ormcache_multi``. ``tools.ormcache`` or ``tools.ormcache_multi``.
""" """
try: try:
self._ormcache.clear() self.pool.cache.clear_prefix((self.pool.db_name, self._name))
self.pool._any_cache_cleared = True self.pool._any_cache_cleared = True
except AttributeError: except AttributeError:
pass pass
@ -2901,7 +2888,7 @@ class BaseModel(object):
@classmethod @classmethod
def _inherits_reload(cls): def _inherits_reload(cls):
""" Recompute the _inherit_fields and _all_columns mappings. """ """ Recompute the _inherit_fields mapping. """
cls._inherit_fields = struct = {} cls._inherit_fields = struct = {}
for parent_model, parent_field in cls._inherits.iteritems(): for parent_model, parent_field in cls._inherits.iteritems():
parent = cls.pool[parent_model] parent = cls.pool[parent_model]
@ -2911,19 +2898,17 @@ class BaseModel(object):
for name, source in parent._inherit_fields.iteritems(): for name, source in parent._inherit_fields.iteritems():
struct[name] = (parent_model, parent_field, source[2], source[3]) struct[name] = (parent_model, parent_field, source[2], source[3])
# old-api stuff @property
cls._all_columns = cls._get_column_infos() def _all_columns(self):
""" Returns a dict mapping all fields names (self fields and inherited
@classmethod field via _inherits) to a ``column_info`` object giving detailed column
def _get_column_infos(cls): information. This property is deprecated, use ``_fields`` instead.
"""Returns a dict mapping all fields names (direct fields and """
inherited field via _inherits) to a ``column_info`` struct
giving detailed columns """
result = {} result = {}
# do not inverse for loops, since local fields may hide inherited ones! # do not inverse for loops, since local fields may hide inherited ones!
for k, (parent, m2o, col, original_parent) in cls._inherit_fields.iteritems(): for k, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
result[k] = fields.column_info(k, col, parent, m2o, original_parent) result[k] = fields.column_info(k, col, parent, m2o, original_parent)
for k, col in cls._columns.iteritems(): for k, col in self._columns.iteritems():
result[k] = fields.column_info(k, col) result[k] = fields.column_info(k, col)
return result return result
@ -3000,14 +2985,15 @@ class BaseModel(object):
if column: if column:
cls._columns[name] = column cls._columns[name] = column
# group fields by compute to determine field.computed_fields # determine field.computed_fields
fields_by_compute = defaultdict(list) computed_fields = defaultdict(list)
for field in cls._fields.itervalues(): for field in cls._fields.itervalues():
if field.compute: if field.compute:
field.computed_fields = fields_by_compute[field.compute] computed_fields[field.compute].append(field)
field.computed_fields.append(field)
else: for fields in computed_fields.itervalues():
field.computed_fields = [] for field in fields:
field.computed_fields = fields
@api.model @api.model
def _setup_complete(self): def _setup_complete(self):
@ -3025,9 +3011,10 @@ class BaseModel(object):
model = self.env[model_name] model = self.env[model_name]
for field_name in field_names: for field_name in field_names:
field = model._fields[field_name] field = model._fields[field_name]
field._triggers.update(triggers) for trigger in triggers:
field.add_trigger(trigger)
# determine old-api cls._inherit_fields and cls._all_columns # determine old-api structures about inherited fields
cls._inherits_reload() cls._inherits_reload()
# register stuff about low-level function fields # register stuff about low-level function fields


@@ -104,6 +104,10 @@ class Registry(Mapping):
         """ Same as ``self[model_name]``. """
         return self.models[model_name]

+    @lazy_property
+    def cache(self):
+        return RegistryManager.cache
+
     @lazy_property
     def pure_function_fields(self):
         """ Return the list of pure function fields (field objects) """
@@ -287,6 +291,7 @@ class RegistryManager(object):

     """
     _registries = None
+    _cache = None
     _lock = threading.RLock()
     _saved_lock = None

@@ -300,13 +305,26 @@ class RegistryManager(object):
                         # cannot specify the memory limit soft on windows...
                         size = 42
                     else:
-                        # On average, a clean registry take 25MB of memory + cache
-                        avgsz = 30 * 1024 * 1024
+                        # A registry takes 10MB of memory on average, so we reserve
+                        # 10Mb (registry) + 5Mb (working memory) per registry
+                        avgsz = 15 * 1024 * 1024
                         size = int(config['limit_memory_soft'] / avgsz)
                 cls._registries = LRU(size)
         return cls._registries

+    @classproperty
+    def cache(cls):
+        """ Return the global LRU ormcache. Its keys are tuples with the
+        following structure: (db_name, model_name, method, args...).
+        """
+        with cls.lock():
+            if cls._cache is None:
+                # we allocate 8192 cache entries per registry
+                size = 8192 * cls.registries.count
+                cls._cache = LRU(size)
+            return cls._cache
+
     @classmethod
     def lock(cls):
         """ Return the current registry lock. """


@ -48,10 +48,11 @@ from psycopg2 import Binary
import openerp import openerp
import openerp.tools as tools import openerp.tools as tools
from openerp.tools.translate import _ from openerp.tools.translate import _
from openerp.tools import float_round, float_repr from openerp.tools import float_repr, float_round, frozendict, html_sanitize
from openerp.tools import html_sanitize
import simplejson import simplejson
from openerp import SUPERUSER_ID from openerp import SUPERUSER_ID, registry
EMPTY_DICT = frozendict()
_logger = logging.getLogger(__name__) _logger = logging.getLogger(__name__)
@ -73,7 +74,6 @@ class _column(object):
_classic_read = True _classic_read = True
_classic_write = True _classic_write = True
_auto_join = False _auto_join = False
_prefetch = True
_properties = False _properties = False
_type = 'unknown' _type = 'unknown'
_obj = None _obj = None
@ -84,59 +84,64 @@ class _column(object):
_symbol_get = None _symbol_get = None
_deprecated = False _deprecated = False
copy = True # whether value is copied by BaseModel.copy() __slots__ = [
string = None 'copy', # whether value is copied by BaseModel.copy()
help = "" 'string',
required = False 'help',
readonly = False 'required',
_domain = [] 'readonly',
_context = {} '_domain',
states = None '_context',
priority = 0 'states',
change_default = False 'priority',
size = None 'change_default',
ondelete = None 'size',
translate = False 'ondelete',
select = False 'translate',
manual = False 'select',
write = False 'manual',
read = False 'write',
selectable = True 'read',
group_operator = False 'selectable',
groups = False # CSV list of ext IDs of groups 'group_operator',
deprecated = False # Optional deprecation warning 'groups', # CSV list of ext IDs of groups
'deprecated', # Optional deprecation warning
'_args',
'_prefetch',
]
def __init__(self, string='unknown', required=False, readonly=False, domain=None, context=None, states=None, priority=0, change_default=False, size=None, ondelete=None, translate=False, select=False, manual=False, **args): def __init__(self, string='unknown', required=False, readonly=False, domain=[], context={}, states=None, priority=0, change_default=False, size=None, ondelete=None, translate=False, select=False, manual=False, **args):
""" """
The 'manual' keyword argument specifies if the field is a custom one. The 'manual' keyword argument specifies if the field is a custom one.
It corresponds to the 'state' column in ir_model_fields. It corresponds to the 'state' column in ir_model_fields.
""" """
args0 = { # add parameters and default values
'string': string, args['copy'] = args.get('copy', True)
'help': args.pop('help', None), args['string'] = string
'required': required, args['help'] = args.get('help', '')
'readonly': readonly, args['required'] = required
'_domain': domain, args['readonly'] = readonly
'_context': context, args['_domain'] = domain
'states': states, args['_context'] = context
'priority': priority, args['states'] = states
'change_default': change_default, args['priority'] = priority
'size': size, args['change_default'] = change_default
'ondelete': ondelete.lower() if ondelete else None, args['size'] = size
'translate': translate, args['ondelete'] = ondelete.lower() if ondelete else None
'select': select, args['translate'] = translate
'manual': manual, args['select'] = select
'group_operator': args.pop('group_operator', None), args['manual'] = manual
'groups': args.pop('groups', None), args['write'] = args.get('write', False)
'deprecated': args.pop('deprecated', None), args['read'] = args.get('read', False)
} args['selectable'] = args.get('selectable', True)
for key, val in args0.iteritems(): args['group_operator'] = args.get('group_operator', None)
if val: args['groups'] = args.get('groups', None)
setattr(self, key, val) args['deprecated'] = args.get('deprecated', None)
args['_prefetch'] = args.get('_prefetch', True)
self._args = args self._args = EMPTY_DICT
for key, val in args.iteritems(): for key, val in args.iteritems():
setattr(self, key, val) setattr(self, key, val)
@ -144,6 +149,30 @@ class _column(object):
if not self._classic_write or self.deprecated or self.manual: if not self._classic_write or self.deprecated or self.manual:
self._prefetch = False self._prefetch = False
def __getattr__(self, name):
""" Access a non-slot attribute. """
try:
return self._args[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
""" Set a slot or non-slot attribute. """
try:
object.__setattr__(self, name, value)
except AttributeError:
if self._args:
self._args[name] = value
else:
self._args = {name: value} # replace EMPTY_DICT
def __delattr__(self, name):
""" Remove a non-slot attribute. """
try:
del self._args[name]
except KeyError:
raise AttributeError(name)
def new(self, _computed_field=False, **args): def new(self, _computed_field=False, **args):
""" Return a column like `self` with the given parameters; the parameter """ Return a column like `self` with the given parameters; the parameter
`_computed_field` tells whether the corresponding field is computed. `_computed_field` tells whether the corresponding field is computed.
@ -223,6 +252,7 @@ class boolean(_column):
_symbol_c = '%s' _symbol_c = '%s'
_symbol_f = bool _symbol_f = bool
_symbol_set = (_symbol_c, _symbol_f) _symbol_set = (_symbol_c, _symbol_f)
__slots__ = []
def __init__(self, string='unknown', required=False, **args): def __init__(self, string='unknown', required=False, **args):
super(boolean, self).__init__(string=string, required=required, **args) super(boolean, self).__init__(string=string, required=required, **args)
@ -238,6 +268,7 @@ class integer(_column):
_symbol_f = lambda x: int(x or 0) _symbol_f = lambda x: int(x or 0)
_symbol_set = (_symbol_c, _symbol_f) _symbol_set = (_symbol_c, _symbol_f)
_symbol_get = lambda self,x: x or 0 _symbol_get = lambda self,x: x or 0
__slots__ = []
def __init__(self, string='unknown', required=False, **args): def __init__(self, string='unknown', required=False, **args):
super(integer, self).__init__(string=string, required=required, **args) super(integer, self).__init__(string=string, required=required, **args)
@ -245,6 +276,7 @@ class integer(_column):
class reference(_column): class reference(_column):
_type = 'reference' _type = 'reference'
_classic_read = False # post-process to handle missing target _classic_read = False # post-process to handle missing target
__slots__ = ['selection']
def __init__(self, string, selection, size=None, **args): def __init__(self, string, selection, size=None, **args):
if callable(selection): if callable(selection):
@ -297,6 +329,7 @@ def _symbol_set_char(self, symb):
class char(_column): class char(_column):
_type = 'char' _type = 'char'
__slots__ = ['_symbol_f', '_symbol_set', '_symbol_set_char']
def __init__(self, string="unknown", size=None, **args): def __init__(self, string="unknown", size=None, **args):
_column.__init__(self, string=string, size=size or None, **args) _column.__init__(self, string=string, size=size or None, **args)
@ -306,11 +339,13 @@ class char(_column):
class text(_column): class text(_column):
_type = 'text' _type = 'text'
__slots__ = []
class html(text): class html(text):
_type = 'html' _type = 'html'
_symbol_c = '%s' _symbol_c = '%s'
__slots__ = ['_sanitize', '_strip_style', '_symbol_f', '_symbol_set']
def _symbol_set_html(self, value): def _symbol_set_html(self, value):
if value is None or value is False: if value is None or value is False:
@ -334,39 +369,47 @@ class html(text):
import __builtin__ import __builtin__
def _symbol_set_float(self, x):
result = __builtin__.float(x or 0.0)
digits = self.digits
if digits:
precision, scale = digits
result = float_repr(float_round(result, precision_digits=scale), precision_digits=scale)
return result
class float(_column): class float(_column):
_type = 'float' _type = 'float'
_symbol_c = '%s' _symbol_c = '%s'
_symbol_f = lambda x: __builtin__.float(x or 0.0)
_symbol_set = (_symbol_c, _symbol_f)
_symbol_get = lambda self,x: x or 0.0 _symbol_get = lambda self,x: x or 0.0
__slots__ = ['_digits', '_digits_compute', '_symbol_f', '_symbol_set']
@property
def digits(self):
if self._digits_compute:
with registry().cursor() as cr:
return self._digits_compute(cr)
else:
return self._digits
def __init__(self, string='unknown', digits=None, digits_compute=None, required=False, **args): def __init__(self, string='unknown', digits=None, digits_compute=None, required=False, **args):
_column.__init__(self, string=string, required=required, **args) _column.__init__(self, string=string, required=required, **args)
self.digits = digits
# synopsis: digits_compute(cr) -> (precision, scale) # synopsis: digits_compute(cr) -> (precision, scale)
self.digits_compute = digits_compute self._digits = digits
self._digits_compute = digits_compute
def new(self, _computed_field=False, **args): self._symbol_f = lambda x: _symbol_set_float(self, x)
# float columns are database-dependent, so always recreate them self._symbol_set = (self._symbol_c, self._symbol_f)
return type(self)(**args)
def to_field_args(self): def to_field_args(self):
args = super(float, self).to_field_args() args = super(float, self).to_field_args()
args['digits'] = self.digits_compute or self.digits args['digits'] = self._digits_compute or self._digits
return args return args
def digits_change(self, cr): def digits_change(self, cr):
if self.digits_compute: pass
self.digits = self.digits_compute(cr)
if self.digits:
precision, scale = self.digits
self._symbol_set = ('%s', lambda x: float_repr(float_round(__builtin__.float(x or 0.0),
precision_digits=scale),
precision_digits=scale))
class date(_column): class date(_column):
_type = 'date' _type = 'date'
__slots__ = []
MONTHS = [ MONTHS = [
('01', 'January'), ('01', 'January'),
@ -457,6 +500,7 @@ class date(_column):
class datetime(_column): class datetime(_column):
_type = 'datetime' _type = 'datetime'
__slots__ = []
MONTHS = [ MONTHS = [
('01', 'January'), ('01', 'January'),
@ -526,7 +570,7 @@ class datetime(_column):
class binary(_column): class binary(_column):
_type = 'binary' _type = 'binary'
_symbol_c = '%s' _classic_read = False
# Binary values may be byte strings (python 2.6 byte array), but # Binary values may be byte strings (python 2.6 byte array), but
# the legacy OpenERP convention is to transfer and store binaries # the legacy OpenERP convention is to transfer and store binaries
@ -534,17 +578,16 @@ class binary(_column):
# unicode in some circumstances, hence the str() cast in symbol_f. # unicode in some circumstances, hence the str() cast in symbol_f.
# This str coercion will only work for pure ASCII unicode strings, # This str coercion will only work for pure ASCII unicode strings,
# on purpose - non base64 data must be passed as a 8bit byte strings. # on purpose - non base64 data must be passed as a 8bit byte strings.
_symbol_c = '%s'
_symbol_f = lambda symb: symb and Binary(str(symb)) or None _symbol_f = lambda symb: symb and Binary(str(symb)) or None
_symbol_set = (_symbol_c, _symbol_f) _symbol_set = (_symbol_c, _symbol_f)
_symbol_get = lambda self, x: x and str(x) _symbol_get = lambda self, x: x and str(x)
_classic_read = False __slots__ = ['filters']
_prefetch = False
def __init__(self, string='unknown', filters=None, **args): def __init__(self, string='unknown', filters=None, **args):
_column.__init__(self, string=string, **args) args['_prefetch'] = args.get('_prefetch', False)
self.filters = filters _column.__init__(self, string=string, filters=filters, **args)
def get(self, cr, obj, ids, name, user=None, context=None, values=None): def get(self, cr, obj, ids, name, user=None, context=None, values=None):
if not context: if not context:
@ -572,13 +615,13 @@ class binary(_column):
class selection(_column): class selection(_column):
_type = 'selection' _type = 'selection'
__slots__ = ['selection']
def __init__(self, selection, string='unknown', **args): def __init__(self, selection, string='unknown', **args):
if callable(selection): if callable(selection):
from openerp import api from openerp import api
selection = api.expected(api.cr_uid_context, selection) selection = api.expected(api.cr_uid_context, selection)
_column.__init__(self, string=string, **args) _column.__init__(self, string=string, selection=selection, **args)
self.selection = selection
def to_field_args(self): def to_field_args(self):
args = super(selection, self).to_field_args() args = super(selection, self).to_field_args()
@ -639,9 +682,10 @@ class many2one(_column):
_symbol_f = lambda x: x or None _symbol_f = lambda x: x or None
_symbol_set = (_symbol_c, _symbol_f) _symbol_set = (_symbol_c, _symbol_f)
ondelete = 'set null' __slots__ = ['_obj', '_auto_join']
def __init__(self, obj, string='unknown', auto_join=False, **args): def __init__(self, obj, string='unknown', auto_join=False, **args):
args['ondelete'] = args.get('ondelete', 'set null')
_column.__init__(self, string=string, **args) _column.__init__(self, string=string, **args)
self._obj = obj self._obj = obj
self._auto_join = auto_join self._auto_join = auto_join
@ -687,13 +731,14 @@ class many2one(_column):
class one2many(_column): class one2many(_column):
_classic_read = False _classic_read = False
_classic_write = False _classic_write = False
_prefetch = False
_type = 'one2many' _type = 'one2many'
# one2many columns are not copied by default __slots__ = ['_obj', '_fields_id', '_limit', '_auto_join']
copy = False
def __init__(self, obj, fields_id, string='unknown', limit=None, auto_join=False, **args): def __init__(self, obj, fields_id, string='unknown', limit=None, auto_join=False, **args):
# one2many columns are not copied by default
args['copy'] = args.get('copy', False)
args['_prefetch'] = args.get('_prefetch', False)
_column.__init__(self, string=string, **args) _column.__init__(self, string=string, **args)
self._obj = obj self._obj = obj
self._fields_id = fields_id self._fields_id = fields_id
@ -834,12 +879,14 @@ class many2many(_column):
""" """
_classic_read = False _classic_read = False
_classic_write = False _classic_write = False
_prefetch = False
_type = 'many2many' _type = 'many2many'
__slots__ = ['_obj', '_rel', '_id1', '_id2', '_limit', '_auto_join']
def __init__(self, obj, rel=None, id1=None, id2=None, string='unknown', limit=None, **args): def __init__(self, obj, rel=None, id1=None, id2=None, string='unknown', limit=None, **args):
""" """
""" """
args['_prefetch'] = args.get('_prefetch', False)
_column.__init__(self, string=string, **args) _column.__init__(self, string=string, **args)
self._obj = obj self._obj = obj
if rel and '.' in rel: if rel and '.' in rel:
@ -849,6 +896,7 @@ class many2many(_column):
self._id1 = id1 self._id1 = id1
self._id2 = id2 self._id2 = id2
self._limit = limit self._limit = limit
self._auto_join = False
def to_field_args(self): def to_field_args(self):
args = super(many2many, self).to_field_args() args = super(many2many, self).to_field_args()
@@ -1231,44 +1279,80 @@ class function(_column):
     }
     """
-    _classic_read = False
-    _classic_write = False
-    _prefetch = False
-    _type = 'function'
     _properties = True
-    # function fields are not copied by default
-    copy = False
+    __slots__ = [
+        '_type',
+        '_classic_read',
+        '_classic_write',
+        '_symbol_c',
+        '_symbol_f',
+        '_symbol_set',
+        '_symbol_get',
+        '_fnct',
+        '_arg',
+        '_fnct_inv',
+        '_fnct_inv_arg',
+        '_fnct_search',
+        '_multi',
+        'store',
+        '_digits',
+        '_digits_compute',
+        'selection',
+        '_obj',
+    ]
+
+    @property
+    def digits(self):
+        if self._digits_compute:
+            with registry().cursor() as cr:
+                return self._digits_compute(cr)
+        else:
+            return self._digits

     #
     # multi: compute several fields in one call
     #
     def __init__(self, fnct, arg=None, fnct_inv=None, fnct_inv_arg=None, type='float', fnct_search=None, obj=None, store=False, multi=False, **args):
+        self._classic_read = False
+        self._classic_write = False
+        self._prefetch = False
+        self._symbol_c = '%s'
+        self._symbol_f = _symbol_set
+        self._symbol_set = (self._symbol_c, self._symbol_f)
+        self._symbol_get = None
+
+        # pop attributes that should not be assigned to self
+        self._digits = args.pop('digits', (16,2))
+        self._digits_compute = args.pop('digits_compute', None)
+        self._obj = args.pop('relation', obj)
+
+        # function fields are not copied by default
+        args['copy'] = args.get('copy', False)
+
         _column.__init__(self, **args)
-        self._obj = obj
-        self._fnct = fnct
-        self._fnct_inv = fnct_inv
-        self._arg = arg
-        self._multi = multi
-        if 'relation' in args:
-            self._obj = args['relation']
-
-        self.digits = args.get('digits', (16,2))
-        self.digits_compute = args.get('digits_compute', None)
-        if callable(args.get('selection')):
-            from openerp import api
-            self.selection = api.expected(api.cr_uid_context, args['selection'])
-
-        self._fnct_inv_arg = fnct_inv_arg
-        if not fnct_inv:
-            self.readonly = 1
+
         self._type = type
+        self._fnct = fnct
+        self._arg = arg
+        self._fnct_inv = fnct_inv
+        self._fnct_inv_arg = fnct_inv_arg
         self._fnct_search = fnct_search
         self.store = store
+        self._multi = multi
+
+        if not fnct_inv:
+            self.readonly = 1

         if not fnct_search and not store:
             self.selectable = False

+        if callable(args.get('selection')):
+            from openerp import api
+            self.selection = api.expected(api.cr_uid_context, args['selection'])
+
         if store:
             if self._type != 'many2one':
                 # m2o fields need to return tuples with name_get, not just foreign keys
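Because `function` now declares `__slots__`, the attributes that used to be class-level defaults (`_classic_read`, the `_symbol_*` entries, and so on) are assigned per instance, and configuration keywords such as `digits`, `digits_compute` and `relation` are popped out of `args` before `_column.__init__` sees them, so they land in slots instead of a keyword dict. A sketch of that pop-then-forward pattern (hypothetical `Base`/`Derived` classes, not the real `_column`/`function`):

    class Base(object):
        __slots__ = ['string']
        def __init__(self, **kwargs):
            self.string = kwargs.get('string', 'unknown')

    class Derived(Base):
        __slots__ = ['_digits', '_obj']
        def __init__(self, **kwargs):
            self._digits = kwargs.pop('digits', (16, 2))   # consumed here
            self._obj = kwargs.pop('relation', None)       # consumed here
            super(Derived, self).__init__(**kwargs)        # the rest goes to the parent

    d = Derived(string='Amount', digits=(16, 4), relation='res.currency')
    print((d.string, d._digits, d._obj))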
@ -1283,6 +1367,10 @@ class function(_column):
self._symbol_c = char._symbol_c self._symbol_c = char._symbol_c
self._symbol_f = lambda x: _symbol_set_char(self, x) self._symbol_f = lambda x: _symbol_set_char(self, x)
self._symbol_set = (self._symbol_c, self._symbol_f) self._symbol_set = (self._symbol_c, self._symbol_f)
elif type == 'float':
self._symbol_c = float._symbol_c
self._symbol_f = lambda x: _symbol_set_float(self, x)
self._symbol_set = (self._symbol_c, self._symbol_f)
else: else:
type_class = globals().get(type) type_class = globals().get(type)
if type_class is not None: if type_class is not None:
@ -1304,7 +1392,7 @@ class function(_column):
args = super(function, self).to_field_args() args = super(function, self).to_field_args()
args['store'] = bool(self.store) args['store'] = bool(self.store)
if self._type in ('float',): if self._type in ('float',):
args['digits'] = self.digits_compute or self.digits args['digits'] = self._digits_compute or self._digits
elif self._type in ('selection', 'reference'): elif self._type in ('selection', 'reference'):
args['selection'] = self.selection args['selection'] = self.selection
elif self._type in ('many2one', 'one2many', 'many2many'): elif self._type in ('many2one', 'one2many', 'many2many'):
@@ -1312,14 +1400,7 @@ class function(_column):
         return args

     def digits_change(self, cr):
-        if self._type == 'float':
-            if self.digits_compute:
-                self.digits = self.digits_compute(cr)
-            if self.digits:
-                precision, scale = self.digits
-                self._symbol_set = ('%s', lambda x: float_repr(float_round(__builtin__.float(x or 0.0),
-                                                                           precision_digits=scale),
-                                                               precision_digits=scale))
+        pass

     def search(self, cr, uid, obj, name, args, context=None):
         if not self._fnct_search:
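`digits_change()` becomes a no-op because nothing needs to patch the column in place any more: `digits` is resolved on demand by the new property (opening a registry cursor only when a `digits_compute` callback exists), and rounding moves to the float symbol set installed in `__init__`. The helper `_symbol_set_float` referenced above is added elsewhere in this changeset and is not shown here; a plausible sketch of what such a function does, reusing `float_round`/`float_repr` from the deleted code:

    from openerp.tools.float_utils import float_repr, float_round

    def symbol_set_float(column, value):
        # inside openerp/osv/fields.py the builtin would be reached via
        # __builtin__.float, because `float` names a column class there
        result = float(value or 0.0)
        digits = column.digits            # resolved on demand by the property above
        if digits:
            precision, scale = digits
            return float_repr(float_round(result, precision_digits=scale),
                              precision_digits=scale)
        return result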
@@ -1407,14 +1488,15 @@ class related(function):
         'bar': fields.related('foo_id', 'frol', type='char', string='Frol of Foo'),
     }
     """
+    __slots__ = ['arg', '_relations']

-    def _fnct_search(self, tobj, cr, uid, obj=None, name=None, domain=None, context=None):
+    def _related_search(self, tobj, cr, uid, obj=None, name=None, domain=None, context=None):
         # assume self._arg = ('foo', 'bar', 'baz')
         # domain = [(name, op, val)] => search [('foo.bar.baz', op, val)]
         field = '.'.join(self._arg)
         return map(lambda x: (field, x[1], x[2]), domain)

-    def _fnct_write(self, obj, cr, uid, ids, field_name, values, args, context=None):
+    def _related_write(self, obj, cr, uid, ids, field_name, values, args, context=None):
         if isinstance(ids, (int, long)):
             ids = [ids]
         for instance in obj.browse(cr, uid, ids, context=context):
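A plausible reason for the renames in this class is the `__slots__` declaration on `function`: names such as `_fnct_search` are now slot descriptors that `function.__init__` assigns to, so a subclass method with the same name would shadow the slot and break that assignment. A small standalone sketch of the clash (hypothetical `Parent`/`Good`/`Bad` classes):

    class Parent(object):
        __slots__ = ['_fnct_search']
        def __init__(self, fnct_search):
            self._fnct_search = fnct_search

    class Good(Parent):
        __slots__ = []
        def _good_search(self):       # distinct name, no shadowing
            return []

    class Bad(Parent):
        __slots__ = []
        def _fnct_search(self):       # shadows the inherited slot descriptor
            return []

    Good(fnct_search=None)            # works: the slot handles the assignment
    try:
        Bad(fnct_search=None)
    except AttributeError as exc:     # the slot can no longer be written
        print(exc)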
@@ -1425,7 +1507,7 @@ class related(function):
                 # write on the last field of the target record
                 instance.write({self.arg[-1]: values})

-    def _fnct_read(self, obj, cr, uid, ids, field_name, args, context=None):
+    def _related_read(self, obj, cr, uid, ids, field_name, args, context=None):
         res = {}
         for record in obj.browse(cr, SUPERUSER_ID, ids, context=context):
             value = record
@@ -1452,13 +1534,14 @@ class related(function):
     def __init__(self, *arg, **args):
         self.arg = arg
         self._relations = []
-        super(related, self).__init__(self._fnct_read, arg, self._fnct_write, fnct_inv_arg=arg, fnct_search=self._fnct_search, **args)
+        super(related, self).__init__(self._related_read, arg, self._related_write, fnct_inv_arg=arg, fnct_search=self._related_search, **args)
         if self.store is True:
             # TODO: improve here to change self.store = {...} according to related objects
             pass

 class sparse(function):
+    __slots__ = ['serialization_field']

     def convert_value(self, obj, cr, uid, record, value, read_value, context=None):
         """
@@ -1507,8 +1590,7 @@ class sparse(function):
                 return read_value
         return value

-
-    def _fnct_write(self,obj,cr, uid, ids, field_name, value, args, context=None):
+    def _sparse_write(self,obj,cr, uid, ids, field_name, value, args, context=None):
         if not type(ids) == list:
             ids = [ids]
         records = obj.browse(cr, uid, ids, context=context)
@@ -1523,7 +1605,7 @@ class sparse(function):
         obj.write(cr, uid, ids, {self.serialization_field: serialized}, context=context)
         return True

-    def _fnct_read(self, obj, cr, uid, ids, field_names, args, context=None):
+    def _sparse_read(self, obj, cr, uid, ids, field_names, args, context=None):
         results = {}
         records = obj.browse(cr, uid, ids, context=context)
         for record in records:
@@ -1549,8 +1631,7 @@ class sparse(function):
     def __init__(self, serialization_field, **kwargs):
         self.serialization_field = serialization_field
-        super(sparse, self).__init__(self._fnct_read, fnct_inv=self._fnct_write, multi='__sparse_multi', **kwargs)
+        super(sparse, self).__init__(self._sparse_read, fnct_inv=self._sparse_write, multi='__sparse_multi', **kwargs)

 # ---------------------------------------------------------
@@ -1558,19 +1639,21 @@ class sparse(function):
 # ---------------------------------------------------------

 class dummy(function):
-    def _fnct_search(self, tobj, cr, uid, obj=None, name=None, domain=None, context=None):
+    __slots__ = ['arg', '_relations']
+
+    def _dummy_search(self, tobj, cr, uid, obj=None, name=None, domain=None, context=None):
         return []

-    def _fnct_write(self, obj, cr, uid, ids, field_name, values, args, context=None):
+    def _dummy_write(self, obj, cr, uid, ids, field_name, values, args, context=None):
         return False

-    def _fnct_read(self, obj, cr, uid, ids, field_name, args, context=None):
+    def _dummy_read(self, obj, cr, uid, ids, field_name, args, context=None):
         return {}

     def __init__(self, *arg, **args):
         self.arg = arg
         self._relations = []
-        super(dummy, self).__init__(self._fnct_read, arg, self._fnct_write, fnct_inv_arg=arg, fnct_search=self._fnct_search, **args)
+        super(dummy, self).__init__(self._dummy_read, arg, self._dummy_write, fnct_inv_arg=arg, fnct_search=self._dummy_search, **args)

 # ---------------------------------------------------------
 # Serialized fields
@@ -1581,42 +1664,46 @@ class serialized(_column):
     Note: only plain components allowed.
     """
+    _type = 'serialized'
+    __slots__ = []

     def _symbol_set_struct(val):
         return simplejson.dumps(val)

     def _symbol_get_struct(self, val):
         return simplejson.loads(val or '{}')

-    _prefetch = False
-    _type = 'serialized'
     _symbol_c = '%s'
     _symbol_f = _symbol_set_struct
     _symbol_set = (_symbol_c, _symbol_f)
     _symbol_get = _symbol_get_struct

+    def __init__(self, *args, **kwargs):
+        kwargs['_prefetch'] = kwargs.get('_prefetch', False)
+        super(serialized, self).__init__(*args, **kwargs)
+
 # TODO: review completly this class for speed improvement
 class property(function):
+    __slots__ = []

     def to_field_args(self):
         args = super(property, self).to_field_args()
         args['company_dependent'] = True
         return args

-    def _fnct_search(self, tobj, cr, uid, obj, name, domain, context=None):
+    def _property_search(self, tobj, cr, uid, obj, name, domain, context=None):
         ir_property = obj.pool['ir.property']
         result = []
         for field, operator, value in domain:
             result += ir_property.search_multi(cr, uid, name, tobj._name, operator, value, context=context)
         return result

-    def _fnct_write(self, obj, cr, uid, id, prop_name, value, obj_dest, context=None):
+    def _property_write(self, obj, cr, uid, id, prop_name, value, obj_dest, context=None):
         ir_property = obj.pool['ir.property']
         ir_property.set_multi(cr, uid, prop_name, obj._name, {id: value}, context=context)
         return True

-    def _fnct_read(self, obj, cr, uid, ids, prop_names, obj_dest, context=None):
+    def _property_read(self, obj, cr, uid, ids, prop_names, obj_dest, context=None):
         ir_property = obj.pool['ir.property']
         res = {id: {} for id in ids}
@@ -1645,9 +1732,9 @@ class property(function):
         args = dict(args)
         args['obj'] = args.pop('relation', '') or args.get('obj', '')
         super(property, self).__init__(
-            fnct=self._fnct_read,
-            fnct_inv=self._fnct_write,
-            fnct_search=self._fnct_search,
+            fnct=self._property_read,
+            fnct_inv=self._property_write,
+            fnct_search=self._property_search,
             multi='properties',
             **args
         )
@@ -1681,6 +1768,8 @@ class column_info(object):
         contains it i.e in case of multilevel inheritance, ``None`` for
         local columns.
     """
+    __slots__ = ['name', 'column', 'parent_model', 'parent_column', 'original_parent']
+
     def __init__(self, name, column, parent_model=None, parent_column=None, original_parent=None):
         self.name = name
         self.column = column

@@ -38,7 +38,7 @@ import openerp
 from openerp.modules.registry import RegistryManager
 from openerp.release import nt_service_name
 import openerp.tools.config as config
-from openerp.tools.misc import stripped_sys_argv, dumpstacks
+from openerp.tools import stripped_sys_argv, dumpstacks, log_ormcache_stats

 _logger = logging.getLogger(__name__)
@@ -296,6 +296,7 @@ class ThreadedServer(CommonServer):
             signal.signal(signal.SIGCHLD, self.signal_handler)
             signal.signal(signal.SIGHUP, self.signal_handler)
             signal.signal(signal.SIGQUIT, dumpstacks)
+            signal.signal(signal.SIGUSR1, log_ormcache_stats)
         elif os.name == 'nt':
             import win32api
             win32api.SetConsoleCtrlHandler(lambda sig: self.signal_handler(sig, None), 1)
@@ -389,6 +390,7 @@ class GeventServer(CommonServer):
         if os.name == 'posix':
             signal.signal(signal.SIGQUIT, dumpstacks)
+            signal.signal(signal.SIGUSR1, log_ormcache_stats)

         gevent.spawn(self.watch_parent)
         self.httpd = WSGIServer((self.interface, self.port), self.app)
@@ -510,6 +512,9 @@ class PreforkServer(CommonServer):
         elif sig == signal.SIGQUIT:
             # dump stacks on kill -3
             self.dumpstacks()
+        elif sig == signal.SIGUSR1:
+            # log ormcache stats on kill -SIGUSR1
+            log_ormcache_stats()
         elif sig == signal.SIGTTIN:
             # increase number of workers
             self.population += 1
@@ -586,6 +591,7 @@ class PreforkServer(CommonServer):
         signal.signal(signal.SIGTTIN, self.signal_handler)
         signal.signal(signal.SIGTTOU, self.signal_handler)
         signal.signal(signal.SIGQUIT, dumpstacks)
+        signal.signal(signal.SIGUSR1, log_ormcache_stats)
         # listen to socket
         self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
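With SIGUSR1 wired into the threaded, gevent and prefork servers alike, cache statistics can be pulled from a running instance without a restart: the handler calls `log_ormcache_stats()` and the figures show up in the server log. A small helper sketch (the pidfile path is an assumption, adjust it to your deployment; the signal can equally be sent with `kill -USR1 <pid>`):

    import os
    import signal

    def dump_cache_stats(pidfile='/var/run/odoo.pid'):   # path is an assumption
        with open(pidfile) as f:
            pid = int(f.read().strip())
        os.kill(pid, signal.SIGUSR1)   # triggers log_ormcache_stats in the server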

@@ -21,13 +21,29 @@
 # decorator makes wrappers that have the same API as their wrapped function;
 # this is important for the openerp.api.guess() that relies on signatures
+from collections import defaultdict
 from decorator import decorator
 from inspect import getargspec
-import lru
 import logging

-logger = logging.getLogger(__name__)
+_logger = logging.getLogger(__name__)
+
+
+class ormcache_counter(object):
+    """ Statistic counters for cache entries. """
+    __slots__ = ['hit', 'miss', 'err']
+
+    def __init__(self):
+        self.hit = 0
+        self.miss = 0
+        self.err = 0
+
+    @property
+    def ratio(self):
+        return 100.0 * self.hit / (self.hit + self.miss or 1)
+
+# statistic counters dictionary, maps (dbname, modelname, method) to counter
+STAT = defaultdict(ormcache_counter)
+

 class ormcache(object):
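Each `(dbname, model name, method)` triple gets its own `ormcache_counter`; because `STAT` is a `defaultdict`, counters appear lazily on first use, and `ratio` guards the empty case with `or 1`. The counter in isolation (standalone sketch with hypothetical keys; the import path assumes this module is `openerp.tools.cache`):

    from collections import defaultdict
    from openerp.tools.cache import ormcache_counter   # path is an assumption

    stats = defaultdict(ormcache_counter)               # same shape as STAT above
    key = ('mydb', 'res.partner', 'name_get')           # hypothetical key
    stats[key].hit += 3
    stats[key].miss += 1
    print(stats[key].ratio)                             # 75.0
    print(stats[('mydb', 'res.users', 'read')].ratio)   # 0.0, never ZeroDivisionError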
@@ -35,10 +51,6 @@ class ormcache(object):
     def __init__(self, skiparg=2, size=8192, multi=None, timeout=None):
         self.skiparg = skiparg
-        self.size = size
-        self.stat_miss = 0
-        self.stat_hit = 0
-        self.stat_err = 0

     def __call__(self, method):
         self.method = method
@@ -46,42 +58,32 @@ class ormcache(object):
         lookup.clear_cache = self.clear
         return lookup

-    def stat(self):
-        return "lookup-stats hit=%s miss=%s err=%s ratio=%.1f" % \
-            (self.stat_hit, self.stat_miss, self.stat_err,
-             (100*float(self.stat_hit))/(self.stat_miss+self.stat_hit))
-
     def lru(self, model):
-        ormcache = model._ormcache
-        try:
-            d = ormcache[self.method]
-        except KeyError:
-            d = ormcache[self.method] = lru.LRU(self.size)
-        return d
+        return model.pool.cache, (model.pool.db_name, model._name, self.method)

     def lookup(self, method, *args, **kwargs):
-        d = self.lru(args[0])
-        key = args[self.skiparg:]
+        d, key0 = self.lru(args[0])
+        key = key0 + args[self.skiparg:]
         try:
             r = d[key]
-            self.stat_hit += 1
+            STAT[key0].hit += 1
             return r
         except KeyError:
-            self.stat_miss += 1
+            STAT[key0].miss += 1
             value = d[key] = self.method(*args, **kwargs)
             return value
         except TypeError:
-            self.stat_err += 1
+            STAT[key0].err += 1
             return self.method(*args, **kwargs)

     def clear(self, model, *args):
         """ Remove *args entry from the cache or all keys if *args is undefined """
-        d = self.lru(model)
+        d, key0 = self.lru(model)
         if args:
-            logger.warn("ormcache.clear arguments are deprecated and ignored "
+            _logger.warn("ormcache.clear arguments are deprecated and ignored "
                         "(while clearing caches on (%s).%s)",
                         model._name, self.method.__name__)
-        d.clear()
+        d.clear_prefix(key0)
         model.pool._any_cache_cleared = True
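This is the heart of the change: instead of one `lru.LRU` per method hanging off `model._ormcache`, every lookup goes to the single registry-wide cache returned by `model.pool.cache`, and the old key is prefixed with `(dbname, model name, method)`. Invalidation keeps its former granularity because `clear()` removes only the keys carrying that prefix. The idea in miniature (a plain dict standing in for the shared LRU, illustrative names only):

    cache = {}

    def put(dbname, model, method, args, value):
        cache[(dbname, model, method) + args] = value

    put('mydb', 'res.partner', 'name_get', (1,), 'Agrolait')
    put('mydb', 'res.partner', 'name_get', (2,), 'ASUSTeK')
    put('mydb', 'res.users', 'context_get', (1,), {'lang': 'en_US'})

    # clear_prefix-style invalidation: only res.partner name_get entries go away
    prefix = ('mydb', 'res.partner', 'name_get')
    for key in list(cache):
        if key[:len(prefix)] == prefix:
            del cache[key]

    print(sorted(cache))   # only the res.users entry is left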
@@ -97,7 +99,7 @@ class ormcache_context(ormcache):
         return super(ormcache_context, self).__call__(method)

     def lookup(self, method, *args, **kwargs):
-        d = self.lru(args[0])
+        d, key0 = self.lru(args[0])

         # Note. The decorator() wrapper (used in __call__ above) will resolve
         # arguments, and pass them positionally to lookup(). This is why context
@@ -109,17 +111,17 @@ class ormcache_context(ormcache):
         ckey = [(k, context[k]) for k in self.accepted_keys if k in context]

         # Beware: do not take the context from args!
-        key = args[self.skiparg:self.context_pos] + tuple(ckey)
+        key = key0 + args[self.skiparg:self.context_pos] + tuple(ckey)
         try:
             r = d[key]
-            self.stat_hit += 1
+            STAT[key0].hit += 1
             return r
         except KeyError:
-            self.stat_miss += 1
+            STAT[key0].miss += 1
             value = d[key] = self.method(*args, **kwargs)
             return value
         except TypeError:
-            self.stat_err += 1
+            STAT[key0].err += 1
             return self.method(*args, **kwargs)
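For context-dependent methods, only the whitelisted keys in `accepted_keys` are folded into the cache key, so results cached under one `lang` are not served for another, while unrelated context keys do not fragment the cache. A standalone sketch of how the key is assembled (illustrative values, hypothetical prefix):

    accepted_keys = ('lang', 'active_test')
    context = {'lang': 'fr_FR', 'tz': 'Europe/Brussels'}

    ckey = [(k, context[k]) for k in accepted_keys if k in context]
    key0 = ('mydb', 'ir.translation', '_get_source')    # hypothetical prefix
    key = key0 + ('label',) + tuple(ckey)
    print(key)   # ('mydb', 'ir.translation', '_get_source', 'label', ('lang', 'fr_FR'))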
@@ -130,8 +132,8 @@ class ormcache_multi(ormcache):
         self.multi = multi

     def lookup(self, method, *args, **kwargs):
-        d = self.lru(args[0])
-        base_key = args[self.skiparg:self.multi] + args[self.multi+1:]
+        d, key0 = self.lru(args[0])
+        base_key = key0 + args[self.skiparg:self.multi] + args[self.multi+1:]
         ids = args[self.multi]
         result = {}
         missed = []
@@ -141,9 +143,9 @@ class ormcache_multi(ormcache):
             key = base_key + (i,)
             try:
                 result[i] = d[key]
-                self.stat_hit += 1
+                STAT[key0].hit += 1
             except Exception:
-                self.stat_miss += 1
+                STAT[key0].miss += 1
                 missed.append(i)

         if missed:
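`ormcache_multi` keeps the id argument out of `base_key` and appends it per record, so a single call can be served partly from the cache and only the missed ids are recomputed. The per-id key layout in miniature (plain dict as the cache, hypothetical values):

    cache = {}
    base_key = ('mydb', 'ir.model.access', 'check')   # hypothetical prefix + args
    cache[base_key + (1,)] = True

    ids, result, missed = [1, 2, 3], {}, []
    for i in ids:
        try:
            result[i] = cache[base_key + (i,)]
        except KeyError:
            missed.append(i)

    print(result)   # {1: True}  -- already cached
    print(missed)   # [2, 3]     -- only these would be recomputed and stored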
@@ -173,6 +175,23 @@ class dummy_cache(object):
         pass


+def log_ormcache_stats(sig=None, frame=None):
+    """ Log statistics of ormcache usage by database, model, and method. """
+    from openerp.modules.registry import RegistryManager
+    import threading
+    me = threading.currentThread()
+    entries = defaultdict(int)
+    for key in RegistryManager.cache.iterkeys():
+        entries[key[:3]] += 1
+    for key, count in sorted(entries.items()):
+        dbname, model_name, method = key
+        me.dbname = dbname
+        stat = STAT[key]
+        _logger.info("%6d entries, %6d hit, %6d miss, %6d err, %4.1f%% ratio, for %s.%s",
+                     count, stat.hit, stat.miss, stat.err, stat.ratio, model_name, method.__name__)
+
+
 # For backward compatibility
 cache = ormcache

@@ -119,4 +119,12 @@ class LRU(object):
             self.first = None
             self.last = None

+    @synchronized()
+    def clear_prefix(self, prefix):
+        """ Remove from `self` all the items with the given `prefix`. """
+        n = len(prefix)
+        for key in self.keys():
+            if key[:n] == prefix:
+                del self[key]
+
 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
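`clear_prefix` iterates over a snapshot from `self.keys()` so entries can be deleted while scanning, and it takes the same lock as the other `LRU` methods because the cache is now shared by every model in a registry. Expected usage (assuming `openerp.tools.lru.LRU` keeps its size-bounded mapping interface, with tuple keys as built by `ormcache` above; the key values are illustrative):

    from openerp.tools.lru import LRU

    cache = LRU(8192)
    cache[('mydb', 'res.partner', 'name_get', 1)] = 'Agrolait'
    cache[('mydb', 'res.users', 'context_get', 1)] = {'lang': 'en_US'}

    cache.clear_prefix(('mydb', 'res.partner', 'name_get'))
    print(len(cache))    # 1: only the res.users entry survives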