[MERGE] trunk's CORS support (merge with changes)

bzr revid: chs@openerp.com-20140130120448-tj9pu30o7dj74zid
bzr revid: fme@openerp.com-20140130125425-j1yn6p4lr7lincr3
Fabien Meghazi 2014-01-30 13:54:25 +01:00
commit 53098c666c
9 changed files with 81 additions and 28 deletions

View File

@@ -14,7 +14,7 @@
                 domain="[('comments', 'like', 'openerp-web')]"/>
             <field name="name" operator="="/>
             <field name="lang"/>
-            <field name="source"/>
+            <field name="src"/>
             <field name="value"/>
         </search>
     </field>

View File

@@ -612,26 +612,36 @@ class res_partner(osv.osv, format_address):
         if not args:
             args = []
         if name and operator in ('=', 'ilike', '=ilike', 'like', '=like'):
+            self.check_access_rights(cr, uid, 'read')
+            where_query = self._where_calc(cr, uid, args, context=context)
+            self._apply_ir_rules(cr, uid, where_query, 'read', context=context)
+            from_clause, where_clause, where_clause_params = where_query.get_sql()
+            where_str = where_clause and (" WHERE %s AND " % where_clause) or ' WHERE '
+
             # search on the name of the contacts and of its company
             search_name = name
             if operator in ('ilike', 'like'):
                 search_name = '%%%s%%' % name
             if operator in ('=ilike', '=like'):
                 operator = operator[1:]
-            query_args = {'name': search_name}
-            query = ('''SELECT id FROM res_partner
-                        WHERE email ''' + operator + ''' %(name)s
-                            OR display_name ''' + operator + ''' %(name)s
-                        ORDER BY display_name
-                     ''')
+
+            query = ('SELECT id FROM res_partner ' +
+                     where_str + '(email ' + operator + ''' %s
+                     OR display_name ''' + operator + ''' %s)
+                     ORDER BY display_name''')
+            where_clause_params += [search_name, search_name]
             if limit:
-                query += ' limit %(limit)s'
-                query_args['limit'] = limit
-            cr.execute(query, query_args)
+                query += ' limit %s'
+                where_clause_params.append(limit)
+            cr.execute(query, where_clause_params)
             ids = map(lambda x: x[0], cr.fetchall())
+            ids = self.search(cr, uid, [('id', 'in', ids)] + args, limit=limit, context=context)
+
             if ids:
                 return self.name_get(cr, uid, ids, context)
+            else:
+                return []
         return super(res_partner,self).name_search(cr, uid, name, args, operator=operator, context=context, limit=limit)

     def find_or_create(self, cr, uid, email, context=None):
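
The rewritten name_search builds its raw SQL on top of the ORM's access machinery instead of querying res_partner unchecked: check_access_rights enforces the model ACL, _where_calc and _apply_ir_rules turn the domain plus the ir.rule record rules into a WHERE clause, and the hand-written email/display_name condition is appended after it. A minimal sketch of that pattern under the old osv API (the helper name _search_by_email is hypothetical, not part of this commit):

    def _search_by_email(self, cr, uid, email, args=None, context=None):
        # model-level ACL check, as in the new name_search
        self.check_access_rights(cr, uid, 'read')
        # build the WHERE clause (domain + ir.rule record rules) with the ORM
        query = self._where_calc(cr, uid, args or [], context=context)
        self._apply_ir_rules(cr, uid, query, 'read', context=context)
        from_clause, where_clause, params = query.get_sql()
        where_str = (" WHERE %s AND " % where_clause) if where_clause else " WHERE "
        # append the custom condition and its parameters after the rule clauses
        cr.execute("SELECT id FROM " + from_clause + where_str + "email ilike %s",
                   params + ['%' + email + '%'])
        return [row[0] for row in cr.fetchall()]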

View File

@@ -39,6 +39,20 @@ class test_base(common.TransactionCase):
         new_id2 = self.res_partner.find_or_create(cr, uid, self.samples[2][0])
         self.assertTrue(new_id2 > new_id, 'find_or_create failed - should have created new one again')

+    def test_15_res_partner_name_search(self):
+        cr, uid = self.cr, self.uid
+        for name, active in [
+            ('"A Raoul Grosbedon" <raoul@chirurgiens-dentistes.fr>', False),
+            ('B Raoul chirurgiens-dentistes.fr', True),
+            ("C Raoul O'hara <!@historicalsociety.museum>", True),
+            ('ryu+giga-Sushi@aizubange.fukushima.jp', True),
+        ]:
+            partner_id, dummy = self.res_partner.name_create(cr, uid, name, context={'default_active': active})
+        partners = self.res_partner.name_search(cr, uid, 'Raoul')
+        self.assertEqual(len(partners), 2, 'Incorrect search number result for name_search')
+        partners = self.res_partner.name_search(cr, uid, 'Raoul', limit=1)
+        self.assertEqual(len(partners), 1, 'Incorrect search number result for name_search with a limit')
+        self.assertEqual(partners[0][1], 'B Raoul chirurgiens-dentistes.fr', 'Incorrect partner returned, should be the first active')
+
     def test_20_res_partner_address_sync(self):
         cr, uid = self.cr, self.uid

View File

@@ -248,6 +248,7 @@ def route(route=None, **kw):
     authentication modules. There request code will not have any facilities to access the database nor have any
     configuration indicating the current database nor the current user.
     :param methods: A sequence of http methods this route applies to. If not specified, all methods are allowed.
+    :param cors: The Access-Control-Allow-Origin cors directive value.
     """
     routing = kw.copy()
     assert not 'type' in routing or routing['type'] in ("http", "json")
@@ -908,7 +909,7 @@ class Root(object):
         self.load_addons()

         _logger.info("Generating nondb routing")
-        self.nodb_routing_map = routing_map(['', "web"], True)
+        self.nodb_routing_map = routing_map([''] + openerp.conf.server_wide_modules, True)

     def __call__(self, environ, start_response):
         """ Handle a WSGI request
@@ -1009,6 +1010,16 @@
         if not explicit_session and hasattr(response, 'set_cookie'):
             response.set_cookie('session_id', httprequest.session.sid, max_age=90 * 24 * 60 * 60)

+        # Support for Cross-Origin Resource Sharing
+        if 'cors' in request.func.routing:
+            response.headers.set('Access-Control-Allow-Origin', request.func.routing['cors'])
+            methods = 'GET, POST'
+            if request.func_request_type == 'json':
+                methods = 'POST'
+            elif request.func.routing.get('methods'):
+                methods = ', '.join(request.func.routing['methods'])
+            response.headers.set('Access-Control-Allow-Methods', methods)
+
         return response

     def dispatch(self, environ, start_response):
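
Together with the cors value documented in the route() docstring above, these hunks let a controller opt in to CORS per route: the dispatcher copies the value into Access-Control-Allow-Origin and derives Access-Control-Allow-Methods from the route type. A hypothetical controller (module path, class and endpoint names are invented for illustration):

    from openerp import http

    class PublicAPI(http.Controller):

        # JSON routes advertise only POST; http routes advertise their declared
        # methods, or 'GET, POST' when no methods are specified on the route.
        @http.route('/my_module/ping', type='json', auth='none', cors='*')
        def ping(self):
            return {'pong': True}

With that declaration, responses from /my_module/ping carry Access-Control-Allow-Origin: * and Access-Control-Allow-Methods: POST.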

View File

@@ -68,8 +68,8 @@ class Registry(Mapping):
         # must be reloaded.
         # The `base_cache_signaling sequence` indicates all caches must be
         # invalidated (i.e. cleared).
-        self.base_registry_signaling_sequence = 1
-        self.base_cache_signaling_sequence = 1
+        self.base_registry_signaling_sequence = None
+        self.base_cache_signaling_sequence = None

         # Flag indicating if at least one model cache has been cleared.
         # Useful only in a multi-process context.
@@ -159,7 +159,7 @@ class Registry(Mapping):
     @classmethod
     def setup_multi_process_signaling(cls, cr):
         if not openerp.multi_process:
-            return
+            return None, None

         # Inter-process signaling:
         # The `base_registry_signaling` sequence indicates the whole registry
@@ -172,6 +172,16 @@ class Registry(Mapping):
             cr.execute("""SELECT nextval('base_registry_signaling')""")
             cr.execute("""CREATE SEQUENCE base_cache_signaling INCREMENT BY 1 START WITH 1""")
             cr.execute("""SELECT nextval('base_cache_signaling')""")
+        cr.execute("""
+                    SELECT base_registry_signaling.last_value,
+                           base_cache_signaling.last_value
+                    FROM base_registry_signaling, base_cache_signaling""")
+        r, c = cr.fetchone()
+        _logger.debug("Multiprocess load registry signaling: [Registry: # %s] "\
+                      "[Cache: # %s]",
+                      r, c)
+        return r, c
+
     @contextmanager
     def cursor(self, auto_commit=True):
@@ -229,6 +239,10 @@ class RegistryManager(object):
             cls.delete(db_name)
             cls.registries[db_name] = registry
             try:
+                with registry.cursor() as cr:
+                    seq_registry, seq_cache = Registry.setup_multi_process_signaling(cr)
+                    registry.base_registry_signaling_sequence = seq_registry
+                    registry.base_cache_signaling_sequence = seq_cache
                 # This should be a method on Registry
                 openerp.modules.load_modules(registry.db, force_demo, status, update_module)
             except Exception:
@@ -242,7 +256,6 @@ class RegistryManager(object):

             cr = registry.db.cursor()
             try:
-                Registry.setup_multi_process_signaling(cr)
                 registry.do_parent_store(cr)
                 cr.commit()
             finally:
@@ -304,16 +317,20 @@ class RegistryManager(object):
                           base_cache_signaling.last_value
                    FROM base_registry_signaling, base_cache_signaling""")
                r, c = cr.fetchone()
+                _logger.debug("Multiprocess signaling check: [Registry - old# %s new# %s] "\
+                    "[Cache - old# %s new# %s]",
+                    registry.base_registry_signaling_sequence, r,
+                    registry.base_cache_signaling_sequence, c)
                # Check if the model registry must be reloaded (e.g. after the
                # database has been updated by another process).
-                if registry.base_registry_signaling_sequence > 1 and registry.base_registry_signaling_sequence != r:
+                if registry.base_registry_signaling_sequence is not None and registry.base_registry_signaling_sequence != r:
                    changed = True
                    _logger.info("Reloading the model registry after database signaling.")
                    registry = cls.new(db_name)
                # Check if the model caches must be invalidated (e.g. after a write
                # occured on another process). Don't clear right after a registry
                # has been reload.
-                elif registry.base_cache_signaling_sequence > 1 and registry.base_cache_signaling_sequence != c:
+                elif registry.base_cache_signaling_sequence is not None and registry.base_cache_signaling_sequence != c:
                    changed = True
                    _logger.info("Invalidating all model caches after database signaling.")
                    registry.clear_caches()
@@ -352,6 +369,7 @@ class RegistryManager(object):
     @classmethod
     def signal_registry_change(cls, db_name):
         if openerp.multi_process and db_name in cls.registries:
+            _logger.info("Registry changed, signaling through the database")
             registry = cls.get(db_name)
             cr = registry.db.cursor()
             r = 1
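
The registry hunks move the initial read of the two signaling sequences into setup_multi_process_signaling and use None rather than 1 as the "never seen" marker, so a freshly loaded registry is not mistaken for an out-of-date one. The polling side then behaves roughly as follows (a simplified sketch, not the actual RegistryManager code):

    def check_signaling(cr, registry):
        """Return 'reload', 'clear_caches' or None from the two sequence values."""
        cr.execute("""SELECT base_registry_signaling.last_value,
                             base_cache_signaling.last_value
                      FROM base_registry_signaling, base_cache_signaling""")
        r, c = cr.fetchone()
        action = None
        if registry.base_registry_signaling_sequence is not None \
                and registry.base_registry_signaling_sequence != r:
            action = 'reload'        # another worker changed the model registry
        elif registry.base_cache_signaling_sequence is not None \
                and registry.base_cache_signaling_sequence != c:
            action = 'clear_caches'  # another worker invalidated the ormcache
        registry.base_registry_signaling_sequence = r
        registry.base_cache_signaling_sequence = c
        return action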

View File

@@ -1018,7 +1018,7 @@ class expression(object):
                        right += ' 23:59:59'
                    push(create_substitution_leaf(leaf, (left, operator, right), working_model))

-                elif field.translate:
+                elif field.translate and right:
                    need_wildcard = operator in ('like', 'ilike', 'not like', 'not ilike')
                    sql_operator = {'=like': 'like', '=ilike': 'ilike'}.get(operator, operator)
                    if need_wildcard:

View File

@@ -899,11 +899,6 @@ class BaseModel(object):
                    for c in new.keys():
                        if new[c].manual:
                            del new[c]
-                    # Duplicate float fields because they have a .digits
-                    # cache (which must be per-registry, not server-wide).
-                    for c in new.keys():
-                        if new[c]._type == 'float':
-                            new[c] = copy.copy(new[c])
                    if hasattr(new, 'update'):
                        new.update(cls.__dict__.get(s, {}))
                elif s=='_constraints':
@@ -939,6 +934,13 @@ class BaseModel(object):
        if not getattr(cls, '_original_module', None):
            cls._original_module = cls._module

        obj = object.__new__(cls)
+
+        if hasattr(obj, '_columns'):
+            # float fields are registry-dependent (digit attribute). Duplicate them to avoid issues.
+            for c, f in obj._columns.items():
+                if f._type == 'float':
+                    obj._columns[c] = copy.copy(f)
+
        obj.__init__(pool, cr)
        return obj
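
The float-column duplication moves from the class-attribute merge in the first hunk to instance creation in the second, so each registry gets its own copy of every float column. The point is that a shared float column caches digits resolved against one database's decimal.precision settings; a reduced, Odoo-independent illustration of why the copy is needed:

    import copy

    class FloatColumn(object):
        _type = 'float'
        def __init__(self):
            self.digits = None   # resolved lazily from one database's settings

    shared = {'amount': FloatColumn()}   # class-level, shared by every registry

    def columns_for_registry(source):
        # copy float columns so digits cached for database A cannot
        # leak into the registry built for database B
        cols = dict(source)
        for name, col in cols.items():
            if col._type == 'float':
                cols[name] = copy.copy(col)
        return cols

    registry_a = columns_for_registry(shared)
    registry_b = columns_for_registry(shared)
    registry_a['amount'].digits = (16, 4)
    assert registry_b['amount'].digits is None   # unaffected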

View File

@@ -255,9 +255,7 @@ class rml_parse(object):
        d = DEFAULT_DIGITS = 2
        if dp:
            decimal_precision_obj = self.pool['decimal.precision']
-            ids = decimal_precision_obj.search(self.cr, self.uid, [('name', '=', dp)])
-            if ids:
-                d = decimal_precision_obj.browse(self.cr, self.uid, ids)[0].digits
+            d = decimal_precision_obj.precision_get(self.cr, self.uid, dp)
        elif obj and f:
            res_digits = getattr(obj._columns[f], 'digits', lambda x: ((16, DEFAULT_DIGITS)))
            if isinstance(res_digits, tuple):

View File

@@ -145,7 +145,7 @@ def exp_get_progress(id):
        raise Exception, e

def exp_drop(db_name):
-    if not exp_db_exist(db_name):
+    if db_name not in exp_list(True):
        return False
    openerp.modules.registry.RegistryManager.delete(db_name)
    openerp.sql_db.close_db(db_name)