[MERGE] merged trunk.

bzr revid: vmt@openerp.com-20110831091325-t5gfm57tmkr0fbee
Vo Minh Thu 2011-08-31 11:13:25 +02:00
commit b8a633e578
52 changed files with 536 additions and 547 deletions

View File

@ -30,9 +30,6 @@ GNU Public Licence.
(c) 2003-TODAY, Fabien Pinckaers - OpenERP s.a.
"""
#----------------------------------------------------------
# python imports
#----------------------------------------------------------
import logging
import os
import signal
@ -45,139 +42,140 @@ import openerp
__author__ = openerp.release.author
__version__ = openerp.release.version
if os.name == 'posix':
import pwd
# We DON't log this using the standard logger, because we might mess
# with the logfile's permissions. Just do a quick exit here.
if pwd.getpwuid(os.getuid())[0] == 'root' :
sys.stderr.write("Attempted to run OpenERP server as root. This is not good, aborting.\n")
def check_root_user():
""" Exit if the process's user is 'root' (on POSIX system)."""
if os.name == 'posix':
import pwd
if pwd.getpwuid(os.getuid())[0] == 'root' :
sys.stderr.write("Running as user 'root' is a security risk, aborting.\n")
sys.exit(1)
def check_postgres_user():
""" Exit if the configured database user is 'postgres'.
This function assumes the configuration has been initialized.
"""
config = openerp.tools.config
if config['db_user'] == 'postgres':
sys.stderr.write("Using the database user 'postgres' is a security risk, aborting.")
sys.exit(1)
#-----------------------------------------------------------------------
# parse the command line
#-----------------------------------------------------------------------
openerp.tools.config.parse_config(sys.argv[1:])
config = openerp.tools.config
def report_configuration():
""" Log the server version and some configuration values.
#----------------------------------------------------------
# get logger
#----------------------------------------------------------
openerp.netsvc.init_logger()
logger = logging.getLogger('server')
This function assumes the configuration has been initialized.
"""
config = openerp.tools.config
logger = logging.getLogger('server')
logger.info("OpenERP version %s", __version__)
for name, value in [('addons paths', config['addons_path']),
('database hostname', config['db_host'] or 'localhost'),
('database port', config['db_port'] or '5432'),
('database user', config['db_user'])]:
logger.info("%s: %s", name, value)
logger.info("OpenERP version - %s", __version__)
for name, value in [('addons_path', config['addons_path']),
('database hostname', config['db_host'] or 'localhost'),
('database port', config['db_port'] or '5432'),
('database user', config['db_user'])]:
logger.info("%s - %s", name, value)
def setup_pid_file():
""" Create a file with the process id written in it.
# Don't allow if the connection to PostgreSQL done by postgres user
if config['db_user'] == 'postgres':
logger.error("Connecting to the database as 'postgres' user is forbidden, as it presents major security issues. Shutting down.")
sys.exit(1)
This function assumes the configuration has been initialized.
"""
config = openerp.tools.config
if config['pidfile']:
fd = open(config['pidfile'], 'w')
pidtext = "%d" % (os.getpid())
fd.write(pidtext)
fd.close()
#----------------------------------------------------------
# init net service
#----------------------------------------------------------
logger.info('initialising distributed objects services')
def preload_registry(dbname):
""" Preload a registry, and start the cron."""
db, pool = openerp.pooler.get_db_and_pool(dbname, update_module=config['init'] or config['update'], pooljobs=False)
pool.get('ir.cron').restart(db.dbname)
#----------------------------------------------------------
# Load and update databases if requested
#----------------------------------------------------------
def run_test_file(dbname, test_file):
""" Preload a registry, possibly run a test file, and start the cron."""
db, pool = openerp.pooler.get_db_and_pool(dbname, update_module=config['init'] or config['update'], pooljobs=False)
if not ( config["stop_after_init"] or \
config["translate_in"] or \
config["translate_out"] ):
openerp.osv.osv.start_object_proxy()
openerp.service.web_services.start_web_services()
cr = db.cursor()
logger = logging.getLogger('server')
logger.info('loading test file %s', test_file)
openerp.tools.convert_yaml_import(cr, 'base', file(test_file), {}, 'test', True)
cr.rollback()
cr.close()
def export_translation():
config = openerp.tools.config
dbname = config['db_name']
logger = logging.getLogger('server')
if config["language"]:
msg = "language %s" % (config["language"],)
else:
msg = "new language"
logger.info('writing translation file for %s to %s', msg,
config["translate_out"])
fileformat = os.path.splitext(config["translate_out"])[-1][1:].lower()
buf = file(config["translate_out"], "w")
cr = openerp.pooler.get_db(dbname).cursor()
openerp.tools.trans_export(config["language"],
config["translate_modules"] or ["all"], buf, fileformat, cr)
cr.close()
buf.close()
logger.info('translation file written successfully')
def import_translation():
config = openerp.tools.config
context = {'overwrite': config["overwrite_existing_translations"]}
dbname = config['db_name']
cr = openerp.pooler.get_db(dbname).cursor()
openerp.tools.trans_load( cr, config["translate_in"], config["language"],
context=context)
openerp.tools.trans_update_res_ids(cr)
cr.commit()
cr.close()
def start_services():
http_server = openerp.service.http_server
netrpc_server = openerp.service.netrpc_server
# Instantiate local services (this is a legacy design).
openerp.osv.osv.start_object_proxy()
# Export (for RPC) services.
openerp.service.web_services.start_web_services()
# Initialize the HTTP stack.
http_server.init_servers()
http_server.init_xmlrpc()
http_server.init_static_http()
netrpc_server.init_servers()
if config['db_name']:
for dbname in config['db_name'].split(','):
db, pool = openerp.pooler.get_db_and_pool(dbname, update_module=config['init'] or config['update'], pooljobs=False)
cr = db.cursor()
# Start the main cron thread.
openerp.netsvc.start_agent()
if config["test_file"]:
logger.info('loading test file %s', config["test_file"])
openerp.tools.convert_yaml_import(cr, 'base', file(config["test_file"]), {}, 'test', True)
cr.rollback()
pool.get('ir.cron').restart(db.dbname)
cr.close()
#----------------------------------------------------------
# translation stuff
#----------------------------------------------------------
if config["translate_out"]:
if config["language"]:
msg = "language %s" % (config["language"],)
else:
msg = "new language"
logger.info('writing translation file for %s to %s', msg, config["translate_out"])
fileformat = os.path.splitext(config["translate_out"])[-1][1:].lower()
buf = file(config["translate_out"], "w")
dbname = config['db_name']
cr = openerp.pooler.get_db(dbname).cursor()
openerp.tools.trans_export(config["language"], config["translate_modules"] or ["all"], buf, fileformat, cr)
cr.close()
buf.close()
logger.info('translation file written successfully')
sys.exit(0)
if config["translate_in"]:
context = {'overwrite': config["overwrite_existing_translations"]}
dbname = config['db_name']
cr = openerp.pooler.get_db(dbname).cursor()
openerp.tools.trans_load(cr,
config["translate_in"],
config["language"],
context=context)
openerp.tools.trans_update_res_ids(cr)
cr.commit()
cr.close()
sys.exit(0)
#----------------------------------------------------------------------------------
# if we don't want the server to continue to run after initialization, we quit here
#----------------------------------------------------------------------------------
if config["stop_after_init"]:
sys.exit(0)
openerp.netsvc.start_agent()
#----------------------------------------------------------
# Launch Servers
#----------------------------------------------------------
LST_SIGNALS = ['SIGINT', 'SIGTERM']
SIGNALS = dict(
[(getattr(signal, sign), sign) for sign in LST_SIGNALS]
)
# Start the top-level servers threads (normally HTTP, HTTPS, and NETRPC).
openerp.netsvc.Server.startAll()
# Variable keeping track of the number of calls to the signal handler defined
# below. This variable is monitored by ``quit_on_signals()``.
quit_signals_received = 0
def handler(signum, frame):
"""
:param signum: the signal number
def signal_handler(sig, frame):
""" Signal handler: exit ungracefully on the second handled signal.
:param sig: the signal number
:param frame: the interrupted stack frame or None
"""
global quit_signals_received
quit_signals_received += 1
if quit_signals_received > 1:
# logging.shutdown was already called at this point.
sys.stderr.write("Forced shutdown.\n")
os._exit(0)
def dumpstacks(signum, frame):
def dumpstacks(sig, frame):
""" Signal handler: dump a stack trace for each existing thread."""
# code from http://stackoverflow.com/questions/132058/getting-stack-trace-from-a-running-python-application#answer-2569696
# modified for python 2.5 compatibility
thread_map = dict(threading._active, **threading._limbo)
@ -191,20 +189,33 @@ def dumpstacks(signum, frame):
code.append(" %s" % (line.strip()))
logging.getLogger('dumpstacks').info("\n".join(code))
for signum in SIGNALS:
signal.signal(signum, handler)
def setup_signal_handlers():
""" Register the signal handler defined above. """
SIGNALS = map(lambda x: getattr(signal, "SIG%s" % x), "INT TERM".split())
map(lambda sig: signal.signal(sig, signal_handler), SIGNALS)
if os.name == 'posix':
signal.signal(signal.SIGQUIT, dumpstacks)
if os.name == 'posix':
signal.signal(signal.SIGQUIT, dumpstacks)
def quit_on_signals():
""" Wait for one or two signals then shutdown the server.
The first SIGINT or SIGTERM signal will initiate a graceful shutdown while
a second one if any will force an immediate exit.
"""
# Wait for a first signal to be handled. (time.sleep will be interrupted
# by the signal handler.)
while quit_signals_received == 0:
time.sleep(60)
def quit():
openerp.netsvc.Agent.quit()
openerp.netsvc.Server.quitAll()
config = openerp.tools.config
if config['pidfile']:
os.unlink(config['pidfile'])
logger = logging.getLogger('shutdown')
logger.info("Initiating OpenERP Server shutdown")
logger.info("Hit CTRL-C again or send a second signal to immediately terminate the server...")
logger = logging.getLogger('server')
logger.info("Initiating shutdown")
logger.info("Hit CTRL-C again or send a second signal to force the sutdown.")
logging.shutdown()
# manually join() all threads before calling sys.exit() to allow a second signal
@ -219,19 +230,40 @@ def quit():
time.sleep(0.05)
sys.exit(0)
if config['pidfile']:
fd = open(config['pidfile'], 'w')
pidtext = "%d" % (os.getpid())
fd.write(pidtext)
fd.close()
if __name__ == "__main__":
openerp.netsvc.Server.startAll()
check_root_user()
openerp.tools.config.parse_config(sys.argv[1:])
check_postgres_user()
openerp.netsvc.init_logger()
report_configuration()
logger.info('OpenERP server is running, waiting for connections...')
config = openerp.tools.config
while quit_signals_received == 0:
time.sleep(60)
if config["test_file"]:
run_test_file(config['db_name'], config['test_file'])
sys.exit(0)
quit()
if config["translate_out"]:
export_translation()
sys.exit(0)
if config["translate_in"]:
import_translation()
sys.exit(0)
if config['db_name']:
for dbname in config['db_name'].split(','):
preload_registry(dbname)
if config["stop_after_init"]:
sys.exit(0)
setup_pid_file()
setup_signal_handlers()
start_services()
logger = logging.getLogger('server')
logger.info('OpenERP server is running, waiting for connections...')
quit_on_signals()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
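Side note on the signal logic refactored above: stripped of the OpenERP plumbing, the two-signal shutdown reduces to the self-contained sketch below (illustrative only, not part of the diff). The first SIGINT or SIGTERM breaks the waiting loop so the caller can clean up; a second one aborts on the spot.

    import os
    import signal
    import sys
    import time

    quit_signals_received = 0

    def handler(sig, frame):
        global quit_signals_received
        quit_signals_received += 1
        if quit_signals_received > 1:
            # A second SIGINT/SIGTERM means the graceful shutdown is taking
            # too long: abort without running any cleanup.
            sys.stderr.write("Forced shutdown.\n")
            os._exit(0)

    for sig in (signal.SIGINT, signal.SIGTERM):
        signal.signal(sig, handler)

    # time.sleep() returns early when a handled signal arrives, so this loop
    # effectively waits for the first CTRL-C / kill.
    while quit_signals_received == 0:
        time.sleep(60)
    print "first signal received, cleaning up"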

View File

@ -59,18 +59,18 @@
'res/res_request_view.xml',
'res/res_lang_view.xml',
'res/res_log_view.xml',
'res/partner/partner_report.xml',
'res/partner/partner_view.xml',
'res/partner/partner_shortcut_data.xml',
'res/partner/bank_view.xml',
'res/country_view.xml',
'res/res_partner_report.xml',
'res/res_partner_view.xml',
'res/res_partner_shortcut_data.xml',
'res/res_bank_view.xml',
'res/res_country_view.xml',
'res/res_currency_view.xml',
'res/partner/crm_view.xml',
'res/partner/wizard/partner_sms_send_view.xml',
'res/partner/wizard/partner_wizard_spam_view.xml',
'res/partner/wizard/partner_clear_ids_view.xml',
'res/partner/wizard/partner_wizard_ean_check_view.xml',
'res/partner/partner_data.xml',
'res/res_partner_event_view.xml',
'res/wizard/partner_sms_send_view.xml',
'res/wizard/partner_wizard_spam_view.xml',
'res/wizard/partner_clear_ids_view.xml',
'res/wizard/partner_wizard_ean_check_view.xml',
'res/res_partner_data.xml',
'res/ir_property_view.xml',
'security/base_security.xml',
'publisher_warranty/publisher_warranty_view.xml',
@ -81,8 +81,7 @@
],
'demo_xml': [
'base_demo.xml',
'res/partner/partner_demo.xml',
'res/partner/crm_demo.xml',
'res/res_partner_demo.xml',
'res/res_widget_demo.xml',
],
'test': [

View File

@ -33,7 +33,6 @@ class ir_attachment(osv.osv):
"""
if not ids:
return
ima = self.pool.get('ir.model.access')
res_ids = {}
if ids:
if isinstance(ids, (int, long)):
@ -47,12 +46,13 @@ class ir_attachment(osv.osv):
if 'res_model' in values and 'res_id' in values:
res_ids.setdefault(values['res_model'],set()).add(values['res_id'])
ima = self.pool.get('ir.model.access')
for model, mids in res_ids.items():
# ignore attachments that are not attached to a resource anymore when checking access rights
# (resource was deleted but attachment was not)
cr.execute('select id from '+self.pool.get(model)._table+' where id in %s', (tuple(mids),))
mids = [x[0] for x in cr.fetchall()]
ima.check(cr, uid, model, mode, context=context)
ima.check(cr, uid, model, mode)
self.pool.get(model).check_access_rule(cr, uid, mids, mode, context=context)
def search(self, cr, uid, args, offset=0, limit=None, order=None,
@ -80,7 +80,7 @@ class ir_attachment(osv.osv):
# performed in batch as much as possible.
ima = self.pool.get('ir.model.access')
for model, targets in model_attachments.iteritems():
if not ima.check(cr, uid, model, 'read', raise_exception=False, context=context):
if not ima.check(cr, uid, model, 'read', False):
# remove all corresponding attachment ids
for attach_id in itertools.chain(*targets.values()):
ids.remove(attach_id)

View File

@ -466,6 +466,7 @@ class ir_model_access(osv.osv):
a.perm_''' + access_mode, (model_name,))
return [x[0] for x in cr.fetchall()]
@tools.ormcache()
def check(self, cr, uid, model, mode='read', raise_exception=True, context=None):
if uid==1:
# User root have all accesses
@ -520,8 +521,6 @@ class ir_model_access(osv.osv):
raise except_orm(_('AccessError'), msgs[mode] % (model_name, groups) )
return r
check = tools.cache()(check)
__cache_clearing_methods = []
def register_cache_clearing_method(self, model, method):
@ -535,7 +534,7 @@ class ir_model_access(osv.osv):
pass
def call_cache_clearing_methods(self, cr):
self.check.clear_cache(cr.dbname) # clear the cache of check function
self.check.clear_cache(self) # clear the cache of check function
for model, method in self.__cache_clearing_methods:
object_ = self.pool.get(model)
if object_:
@ -599,7 +598,7 @@ class ir_model_data(osv.osv):
if not cr.fetchone():
cr.execute('CREATE INDEX ir_model_data_module_name_index ON ir_model_data (module, name)')
@tools.cache()
@tools.ormcache()
def _get_id(self, cr, uid, module, xml_id):
"""Returns the id of the ir.model.data record corresponding to a given module and xml_id (cached) or raise a ValueError if not found"""
ids = self.search(cr, uid, [('module','=',module), ('name','=', xml_id)])
@ -608,7 +607,7 @@ class ir_model_data(osv.osv):
# the sql constraints ensure us we have only one result
return ids[0]
@tools.cache()
@tools.ormcache()
def get_object_reference(self, cr, uid, module, xml_id):
"""Returns (model, res_id) corresponding to a given module and xml_id (cached) or raise ValueError if not found"""
data_id = self._get_id(cr, uid, module, xml_id)
@ -637,10 +636,8 @@ class ir_model_data(osv.osv):
def unlink(self, cr, uid, ids, context=None):
""" Regular unlink method, but make sure to clear the caches. """
ref_ids = self.browse(cr, uid, ids, context=context)
for ref_id in ref_ids:
self._get_id.clear_cache(cr.dbname, uid, ref_id.module, ref_id.name)
self.get_object_reference.clear_cache(cr.dbname, uid, ref_id.module, ref_id.name)
self._get_id.clear_cache(self)
self.get_object_reference.clear_cache(self)
return super(ir_model_data,self).unlink(cr, uid, ids, context=context)
def _update(self,cr, uid, model, module, values, xml_id=False, store=True, noupdate=False, mode='init', res_id=False, context=None):
@ -666,8 +663,8 @@ class ir_model_data(osv.osv):
results = cr.fetchall()
for imd_id2,res_id2,real_id2 in results:
if not real_id2:
self._get_id.clear_cache(cr.dbname, uid, module, xml_id)
self.get_object_reference.clear_cache(cr.dbname, uid, module, xml_id)
self._get_id.clear_cache(self, uid, module, xml_id)
self.get_object_reference.clear_cache(self, uid, module, xml_id)
cr.execute('delete from ir_model_data where id=%s', (imd_id2,))
res_id = False
else:
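A hedged reading of the recurring change above, not spelled out in the commit itself: the new ormcache wrapper exposes the decorated method as lookup(self, cr, *args) and builds its cache key from the positional arguments, so cached methods such as ir.model.access.check() can no longer be called with keyword arguments like raise_exception=False or context=..., and the call sites are rewritten to pass the flag positionally. A minimal sketch, assuming the openerp.tools.cache module added later in this commit is importable:

    from openerp.tools.cache import ormcache

    class AccessDemo(object):
        @ormcache()
        def check(self, cr, model, mode='read', raise_exception=True):
            # Stand-in for the expensive rights lookup; the result is cached
            # under the positional key (model, mode, raise_exception).
            return True

    demo = AccessDemo()
    demo.check('cr', 'res.partner', 'read', False)   # ok, and cached
    # demo.check('cr', 'res.partner', raise_exception=False)
    # ...would raise TypeError: the wrapper only accepts positional arguments.

The same reasoning explains why clear_cache() is now called with the model itself rather than a database name: the LRU lives on the instance's _ormcache dictionary instead of being keyed globally per database.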

View File

@ -99,7 +99,7 @@ class ir_rule(osv.osv):
(_check_model_obj, 'Rules are not supported for osv_memory objects !', ['model_id'])
]
@tools.cache()
@tools.ormcache()
def _compute_domain(self, cr, uid, model_name, mode="read"):
if mode not in self._MODES:
raise ValueError('Invalid mode: %r' % (mode,))
@ -139,25 +139,10 @@ class ir_rule(osv.osv):
return []
def clear_cache(self, cr, uid):
cr.execute("""SELECT DISTINCT m.model
FROM ir_rule r
JOIN ir_model m
ON r.model_id = m.id
WHERE r.global
OR EXISTS (SELECT 1
FROM rule_group_rel g_rel
JOIN res_groups_users_rel u_rel
ON g_rel.group_id = u_rel.gid
WHERE g_rel.rule_group_id = r.id
AND u_rel.uid = %s)
""", (uid,))
models = map(itemgetter(0), cr.fetchall())
clear = partial(self._compute_domain.clear_cache, cr.dbname, uid)
[clear(model, mode) for model in models for mode in self._MODES]
self._compute_domain.clear_cache(self)
def domain_get(self, cr, uid, model_name, mode='read', context=None):
dom = self._compute_domain(cr, uid, model_name, mode=mode)
dom = self._compute_domain(cr, uid, model_name, mode)
if dom:
# _where_calc is called as superuser. This means that rules can
# involve objects on which the real uid has no acces rights.
@ -169,20 +154,17 @@ class ir_rule(osv.osv):
def unlink(self, cr, uid, ids, context=None):
res = super(ir_rule, self).unlink(cr, uid, ids, context=context)
# Restart the cache on the _compute_domain method of ir.rule
self._compute_domain.clear_cache(cr.dbname)
self.clear_cache(cr, uid)
return res
def create(self, cr, user, vals, context=None):
res = super(ir_rule, self).create(cr, user, vals, context=context)
# Restart the cache on the _compute_domain method of ir.rule
self._compute_domain.clear_cache(cr.dbname)
def create(self, cr, uid, vals, context=None):
res = super(ir_rule, self).create(cr, uid, vals, context=context)
self.clear_cache(cr, uid)
return res
def write(self, cr, uid, ids, vals, context=None):
res = super(ir_rule, self).write(cr, uid, ids, vals, context=context)
# Restart the cache on the _compute_domain method
self._compute_domain.clear_cache(cr.dbname)
self.clear_cache(cr,uid)
return res
ir_rule()

View File

@ -87,7 +87,7 @@ class ir_translation(osv.osv):
cr.execute('CREATE INDEX ir_translation_ltn ON ir_translation (name, lang, type)')
cr.commit()
@tools.cache(skiparg=3, multi='ids')
@tools.ormcache_multi(skiparg=3, multi=6)
def _get_ids(self, cr, uid, name, tt, lang, ids):
translations = dict.fromkeys(ids, False)
if ids:
@ -107,9 +107,9 @@ class ir_translation(osv.osv):
tr = self._get_ids(cr, uid, name, tt, lang, ids)
for res_id in tr:
if tr[res_id]:
self._get_source.clear_cache(cr.dbname, uid, name, tt, lang, tr[res_id])
self._get_source.clear_cache(cr.dbname, uid, name, tt, lang)
self._get_ids.clear_cache(cr.dbname, uid, name, tt, lang, ids)
self._get_source.clear_cache(self, uid, name, tt, lang, tr[res_id])
self._get_source.clear_cache(self, uid, name, tt, lang)
self._get_ids.clear_cache(self, uid, name, tt, lang, ids)
cr.execute('delete from ir_translation ' \
'where lang=%s ' \
@ -128,7 +128,7 @@ class ir_translation(osv.osv):
})
return len(ids)
@tools.cache(skiparg=3)
@tools.ormcache(skiparg=3)
def _get_source(self, cr, uid, name, types, lang, source=None):
"""
Returns the translation for the given combination of name, type, language
@ -173,13 +173,11 @@ class ir_translation(osv.osv):
return tools.ustr(source)
return trad
def create(self, cursor, user, vals, context=None):
def create(self, cr, uid, vals, context=None):
if not context:
context = {}
ids = super(ir_translation, self).create(cursor, user, vals, context=context)
for trans_obj in self.read(cursor, user, [ids], ['name','type','res_id','src','lang'], context=context):
self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
ids = super(ir_translation, self).create(cr, uid, vals, context=context)
self._get_source.clear_cache(self, vals.get('name',0), vals.get('type',0), vals.get('lang',0), vals.get('src',0))
return ids
def write(self, cursor, user, ids, vals, context=None):
@ -188,9 +186,9 @@ class ir_translation(osv.osv):
if isinstance(ids, (int, long)):
ids = [ids]
result = super(ir_translation, self).write(cursor, user, ids, vals, context=context)
self._get_source.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
for trans_obj in self.read(cursor, user, ids, ['name','type','res_id','src','lang'], context=context):
self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
self._get_ids.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
return result
def unlink(self, cursor, user, ids, context=None):
@ -199,8 +197,8 @@ class ir_translation(osv.osv):
if isinstance(ids, (int, long)):
ids = [ids]
for trans_obj in self.read(cursor, user, ids, ['name','type','res_id','src','lang'], context=context):
self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
self._get_source.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src'])
self._get_ids.clear_cache(self, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']])
result = super(ir_translation, self).unlink(cursor, user, ids, context=context)
return result
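A gloss on the decorator arguments used for _get_ids above (my reading of the new cache module, not documented in the commit): multi is the position of the ids argument in the full method signature (self is 0, cr is 1, uid is 2, ..., ids is 6), and skiparg=3 keeps self, cr and uid out of the cache key, so each translation is cached per record under (name, type, lang, res_id). A small sketch, assuming openerp.tools.cache is importable:

    from openerp.tools.cache import ormcache_multi

    class Demo(object):
        @ormcache_multi(skiparg=3, multi=3)
        def squares(self, cr, uid, ids):
            # Only the ids that missed the cache reach this body.
            print "computing", ids
            return dict((i, i * i) for i in ids)

    d = Demo()
    print d.squares('cr', 1, [1, 2, 3])   # computes 1, 2 and 3
    print d.squares('cr', 1, [2, 3, 4])   # only 4 is computed, 2 and 3 are hits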

View File

@ -88,7 +88,7 @@ class ir_ui_menu(osv.osv):
field = model_field.get(menu.action._name)
if field and data[field]:
if not modelaccess.check(cr, uid, data[field], raise_exception=False):
if not modelaccess.check(cr, uid, data[field], False):
continue
else:
# if there is no action, it's a 'folder' menu

View File

@ -21,9 +21,11 @@
import tools
import country
import res_country
import res_lang
import partner
import res_partner
import res_partner_event
import res_bank
import res_config
import res_currency
import res_company
@ -31,6 +33,11 @@ import res_user
import res_request
import res_lang
import res_log
import ir_property
import res_widget
import ir_property
import wizard
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -1,34 +0,0 @@
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import tools
from partner import *
from bank import *
from crm import *
import wizard
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<openerp>
<data noupdate="1">
<record model="res.partner.canal">
<field name="name">website</field>
</record>
<record model="res.partner.canal">
<field name="name">phone</field>
</record>
<record model="res.partner.canal">
<field name="name">direct</field>
</record>
<record model="res.partner.canal">
<field name="name">E-mail</field>
</record>
</data>
</openerp>

View File

@ -1,68 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<openerp>
<data>
<record id="res_partner_canal-view-tree" model="ir.ui.view">
<field name="name">res.partner.canal.tree</field>
<field name="model">res.partner.canal</field>
<field name="type">tree</field>
<field name="arch" type="xml">
<tree string="Channels">
<field name="name" select="1"/>
<field name="active" select="1"/>
</tree>
</field>
</record>
<record id="res_partner_canal-view" model="ir.ui.view">
<field name="name">res.partner.canal.form</field>
<field name="model">res.partner.canal</field>
<field name="type">form</field>
<field name="arch" type="xml">
<form string="Channel">
<field name="name" select="1"/>
<field name="active" select="1"/>
</form>
</field>
</record>
<record id="res_partner_canal-act" model="ir.actions.act_window">
<field name="name">Channels</field>
<field name="res_model">res.partner.canal</field>
<field name="view_type">form</field>
<field name="view_mode">tree,form</field>
<field name="help">Track from where is coming your leads and opportunities by creating specific channels that will be maintained at the creation of a document in the system. Some examples of channels can be: Website, Phone Call, Reseller, etc.</field>
</record>
<!-- <menuitem id="menu_partner_events" name="Events" parent="menu_config_address_book" sequence="7"/>-->
<menuitem id="base.menu_crm_config_lead" name="Leads &amp; Opportunities"
parent="base.menu_base_config" sequence="2" groups="base.group_extended"/>
<menuitem action="res_partner_canal-act" id="menu_res_partner_canal-act" parent="base.menu_crm_config_lead" sequence="4"/>
<record id="res_partner_event-wopartner-view_form" model="ir.ui.view">
<field name="name">res.partner.event.form</field>
<field name="model">res.partner.event</field>
<field name="type">form</field>
<field name="arch" type="xml">
<form string="Event Logs">
<separator colspan="4" string="General Description"/>
<field name="name" select="1" string="Name"/>
<field name="date" select="1"/>
<field name="user_id" select="1"/>
<separator colspan="4" string="Description"/>
<field colspan="4" name="description" nolabel="1"/>
</form>
</field>
</record>
<record id="res_partner_event-wopartner-view_tree" model="ir.ui.view">
<field name="name">res.partner.event.tree</field>
<field name="model">res.partner.event</field>
<field name="type">tree</field>
<field name="arch" type="xml">
<tree string="Event Logs">
<field name="date"/>
<field name="name" string="Event"/>
<field name="user_id"/>
</tree>
</field>
</record>
</data>
</openerp>

View File

@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<openerp>
<data>
<report id="res_partner_address_report" model="res.partner" name="res.partner.address" string="Labels" xml="base/res/partner/report/partner_address.xml" xsl="base/res/partner/report/partner_address.xsl" groups="base.group_extended"/>
<!--
<report string="Business Cards" model="res.partner" name="res.partner.businesscard" xml="base/res/partner/report/business_card.xml" xsl="base/res/partner/report/business_card.xsl"/>
-->
</data>
</openerp>

View File

@ -180,7 +180,7 @@ class res_company(osv.osv):
return rule.company_dest_id.id
return user.company_id.id
@tools.cache()
@tools.ormcache()
def _get_company_children(self, cr, uid=None, company=None):
if not company:
return []
@ -207,7 +207,7 @@ class res_company(osv.osv):
# This function restart the cache on the _get_company_children method
#
def cache_restart(self, cr):
self._get_company_children.clear_cache(cr.dbname)
self._get_company_children.clear_cache(self)
def create(self, cr, uid, vals, context=None):
if not vals.get('name', False) or vals.get('partner_id', False):

View File

@ -36,8 +36,12 @@ class res_currency(osv.osv):
else:
date = time.strftime('%Y-%m-%d')
date = date or time.strftime('%Y-%m-%d')
# Convert False values to None ...
currency_rate_type = context.get('currency_rate_type_id') or None
# ... and use 'is NULL' instead of '= some-id'.
operator = '=' if currency_rate_type else 'is'
for id in ids:
cr.execute("SELECT currency_id, rate FROM res_currency_rate WHERE currency_id = %s AND name <= %s ORDER BY name desc LIMIT 1" ,(id, date))
cr.execute("SELECT currency_id, rate FROM res_currency_rate WHERE currency_id = %s AND name <= %s AND currency_rate_type_id " + operator +" %s ORDER BY name desc LIMIT 1" ,(id, date, currency_rate_type))
if cr.rowcount:
id, rate = cr.fetchall()[0]
res[id] = rate
@ -69,12 +73,12 @@ class res_currency(osv.osv):
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
res = super(res_currency, self).read(cr, user, ids, fields, context, load)
currency_rate_obj = self.pool.get('res.currency.rate')
for r in res:
if r.__contains__('rate_ids'):
rates=r['rate_ids']
if rates:
currency_rate_obj= self.pool.get('res.currency.rate')
currency_date = currency_rate_obj.read(cr,user,rates[0],['name'])['name']
currency_date = currency_rate_obj.read(cr, user, rates[0], ['name'])['name']
r['date'] = currency_date
return res
@ -100,9 +104,16 @@ class res_currency(osv.osv):
def _get_conversion_rate(self, cr, uid, from_currency, to_currency, context=None):
if context is None:
context = {}
if from_currency['rate'] == 0 or to_currency['rate'] == 0:
ctx = context.copy()
ctx.update({'currency_rate_type_id': ctx.get('currency_rate_type_from')})
from_currency = self.browse(cr, uid, from_currency.id, context=ctx)
ctx.update({'currency_rate_type_id': ctx.get('currency_rate_type_to')})
to_currency = self.browse(cr, uid, to_currency.id, context=ctx)
if from_currency.rate == 0 or to_currency.rate == 0:
date = context.get('date', time.strftime('%Y-%m-%d'))
if from_currency['rate'] == 0:
if from_currency.rate == 0:
currency_symbol = from_currency.symbol
else:
currency_symbol = to_currency.symbol
@ -111,7 +122,10 @@ class res_currency(osv.osv):
'at the date: %s') % (currency_symbol, date))
return to_currency.rate/from_currency.rate
def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount, round=True, context=None):
def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount,
round=True, currency_rate_type_from=False, currency_rate_type_to=False, context=None):
if not context:
context = {}
if not from_currency_id:
from_currency_id = to_currency_id
if not to_currency_id:
@ -119,12 +133,13 @@ class res_currency(osv.osv):
xc = self.browse(cr, uid, [from_currency_id,to_currency_id], context=context)
from_currency = (xc[0].id == from_currency_id and xc[0]) or xc[1]
to_currency = (xc[0].id == to_currency_id and xc[0]) or xc[1]
if to_currency_id == from_currency_id:
if (to_currency_id == from_currency_id) and (currency_rate_type_from == currency_rate_type_to):
if round:
return self.round(cr, uid, to_currency, from_amount)
else:
return from_amount
else:
context.update({'currency_rate_type_from': currency_rate_type_from, 'currency_rate_type_to': currency_rate_type_to})
rate = self._get_conversion_rate(cr, uid, from_currency, to_currency, context=context)
if round:
return self.round(cr, uid, to_currency, from_amount * rate)
@ -133,19 +148,31 @@ class res_currency(osv.osv):
res_currency()
class res_currency_rate_type(osv.osv):
_name = "res.currency.rate.type"
_description = "Used to define the type of Currency Rates"
_columns = {
'name': fields.char('Name', size=64, required=True, translate=True),
}
res_currency_rate_type()
class res_currency_rate(osv.osv):
_name = "res.currency.rate"
_description = "Currency Rate"
_columns = {
'name': fields.date('Date', required=True, select=True),
'rate': fields.float('Rate', digits=(12,6), required=True,
help='The rate of the currency to the currency of rate 1'),
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
'currency_rate_type_id': fields.many2one('res.currency.rate.type', 'Currency Rate Type', help="Allow you to define your own currency rate types, like 'Average' or 'Year to Date'. Leave empty if you simply want to use the normal 'spot' rate type"),
}
_defaults = {
'name': lambda *a: time.strftime('%Y-%m-%d'),
}
_order = "name desc"
res_currency_rate()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
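For readers of the extended compute() signature above, a usage sketch follows; the pool, cursor, uid, currency ids and the 'Average' rate type id are assumed to come from a live registry and are purely illustrative:

    def convert_with_average_rate(pool, cr, uid, usd_id, eur_id, average_type_id, amount):
        # Convert using the spot rate on the source side and a hypothetical
        # 'Average' res.currency.rate.type on the target side, at a fixed date.
        currency_obj = pool.get('res.currency')
        return currency_obj.compute(cr, uid, usd_id, eur_id, amount,
            round=True,
            currency_rate_type_from=False,
            currency_rate_type_to=average_type_id,
            context={'date': '2011-08-31'})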

View File

@ -66,5 +66,38 @@
<menuitem action="action_currency_form" id="menu_action_currency_form" parent="menu_localisation" sequence="3"/>
<!--
Currency Rate Type
-->
<record id="view_currency_rate_type_form" model="ir.ui.view">
<field name="name">res.currency.rate.type.form</field>
<field name="model">res.currency.rate.type</field>
<field name="type">form</field>
<field name="arch" type="xml">
<form string="Currency Rate Type">
<field name="name"/>
</form>
</field>
</record>
<record id="action_currency_rate_type_form" model="ir.actions.act_window">
<field name="name">Currency Rate Type</field>
<field name="res_model">res.currency.rate.type</field>
<field name="view_type">form</field>
<field name="view_mode">tree,form</field>
</record>
<record id="view_currency_rate_type_search" model="ir.ui.view">
<field name="name">res.currency.rate.type.search</field>
<field name="model">res.currency.rate.type</field>
<field name="type">search</field>
<field name="arch" type="xml">
<search string="Currency Rate Type">
<field name="name"/>
</search>
</field>
</record>
</data>
</openerp>

View File

@ -163,7 +163,7 @@ class lang(osv.osv):
(_check_format, 'Invalid date/time format directive specified. Please refer to the list of allowed directives, displayed when you edit a language.', ['time_format', 'date_format'])
]
@tools.cache(skiparg=3)
@tools.ormcache(skiparg=3)
def _lang_data_get(self, cr, uid, lang_id, monetary=False):
conv = localeconv()
lang_obj = self.browse(cr, uid, lang_id)
@ -174,7 +174,7 @@ class lang(osv.osv):
def write(self, cr, uid, ids, vals, context=None):
for lang_id in ids :
self._lang_data_get.clear_cache(cr.dbname,lang_id= lang_id)
self._lang_data_get.clear_cache(self,lang_id= lang_id)
return super(lang, self).write(cr, uid, ids, vals, context)
def unlink(self, cr, uid, ids, context=None):

View File

@ -23,22 +23,6 @@ import time
from osv import osv
from osv import fields
#
# Sale/Purchase Canal, Media
#
class res_partner_canal(osv.osv):
_name = "res.partner.canal"
_description = "Channels"
_order = 'name'
_columns = {
'name': fields.char('Channel Name', size=64, required=True),
'active': fields.boolean('Active'),
}
_defaults = {
'active': lambda *a: 1,
}
res_partner_canal()
class res_partner_event(osv.osv):
_name = "res.partner.event"
_columns = {

View File

@ -0,0 +1,36 @@
<?xml version="1.0" encoding="utf-8"?>
<openerp>
<data>
<!-- <menuitem id="menu_partner_events" name="Events" parent="menu_config_address_book" sequence="7"/>-->
<record id="res_partner_event-wopartner-view_form" model="ir.ui.view">
<field name="name">res.partner.event.form</field>
<field name="model">res.partner.event</field>
<field name="type">form</field>
<field name="arch" type="xml">
<form string="Event Logs">
<separator colspan="4" string="General Description"/>
<field name="name" select="1" string="Name"/>
<field name="date" select="1"/>
<field name="user_id" select="1"/>
<separator colspan="4" string="Description"/>
<field colspan="4" name="description" nolabel="1"/>
</form>
</field>
</record>
<record id="res_partner_event-wopartner-view_tree" model="ir.ui.view">
<field name="name">res.partner.event.tree</field>
<field name="model">res.partner.event</field>
<field name="type">tree</field>
<field name="arch" type="xml">
<tree string="Event Logs">
<field name="date"/>
<field name="name" string="Event"/>
<field name="user_id"/>
</tree>
</field>
</record>
</data>
</openerp>

View File

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<openerp>
<data>
<report id="res_partner_address_report" model="res.partner" name="res.partner.address" string="Labels" xml="base/res/report/partner_address.xml" xsl="base/res/report/partner_address.xsl" groups="base.group_extended"/>
<!--
<report string="Business Cards" model="res.partner" name="res.partner.businesscard" xml="base/res/report/business_card.xml" xsl="base/res/report/business_card.xsl"/>
-->
</data>
</openerp>

View File

@ -518,7 +518,7 @@
<field name="help">Manage the partner categories in order to better classify them for tracking and analysis purposes. A partner may belong to several categories and categories have a hierarchy structure: a partner belonging to a category also belong to his parent category.</field>
</record>
<menuitem action="action_partner_category_form" id="menu_partner_category_form" name="Partner Categories" sequence="4" parent="menu_config_address_book"/>
<menuitem action="action_partner_category_form" id="menu_partner_category_form" name="Partner Categories" sequence="4" parent="menu_config_address_book" groups="base.group_extended"/>
<act_window domain="[('partner_id', '=', active_id)]" context="{'default_partner_id':active_id}"
id="act_res_partner_event" name="Events"

View File

@ -263,7 +263,7 @@ class users(osv.osv):
o['password'] = '********'
return o
result = super(users, self).read(cr, uid, ids, fields, context, load)
canwrite = self.pool.get('ir.model.access').check(cr, uid, 'res.users', 'write', raise_exception=False)
canwrite = self.pool.get('ir.model.access').check(cr, uid, 'res.users', 'write', False)
if not canwrite:
if isinstance(ids, (int, float)):
result = override_password(result)

View File

@ -64,8 +64,6 @@
"access_res_partner_bank_type_group_user","res_partner_bank_type group_user","model_res_partner_bank_type",,1,0,0,0
"access_res_partner_bank_type_field_group_partner_manager","res_partner_bank_type_field group_partner_manager","model_res_partner_bank_type_field","group_partner_manager",1,1,1,1
"access_res_partner_bank_type_field_group_user","res_partner_bank_type_field group_user","model_res_partner_bank_type_field",,1,0,0,0
"access_res_partner_canal_group_user","res_partner_canal group_user","model_res_partner_canal","group_user",1,0,0,0
"access_res_partner_canal_group_partner_manager","res_partner_canal group_partner_manager","model_res_partner_canal","group_partner_manager",1,1,1,1
"access_res_partner_category_group_user","res_partner_category group_user","model_res_partner_category","group_user",1,0,0,0
"access_res_partner_category_group_partner_manager","res_partner_category group_partner_manager","model_res_partner_category","group_partner_manager",1,1,1,1
"access_res_partner_event_group_user","res_partner_event group_user","model_res_partner_event","group_user",1,0,0,0


View File

@ -70,18 +70,22 @@ class Registry(object):
res = []
# Instanciate classes registered through their constructor and
# add them to the pool.
for klass in openerp.osv.orm.module_class_list.get(module, []):
res.append(klass.create_instance(self, cr))
# Instanciate classes automatically discovered.
# Instantiate registered classes (via the MetaModel automatic discovery
# or via explicit constructor call), and add them to the pool.
for cls in openerp.osv.orm.MetaModel.module_to_models.get(module, []):
if cls not in openerp.osv.orm.module_class_list.get(module, []):
res.append(cls.create_instance(self, cr))
res.append(cls.create_instance(self, cr))
return res
def clear_caches(self):
""" Clear the caches
This clears the caches associated with methods decorated with
``tools.ormcache`` or ``tools.ormcache_multi`` for all the models.
"""
for model in self.models.itervalues():
model.clear_caches()
class RegistryManager(object):
""" Model registries manager.
@ -155,4 +159,19 @@ class RegistryManager(object):
del cls.registries[db_name]
@classmethod
def clear_caches(cls, db_name):
""" Clear the caches
This clears the caches associated with methods decorated with
``tools.ormcache`` or ``tools.ormcache_multi`` for all the models
of the given database name.
This method is provided to spare you a ``RegistryManager.get(db_name)``
call that would load the given database if it was not already loaded.
"""
if db_name in cls.registries:
cls.registries[db_name].clear_caches()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
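Usage note (a sketch, with 'somedb' as a placeholder database name): the class-level entry point is meant for callers that may not have, and do not want to trigger, a loaded registry.

    from openerp.modules.registry import RegistryManager

    # No-op when 'somedb' has never been loaded; otherwise every model's
    # ormcache/ormcache_multi entries are dropped.
    RegistryManager.clear_caches('somedb')

    # With a registry object already in hand, the instance method is equivalent:
    # registry = RegistryManager.get('somedb')
    # registry.clear_caches()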

View File

@ -70,9 +70,6 @@ from openerp.tools import SKIPPED_ELEMENT_TYPES
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
# Mapping between openerp module names and their osv classes.
module_class_list = {}
# Super-user identifier (aka Administrator aka root)
ROOT_USER_ID = 1
@ -809,20 +806,22 @@ class Model(object):
return obj
def __new__(cls):
""" Register this model.
"""Register this model.
This doesn't create an instance but simply registers the model
as being part of the module where it is defined.
"""
# Set the module name (e.g. base, sale, accounting, ...) on the class.
module = cls.__module__.split('.')[0]
if not hasattr(cls, '_module'):
cls._module = module
# Remember which models to instanciate for this module.
module_class_list.setdefault(cls._module, []).append(cls)
# Record this class in the list of models to instantiate for this module,
# managed by the metaclass.
module_model_list = MetaModel.module_to_models.setdefault(cls._module, [])
if cls not in module_model_list:
module_model_list.append(cls)
# Since we don't return an instance here, the __init__
# method won't be called.
@ -1311,7 +1310,7 @@ class Model(object):
else:
translated_msg = tmp_msg
else:
translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, source=msg) or msg
translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg) or msg
error_msgs.append(
_("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
)
@ -2149,7 +2148,7 @@ class Model(object):
def read_string(self, cr, uid, id, langs, fields=None, context=None):
res = {}
res2 = {}
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read', context=context)
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'read')
if not fields:
fields = self._columns.keys() + self._inherit_fields.keys()
#FIXME: collect all calls to _get_source into one SQL call.
@ -2173,7 +2172,7 @@ class Model(object):
return res
def write_string(self, cr, uid, id, langs, vals, context=None):
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write', context=context)
self.pool.get('ir.model.access').check(cr, uid, 'ir.translation', 'write')
#FIXME: try to only call the translation in one SQL
for lang in langs:
for field in vals:
@ -2215,6 +2214,18 @@ class Model(object):
values = defaults
return values
def clear_caches(self):
""" Clear the caches
This clears the caches associated with methods decorated with
``tools.ormcache`` or ``tools.ormcache_multi``.
"""
try:
getattr(self, '_ormcache')
self._ormcache = {}
except AttributeError:
pass
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
"""
Get the list of records in list view grouped by the given ``groupby`` fields
@ -2242,7 +2253,7 @@ class Model(object):
"""
context = context or {}
self.pool.get('ir.model.access').check(cr, uid, self._name, 'read', context=context)
self.pool.get('ir.model.access').check(cr, uid, self._name, 'read')
if not fields:
fields = self._columns.keys()
@ -3028,8 +3039,8 @@ class Model(object):
context = {}
ira = self.pool.get('ir.model.access')
write_access = ira.check(cr, user, self._name, 'write', raise_exception=False, context=context) or \
ira.check(cr, user, self._name, 'create', raise_exception=False, context=context)
write_access = ira.check(cr, user, self._name, 'write', False) or \
ira.check(cr, user, self._name, 'create', False)
res = {}
@ -3092,7 +3103,7 @@ class Model(object):
if not context:
context = {}
self.pool.get('ir.model.access').check(cr, user, self._name, 'read', context=context)
self.pool.get('ir.model.access').check(cr, user, self._name, 'read')
if not fields:
fields = list(set(self._columns.keys() + self._inherit_fields.keys()))
if isinstance(ids, (int, long)):
@ -3379,7 +3390,7 @@ class Model(object):
self._check_concurrency(cr, ids, context)
self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink', context=context)
self.pool.get('ir.model.access').check(cr, uid, self._name, 'unlink')
properties = self.pool.get('ir.property')
domain = [('res_id', '=', False),
@ -3516,7 +3527,7 @@ class Model(object):
ids = [ids]
self._check_concurrency(cr, ids, context)
self.pool.get('ir.model.access').check(cr, user, self._name, 'write', context=context)
self.pool.get('ir.model.access').check(cr, user, self._name, 'write')
result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
@ -3725,7 +3736,7 @@ class Model(object):
"""
if not context:
context = {}
self.pool.get('ir.model.access').check(cr, user, self._name, 'create', context=context)
self.pool.get('ir.model.access').check(cr, user, self._name, 'create')
vals = self._add_missing_default_values(cr, user, vals, context)
@ -4202,7 +4213,7 @@ class Model(object):
"""
if context is None:
context = {}
self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read', context=context)
self.pool.get('ir.model.access').check(cr, access_rights_uid or user, self._name, 'read')
query = self._where_calc(cr, user, args, context=context)
self._apply_ir_rules(cr, user, query, 'read', context=context)

View File

@ -155,6 +155,7 @@ class db(netsvc.ExportService):
def exp_drop(self, db_name):
sql_db.close_db(db_name)
openerp.modules.registry.RegistryManager.clear_caches(db_name)
openerp.netsvc.Agent.cancel(db_name)
logger = netsvc.Logger()
@ -258,7 +259,8 @@ class db(netsvc.ExportService):
def exp_rename(self, old_name, new_name):
sql_db.close_db(old_name)
openerp.netsvc.Agent.cancel(db_name)
openerp.modules.registry.RegistryManager.clear_caches(old_name)
openerp.netsvc.Agent.cancel(old_name)
logger = netsvc.Logger()
db = sql_db.db_connect('template1')

View File

@ -500,7 +500,6 @@ def db_connect(db_name):
def close_db(db_name):
""" You might want to call openerp.netsvc.Agent.cancel(db_name) along this function."""
_Pool.close_all(dsn(db_name))
tools.cache.clean_caches_for_db(db_name)
ct = currentThread()
if hasattr(ct, 'dbname'):
delattr(ct, 'dbname')

openerp/tools/cache.py (new file, 137 lines)
View File

@ -0,0 +1,137 @@
import lru
class ormcache(object):
""" LRU cache decorator for orm methods,
"""
def __init__(self, skiparg=2, size=8192, multi=None, timeout=None):
self.skiparg = skiparg
self.size = size
self.method = None
self.stat_miss = 0
self.stat_hit = 0
self.stat_err = 0
def __call__(self,m):
self.method = m
def lookup(self2, cr, *args):
r = self.lookup(self2, cr, *args)
return r
lookup.clear_cache = self.clear
return lookup
def stat(self):
return "lookup-stats hit=%s miss=%s err=%s ratio=%.1f" % (self.stat_hit,self.stat_miss,self.stat_err, (100*float(self.stat_hit))/(self.stat_miss+self.stat_hit) )
def lru(self, self2):
try:
ormcache = getattr(self2, '_ormcache')
except AttributeError:
ormcache = self2._ormcache = {}
try:
d = ormcache[self.method]
except KeyError:
d = ormcache[self.method] = lru.LRU(self.size)
return d
def lookup(self, self2, cr, *args):
d = self.lru(self2)
key = args[self.skiparg-2:]
try:
r = d[key]
self.stat_hit += 1
return r
except KeyError:
self.stat_miss += 1
value = d[args] = self.method(self2, cr, *args)
return value
except TypeError:
self.stat_err += 1
return self.method(self2, cr, *args)
def clear(self, self2, *args):
""" Remove *args entry from the cache or all keys if *args is undefined
"""
d = self.lru(self2)
if args:
try:
key = args[self.skiparg-2:]
del d[key]
except KeyError:
pass
else:
d.clear()
class ormcache_multi(ormcache):
def __init__(self, skiparg=2, size=8192, multi=3):
super(ormcache_multi,self).__init__(skiparg,size)
self.multi = multi - 2
def lookup(self, self2, cr, *args):
d = self.lru(self2)
args = list(args)
multi = self.multi
ids = args[multi]
r = {}
miss = []
for i in ids:
args[multi] = i
key = tuple(args[self.skiparg-2:])
try:
r[i] = d[key]
self.stat_hit += 1
except Exception:
self.stat_miss += 1
miss.append(i)
if miss:
args[multi] = miss
r.update(self.method(self2, cr, *args))
for i in miss:
args[multi] = i
key = tuple(args[self.skiparg-2:])
d[key] = r[i]
return r
class dummy_cache(object):
""" Cache decorator replacement to actually do no caching.
"""
def __init__(self, *l, **kw):
pass
def __call__(self, fn):
fn.clear_cache = self.clear
return fn
def clear(self, *l, **kw):
pass
if __name__ == '__main__':
class A():
@ormcache()
def m(self,a,b):
print "A::m(", self,a,b
return 1
@ormcache_multi(multi=3)
def n(self,cr,uid,ids):
print "m", self,cr,uid,ids
return dict([(i,i) for i in ids])
a=A()
r=a.m(1,2)
r=a.m(1,2)
r=a.n("cr",1,[1,2,3,4])
r=a.n("cr",1,[1,2])
print r
for i in a._ormcache:
print a._ormcache[i].d
a.n.clear_cache(a,1,1)
r=a.n("cr",1,[1,2])
print r
r=a.n("cr",1,[1,2])
# For backward compatibility
cache = ormcache
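Hedged note on the clear_cache() attribute installed by the decorator (inferred from clear() above): called with just the model or instance it empties that method's whole LRU, while extra positional arguments select a single key, which matches how the call sites were rewritten across this commit. For example:

    from openerp.tools.cache import ormcache

    class Demo(object):
        @ormcache()
        def double(self, cr, x):
            return 2 * x

    d = Demo()
    d.double('cr', 21)             # cached under the key (21,)
    d.double.clear_cache(d, 21)    # drop that single entry
    d.double.clear_cache(d)        # drop every entry cached for this instance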

View File

@ -12,7 +12,7 @@ class LRUNode(object):
self.me = me
self.next = None
class LRU:
class LRU(object):
"""
Implementation of a length-limited O(1) LRU queue.
Built for and used by PyPE:
@ -113,3 +113,8 @@ class LRU:
del self[key]
return v
@synchronized()
def clear(self):
self.d = {}
self.first = None
self.last = None
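A short usage sketch of the queue and its new clear() method (assuming openerp.tools.lru is importable; only operations that appear elsewhere in this commit are used):

    from openerp.tools.lru import LRU

    d = LRU(2)            # keep at most two entries
    d['a'] = 1
    d['b'] = 2
    d['c'] = 3            # 'a', the least recently used entry, is evicted
    print 'a' in d        # False
    print d.keys()        # the two surviving keys
    d.clear()             # new in this change: reset the whole structure
    print d.keys()        # []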

View File

@ -60,7 +60,7 @@ except ImportError:
import openerp.loglevels as loglevels
from config import config
from lru import LRU
from cache import *
# get_encodings, ustr and exception_to_unicode were originally from tools.misc.
# There are moved to loglevels until we refactor tools.
@ -628,7 +628,6 @@ class UpdateableDict(local):
def __ne__(self, y):
return self.dict.__ne__(y)
class currency(float):
""" Deprecate
@ -650,163 +649,6 @@ class currency(float):
# display_value = int(self*(10**(-self.accuracy))/self.rounding)*self.rounding/(10**(-self.accuracy))
# return str(display_value)
def is_hashable(h):
try:
hash(h)
return True
except TypeError:
return False
class dummy_cache(object):
""" Cache decorator replacement to actually do no caching.
This can be useful to benchmark and/or track memory leak.
"""
def __init__(self, timeout=None, skiparg=2, multi=None, size=8192):
pass
def clear(self, dbname, *args, **kwargs):
pass
@classmethod
def clean_caches_for_db(cls, dbname):
pass
def __call__(self, fn):
fn.clear_cache = self.clear
return fn
class real_cache(object):
"""
Use it as a decorator of the function you plan to cache
Timeout: 0 = no timeout, otherwise in seconds
"""
__caches = []
def __init__(self, timeout=None, skiparg=2, multi=None, size=8192):
assert skiparg >= 2 # at least self and cr
if timeout is None:
self.timeout = config['cache_timeout']
else:
self.timeout = timeout
self.skiparg = skiparg
self.multi = multi
self.lasttime = time.time()
self.cache = LRU(size) # TODO take size from config
self.fun = None
cache.__caches.append(self)
def _generate_keys(self, dbname, kwargs2):
"""
Generate keys depending of the arguments and the self.mutli value
"""
def to_tuple(d):
pairs = d.items()
pairs.sort(key=lambda (k,v): k)
for i, (k, v) in enumerate(pairs):
if isinstance(v, dict):
pairs[i] = (k, to_tuple(v))
if isinstance(v, (list, set)):
pairs[i] = (k, tuple(v))
elif not is_hashable(v):
pairs[i] = (k, repr(v))
return tuple(pairs)
if not self.multi:
key = (('dbname', dbname),) + to_tuple(kwargs2)
yield key, None
else:
multis = kwargs2[self.multi][:]
for id in multis:
kwargs2[self.multi] = (id,)
key = (('dbname', dbname),) + to_tuple(kwargs2)
yield key, id
def _unify_args(self, *args, **kwargs):
# Update named arguments with positional argument values (without self and cr)
kwargs2 = self.fun_default_values.copy()
kwargs2.update(kwargs)
kwargs2.update(dict(zip(self.fun_arg_names, args[self.skiparg-2:])))
return kwargs2
def clear(self, dbname, *args, **kwargs):
"""clear the cache for database dbname
if *args and **kwargs are both empty, clear all the keys related to this database
"""
if not args and not kwargs:
keys_to_del = [key for key in self.cache.keys() if key[0][1] == dbname]
else:
kwargs2 = self._unify_args(*args, **kwargs)
keys_to_del = [key for key, _ in self._generate_keys(dbname, kwargs2) if key in self.cache.keys()]
for key in keys_to_del:
self.cache.pop(key)
@classmethod
def clean_caches_for_db(cls, dbname):
for c in cls.__caches:
c.clear(dbname)
def __call__(self, fn):
if self.fun is not None:
raise Exception("Can not use a cache instance on more than one function")
self.fun = fn
argspec = inspect.getargspec(fn)
self.fun_arg_names = argspec[0][self.skiparg:]
self.fun_default_values = {}
if argspec[3]:
self.fun_default_values = dict(zip(self.fun_arg_names[-len(argspec[3]):], argspec[3]))
def cached_result(self2, cr, *args, **kwargs):
if time.time()-int(self.timeout) > self.lasttime:
self.lasttime = time.time()
t = time.time()-int(self.timeout)
old_keys = [key for key in self.cache.keys() if self.cache[key][1] < t]
for key in old_keys:
self.cache.pop(key)
kwargs2 = self._unify_args(*args, **kwargs)
result = {}
notincache = {}
for key, id in self._generate_keys(cr.dbname, kwargs2):
if key in self.cache:
result[id] = self.cache[key][0]
else:
notincache[id] = key
if notincache:
if self.multi:
kwargs2[self.multi] = notincache.keys()
result2 = fn(self2, cr, *args[:self.skiparg-2], **kwargs2)
if not self.multi:
key = notincache[None]
self.cache[key] = (result2, time.time())
result[None] = result2
else:
for id in result2:
key = notincache[id]
self.cache[key] = (result2[id], time.time())
result.update(result2)
if not self.multi:
return result[None]
return result
cached_result.clear_cache = self.clear
return cached_result
# TODO make it an option
cache = real_cache
def to_xml(s):
return s.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')

View File

@ -458,7 +458,8 @@ def trans_export(lang, modules, buffer, format, cr):
tar.close()
else:
raise Exception(_('Bad file format'))
raise Exception(_('Unrecognized extension: must be one of '
'.csv, .po, or .tgz (received .%s).' % format))
newlang = not bool(lang)
if newlang: