diff --git a/bin/addons/__init__.py b/bin/addons/__init__.py index 7aa8a884f44..43dd256055c 100644 --- a/bin/addons/__init__.py +++ b/bin/addons/__init__.py @@ -138,7 +138,7 @@ def get_module_path(module): if os.path.exists(opj(_ad, module)) or os.path.exists(opj(_ad, '%s.zip' % module)): return opj(_ad, module) - logger.notifyChannel('init', netsvc.LOG_WARNING, 'addon %s: module not found' % (module,)) + logger.notifyChannel('init', netsvc.LOG_WARNING, 'module %s: module not found' % (module,)) return False raise IOError, 'Module not found : %s' % module @@ -216,7 +216,7 @@ def create_graph(module_list, force=None): try: info = eval(tools.file_open(terp_file).read()) except: - logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon %s: eval file %s' % (module, terp_file)) + logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: eval file %s' % (module, terp_file)) raise if info.get('installable', True): packages.append((module, info.get('depends', []), info)) @@ -246,18 +246,25 @@ def create_graph(module_list, force=None): for package in later: unmet_deps = filter(lambda p: p not in graph, dependencies[package]) - logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon %s: Unmet dependencies: %s' % (package, ', '.join(unmet_deps))) + logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: Unmet dependencies: %s' % (package, ', '.join(unmet_deps))) return graph def init_module_objects(cr, module_name, obj_list): pool = pooler.get_pool(cr.dbname) - logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s: creating or updating database tables' % module_name) + logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: creating or updating database tables' % module_name) + todo = [] for obj in obj_list: if hasattr(obj, 'init'): obj.init(cr) - obj._auto_init(cr, {'module': module_name}) + result = obj._auto_init(cr, {'module': module_name}) + if result: + todo += result cr.commit() + todo.sort() + for t in todo: + t[1](cr, *t[2]) + cr.commit() # # Register module named m, if not already registered @@ -266,7 +273,7 @@ def register_class(m): global loaded if m in loaded: return - logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s: registering classes' % m) + logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: registering objects' % m) loaded.append(m) mod_path = get_module_path(m) if not os.path.isfile(mod_path+'.zip'): @@ -380,7 +387,7 @@ class MigrationManager(object): from tools.parse_version import parse_version - parsed_installed_version = parse_version(pkg.latest_version) + parsed_installed_version = parse_version(pkg.installed_version or '') current_version = parse_version(convert_version(pkg.data.get('version', '0'))) versions = _get_migration_versions(pkg) @@ -401,20 +408,20 @@ class MigrationManager(object): mod = None try: mod = imp.load_source(name, pyfile, fp) - logger.notifyChannel('migration', netsvc.LOG_INFO, 'addon %(addon)s: Running migration %(version)s %(name)s"' % mergedict({'name': mod.__name__},strfmt)) - mod.migrate(self.cr, pkg.latest_version) + logger.notifyChannel('migration', netsvc.LOG_INFO, 'module %(addon)s: Running migration %(version)s %(name)s"' % mergedict({'name': mod.__name__},strfmt)) + mod.migrate(self.cr, pkg.installed_version) except ImportError: - logger.notifyChannel('migration', netsvc.LOG_ERROR, 'addon %(addon)s: Unable to load %(stage)-migration file %(file)s' % mergedict({'file': opj(modulename,pyfile)}, strfmt)) + logger.notifyChannel('migration', netsvc.LOG_ERROR, 'module %(addon)s: Unable to load %(stage)-migration file %(file)s' % 
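[Review note] On the init_module_objects() hunk above: _auto_init() may now hand back deferred work that is collected, sorted and executed only after every object of the module has created its tables. Judging from todo.sort() and t[1](cr, *t[2]), each item is a (priority, callable, args) tuple; the function and argument names in this sketch are illustrative, not part of the patch.

    def run_deferred_init(cr, module_name, obj_list):
        todo = []
        for obj in obj_list:
            if hasattr(obj, 'init'):
                obj.init(cr)
            result = obj._auto_init(cr, {'module': module_name})
            if result:
                todo += result          # e.g. [(10, create_index, ('res_partner',))]
        cr.commit()                     # schema changes land first
        todo.sort()                     # lowest priority value runs first
        for priority, func, args in todo:
            func(cr, *args)             # deferred steps see every table of the module
        cr.commit()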
mergedict({'file': opj(modulename,pyfile)}, strfmt)) raise except AttributeError: - logger.notifyChannel('migration', netsvc.LOG_ERROR, 'addon %(addon)s: Each %(stage)-migration file must have a "migrate(cr, installed_version)" function' % strfmt) + logger.notifyChannel('migration', netsvc.LOG_ERROR, 'module %(addon)s: Each %(stage)-migration file must have a "migrate(cr, installed_version)" function' % strfmt) except: raise fp.close() del mod -def load_module_graph(cr, graph, status=None, **kwargs): +def load_module_graph(cr, graph, status=None, check_access_rules=True, **kwargs): # **kwargs is passed directly to convert_xml_import if not status: status={} @@ -427,13 +434,14 @@ def load_module_graph(cr, graph, status=None, **kwargs): # update the graph with values from the database (if exist) ## First, we set the default values for each package in graph - additional_data = dict.fromkeys([p.name for p in graph], {'id': 0, 'state': 'uninstalled', 'dbdemo': False, 'latest_version': None}) + additional_data = dict.fromkeys([p.name for p in graph], {'id': 0, 'state': 'uninstalled', 'dbdemo': False, 'installed_version': None}) ## Then we get the values from the database - cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version' + cr.execute('SELECT name, id, state, demo AS dbdemo, latest_version AS installed_version' ' FROM ir_module_module' ' WHERE name in (%s)' % (','.join(['%s'] * len(graph))), additional_data.keys() ) + ## and we update the default values with values from the database additional_data.update(dict([(x.pop('name'), x) for x in cr.dictfetchall()])) @@ -443,6 +451,7 @@ def load_module_graph(cr, graph, status=None, **kwargs): migrations = MigrationManager(cr, graph) + check_rules = False for package in graph: status['progress'] = (float(statusi)+0.1)/len(graph) m = package.name @@ -451,19 +460,20 @@ def load_module_graph(cr, graph, status=None, **kwargs): migrations.migrate_module(package, 'pre') register_class(m) - logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s' % m) + logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s loading objects' % m) modules = pool.instanciate(m, cr) idref = {} status['progress'] = (float(statusi)+0.4)/len(graph) if hasattr(package, 'init') or hasattr(package, 'update') or package.state in ('to install', 'to upgrade'): + check_rules = True init_module_objects(cr, m, modules) for kind in ('init', 'update'): for filename in package.data.get('%s_xml' % kind, []): mode = 'update' if hasattr(package, 'init') or package.state=='to install': mode = 'init' - logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s: loading %s' % (m, filename)) + logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: loading %s' % (m, filename)) name, ext = os.path.splitext(filename) fp = tools.file_open(opj(m, filename)) if ext == '.csv': @@ -481,17 +491,18 @@ def load_module_graph(cr, graph, status=None, **kwargs): status['progress'] = (float(statusi)+0.75)/len(graph) for xml in package.data.get('demo_xml', []): name, ext = os.path.splitext(xml) - logger.notifyChannel('init', netsvc.LOG_INFO, 'addon %s: loading %s' % (m, xml)) + logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: loading %s' % (m, xml)) fp = tools.file_open(opj(m, xml)) if ext == '.csv': tools.convert_csv_import(cr, m, os.path.basename(xml), fp.read(), idref, noupdate=True) else: tools.convert_xml_import(cr, m, fp, idref, noupdate=True, **kwargs) fp.close() - cr.execute('update ir_module_module set demo=%s where id=%d', (True, mid)) + cr.execute('update ir_module_module set 
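[Review note] On the additional_data initialisation in load_module_graph() above: dict.fromkeys() with a dict default makes every key point at the same dict object. It appears harmless here because the defaults are only ever replaced wholesale by the rows fetched from ir_module_module, never mutated in place, but it is an easy trap:

    defaults = dict.fromkeys(['base', 'sale'], {'state': 'uninstalled'})
    defaults['base']['state'] = 'installed'      # mutates the one shared dict
    print(defaults['sale']['state'])             # prints 'installed'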
demo=%s where id=%s', (True, mid)) package_todo.append(package.name) ver = release.major_version + '.' + package.data.get('version', '1.0') - cr.execute("update ir_module_module set state='installed', latest_version=%s where id=%d", (ver, mid,)) + # update the installed version in database... + cr.execute("update ir_module_module set state='installed', latest_version=%s where id=%s", (ver, mid,)) cr.commit() # Set new modules and dependencies @@ -501,18 +512,18 @@ def load_module_graph(cr, graph, status=None, **kwargs): if modobj: modobj.update_translations(cr, 1, [mid], None) cr.commit() - - migrations.migrate_module(package, 'post') statusi+=1 - cr.execute("""select model,name from ir_model where id not in (select model_id from ir_model_access)""") - for (model,name) in cr.fetchall(): - logger.notifyChannel('init', netsvc.LOG_WARNING, 'addon object %s (%s) has no access rules!' % (model,name)) + if check_access_rules and check_rules: + cr.execute("""select model,name from ir_model where id not in (select model_id from ir_model_access)""") + for (model,name) in cr.fetchall(): + logger.notifyChannel('init', netsvc.LOG_WARNING, 'object %s (%s) has no access rules!' % (model,name)) + pool = pooler.get_pool(cr.dbname) - cr.execute('select * from ir_model where state=%s', ('manual',)) + cr.execute('select model from ir_model where state=%s', ('manual',)) for model in cr.dictfetchall(): pool.get('ir.model').instanciate(cr, 1, model['model'], {}) @@ -527,31 +538,46 @@ def load_modules(db, force_demo=False, status=None, update_module=False): if force_demo: force.append('demo') pool = pooler.get_pool(cr.dbname) + report = tools.assertion_report() if update_module: - for module in tools.config['init']: - cr.execute('update ir_module_module set state=%s where state=%s and name=%s', ('to install', 'uninstalled', module)) - cr.commit() + basegraph = create_graph(['base'], force) + load_module_graph(cr, basegraph, status, check_access_rules=False, report=report) - register_class('base') - pool.instanciate('base', cr) - modobj = pool.get('ir.module.module') + modobj = pool.get('ir.module.module') + logger.notifyChannel('init', netsvc.LOG_INFO, 'updating modules list') + cr.execute("select id from ir_module_module where state in ('to install','to upgrade') and name=%s", ('base',)) + if cr.rowcount: modobj.update_list(cr, 1) - mids = modobj.search(cr, 1, [('state','in',('installed','to install'))]) - for m in modobj.browse(cr, 1, mids): - for dep in m.dependencies_id: - if dep.state=='uninstalled': - modobj.button_install(cr, 1, [m.id]) - - cr.execute("select name from ir_module_module where state in ('installed', 'to install', 'to upgrade','to remove')") + mods = [k for k in tools.config['init'] if tools.config['init'][k]] + if mods: + ids = modobj.search(cr, 1, ['&', ('state', '=', 'uninstalled'), ('name', 'in', mods)]) + if ids: + modobj.button_install(cr, 1, ids) + + mods = [k for k in tools.config['update'] if tools.config['update'][k]] + if mods: + ids = modobj.search(cr, 1, ['&',('state', '=', 'installed'), ('name', 'in', mods)]) + if ids: + modobj.button_upgrade(cr, 1, ids) + + cr.execute("update ir_module_module set state=%s where name=%s", ('installed', 'base')) + cr.execute("select name from ir_module_module where state in ('installed', 'to install', 'to upgrade')") else: - cr.execute("select name from ir_module_module where state in ('installed', 'to upgrade', 'to remove')") + cr.execute("select name from ir_module_module where state in ('installed', 'to upgrade')") module_list = [name for 
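[Review note] On the reworked load_modules() above: instead of flipping ir_module_module states by hand, the bootstrap now loads 'base' through its own graph first and then routes the -i/-u command-line options through the regular button_install / button_upgrade methods. A condensed sketch of that order (names taken from the hunk, glue code illustrative):

    def bootstrap(cr, pool, force, status, report):
        # 1. 'base' alone, with access-rule warnings deferred
        load_module_graph(cr, create_graph(['base'], force), status,
                          check_access_rules=False, report=report)

        modobj = pool.get('ir.module.module')

        # 2. --init modules go through the normal install workflow
        to_install = [name for name, flag in tools.config['init'].items() if flag]
        ids = modobj.search(cr, 1, [('state', '=', 'uninstalled'),
                                    ('name', 'in', to_install)])
        if ids:
            modobj.button_install(cr, 1, ids)

        # 3. --update modules go through the upgrade workflow
        to_update = [name for name, flag in tools.config['update'].items() if flag]
        ids = modobj.search(cr, 1, [('state', '=', 'installed'),
                                    ('name', 'in', to_update)])
        if ids:
            modobj.button_upgrade(cr, 1, ids)

        # 4. 'base' is marked installed before the full graph is loaded
        cr.execute("update ir_module_module set state=%s where name=%s",
                   ('installed', 'base'))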
(name,) in cr.fetchall()] graph = create_graph(module_list, force) - report = tools.assertion_report() + + # the 'base' module has already been updated + base = graph['base'] + base.state = 'installed' + for kind in ('init', 'demo', 'update'): + if hasattr(base, kind): + delattr(base, kind) + load_module_graph(cr, graph, status, report=report) if report.get_report(): - logger.notifyChannel('init', netsvc.LOG_INFO, 'assert: %s' % report) + logger.notifyChannel('init', netsvc.LOG_INFO, report) for kind in ('init', 'demo', 'update'): tools.config[kind]={} @@ -563,29 +589,29 @@ def load_modules(db, force_demo=False, status=None, update_module=False): pool = pooler.get_pool(cr.dbname) cr.execute('select model,res_id from ir_model_data where not noupdate and module=%s order by id desc', (mod_name,)) for rmod,rid in cr.fetchall(): - # - # TO BE Improved: - # I can not use the class_pool has _table could be defined in __init__ - # and I can not use the pool has the module could not be loaded in the pool - # uid = 1 pool.get(rmod).unlink(cr, uid, [rid]) cr.commit() # # TODO: remove menu without actions of childs # - cr.execute('''delete from - ir_ui_menu - where - (id not in (select parent_id from ir_ui_menu where parent_id is not null)) - and - (id not in (select res_id from ir_values where model='ir.ui.menu')) - and - (id not in (select res_id from ir_model_data where model='ir.ui.menu'))''') + while True: + cr.execute('''delete from + ir_ui_menu + where + (id not in (select parent_id from ir_ui_menu where parent_id is not null)) + and + (id not in (select res_id from ir_values where model='ir.ui.menu')) + and + (id not in (select res_id from ir_model_data where model='ir.ui.menu'))''') + if not cr.rowcount: + break + else: + logger.notifyChannel('init', netsvc.LOG_INFO, 'removed %d unused menus' % (cr.rowcount,)) cr.execute("update ir_module_module set state=%s where state in ('to remove')", ('uninstalled', )) cr.commit() - pooler.restart_pool(cr.dbname) + #pooler.restart_pool(cr.dbname) cr.close() diff --git a/bin/addons/base/__terp__.py b/bin/addons/base/__terp__.py index 380fb16c55c..1e2e5f9985d 100644 --- a/bin/addons/base/__terp__.py +++ b/bin/addons/base/__terp__.py @@ -21,9 +21,9 @@ ############################################################################## { "name" : "Base", - "version" : "1.0", + "version" : "1.1", "author" : "Tiny", - "website" : "http://openerp.com", + "website" : "http://www.openerp.com", "category" : "Generic Modules/Base", "description": "The kernel of OpenERP, needed for all installation.", "depends" : [], diff --git a/bin/addons/base/base_menu.xml b/bin/addons/base/base_menu.xml index 1204ab5b42b..84a5bb9e230 100644 --- a/bin/addons/base/base_menu.xml +++ b/bin/addons/base/base_menu.xml @@ -2,7 +2,11 @@ - + + + + + diff --git a/bin/addons/base/base_update.xml b/bin/addons/base/base_update.xml index a8b8e269919..49bc289f337 100644 --- a/bin/addons/base/base_update.xml +++ b/bin/addons/base/base_update.xml @@ -6,7 +6,7 @@ Languages ====================== --> - + @@ -114,7 +113,7 @@ {'active_test': False} - + @@ -152,7 +151,6 @@ - @@ -465,7 +463,7 @@ ir.ui.view form -
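[Review note] On the menu clean-up in load_modules() above: turning the single DELETE into a loop matters because removing a childless, action-less menu can turn its parent into a new candidate; iterating until rowcount is zero reaches the fixed point. Sketch of the idea, given an open cursor:

    def prune_unused_menus(cr):
        removed = 0
        while True:
            cr.execute("""delete from ir_ui_menu
                          where id not in (select parent_id from ir_ui_menu
                                           where parent_id is not null)
                            and id not in (select res_id from ir_values
                                           where model='ir.ui.menu')
                            and id not in (select res_id from ir_model_data
                                           where model='ir.ui.menu')""")
            if not cr.rowcount:
                return removed          # nothing deleted -> no new leaves, stop
            removed += cr.rowcount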
+ @@ -481,7 +479,7 @@ ir.ui.view tree - + @@ -489,7 +487,7 @@ - View + Views ir.actions.act_window ir.ui.view @@ -690,9 +688,9 @@ - - + @@ -720,7 +718,7 @@ - + @@ -769,9 +767,11 @@ - - --> + @@ -787,7 +787,7 @@ - + @@ -934,7 +934,7 @@ - Grant access to menu + Grant Access To Menus ir.ui.menu form @@ -992,7 +992,7 @@ {'active_test': False} - + @@ -1143,15 +1143,16 @@ - + - + + @@ -1159,42 +1160,49 @@ - + - - - + + + + + + + + + - - + - + + - + - + + - - + @@ -1219,7 +1227,7 @@ {'key':'server_action'} - + ir.model.fields.tree @@ -1279,7 +1287,8 @@ form [('type','=','configure')] - + + Main Configuration Wizard diff --git a/bin/addons/base/ir/ir_actions.py b/bin/addons/base/ir/ir_actions.py index a0550e6ff93..7d7255be87f 100644 --- a/bin/addons/base/ir/ir_actions.py +++ b/bin/addons/base/ir/ir_actions.py @@ -34,7 +34,6 @@ class actions(osv.osv): 'name': fields.char('Action Name', required=True, size=64), 'type': fields.char('Action Type', required=True, size=32), 'usage': fields.char('Action Usage', size=32), - 'parent_id': fields.many2one('ir.actions.server', 'Parent Action'), } _defaults = { 'usage': lambda *a: False, @@ -291,7 +290,7 @@ class ir_model_fields(osv.osv): 'complete_name': fields.char('Complete Name', size=64, select=1), } - def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=80): + def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=800): def get_fields(cr, uid, field, rel): result = [] mobj = self.pool.get('ir.model') @@ -366,6 +365,23 @@ server_object_lines() # Actions that are run on the server side # class actions_server(osv.osv): + + def _select_signals(self, cr, uid, context={}): + cr.execute("select distinct t.signal as key, t.signal || ' - [ ' || w.osv || ' ] ' as val from wkf w, wkf_activity a, wkf_transition t "\ + " where w.id = a.wkf_id " \ + " and t.act_from = a.wkf_id " \ + " or t.act_to = a.wkf_id ") + return cr.fetchall() + + def on_trigger_obj_id(self, cr, uid, ids, context={}): + cr.execute("select distinct t.signal as key, t.signal as val from wkf w, wkf_activity a, wkf_transition t "\ + " where w.id = a.wkf_id " \ + " and t.act_from = a.wkf_id " \ + " or t.act_to = a.wkf_id " \ + " and w.osv = %s ", ('account.invoice')) + data = cr.fetchall() + return {"values":{'trigger_name':data}} + _name = 'ir.actions.server' _table = 'ir_act_server' _sequence = 'ir_actions_id_seq' @@ -373,33 +389,31 @@ class actions_server(osv.osv): 'name': fields.char('Action Name', required=True, size=64), 'state': fields.selection([ ('client_action','Client Action'), - ('python','Python Code'), ('dummy','Dummy'), ('trigger','Trigger'), ('email','Email'), ('sms','SMS'), ('object_create','Create Object'), ('object_write','Write Object'), - ('other','Others Actions'), + ('other','Multi Actions'), ], 'Action State', required=True, size=32), 'code': fields.text('Python Code'), 'sequence': fields.integer('Sequence'), 'model_id': fields.many2one('ir.model', 'Object', required=True), 'action_id': fields.many2one('ir.actions.actions', 'Client Action'), - 'trigger_name': fields.char('Trigger Name', size=128), - 'trigger_obj_id': fields.reference('Trigger On', selection=model_get, size=128), + 'trigger_name': fields.selection(_select_signals, string='Trigger Name', size=128), + 'wkf_model_id': fields.many2one('ir.model', 'Workflow on'), + 'trigger_obj_id': fields.many2one('ir.model.fields','Trigger On'), + 'email': fields.many2one('ir.model.fields', 'Contact'), 'message': fields.text('Message', translate=True), - 'address': 
fields.many2one('ir.model.fields', 'Email / Mobile'), + 'mobile': fields.many2one('ir.model.fields', 'Contact'), 'sms': fields.char('SMS', size=160, translate=True), - 'child_ids': fields.one2many('ir.actions.actions', 'parent_id', 'Others Actions'), + 'child_ids': fields.many2many('ir.actions.server', 'rel_server_actions', 'server_id', 'action_id', 'Others Actions'), 'usage': fields.char('Action Usage', size=32), - 'type': fields.char('Report Type', size=32, required=True), - 'srcmodel_id': fields.many2one('ir.model', 'Model'), + 'type': fields.char('Action Type', size=32, required=True), + 'srcmodel_id': fields.many2one('ir.model', 'Model', help="In which object you want to create / write the object if its empty refer to the Object field"), 'fields_lines': fields.one2many('ir.server.object.lines', 'server_id', 'Fields Mapping'), - 'otype': fields.selection([ - ('copy','Create in Same Model'), - ('new','Create in Other Model') - ], 'Create Model', size=32, change_default=True), + 'record_id':fields.many2one('ir.model.fields', 'Record Id', help="privide the field name from where the record id refers, if its empty it will refer to the active id of the object") } _defaults = { 'state': lambda *a: 'dummy', @@ -414,26 +428,49 @@ class actions_server(osv.osv): # - ids # If you plan to return an action, assign: action = {...} """, - 'otype': lambda *a: 'copy', } - def get_field_value(self, cr, uid, action, context): + + def get_email(self, cr, uid, action, context): + logger = netsvc.Logger() obj_pool = self.pool.get(action.model_id.model) id = context.get('active_id') obj = obj_pool.browse(cr, uid, id) fields = None - if '/' in action.address.complete_name: - fields = action.address.complete_name.split('/') - elif '.' in action.address.complete_name: - fields = action.address.complete_name.split('.') + if '/' in action.email.complete_name: + fields = action.email.complete_name.split('/') + elif '.' in action.email.complete_name: + fields = action.email.complete_name.split('.') for field in fields: try: obj = getattr(obj, field) except Exception,e : - logger.notifyChannel('Workflow', netsvc.LOG_ERROR, 'Failed to parse : %s' % (match.group())) + logger.notifyChannel('Workflow', netsvc.LOG_ERROR, 'Failed to parse : %s' % (field)) + + return obj + + + def get_mobile(self, cr, uid, action, context): + logger = netsvc.Logger() + obj_pool = self.pool.get(action.model_id.model) + id = context.get('active_id') + obj = obj_pool.browse(cr, uid, id) + + fields = None + + if '/' in action.mobile.complete_name: + fields = action.mobile.complete_name.split('/') + elif '.' 
in action.mobile.complete_name: + fields = action.mobile.complete_name.split('.') + + for field in fields: + try: + obj = getattr(obj, field) + except Exception,e : + logger.notifyChannel('Workflow', netsvc.LOG_ERROR, 'Failed to parse : %s' % (field)) return obj @@ -443,13 +480,17 @@ class actions_server(osv.osv): obj_pool = self.pool.get(action.model_id.model) id = context.get('active_id') obj = obj_pool.browse(cr, uid, id) - return eval(match[2:-2], {'object':obj, 'context': context,'time':time}) - + exp = str(match.group()[2:-2]).strip() + result = eval(exp, {'object':obj, 'context': context,'time':time}) + if result in (None, False): + return str("--------") + return str(result) + com = re.compile('(\[\[.+?\]\])') message = com.sub(merge, keystr) + return message - # # Context should contains: # ids : original ids # id : current id of the object @@ -459,10 +500,12 @@ class actions_server(osv.osv): def run(self, cr, uid, ids, context={}): logger = netsvc.Logger() for action in self.browse(cr, uid, ids, context): + if action.state=='client_action': if not action.action_id: raise osv.except_osv(_('Error'), _("Please specify an action to launch !")) return self.pool.get(action.action_id.type).read(cr, uid, action.action_id.id, context=context) + if action.state=='python': localdict = { 'self': self.pool.get(action.model_id.model), @@ -479,20 +522,24 @@ class actions_server(osv.osv): if action.state == 'email': user = config['email_from'] subject = action.name - address = self.get_field_value(cr, uid, action, context) + address = self.get_email(cr, uid, action, context) if not address: raise osv.except_osv(_('Error'), _("Please specify the Partner Email address !")) + if not user: + raise osv.except_osv(_('Error'), _("Please specify server option --smtp-from !")) + body = self.merge_message(cr, uid, str(action.message), action, context) - if tools.email_send_attach(user, address, subject, body, debug=False) == True: + if tools.email_send(user, [address], subject, body, debug=False, subtype='html') == True: logger.notifyChannel('email', netsvc.LOG_INFO, 'Email successfully send to : %s' % (address)) else: logger.notifyChannel('email', netsvc.LOG_ERROR, 'Failed to send email to : %s' % (address)) if action.state == 'trigger': wf_service = netsvc.LocalService("workflow") - res = str(action.trigger_obj_id).split(',') - model = res[0] - id = res[1] + model = action.wkf_model_id.model + obj_pool = self.pool.get(action.model_id.model) + res_id = self.pool.get(action.model_id.model).read(cr, uid, [context.get('active_id')], [action.trigger_obj_id.name]) + id = res_id [0][action.trigger_obj_id.name] wf_service.trg_validate(uid, model, int(id), action.trigger_name, cr) if action.state == 'sms': @@ -500,27 +547,20 @@ class actions_server(osv.osv): # for the sms gateway user / password api_id = '' text = action.sms - to = self.get_field_value(cr, uid, action, context) + to = self.get_mobile(cr, uid, action, context) #TODO: Apply message mearge with the field if tools.sms_send(user, password, api_id, text, to) == True: logger.notifyChannel('sms', netsvc.LOG_INFO, 'SMS successfully send to : %s' % (action.address)) else: logger.notifyChannel('sms', netsvc.LOG_ERROR, 'Failed to send SMS to : %s' % (action.address)) + if action.state == 'other': - localdict = { - 'self': self.pool.get(action.model_id.model), - 'context': context, - 'time': time, - 'ids': ids, - 'cr': cr, - 'uid': uid - } - + res = None for act in action.child_ids: - code = """action = {'model':'%s','type':'%s', %s}""" % 
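[Review note] On merge_message() above: the [[ ... ]] placeholders in email/SMS templates are evaluated against the browsed record, and empty results now render as "--------" instead of the literal None/False. A self-contained sketch of the substitution; the Partner class and its field are invented for the example:

    import re
    import time

    def merge_message(template, record, context=None):
        def merge(match):
            exp = match.group()[2:-2].strip()
            result = eval(exp, {'object': record, 'context': context or {}, 'time': time})
            if result in (None, False):
                return '--------'               # placeholder for empty values
            return str(result)
        return re.compile(r'(\[\[.+?\]\])').sub(merge, template)

    class Partner(object):
        name = 'Agrolait'

    print(merge_message('Dear [[ object.name ]],', Partner()))
    # -> Dear Agrolait,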
(action.model_id.model, act.type, act.usage) - exec code in localdict - if 'action' in localdict: - return localdict['action'] + result = self.run(cr, uid, [act.id], context) + if result: + res = result + return res if action.state == 'object_write': res = {} @@ -533,9 +573,19 @@ class actions_server(osv.osv): else: expr = exp.value res[exp.col1.name] = expr - obj_pool = self.pool.get(action.model_id.model) - obj_pool.write(cr, uid, [context.get('active_id')], res) + if not action.record_id: + if not action.srcmodel_id: + obj_pool = self.pool.get(action.model_id.model) + obj_pool.write(cr, uid, [context.get('active_id')], res) + else: + obj_pool = self.pool.get(action.srcmodel_id.model) + obj_pool.write(cr, uid, [context.get('active_id')], res) + else: + obj_pool = self.pool.get(action.srcmodel_id.model) + id = self.pool.get(action.model_id.model).read(cr, uid, [context.get('active_id')], [action.record_id.name]) + obj_pool.write(cr, uid, [int(id[0][action.record_id.name])], res) + if action.state == 'object_create': res = {} for exp in action.fields_lines: @@ -549,13 +599,10 @@ class actions_server(osv.osv): res[exp.col1.name] = expr obj_pool = None - if action.state == 'object_create' and action.otype == 'new': - obj_pool = self.pool.get(action.srcmodel_id.model) - obj_pool.create(cr, uid, res) - else: - obj_pool = self.pool.get(action.model_id.model) - id = context.get('active_id') - obj_pool.copy(cr, uid, id, res) + res_id = False + obj_pool = self.pool.get(action.srcmodel_id.model) + res_id = obj_pool.create(cr, uid, res) + self.pool.get(action.model_id.model).write(cr, uid, [context.get('active_id')], {action.record_id.name:res_id}) return False actions_server() diff --git a/bin/addons/base/ir/ir_attachment.py b/bin/addons/base/ir/ir_attachment.py index e1a04a5f85a..da603202a1a 100644 --- a/bin/addons/base/ir/ir_attachment.py +++ b/bin/addons/base/ir/ir_attachment.py @@ -25,20 +25,16 @@ from osv.orm import except_orm import tools class ir_attachment(osv.osv): - def check(self, cr, uid, ids, mode): if not ids: return ima = self.pool.get('ir.model.access') if isinstance(ids, (int, long)): ids = [ids] - objs = self.browse(cr, uid, ids) or [] - for o in objs: - if o and o.res_model: - ima.check(cr, uid, o.res_model, mode) - - check = tools.cache()(check) - + cr.execute('select distinct res_model from ir_attachment where id in ('+','.join(map(str, ids))+')') + for obj in cr.fetchall(): + ima.check(cr, uid, obj[0], mode) + def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False): ids = super(ir_attachment, self).search(cr, uid, args, offset=offset, @@ -83,24 +79,12 @@ class ir_attachment(osv.osv): self.pool.get('ir.model.access').check(cr, uid, values['res_model'], 'create') return super(ir_attachment, self).create(cr, uid, values, *args, **kwargs) - def clear_cache(self): - self.check() - def action_get(self, cr, uid, context=None): dataobj = self.pool.get('ir.model.data') data_id = dataobj._get_id(cr, 1, 'base', 'action_attachment') res_id = dataobj.browse(cr, uid, data_id, context).res_id return self.pool.get('ir.actions.act_window').read(cr, uid, res_id, [], context) - def __init__(self, *args, **kwargs): - r = super(ir_attachment, self).__init__(*args, **kwargs) - self.pool.get('ir.model.access').register_cache_clearing_method(self._name, 'clear_cache') - return r - - def __del__(self): - self.pool.get('ir.model.access').unregister_cache_clearing_method(self._name, 'clear_cache') - return super(ir_attachment, self).__del__() - def _get_preview(self, 
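[Review note] On the ir_attachment.check() rewrite above: the cached browse()-based check is replaced by one DISTINCT query on res_model followed by a delegation to ir.model.access.check. Interpolating the ids into the IN clause is only safe because they are integers; a parameterised variant would be the defensive choice. A sketch of the new shape:

    def check_attachment_access(cr, uid, pool, ids, mode):
        if not ids:
            return
        if isinstance(ids, (int, long)):
            ids = [ids]
        ima = pool.get('ir.model.access')
        cr.execute('select distinct res_model from ir_attachment where id in ('
                   + ','.join(map(str, ids)) + ')')
        for (res_model,) in cr.fetchall():
            # raises if uid lacks the requested mode on that model
            ima.check(cr, uid, res_model, mode)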
cr, uid, ids, name, arg, context=None): result = {} if context is None: diff --git a/bin/addons/base/ir/ir_cron.py b/bin/addons/base/ir/ir_cron.py index 31ad86612ad..f20381fc6e3 100644 --- a/bin/addons/base/ir/ir_cron.py +++ b/bin/addons/base/ir/ir_cron.py @@ -101,7 +101,7 @@ class ir_cron(osv.osv, netsvc.Agent): addsql='' if not numbercall: addsql = ', active=False' - cr.execute("update ir_cron set nextcall=%s, numbercall=%d"+addsql+" where id=%d", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id'])) + cr.execute("update ir_cron set nextcall=%s, numbercall=%s"+addsql+" where id=%s", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id'])) cr.commit() finally: cr.close() diff --git a/bin/addons/base/ir/ir_model.py b/bin/addons/base/ir/ir_model.py index 91360668243..40ce882e280 100644 --- a/bin/addons/base/ir/ir_model.py +++ b/bin/addons/base/ir/ir_model.py @@ -193,7 +193,9 @@ class ir_model_fields(osv.osv): _columns = { 'name': fields.char('Name', required=True, size=64, select=1), 'model': fields.char('Object Name', size=64, required=True), + 'relation_id':fields.many2one('ir.model', 'Object Relation'), 'relation': fields.char('Object Relation', size=64), + 'relation_field_id':fields.many2one('ir.model.fields', 'Relation Field'), 'relation_field': fields.char('Relation Field', size=64), 'model_id': fields.many2one('ir.model', 'Object id', required=True, select=True, ondelete='cascade'), 'field_description': fields.char('Field Label', required=True, size=256), @@ -231,8 +233,26 @@ class ir_model_fields(osv.osv): # MAY BE ADD A ALTER TABLE DROP ? # return super(ir_model_fields, self).unlink(cr, user, ids, context) - + + def write(self, cr, uid, ids, vals, context=None): + res = False + if 'relation_id' in vals: + model_data = self.pool.get('ir.model').browse(cr, uid, vals['relation_id']) + vals['relation'] = model_data.model + if 'relation_field_id' in vals: + field_data = self.pool.get('ir.model.fields').browse(cr, uid, vals['relation_field_id']) + vals['relation'] = field_data.name + + res = super(ir_model_fields, self).write(cr, uid, ids, vals, context) + return res + def create(self, cr, user, vals, context=None): + if 'relation_id' in vals: + model_data = self.pool.get('ir.model').browse(cr,user,vals['relation_id']) + vals['relation']=model_data.model + if 'relation_field_id' in vals: + field_data = self.pool.get('ir.model.fields').browse(cr, uid, vals['relation_field_id']) + vals['relation_field'] = field_data.name if 'model_id' in vals: model_data=self.pool.get('ir.model').browse(cr,user,vals['model_id']) vals['model']=model_data.model @@ -266,7 +286,7 @@ class ir_model_access(osv.osv): if not grouparr: return False - cr.execute("select 1 from res_groups_users_rel where uid=%d and gid in(select res_id from ir_model_data where module=%s and name=%s)", (uid, grouparr[0], grouparr[1],)) + cr.execute("select 1 from res_groups_users_rel where uid=%s and gid in(select res_id from ir_model_data where module=%s and name=%s)", (uid, grouparr[0], grouparr[1],)) return bool(cr.fetchone()) def check_group(self, cr, uid, model, mode, group_ids): @@ -285,7 +305,7 @@ class ir_model_access(osv.osv): cr.execute("SELECT perm_" + mode + " " " FROM ir_model_access a " " JOIN ir_model m ON (m.id = a.model_id) " - " WHERE m.model = %s AND a.group_id = %d", (model_name, group_id) + " WHERE m.model = %s AND a.group_id = %s", (model_name, group_id) ) r = cr.fetchone() if r is None: @@ -415,7 +435,7 @@ class ir_model_data(osv.osv): ids = self.search(cr, uid, 
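[Review note] On the ir.model.fields overrides above: write() and create() resolve the new relation_id / relation_field_id many2one values back into the legacy relation / relation_field text columns. Two details are worth a second look: create() references uid although its signature receives user, and the relation_field_id branch of write() assigns vals['relation'] rather than vals['relation_field']. A small helper sketching the presumably intended mapping:

    def _sync_relation_columns(self, cr, uid, vals):
        # Presumed intent: keep the char columns aligned with the new many2one fields.
        if vals.get('relation_id'):
            model = self.pool.get('ir.model').browse(cr, uid, vals['relation_id'])
            vals['relation'] = model.model
        if vals.get('relation_field_id'):
            field = self.pool.get('ir.model.fields').browse(cr, uid, vals['relation_field_id'])
            vals['relation_field'] = field.name
        return vals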
[('module','=',module),('name','=', xml_id)]) assert len(ids)==1, '%d reference(s) to %s.%s. You should have one and only one !' % (len(ids), module, xml_id) return ids[0] - _get_id = tools.cache()(_get_id) + _get_id = tools.cache(skiparg=2)(_get_id) def _update_dummy(self,cr, uid, model, module, xml_id=False, store=True): if not xml_id: @@ -442,10 +462,10 @@ class ir_model_data(osv.osv): cr.execute('select id,res_id from ir_model_data where module=%s and name=%s', (module,xml_id)) results = cr.fetchall() for action_id2,res_id2 in results: - cr.execute('select id from '+self.pool.get(model)._table+' where id=%d', (res_id2,)) + cr.execute('select id from '+self.pool.get(model)._table+' where id=%s', (res_id2,)) result3 = cr.fetchone() if not result3: - cr.execute('delete from ir_model_data where id=%d', (action_id2,)) + cr.execute('delete from ir_model_data where id=%s', (action_id2,)) else: res_id,action_id = res_id2,action_id2 @@ -513,7 +533,7 @@ class ir_model_data(osv.osv): #self.pool.get(model).unlink(cr, uid, ids) for id in ids: self.unlink_mark[(model, id)]=False - cr.execute('delete from ir_model_data where res_id=%d and model=\'%s\'', (id,model)) + cr.execute('delete from ir_model_data where res_id=%s and model=\'%s\'', (id,model)) return True def ir_set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None, xml_id=False): @@ -525,7 +545,7 @@ class ir_model_data(osv.osv): model = models[0] if res_id: - where = ' and res_id=%d' % (res_id,) + where = ' and res_id=%s' % (res_id,) else: where = ' and (res_id is null)' @@ -552,10 +572,10 @@ class ir_model_data(osv.osv): if (module,name) not in self.loads: self.unlink_mark[(model,res_id)] = id if model=='workflow.activity': - cr.execute('select res_type,res_id from wkf_instance where id in (select inst_id from wkf_workitem where act_id=%d)', (res_id,)) + cr.execute('select res_type,res_id from wkf_instance where id in (select inst_id from wkf_workitem where act_id=%s)', (res_id,)) wkf_todo.extend(cr.fetchall()) - cr.execute("update wkf_transition set condition='True', role_id=NULL, signal=NULL,act_to=act_from,act_from=%d where act_to=%d", (res_id,res_id)) - cr.execute("delete from wkf_transition where act_to=%d", (res_id,)) + cr.execute("update wkf_transition set condition='True', role_id=NULL, signal=NULL,act_to=act_from,act_from=%s where act_to=%s", (res_id,res_id)) + cr.execute("delete from wkf_transition where act_to=%s", (res_id,)) for model,id in wkf_todo: wf_service = netsvc.LocalService("workflow") diff --git a/bin/addons/base/ir/ir_report_custom.py b/bin/addons/base/ir/ir_report_custom.py index 64fbeca83ef..15fd0e0b475 100644 --- a/bin/addons/base/ir/ir_report_custom.py +++ b/bin/addons/base/ir/ir_report_custom.py @@ -25,6 +25,7 @@ from osv.orm import browse_null import ir import report.custom from tools.translate import _ +import netsvc class report_custom(osv.osv): _name = 'ir.report.custom' @@ -187,7 +188,7 @@ class report_custom_fields(osv.osv): } } else: - print _("Warning: using a relation field which uses an unknown object") #TODO use the logger + netsvc.Logger().notifyChannel('web-services', netsvc.LOG_WARNING, _("Using a relation field which uses an unknown object")) return {'required': {next_level_field_name: True}} else: return {'domain': {next_level_field_name: []}} diff --git a/bin/addons/base/ir/ir_rule.py b/bin/addons/base/ir/ir_rule.py index ef9e90c6b64..3db6218e442 100644 --- a/bin/addons/base/ir/ir_rule.py +++ b/bin/addons/base/ir/ir_rule.py @@ -148,7 +148,7 @@ class 
ir_rule(osv.osv): WHERE m.model = %s AND (g.id IN (SELECT rule_group_id FROM group_rule_group_rel g_rel JOIN res_groups_users_rel u_rel ON (g_rel.group_id = u_rel.gid) - WHERE u_rel.uid = %d) OR g.global)""", (model_name, uid)) + WHERE u_rel.uid = %s) OR g.global)""", (model_name, uid)) ids = map(lambda x:x[0], cr.fetchall()) if not ids: return '', [] diff --git a/bin/addons/base/ir/ir_sequence.py b/bin/addons/base/ir/ir_sequence.py index 7c160fe98da..23e4ee90c93 100644 --- a/bin/addons/base/ir/ir_sequence.py +++ b/bin/addons/base/ir/ir_sequence.py @@ -69,11 +69,11 @@ class ir_sequence(osv.osv): 'sec': time.strftime('%S'), } - def get_id(self, cr, uid, sequence_id, test='id=%d'): + def get_id(self, cr, uid, sequence_id, test='id=%s'): cr.execute('select id,number_next,number_increment,prefix,suffix,padding from ir_sequence where '+test+' and active=True FOR UPDATE', (sequence_id,)) res = cr.dictfetchone() if res: - cr.execute('update ir_sequence set number_next=number_next+number_increment where id=%d and active=True', (res['id'],)) + cr.execute('update ir_sequence set number_next=number_next+number_increment where id=%s and active=True', (res['id'],)) if res['number_next']: return self._process(res['prefix']) + '%%0%sd' % res['padding'] % res['number_next'] + self._process(res['suffix']) else: diff --git a/bin/addons/base/ir/ir_translation.py b/bin/addons/base/ir/ir_translation.py index c7eb75211fe..0e9e28ddeed 100644 --- a/bin/addons/base/ir/ir_translation.py +++ b/bin/addons/base/ir/ir_translation.py @@ -21,7 +21,6 @@ ############################################################################## from osv import fields, osv -from osv.osv import Cacheable import tools TRANSLATION_TYPE = [ @@ -39,7 +38,7 @@ TRANSLATION_TYPE = [ ('constraint', 'Constraint'), ] -class ir_translation(osv.osv, Cacheable): +class ir_translation(osv.osv): _name = "ir.translation" _log_access = False @@ -48,7 +47,7 @@ class ir_translation(osv.osv, Cacheable): lang_ids = lang_obj.search(cr, uid, [('translatable', '=', True)], context=context) langs = lang_obj.browse(cr, uid, lang_ids, context=context) - res = [(lang.code, unicode(lang.name,'utf-8')) for lang in langs] + res = [(lang.code, lang.name) for lang in langs] for lang_dict in tools.scan_languages(): if lang_dict not in res: res.append(lang_dict) @@ -81,27 +80,19 @@ class ir_translation(osv.osv, Cacheable): cr.commit() def _get_ids(self, cr, uid, name, tt, lang, ids): - translations, to_fetch = {}, [] - for id in ids: - trans = self.get((lang, name, id)) - if trans is not None: - translations[id] = trans - else: - to_fetch.append(id) - if to_fetch: + translations = {} + if ids: cr.execute('select res_id,value ' \ 'from ir_translation ' \ 'where lang=%s ' \ 'and type=%s ' \ 'and name=%s ' \ - 'and res_id in ('+','.join(map(str, to_fetch))+')', + 'and res_id in ('+','.join(map(str, ids))+')', (lang,tt,name)) for res_id, value in cr.fetchall(): - self.add((lang, tt, name, res_id), value) translations[res_id] = value for res_id in ids: if res_id not in translations: - self.add((lang, tt, name, res_id), False) translations[res_id] = False return translations @@ -122,11 +113,8 @@ class ir_translation(osv.osv, Cacheable): }) return len(ids) + @tools.cache(skiparg=3) def _get_source(self, cr, uid, name, tt, lang, source=None): - trans = self.get((lang, tt, name, source)) - if trans is not None: - return trans - if source: #if isinstance(source, unicode): # source = source.encode('utf8') @@ -145,26 +133,9 @@ class ir_translation(osv.osv, Cacheable): 'and name=%s', 
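[Review note] On the ir_translation caching change above: the per-object Cacheable mixin disappears and _get_source is memoised with @tools.cache(skiparg=3), the same pattern as _get_id with skiparg=2 earlier in the diff. skiparg appears to be the number of leading arguments (self, cr and, here, uid) left out of the cache key so the cursor never becomes part of it. A toy decorator illustrating the idea, not the real tools.cache:

    def simple_cache(skiparg=2):
        def decorator(fn):
            memo = {}
            def wrapper(*args, **kwargs):
                key = (args[skiparg:], tuple(sorted(kwargs.items())))
                if key not in memo:
                    memo[key] = fn(*args, **kwargs)
                return memo[key]
            wrapper.clear_cache = memo.clear
            return wrapper
        return decorator

Since the removed unlink/create/write overrides were what cleared the old cache, it is worth confirming that edited translations still invalidate the new memoised entries somewhere.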
(lang, tt, str(name))) res = cr.fetchone() - trad = res and res[0] or '' - self.add((lang, tt, name, source), trad) return trad - def unlink(self, cursor, user, ids, context=None): - self.clear() - return super(ir_translation, self).unlink(cursor, user, ids, - context=context) - - def create(self, cursor, user, vals, context=None): - self.clear() - return super(ir_translation, self).create(cursor, user, vals, - context=context) - - def write(self, cursor, user, ids, vals, context=None): - self.clear() - return super(ir_translation, self).write(cursor, user, ids, vals, - context=context) - ir_translation() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/bin/addons/base/ir/ir_ui_menu.py b/bin/addons/base/ir/ir_ui_menu.py index e44c5f6b65c..0a0b0442652 100644 --- a/bin/addons/base/ir/ir_ui_menu.py +++ b/bin/addons/base/ir/ir_ui_menu.py @@ -41,7 +41,7 @@ class many2many_unique(fields.many2many): for act in values: if act[0]==4: cr.execute('SELECT * FROM '+self._rel+' \ - WHERE '+self._id1+'=%d AND '+self._id2+'=%d', (id, act[1])) + WHERE '+self._id1+'=%s AND '+self._id2+'=%s', (id, act[1])) if cr.fetchall(): val.remove(act) return super(many2many_unique, self).set(cr, obj, id, name, val, user=user, diff --git a/bin/addons/base/ir/ir_ui_view.py b/bin/addons/base/ir/ir_ui_view.py index 7e7fc29459f..afe4990aefc 100644 --- a/bin/addons/base/ir/ir_ui_view.py +++ b/bin/addons/base/ir/ir_ui_view.py @@ -27,14 +27,13 @@ import netsvc import os def _check_xml(self, cr, uid, ids, context={}): - return True for view in self.browse(cr, uid, ids, context): eview = etree.fromstring(view.arch) frng = tools.file_open(os.path.join('base','rng','view.rng')) relaxng = etree.RelaxNG(file=frng) if not relaxng.validate(eview): logger = netsvc.Logger() - logger.notifyChannel('init', netsvc.LOG_ERROR, 'The view do not fit the required schema !') + logger.notifyChannel('init', netsvc.LOG_ERROR, 'The view does not fit the required schema !') logger.notifyChannel('init', netsvc.LOG_ERROR, relaxng.error_log.last_error) return False return True @@ -99,7 +98,7 @@ class view(osv.osv): for rs in result: if rs.get('model') == 'board.board': - cr.execute("select id,arch,ref_id from ir_ui_view_custom where user_id=%d and ref_id=%d", (uid, rs['id'])) + cr.execute("select id,arch,ref_id from ir_ui_view_custom where user_id=%s and ref_id=%s", (uid, rs['id'])) oview = cr.dictfetchall() if oview: rs['arch'] = oview[0]['arch'] diff --git a/bin/addons/base/ir/ir_values.py b/bin/addons/base/ir/ir_values.py index 2beff6c22fb..bd9a5ba2c69 100644 --- a/bin/addons/base/ir/ir_values.py +++ b/bin/addons/base/ir/ir_values.py @@ -211,7 +211,7 @@ class ir_values(osv.osv): #ir_del(cr, uid, x[0]) return False else: - datas = pickle.loads(x[2]) + datas = pickle.loads(str(x[2])) if meta: meta2 = pickle.loads(x[4]) return (x[0],x[1],datas,meta2) diff --git a/bin/addons/base/ir/workflow/print_instance.py b/bin/addons/base/ir/workflow/print_instance.py index 48a958c5945..2f225c44209 100644 --- a/bin/addons/base/ir/workflow/print_instance.py +++ b/bin/addons/base/ir/workflow/print_instance.py @@ -28,7 +28,7 @@ import report,pooler,tools def graph_get(cr, graph, wkf_id, nested=False, workitem={}): import pydot - cr.execute('select * from wkf_activity where wkf_id=%d', (wkf_id,)) + cr.execute('select * from wkf_activity where wkf_id=%s', (wkf_id,)) nodes = cr.dictfetchall() activities = {} actfrom = {} @@ -36,7 +36,7 @@ def graph_get(cr, graph, wkf_id, nested=False, workitem={}): for n in nodes: activities[n['id']] = n if 
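[Review note] On ir_ui_view._check_xml above: dropping the early "return True" re-enables RelaxNG validation of view architectures against base/rng/view.rng. For reference, this is the lxml pattern involved; the inline schema and views are made up for the example:

    from lxml import etree

    rng = etree.RelaxNG(etree.fromstring(
        '<element name="form" xmlns="http://relaxng.org/ns/structure/1.0">'
        '<attribute name="string"/><text/></element>'))
    good = etree.fromstring('<form string="Partner">ok</form>')
    bad = etree.fromstring('<tree/>')
    print(rng.validate(good))   # True
    print(rng.validate(bad))    # False, details in rng.error_log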
n['subflow_id'] and nested: - cr.execute('select * from wkf where id=%d', (n['subflow_id'],)) + cr.execute('select * from wkf where id=%s', (n['subflow_id'],)) wkfinfo = cr.dictfetchone() graph2 = pydot.Cluster('subflow'+str(n['subflow_id']), fontsize='12', label = """\"Subflow: %s\\nOSV: %s\"""" % ( n['name'], wkfinfo['osv']) ) (s1,s2) = graph_get(cr, graph2, n['subflow_id'], nested,workitem) @@ -78,9 +78,9 @@ def graph_get(cr, graph, wkf_id, nested=False, workitem={}): activity_to = actto[t['act_to']][1].get(t['signal'], actto[t['act_to']][0]) graph.add_edge(pydot.Edge( str(activity_from) ,str(activity_to), fontsize='10', **args)) nodes = cr.dictfetchall() - cr.execute('select id from wkf_activity where flow_start=True and wkf_id=%d limit 1', (wkf_id,)) + cr.execute('select id from wkf_activity where flow_start=True and wkf_id=%s limit 1', (wkf_id,)) start = cr.fetchone()[0] - cr.execute("select 'subflow.'||name,id from wkf_activity where flow_stop=True and wkf_id=%d", (wkf_id,)) + cr.execute("select 'subflow.'||name,id from wkf_activity where flow_stop=True and wkf_id=%s", (wkf_id,)) stop = cr.fetchall() stop = (stop[0][1], dict(stop)) return ((start,{}),stop) @@ -88,14 +88,14 @@ def graph_get(cr, graph, wkf_id, nested=False, workitem={}): def graph_instance_get(cr, graph, inst_id, nested=False): workitems = {} - cr.execute('select * from wkf_instance where id=%d', (inst_id,)) + cr.execute('select * from wkf_instance where id=%s', (inst_id,)) inst = cr.dictfetchone() def workitem_get(instance): - cr.execute('select act_id,count(*) from wkf_workitem where inst_id=%d group by act_id', (instance,)) + cr.execute('select act_id,count(*) from wkf_workitem where inst_id=%s group by act_id', (instance,)) workitems = dict(cr.fetchall()) - cr.execute('select subflow_id from wkf_workitem where inst_id=%d', (instance,)) + cr.execute('select subflow_id from wkf_workitem where inst_id=%s', (instance,)) for (subflow_id,) in cr.fetchall(): workitems.update(workitem_get(subflow_id)) return workitems @@ -130,7 +130,7 @@ class report_graph_instance(object): showpage''' else: cr.execute('SELECT id FROM wkf_instance \ - WHERE res_id=%d AND wkf_id=%d \ + WHERE res_id=%s AND wkf_id=%s \ ORDER BY state LIMIT 1', (data['id'], wkfinfo['id'])) inst_id = cr.fetchone() diff --git a/bin/addons/base/ir/workflow/workflow_view.xml b/bin/addons/base/ir/workflow/workflow_view.xml index 20f1cf1685b..b21b996bda1 100644 --- a/bin/addons/base/ir/workflow/workflow_view.xml +++ b/bin/addons/base/ir/workflow/workflow_view.xml @@ -1,7 +1,7 @@ - +
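[Review note] A closing remark on the many cr.execute changes from %d to %s across this diff: with the psycopg drivers the placeholder is not a printf conversion; %s is the single placeholder for every parameter type and the driver adapts and quotes the bound value, so %d fails as soon as the value is a long, None or a string. A minimal runnable sketch, shown with psycopg2 for convenience (the server's own cursor wrapper binds parameters the same way); the DSN and query are illustrative:

    import psycopg2

    def data_count(dsn, model):
        conn = psycopg2.connect(dsn)
        try:
            cur = conn.cursor()
            # one placeholder style for every type; the driver does the quoting
            cur.execute("select count(*) from ir_model_data where model = %s",
                        (model,))
            return cur.fetchone()[0]
        finally:
            conn.close()

    # e.g. data_count('dbname=terp', 'ir.ui.menu')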