convert tabs to 4 spaces

bzr revid: christophe@tinyerp.com-20080722142436-143iu4ryy47w3av0
This commit is contained in:
Christophe Simonis 2008-07-22 16:24:36 +02:00
parent c5c7c2ef07
commit 2dd9723b8b
104 changed files with 15820 additions and 15820 deletions

View File

@ -45,310 +45,310 @@ logger = netsvc.Logger()
opj = os.path.join

# Default addons path (shipped with the server) and the user-configured
# alternate addons path; the alternate one takes priority on sys.path.
_ad = os.path.abspath(opj(tools.config['root_path'], 'addons'))
ad = os.path.abspath(tools.config['addons_path'])

sys.path.insert(1, _ad)
if ad != _ad:
    sys.path.insert(1, ad)
class Graph(dict):
    """Dependency graph of addon modules, mapping module name -> Node."""

    def addNode(self, name, deps):
        # Hang 'name' under its deepest dependency so that iteration
        # (by depth level) always visits dependencies first.
        max_depth, father = 0, None
        for dep_node in [Node(dep, self) for dep in deps]:
            if dep_node.depth >= max_depth:
                father = dep_node
                max_depth = dep_node.depth
        if father:
            father.addChild(name)
        else:
            # No dependency deep enough: the module becomes a root node.
            Node(name, self)

    def __iter__(self):
        # Yield nodes level by level, shallowest first.
        level = 0
        done = Set(self.keys())
        while done:
            level_modules = [(name, module) for name, module in self.items() if module.depth == level]
            for name, module in level_modules:
                done.remove(name)
                yield module
            level += 1
class Singleton(object):
    """One instance per (name, graph) pair: constructing with a name
    already present in the graph returns the existing instance."""

    def __new__(cls, name, graph):
        try:
            inst = graph[name]
        except KeyError:
            inst = object.__new__(cls)
            inst.name = name
            graph[name] = inst
        return inst
class Node(Singleton):
    """A module in the dependency graph (singleton per graph, see Singleton)."""

    def __init__(self, name, graph):
        self.graph = graph
        # __init__ runs on every Node() call, including when __new__
        # returned an already-existing singleton, so only seed the
        # attributes the first time around.
        if not hasattr(self, 'childs'):
            self.childs = []
        if not hasattr(self, 'depth'):
            self.depth = 0

    def addChild(self, name):
        node = Node(name, self.graph)
        node.depth = self.depth + 1
        if node not in self.childs:
            self.childs.append(node)
        # Propagate the action flags of this node to the new child.
        for attr in ('init', 'update', 'demo'):
            if hasattr(self, attr):
                setattr(node, attr, True)
        self.childs.sort(lambda x, y: cmp(x.name, y.name))

    def hasChild(self, name):
        # Direct child, or reachable through any descendant.
        return Node(name, self.graph) in self.childs or \
                bool([child for child in self.childs if child.hasChild(name)])

    def __setattr__(self, name, value):
        super(Singleton, self).__setattr__(name, value)
        # Action flags are mirrored into the server configuration and
        # cascade to every child node.
        if name in ('init', 'update', 'demo'):
            tools.config[name][self.name] = 1
            for child in self.childs:
                setattr(child, name, value)
        # Depth changes cascade too, keeping children one level deeper.
        if name == 'depth':
            for child in self.childs:
                setattr(child, name, value + 1)

    def __iter__(self):
        # Depth-first walk over all descendants.
        return itertools.chain(iter(self.childs), *map(iter, self.childs))

    def __str__(self):
        return self._pprint()

    def _pprint(self, depth=0):
        out = '%s\n' % self.name
        for child in self.childs:
            out += '%s`-> %s' % (' ' * depth, child._pprint(depth + 1))
        return out
def get_module_path(module):
"""Return the path of the given module.
"""
"""Return the path of the given module.
"""
if os.path.exists(opj(ad, module)) or os.path.exists(opj(ad, '%s.zip' % module)):
return opj(ad, module)
if os.path.exists(opj(ad, module)) or os.path.exists(opj(ad, '%s.zip' % module)):
return opj(ad, module)
if os.path.exists(opj(_ad, module)) or os.path.exists(opj(_ad, '%s.zip' % module)):
return opj(_ad, module)
if os.path.exists(opj(_ad, module)) or os.path.exists(opj(_ad, '%s.zip' % module)):
return opj(_ad, module)
raise IOError, 'Module not found : %s' % module
raise IOError, 'Module not found : %s' % module
def get_module_resource(module, *args):
    """Return the full path of a resource of the given module.

    @param module: the module
    @param args: the resource path components
    @return: absolute path to the resource
    """
    return opj(get_module_path(module), *args)
def get_modules():
    """Return the list of module names found in the addons paths
    (alternate path first; duplicates from the default path are dropped)."""
    names = os.listdir(ad)
    seen = [os.path.basename(entry) for entry in names]
    names = names + [entry for entry in os.listdir(_ad) if entry not in seen]
    return names
def create_graph(module_list, force=None):
    """Build the dependency Graph for the given module names.

    @param module_list: module names (a trailing '.zip' is stripped)
    @param force: list of action kinds ('init'/'demo'/'update') to force
                  on every node
    @return: the populated Graph
    """
    if not force:
        force = []
    graph = Graph()
    packages = []

    # Collect (name, depends, terp-info) for every installable module.
    for module in module_list:
        if module[-4:] == '.zip':
            module = module[:-4]
        mod_path = get_module_path(module)
        terp_file = get_module_resource(module, '__terp__.py')
        if os.path.isfile(terp_file) or zipfile.is_zipfile(mod_path):
            try:
                info = eval(tools.file_open(terp_file).read())
            except:
                logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon:%s:eval file %s' % (module, terp_file))
                raise
            if info.get('installable', True):
                packages.append((module, info.get('depends', []), info))

    # Repeatedly pull packages whose dependencies are already in the
    # graph; stop when only packages with unmet dependencies remain.
    current, later = Set([p for p, dep, data in packages]), Set()
    while packages and current > later:
        package, deps, datas = packages[0]

        # if all dependencies of 'package' are already in the graph, add 'package' in the graph
        if reduce(lambda x, y: x and y in graph, deps, True):
            if not package in current:
                packages.pop(0)
                continue
            later.clear()
            current.remove(package)
            graph.addNode(package, deps)
            node = Node(package, graph)
            node.datas = datas
            for kind in ('init', 'demo', 'update'):
                if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force:
                    setattr(node, kind, True)
        else:
            # Dependencies not satisfied yet: retry this package later.
            later.add(package)
            packages.append((package, deps, datas))
        packages.pop(0)

    for package in later:
        logger.notifyChannel('init', netsvc.LOG_ERROR, 'addon:%s:Unmet dependency' % package)

    return graph
def init_module_objects(cr, module_name, obj_list):
    """Create or update the database tables of a module's objects.

    Runs each object's optional init() hook, then its _auto_init(),
    and commits once at the end.
    """
    # NOTE(review): the pool is fetched but not used afterwards here —
    # presumably get_pool() has the side effect of materializing the
    # pool for this database; confirm before removing.
    pool = pooler.get_pool(cr.dbname)
    logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:creating or updating database tables' % module_name)
    for obj in obj_list:
        if hasattr(obj, 'init'):
            obj.init(cr)
        obj._auto_init(cr, {'module': module_name})
    cr.commit()
def load_module_graph(cr, graph, status=None, **kwargs):
    """Load every module of the graph into the database.

    For each package: instantiate its objects, create/update tables when
    needed, import its init/update/demo data files (.csv/.sql/.xml), and
    mark it installed.
    """
    # **kwargs is passed directly to convert_xml_import
    if not status:
        status = {}

    status = status.copy()
    package_todo = []
    statusi = 0
    for package in graph:
        status['progress'] = (float(statusi) + 0.1) / len(graph)
        m = package.name
        logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s' % m)
        sys.stdout.flush()
        pool = pooler.get_pool(cr.dbname)
        modules = pool.instanciate(m, cr)
        cr.execute('select state, demo from ir_module_module where name=%s', (m,))
        (package_state, package_demo) = (cr.rowcount and cr.fetchone()) or ('uninstalled', False)
        idref = {}
        status['progress'] = (float(statusi) + 0.4) / len(graph)
        if hasattr(package, 'init') or hasattr(package, 'update') or package_state in ('to install', 'to upgrade'):
            init_module_objects(cr, m, modules)
        # Import the module's data files, in 'init' then 'update' order.
        for kind in ('init', 'update'):
            for filename in package.datas.get('%s_xml' % kind, []):
                mode = 'update'
                if hasattr(package, 'init') or package_state == 'to install':
                    mode = 'init'
                logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:loading %s' % (m, filename))
                name, ext = os.path.splitext(filename)
                if ext == '.csv':
                    tools.convert_csv_import(cr, m, os.path.basename(filename), tools.file_open(opj(m, filename)).read(), idref, mode=mode)
                elif ext == '.sql':
                    # Run each ';'-separated statement, whitespace-normalized.
                    queries = tools.file_open(opj(m, filename)).read().split(';')
                    for query in queries:
                        new_query = ' '.join(query.split())
                        if new_query:
                            cr.execute(new_query)
                else:
                    tools.convert_xml_import(cr, m, tools.file_open(opj(m, filename)), idref, mode=mode, **kwargs)
        # Demo data: loaded when forced, or when the module had demo data
        # and is not yet fully installed.
        if hasattr(package, 'demo') or (package_demo and package_state != 'installed'):
            status['progress'] = (float(statusi) + 0.75) / len(graph)
            for xml in package.datas.get('demo_xml', []):
                name, ext = os.path.splitext(xml)
                logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:loading %s' % (m, xml))
                if ext == '.csv':
                    tools.convert_csv_import(cr, m, os.path.basename(xml), tools.file_open(opj(m, xml)).read(), idref, noupdate=True)
                else:
                    tools.convert_xml_import(cr, m, tools.file_open(opj(m, xml)), idref, noupdate=True, **kwargs)
            cr.execute('update ir_module_module set demo=%s where name=%s', (True, package.name))
        package_todo.append(package.name)
        cr.execute("update ir_module_module set state='installed' where state in ('to upgrade', 'to install') and name=%s", (package.name,))
        cr.commit()
        statusi += 1

    # Instantiate the user-defined ('manual') models.
    pool = pooler.get_pool(cr.dbname)
    cr.execute('select * from ir_model where state=%s', ('manual',))
    for model in cr.dictfetchall():
        pool.get('ir.model').instanciate(cr, 1, model['model'], {})

    pool.get('ir.model.data')._process_end(cr, 1, package_todo)
    cr.commit()
def register_classes():
    """Import every addon module (plain directory or zip) so that its
    classes register themselves, in dependency order."""
    module_list = get_modules()
    for package in create_graph(module_list):
        m = package.name
        logger.notifyChannel('init', netsvc.LOG_INFO, 'addon:%s:registering classes' % m)
        sys.stdout.flush()

        mod_path = get_module_path(m)
        if not os.path.isfile(mod_path + '.zip'):
            # XXX must restrict to only addons paths
            imp.load_module(m, *imp.find_module(m))
        else:
            # Zipped module: import it through zipimport instead.
            import zipimport
            try:
                zimp = zipimport.zipimporter(mod_path + '.zip')
                zimp.load_module(m)
            except zipimport.ZipImportError:
                logger.notifyChannel('init', netsvc.LOG_ERROR, 'Couldn\'t find module %s' % m)
def load_modules(db, force_demo=False, status=None, update_module=False):
    """Load (and optionally update/remove) the modules of a database.

    @param db: database connection (provides cursor())
    @param force_demo: force loading of demo data
    @param status: progress dict, updated in place by load_module_graph
    @param update_module: also process modules flagged 'to install' /
                          'to remove'
    """
    if not status:
        status = {}
    cr = db.cursor()
    force = []
    if force_demo:
        force.append('demo')
    if update_module:
        cr.execute("select name from ir_module_module where state in ('installed', 'to install', 'to upgrade','to remove')")
    else:
        cr.execute("select name from ir_module_module where state in ('installed', 'to upgrade', 'to remove')")
    module_list = [name for (name,) in cr.fetchall()]
    graph = create_graph(module_list, force)
    report = tools.assertion_report()
    load_module_graph(cr, graph, status, report=report)
    if report.get_report():
        logger.notifyChannel('init', netsvc.LOG_INFO, 'assert:%s' % report)

    # One-shot action flags: clear them once the load is done.
    for kind in ('init', 'demo', 'update'):
        tools.config[kind] = {}

    cr.commit()
    if update_module:
        # Remove the data records of modules flagged 'to remove'.
        cr.execute("select id,name from ir_module_module where state in ('to remove')")
        for mod_id, mod_name in cr.fetchall():
            pool = pooler.get_pool(cr.dbname)
            cr.execute('select model,res_id from ir_model_data where not noupdate and module=%s order by id desc', (mod_name,))
            for rmod, rid in cr.fetchall():
                #
                # TO BE Improved:
                #   I can not use the class_pool has _table could be defined in __init__
                #   and I can not use the pool has the module could not be loaded in the pool
                #
                uid = 1
                pool.get(rmod).unlink(cr, uid, [rid])
            cr.commit()
        #
        # TODO: remove menu without actions of childs
        #
        cr.execute('''delete from
                ir_ui_menu
            where
                (id not in (select parent_id from ir_ui_menu where parent_id is not null))
            and
                (id not in (select res_id from ir_values where model='ir.ui.menu'))
            and
                (id not in (select res_id from ir_model_data where model='ir.ui.menu'))''')

        cr.execute("update ir_module_module set state=%s where state in ('to remove')", ('uninstalled', ))
        cr.commit()
        pooler.restart_pool(cr.dbname)
    cr.close()

View File

@ -26,44 +26,44 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
###############################################################################
{
    "name": "Base",
    "version": "1.0",
    "author": "Tiny",
    "website": "http://tinyerp.com",
    "category": "Generic Modules/Base",
    "description": "The kernel of Tiny ERP, needed for all installation.",
    "depends": [],
    "init_xml": [
        "base_data.xml",
        "base_menu.xml",
    ],
    "demo_xml": [
        "base_demo.xml",
        "res/partner/partner_demo.xml",
        "res/partner/crm_demo.xml",
    ],
    "update_xml": [
        "base_update.xml",
        "ir/wizard/wizard_menu_view.xml",
        "ir/ir.xml",
        "ir/workflow/workflow_view.xml",
        "module/module_data.xml",
        "module/module_wizard.xml",
        "module/module_view.xml",
        "module/module_report.xml",
        "res/res_request_view.xml",
        "res/res_lang_view.xml",
        "res/partner/partner_report.xml",
        "res/partner/partner_view.xml",
        "res/partner/partner_wizard.xml",
        "res/bank_view.xml",
        "res/country_view.xml",
        "res/res_currency_view.xml",
        "res/partner/crm_view.xml",
        "res/partner/partner_data.xml",
        "res/ir_property_view.xml",
        "base_security.xml",
    ],
    "active": True,
    "installable": True,
}

View File

@ -32,263 +32,263 @@ import tools
import time
class actions(osv.osv):
    # Base table shared by all action types.
    _name = 'ir.actions.actions'
    _table = 'ir_actions'
    _columns = {
        'name': fields.char('Action Name', required=True, size=64),
        'type': fields.char('Action Type', required=True, size=32),
        'usage': fields.char('Action Usage', size=32),
        'parent_id': fields.many2one('ir.actions.server', 'Parent Action'),
    }
    _defaults = {
        'usage': lambda *a: False,
    }
actions()
class report_custom(osv.osv):
    # Custom (database-defined) reports; rows live in the shared
    # ir_actions id sequence.
    _name = 'ir.actions.report.custom'
    _table = 'ir_act_report_custom'
    _sequence = 'ir_actions_id_seq'
    _columns = {
        'name': fields.char('Report Name', size=64, required=True, translate=True),
        'type': fields.char('Report Type', size=32, required=True),
        'model': fields.char('Model', size=64, required=True),
        'report_id': fields.integer('Report Ref.', required=True),
        'usage': fields.char('Action Usage', size=32),
        'multi': fields.boolean('On multiple doc.', help="If set to true, the action will not be displayed on the right toolbar of a form views.")
    }
    _defaults = {
        'multi': lambda *a: False,
        'type': lambda *a: 'ir.actions.report.custom',
    }
report_custom()
class report_xml(osv.osv):

    def _report_content(self, cursor, user, ids, name, arg, context=None):
        # Function-field getter for report_{rml,sxw}_content: return the
        # stored '<name>_data' blob, or fall back to reading the file
        # referenced by the matching path column (name minus '_content').
        res = {}
        for report in self.browse(cursor, user, ids, context=context):
            data = report[name + '_data']
            if not data and report[name[:-8]]:
                try:
                    fp = tools.file_open(report[name[:-8]], mode='rb')
                    data = fp.read()
                except:
                    data = False
            res[report.id] = data
        return res

    def _report_content_inv(self, cursor, user, id, name, value, arg, context=None):
        # Inverse setter: store the content into the '<name>_data' column.
        self.write(cursor, user, id, {name + '_data': value}, context=context)

    def _report_sxw(self, cursor, user, ids, name, arg, context=None):
        # Derive the .sxw path from the .rml path, if any.
        res = {}
        for report in self.browse(cursor, user, ids, context=context):
            if report.report_rml:
                res[report.id] = report.report_rml.replace('.rml', '.sxw')
            else:
                res[report.id] = False
        return res

    _name = 'ir.actions.report.xml'
    _table = 'ir_act_report_xml'
    _sequence = 'ir_actions_id_seq'
    _columns = {
        'name': fields.char('Name', size=64, required=True, translate=True),
        'type': fields.char('Report Type', size=32, required=True),
        'model': fields.char('Model', size=64, required=True),
        'report_name': fields.char('Internal Name', size=64, required=True),
        'report_xsl': fields.char('XSL path', size=256),
        'report_xml': fields.char('XML path', size=256),
        'report_rml': fields.char('RML path', size=256,
            help="The .rml path of the file or NULL if the content is in report_rml_content"),
        'report_sxw': fields.function(_report_sxw, method=True, type='char',
            string='SXW path'),
        'report_sxw_content_data': fields.binary('SXW content'),
        'report_rml_content_data': fields.binary('RML content'),
        'report_sxw_content': fields.function(_report_content,
            fnct_inv=_report_content_inv, method=True,
            type='binary', string='SXW content',),
        'report_rml_content': fields.function(_report_content,
            fnct_inv=_report_content_inv, method=True,
            type='binary', string='RML content'),
        'auto': fields.boolean('Automatic XSL:RML', required=True),
        'usage': fields.char('Action Usage', size=32),
        'header': fields.boolean('Add RML header',
            help="Add or not the coporate RML header"),
        'multi': fields.boolean('On multiple doc.',
            help="If set to true, the action will not be displayed on the right toolbar of a form views."),
        'report_type': fields.selection([
            ('pdf', 'pdf'),
            ('html', 'html'),
            ('raw', 'raw'),
            ('sxw', 'sxw'),
            ], string='Type', required=True),
        'groups_id': fields.many2many('res.groups', 'res_groups_report_rel', 'uid', 'gid', 'Groups')
    }
    _defaults = {
        'type': lambda *a: 'ir.actions.report.xml',
        'multi': lambda *a: False,
        'auto': lambda *a: True,
        'header': lambda *a: True,
        'report_sxw_content': lambda *a: False,
        'report_type': lambda *a: 'pdf',
    }
report_xml()
class act_window(osv.osv):
    _name = 'ir.actions.act_window'
    _table = 'ir_act_window'
    _sequence = 'ir_actions_id_seq'

    def _views_get_fnc(self, cr, uid, ids, name, arg, context={}):
        # Function-field getter: resolve the (view_id, view_mode) pairs of
        # each action, synthesizing entries from view_mode when the action
        # has no explicit view_ids.
        res = {}
        for act in self.browse(cr, uid, ids):
            res[act.id] = [(view.view_id.id, view.view_mode) for view in act.view_ids]
            if (not act.view_ids):
                modes = act.view_mode.split(',')
                find = False
                if act.view_id.id:
                    res[act.id].append((act.view_id.id, act.view_id.type))
                for t in modes:
                    # Skip the first mode matching the explicit view,
                    # which was already appended above.
                    if act.view_id and (t == act.view_id.type) and not find:
                        find = True
                        continue
                    res[act.id].append((False, t))
                # Add a calendar view when the target model supports it.
                if 'calendar' not in modes:
                    mobj = self.pool.get(act.res_model)
                    if mobj._date_name in mobj._columns:
                        res[act.id].append((False, 'calendar'))
        return res

    _columns = {
        'name': fields.char('Action Name', size=64, translate=True),
        'type': fields.char('Action Type', size=32, required=True),
        'view_id': fields.many2one('ir.ui.view', 'View Ref.', ondelete='cascade'),
        'domain': fields.char('Domain Value', size=250),
        'context': fields.char('Context Value', size=250),
        'res_model': fields.char('Model', size=64),
        'src_model': fields.char('Source model', size=64),
        'target': fields.selection([('current','Current Window'),('new','New Window')], 'Target Window'),
        'view_type': fields.selection((('tree','Tree'),('form','Form')),string='Type of view'),
        'view_mode': fields.char('Mode of view', size=250),
        'usage': fields.char('Action Usage', size=32),
        'view_ids': fields.one2many('ir.actions.act_window.view', 'act_window_id', 'Views'),
        'views': fields.function(_views_get_fnc, method=True, type='binary', string='Views'),
        'limit': fields.integer('Limit', help='Default limit for the list view'),
        'auto_refresh': fields.integer('Auto-Refresh',
            help='Add an auto-refresh on the view'),
    }
    _defaults = {
        'type': lambda *a: 'ir.actions.act_window',
        'view_type': lambda *a: 'form',
        'view_mode': lambda *a: 'tree,form',
        'context': lambda *a: '{}',
        'limit': lambda *a: 80,
        'target': lambda *a: 'current',
        'auto_refresh': lambda *a: 0,
    }
act_window()
class act_window_view(osv.osv):
"""Ordered mapping between a window action and the views it opens.

NOTE(review): rendered diff of a whitespace-only commit — the class body
appears twice below; code lines left byte-identical.
"""
_name = 'ir.actions.act_window.view'
_table = 'ir_act_window_view'
_rec_name = 'view_id'
_columns = {
'sequence': fields.integer('Sequence'),
'view_id': fields.many2one('ir.ui.view', 'View'),
'view_mode': fields.selection((
('tree', 'Tree'),
('form', 'Form'),
('graph', 'Graph'),
('calendar', 'Calendar')), string='Type of view', required=True),
'act_window_id': fields.many2one('ir.actions.act_window', 'Action', ondelete='cascade'),
'multi': fields.boolean('On multiple doc.',
help="If set to true, the action will not be displayed on the right toolbar of a form views."),
}
_defaults = {
'multi': lambda *a: False,
}
_order = 'sequence'
# Second (post-conversion) copy of the same attributes.
_name = 'ir.actions.act_window.view'
_table = 'ir_act_window_view'
_rec_name = 'view_id'
_columns = {
'sequence': fields.integer('Sequence'),
'view_id': fields.many2one('ir.ui.view', 'View'),
'view_mode': fields.selection((
('tree', 'Tree'),
('form', 'Form'),
('graph', 'Graph'),
('calendar', 'Calendar')), string='Type of view', required=True),
'act_window_id': fields.many2one('ir.actions.act_window', 'Action', ondelete='cascade'),
'multi': fields.boolean('On multiple doc.',
help="If set to true, the action will not be displayed on the right toolbar of a form views."),
}
_defaults = {
'multi': lambda *a: False,
}
_order = 'sequence'
act_window_view()
class act_wizard(osv.osv):
"""Client-side wizard action (shares the ir_actions id sequence).

NOTE(review): diff-rendered chunk — the attribute block appears twice;
code lines left byte-identical.
"""
_name = 'ir.actions.wizard'
_table = 'ir_act_wizard'
_sequence = 'ir_actions_id_seq'
_columns = {
'name': fields.char('Wizard info', size=64, required=True, translate=True),
'type': fields.char('Action type', size=32, required=True),
'wiz_name': fields.char('Wizard name', size=64, required=True),
'multi': fields.boolean('Action on multiple doc.', help="If set to true, the wizard will not be displayed on the right toolbar of a form views."),
'groups_id': fields.many2many('res.groups', 'res_groups_wizard_rel', 'uid', 'gid', 'Groups')
}
_defaults = {
'type': lambda *a: 'ir.actions.wizard',
'multi': lambda *a: False,
}
# Second (post-conversion) copy.
_name = 'ir.actions.wizard'
_table = 'ir_act_wizard'
_sequence = 'ir_actions_id_seq'
_columns = {
'name': fields.char('Wizard info', size=64, required=True, translate=True),
'type': fields.char('Action type', size=32, required=True),
'wiz_name': fields.char('Wizard name', size=64, required=True),
'multi': fields.boolean('Action on multiple doc.', help="If set to true, the wizard will not be displayed on the right toolbar of a form views."),
'groups_id': fields.many2many('res.groups', 'res_groups_wizard_rel', 'uid', 'gid', 'Groups')
}
_defaults = {
'type': lambda *a: 'ir.actions.wizard',
'multi': lambda *a: False,
}
act_wizard()
class act_url(osv.osv):
"""Action that opens a URL, either in a new window or the current one.

NOTE(review): the default 'type' value is 'ir.actions.act_url' while the
model _name is 'ir.actions.url' — looks intentional in this codebase but
worth confirming against the action dispatcher.
Diff-rendered chunk: attributes appear twice; code lines byte-identical.
"""
_name = 'ir.actions.url'
_table = 'ir_act_url'
_sequence = 'ir_actions_id_seq'
_columns = {
'name': fields.char('Action Name', size=64, translate=True),
'type': fields.char('Action Type', size=32, required=True),
'url': fields.text('Action Url',required=True),
'target': fields.selection((
('new', 'New Window'),
('self', 'This Window')),
'Action Target', required=True
)
}
_defaults = {
'type': lambda *a: 'ir.actions.act_url',
'target': lambda *a: 'new'
}
# Second (post-conversion) copy.
_name = 'ir.actions.url'
_table = 'ir_act_url'
_sequence = 'ir_actions_id_seq'
_columns = {
'name': fields.char('Action Name', size=64, translate=True),
'type': fields.char('Action Type', size=32, required=True),
'url': fields.text('Action Url',required=True),
'target': fields.selection((
('new', 'New Window'),
('self', 'This Window')),
'Action Target', required=True
)
}
_defaults = {
'type': lambda *a: 'ir.actions.act_url',
'target': lambda *a: 'new'
}
act_url()
#
# Actions that are run on the server side
#
class actions_server(osv.osv):
"""Server-side actions: python code, triggers, email/SMS, object ops.

NOTE(review): in this diff-rendered chunk the removed/added copies
interleave INSIDE the triple-quoted default for 'code' (a diff hunk
header even appears inside it), so the region from the first
'code': lambda onward is corrupted text, not runnable Python.  No
comments are inserted past that point to avoid landing inside a string.
"""
_name = 'ir.actions.server'
_table = 'ir_act_server'
_sequence = 'ir_actions_id_seq'
_columns = {
'name': fields.char('Action Name', required=True, size=64),
# 'state' selects which behaviour run() applies ('python' executes 'code').
'state': fields.selection([
('python','Python Code'),
('dummy','Dummy'),
('trigger','Trigger'),
('email','Email'),
('sms','SMS'),
('object_create','Create Object'),
('object_write','Write Object'),
('client_action','Client Action'),
('other','Others Actions'),
], 'Action State', required=True, size=32),
'code': fields.text('Python Code'),
'sequence': fields.integer('Sequence'),
'model_id': fields.many2one('ir.model', 'Model', required=True),
'trigger_name': fields.char('Trigger Name', size=128),
'trigger_object': fields.char('Trigger Object', size=128),
'trigger_object_id': fields.char('Trigger Object ID', size=128),
'message': fields.text('Message', translate=True),
'address': fields.char('Email Address', size=128),
'child_ids': fields.one2many('ir.actions.actions', 'parent_id', 'Others Actions'),
'usage': fields.char('Action Usage', size=32),
'type': fields.char('Report Type', size=32, required=True),
}
# The triple-quoted default for 'code' below is split by the diff; the
# lines that follow are the interleaved removed/added copies, unmodified.
_defaults = {
'state': lambda *a: 'dummy',
'type': lambda *a: 'ir.actions.server',
'sequence': lambda *a: 0,
'code': lambda *a: """# You can use the following variables
_name = 'ir.actions.server'
_table = 'ir_act_server'
_sequence = 'ir_actions_id_seq'
_columns = {
'name': fields.char('Action Name', required=True, size=64),
'state': fields.selection([
('python','Python Code'),
('dummy','Dummy'),
('trigger','Trigger'),
('email','Email'),
('sms','SMS'),
('object_create','Create Object'),
('object_write','Write Object'),
('client_action','Client Action'),
('other','Others Actions'),
], 'Action State', required=True, size=32),
'code': fields.text('Python Code'),
'sequence': fields.integer('Sequence'),
'model_id': fields.many2one('ir.model', 'Model', required=True),
'trigger_name': fields.char('Trigger Name', size=128),
'trigger_object': fields.char('Trigger Object', size=128),
'trigger_object_id': fields.char('Trigger Object ID', size=128),
'message': fields.text('Message', translate=True),
'address': fields.char('Email Address', size=128),
'child_ids': fields.one2many('ir.actions.actions', 'parent_id', 'Others Actions'),
'usage': fields.char('Action Usage', size=32),
'type': fields.char('Report Type', size=32, required=True),
}
_defaults = {
'state': lambda *a: 'dummy',
'type': lambda *a: 'ir.actions.server',
'sequence': lambda *a: 0,
'code': lambda *a: """# You can use the following variables
# - object
# - object2
# - time
@ -297,43 +297,43 @@ class actions_server(osv.osv):
# - ids
# If you plan to return an action, assign: action = {...}
"""
}
#
# Context should contain:
# ids : original ids
# id : current id of the object
# OUT:
# False : Finished correctly
# ACTION_ID : Action to launch
def run(self, cr, uid, ids, context={}):
for action in self.browse(cr, uid, ids, context):
if action.state=='python':
localdict = {
'self': self.pool.get(action.model_id.model),
'context': context,
'time': time,
'ids': ids,
'cr': cr,
'uid': uid
}
print action.code
exec action.code in localdict
print localdict.keys()
if 'action' in localdict:
return localdict['action']
return False
}
#
# Context should contain:
# ids : original ids
# id : current id of the object
# OUT:
# False : Finished correctly
# ACTION_ID : Action to launch
def run(self, cr, uid, ids, context={}):
"""Execute the server actions `ids`; return an action dict or False.

NOTE(review): `context={}` is a mutable default argument; the two
`print` statements are leftover debug output; `exec` of DB-stored
code runs arbitrary Python (records must be admin-only).
"""
for action in self.browse(cr, uid, ids, context):
if action.state=='python':
# Local namespace exposed to the user-provided code.
localdict = {
'self': self.pool.get(action.model_id.model),
'context': context,
'time': time,
'ids': ids,
'cr': cr,
'uid': uid
}
print action.code
exec action.code in localdict
print localdict.keys()
# The executed code may assign `action = {...}` to launch a client action.
if 'action' in localdict:
return localdict['action']
return False
actions_server()
class act_window_close(osv.osv):
"""Pseudo-action instructing the client to close the current window.

Diff-rendered chunk: attributes appear twice; code lines byte-identical.
"""
_name = 'ir.actions.act_window_close'
_table = 'ir_actions'
_sequence = 'ir_actions_id_seq'
_columns = {
'name': fields.char('Action Name', size=64, translate=True),
'type': fields.char('Action Type', size=32, required=True),
}
_defaults = {
'type': lambda *a: 'ir.actions.act_window_close',
}
# Second (post-conversion) copy.
_name = 'ir.actions.act_window_close'
_table = 'ir_actions'
_sequence = 'ir_actions_id_seq'
_columns = {
'name': fields.char('Action Name', size=64, translate=True),
'type': fields.char('Action Type', size=32, required=True),
}
_defaults = {
'type': lambda *a: 'ir.actions.act_window_close',
}
act_window_close()

View File

@ -30,16 +30,16 @@
from osv import fields,osv
class ir_attachment(osv.osv):
"""Binary attachment optionally linked to a record (res_model/res_id).

Diff-rendered chunk: attributes appear twice; code lines byte-identical.
"""
_name = 'ir.attachment'
_columns = {
'name': fields.char('Attachment Name',size=64, required=True),
'datas': fields.binary('Data'),
'datas_fname': fields.char('Data Filename',size=64),
'description': fields.text('Description'),
# Not required due to the document module !
'res_model': fields.char('Resource Model',size=64, readonly=True),
'res_id': fields.integer('Resource ID', readonly=True),
'link': fields.char('Link', size=256)
}
# Second (post-conversion) copy.
_name = 'ir.attachment'
_columns = {
'name': fields.char('Attachment Name',size=64, required=True),
'datas': fields.binary('Data'),
'datas_fname': fields.char('Data Filename',size=64),
'description': fields.text('Description'),
# Not required due to the document module !
'res_model': fields.char('Resource Model',size=64, readonly=True),
'res_id': fields.integer('Resource ID', readonly=True),
'link': fields.char('Link', size=256)
}
ir_attachment()

View File

@ -30,12 +30,12 @@
from osv import fields,osv
class board(osv.osv):
"""Read-only dashboard model: create() and copy() are disabled by
returning False instead of delegating to the ORM.

Diff-rendered chunk: the body appears twice; code lines byte-identical.
"""
_name = 'ir.board'
def create(self, cr, user, vals, context={}):
return False
def copy(self, cr, uid, id, default=None, context={}):
return False
_columns = {
'name': fields.char('Board', size=64),
}
# Second (post-conversion) copy.
_name = 'ir.board'
def create(self, cr, user, vals, context={}):
return False
def copy(self, cr, uid, id, default=None, context={}):
return False
_columns = {
'name': fields.char('Board', size=64),
}
board()

View File

@ -28,13 +28,13 @@
##############################################################################
#
# SPEC: Execute "model.function(*eval(args))" periodically
# date : date to execute the job, or NULL to run immediately
# delete_after: delete the ir.cron entry after execution
# interval_* : period
# max_repeat : number of executions, or NULL to repeat endlessly
# date : date to execute the job, or NULL to run immediately
# delete_after: delete the ir.cron entry after execution
# interval_* : period
# max_repeat : number of executions, or NULL to repeat endlessly
#
# TODO:
# Error treatment: exception, request, ... -> send request to uid
# Error treatment: exception, request, ... -> send request to uid
#
from mx import DateTime
@ -47,85 +47,85 @@ from osv import fields,osv
# Seconds between two scheduler wake-ups (see _poolJobs' setAlarm call).
next_wait = 60
# Map an interval_type value to a relative-date factory.
# NOTE(review): 'work_days' maps to plain days here — business-day logic
# does not appear to be implemented; confirm intent.
# Diff-rendered chunk: entries appear twice; code lines byte-identical.
_intervalTypes = {
'work_days': lambda interval: DateTime.RelativeDateTime(days=interval),
'days': lambda interval: DateTime.RelativeDateTime(days=interval),
'hours': lambda interval: DateTime.RelativeDateTime(hours=interval),
'weeks': lambda interval: DateTime.RelativeDateTime(days=7*interval),
'months': lambda interval: DateTime.RelativeDateTime(months=interval),
'minutes': lambda interval: DateTime.RelativeDateTime(minutes=interval),
'work_days': lambda interval: DateTime.RelativeDateTime(days=interval),
'days': lambda interval: DateTime.RelativeDateTime(days=interval),
'hours': lambda interval: DateTime.RelativeDateTime(hours=interval),
'weeks': lambda interval: DateTime.RelativeDateTime(days=7*interval),
'months': lambda interval: DateTime.RelativeDateTime(months=interval),
'minutes': lambda interval: DateTime.RelativeDateTime(minutes=interval),
}
class ir_cron(osv.osv, netsvc.Agent):
"""Scheduled jobs: periodically call model.function(*eval(args)).

Inherits netsvc.Agent for the setAlarm() scheduling used in _poolJobs.
Diff-rendered chunk: attribute blocks appear twice; code byte-identical.
"""
_name = "ir.cron"
_columns = {
'name': fields.char('Name', size=60, required=True),
'user_id': fields.many2one('res.users', 'User', required=True),
'active': fields.boolean('Active'),
'interval_number': fields.integer('Interval Number'),
'interval_type': fields.selection( [('minutes', 'Minutes'),
('hours', 'Hours'), ('work_days','Work Days'), ('days', 'Days'),('weeks', 'Weeks'), ('months', 'Months')], 'Interval Unit'),
'numbercall': fields.integer('Number of calls', help='Number of time the function is called,\na negative number indicates that the function will always be called'),
'doall' : fields.boolean('Repeat missed'),
'nextcall' : fields.datetime('Next call date', required=True),
'model': fields.char('Model', size=64),
'function': fields.char('Function', size=64),
'args': fields.text('Arguments'),
'priority': fields.integer('Priority', help='0=Very Urgent\n10=Not urgent')
}
# Second (post-conversion) copy of _columns.
_name = "ir.cron"
_columns = {
'name': fields.char('Name', size=60, required=True),
'user_id': fields.many2one('res.users', 'User', required=True),
'active': fields.boolean('Active'),
'interval_number': fields.integer('Interval Number'),
'interval_type': fields.selection( [('minutes', 'Minutes'),
('hours', 'Hours'), ('work_days','Work Days'), ('days', 'Days'),('weeks', 'Weeks'), ('months', 'Months')], 'Interval Unit'),
'numbercall': fields.integer('Number of calls', help='Number of time the function is called,\na negative number indicates that the function will always be called'),
'doall' : fields.boolean('Repeat missed'),
'nextcall' : fields.datetime('Next call date', required=True),
'model': fields.char('Model', size=64),
'function': fields.char('Function', size=64),
'args': fields.text('Arguments'),
'priority': fields.integer('Priority', help='0=Very Urgent\n10=Not urgent')
}
_defaults = {
'nextcall' : lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'priority' : lambda *a: 5,
'user_id' : lambda obj,cr,uid,context: uid,
'interval_number' : lambda *a: 1,
'interval_type' : lambda *a: 'months',
'numbercall' : lambda *a: 1,
'active' : lambda *a: 1,
'doall' : lambda *a: 1
}
# Second (post-conversion) copy of _defaults.
_defaults = {
'nextcall' : lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
'priority' : lambda *a: 5,
'user_id' : lambda obj,cr,uid,context: uid,
'interval_number' : lambda *a: 1,
'interval_type' : lambda *a: 'months',
'numbercall' : lambda *a: 1,
'active' : lambda *a: 1,
'doall' : lambda *a: 1
}
def _callback(self, cr, uid, model, func, args):
    """Invoke `model`.`func`(cr, uid, *eval(args)) if both exist.

    :param args: string holding a Python literal argument list, or falsy
    """
    # Replaced the obscure `(args or []) and eval(args)` idiom with an
    # equivalent conditional expression.
    # SECURITY NOTE: eval() of the stored `args` column executes arbitrary
    # Python — ir.cron rows must only be writable by trusted users.
    args = eval(args) if args else []
    m = self.pool.get(model)
    # Silently skip unknown model/function (original best-effort behaviour).
    if m and hasattr(m, func):
        f = getattr(m, func)
        f(cr, uid, *args)
def _callback(self, cr, uid, model, func, args):
"""Invoke model.func(cr, uid, *eval(args)) if both exist.

NOTE(review): eval() of the stored `args` column executes arbitrary
Python; cron rows must only be writable by trusted users.
"""
# `(args or []) and eval(args)`: [] when args is falsy, else eval(args).
args = (args or []) and eval(args)
m=self.pool.get(model)
# Unknown model/function is silently skipped (best-effort).
if m and hasattr(m, func):
f = getattr(m, func)
f(cr, uid, *args)
# Scheduler tick: run all due ir.cron jobs for `db_name`, reschedule them,
# then re-arm the alarm `next_wait` seconds ahead (unless check=True).
# NOTE(review): in this diff-rendered chunk the removed/added copies of
# _poolJobs are fully interleaved line-by-line, so the text below is NOT
# runnable as-is; it is preserved byte-identical.
# NOTE(review): the inner execute() builds SQL with `+addsql+` and uses
# %d placeholders — parameterization style worth confirming against the
# DB driver in use.
def _poolJobs(self, db_name, check=False):
now = DateTime.now()
#FIXME: multidb. Solution: a l'instanciation d'une nouvelle connection bd (ds pooler) fo que j'instancie
# un nouveau pooljob avec comme parametre la bd
try:
cr = pooler.get_db(db_name).cursor()
except:
return False
def _poolJobs(self, db_name, check=False):
now = DateTime.now()
#FIXME: multidb. Solution: a l'instanciation d'une nouvelle connection bd (ds pooler) fo que j'instancie
# un nouveau pooljob avec comme parametre la bd
try:
cr = pooler.get_db(db_name).cursor()
except:
return False
try:
cr.execute('select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority')
for job in cr.dictfetchall():
nextcall = DateTime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
numbercall = job['numbercall']
try:
cr.execute('select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority')
for job in cr.dictfetchall():
nextcall = DateTime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
numbercall = job['numbercall']
ok = False
while nextcall<now and numbercall:
if numbercall > 0:
numbercall -= 1
if not ok or job['doall']:
self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
if numbercall:
nextcall += _intervalTypes[job['interval_type']](job['interval_number'])
ok = True
addsql=''
if not numbercall:
addsql = ', active=False'
cr.execute("update ir_cron set nextcall=%s, numbercall=%d"+addsql+" where id=%d", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']))
cr.commit()
finally:
cr.close()
#
# Can be improved to do at the min(min(nextcalls), time()+next_wait)
# But is this an improvement ?
#
if not check:
self.setAlarm(self._poolJobs, int(time.time())+next_wait, [db_name])
ok = False
while nextcall<now and numbercall:
if numbercall > 0:
numbercall -= 1
if not ok or job['doall']:
self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
if numbercall:
nextcall += _intervalTypes[job['interval_type']](job['interval_number'])
ok = True
addsql=''
if not numbercall:
addsql = ', active=False'
cr.execute("update ir_cron set nextcall=%s, numbercall=%d"+addsql+" where id=%d", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']))
cr.commit()
finally:
cr.close()
#
# Can be improved to do at the min(min(nextcalls), time()+next_wait)
# But is this an improvement ?
#
if not check:
self.setAlarm(self._poolJobs, int(time.time())+next_wait, [db_name])
ir_cron()

View File

@ -30,26 +30,26 @@
from osv import fields,osv
class ir_default(osv.osv):
"""Per-user / per-company default values for model fields.

NOTE(review): 'ref_id' passes size=64 to fields.integer — size on an
integer column looks like a copy-paste leftover; confirm.
Diff-rendered chunk: each block appears twice; code byte-identical.
"""
_name = 'ir.default'
_columns = {
'field_tbl': fields.char('Model',size=64),
'field_name': fields.char('Model field',size=64),
'value': fields.char('Default Value',size=64),
'uid': fields.many2one('res.users', 'Users'),
'page': fields.char('View',size=64),
'ref_table': fields.char('Table Ref.',size=64),
'ref_id': fields.integer('ID Ref.',size=64),
'company_id': fields.many2one('res.company','Company')
}
# Second (post-conversion) copy of _columns.
_name = 'ir.default'
_columns = {
'field_tbl': fields.char('Model',size=64),
'field_name': fields.char('Model field',size=64),
'value': fields.char('Default Value',size=64),
'uid': fields.many2one('res.users', 'Users'),
'page': fields.char('View',size=64),
'ref_table': fields.char('Table Ref.',size=64),
'ref_id': fields.integer('ID Ref.',size=64),
'company_id': fields.many2one('res.company','Company')
}
# Default company: the company of the user creating the record.
def _get_company_id(self, cr, uid, context={}):
res = self.pool.get('res.users').read(cr, uid, [uid], ['company_id'], context=context)
if res and res[0]['company_id']:
return res[0]['company_id'][0]
return False
def _get_company_id(self, cr, uid, context={}):
res = self.pool.get('res.users').read(cr, uid, [uid], ['company_id'], context=context)
if res and res[0]['company_id']:
return res[0]['company_id'][0]
return False
_defaults = {
'company_id': _get_company_id,
}
_defaults = {
'company_id': _get_company_id,
}
ir_default()

View File

@ -31,20 +31,20 @@ from osv import fields,osv
class ir_exports(osv.osv):
"""Saved export configuration: a resource and its list of fields.

Diff-rendered chunk: attributes appear twice; code byte-identical.
"""
_name = "ir.exports"
_columns = {
'name': fields.char('Export name', size=128),
'resource': fields.char('Resource', size=128),
'export_fields': fields.one2many('ir.exports.line', 'export_id',
'Export Id'),
}
# Second (post-conversion) copy.
_name = "ir.exports"
_columns = {
'name': fields.char('Export name', size=128),
'resource': fields.char('Resource', size=128),
'export_fields': fields.one2many('ir.exports.line', 'export_id',
'Export Id'),
}
ir_exports()
class ir_exports_line(osv.osv):
"""One exported field belonging to an ir.exports configuration.

Diff-rendered chunk: attributes appear twice; code byte-identical.
"""
_name = 'ir.exports.line'
_columns = {
'name': fields.char('Field name', size=64),
'export_id': fields.many2one('ir.exports', 'Exportation', select=True, ondelete='cascade'),
}
# Second (post-conversion) copy.
_name = 'ir.exports.line'
_columns = {
'name': fields.char('Field name', size=64),
'export_id': fields.many2one('ir.exports', 'Exportation', select=True, ondelete='cascade'),
}
ir_exports_line()

View File

@ -37,395 +37,395 @@ import tools
import pooler
# Selection callback for ir.model.fields.ttype: every distinct field type
# already present in ir_model_fields, as (value, label) pairs.
# Diff-rendered chunk: the body appears twice; code byte-identical.
def _get_fields_type(self, cr, uid, context=None):
cr.execute('select distinct ttype,ttype from ir_model_fields')
return cr.fetchall()
cr.execute('select distinct ttype,ttype from ir_model_fields')
return cr.fetchall()
class ir_model(osv.osv):
"""Registry of ORM models; 'manual' state marks user-created objects.

Diff-rendered chunk: attribute blocks appear twice; code byte-identical.
"""
_name = 'ir.model'
_description = "Objects"
_rec_name = 'name'
_columns = {
'name': fields.char('Model Name', size=64, translate=True, required=True),
'model': fields.char('Object Name', size=64, required=True, search=1),
'info': fields.text('Information'),
'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields', required=True),
'state': fields.selection([('manual','Custom Object'),('base','Base Field')],'Manualy Created',readonly=1),
}
_defaults = {
'model': lambda *a: 'x_',
# 'manual' when created through the UI (context carries manual=True).
'state': lambda self,cr,uid,ctx={}: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
}
# Second (post-conversion) copy.
_name = 'ir.model'
_description = "Objects"
_rec_name = 'name'
_columns = {
'name': fields.char('Model Name', size=64, translate=True, required=True),
'model': fields.char('Object Name', size=64, required=True, search=1),
'info': fields.text('Information'),
'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields', required=True),
'state': fields.selection([('manual','Custom Object'),('base','Base Field')],'Manualy Created',readonly=1),
}
_defaults = {
'model': lambda *a: 'x_',
'state': lambda self,cr,uid,ctx={}: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
}
def _check_model_name(self, cr, uid, ids):
    """Constraint: manual models must be named x_<letters/digits/underscores>."""
    for record in self.browse(cr, uid, ids):
        if record.state != 'manual':
            continue
        name_ok = record.model.startswith('x_') and \
                re.match('^[a-z_A-Z0-9]+$', record.model)
        if not name_ok:
            return False
    return True
def _check_model_name(self, cr, uid, ids):
"""Constraint: manual models must match x_<letters/digits/underscores>."""
for model in self.browse(cr, uid, ids):
if model.state=='manual':
if not model.model.startswith('x_'):
return False
if not re.match('^[a-z_A-Z0-9]+$',model.model):
return False
return True
_constraints = [
    (_check_model_name, 'The model name must start with x_ and not contain any special character !', ['model']),
]

def unlink(self, cr, user, ids, context=None):
    """Delete custom models only; base models are protected, and the
    registry pool is rebuilt after a successful deletion."""
    for model in self.browse(cr, user, ids, context):
        if model.state != 'manual':
            # BUG FIX: original interpolated `field.name`, but `field` is
            # undefined in this scope (loop variable is `model`), so the
            # guard itself raised NameError instead of the intended error.
            raise except_orm(_('Error'), _("You can not remove the model '%s' !") % (model.name,))
    res = super(ir_model, self).unlink(cr, user, ids, context)
    # Manual models live in the registry: reload it so the deletion sticks.
    pooler.restart_pool(cr.dbname)
    return res
_constraints = [
(_check_model_name, 'The model name must start with x_ and not contain any special character !', ['model']),
]
def unlink(self, cr, user, ids, context=None):
"""Delete custom models only; base models are protected."""
for model in self.browse(cr, user, ids, context):
if model.state <> 'manual':
# BUG: `field` is undefined here (the loop variable is `model`),
# so this raise statement itself fails with NameError.
raise except_orm(_('Error'), _("You can not remove the model '%s' !") %(field.name,))
res = super(ir_model, self).unlink(cr, user, ids, context)
# Reload the registry so the deleted manual model disappears.
pooler.restart_pool(cr.dbname)
return res
def create(self, cr, user, vals, context=None):
"""Create a model; manual creations trigger a registry reload."""
if context and context.get('manual',False):
vals['state']='manual'
res = super(ir_model,self).create(cr, user, vals, context)
if vals.get('state','base')=='manual':
pooler.restart_pool(cr.dbname)
return res
# Second (post-conversion) diff copy of the same method.
def create(self, cr, user, vals, context=None):
if context and context.get('manual',False):
vals['state']='manual'
res = super(ir_model,self).create(cr, user, vals, context)
if vals.get('state','base')=='manual':
pooler.restart_pool(cr.dbname)
return res
def instanciate(self, cr, user, model, context={}):
"""Build and register an osv class for a manual model at runtime.

The record-name field is x_name when present, otherwise the first
declared column.  Diff copy: the method appears twice below.
"""
class x_custom_model(osv.osv):
pass
x_custom_model._name = model
x_custom_model._module = False
x_custom_model.createInstance(self.pool, '', cr)
if 'x_name' in x_custom_model._columns:
x_custom_model._rec_name = 'x_name'
else:
x_custom_model._rec_name = x_custom_model._columns.keys()[0]
def instanciate(self, cr, user, model, context={}):
class x_custom_model(osv.osv):
pass
x_custom_model._name = model
x_custom_model._module = False
x_custom_model.createInstance(self.pool, '', cr)
if 'x_name' in x_custom_model._columns:
x_custom_model._rec_name = 'x_name'
else:
x_custom_model._rec_name = x_custom_model._columns.keys()[0]
ir_model()
class ir_model_fields(osv.osv):
"""Registry of model fields; 'manual' state marks user-created fields.

NOTE(review): in this diff-rendered chunk the removed/added copies of
the _columns dict interleave in the middle of the dict literal, so the
text below is NOT runnable as-is; it is preserved byte-identical.
"""
_name = 'ir.model.fields'
_description = "Fields"
_columns = {
'name': fields.char('Name', required=True, size=64, select=1),
'model': fields.char('Object Name', size=64, required=True),
'relation': fields.char('Model Relation', size=64),
'model_id': fields.many2one('ir.model', 'Model id', required=True, select=True, ondelete='cascade'),
'field_description': fields.char('Field Label', required=True, size=256),
'relate': fields.boolean('Click and Relate'),
_name = 'ir.model.fields'
_description = "Fields"
_columns = {
'name': fields.char('Name', required=True, size=64, select=1),
'model': fields.char('Object Name', size=64, required=True),
'relation': fields.char('Model Relation', size=64),
'model_id': fields.many2one('ir.model', 'Model id', required=True, select=True, ondelete='cascade'),
'field_description': fields.char('Field Label', required=True, size=256),
'relate': fields.boolean('Click and Relate'),
'ttype': fields.selection(_get_fields_type, 'Field Type',size=64, required=True),
'selection': fields.char('Field Selection',size=128),
'required': fields.boolean('Required'),
'readonly': fields.boolean('Readonly'),
'select_level': fields.selection([('0','Not Searchable'),('1','Always Searchable'),('2','Advanced Search')],'Searchable', required=True),
'translate': fields.boolean('Translate'),
'size': fields.integer('Size'),
'state': fields.selection([('manual','Custom Field'),('base','Base Field')],'Manualy Created'),
'on_delete': fields.selection([('cascade','Cascade'),('set null','Set NULL')], 'On delete', help='On delete property for many2one fields'),
'domain': fields.char('Domain', size=256),
'ttype': fields.selection(_get_fields_type, 'Field Type',size=64, required=True),
'selection': fields.char('Field Selection',size=128),
'required': fields.boolean('Required'),
'readonly': fields.boolean('Readonly'),
'select_level': fields.selection([('0','Not Searchable'),('1','Always Searchable'),('2','Advanced Search')],'Searchable', required=True),
'translate': fields.boolean('Translate'),
'size': fields.integer('Size'),
'state': fields.selection([('manual','Custom Field'),('base','Base Field')],'Manualy Created'),
'on_delete': fields.selection([('cascade','Cascade'),('set null','Set NULL')], 'On delete', help='On delete property for many2one fields'),
'domain': fields.char('Domain', size=256),
'groups': fields.many2many('res.groups', 'ir_model_fields_group_rel', 'field_id', 'group_id', 'Groups'),
'group_name': fields.char('Group Name', size=128),
'view_load': fields.boolean('View Auto-Load'),
}
_defaults = {
'relate': lambda *a: 0,
'view_load': lambda *a: 0,
'selection': lambda *a: "[]",
'domain': lambda *a: "[]",
'name': lambda *a: 'x_',
'state': lambda self,cr,uid,ctx={}: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
'on_delete': lambda *a: 'set null',
'select_level': lambda *a: '0',
'size': lambda *a: 64,
'field_description': lambda *a: '',
}
_order = "id"
def unlink(self, cr, user, ids, context=None):
    """Delete custom fields only; base (code-defined) fields are protected."""
    records = self.browse(cr, user, ids, context)
    for record in records:
        if record.state == 'manual':
            continue
        raise except_orm(_('Error'), _("You can not remove the field '%s' !") % (record.name,))
    # MAY BE ADD A ALTER TABLE DROP ?
    return super(ir_model_fields, self).unlink(cr, user, ids, context)
# Second (post-conversion) diff copy of the _columns tail, _defaults,
# _order and unlink(); code lines left byte-identical.
'groups': fields.many2many('res.groups', 'ir_model_fields_group_rel', 'field_id', 'group_id', 'Groups'),
'group_name': fields.char('Group Name', size=128),
'view_load': fields.boolean('View Auto-Load'),
}
_defaults = {
'relate': lambda *a: 0,
'view_load': lambda *a: 0,
'selection': lambda *a: "[]",
'domain': lambda *a: "[]",
'name': lambda *a: 'x_',
'state': lambda self,cr,uid,ctx={}: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
'on_delete': lambda *a: 'set null',
'select_level': lambda *a: '0',
'size': lambda *a: 64,
'field_description': lambda *a: '',
}
_order = "id"
# Delete custom fields only; base fields are protected.
def unlink(self, cr, user, ids, context=None):
for field in self.browse(cr, user, ids, context):
if field.state <> 'manual':
raise except_orm(_('Error'), _("You can not remove the field '%s' !") %(field.name,))
#
# MAY BE ADD A ALTER TABLE DROP ?
#
return super(ir_model_fields, self).unlink(cr, user, ids, context)
def create(self, cr, user, vals, context=None):
"""Create a field; manual fields must be named x_* and the owning
model is re-initialised so the new column is added to the table."""
if 'model_id' in vals:
model_data=self.pool.get('ir.model').read(cr,user,vals['model_id'])
vals['model']=model_data['model']
if context and context.get('manual',False):
vals['state']='manual'
res = super(ir_model_fields,self).create(cr, user, vals, context)
if vals.get('state','base')=='manual':
if not vals['name'].startswith('x_'):
raise except_orm(_('Error'), _("Custom fields must have a name that starts with 'x_' !"))
if self.pool.get(vals['model']):
# Rebuild the model class and sync the DB schema (_auto_init).
self.pool.get(vals['model']).__init__(self.pool, cr)
self.pool.get(vals['model'])._auto_init(cr,{})
return res
# Second (post-conversion) diff copy of the same method.
def create(self, cr, user, vals, context=None):
if 'model_id' in vals:
model_data=self.pool.get('ir.model').read(cr,user,vals['model_id'])
vals['model']=model_data['model']
if context and context.get('manual',False):
vals['state']='manual'
res = super(ir_model_fields,self).create(cr, user, vals, context)
if vals.get('state','base')=='manual':
if not vals['name'].startswith('x_'):
raise except_orm(_('Error'), _("Custom fields must have a name that starts with 'x_' !"))
if self.pool.get(vals['model']):
self.pool.get(vals['model']).__init__(self.pool, cr)
self.pool.get(vals['model'])._auto_init(cr,{})
return res
ir_model_fields()
class ir_model_access(osv.osv):
"""Per-group CRUD permissions on models (ACL table).

Diff-rendered chunk: attributes appear twice; code byte-identical.
"""
_name = 'ir.model.access'
_columns = {
'name': fields.char('Name', size=64, required=True),
'model_id': fields.many2one('ir.model', 'Model', required=True),
# No group => a default rule that applies to every user.
'group_id': fields.many2one('res.groups', 'Group'),
'perm_read': fields.boolean('Read Access'),
'perm_write': fields.boolean('Write Access'),
'perm_create': fields.boolean('Create Access'),
'perm_unlink': fields.boolean('Delete Permission'),
}
# Second (post-conversion) copy.
_name = 'ir.model.access'
_columns = {
'name': fields.char('Name', size=64, required=True),
'model_id': fields.many2one('ir.model', 'Model', required=True),
'group_id': fields.many2one('res.groups', 'Group'),
'perm_read': fields.boolean('Read Access'),
'perm_write': fields.boolean('Write Access'),
'perm_create': fields.boolean('Create Access'),
'perm_unlink': fields.boolean('Delete Permission'),
}
def check_groups(self, cr, uid, group):
    """Return True if user `uid` belongs to the group identified by the
    XML id 'module.name', False otherwise (including malformed ids)."""
    grouparr = group.split('.')
    # BUG FIX: the original `if grouparr:` was always true (str.split
    # never returns an empty list), so a dotless `group` crashed with
    # IndexError on grouparr[1]; now it cleanly returns False.
    if len(grouparr) < 2:
        return False
    # SECURITY FIX: parameterized query instead of building SQL with
    # string concatenation and %-interpolation of uid/module/name.
    cr.execute("select * from res_groups_users_rel where uid=%s and gid in(select res_id from ir_model_data where module=%s and name=%s)", (uid, grouparr[0], grouparr[1]))
    return bool(cr.fetchall())
def check_groups(self, cr, uid, group):
"""Return True if user `uid` belongs to the XML-id group 'module.name'.

SECURITY NOTE: the SQL below is built by string concatenation and
%-interpolation — injectable if `group` is attacker-controlled.
NOTE(review): `if grouparr:` is always true (str.split never returns
[]), so a dotless `group` raises IndexError on grouparr[1].
"""
res = False
grouparr = group.split('.')
if grouparr:
cr.execute("select * from res_groups_users_rel where uid=" + str(uid) + " and gid in(select res_id from ir_model_data where module='%s' and name='%s')" % (grouparr[0], grouparr[1]))
r = cr.fetchall()
if not r:
res = False
else:
res = True
else:
res = False
return res
# ACL check for one model/mode: superuser bypass, then group rules, then
# global (group-less) rules; denial raises except_orm unless
# raise_exception=False.  Result is memoized via tools.cache (see the
# `check = tools.cache()(check)` lines at the end).
# NOTE(review): in this diff-rendered chunk the removed/added copies of
# check() are interleaved, so the text below is NOT runnable as-is; it is
# preserved byte-identical.
def check(self, cr, uid, model_name, mode='read',raise_exception=True):
assert mode in ['read','write','create','unlink'], 'Invalid access mode for security'
if uid == 1:
return True # TODO: check security: don't allow xml-rpc request with uid == 1
def check(self, cr, uid, model_name, mode='read',raise_exception=True):
assert mode in ['read','write','create','unlink'], 'Invalid access mode for security'
if uid == 1:
return True # TODO: check security: don't allow xml-rpc request with uid == 1
cr.execute('SELECT MAX(CASE WHEN perm_'+mode+' THEN 1 else 0 END) '
'FROM ir_model_access a '
'JOIN ir_model m '
'ON (a.model_id=m.id) '
'JOIN res_groups_users_rel gu '
'ON (gu.gid = a.group_id) '
'WHERE m.model = %s AND gu.uid = %s', (model_name, uid,))
r = cr.fetchall()
if r[0][0] == None:
cr.execute('SELECT MAX(CASE WHEN perm_'+mode+' THEN 1 else 0 END) '
'FROM ir_model_access a '
'JOIN ir_model m '
'ON (a.model_id = m.id) '
'WHERE a.group_id IS NULL AND m.model = %s', (model_name,))
r= cr.fetchall()
if r[0][0] == None:
return False # by default, the user had no access
cr.execute('SELECT MAX(CASE WHEN perm_'+mode+' THEN 1 else 0 END) '
'FROM ir_model_access a '
'JOIN ir_model m '
'ON (a.model_id=m.id) '
'JOIN res_groups_users_rel gu '
'ON (gu.gid = a.group_id) '
'WHERE m.model = %s AND gu.uid = %s', (model_name, uid,))
r = cr.fetchall()
if r[0][0] == None:
cr.execute('SELECT MAX(CASE WHEN perm_'+mode+' THEN 1 else 0 END) '
'FROM ir_model_access a '
'JOIN ir_model m '
'ON (a.model_id = m.id) '
'WHERE a.group_id IS NULL AND m.model = %s', (model_name,))
r= cr.fetchall()
if r[0][0] == None:
return False # by default, the user had no access
if not r[0][0]:
if raise_exception:
msgs = {
'read': _('You can not read this document! (%s)'),
'write': _('You can not write in this document! (%s)'),
'create': _('You can not create this kind of document! (%s)'),
'unlink': _('You can not delete this document! (%s)'),
}
# due to the assert at the begin of the function, we will never have a KeyError
raise except_orm(_('AccessError'), msgs[mode] % model_name )
return r[0][0]
if not r[0][0]:
if raise_exception:
msgs = {
'read': _('You can not read this document! (%s)'),
'write': _('You can not write in this document! (%s)'),
'create': _('You can not create this kind of document! (%s)'),
'unlink': _('You can not delete this document! (%s)'),
}
# due to the assert at the begin of the function, we will never have a KeyError
raise except_orm(_('AccessError'), msgs[mode] % model_name )
return r[0][0]
check = tools.cache()(check)
check = tools.cache()(check)
#
# Methods to clean the cache on the Check Method.
#
# NOTE(review): `self.check()` is called with no arguments although check()
# requires (cr, uid, model_name) — presumably this relies on the
# tools.cache wrapper's invalidation behaviour; confirm against the
# tools.cache implementation.
# Diff-rendered chunk: the three wrappers appear twice; code byte-identical.
def write(self, cr, uid, *args, **argv):
res = super(ir_model_access, self).write(cr, uid, *args, **argv)
self.check()
return res
def create(self, cr, uid, *args, **argv):
res = super(ir_model_access, self).create(cr, uid, *args, **argv)
self.check()
return res
def unlink(self, cr, uid, *args, **argv):
res = super(ir_model_access, self).unlink(cr, uid, *args, **argv)
self.check()
return res
#
# Methods to clean the cache on the Check Method.
#
def write(self, cr, uid, *args, **argv):
res = super(ir_model_access, self).write(cr, uid, *args, **argv)
self.check()
return res
def create(self, cr, uid, *args, **argv):
res = super(ir_model_access, self).create(cr, uid, *args, **argv)
self.check()
return res
def unlink(self, cr, uid, *args, **argv):
res = super(ir_model_access, self).unlink(cr, uid, *args, **argv)
self.check()
return res
ir_model_access()
class ir_model_data(osv.osv):
    """Bookkeeping of XML ids.

    Maps (module, name) pairs to database records so that repeated module
    updates can locate and update the records created by data files.
    """
    _name = 'ir.model.data'
    _columns = {
        'name': fields.char('XML Identifier', required=True, size=64),
        'model': fields.char('Model', required=True, size=64),
        'module': fields.char('Module', required=True, size=64),
        'res_id': fields.integer('Resource ID'),
        'noupdate': fields.boolean('Non Updatable'),
        'date_update': fields.datetime('Update Date'),
        'date_init': fields.datetime('Init Date')
    }
    _defaults = {
        'date_init': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
        'date_update': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
        'noupdate': lambda *a: False
    }
def __init__(self, pool, cr):
    """Initialise the per-registry state used while loading data files."""
    osv.osv.__init__(self, pool, cr)
    self.loads = {}        # (module, xml_id) -> (model, res_id) seen during the load
    self.doinit = True     # init phase flag, checked by _update()
    self.unlink_mark = {}  # (model, res_id) -> ir.model.data id (or False), candidates for removal
def _get_id(self, cr, uid, module, xml_id):
    """Return the ir.model.data id for (module, xml_id).

    Exactly one matching record must exist; otherwise the assertion fails.
    """
    ids = self.search(cr, uid, [('module','=',module),('name','=', xml_id)])
    assert len(ids)==1, '%d reference(s) to %s. You should have only one !' % (len(ids),xml_id)
    return ids[0]
_get_id = tools.cache()(_get_id)  # cached lookup
def _update_dummy(self, cr, uid, model, module, xml_id=False, store=True):
    """Record an existing xml_id in self.loads without touching the record.

    Returns the resource id, or False when the xml_id is unknown.
    """
    if not xml_id:
        return False
    try:
        id = self.read(cr, uid, [self._get_id(cr, uid, module, xml_id)], ['res_id'])[0]['res_id']
        self.loads[(module, xml_id)] = (model, id)
    except Exception:
        # was a bare "except:"; narrowed so ^C and SystemExit still propagate.
        # _get_id asserts when the xml_id matches 0 or >1 records.
        id = False
    return id
def _update(self, cr, uid, model, module, values, xml_id=False, store=True, noupdate=False, mode='init', res_id=False):
    """Create or update the record identified by (module, xml_id).

    Writes ``values`` on the existing record when the xml_id already points
    at a live row; otherwise creates it (only in 'init' mode, or in 'update'
    mode when an xml_id is given).  Also registers xml ids for every
    _inherits parent record.  Returns the resource id (False when nothing
    was done).
    """
    warning = True
    model_obj = self.pool.get(model)
    context = {}
    if xml_id and ('.' in xml_id):
        # fully qualified reference: "module.xml_id"
        assert len(xml_id.split('.'))==2, _('"%s" contains too many dots. XML ids should not contain dots ! These are used to refer to other modules data, as in module.reference_id') % (xml_id)
        warning = False
        module, xml_id = xml_id.split('.')
    if (not xml_id) and (not self.doinit):
        return False
    action_id = False
    if xml_id:
        # find an existing ir.model.data entry, dropping dangling ones
        cr.execute('select id,res_id from ir_model_data where module=%s and name=%s', (module, xml_id))
        results = cr.fetchall()
        for action_id2, res_id2 in results:
            cr.execute('select id from '+self.pool.get(model)._table+' where id=%d', (res_id2,))
            result3 = cr.fetchone()
            if not result3:
                # the target record vanished: remove the stale entry
                cr.execute('delete from ir_model_data where id=%d', (action_id2,))
            else:
                res_id, action_id = res_id2, action_id2

    if action_id and res_id:
        # known xml_id pointing at a live record: plain update
        model_obj.write(cr, uid, [res_id], values)
        self.write(cr, uid, [action_id], {
            'date_update': time.strftime('%Y-%m-%d %H:%M:%S'),
        })
    elif res_id:
        # record exists but has no ir.model.data entry yet
        model_obj.write(cr, uid, [res_id], values)
        if xml_id:
            self.create(cr, uid, {
                'name': xml_id,
                'model': model,
                'module':module,
                'res_id':res_id,
                'noupdate': noupdate,
            })
            if model_obj._inherits:
                # register an xml id for each _inherits parent as well
                for table in model_obj._inherits:
                    inherit_id = model_obj.browse(cr, uid,
                            res_id)[model_obj._inherits[table]]
                    self.create(cr, uid, {
                        'name': xml_id + '_' + table.replace('.', '_'),
                        'model': table,
                        'module': module,
                        'res_id': inherit_id,
                        'noupdate': noupdate,
                    })
    else:
        if mode=='init' or (mode=='update' and xml_id):
            res_id = model_obj.create(cr, uid, values)
            if xml_id:
                self.create(cr, uid, {
                    'name': xml_id,
                    'model': model,
                    'module': module,
                    'res_id': res_id,
                    'noupdate': noupdate
                })
                if model_obj._inherits:
                    for table in model_obj._inherits:
                        inherit_id = model_obj.browse(cr, uid,
                                res_id)[model_obj._inherits[table]]
                        self.create(cr, uid, {
                            'name': xml_id + '_' + table.replace('.', '_'),
                            'model': table,
                            'module': module,
                            'res_id': inherit_id,
                            'noupdate': noupdate,
                        })
    if xml_id:
        if res_id:
            # remember what this load touched, for _process_end()
            self.loads[(module, xml_id)] = (model, res_id)
            if model_obj._inherits:
                for table in model_obj._inherits:
                    inherit_field = model_obj._inherits[table]
                    inherit_id = model_obj.read(cr, uid, res_id,
                            [inherit_field])[inherit_field]
                    self.loads[(module, xml_id + '_' + \
                            table.replace('.', '_'))] = (table, inherit_id)
    return res_id
def _unlink(self, cr, uid, model, ids, direct=False):
    """Mark records for deletion; the actual unlink happens in _process_end()."""
    #self.pool.get(model).unlink(cr, uid, ids)
    for id in ids:
        self.unlink_mark[(model, id)] = False
        # let the driver quote 'model' itself: the original wrapped the
        # placeholder in literal quotes ('%s'), which double-quotes the value
        cr.execute('delete from ir_model_data where res_id=%d and model=%s', (id, model))
    return True
def ir_set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None, xml_id=False):
    """Create or update an ir.values entry tied to the data being loaded."""
    obj = self.pool.get('ir.values')
    if type(models[0])==type([]) or type(models[0])==type(()):
        model, res_id = models[0]
    else:
        res_id = None
        model = models[0]

    # build the optional filters with real query parameters instead of
    # interpolating res_id/key2 into the SQL text (injection/quoting bug)
    where = ''
    args = []
    if res_id:
        where += ' and res_id=%s'
        args.append(res_id)
    else:
        where += ' and (res_id is null)'

    if key2:
        where += ' and key2=%s'
        args.append(key2)
    else:
        where += ' and (key2 is null)'

    cr.execute('select * from ir_values where model=%s and key=%s and name=%s'+where, [model, key, name] + args)
    res = cr.fetchone()
    if not res:
        res = ir.ir_set(cr, uid, key, key2, name, models, value, replace, isobject, meta)
    elif xml_id:
        cr.execute('UPDATE ir_values set value=%s WHERE model=%s and key=%s and name=%s'+where, [value, model, key, name] + args)
    return True
def _process_end(self, cr, uid, modules):
    """After loading ``modules``, delete the records whose xml_id was not
    seen again during the load (unless flagged noupdate).

    Workflow activities get special treatment: their work items and
    transitions are detached before removal.
    """
    if not modules:
        return True
    module_str = ["'%s'" % m for m in modules]
    cr.execute('select id,name,model,res_id,module from ir_model_data where module in ('+','.join(module_str)+') and not noupdate')
    wkf_todo = []
    for (id, name, model, res_id, module) in cr.fetchall():
        if (module, name) not in self.loads:
            self.unlink_mark[(model, res_id)] = id
            if model=='workflow.activity':
                # reroute/remove transitions pointing at the doomed activity
                cr.execute('select res_type,res_id from wkf_instance where id in (select inst_id from wkf_workitem where act_id=%d)', (res_id,))
                wkf_todo.extend(cr.fetchall())
                cr.execute("update wkf_transition set condition='True', role_id=NULL, signal=NULL,act_to=act_from,act_from=%d where act_to=%d", (res_id,res_id))
                cr.execute("delete from wkf_transition where act_to=%d", (res_id,))

    for model, id in wkf_todo:
        # re-evaluate the affected workflow instances
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_write(uid, model, id, cr)
    cr.commit()

    for (model, id) in self.unlink_mark.keys():
        if self.pool.get(model):
            logger = netsvc.Logger()
            logger.notifyChannel('init', netsvc.LOG_INFO, 'Deleting %s@%s' % (id, model))
            try:
                self.pool.get(model).unlink(cr, uid, [id])
                if self.unlink_mark[(model, id)]:
                    self.unlink(cr, uid, [self.unlink_mark[(model, id)]])
                    # NOTE(review): nesting of the next statement reconstructed
                    # from a whitespace-mangled source -- confirm against history
                    cr.execute('DELETE FROM ir_values WHERE value=%s', (model+','+str(id),))
                cr.commit()
            except Exception:
                # best-effort: a FK elsewhere may still reference the record
                logger.notifyChannel('init', netsvc.LOG_ERROR, 'Could not delete id: %d of model %s\tThere should be some relation that points to this resource\tYou should manually fix this and restart --update=module' % (id, model))
    return True
ir_model_data()

View File

@ -34,176 +34,176 @@ import report.custom
from tools.translate import _
class report_custom(osv.osv):
    """Definition of a user-configurable (custom) report."""
    _name = 'ir.report.custom'
    _columns = {
        'name': fields.char('Report Name', size=64, required=True, translate=True),
        'model_id': fields.many2one('ir.model','Model', required=True, change_default=True),
        'type': fields.selection([('table','Tabular'),('pie','Pie Chart'),('bar','Bar Chart'),('line','Line Plot')], "Report Type", size=64, required='True'),
        'title': fields.char("Report title", size=64, required='True', translate=True),
        'print_format': fields.selection((('A4','a4'),('A5','a5')), 'Print format', required=True),
        'print_orientation': fields.selection((('landscape','Landscape'),('portrait','Portrait')), 'Print orientation', required=True, size=16),
        'repeat_header': fields.boolean('Repeat Header'),
        'footer': fields.char('Report Footer', size=64, required=True),
        'sortby': fields.char('Sorted By', size=64),
        'fields_child0': fields.one2many('ir.report.custom.fields', 'report_id','Fields', required=True),
        'field_parent': fields.many2one('ir.model.fields','Child Field'),
        'state': fields.selection([('unsubscribed','Unsubscribed'),('subscribed','Subscribed')], 'State', size=64),
        'frequency': fields.selection([('Y','Yearly'),('M','Monthly'),('D','Daily')], 'Frequency', size=64),
        'limitt': fields.char('Limit', size=9),
        'menu_id': fields.many2one('ir.ui.menu', 'Menu')
    }
    _defaults = {
        'print_format': lambda *a: 'A4',
        'print_orientation': lambda *a: 'portrait',
        'state': lambda *a: 'unsubscribed',
        'type': lambda *a: 'table',
        'footer': lambda *a: 'Generated by Tiny ERP'
    }
def onchange_model_id(self, cr, uid, ids, model_id):
    """Restrict the selectable parent field to the fields of the chosen model."""
    if not model_id:
        return {}
    return {'domain': {'field_parent': [('model_id','=',model_id)]}}
def unsubscribe(self, cr, uid, ids, context={}):
    """Switch the given reports back to the 'unsubscribed' state."""
    #TODO: should delete the ir.actions.report.custom for these reports and do an ir_del
    self.write(cr, uid, ids, {'state':'unsubscribed'})
    return True
def subscribe(self, cr, uid, ids, context={}):
    """Validate each report definition and register the client action that
    makes it available (menu entry or contextual print action), then mark
    the report 'subscribed'.

    Raises except_osv on any inconsistent definition (missing fields,
    tree used with a chart type, non-numeric chart values, ...).
    """
    for report in self.browse(cr, uid, ids):
        report.fields_child0.sort(lambda x,y : x.sequence - y.sequence)

        # required on field0 does not seem to work( cause we use o2m_l ?)
        if not report.fields_child0:
            raise osv.except_osv(_('Invalid operation'), _('Enter at least one field !'))

        if report.type in ['pie', 'bar', 'line'] and report.field_parent:
            raise osv.except_osv(_('Invalid operation'), _('Tree can only be used in tabular reports'))

        # Otherwise it won't build a good tree. See level.pop in custom.py.
        if report.type == 'table' and report.field_parent and report.fields_child0 and not report.fields_child0[0].groupby:
            raise osv.except_osv('Invalid operation :', 'When creating tree (field child) report, data must be group by the first field')

        if report.type == 'pie':
            if len(report.fields_child0) != 2:
                raise osv.except_osv(_('Invalid operation'), _('Pie charts need exactly two fields'))
            else:
                # c_f[i] holds the ttype of field_child0..2 for column i;
                # eval() is only applied to fixed attribute names, not user input
                c_f = {}
                for i in range(2):
                    c_f[i] = []
                    tmp = report.fields_child0[i]
                    for j in range(3):
                        c_f[i].append((not isinstance(eval('tmp.field_child'+str(j)), browse_null) and eval('tmp.field_child'+str(j)+'.ttype')) or None)
                if not reduce(lambda x,y : x or y, map(lambda x: x in ['integer', 'float'], c_f[1])):
                    raise osv.except_osv(_('Invalid operation'), _('Second field should be figures'))

        if report.type == 'bar':
            if len(report.fields_child0) < 2:
                raise osv.except_osv(_('Invalid operation'), _('Bar charts need at least two fields'))
            else:
                c_f = {}
                for i in range(len(report.fields_child0)):
                    c_f[i] = []
                    tmp = report.fields_child0[i]
                    for j in range(3):
                        c_f[i].append((not isinstance(eval('tmp.field_child'+str(j)), browse_null) and eval('tmp.field_child'+str(j)+'.ttype')) or None)

                    # every column except the first must be numeric
                    if i == 0:
                        pass
                    else:
                        if not reduce(lambda x,y : x or y, map(lambda x: x in ['integer', 'float'], c_f[i])):
                            raise osv.except_osv(_('Invalid operation'), _('Field %d should be a figure') %(i,))

        if report.state=='subscribed':
            continue

        name = report.name
        model = report.model_id.model

        action_def = {'report_id':report.id, 'type':'ir.actions.report.custom', 'model':model, 'name':name}
        id = self.pool.get('ir.actions.report.custom').create(cr, uid, action_def)
        m_id = report.menu_id.id
        action = "ir.actions.report.custom,%d" % (id,)
        if not report.menu_id:
            # no menu: expose the report as a contextual print action
            ir.ir_set(cr, uid, 'action', 'client_print_multi', name, [(model, False)], action, False, True)
        else:
            ir.ir_set(cr, uid, 'action', 'tree_but_open', 'Menuitem', [('ir.ui.menu', int(m_id))], action, False, True)

        self.write(cr, uid, [report.id], {'state':'subscribed'}, context)
    return True
report_custom()
class report_custom_fields(osv.osv):
    """One column of a custom report, with up to four levels of field
    traversal (field_child0..3) and optional per-level constraints."""
    _name = 'ir.report.custom.fields'
    _columns = {
        'name': fields.char('Name', size=64, required=True),
        'report_id': fields.many2one('ir.report.custom', 'Report Ref', select=True),
        'field_child0': fields.many2one('ir.model.fields', 'field child0', required=True),
        'fc0_operande': fields.many2one('ir.model.fields', 'Constraint'),
        'fc0_condition': fields.char('Condition', size=64),
        'fc0_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
        'field_child1': fields.many2one('ir.model.fields', 'field child1'),
        'fc1_operande': fields.many2one('ir.model.fields', 'Constraint'),
        'fc1_condition': fields.char('condition', size=64),
        'fc1_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
        'field_child2': fields.many2one('ir.model.fields', 'field child2'),
        'fc2_operande': fields.many2one('ir.model.fields', 'Constraint'),
        'fc2_condition': fields.char('condition', size=64),
        'fc2_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
        'field_child3': fields.many2one('ir.model.fields', 'field child3'),
        'fc3_operande': fields.many2one('ir.model.fields', 'Constraint'),
        'fc3_condition': fields.char('condition', size=64),
        'fc3_op': fields.selection((('>','>'),('<','<'),('==','='),('in','in'),('gety,==','(year)=')), 'Relation'),
        'alignment': fields.selection((('left','left'),('right','right'),('center','center')), 'Alignment', required=True),
        'sequence': fields.integer('Sequence', required=True),
        'width': fields.integer('Fixed Width'),
        'operation': fields.selection((('none', 'None'),('calc_sum','Calculate Sum'),('calc_avg','Calculate Average'),('calc_count','Calculate Count'),('calc_max', 'Get Max'))),
        'groupby' : fields.boolean('Group by'),
        'bgcolor': fields.char('Background Color', size=64),
        'fontcolor': fields.char('Font color', size=64),
        'cumulate': fields.boolean('Cumulate')
    }
    _defaults = {
        'alignment': lambda *a: 'left',
        'bgcolor': lambda *a: 'white',
        'fontcolor': lambda *a: 'black',
        'operation': lambda *a: 'none',
    }
    _order = "sequence"
def onchange_any_field_child(self, cr, uid, ids, field_id, level):
if not(field_id):
return {}
next_level_field_name = 'field_child%d' % (level+1)
next_level_operande = 'fc%d_operande' % (level+1)
field = self.pool.get('ir.model.fields').browse(cr, uid, [field_id])[0]
res = self.pool.get(field.model).fields_get(cr, uid, field.name)
if res[field.name].has_key('relation'):
cr.execute('select id from ir_model where model=%s', (res[field.name]['relation'],))
(id,) = cr.fetchone() or (False,)
if id:
return {
'domain': {
next_level_field_name: [('model_id', '=', id)],
next_level_operande: [('model_id', '=', id)]
},
'required': {
next_level_field_name: True
}
}
else:
print _("Warning: using a relation field which uses an unknown object") #TODO use the logger
return {'required': {next_level_field_name: True}}
else:
return {'domain': {next_level_field_name: []}}
def onchange_any_field_child(self, cr, uid, ids, field_id, level):
if not(field_id):
return {}
next_level_field_name = 'field_child%d' % (level+1)
next_level_operande = 'fc%d_operande' % (level+1)
field = self.pool.get('ir.model.fields').browse(cr, uid, [field_id])[0]
res = self.pool.get(field.model).fields_get(cr, uid, field.name)
if res[field.name].has_key('relation'):
cr.execute('select id from ir_model where model=%s', (res[field.name]['relation'],))
(id,) = cr.fetchone() or (False,)
if id:
return {
'domain': {
next_level_field_name: [('model_id', '=', id)],
next_level_operande: [('model_id', '=', id)]
},
'required': {
next_level_field_name: True
}
}
else:
print _("Warning: using a relation field which uses an unknown object") #TODO use the logger
return {'required': {next_level_field_name: True}}
else:
return {'domain': {next_level_field_name: []}}
def get_field_child_onchange_method(level):
    """Return an on_change handler bound to a given field_child level."""
    return lambda self, cr, uid, ids, field_id: self.onchange_any_field_child(cr, uid, ids, field_id, level)

# one handler per selectable level; level N configures field_child(N+1)
onchange_field_child0 = get_field_child_onchange_method(0)
onchange_field_child1 = get_field_child_onchange_method(1)
onchange_field_child2 = get_field_child_onchange_method(2)
report_custom_fields()

View File

@ -33,208 +33,208 @@ import tools
class ir_rule_group(osv.osv):
    """A group of record rules: either global or attached to groups/users."""
    _name = 'ir.rule.group'

    _columns = {
        'name': fields.char('Name', size=128, select=1),
        'model_id': fields.many2one('ir.model', 'Model',select=1, required=True),
        'global': fields.boolean('Global', select=1, help="Make the rule global or it needs to be put on a group or user"),
        'rules': fields.one2many('ir.rule', 'rule_group', 'Tests', help="The rule is satisfied if at least one test is True"),
        'groups': fields.many2many('res.groups', 'group_rule_group_rel', 'rule_group_id', 'group_id', 'Groups'),
        'users': fields.many2many('res.users', 'user_rule_group_rel', 'rule_group_id', 'user_id', 'Users'),
    }

    _order = 'model_id, global DESC'

    _defaults={
        'global': lambda *a: True,
    }
def unlink(self, cr, uid, ids, context=None):
    """Delete rule groups, then flush the cached ir.rule domains."""
    res = super(ir_rule_group, self).unlink(cr, uid, ids, context=context)
    # Restart the cache on the domain_get method of ir.rule
    self.pool.get('ir.rule').domain_get()
    return res
def create(self, cr, user, vals, context=None):
    """Create a rule group, then flush the cached ir.rule domains."""
    res = super(ir_rule_group, self).create(cr, user, vals, context=context)
    # Restart the cache on the domain_get method of ir.rule
    self.pool.get('ir.rule').domain_get()
    return res
def write(self, cr, uid, ids, vals, context=None):
    """Write on rule groups, then flush the cached ir.rule domains."""
    if not context:
        context={}
    res = super(ir_rule_group, self).write(cr, uid, ids, vals, context=context)
    # Restart the cache on the domain_get method of ir.rule
    self.pool.get('ir.rule').domain_get()
    return res
ir_rule_group()
class ir_rule(osv.osv):
    """A single test belonging to an ir.rule.group record rule."""
    _name = 'ir.rule'
    _rec_name = 'field_id'
def _operand(self, cr, uid, context):
    """Build the selection list of right-hand operands for a rule.

    Returns (technical_expression, label) pairs: the constants False/True,
    the current user id, and the fields of res.users (descending one level
    into its many2one relations).
    """
    def get(object, level=3, recur=None, root_tech='', root=''):
        # list the fields of `object`, recursing into the field types named
        # in `recur` while `level` allows
        res = []
        if not recur:
            recur = []
        fields = self.pool.get(object).fields_get(cr,uid)
        key = fields.keys()
        key.sort()
        for k in key:
            if fields[k]['type'] in ('many2one'):
                res.append((root_tech+'.'+k+'.id',
                    root+'/'+fields[k]['string']))

            elif fields[k]['type'] in ('many2many', 'one2many'):
                # x2many: expression yielding a comma-separated id list
                res.append(('\',\'.join(map(lambda x: str(x.id), '+root_tech+'.'+k+'))',
                    root+'/'+fields[k]['string']))

            else:
                res.append((root_tech+'.'+k,
                    root+'/'+fields[k]['string']))

            if (fields[k]['type'] in recur) and (level>0):
                res.extend(get(fields[k]['relation'], level-1,
                    recur, root_tech+'.'+k, root+'/'+fields[k]['string']))
        return res

    res = [("False", "False"), ("True", "True"), ("user.id", "User")]
    res += get('res.users', level=1,
            recur=['many2one'], root_tech='user', root='User')
    return res
def _domain_force_get(self, cr, uid, ids, field_name, arg, context={}):
res = {}
for rule in self.browse(cr, uid, ids, context):
if rule.domain_force:
res[rule.id] = eval(rule.domain_force, {'user': self.pool.get('res.users').browse(cr, 1, uid),
'time':time})
else:
if rule.operator in ('in', 'child_of'):
dom = eval("[('%s', '%s', [%s])]" % (rule.field_id.name, rule.operator,
rule.operand), {'user': self.pool.get('res.users').browse(cr, 1, uid),
'time':time})
else:
dom = eval("[('%s', '%s', %s)]" % (rule.field_id.name, rule.operator,
rule.operand), {'user': self.pool.get('res.users').browse(cr, 1, uid),
'time':time})
res[rule.id] = dom
return res
def _domain_force_get(self, cr, uid, ids, field_name, arg, context={}):
res = {}
for rule in self.browse(cr, uid, ids, context):
if rule.domain_force:
res[rule.id] = eval(rule.domain_force, {'user': self.pool.get('res.users').browse(cr, 1, uid),
'time':time})
else:
if rule.operator in ('in', 'child_of'):
dom = eval("[('%s', '%s', [%s])]" % (rule.field_id.name, rule.operator,
rule.operand), {'user': self.pool.get('res.users').browse(cr, 1, uid),
'time':time})
else:
dom = eval("[('%s', '%s', %s)]" % (rule.field_id.name, rule.operator,
rule.operand), {'user': self.pool.get('res.users').browse(cr, 1, uid),
'time':time})
res[rule.id] = dom
return res
_columns = {
'field_id': fields.many2one('ir.model.fields', 'Field',domain= "[('model_id','=', parent.model_id)]", select=1, required=True),
'operator':fields.selection((('=', '='), ('<>', '<>'), ('<=', '<='), ('>=', '>='), ('in', 'in'), ('child_of', 'child_of')), 'Operator', required=True),
'operand':fields.selection(_operand,'Operand', size=64, required=True),
'rule_group': fields.many2one('ir.rule.group', 'Group', select=2, required=True, ondelete="cascade"),
'domain_force': fields.char('Force Domain', size=250),
'domain': fields.function(_domain_force_get, method=True, string='Domain', type='char', size=250)
}
_columns = {
'field_id': fields.many2one('ir.model.fields', 'Field',domain= "[('model_id','=', parent.model_id)]", select=1, required=True),
'operator':fields.selection((('=', '='), ('<>', '<>'), ('<=', '<='), ('>=', '>='), ('in', 'in'), ('child_of', 'child_of')), 'Operator', required=True),
'operand':fields.selection(_operand,'Operand', size=64, required=True),
'rule_group': fields.many2one('ir.rule.group', 'Group', select=2, required=True, ondelete="cascade"),
'domain_force': fields.char('Force Domain', size=250),
'domain': fields.function(_domain_force_get, method=True, string='Domain', type='char', size=250)
}
def onchange_all(self, cr, uid, ids, field_id, operator, operand):
if not (field_id or operator or operand):
return {}
def onchange_all(self, cr, uid, ids, field_id, operator, operand):
if not (field_id or operator or operand):
return {}
def domain_get(self, cr, uid, model_name):
# root user above constraint
if uid == 1:
return '', []
def domain_get(self, cr, uid, model_name):
# root user above constraint
if uid == 1:
return '', []
cr.execute("""SELECT r.id FROM
ir_rule r
JOIN (ir_rule_group g
JOIN ir_model m ON (g.model_id = m.id))
ON (g.id = r.rule_group)
WHERE m.model = %s
AND (g.id IN (SELECT rule_group_id FROM group_rule_group_rel g_rel
JOIN res_groups_users_rel u_rel ON (g_rel.group_id = u_rel.gid)
WHERE u_rel.uid = %d) OR g.global)""", (model_name, uid))
ids = map(lambda x:x[0], cr.fetchall())
if not ids:
return '', []
obj = self.pool.get(model_name)
add = []
add_str = []
sub = []
sub_str = []
clause={}
clause_global={}
for rule in self.browse(cr, uid, ids):
dom = rule.domain
if rule.rule_group['global']:
clause_global.setdefault(rule.rule_group.id, [])
clause_global[rule.rule_group.id].append(obj._where_calc(cr, uid, dom, active_test=False))
else:
clause.setdefault(rule.rule_group.id, [])
clause[rule.rule_group.id].append(obj._where_calc(cr, uid, dom, active_test=False))
cr.execute("""SELECT r.id FROM
ir_rule r
JOIN (ir_rule_group g
JOIN ir_model m ON (g.model_id = m.id))
ON (g.id = r.rule_group)
WHERE m.model = %s
AND (g.id IN (SELECT rule_group_id FROM group_rule_group_rel g_rel
JOIN res_groups_users_rel u_rel ON (g_rel.group_id = u_rel.gid)
WHERE u_rel.uid = %d) OR g.global)""", (model_name, uid))
ids = map(lambda x:x[0], cr.fetchall())
if not ids:
return '', []
obj = self.pool.get(model_name)
add = []
add_str = []
sub = []
sub_str = []
clause={}
clause_global={}
for rule in self.browse(cr, uid, ids):
dom = rule.domain
if rule.rule_group['global']:
clause_global.setdefault(rule.rule_group.id, [])
clause_global[rule.rule_group.id].append(obj._where_calc(cr, uid, dom, active_test=False))
else:
clause.setdefault(rule.rule_group.id, [])
clause[rule.rule_group.id].append(obj._where_calc(cr, uid, dom, active_test=False))
def _query(clause, test):
query = ''
val = []
for g in clause.values():
if not g:
continue
if len(query):
query += ' '+test+' '
query += '('
first = True
for c in g:
if not first:
query += ' AND '
first = False
query += '('
first2 = True
for clause in c[0]:
if not first2:
query += ' AND '
first2 = False
query += clause
query += ')'
val += c[1]
query += ')'
return query, val
def _query(clause, test):
query = ''
val = []
for g in clause.values():
if not g:
continue
if len(query):
query += ' '+test+' '
query += '('
first = True
for c in g:
if not first:
query += ' AND '
first = False
query += '('
first2 = True
for clause in c[0]:
if not first2:
query += ' AND '
first2 = False
query += clause
query += ')'
val += c[1]
query += ')'
return query, val
query, val = _query(clause, 'OR')
query_global, val_global = _query(clause_global, 'OR')
if query_global:
if query:
query = '('+query+') OR '+query_global
val.extend(val_global)
else:
query = query_global
val = val_global
query, val = _query(clause, 'OR')
query_global, val_global = _query(clause_global, 'OR')
if query_global:
if query:
query = '('+query+') OR '+query_global
val.extend(val_global)
else:
query = query_global
val = val_global
if query:
query = '('+query+')'
return query, val
domain_get = tools.cache()(domain_get)
if query:
query = '('+query+')'
return query, val
domain_get = tools.cache()(domain_get)
def unlink(self, cr, uid, ids, context=None):
res = super(ir_rule, self).unlink(cr, uid, ids, context=context)
# Restart the cache on the domain_get method of ir.rule
self.domain_get()
return res
def unlink(self, cr, uid, ids, context=None):
res = super(ir_rule, self).unlink(cr, uid, ids, context=context)
# Restart the cache on the domain_get method of ir.rule
self.domain_get()
return res
def create(self, cr, user, vals, context=None):
res = super(ir_rule, self).create(cr, user, vals, context=context)
# Restart the cache on the domain_get method of ir.rule
self.domain_get()
return res
def create(self, cr, user, vals, context=None):
res = super(ir_rule, self).create(cr, user, vals, context=context)
# Restart the cache on the domain_get method of ir.rule
self.domain_get()
return res
def write(self, cr, uid, ids, vals, context=None):
if not context:
context={}
res = super(ir_rule, self).write(cr, uid, ids, vals, context=context)
# Restart the cache on the domain_get method
self.domain_get()
return res
def write(self, cr, uid, ids, vals, context=None):
if not context:
context={}
res = super(ir_rule, self).write(cr, uid, ids, vals, context=context)
# Restart the cache on the domain_get method
self.domain_get()
return res
ir_rule()

View File

@ -31,52 +31,52 @@ import time
from osv import fields,osv
class ir_sequence_type(osv.osv):
    """Registry of sequence codes selectable on ir.sequence records."""
    _name = 'ir.sequence.type'
    _columns = {
        'name': fields.char('Sequence Name', size=64, required=True),
        'code': fields.char('Sequence Code', size=32, required=True),
    }
ir_sequence_type()
def _code_get(self, cr, uid, context={}):
cr.execute('select code, name from ir_sequence_type')
return cr.fetchall()
cr.execute('select code, name from ir_sequence_type')
return cr.fetchall()
class ir_sequence(osv.osv):
    """Configurable number sequences (prefix + padded counter + suffix)."""
    _name = 'ir.sequence'
    _columns = {
        'name': fields.char('Sequence Name', size=64, required=True),
        'code': fields.selection(_code_get, 'Sequence Code', size=64, required=True),
        'active': fields.boolean('Active'),
        'prefix': fields.char('Prefix', size=64),
        'suffix': fields.char('Suffix', size=64),
        'number_next': fields.integer('Next Number', required=True),
        'number_increment': fields.integer('Increment Number', required=True),
        'padding': fields.integer('Number padding', required=True),
    }
    _defaults = {
        'active': lambda *a: True,
        'number_increment': lambda *a: 1,
        'number_next': lambda *a: 1,
        'padding': lambda *a: 0,
    }

    def _process(self, s):
        # Interpolate %(year)s / %(month)s / %(day)s in prefix/suffix text.
        return (s or '') % {'year': time.strftime('%Y'), 'month': time.strftime('%m'), 'day': time.strftime('%d')}

    def get_id(self, cr, uid, sequence_id, test='id=%d'):
        """Fetch the next value of a sequence and increment its counter.

        `test` is a SQL fragment with a single placeholder ('id=%d' or
        'code=%s').  SECURITY NOTE(review): it is concatenated into the
        query, so it must only ever come from trusted code — both callers
        in this file pass literals.
        """
        # Serialize concurrent increments on the whole table.
        cr.execute('lock table ir_sequence')
        cr.execute('select id,number_next,number_increment,prefix,suffix,padding from ir_sequence where '+test+' and active=True', (sequence_id,))
        res = cr.dictfetchone()
        if res:
            cr.execute('update ir_sequence set number_next=number_next+number_increment where id=%d and active=True', (res['id'],))
            if res['number_next']:
                # '%%0Nd' pads the counter to `padding` digits.
                return self._process(res['prefix']) + '%%0%sd' % res['padding'] % res['number_next'] + self._process(res['suffix'])
            else:
                return self._process(res['prefix']) + self._process(res['suffix'])
        return False

    def get(self, cr, uid, code):
        # Convenience wrapper: look the sequence up by code instead of id.
        return self.get_id(cr, uid, code, test='code=%s')
ir_sequence()

View File

@ -32,131 +32,131 @@ from osv.osv import Cacheable
import tools
# (technical value, label) pairs for the ir.translation 'type' selection.
TRANSLATION_TYPE = [
    ('field', 'Field'),
    ('model', 'Model'),
    ('rml', 'RML'),
    ('selection', 'Selection'),
    ('view', 'View'),
    ('wizard_button', 'Wizard Button'),
    ('wizard_field', 'Wizard Field'),
    ('wizard_view', 'Wizard View'),
    ('xsl', 'XSL'),
    ('help', 'Help'),
    ('code', 'Code'),
    ('constraint', 'Constraint'),
]
class ir_translation(osv.osv, Cacheable):
_name = "ir.translation"
_log_access = False
_name = "ir.translation"
_log_access = False
def _get_language(self, cr, uid, context):
lang_obj = self.pool.get('res.lang')
lang_ids = lang_obj.search(cr, uid, [('translatable', '=', True)],
context=context)
langs = lang_obj.browse(cr, uid, lang_ids, context=context)
res = [(lang.code, lang.name) for lang in langs]
for lang_dict in tools.scan_languages():
if lang_dict not in res:
res.append(lang_dict)
return res
def _get_language(self, cr, uid, context):
lang_obj = self.pool.get('res.lang')
lang_ids = lang_obj.search(cr, uid, [('translatable', '=', True)],
context=context)
langs = lang_obj.browse(cr, uid, lang_ids, context=context)
res = [(lang.code, lang.name) for lang in langs]
for lang_dict in tools.scan_languages():
if lang_dict not in res:
res.append(lang_dict)
return res
_columns = {
'name': fields.char('Field Name', size=128, required=True),
'res_id': fields.integer('Resource ID'),
'lang': fields.selection(_get_language, string='Language', size=5),
'type': fields.selection(TRANSLATION_TYPE, string='Type', size=16),
'src': fields.text('Source'),
'value': fields.text('Translation Value'),
}
_sql = """
create index ir_translation_ltn on ir_translation (lang,type,name);
create index ir_translation_res_id on ir_translation (res_id);
"""
_columns = {
'name': fields.char('Field Name', size=128, required=True),
'res_id': fields.integer('Resource ID'),
'lang': fields.selection(_get_language, string='Language', size=5),
'type': fields.selection(TRANSLATION_TYPE, string='Type', size=16),
'src': fields.text('Source'),
'value': fields.text('Translation Value'),
}
_sql = """
create index ir_translation_ltn on ir_translation (lang,type,name);
create index ir_translation_res_id on ir_translation (res_id);
"""
def _get_ids(self, cr, uid, name, tt, lang, ids):
translations, to_fetch = {}, []
for id in ids:
trans = self.get((lang, name, id))
if trans is not None:
translations[id] = trans
else:
to_fetch.append(id)
if to_fetch:
cr.execute('select res_id,value ' \
'from ir_translation ' \
'where lang=%s ' \
'and type=%s ' \
'and name=%s ' \
'and res_id in ('+','.join(map(str, to_fetch))+')',
(lang,tt,name))
for res_id, value in cr.fetchall():
self.add((lang, tt, name, res_id), value)
translations[res_id] = value
for res_id in ids:
if res_id not in translations:
self.add((lang, tt, name, res_id), False)
translations[res_id] = False
return translations
def _get_ids(self, cr, uid, name, tt, lang, ids):
translations, to_fetch = {}, []
for id in ids:
trans = self.get((lang, name, id))
if trans is not None:
translations[id] = trans
else:
to_fetch.append(id)
if to_fetch:
cr.execute('select res_id,value ' \
'from ir_translation ' \
'where lang=%s ' \
'and type=%s ' \
'and name=%s ' \
'and res_id in ('+','.join(map(str, to_fetch))+')',
(lang,tt,name))
for res_id, value in cr.fetchall():
self.add((lang, tt, name, res_id), value)
translations[res_id] = value
for res_id in ids:
if res_id not in translations:
self.add((lang, tt, name, res_id), False)
translations[res_id] = False
return translations
def _set_ids(self, cr, uid, name, tt, lang, ids, value):
cr.execute('delete from ir_translation ' \
'where lang=%s ' \
'and type=%s ' \
'and name=%s ' \
'and res_id in ('+','.join(map(str,ids))+')',
(lang,tt,name))
for id in ids:
self.create(cr, uid, {
'lang':lang,
'type':tt,
'name':name,
'res_id':id,
'value':value,
})
return len(ids)
def _set_ids(self, cr, uid, name, tt, lang, ids, value):
cr.execute('delete from ir_translation ' \
'where lang=%s ' \
'and type=%s ' \
'and name=%s ' \
'and res_id in ('+','.join(map(str,ids))+')',
(lang,tt,name))
for id in ids:
self.create(cr, uid, {
'lang':lang,
'type':tt,
'name':name,
'res_id':id,
'value':value,
})
return len(ids)
def _get_source(self, cr, uid, name, tt, lang, source=None):
trans = self.get((lang, tt, name, source))
if trans is not None:
return trans
def _get_source(self, cr, uid, name, tt, lang, source=None):
trans = self.get((lang, tt, name, source))
if trans is not None:
return trans
if source:
#if isinstance(source, unicode):
# source = source.encode('utf8')
cr.execute('select value ' \
'from ir_translation ' \
'where lang=%s ' \
'and type=%s ' \
'and name=%s ' \
'and src=%s',
(lang, tt, str(name), source))
else:
cr.execute('select value ' \
'from ir_translation ' \
'where lang=%s ' \
'and type=%s ' \
'and name=%s',
(lang, tt, str(name)))
res = cr.fetchone()
if source:
#if isinstance(source, unicode):
# source = source.encode('utf8')
cr.execute('select value ' \
'from ir_translation ' \
'where lang=%s ' \
'and type=%s ' \
'and name=%s ' \
'and src=%s',
(lang, tt, str(name), source))
else:
cr.execute('select value ' \
'from ir_translation ' \
'where lang=%s ' \
'and type=%s ' \
'and name=%s',
(lang, tt, str(name)))
res = cr.fetchone()
trad = res and res[0] or ''
self.add((lang, tt, name, source), trad)
return trad
trad = res and res[0] or ''
self.add((lang, tt, name, source), trad)
return trad
def unlink(self, cursor, user, ids, context=None):
self.clear()
return super(ir_translation, self).unlink(cusor, user, ids,
context=context)
def unlink(self, cursor, user, ids, context=None):
self.clear()
return super(ir_translation, self).unlink(cusor, user, ids,
context=context)
def create(self, cursor, user, vals, context=None):
self.clear()
return super(ir_translation, self).create(cursor, user, vals,
context=context)
def create(self, cursor, user, vals, context=None):
self.clear()
return super(ir_translation, self).create(cursor, user, vals,
context=context)
def write(self, cursor, user, ids, vals, context=None):
self.clear()
return super(ir_translation, self).write(cursor, user, ids, vals,
context=context)
def write(self, cursor, user, ids, vals, context=None):
self.clear()
return super(ir_translation, self).write(cursor, user, ids, vals,
context=context)
ir_translation()

View File

@ -32,12 +32,12 @@ from osv.orm import browse_null, browse_record
import re
def one_in(setA, setB):
    """Check the presence of an element of setA in setB.

    Returns True as soon as any element of `setA` is found in `setB`,
    False otherwise (in particular when `setA` is empty).
    """
    for x in setA:
        if x in setB:
            return True
    return False
icons = map(lambda x: (x,x), ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_BOLD',
'STOCK_CANCEL', 'STOCK_CDROM', 'STOCK_CLEAR', 'STOCK_CLOSE', 'STOCK_COLOR_PICKER',
@ -68,143 +68,143 @@ icons = map(lambda x: (x,x), ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_
])
class many2many_unique(fields.many2many):
    """many2many that drops (4, id) 'link' commands whose link row already
    exists in the relation table, avoiding duplicate rows."""
    def set(self, cr, obj, id, name, values, user=None, context=None):
        if not values:
            return
        # Work on a copy so we can prune while iterating the original.
        val = values[:]
        for act in values:
            if act[0] == 4:
                cr.execute('SELECT * FROM '+self._rel+' \
                        WHERE '+self._id1+'=%d AND '+self._id2+'=%d', (id, act[1]))
                if cr.fetchall():
                    val.remove(act)
        return super(many2many_unique, self).set(cr, obj, id, name, val, user=user,
                context=context)
class ir_ui_menu(osv.osv):
_name = 'ir.ui.menu'
def search(self, cr, uid, args, offset=0, limit=2000, order=None,
context=None, count=False):
if context is None:
context = {}
ids = osv.orm.orm.search(self, cr, uid, args, offset, limit, order,
context=context)
if uid==1:
return ids
user_groups = self.pool.get('res.users').read(cr, uid, [uid])[0]['groups_id']
result = []
for menu in self.browse(cr, uid, ids):
if not len(menu.groups_id):
result.append(menu.id)
continue
for g in menu.groups_id:
if g.id in user_groups:
result.append(menu.id)
break
return result
_name = 'ir.ui.menu'
def search(self, cr, uid, args, offset=0, limit=2000, order=None,
context=None, count=False):
if context is None:
context = {}
ids = osv.orm.orm.search(self, cr, uid, args, offset, limit, order,
context=context)
if uid==1:
return ids
user_groups = self.pool.get('res.users').read(cr, uid, [uid])[0]['groups_id']
result = []
for menu in self.browse(cr, uid, ids):
if not len(menu.groups_id):
result.append(menu.id)
continue
for g in menu.groups_id:
if g.id in user_groups:
result.append(menu.id)
break
return result
def _get_full_name(self, cr, uid, ids, name, args, context):
res = {}
for m in self.browse(cr, uid, ids, context=context):
res[m.id] = self._get_one_full_name(m)
return res
def _get_full_name(self, cr, uid, ids, name, args, context):
res = {}
for m in self.browse(cr, uid, ids, context=context):
res[m.id] = self._get_one_full_name(m)
return res
def _get_one_full_name(self, menu, level=6):
if level<=0:
return '...'
if menu.parent_id:
parent_path = self._get_one_full_name(menu.parent_id, level-1) + "/"
else:
parent_path = ''
return parent_path + menu.name
def _get_one_full_name(self, menu, level=6):
if level<=0:
return '...'
if menu.parent_id:
parent_path = self._get_one_full_name(menu.parent_id, level-1) + "/"
else:
parent_path = ''
return parent_path + menu.name
def copy(self, cr, uid, id, default=None, context=None):
ir_values_obj = self.pool.get('ir.values')
res = super(ir_ui_menu, self).copy(cr, uid, id, context=context)
datas=self.read(cr,uid,[res],['name'])[0]
rex=re.compile('\([0-9]+\)')
concat=rex.findall(datas['name'])
if concat:
next_num=eval(concat[0])+1
datas['name']=rex.sub(('(%d)'%next_num),datas['name'])
else:
datas['name']=datas['name']+'(1)'
self.write(cr,uid,[res],{'name':datas['name']})
ids = ir_values_obj.search(cr, uid, [
('model', '=', 'ir.ui.menu'),
('res_id', '=', id),
])
for iv in ir_values_obj.browse(cr, uid, ids):
new_id = ir_values_obj.copy(cr, uid, iv.id,
default={'res_id': res}, context=context)
return res
def copy(self, cr, uid, id, default=None, context=None):
ir_values_obj = self.pool.get('ir.values')
res = super(ir_ui_menu, self).copy(cr, uid, id, context=context)
datas=self.read(cr,uid,[res],['name'])[0]
rex=re.compile('\([0-9]+\)')
concat=rex.findall(datas['name'])
if concat:
next_num=eval(concat[0])+1
datas['name']=rex.sub(('(%d)'%next_num),datas['name'])
else:
datas['name']=datas['name']+'(1)'
self.write(cr,uid,[res],{'name':datas['name']})
ids = ir_values_obj.search(cr, uid, [
('model', '=', 'ir.ui.menu'),
('res_id', '=', id),
])
for iv in ir_values_obj.browse(cr, uid, ids):
new_id = ir_values_obj.copy(cr, uid, iv.id,
default={'res_id': res}, context=context)
return res
def _action(self, cursor, user, ids, name, arg, context=None):
res = {}
values_obj = self.pool.get('ir.values')
value_ids = values_obj.search(cursor, user, [
('model', '=', self._name), ('key', '=', 'action'),
('key2', '=', 'tree_but_open'), ('res_id', 'in', ids)],
context=context)
values_action = {}
for value in values_obj.browse(cursor, user, value_ids,
context=context):
values_action[value.res_id] = value.value
for menu_id in ids:
res[menu_id] = values_action.get(menu_id, False)
return res
def _action(self, cursor, user, ids, name, arg, context=None):
res = {}
values_obj = self.pool.get('ir.values')
value_ids = values_obj.search(cursor, user, [
('model', '=', self._name), ('key', '=', 'action'),
('key2', '=', 'tree_but_open'), ('res_id', 'in', ids)],
context=context)
values_action = {}
for value in values_obj.browse(cursor, user, value_ids,
context=context):
values_action[value.res_id] = value.value
for menu_id in ids:
res[menu_id] = values_action.get(menu_id, False)
return res
def _action_inv(self, cursor, user, menu_id, name, value, arg, context=None):
if context is None:
context = {}
ctx = context.copy()
if 'read_delta' in ctx:
del ctx['read_delta']
values_obj = self.pool.get('ir.values')
values_ids = values_obj.search(cursor, user, [
('model', '=', self._name), ('key', '=', 'action'),
('key2', '=', 'tree_but_open'), ('res_id', '=', menu_id)],
context=context)
if values_ids:
values_obj.write(cursor, user, values_ids[0], {'value': value},
context=ctx)
else:
values_obj.create(cursor, user, {
'name': 'Menuitem',
'model': self._name,
'value': value,
'object': True,
'key': 'action',
'key2': 'tree_but_open',
'res_id': menu_id,
}, context=ctx)
def _action_inv(self, cursor, user, menu_id, name, value, arg, context=None):
if context is None:
context = {}
ctx = context.copy()
if 'read_delta' in ctx:
del ctx['read_delta']
values_obj = self.pool.get('ir.values')
values_ids = values_obj.search(cursor, user, [
('model', '=', self._name), ('key', '=', 'action'),
('key2', '=', 'tree_but_open'), ('res_id', '=', menu_id)],
context=context)
if values_ids:
values_obj.write(cursor, user, values_ids[0], {'value': value},
context=ctx)
else:
values_obj.create(cursor, user, {
'name': 'Menuitem',
'model': self._name,
'value': value,
'object': True,
'key': 'action',
'key2': 'tree_but_open',
'res_id': menu_id,
}, context=ctx)
_columns = {
'name': fields.char('Menu', size=64, required=True, translate=True),
'sequence': fields.integer('Sequence'),
'child_id' : fields.one2many('ir.ui.menu', 'parent_id','Child ids'),
'parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', select=True),
'groups_id': many2many_unique('res.groups', 'ir_ui_menu_group_rel',
'menu_id', 'gid', 'Groups'),
'complete_name': fields.function(_get_full_name, method=True,
string='Complete Name', type='char', size=128),
'icon': fields.selection(icons, 'Icon', size=64),
'action': fields.function(_action, fnct_inv=_action_inv,
method=True, type='reference', string='Action',
selection=[
('ir.actions.report.custom', 'ir.actions.report.custom'),
('ir.actions.report.xml', 'ir.actions.report.xml'),
('ir.actions.act_window', 'ir.actions.act_window'),
('ir.actions.wizard', 'ir.actions.wizard'),
_columns = {
'name': fields.char('Menu', size=64, required=True, translate=True),
'sequence': fields.integer('Sequence'),
'child_id' : fields.one2many('ir.ui.menu', 'parent_id','Child ids'),
'parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', select=True),
'groups_id': many2many_unique('res.groups', 'ir_ui_menu_group_rel',
'menu_id', 'gid', 'Groups'),
'complete_name': fields.function(_get_full_name, method=True,
string='Complete Name', type='char', size=128),
'icon': fields.selection(icons, 'Icon', size=64),
'action': fields.function(_action, fnct_inv=_action_inv,
method=True, type='reference', string='Action',
selection=[
('ir.actions.report.custom', 'ir.actions.report.custom'),
('ir.actions.report.xml', 'ir.actions.report.xml'),
('ir.actions.act_window', 'ir.actions.act_window'),
('ir.actions.wizard', 'ir.actions.wizard'),
('ir.actions.url', 'ir.actions.act_url'),
]),
}
_defaults = {
'icon' : lambda *a: 'STOCK_OPEN',
'sequence' : lambda *a: 10
}
_order = "sequence,id"
]),
}
_defaults = {
'icon' : lambda *a: 'STOCK_OPEN',
'sequence' : lambda *a: 10
}
_order = "sequence,id"
ir_ui_menu()

View File

@ -34,89 +34,89 @@ import netsvc
import os
def _check_xml(self, cr, uid, ids, context={}):
for view in self.browse(cr, uid, ids, context):
eview = etree.fromstring(view.arch)
frng = tools.file_open(os.path.join('base','rng',view.type+'.rng'))
relaxng = etree.RelaxNG(file=frng)
if not relaxng.validate(eview):
logger = netsvc.Logger()
logger.notifyChannel('init', netsvc.LOG_ERROR, 'The view do not fit the required schema !')
logger.notifyChannel('init', netsvc.LOG_ERROR, relaxng.error_log.last_error)
print view.arch
return False
return True
for view in self.browse(cr, uid, ids, context):
eview = etree.fromstring(view.arch)
frng = tools.file_open(os.path.join('base','rng',view.type+'.rng'))
relaxng = etree.RelaxNG(file=frng)
if not relaxng.validate(eview):
logger = netsvc.Logger()
logger.notifyChannel('init', netsvc.LOG_ERROR, 'The view do not fit the required schema !')
logger.notifyChannel('init', netsvc.LOG_ERROR, relaxng.error_log.last_error)
print view.arch
return False
return True
class view(osv.osv):
    """UI view definitions (tree/form/graph/calendar architectures)."""
    _name = 'ir.ui.view'
    _columns = {
        'name': fields.char('View Name', size=64, required=True),
        'model': fields.char('Model', size=64, required=True),
        # Lowest priority wins when several views match (see _order).
        'priority': fields.integer('Priority', required=True),
        'type': fields.selection((
            ('tree', 'Tree'),
            ('form', 'Form'),
            ('graph', 'Graph'),
            ('calendar', 'Calendar')), 'View Type', required=True),
        'arch': fields.text('View Architecture', required=True),
        'inherit_id': fields.many2one('ir.ui.view', 'Inherited View'),
        'field_parent': fields.char('Childs Field', size=64),
    }
    _defaults = {
        'arch': lambda *a: '<?xml version="1.0"?>\n<tree title="Unknwown">\n\t<field name="name"/>\n</tree>',
        'priority': lambda *a: 16,
    }
    _order = "priority"
    _constraints = [
        (_check_xml, 'Invalid XML for View Architecture!', ['arch'])
    ]
view()
#class UserView(osv.osv):
# _name = 'ir.ui.view.user'
# _columns = {
# 'name': fields.char('View Name',size=64, required=True),
# 'model': fields.char('Model', size=64, required=True),
# 'priority': fields.integer('Priority', required=True),
# 'type': fields.selection((
# ('tree','Tree'),
# ('form','Form'),
# ('graph', 'Graph'),
# ('calendar', 'Calendar')), 'View Type', required=True),
# 'arch': fields.text('View Architecture', required=True),
# 'inherit_id': fields.many2one('ir.ui.view', 'Inherited View'),
# 'field_parent': fields.char('Childs Field',size=64),
# 'user_id': fields.many2one('res.users', 'User'),
# 'ref_id': fields.many2one('ir.ui.view', 'Inherited View'),
# }
# _defaults = {
# 'arch': lambda *a: '<?xml version="1.0"?>\n<tree title="Unknwown">\n\t<field name="name"/>\n</tree>',
# 'priority': lambda *a: 16
# }
# _order = "priority"
# _constraints = [
# (_check_xml, 'Invalid XML for View Architecture!', ['arch'])
# ]
# _name = 'ir.ui.view.user'
# _columns = {
# 'name': fields.char('View Name',size=64, required=True),
# 'model': fields.char('Model', size=64, required=True),
# 'priority': fields.integer('Priority', required=True),
# 'type': fields.selection((
# ('tree','Tree'),
# ('form','Form'),
# ('graph', 'Graph'),
# ('calendar', 'Calendar')), 'View Type', required=True),
# 'arch': fields.text('View Architecture', required=True),
# 'inherit_id': fields.many2one('ir.ui.view', 'Inherited View'),
# 'field_parent': fields.char('Childs Field',size=64),
# 'user_id': fields.many2one('res.users', 'User'),
# 'ref_id': fields.many2one('ir.ui.view', 'Inherited View'),
# }
# _defaults = {
# 'arch': lambda *a: '<?xml version="1.0"?>\n<tree title="Unknwown">\n\t<field name="name"/>\n</tree>',
# 'priority': lambda *a: 16
# }
# _order = "priority"
# _constraints = [
# (_check_xml, 'Invalid XML for View Architecture!', ['arch'])
# ]
#
#UserView()
class view_sc(osv.osv):
_name = 'ir.ui.view_sc'
_columns = {
'name': fields.char('Shortcut Name', size=64, required=True),
'res_id': fields.many2one('ir.values','Resource Ref.', ondelete='cascade'),
'sequence': fields.integer('Sequence'),
'user_id': fields.many2one('res.users', 'User Ref.', required=True, ondelete='cascade'),
'resource': fields.char('Resource Name', size=64, required=True)
}
_name = 'ir.ui.view_sc'
_columns = {
'name': fields.char('Shortcut Name', size=64, required=True),
'res_id': fields.many2one('ir.values','Resource Ref.', ondelete='cascade'),
'sequence': fields.integer('Sequence'),
'user_id': fields.many2one('res.users', 'User Ref.', required=True, ondelete='cascade'),
'resource': fields.char('Resource Name', size=64, required=True)
}
def get_sc(self, cr, uid, user_id, model='ir.ui.menu', context={}):
ids = self.search(cr, uid, [('user_id','=',user_id),('resource','=',model)], context=context)
return self.read(cr, uid, ids, ['res_id','name'], context=context)
def get_sc(self, cr, uid, user_id, model='ir.ui.menu', context={}):
ids = self.search(cr, uid, [('user_id','=',user_id),('resource','=',model)], context=context)
return self.read(cr, uid, ids, ['res_id','name'], context=context)
_order = 'sequence'
_defaults = {
'resource': lambda *a: 'ir.ui.menu',
'user_id': lambda obj, cr, uid, context: uid,
}
_order = 'sequence'
_defaults = {
'resource': lambda *a: 'ir.ui.menu',
'user_id': lambda obj, cr, uid, context: uid,
}
view_sc()

View File

@ -32,221 +32,221 @@ import pickle
from tools.translate import _
class ir_values(osv.osv):
_name = 'ir.values'
_name = 'ir.values'
def _value_unpickle(self, cursor, user, ids, name, arg, context=None):
res = {}
for report in self.browse(cursor, user, ids, context=context):
value = report[name[:-9]]
if not report.object and value:
try:
value = str(pickle.loads(value))
except:
pass
res[report.id] = value
return res
def _value_unpickle(self, cursor, user, ids, name, arg, context=None):
res = {}
for report in self.browse(cursor, user, ids, context=context):
value = report[name[:-9]]
if not report.object and value:
try:
value = str(pickle.loads(value))
except:
pass
res[report.id] = value
return res
def _value_pickle(self, cursor, user, id, name, value, arg, context=None):
if context is None:
context = {}
ctx = context.copy()
if 'read_delta' in ctx:
del ctx['read_delta']
if not self.browse(cursor, user, id, context=context).object:
value = pickle.dumps(eval(value))
self.write(cursor, user, id, {name[:-9]: value}, context=ctx)
def _value_pickle(self, cursor, user, id, name, value, arg, context=None):
if context is None:
context = {}
ctx = context.copy()
if 'read_delta' in ctx:
del ctx['read_delta']
if not self.browse(cursor, user, id, context=context).object:
value = pickle.dumps(eval(value))
self.write(cursor, user, id, {name[:-9]: value}, context=ctx)
_columns = {
'name': fields.char('Name', size=128),
'model': fields.char('Model', size=128),
'value': fields.text('Value'),
'value_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
method=True, type='text', string='Value'),
'object': fields.boolean('Is Object'),
'key': fields.char('Type', size=128),
'key2': fields.char('Value', size=256),
'meta': fields.text('Meta Datas'),
'meta_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
method=True, type='text', string='Meta Datas'),
'res_id': fields.integer('Resource ID'),
'user_id': fields.many2one('res.users', 'User', ondelete='cascade'),
'company_id': fields.many2one('res.company', 'Company')
}
_defaults = {
'key': lambda *a: 'action',
'key2': lambda *a: 'tree_but_open',
'company_id': lambda *a: False
}
_columns = {
'name': fields.char('Name', size=128),
'model': fields.char('Model', size=128),
'value': fields.text('Value'),
'value_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
method=True, type='text', string='Value'),
'object': fields.boolean('Is Object'),
'key': fields.char('Type', size=128),
'key2': fields.char('Value', size=256),
'meta': fields.text('Meta Datas'),
'meta_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
method=True, type='text', string='Meta Datas'),
'res_id': fields.integer('Resource ID'),
'user_id': fields.many2one('res.users', 'User', ondelete='cascade'),
'company_id': fields.many2one('res.company', 'Company')
}
_defaults = {
'key': lambda *a: 'action',
'key2': lambda *a: 'tree_but_open',
'company_id': lambda *a: False
}
def _auto_init(self, cr, context={}):
super(ir_values, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_values_key_model_key2_index\'')
if not cr.fetchone():
cr.execute('CREATE INDEX ir_values_key_model_key2_index ON ir_values (key, model, key2)')
cr.commit()
def _auto_init(self, cr, context={}):
super(ir_values, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_values_key_model_key2_index\'')
if not cr.fetchone():
cr.execute('CREATE INDEX ir_values_key_model_key2_index ON ir_values (key, model, key2)')
cr.commit()
def set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=False, preserve_user=False, company=False):
if type(value)==type(u''):
value = value.encode('utf8')
if not isobject:
value = pickle.dumps(value)
if meta:
meta = pickle.dumps(meta)
ids_res = []
for model in models:
if type(model)==type([]) or type(model)==type(()):
model,res_id = model
else:
res_id=False
if replace:
if key in ('meta', 'default'):
ids = self.search(cr, uid, [
('key', '=', key),
('key2', '=', key2),
('name', '=', name),
('model', '=', model),
('res_id', '=', res_id),
('user_id', '=', preserve_user and uid)
])
else:
ids = self.search(cr, uid, [
('key', '=', key),
('key2', '=', key2),
('value', '=', value),
('model', '=', model),
('res_id', '=', res_id),
('user_id', '=', preserve_user and uid)
])
self.unlink(cr, uid, ids)
vals = {
'name': name,
'value': value,
'model': model,
'object': isobject,
'key': key,
'key2': key2 and key2[:200],
'meta': meta,
'user_id': preserve_user and uid,
}
if company:
cid = self.pool.get('res.users').browse(cr, uid, uid, context={}).company_id.id
vals['company_id']=cid
if res_id:
vals['res_id']= res_id
ids_res.append(self.create(cr, uid, vals))
return ids_res
def set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=False, preserve_user=False, company=False):
if type(value)==type(u''):
value = value.encode('utf8')
if not isobject:
value = pickle.dumps(value)
if meta:
meta = pickle.dumps(meta)
ids_res = []
for model in models:
if type(model)==type([]) or type(model)==type(()):
model,res_id = model
else:
res_id=False
if replace:
if key in ('meta', 'default'):
ids = self.search(cr, uid, [
('key', '=', key),
('key2', '=', key2),
('name', '=', name),
('model', '=', model),
('res_id', '=', res_id),
('user_id', '=', preserve_user and uid)
])
else:
ids = self.search(cr, uid, [
('key', '=', key),
('key2', '=', key2),
('value', '=', value),
('model', '=', model),
('res_id', '=', res_id),
('user_id', '=', preserve_user and uid)
])
self.unlink(cr, uid, ids)
vals = {
'name': name,
'value': value,
'model': model,
'object': isobject,
'key': key,
'key2': key2 and key2[:200],
'meta': meta,
'user_id': preserve_user and uid,
}
if company:
cid = self.pool.get('res.users').browse(cr, uid, uid, context={}).company_id.id
vals['company_id']=cid
if res_id:
vals['res_id']= res_id
ids_res.append(self.create(cr, uid, vals))
return ids_res
def get(self, cr, uid, key, key2, models, meta=False, context={}, res_id_req=False, without_user=True, key2_req=True):
result = []
for m in models:
if type(m)==type([]) or type(m)==type(()):
m,res_id = m
else:
res_id=False
def get(self, cr, uid, key, key2, models, meta=False, context={}, res_id_req=False, without_user=True, key2_req=True):
result = []
for m in models:
if type(m)==type([]) or type(m)==type(()):
m,res_id = m
else:
res_id=False
where1 = ['key=%s','model=%s']
where2 = [key,str(m)]
where_opt = []
if key2:
where1.append('key2=%s')
where2.append(key2[:200])
else:
dest = where1
if not key2_req or meta:
dest=where_opt
dest.append('key2 is null')
where1 = ['key=%s','model=%s']
where2 = [key,str(m)]
where_opt = []
if key2:
where1.append('key2=%s')
where2.append(key2[:200])
else:
dest = where1
if not key2_req or meta:
dest=where_opt
dest.append('key2 is null')
if res_id_req and (models[-1][0]==m):
if res_id:
where1.append('res_id=%d' % (res_id,))
else:
where1.append('(res_id is NULL)')
elif res_id:
if (models[-1][0]==m):
where1.append('(res_id=%d or (res_id is null))' % (res_id,))
where_opt.append('res_id=%d' % (res_id,))
else:
where1.append('res_id=%d' % (res_id,))
if res_id_req and (models[-1][0]==m):
if res_id:
where1.append('res_id=%d' % (res_id,))
else:
where1.append('(res_id is NULL)')
elif res_id:
if (models[-1][0]==m):
where1.append('(res_id=%d or (res_id is null))' % (res_id,))
where_opt.append('res_id=%d' % (res_id,))
else:
where1.append('res_id=%d' % (res_id,))
# if not without_user:
where_opt.append('user_id=%d' % (uid,))
# if not without_user:
where_opt.append('user_id=%d' % (uid,))
result = []
ok = True
while ok:
if not where_opt:
cr.execute('select id from ir_values where ' +\
' and '.join(where1)+' and user_id is null', where2)
else:
cr.execute('select id from ir_values where ' +\
' and '.join(where1+where_opt), where2)
result.extend([x[0] for x in cr.fetchall()])
if len(where_opt):
where_opt.pop()
else:
ok = False
result = []
ok = True
while ok:
if not where_opt:
cr.execute('select id from ir_values where ' +\
' and '.join(where1)+' and user_id is null', where2)
else:
cr.execute('select id from ir_values where ' +\
' and '.join(where1+where_opt), where2)
result.extend([x[0] for x in cr.fetchall()])
if len(where_opt):
where_opt.pop()
else:
ok = False
if result:
break
if result:
break
if not result:
return []
cid = self.pool.get('res.users').browse(cr, uid, uid, context={}).company_id.id
cr.execute('select id,name,value,object,meta, key ' \
'from ir_values ' \
'where id in ('+','.join(map(str,result))+') ' \
'and (company_id is null or company_id = %d) '\
'ORDER BY user_id', (cid,))
result = cr.fetchall()
if not result:
return []
cid = self.pool.get('res.users').browse(cr, uid, uid, context={}).company_id.id
cr.execute('select id,name,value,object,meta, key ' \
'from ir_values ' \
'where id in ('+','.join(map(str,result))+') ' \
'and (company_id is null or company_id = %d) '\
'ORDER BY user_id', (cid,))
result = cr.fetchall()
def _result_get(x, keys):
if x[1] in keys:
return False
keys.append(x[1])
if x[3]:
model,id = x[2].split(',')
try:
id = int(id)
datas = self.pool.get(model).read(cr, uid, [id], False, context)
except:
return False
if not len(datas):
#ir_del(cr, uid, x[0])
return False
def clean(x):
for key in ('report_sxw_content', 'report_rml_content',
'report_sxw', 'report_rml', 'report_sxw_content_data',
'report_rml_content_data'):
if key in x:
del x[key]
return x
datas = clean(datas[0])
else:
datas = pickle.loads(x[2])
if meta:
meta2 = pickle.loads(x[4])
return (x[0],x[1],datas,meta2)
return (x[0],x[1],datas)
keys = []
res = filter(bool, map(lambda x: _result_get(x, keys), list(result)))
res2 = res[:]
for r in res:
if type(r) == type([]):
if r[2]['type'] == 'ir.actions.report.xml' or r[2]['type'] == 'ir.actions.report.xml':
print
if r[2].has_key('groups_id'):
groups = r[2]['groups_id']
if len(groups) > 0:
group_ids = ','.join([ str(x) for x in r[2]['groups_id']])
cr.execute("select count(*) from res_groups_users_rel where gid in (%s) and uid='%s'" % (group_ids, uid))
gr_ids = cr.fetchall()
if not gr_ids[0][0] > 0:
res2.remove(r)
else:
#raise osv.except_osv('Error !','You have not permission to perform operation !!!')
res2.remove(r)
return res2
def _result_get(x, keys):
if x[1] in keys:
return False
keys.append(x[1])
if x[3]:
model,id = x[2].split(',')
try:
id = int(id)
datas = self.pool.get(model).read(cr, uid, [id], False, context)
except:
return False
if not len(datas):
#ir_del(cr, uid, x[0])
return False
def clean(x):
for key in ('report_sxw_content', 'report_rml_content',
'report_sxw', 'report_rml', 'report_sxw_content_data',
'report_rml_content_data'):
if key in x:
del x[key]
return x
datas = clean(datas[0])
else:
datas = pickle.loads(x[2])
if meta:
meta2 = pickle.loads(x[4])
return (x[0],x[1],datas,meta2)
return (x[0],x[1],datas)
keys = []
res = filter(bool, map(lambda x: _result_get(x, keys), list(result)))
res2 = res[:]
for r in res:
if type(r) == type([]):
if r[2]['type'] == 'ir.actions.report.xml' or r[2]['type'] == 'ir.actions.report.xml':
print
if r[2].has_key('groups_id'):
groups = r[2]['groups_id']
if len(groups) > 0:
group_ids = ','.join([ str(x) for x in r[2]['groups_id']])
cr.execute("select count(*) from res_groups_users_rel where gid in (%s) and uid='%s'" % (group_ids, uid))
gr_ids = cr.fetchall()
if not gr_ids[0][0] > 0:
res2.remove(r)
else:
#raise osv.except_osv('Error !','You have not permission to perform operation !!!')
res2.remove(r)
return res2
ir_values()

View File

@ -30,57 +30,57 @@
from osv import fields,osv
class wizard_model_menu(osv.osv_memory):
_name = 'wizard.ir.model.menu.create'
_columns = {
'model_id': fields.many2one('ir.model','Model', required=True),
'menu_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True),
'name': fields.char('Menu Name', size=64, required=True),
'view_ids': fields.one2many('wizard.ir.model.menu.create.line', 'wizard_id', 'Views'),
}
_defaults = {
'model_id': lambda self,cr,uid,ctx: ctx.get('model_id', False)
}
def menu_create(self, cr, uid, ids, context={}):
for menu in self.browse(cr, uid, ids, context):
view_mode = []
views = []
for view in menu.view_ids:
view_mode.append(view.view_type)
views.append( (0,0,{
'view_id': view.view_id and view.view_id.id or False,
'view_mode': view.view_type,
'sequence': view.sequence
}))
val = {
'name': menu.name,
'res_model': menu.model_id.model,
'view_type': 'form',
'view_mode': ','.join(view_mode)
}
if views:
val['view_ids'] = views
else:
val['view_mode'] = 'tree,form'
action_id = self.pool.get('ir.actions.act_window').create(cr, uid, val)
self.pool.get('ir.ui.menu').create(cr, uid, {
'name': menu.name,
'parent_id': menu.menu_id.id,
'action': 'ir.actions.act_window,%d' % (action_id,),
'icon': 'STOCK_INDENT'
}, context)
return {'type':'ir.actions.act_window_close'}
_name = 'wizard.ir.model.menu.create'
_columns = {
'model_id': fields.many2one('ir.model','Model', required=True),
'menu_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True),
'name': fields.char('Menu Name', size=64, required=True),
'view_ids': fields.one2many('wizard.ir.model.menu.create.line', 'wizard_id', 'Views'),
}
_defaults = {
'model_id': lambda self,cr,uid,ctx: ctx.get('model_id', False)
}
def menu_create(self, cr, uid, ids, context={}):
for menu in self.browse(cr, uid, ids, context):
view_mode = []
views = []
for view in menu.view_ids:
view_mode.append(view.view_type)
views.append( (0,0,{
'view_id': view.view_id and view.view_id.id or False,
'view_mode': view.view_type,
'sequence': view.sequence
}))
val = {
'name': menu.name,
'res_model': menu.model_id.model,
'view_type': 'form',
'view_mode': ','.join(view_mode)
}
if views:
val['view_ids'] = views
else:
val['view_mode'] = 'tree,form'
action_id = self.pool.get('ir.actions.act_window').create(cr, uid, val)
self.pool.get('ir.ui.menu').create(cr, uid, {
'name': menu.name,
'parent_id': menu.menu_id.id,
'action': 'ir.actions.act_window,%d' % (action_id,),
'icon': 'STOCK_INDENT'
}, context)
return {'type':'ir.actions.act_window_close'}
wizard_model_menu()
class wizard_model_menu_line(osv.osv_memory):
_name = 'wizard.ir.model.menu.create.line'
_columns = {
'wizard_id': fields.many2one('wizard.ir.model.menu.create','Wizard'),
'sequence': fields.integer('Sequence'),
'view_type': fields.selection([('tree','Tree'),('form','Form'),('graph','Graph'),('calendar','Calendar')],'View Type',required=True),
'view_id': fields.many2one('ir.ui.view', 'View'),
}
_defaults = {
'view_type': lambda self,cr,uid,ctx: 'tree'
}
_name = 'wizard.ir.model.menu.create.line'
_columns = {
'wizard_id': fields.many2one('wizard.ir.model.menu.create','Wizard'),
'sequence': fields.integer('Sequence'),
'view_type': fields.selection([('tree','Tree'),('form','Form'),('graph','Graph'),('calendar','Calendar')],'View Type',required=True),
'view_id': fields.many2one('ir.ui.view', 'View'),
}
_defaults = {
'view_type': lambda self,cr,uid,ctx: 'tree'
}
wizard_model_menu_line()

View File

@ -34,175 +34,175 @@ import report,pooler,tools
def graph_get(cr, graph, wkf_id, nested=False, workitem={}):
import pydot
cr.execute('select * from wkf_activity where wkf_id=%d', (wkf_id,))
nodes = cr.dictfetchall()
activities = {}
actfrom = {}
actto = {}
for n in nodes:
activities[n['id']] = n
if n['subflow_id'] and nested:
cr.execute('select * from wkf where id=%d', (n['subflow_id'],))
wkfinfo = cr.dictfetchone()
graph2 = pydot.Cluster('subflow'+str(n['subflow_id']), fontsize=12, label = "Subflow: "+n['name']+'\\nOSV: '+wkfinfo['osv'])
(s1,s2) = graph_get(cr, graph2, n['subflow_id'], nested,workitem)
graph.add_subgraph(graph2)
actfrom[n['id']] = s2
actto[n['id']] = s1
else:
args = {}
if n['flow_start'] or n['flow_stop']:
args['style']='filled'
args['color']='lightgrey'
args['label']=n['name']
if n['subflow_id']:
args['shape'] = 'box'
if n['id'] in workitem:
args['label']+='\\nx '+str(workitem[n['id']])
args['color'] = "red"
graph.add_node(pydot.Node(n['id'], **args))
actfrom[n['id']] = (n['id'],{})
actto[n['id']] = (n['id'],{})
cr.execute('select * from wkf_transition where act_from in ('+','.join(map(lambda x: str(x['id']),nodes))+')')
transitions = cr.dictfetchall()
for t in transitions:
args = {}
args['label'] = str(t['condition']).replace(' or ', '\\nor ').replace(' and ', '\\nand ')
if t['signal']:
args['label'] += '\\n'+str(t['signal'])
args['style'] = 'bold'
import pydot
cr.execute('select * from wkf_activity where wkf_id=%d', (wkf_id,))
nodes = cr.dictfetchall()
activities = {}
actfrom = {}
actto = {}
for n in nodes:
activities[n['id']] = n
if n['subflow_id'] and nested:
cr.execute('select * from wkf where id=%d', (n['subflow_id'],))
wkfinfo = cr.dictfetchone()
graph2 = pydot.Cluster('subflow'+str(n['subflow_id']), fontsize=12, label = "Subflow: "+n['name']+'\\nOSV: '+wkfinfo['osv'])
(s1,s2) = graph_get(cr, graph2, n['subflow_id'], nested,workitem)
graph.add_subgraph(graph2)
actfrom[n['id']] = s2
actto[n['id']] = s1
else:
args = {}
if n['flow_start'] or n['flow_stop']:
args['style']='filled'
args['color']='lightgrey'
args['label']=n['name']
if n['subflow_id']:
args['shape'] = 'box'
if n['id'] in workitem:
args['label']+='\\nx '+str(workitem[n['id']])
args['color'] = "red"
graph.add_node(pydot.Node(n['id'], **args))
actfrom[n['id']] = (n['id'],{})
actto[n['id']] = (n['id'],{})
cr.execute('select * from wkf_transition where act_from in ('+','.join(map(lambda x: str(x['id']),nodes))+')')
transitions = cr.dictfetchall()
for t in transitions:
args = {}
args['label'] = str(t['condition']).replace(' or ', '\\nor ').replace(' and ', '\\nand ')
if t['signal']:
args['label'] += '\\n'+str(t['signal'])
args['style'] = 'bold'
if activities[t['act_from']]['split_mode']=='AND':
args['arrowtail']='box'
elif str(activities[t['act_from']]['split_mode'])=='OR ':
args['arrowtail']='inv'
if activities[t['act_from']]['split_mode']=='AND':
args['arrowtail']='box'
elif str(activities[t['act_from']]['split_mode'])=='OR ':
args['arrowtail']='inv'
if activities[t['act_to']]['join_mode']=='AND':
args['arrowhead']='crow'
if activities[t['act_to']]['join_mode']=='AND':
args['arrowhead']='crow'
activity_from = actfrom[t['act_from']][1].get(t['signal'], actfrom[t['act_from']][0])
activity_to = actto[t['act_to']][1].get(t['signal'], actto[t['act_to']][0])
graph.add_edge(pydot.Edge( activity_from ,activity_to, fontsize=10, **args))
nodes = cr.dictfetchall()
cr.execute('select id from wkf_activity where flow_start=True and wkf_id=%d limit 1', (wkf_id,))
start = cr.fetchone()[0]
cr.execute("select 'subflow.'||name,id from wkf_activity where flow_stop=True and wkf_id=%d", (wkf_id,))
stop = cr.fetchall()
stop = (stop[0][1], dict(stop))
return ((start,{}),stop)
activity_from = actfrom[t['act_from']][1].get(t['signal'], actfrom[t['act_from']][0])
activity_to = actto[t['act_to']][1].get(t['signal'], actto[t['act_to']][0])
graph.add_edge(pydot.Edge( activity_from ,activity_to, fontsize=10, **args))
nodes = cr.dictfetchall()
cr.execute('select id from wkf_activity where flow_start=True and wkf_id=%d limit 1', (wkf_id,))
start = cr.fetchone()[0]
cr.execute("select 'subflow.'||name,id from wkf_activity where flow_stop=True and wkf_id=%d", (wkf_id,))
stop = cr.fetchall()
stop = (stop[0][1], dict(stop))
return ((start,{}),stop)
def graph_instance_get(cr, graph, inst_id, nested=False):
workitems = {}
cr.execute('select * from wkf_instance where id=%d', (inst_id,))
inst = cr.dictfetchone()
workitems = {}
cr.execute('select * from wkf_instance where id=%d', (inst_id,))
inst = cr.dictfetchone()
def workitem_get(instance):
cr.execute('select act_id,count(*) from wkf_workitem where inst_id=%d group by act_id', (instance,))
workitems = dict(cr.fetchall())
def workitem_get(instance):
cr.execute('select act_id,count(*) from wkf_workitem where inst_id=%d group by act_id', (instance,))
workitems = dict(cr.fetchall())
cr.execute('select subflow_id from wkf_workitem where inst_id=%d', (instance,))
for (subflow_id,) in cr.fetchall():
workitems.update(workitem_get(subflow_id))
return workitems
graph_get(cr, graph, inst['wkf_id'], nested, workitem_get(inst_id))
cr.execute('select subflow_id from wkf_workitem where inst_id=%d', (instance,))
for (subflow_id,) in cr.fetchall():
workitems.update(workitem_get(subflow_id))
return workitems
graph_get(cr, graph, inst['wkf_id'], nested, workitem_get(inst_id))
#
# TODO: pas clean: concurrent !!!
#
class report_graph_instance(object):
def __init__(self, cr, uid, ids, data):
logger = netsvc.Logger()
try:
import pydot
except Exception,e:
logger.notifyChannel('workflow', netsvc.LOG_WARNING,
'Import Error for pydot, you will not be able to render workflows\n'
'Consider Installing PyDot or dependencies: http://dkbza.org/pydot.html')
raise e
self.done = False
def __init__(self, cr, uid, ids, data):
logger = netsvc.Logger()
try:
import pydot
except Exception,e:
logger.notifyChannel('workflow', netsvc.LOG_WARNING,
'Import Error for pydot, you will not be able to render workflows\n'
'Consider Installing PyDot or dependencies: http://dkbza.org/pydot.html')
raise e
self.done = False
try:
cr.execute('select * from wkf where osv=%s limit 1',
(data['model'],))
wkfinfo = cr.dictfetchone()
if not wkfinfo:
ps_string = '''%PS-Adobe-3.0
try:
cr.execute('select * from wkf where osv=%s limit 1',
(data['model'],))
wkfinfo = cr.dictfetchone()
if not wkfinfo:
ps_string = '''%PS-Adobe-3.0
/inch {72 mul} def
/Times-Roman findfont 50 scalefont setfont
1.5 inch 15 inch moveto
(No workflow defined) show
showpage'''
else:
cr.execute('SELECT id FROM wkf_instance \
WHERE res_id=%d AND wkf_id=%d \
ORDER BY state LIMIT 1',
(data['id'], wkfinfo['id']))
inst_id = cr.fetchone()
if not inst_id:
ps_string = '''%PS-Adobe-3.0
else:
cr.execute('SELECT id FROM wkf_instance \
WHERE res_id=%d AND wkf_id=%d \
ORDER BY state LIMIT 1',
(data['id'], wkfinfo['id']))
inst_id = cr.fetchone()
if not inst_id:
ps_string = '''%PS-Adobe-3.0
/inch {72 mul} def
/Times-Roman findfont 50 scalefont setfont
1.5 inch 15 inch moveto
(No workflow instance defined) show
showpage'''
else:
inst_id = inst_id[0]
graph = pydot.Dot(fontsize=16, label="\\n\\nWorkflow: %s\\n OSV: %s" % (wkfinfo['name'],wkfinfo['osv']))
graph.set('size', '10.7,7.3')
graph.set('center', '1')
graph.set('ratio', 'auto')
graph.set('rotate', '90')
graph.set('rankdir', 'LR')
graph_instance_get(cr, graph, inst_id, data.get('nested', False))
ps_string = graph.create_ps(prog='dot')
except Exception, e:
import traceback, sys
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
logger.notifyChannel('workflow', netsvc.LOG_ERROR, 'Exception in call: ' + tb_s)
# string is in PS, like the success message would have been
ps_string = '''%PS-Adobe-3.0
else:
inst_id = inst_id[0]
graph = pydot.Dot(fontsize=16, label="\\n\\nWorkflow: %s\\n OSV: %s" % (wkfinfo['name'],wkfinfo['osv']))
graph.set('size', '10.7,7.3')
graph.set('center', '1')
graph.set('ratio', 'auto')
graph.set('rotate', '90')
graph.set('rankdir', 'LR')
graph_instance_get(cr, graph, inst_id, data.get('nested', False))
ps_string = graph.create_ps(prog='dot')
except Exception, e:
import traceback, sys
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
logger.notifyChannel('workflow', netsvc.LOG_ERROR, 'Exception in call: ' + tb_s)
# string is in PS, like the success message would have been
ps_string = '''%PS-Adobe-3.0
/inch {72 mul} def
/Times-Roman findfont 50 scalefont setfont
1.5 inch 15 inch moveto
(No workflow available) show
showpage'''
if os.name == "nt":
prog = 'ps2pdf.bat'
else:
prog = 'ps2pdf'
args = (prog, '-', '-')
input, output = tools.exec_command_pipe(*args)
input.write(ps_string)
input.close()
self.result = output.read()
output.close()
self.done = True
if os.name == "nt":
prog = 'ps2pdf.bat'
else:
prog = 'ps2pdf'
args = (prog, '-', '-')
input, output = tools.exec_command_pipe(*args)
input.write(ps_string)
input.close()
self.result = output.read()
output.close()
self.done = True
def is_done(self):
return self.done
def is_done(self):
return self.done
def get(self):
if self.done:
return self.result
else:
return None
def get(self):
if self.done:
return self.result
else:
return None
class report_graph(report.interface.report_int):
def __init__(self, name, table):
report.interface.report_int.__init__(self, name)
self.table = table
def __init__(self, name, table):
report.interface.report_int.__init__(self, name)
self.table = table
def result(self):
if self.obj.is_done():
return (True, self.obj.get(), 'pdf')
else:
return (False, False, False)
def result(self):
if self.obj.is_done():
return (True, self.obj.get(), 'pdf')
else:
return (False, False, False)
def create(self, cr, uid, ids, data, context={}):
self.obj = report_graph_instance(cr, uid, ids, data)
return (self.obj.get(), 'pdf')
def create(self, cr, uid, ids, data, context={}):
self.obj = report_graph_instance(cr, uid, ids, data)
return (self.obj.get(), 'pdf')
report_graph('report.workflow.instance.graph', 'ir.workflow')

0
bin/addons/base/ir/workflow/pydot/__init__.py Executable file → Normal file
View File

486
bin/addons/base/ir/workflow/pydot/dot_parser.py Executable file → Normal file
View File

@ -14,339 +14,339 @@ import glob
import pydot
from pyparsing import __version__ as pyparsing_version
from pyparsing import Literal, CaselessLiteral, Word, \
Upcase, OneOrMore, ZeroOrMore, Forward, NotAny, \
delimitedList, oneOf, Group, Optional, Combine, \
alphas, nums, restOfLine, cStyleComment, nums, \
alphanums, printables, empty, quotedString, \
ParseException, ParseResults, CharsNotIn, _noncomma
from pyparsing import Literal, CaselessLiteral, Word, \
Upcase, OneOrMore, ZeroOrMore, Forward, NotAny, \
delimitedList, oneOf, Group, Optional, Combine, \
alphas, nums, restOfLine, cStyleComment, nums, \
alphanums, printables, empty, quotedString, \
ParseException, ParseResults, CharsNotIn, _noncomma
class P_AttrList:
def __init__(self, toks):
self.attrs = {}
i = 0
while i < len(toks):
attrname = toks[i]
attrvalue = toks[i+1]
self.attrs[attrname] = attrvalue
i += 2
def __init__(self, toks):
self.attrs = {}
i = 0
while i < len(toks):
attrname = toks[i]
attrvalue = toks[i+1]
self.attrs[attrname] = attrvalue
i += 2
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.attrs)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.attrs)
class DefaultStatement(P_AttrList):
def __init__(self, default_type, attrs):
self.default_type = default_type
self.attrs = attrs
def __init__(self, default_type, attrs):
self.default_type = default_type
self.attrs = attrs
def __repr__(self):
return "%s(%s, %r)" % \
(self.__class__.__name__, self.default_type, self.attrs)
def __repr__(self):
return "%s(%s, %r)" % \
(self.__class__.__name__, self.default_type, self.attrs)
def push_top_graph_stmt(str, loc, toks):
attrs = {}
g = None
attrs = {}
g = None
for element in toks:
if isinstance(element, ParseResults) or \
isinstance(element, tuple) or \
isinstance(element, list):
for element in toks:
if isinstance(element, ParseResults) or \
isinstance(element, tuple) or \
isinstance(element, list):
element = element[0]
element = element[0]
if element == 'strict':
attrs['strict'] = True
elif element in ['graph', 'digraph']:
attrs['graph_type'] = element
elif type(element) == type(''):
attrs['graph_name'] = element
elif isinstance(element, pydot.Graph):
g = pydot.Graph(**attrs)
g.__dict__.update(element.__dict__)
for e in g.get_edge_list():
e.parent_graph = g
for e in g.get_node_list():
e.parent_graph = g
for e in g.get_subgraph_list():
e.set_graph_parent(g)
if element == 'strict':
attrs['strict'] = True
elif element in ['graph', 'digraph']:
attrs['graph_type'] = element
elif type(element) == type(''):
attrs['graph_name'] = element
elif isinstance(element, pydot.Graph):
g = pydot.Graph(**attrs)
g.__dict__.update(element.__dict__)
for e in g.get_edge_list():
e.parent_graph = g
for e in g.get_node_list():
e.parent_graph = g
for e in g.get_subgraph_list():
e.set_graph_parent(g)
elif isinstance(element, P_AttrList):
attrs.update(element.attrs)
else:
raise ValueError, "Unknown element statement: %r " % element
elif isinstance(element, P_AttrList):
attrs.update(element.attrs)
else:
raise ValueError, "Unknown element statement: %r " % element
if g is not None:
g.__dict__.update(attrs)
return g
if g is not None:
g.__dict__.update(attrs)
return g
def add_defaults(element, defaults):
d = element.__dict__
for key, value in defaults.items():
if not d.get(key):
d[key] = value
d = element.__dict__
for key, value in defaults.items():
if not d.get(key):
d[key] = value
def add_elements(g, toks, defaults_graph=None, defaults_node=None, defaults_edge=None):
    """Append the parsed elements in *toks* to the pydot graph *g*.

    Subgraphs, nodes and edges are added after merging in the current
    default attribute dictionaries; nested ParseResults are recursed
    into; DefaultStatements update the per-kind defaults by adding a
    pseudo-node ('graph'/'node'/'edge') carrying the attributes; a bare
    attribute list is applied to *g* itself.

    Raises ValueError for unrecognized statements.
    """
    # Mutable-default pitfall avoided: fresh dicts per call unless supplied.
    if defaults_graph is None:
        defaults_graph = {}
    if defaults_node is None:
        defaults_node = {}
    if defaults_edge is None:
        defaults_edge = {}

    for element in toks:
        if isinstance(element, pydot.Graph):
            add_defaults(element, defaults_graph)
            g.add_subgraph(element)
        elif isinstance(element, pydot.Node):
            add_defaults(element, defaults_node)
            g.add_node(element)
        elif isinstance(element, pydot.Edge):
            add_defaults(element, defaults_edge)
            g.add_edge(element)
        elif isinstance(element, ParseResults):
            # Nested parse results: flatten by recursing one element at a time.
            for e in element:
                add_elements(g, [e], defaults_graph, defaults_node, defaults_edge)
        elif isinstance(element, DefaultStatement):
            if element.default_type == 'graph':
                default_graph_attrs = pydot.Node('graph')
                default_graph_attrs.__dict__.update(element.attrs)
                g.add_node(default_graph_attrs)
                # defaults_graph.update(element.attrs)
                # g.__dict__.update(element.attrs)
            elif element.default_type == 'node':
                default_node_attrs = pydot.Node('node')
                default_node_attrs.__dict__.update(element.attrs)
                g.add_node(default_node_attrs)
                # defaults_node.update(element.attrs)
            elif element.default_type == 'edge':
                default_edge_attrs = pydot.Node('edge')
                default_edge_attrs.__dict__.update(element.attrs)
                g.add_node(default_edge_attrs)
                # defaults_edge.update(element.attrs)
            else:
                # Py2/Py3-compatible raise form.
                raise ValueError("Unknown DefaultStatement: %s " % element.default_type)
        elif isinstance(element, P_AttrList):
            g.__dict__.update(element.attrs)
        else:
            raise ValueError("Unknown element statement: %r " % element)
def push_graph_stmt(str, loc, toks):
    """pyparsing action: wrap a parsed statement list into a pydot.Subgraph."""
    g = pydot.Subgraph()
    add_elements(g, toks)
    return g
def push_subgraph_stmt(str, loc, toks):
    """pyparsing action: attach the parsed name to a 'subgraph ID {...}' form.

    A 3-token group is (keyword, name, graph); the graph gets the name.
    Returns the last named graph encountered.
    """
    for e in toks:
        if len(e) == 3:
            g = e[2]
            g.set_name(e[1])
    return g
def push_default_stmt(str, loc, toks):
    """pyparsing action for 'graph [..]' / 'node [..]' / 'edge [..]' statements.

    The pydot class instances should be marked as default statements to be
    inherited by actual graphs, nodes and edges.

    Raises ValueError when the keyword is not graph/node/edge.
    """
    default_type = toks[0][0]
    if len(toks) > 1:
        attrs = toks[1].attrs
    else:
        attrs = {}

    if default_type in ['graph', 'node', 'edge']:
        return DefaultStatement(default_type, attrs)
    else:
        # Py2/Py3-compatible raise form.
        raise ValueError("Unknown default statement: %r " % toks)
def push_attr_list(str, loc, toks):
    """pyparsing action: wrap attribute tokens in a P_AttrList marker object."""
    return P_AttrList(toks)
def get_port(node):
    """Return the port name of a parsed node token, or None when absent.

    A port is present when the node token carries a trailing ParseResults
    of the shape (':', <port name>).
    """
    if len(node) > 1:
        if isinstance(node[1], ParseResults):
            if len(node[1][0]) == 2:
                if node[1][0][0] == ':':
                    return node[1][0][1]
    return None
def push_edge_stmt(str, loc, toks):
    """pyparsing action: build pydot.Edge objects from an edge statement.

    Collects all attribute lists attached to the statement, then walks the
    node chain (a -> b -> c ...) pairwise, appending ':port' suffixes where
    present. Returns the list of created edges.
    """
    tok_attrs = [a for a in toks if isinstance(a, P_AttrList)]
    attrs = {}
    for a in tok_attrs:
        attrs.update(a.attrs)

    n_prev = toks[0]
    e = []
    # Nodes alternate with edge operators, so the targets are every
    # second token starting at index 2.
    for n_next in tuple(toks)[2::2]:
        port = get_port(n_prev)
        if port is not None:
            n_prev_port = ':' + port
        else:
            n_prev_port = ''

        port = get_port(n_next)
        if port is not None:
            n_next_port = ':' + port
        else:
            n_next_port = ''

        e.append(pydot.Edge(n_prev[0] + n_prev_port, n_next[0] + n_next_port, **attrs))
        n_prev = n_next

    return e
def push_node_stmt(str, loc, toks):
    """pyparsing action: build a pydot.Node from a node statement.

    A second token, when present, carries the node's attribute list. A
    grouped name (list/tuple) is unwrapped to its first element.
    """
    if len(toks) == 2:
        attrs = toks[1].attrs
    else:
        attrs = {}

    node_name = toks[0]
    if isinstance(node_name, (list, tuple)):
        if len(node_name) > 0:
            node_name = node_name[0]

    return pydot.Node(node_name, **attrs)
def strip_quotes(s, l, t):
    """pyparsing action: remove surrounding double quotes from the matched token."""
    return [t[0].strip('"')]
graphparser = None
def GRAPH_DEF():
    """Build (once) and return the pyparsing grammar for the DOT language.

    The assembled parser is cached in the module-level ``graphparser`` so
    repeated calls reuse the same grammar instance; the parse actions wire
    each production to the push_* constructors above.
    """
    global graphparser

    if not graphparser:
        # punctuation
        colon = Literal(":")
        lbrace = Literal("{")
        rbrace = Literal("}")
        lbrack = Literal("[")
        rbrack = Literal("]")
        lparen = Literal("(")
        rparen = Literal(")")
        equals = Literal("=")
        comma = Literal(",")
        dot = Literal(".")
        slash = Literal("/")
        bslash = Literal("\\")
        star = Literal("*")
        semi = Literal(";")
        at = Literal("@")
        minus = Literal("-")

        # keywords
        strict_ = Literal("strict")
        graph_ = Literal("graph")
        digraph_ = Literal("digraph")
        subgraph_ = Literal("subgraph")
        node_ = Literal("node")
        edge_ = Literal("edge")

        identifier = Word(alphanums + "_").setName("identifier")

        double_quote = Literal('"')
        double_quoted_string = \
            Combine(double_quote + ZeroOrMore(CharsNotIn('"')) + double_quote)

        alphastring_ = OneOrMore(CharsNotIn(_noncomma))

        ID = (identifier | double_quoted_string.setParseAction(strip_quotes) |
              alphastring_).setName("ID")

        html_text = Combine(Literal("<<") + OneOrMore(CharsNotIn(",]")))

        float_number = Combine(Optional(minus) +
                               OneOrMore(Word(nums + "."))).setName("float_number")

        righthand_id = (float_number | ID | html_text).setName("righthand_id")

        port_angle = (at + ID).setName("port_angle")

        port_location = (Group(colon + ID) |
                         Group(colon + lparen + ID + comma + ID + rparen)).setName("port_location")

        port = (Group(port_location + Optional(port_angle)) |
                Group(port_angle + Optional(port_location))).setName("port")

        node_id = (ID + Optional(port))
        a_list = OneOrMore(ID + Optional(equals.suppress() + righthand_id) +
                           Optional(comma.suppress())).setName("a_list")

        attr_list = OneOrMore(lbrack.suppress() + Optional(a_list) +
                              rbrack.suppress()).setName("attr_list")

        attr_stmt = (Group(graph_ | node_ | edge_) + attr_list).setName("attr_stmt")

        edgeop = (Literal("--") | Literal("->")).setName("edgeop")

        # Forward-declared so graph_stmt and stmt can reference each other.
        stmt_list = Forward()
        graph_stmt = Group(lbrace.suppress() + Optional(stmt_list) +
                           rbrace.suppress()).setName("graph_stmt")

        subgraph = (Group(Optional(subgraph_ + Optional(ID)) + graph_stmt) |
                    Group(subgraph_ + ID)).setName("subgraph")

        edgeRHS = OneOrMore(edgeop + Group(node_id | subgraph))

        edge_stmt = Group(node_id | subgraph) + edgeRHS + Optional(attr_list)

        node_stmt = (node_id + Optional(attr_list) + semi.suppress()).setName("node_stmt")

        assignment = (ID + equals.suppress() + righthand_id).setName("assignment")
        stmt = (assignment | edge_stmt | attr_stmt | node_stmt | subgraph).setName("stmt")
        stmt_list << OneOrMore(stmt + Optional(semi.suppress()))

        graphparser = (Optional(strict_) + Group((graph_ | digraph_)) +
                       Optional(ID) + graph_stmt).setResultsName("graph")

        singleLineComment = "//" + restOfLine
        graphparser.ignore(singleLineComment)
        graphparser.ignore(cStyleComment)

        assignment.setParseAction(push_attr_list)
        a_list.setParseAction(push_attr_list)
        edge_stmt.setParseAction(push_edge_stmt)
        node_stmt.setParseAction(push_node_stmt)
        attr_stmt.setParseAction(push_default_stmt)

        subgraph.setParseAction(push_subgraph_stmt)
        graph_stmt.setParseAction(push_graph_stmt)
        graphparser.setParseAction(push_top_graph_stmt)

    return graphparser
def parse_dot_data(data):
    """Parse a DOT-language string and return the resulting pydot graph.

    On a parse error the offending line is printed with a caret marking
    the failing column, and None is returned.
    """
    try:
        graphparser = GRAPH_DEF()

        if pyparsing_version >= '1.2':
            graphparser.parseWithTabs()

        tokens = graphparser.parseString(data)
        graph = tokens.graph
        return graph
    except ParseException as err:  # 'as' form is valid on Python 2.6+ and 3.x
        print(err.line)
        print(" " * (err.column - 1) + "^")
        print(err)
        return None

1624
bin/addons/base/ir/workflow/pydot/pydot.py Executable file → Normal file

File diff suppressed because it is too large Load Diff

36
bin/addons/base/ir/workflow/pydot/setup.py Executable file → Normal file
View File

@ -3,21 +3,21 @@
from distutils.core import setup
import pydot
# Package metadata for pydot; long_description is re-joined from the module
# docstring so it stays in sync with the code.
setup(name='pydot',
    version=pydot.__version__,
    description='Python interface to Graphiz\'s Dot',
    author='Ero Carrera',
    author_email='ero@dkbza.org',
    url='http://dkbza.org/pydot.html',
    license='MIT',
    platforms=["any"],
    classifiers=['Development Status :: 5 - Production/Stable',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Scientific/Engineering :: Visualization',
        'Topic :: Software Development :: Libraries :: Python Modules'],
    long_description="\n".join(pydot.__doc__.split('\n')),
    py_modules=['pydot', 'dot_parser'])

View File

@ -32,157 +32,157 @@ from tools import graph
import netsvc
class workflow(osv.osv):
    """Workflow definition bound to an OpenERP model (table 'wkf')."""
    _name = "workflow"
    _table = "wkf"
    _log_access = False
    _columns = {
        'name': fields.char('Name', size=64, required=True),
        'osv': fields.char('Resource Model', size=64, required=True),
        'on_create': fields.boolean('On Create'),
        'activities': fields.one2many('workflow.activity', 'wkf_id', 'Activities'),
    }
    _defaults = {
        'on_create': lambda *a: True
    }

    def write(self, cr, user, ids, vals, context=None):
        """Write override: invalidate the workflow service cache first."""
        if not context:
            context = {}
        wf_service = netsvc.LocalService("workflow")
        wf_service.clear_cache(cr, user)
        return super(workflow, self).write(cr, user, ids, vals, context=context)

    #
    # scale = [stepx, stepy, posx, posy ]
    #
    def graph_get(self, cr, uid, id, scale, context={}):
        """Lay out the workflow's activities/transitions with tools.graph.

        Returns {'node': {activity_id(str): layout_data}, 'transition':
        {transition_id: (from_activity_id, to_activity_id)}}.
        """
        # NOTE(review): mutable default argument kept for interface
        # compatibility; context is only read here.
        nodes = []
        transitions = []
        start = []
        tres = {}
        workflow = self.browse(cr, uid, id, context)
        for a in workflow.activities:
            nodes.append((a.id, a.name))
            if a.flow_start:
                start.append((a.id, a.name))
            for t in a.out_transitions:
                transitions.append(((a.id, a.name), (t.act_to.id, t.act_to.name)))
                tres[t.id] = (a.id, t.act_to.id)
        g = graph(nodes, transitions)
        g.process(start)
        g.scale(*scale)
        result = g.result_get()
        results = {}

        # Re-key the layout result by activity id (string) and embed the name.
        for r in result.items():
            r[1]['name'] = r[0][1]
            results[str(r[0][0])] = r[1]
        return {'node': results, 'transition': tres}

    def create(self, cr, user, vals, context=None):
        """Create override: invalidate the workflow service cache first."""
        if not context:
            context = {}
        wf_service = netsvc.LocalService("workflow")
        wf_service.clear_cache(cr, user)
        return super(workflow, self).create(cr, user, vals, context=context)
workflow()
class wkf_activity(osv.osv):
    """An activity (node) inside a workflow (table 'wkf_activity')."""
    _name = "workflow.activity"
    _table = "wkf_activity"
    _log_access = False
    _columns = {
        'name': fields.char('Name', size=64, required=True),
        'wkf_id': fields.many2one('workflow', 'Workflow', required=True, select=True, ondelete='cascade'),
        'split_mode': fields.selection([('XOR', 'Xor'), ('OR','Or'), ('AND','And')], 'Split Mode', size=3, required=True),
        'join_mode': fields.selection([('XOR', 'Xor'), ('AND', 'And')], 'Join Mode', size=3, required=True),
        'kind': fields.selection([('dummy', 'Dummy'), ('function', 'Function'), ('subflow', 'Subflow'), ('stopall', 'Stop All')], 'Kind', size=64, required=True),
        'action': fields.text('Python Action'),
        'action_id': fields.many2one('ir.actions.server', 'Server Action', ondelete='set null'),
        'flow_start': fields.boolean('Flow Start'),
        'flow_stop': fields.boolean('Flow Stop'),
        'subflow_id': fields.many2one('workflow', 'Subflow'),
        'signal_send': fields.char('Signal (subflow.*)', size=32),
        'out_transitions': fields.one2many('workflow.transition', 'act_from', 'Outgoing transitions'),
        'in_transitions': fields.one2many('workflow.transition', 'act_to', 'Incoming transitions'),
    }
    _defaults = {
        'kind': lambda *a: 'dummy',
        'join_mode': lambda *a: 'XOR',
        'split_mode': lambda *a: 'XOR',
    }
wkf_activity()
class wkf_transition(osv.osv):
    """A transition between two workflow activities (table 'wkf_transition')."""
    _table = "wkf_transition"
    _name = "workflow.transition"
    _log_access = False
    _rec_name = 'signal' #TODO: pas top mais bon...
    _columns = {
        'trigger_model': fields.char('Trigger Type', size=128),
        'trigger_expr_id': fields.char('Trigger Expr ID', size=128),
        'signal': fields.char('Signal (button Name)', size=64),
        'role_id': fields.many2one('res.roles', 'Role Required'),
        'condition': fields.char('Condition', required=True, size=128),
        'act_from': fields.many2one('workflow.activity', 'Source Activity', required=True, select=True, ondelete='cascade'),
        'act_to': fields.many2one('workflow.activity', 'Destination Activity', required=True, select=True, ondelete='cascade'),
    }
    _defaults = {
        'condition': lambda *a: 'True',
    }
wkf_transition()
class wkf_instance(osv.osv):
    """A running instance of a workflow for one record (table 'wkf_instance')."""
    _table = "wkf_instance"
    _name = "workflow.instance"
    _rec_name = 'res_type'
    _log_access = False
    _columns = {
        'wkf_id': fields.many2one('workflow', 'Workflow', ondelete='restrict'),
        'uid': fields.integer('User ID'),
        'res_id': fields.integer('Resource ID'),
        'res_type': fields.char('Resource Model', size=64),
        'state': fields.char('State', size=32),
    }
    def _auto_init(self, cr, context={}):
        """Ensure the (res_id, res_type, state) lookup index exists."""
        super(wkf_instance, self)._auto_init(cr, context)
        cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_instance_res_id_res_type_state_index\'')
        if not cr.fetchone():
            cr.execute('CREATE INDEX wkf_instance_res_id_res_type_state_index ON wkf_instance (res_id, res_type, state)')
            cr.commit()
wkf_instance()
class wkf_workitem(osv.osv):
    """A work item: one activity currently active in an instance."""
    _table = "wkf_workitem"
    _name = "workflow.workitem"
    _log_access = False
    _rec_name = 'state'
    _columns = {
        'act_id': fields.many2one('workflow.activity', 'Activity', required=True, ondelete="cascade"),
        'subflow_id': fields.many2one('workflow.instance', 'Subflow', ondelete="cascade"),
        'inst_id': fields.many2one('workflow.instance', 'Instance', required=True, ondelete="cascade", select=1),
        'state': fields.char('State', size=64),
    }
wkf_workitem()
class wkf_triggers(osv.osv):
    """Trigger linking a record change to a waiting work item."""
    _table = "wkf_triggers"
    _name = "workflow.triggers"
    _log_access = False
    _columns = {
        'res_id': fields.integer('Resource ID', size=128),
        'model': fields.char('Model', size=128),
        'instance_id': fields.many2one('workflow.instance', 'Destination Instance', ondelete="cascade"),
        'workitem_id': fields.many2one('workflow.workitem', 'Workitem', required=True, ondelete="cascade"),
    }
    def _auto_init(self, cr, context={}):
        """Ensure the (res_id, model) lookup index exists."""
        super(wkf_triggers, self)._auto_init(cr, context)
        cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'wkf_triggers_res_id_model_index\'')
        if not cr.fetchone():
            cr.execute('CREATE INDEX wkf_triggers_res_id_model_index ON wkf_triggers (res_id, model)')
            cr.commit()
wkf_triggers()

File diff suppressed because it is too large Load Diff

View File

@ -31,33 +31,33 @@ import time
from report import report_sxw
class ir_module_reference_print(report_sxw.rml_parse):
    """RML parser exposing model-introspection helpers to the module-reference report."""
    def __init__(self, cr, uid, name, context):
        super(ir_module_reference_print, self).__init__(cr, uid, name, context)
        # Helpers callable from the RML template.
        self.localcontext.update({
            'time': time,
            'findobj': self._object_find,
            'objdoc': self._object_doc,
            'findflds': self._fields_find,
        })

    def _object_doc(self, obj):
        """Return the docstring of the model registered as *obj*."""
        modobj = self.pool.get(obj)
        return modobj.__doc__

    def _object_find(self, module):
        """Browse the ir.model records whose model name belongs to *module*.

        The 'base' module owns both the 'res.*' and 'ir.*' namespaces;
        other modules are matched by their name prefix.
        """
        modobj = self.pool.get('ir.model')
        if module == 'base':
            ids = modobj.search(self.cr, self.uid, [('model','=like','res%')])
            ids += modobj.search(self.cr, self.uid, [('model','=like','ir%')])
        else:
            ids = modobj.search(self.cr, self.uid, [('model','=like',module+'%')])
        return modobj.browse(self.cr, self.uid, ids)

    def _fields_find(self, obj):
        """Return the (name, description) field items of model *obj*."""
        modobj = self.pool.get(obj)
        return modobj.fields_get(self.cr, self.uid).items()
# Register the module-reference report against the ir.module.module model.
report_sxw.report_sxw('report.ir.module.reference', 'ir.module.module',
    'addons/base/module/report/ir_module_reference.rml',
    parser=ir_module_reference_print, header=False)

View File

@ -49,48 +49,48 @@ _info_arch = '''<?xml version="1.0"?>
_info_fields = {}
class wizard_install_module(wizard.interface):
    """Wizard that scans the addons directory and registers new modules."""
    def watch_dir(self, cr, uid, data, context):
        """Scan the addons path for modules unknown to the database.

        New modules (directory with __terp__.py, or zip archive) are
        created in state 'uninstalled' together with their dependency
        rows; previously 'uninstallable' modules that became installable
        are reset to 'uninstalled'.
        """
        mod_obj = pooler.get_pool(cr.dbname).get('ir.module.module')
        all_mods = mod_obj.read(cr, uid, mod_obj.search(cr, uid, []), ['name', 'state'])
        known_modules = [x['name'] for x in all_mods]
        ls_ad = glob.glob(os.path.join(tools.config['addons_path'], '*', '__terp__.py'))
        modules = [module_name_re.match(name).group(1) for name in ls_ad]
        # Zip-packaged modules sit directly in the addons path.
        for fname in os.listdir(tools.config['addons_path']):
            if zipfile.is_zipfile(fname):
                modules.append(fname.split('.')[0])
        for module in modules:
            if module in known_modules:
                continue
            terp = mod_obj.get_module_info(module)
            if not terp.get('installable', True):
                continue
            imp.load_module(module, *imp.find_module(module))
            mod_id = mod_obj.create(cr, uid, {
                'name': module,
                'state': 'uninstalled',
                'description': terp.get('description', ''),
                'shortdesc': terp.get('name', ''),
                'author': terp.get('author', 'Unknown')})
            dependencies = terp.get('depends', [])
            for d in dependencies:
                cr.execute('insert into ir_module_module_dependency (module_id,name) values (%s, %s)', (mod_id, d))
        # Re-enable modules that were uninstallable but are now installable.
        for module in known_modules:
            terp = mod_obj.get_module_info(module)
            if terp.get('installable', True):
                for mod in all_mods:
                    if mod['name'] == module and mod['state'] == 'uninstallable':
                        mod_obj.write(cr, uid, [mod['id']], {'state': 'uninstalled'})
        return {}

    states = {
        'init': {
            'actions': [],
            'result': {'type':'form', 'arch': _info_arch, 'fields': _info_fields, 'state':[('end','Cancel','gtk-cancel'),('addmod','Check new modules','gtk-ok')]}
        },
        'addmod': {
            'actions': [watch_dir],
            'result': {'type':'state', 'state':'end'}
        },
    }
wizard_install_module('module.module.scan')

View File

@ -40,114 +40,114 @@ from osv import fields,osv
'''
view_form_init="""<?xml version="1.0"?>
<form string="Export language">
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="Export translation file" colspan="4"/>
<label align="0.0" string="Choose a language to export:" colspan="4"/>
<field name="lang" colspan="4"/>
</group>
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="Export translation file" colspan="4"/>
<label align="0.0" string="Choose a language to export:" colspan="4"/>
<field name="lang" colspan="4"/>
</group>
</form>"""
view_form_finish="""<?xml version="1.0"?>
<form string="Export language">
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="Export done" colspan="4"/>
<field name="data" readonly="1" colspan="3"/>
<label align="0.0" string="Save this document to a .CSV file and open it with\n your favourite spreadsheet software. The file\n encoding is UTF-8. You have to translate the latest\n column before reimporting it." colspan="4"/>
</group>
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="Export done" colspan="4"/>
<field name="data" readonly="1" colspan="3"/>
<label align="0.0" string="Save this document to a .CSV file and open it with\n your favourite spreadsheet software. The file\n encoding is UTF-8. You have to translate the latest\n column before reimporting it." colspan="4"/>
</group>
</form>"""
class wizard_export_lang(wizard.interface):
def _get_language(self, cr, uid, context):
lang_obj=pooler.get_pool(cr.dbname).get('res.lang')
ids=lang_obj.search(cr, uid, [('active', '=', True),])
langs=lang_obj.browse(cr, uid, ids)
return [(lang.code, lang.translatable and lang.name or _('New language')) for lang in langs]
def _get_language(self, cr, uid, context):
lang_obj=pooler.get_pool(cr.dbname).get('res.lang')
ids=lang_obj.search(cr, uid, [('active', '=', True),])
langs=lang_obj.browse(cr, uid, ids)
return [(lang.code, lang.translatable and lang.name or _('New language')) for lang in langs]
def _get_file(self, cr, uid, data, context):
file=tools.trans_generate(data['form']['lang'], 'all', dbname=cr.dbname)
buf=StringIO.StringIO()
writer=csv.writer(buf, 'UNIX')
for row in file:
writer.writerow(row)
del file
out=base64.encodestring(buf.getvalue())
buf.close()
return {'data': out}
def _get_file(self, cr, uid, data, context):
file=tools.trans_generate(data['form']['lang'], 'all', dbname=cr.dbname)
buf=StringIO.StringIO()
writer=csv.writer(buf, 'UNIX')
for row in file:
writer.writerow(row)
del file
out=base64.encodestring(buf.getvalue())
buf.close()
return {'data': out}
fields_form={
'lang': {'string':'Language', 'type':'selection', 'selection':_get_language,},
}
fields_form_finish={
'data': {'string':'File', 'type':'binary', 'readonly': True,},
}
states={
'init':{
'actions': [],
'result': {'type': 'form', 'arch': view_form_init, 'fields': fields_form,
'state': [
('end', 'Cancel', 'gtk-cancel'),
('finish', 'Ok', 'gtk-ok', True)
]
}
},
'finish':{
'actions': [_get_file],
'result': {'type': 'form', 'arch': view_form_finish,
'fields': fields_form_finish,
'state': [
('end', 'Close', 'gtk-cancel', True)
]
}
},
}
fields_form={
'lang': {'string':'Language', 'type':'selection', 'selection':_get_language,},
}
fields_form_finish={
'data': {'string':'File', 'type':'binary', 'readonly': True,},
}
states={
'init':{
'actions': [],
'result': {'type': 'form', 'arch': view_form_init, 'fields': fields_form,
'state': [
('end', 'Cancel', 'gtk-cancel'),
('finish', 'Ok', 'gtk-ok', True)
]
}
},
'finish':{
'actions': [_get_file],
'result': {'type': 'form', 'arch': view_form_finish,
'fields': fields_form_finish,
'state': [
('end', 'Close', 'gtk-cancel', True)
]
}
},
}
wizard_export_lang('module.lang.export')
'''
class wizard_export_lang(osv.osv_memory):
def _get_languages(self, cr, uid, context):
lang_obj=pooler.get_pool(cr.dbname).get('res.lang')
ids=lang_obj.search(cr, uid, [('active', '=', True),])
langs=lang_obj.browse(cr, uid, ids)
return [(lang.code, lang.translatable and lang.name or _('New language')) for lang in langs]
def _get_languages(self, cr, uid, context):
lang_obj=pooler.get_pool(cr.dbname).get('res.lang')
ids=lang_obj.search(cr, uid, [('active', '=', True),])
langs=lang_obj.browse(cr, uid, ids)
return [(lang.code, lang.translatable and lang.name or _('New language')) for lang in langs]
def act_cancel(self, cr, uid, ids, context=None):
#self.unlink(cr, uid, ids, context)
return {'type':'ir.actions.act_window_close' }
def act_cancel(self, cr, uid, ids, context=None):
#self.unlink(cr, uid, ids, context)
return {'type':'ir.actions.act_window_close' }
def act_destroy(self, *args):
return {'type':'ir.actions.act_window_close' }
def act_destroy(self, *args):
return {'type':'ir.actions.act_window_close' }
def act_getfile(self, cr, uid, ids, context=None):
this = self.browse(cr, uid, ids)[0]
mods = map(lambda m: m.name, this.modules)
mods.sort()
buf=StringIO.StringIO()
def act_getfile(self, cr, uid, ids, context=None):
this = self.browse(cr, uid, ids)[0]
mods = map(lambda m: m.name, this.modules)
mods.sort()
buf=StringIO.StringIO()
tools.trans_export(this.lang, mods, buf, this.format, dbname=cr.dbname)
tools.trans_export(this.lang, mods, buf, this.format, dbname=cr.dbname)
if this.format == 'csv':
this.advice = _("Save this document to a .CSV file and open it with your favourite spreadsheet software. The file encoding is UTF-8. You have to translate the latest column before reimporting it.")
elif this.format == 'po':
this.advice = _("Save this document to a .po file and edit it with a specific software or a text editor. The file encoding is UTF-8.")
if this.format == 'csv':
this.advice = _("Save this document to a .CSV file and open it with your favourite spreadsheet software. The file encoding is UTF-8. You have to translate the latest column before reimporting it.")
elif this.format == 'po':
this.advice = _("Save this document to a .po file and edit it with a specific software or a text editor. The file encoding is UTF-8.")
out=base64.encodestring(buf.getvalue())
buf.close()
return self.write(cr, uid, ids, {'state':'get', 'data':out, 'advice':this.advice}, context=context)
out=base64.encodestring(buf.getvalue())
buf.close()
return self.write(cr, uid, ids, {'state':'get', 'data':out, 'advice':this.advice}, context=context)
_name = "wizard.module.lang.export"
_columns = {
'lang': fields.selection(_get_languages, 'Language',required=True),
'format': fields.selection( ( ('csv','CSV File'), ('po','PO File') ), 'File Format', required=True),
'modules': fields.many2many('ir.module.module', 'rel_modules_langexport', 'wiz_id', 'module_id', 'Modules', domain=[('state','=','installed')]),
'data': fields.binary('File', readonly=True),
'advice': fields.text('', readonly=True),
'state': fields.selection( ( ('choose','choose'), # choose language
('get','get'), # get the file
) ),
}
_defaults = { 'state': lambda *a: 'choose', }
_name = "wizard.module.lang.export"
_columns = {
'lang': fields.selection(_get_languages, 'Language',required=True),
'format': fields.selection( ( ('csv','CSV File'), ('po','PO File') ), 'File Format', required=True),
'modules': fields.many2many('ir.module.module', 'rel_modules_langexport', 'wiz_id', 'module_id', 'Modules', domain=[('state','=','installed')]),
'data': fields.binary('File', readonly=True),
'advice': fields.text('', readonly=True),
'state': fields.selection( ( ('choose','choose'), # choose language
('get','get'), # get the file
) ),
}
_defaults = { 'state': lambda *a: 'choose', }
wizard_export_lang()

View File

@ -36,53 +36,53 @@ from tempfile import TemporaryFile
view_form="""<?xml version="1.0"?>
<form string="Import language">
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="Import new language" colspan="4"/>
<field name="name"/>
<field name="code"/>
<field name="data" colspan="3"/>
<label string="You have to import a .CSV file wich is encoded in UTF-8.\nPlease check that the first line of your file is:" colspan="4" align="0.0"/>
<label string="type,name,res_id,src,value" colspan="4"/>
</group>
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="Import new language" colspan="4"/>
<field name="name"/>
<field name="code"/>
<field name="data" colspan="3"/>
<label string="You have to import a .CSV file wich is encoded in UTF-8.\nPlease check that the first line of your file is:" colspan="4" align="0.0"/>
<label string="type,name,res_id,src,value" colspan="4"/>
</group>
</form>"""
fields_form={
'name':{'string':'Language name', 'type':'char', 'size':64, 'required':True},
'code':{'string':'Code (eg:en__US)', 'type':'char', 'size':5, 'required':True},
'data':{'string':'File', 'type':'binary', 'required':True},
'name':{'string':'Language name', 'type':'char', 'size':64, 'required':True},
'code':{'string':'Code (eg:en__US)', 'type':'char', 'size':5, 'required':True},
'data':{'string':'File', 'type':'binary', 'required':True},
}
class wizard_import_lang(wizard.interface):
def _import_lang(self, cr, uid, data, context):
form=data['form']
fileobj = TemporaryFile('w+')
fileobj.write( base64.decodestring(form['data']) )
def _import_lang(self, cr, uid, data, context):
form=data['form']
fileobj = TemporaryFile('w+')
fileobj.write( base64.decodestring(form['data']) )
# now we determine the file format
fileobj.seek(0)
first_line = fileobj.readline().strip()
fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
fileobj.seek(0)
# now we determine the file format
fileobj.seek(0)
first_line = fileobj.readline().strip()
fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
fileobj.seek(0)
tools.trans_load_data(cr.dbname, fileobj, fileformat, form['code'], lang_name=form['name'])
fileobj.close()
return {}
tools.trans_load_data(cr.dbname, fileobj, fileformat, form['code'], lang_name=form['name'])
fileobj.close()
return {}
states={
'init':{
'actions': [],
'result': {'type': 'form', 'arch': view_form, 'fields': fields_form,
'state':[
('end', 'Cancel', 'gtk-cancel'),
('finish', 'Ok', 'gtk-ok', True)
]
}
},
'finish':{
'actions':[],
'result':{'type':'action', 'action':_import_lang, 'state':'end'}
},
}
states={
'init':{
'actions': [],
'result': {'type': 'form', 'arch': view_form, 'fields': fields_form,
'state':[
('end', 'Cancel', 'gtk-cancel'),
('finish', 'Ok', 'gtk-ok', True)
]
}
},
'finish':{
'actions':[],
'result':{'type':'action', 'action':_import_lang, 'state':'end'}
},
}
wizard_import_lang('module.lang.import')

View File

@ -40,82 +40,82 @@ import base64
finish_form ='''<?xml version="1.0"?>
<form string="Module import">
<label string="Module successfully imported !" colspan="4"/>
<label string="Module successfully imported !" colspan="4"/>
</form>
'''
ask_form ='''<?xml version="1.0"?>
<form string="Module import">
<separator string="Module import" colspan="4"/>
<label string="Please give your module .ZIP file to import." colspan="4"/>
<field name="module_file"/>
<separator string="Module import" colspan="4"/>
<label string="Please give your module .ZIP file to import." colspan="4"/>
<field name="module_file"/>
</form>
'''
ask_fields = {
'module_file': {'string': 'Module .ZIP file', 'type': 'binary', 'required': True},
'module_file': {'string': 'Module .ZIP file', 'type': 'binary', 'required': True},
}
class move_module_wizard(wizard.interface):
def importzip(self, cr, uid, data, context):
module_obj=pooler.get_pool(cr.dbname).get('ir.module.module')
module_data = data['form']['module_file']
def importzip(self, cr, uid, data, context):
module_obj=pooler.get_pool(cr.dbname).get('ir.module.module')
module_data = data['form']['module_file']
val =base64.decodestring(module_data)
fp = StringIO.StringIO(val)
fdata = zipfile.ZipFile(fp, 'r')
fname = fdata.namelist()[0]
module_name = os.path.split(fname)[0]
val =base64.decodestring(module_data)
fp = StringIO.StringIO(val)
fdata = zipfile.ZipFile(fp, 'r')
fname = fdata.namelist()[0]
module_name = os.path.split(fname)[0]
ad = tools.config['addons_path']
ad = tools.config['addons_path']
fname = os.path.join(ad,module_name+'.zip')
try:
fp = file(fname, 'wb')
fp.write(val)
fp.close()
except IOError, e:
raise wizard.except_wizard(_('Error !'), _('Can not create the module file: %s !') % (fname,) )
fname = os.path.join(ad,module_name+'.zip')
try:
fp = file(fname, 'wb')
fp.write(val)
fp.close()
except IOError, e:
raise wizard.except_wizard(_('Error !'), _('Can not create the module file: %s !') % (fname,) )
pooler.get_pool(cr.dbname).get('ir.module.module').update_list(cr, uid)
return {'module_name': module_name}
pooler.get_pool(cr.dbname).get('ir.module.module').update_list(cr, uid)
return {'module_name': module_name}
def _action_module_open(self, cr, uid, data, context):
return {
'domain': str([('name', '=', data['form']['module_name'])]),
'name': 'Module List',
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'ir.module.module',
'view_id': False,
'type': 'ir.actions.act_window'
}
def _action_module_open(self, cr, uid, data, context):
return {
'domain': str([('name', '=', data['form']['module_name'])]),
'name': 'Module List',
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'ir.module.module',
'view_id': False,
'type': 'ir.actions.act_window'
}
states = {
'init': {
'actions': [],
'result': {
'type': 'form',
'arch': ask_form,
'fields': ask_fields,
'state': [
('end', 'Cancel', 'gtk-cancel'),
('import', 'Import module', 'gtk-ok', True)
]
}
},
'import': {
'actions': [importzip],
'result': {
'type':'form',
'arch':finish_form,
'fields':{},
'state':[('open_window','Close')]
}
},
'open_window': {
'actions': [],
'result': {'type': 'action', 'action': _action_module_open, 'state':'end'}
},
}
states = {
'init': {
'actions': [],
'result': {
'type': 'form',
'arch': ask_form,
'fields': ask_fields,
'state': [
('end', 'Cancel', 'gtk-cancel'),
('import', 'Import module', 'gtk-ok', True)
]
}
},
'import': {
'actions': [importzip],
'result': {
'type':'form',
'arch':finish_form,
'fields':{},
'state':[('open_window','Close')]
}
},
'open_window': {
'actions': [],
'result': {'type': 'action', 'action': _action_module_open, 'state':'end'}
},
}
move_module_wizard('base.module.import')

View File

@ -33,59 +33,59 @@ import tools
view_form_end = """<?xml version="1.0"?>
<form string="Language file loaded.">
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="Installation done" colspan="4"/>
<label align="0.0" string="The selected language has been successfully installed.\nYou must change the preferences of the user and open a new menu to view changes." colspan="4"/>
</group>
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="Installation done" colspan="4"/>
<label align="0.0" string="The selected language has been successfully installed.\nYou must change the preferences of the user and open a new menu to view changes." colspan="4"/>
</group>
</form>"""
view_form = """<?xml version="1.0"?>
<form string="System Upgrade">
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="System Upgrade" colspan="4"/>
<label align="0.0" string="Choose a language to install:" colspan="4"/>
<field name="lang" colspan="4"/>
<label align="0.0" string="Note that this operation may take a few minutes." colspan="4"/>
</group>
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<separator string="System Upgrade" colspan="4"/>
<label align="0.0" string="Choose a language to install:" colspan="4"/>
<field name="lang" colspan="4"/>
<label align="0.0" string="Note that this operation may take a few minutes." colspan="4"/>
</group>
</form>"""
class wizard_lang_install(wizard.interface):
def _lang_install(self, cr, uid, data, context):
lang = data['form']['lang']
if lang and lang != 'en_US':
filename = tools.config["root_path"] + "/i18n/" + lang + ".csv"
tools.trans_load(cr.dbname, filename, lang)
return {}
def _lang_install(self, cr, uid, data, context):
lang = data['form']['lang']
if lang and lang != 'en_US':
filename = tools.config["root_path"] + "/i18n/" + lang + ".csv"
tools.trans_load(cr.dbname, filename, lang)
return {}
def _get_language(sel, cr, uid, context):
return tools.scan_languages()
def _get_language(sel, cr, uid, context):
return tools.scan_languages()
fields_form = {
'lang': {'string':'Language', 'type':'selection', 'selection':_get_language,
},
}
fields_form = {
'lang': {'string':'Language', 'type':'selection', 'selection':_get_language,
},
}
states = {
'init': {
'actions': [],
'result': {'type': 'form', 'arch': view_form, 'fields': fields_form,
'state': [
('end', 'Cancel', 'gtk-cancel'),
('start', 'Start installation', 'gtk-ok', True)
]
}
},
'start': {
'actions': [_lang_install],
'result': {'type': 'form', 'arch': view_form_end, 'fields': {},
'state': [
('end', 'Ok', 'gtk-ok', True)
]
}
},
}
states = {
'init': {
'actions': [],
'result': {'type': 'form', 'arch': view_form, 'fields': fields_form,
'state': [
('end', 'Cancel', 'gtk-cancel'),
('start', 'Start installation', 'gtk-ok', True)
]
}
},
'start': {
'actions': [_lang_install],
'result': {'type': 'form', 'arch': view_form_end, 'fields': {},
'state': [
('end', 'Ok', 'gtk-ok', True)
]
}
},
}
wizard_lang_install('module.lang.install')

View File

@ -35,121 +35,121 @@ import tools
view_form_end = """<?xml version="1.0"?>
<form string="System upgrade done">
<separator string="System upgrade done"/>
<label align="0.0" string="The modules have been upgraded / installed !" colspan="4"/>
<label align="0.0" string="You may have to reinstall some language pack." colspan="4"/>
<label align="0.0" string="We suggest you to reload the menu tab (Ctrl+t Ctrl+r)." colspan="4"/>
<separator string="System upgrade done"/>
<label align="0.0" string="The modules have been upgraded / installed !" colspan="4"/>
<label align="0.0" string="You may have to reinstall some language pack." colspan="4"/>
<label align="0.0" string="We suggest you to reload the menu tab (Ctrl+t Ctrl+r)." colspan="4"/>
</form>"""
view_form = """<?xml version="1.0"?>
<form string="System Upgrade">
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<label align="0.0" string="Your system will be upgraded." colspan="4"/>
<label align="0.0" string="Note that this operation my take a few minutes." colspan="4"/>
<separator string="Modules to update"/>
<field name="module_info" nolabel="1" colspan="4"/>
<separator string="Modules to download"/>
<field name="module_download" nolabel="1" colspan="4"/>
</group>
<image name="gtk-dialog-info" colspan="2"/>
<group colspan="2" col="4">
<label align="0.0" string="Your system will be upgraded." colspan="4"/>
<label align="0.0" string="Note that this operation my take a few minutes." colspan="4"/>
<separator string="Modules to update"/>
<field name="module_info" nolabel="1" colspan="4"/>
<separator string="Modules to download"/>
<field name="module_download" nolabel="1" colspan="4"/>
</group>
</form>"""
view_field = {
"module_info": {'type': 'text', 'string': 'Modules to update',
'readonly': True},
"module_download": {'type': 'text', 'string': 'Modules to download',
'readonly': True},
"module_info": {'type': 'text', 'string': 'Modules to update',
'readonly': True},
"module_download": {'type': 'text', 'string': 'Modules to download',
'readonly': True},
}
class wizard_info_get(wizard.interface):
def _get_install(self, cr, uid, data, context):
pool=pooler.get_pool(cr.dbname)
mod_obj = pool.get('ir.module.module')
ids = mod_obj.search(cr, uid, [
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
res = mod_obj.read(cr, uid, ids, ['name','state'], context)
url = mod_obj.download(cr, uid, ids, download=False, context=context)
return {'module_info': '\n'.join(map(lambda x: x['name']+' : '+x['state'], res)),
'module_download': '\n'.join(url)}
def _get_install(self, cr, uid, data, context):
pool=pooler.get_pool(cr.dbname)
mod_obj = pool.get('ir.module.module')
ids = mod_obj.search(cr, uid, [
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
res = mod_obj.read(cr, uid, ids, ['name','state'], context)
url = mod_obj.download(cr, uid, ids, download=False, context=context)
return {'module_info': '\n'.join(map(lambda x: x['name']+' : '+x['state'], res)),
'module_download': '\n'.join(url)}
def _check_upgrade_module(self,cr,uid,data,context):
db, pool = pooler.get_db_and_pool(cr.dbname)
cr = db.cursor()
mod_obj = pool.get('ir.module.module')
ids = mod_obj.search(cr, uid, [
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
if ids and len(ids):
return 'next'
else:
return 'end'
def _check_upgrade_module(self,cr,uid,data,context):
db, pool = pooler.get_db_and_pool(cr.dbname)
cr = db.cursor()
mod_obj = pool.get('ir.module.module')
ids = mod_obj.search(cr, uid, [
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
if ids and len(ids):
return 'next'
else:
return 'end'
def _upgrade_module(self, cr, uid, data, context):
db, pool = pooler.get_db_and_pool(cr.dbname)
cr = db.cursor()
mod_obj = pool.get('ir.module.module')
ids = mod_obj.search(cr, uid, [
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
mod_obj.download(cr, uid, ids, context=context)
cr.commit()
db, pool = pooler.restart_pool(cr.dbname, update_module=True)
def _upgrade_module(self, cr, uid, data, context):
db, pool = pooler.get_db_and_pool(cr.dbname)
cr = db.cursor()
mod_obj = pool.get('ir.module.module')
ids = mod_obj.search(cr, uid, [
('state', 'in', ['to upgrade', 'to remove', 'to install'])])
mod_obj.download(cr, uid, ids, context=context)
cr.commit()
db, pool = pooler.restart_pool(cr.dbname, update_module=True)
lang_obj=pool.get('res.lang')
lang_ids=lang_obj.search(cr, uid, [])
langs=lang_obj.browse(cr, uid, lang_ids)
for lang in langs:
if lang.code and lang.code != 'en_US':
filename=os.path.join(tools.config["root_path"], "i18n", lang.code + ".csv")
tools.trans_load(cr.dbname, filename, lang.code)
return {}
lang_obj=pool.get('res.lang')
lang_ids=lang_obj.search(cr, uid, [])
langs=lang_obj.browse(cr, uid, lang_ids)
for lang in langs:
if lang.code and lang.code != 'en_US':
filename=os.path.join(tools.config["root_path"], "i18n", lang.code + ".csv")
tools.trans_load(cr.dbname, filename, lang.code)
return {}
def _config(self, cr, uid, data, context=None):
return {
def _config(self, cr, uid, data, context=None):
return {
'view_type': 'form',
"view_mode": 'form',
'res_model': 'ir.module.module.configuration.wizard',
'type': 'ir.actions.act_window',
'target':'new',
'res_model': 'ir.module.module.configuration.wizard',
'type': 'ir.actions.act_window',
'target':'new',
}
states = {
'init': {
'actions': [],
'result' : {'type': 'choice', 'next_state': _check_upgrade_module }
},
'next': {
'actions': [_get_install],
'result': {'type':'form', 'arch':view_form, 'fields': view_field,
'state':[
('end', 'Cancel', 'gtk-cancel'),
('start', 'Start Upgrade', 'gtk-ok', True)
]
}
},
'start': {
'actions': [_upgrade_module],
'result': {'type':'form', 'arch':view_form_end, 'fields': {},
'state':[
('end', 'Close', 'gtk-close', True),
('config', 'Start configuration', 'gtk-ok', True)
]
}
},
'end': {
'actions': [],
'result': {'type':'form', 'arch':view_form_end, 'fields': {},
'state':[
('end', 'Close', 'gtk-close', True),
('config', 'Start configuration', 'gtk-ok', True)
]
}
},
'config':{
states = {
'init': {
'actions': [],
'result' : {'type': 'choice', 'next_state': _check_upgrade_module }
},
'next': {
'actions': [_get_install],
'result': {'type':'form', 'arch':view_form, 'fields': view_field,
'state':[
('end', 'Cancel', 'gtk-cancel'),
('start', 'Start Upgrade', 'gtk-ok', True)
]
}
},
'start': {
'actions': [_upgrade_module],
'result': {'type':'form', 'arch':view_form_end, 'fields': {},
'state':[
('end', 'Close', 'gtk-close', True),
('config', 'Start configuration', 'gtk-ok', True)
]
}
},
'end': {
'actions': [],
'result': {'type':'form', 'arch':view_form_end, 'fields': {},
'state':[
('end', 'Close', 'gtk-close', True),
('config', 'Start configuration', 'gtk-ok', True)
]
}
},
'config':{
'result': {
'type': 'action',
'action': _config,
'state': 'end',
},
}
}
}
}
wizard_info_get('module.upgrade')

View File

@ -34,70 +34,70 @@ import pooler
class wizard_update_module(wizard.interface):
arch = '''<?xml version="1.0"?>
<form string="Scan for new modules">
<label string="This function will check for new modules in the 'addons' path and on module repositories:" colspan="4" align="0.0"/>
<field name="repositories" colspan="4" nolabel="1"/>
</form>'''
fields = {
'repositories': {'type': 'text', 'string': 'Repositories', 'readonly': True},
}
arch = '''<?xml version="1.0"?>
<form string="Scan for new modules">
<label string="This function will check for new modules in the 'addons' path and on module repositories:" colspan="4" align="0.0"/>
<field name="repositories" colspan="4" nolabel="1"/>
</form>'''
fields = {
'repositories': {'type': 'text', 'string': 'Repositories', 'readonly': True},
}
arch_module = '''<?xml version="1.0"?>
<form string="New modules">
<field name="update" colspan="4"/>
<field name="add" colspan="4"/>
</form>'''
arch_module = '''<?xml version="1.0"?>
<form string="New modules">
<field name="update" colspan="4"/>
<field name="add" colspan="4"/>
</form>'''
fields_module = {
'update': {'type': 'integer', 'string': 'Number of modules updated', 'readonly': True},
'add': {'type': 'integer', 'string': 'Number of modules added', 'readonly': True},
}
fields_module = {
'update': {'type': 'integer', 'string': 'Number of modules updated', 'readonly': True},
'add': {'type': 'integer', 'string': 'Number of modules added', 'readonly': True},
}
def _update_module(self, cr, uid, data, context):
update, add = pooler.get_pool(cr.dbname).get('ir.module.module').update_list(cr, uid)
return {'update': update, 'add': add}
def _update_module(self, cr, uid, data, context):
update, add = pooler.get_pool(cr.dbname).get('ir.module.module').update_list(cr, uid)
return {'update': update, 'add': add}
def _action_module_open(self, cr, uid, data, context):
return {
'domain': str([]),
'name': 'Module List',
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'ir.module.module',
'view_id': False,
'type': 'ir.actions.act_window'
}
def _action_module_open(self, cr, uid, data, context):
return {
'domain': str([]),
'name': 'Module List',
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'ir.module.module',
'view_id': False,
'type': 'ir.actions.act_window'
}
def _get_repositories(self, cr, uid, data, context):
pool = pooler.get_pool(cr.dbname)
repository_obj = pool.get('ir.module.repository')
ids = repository_obj.search(cr, uid, [])
res = repository_obj.read(cr, uid, ids, ['name', 'url'], context)
return {'repositories': '\n'.join(map(lambda x: x['name']+': '+x['url'], res))}
def _get_repositories(self, cr, uid, data, context):
pool = pooler.get_pool(cr.dbname)
repository_obj = pool.get('ir.module.repository')
ids = repository_obj.search(cr, uid, [])
res = repository_obj.read(cr, uid, ids, ['name', 'url'], context)
return {'repositories': '\n'.join(map(lambda x: x['name']+': '+x['url'], res))}
states = {
'init': {
'actions': [_get_repositories],
'result': {'type': 'form', 'arch': arch, 'fields': fields,
'state': [
('end', 'Cancel', 'gtk-cancel'),
('update', 'Check new modules', 'gtk-ok', True)
]
}
},
'update': {
'actions': [_update_module],
'result': {'type': 'form', 'arch': arch_module, 'fields': fields_module,
'state': [
('open_window', 'Ok', 'gtk-ok', True)
]
}
},
'open_window': {
'actions': [],
'result': {'type': 'action', 'action': _action_module_open, 'state':'end'}
}
}
states = {
'init': {
'actions': [_get_repositories],
'result': {'type': 'form', 'arch': arch, 'fields': fields,
'state': [
('end', 'Cancel', 'gtk-cancel'),
('update', 'Check new modules', 'gtk-ok', True)
]
}
},
'update': {
'actions': [_update_module],
'result': {'type': 'form', 'arch': arch_module, 'fields': fields_module,
'state': [
('open_window', 'Ok', 'gtk-ok', True)
]
}
},
'open_window': {
'actions': [],
'result': {'type': 'action', 'action': _action_module_open, 'state':'end'}
}
}
wizard_update_module('module.module.update')

View File

@ -32,27 +32,27 @@ from osv import fields, osv
class Bank(osv.osv):
_description='Bank'
_name = 'res.bank'
_columns = {
'name': fields.char('Name', size=128, required=True),
'code': fields.char('Code', size=64),
'street': fields.char('Street', size=128),
'street2': fields.char('Street2', size=128),
'zip': fields.char('Zip', change_default=True, size=24),
'city': fields.char('City', size=128),
'state': fields.many2one("res.country.state", 'State',
domain="[('country_id', '=', country)]"),
'country': fields.many2one('res.country', 'Country'),
'email': fields.char('E-Mail', size=64),
'phone': fields.char('Phone', size=64),
'fax': fields.char('Fax', size=64),
'active': fields.boolean('Active'),
'bic': fields.char('BIC/Swift code', size=11,
help="Bank Identifier Code"),
}
_defaults = {
'active': lambda *a: 1,
}
_description='Bank'
_name = 'res.bank'
_columns = {
'name': fields.char('Name', size=128, required=True),
'code': fields.char('Code', size=64),
'street': fields.char('Street', size=128),
'street2': fields.char('Street2', size=128),
'zip': fields.char('Zip', change_default=True, size=24),
'city': fields.char('City', size=128),
'state': fields.many2one("res.country.state", 'State',
domain="[('country_id', '=', country)]"),
'country': fields.many2one('res.country', 'Country'),
'email': fields.char('E-Mail', size=64),
'phone': fields.char('Phone', size=64),
'fax': fields.char('Fax', size=64),
'active': fields.boolean('Active'),
'bic': fields.char('BIC/Swift code', size=11,
help="Bank Identifier Code"),
}
_defaults = {
'active': lambda *a: 1,
}
Bank()

View File

@ -32,76 +32,76 @@ from osv import fields, osv
class Country(osv.osv):
_name = 'res.country'
_description = 'Country'
_columns = {
'name': fields.char('Country Name', size=64,
help='The full name of the country.', required=True),
'code': fields.char('Country Code', size=2,
help='The ISO country code in two chars.\n'
'You can use this field for quick search.', required=True),
}
_sql_constraints = [
('name_uniq', 'unique (name)',
'The name of the country must be unique !'),
('code_uniq', 'unique (code)',
'The code of the country must be unique !')
]
_name = 'res.country'
_description = 'Country'
_columns = {
'name': fields.char('Country Name', size=64,
help='The full name of the country.', required=True),
'code': fields.char('Country Code', size=2,
help='The ISO country code in two chars.\n'
'You can use this field for quick search.', required=True),
}
_sql_constraints = [
('name_uniq', 'unique (name)',
'The name of the country must be unique !'),
('code_uniq', 'unique (code)',
'The code of the country must be unique !')
]
def name_search(self, cr, user, name='', args=None, operator='ilike',
context=None, limit=80):
if not args:
args=[]
if not context:
context={}
ids = False
if len(name) == 2:
ids = self.search(cr, user, [('code', '=', name.upper())] + args,
limit=limit, context=context)
if not ids:
ids = self.search(cr, user, [('name', operator, name)] + args,
limit=limit, context=context)
return self.name_get(cr, user, ids, context)
_order='name'
def name_search(self, cr, user, name='', args=None, operator='ilike',
context=None, limit=80):
if not args:
args=[]
if not context:
context={}
ids = False
if len(name) == 2:
ids = self.search(cr, user, [('code', '=', name.upper())] + args,
limit=limit, context=context)
if not ids:
ids = self.search(cr, user, [('name', operator, name)] + args,
limit=limit, context=context)
return self.name_get(cr, user, ids, context)
_order='name'
def create(self, cursor, user, vals, context=None):
if 'code' in vals:
vals['code'] = vals['code'].upper()
return super(Country, self).create(cursor, user, vals,
context=context)
def create(self, cursor, user, vals, context=None):
if 'code' in vals:
vals['code'] = vals['code'].upper()
return super(Country, self).create(cursor, user, vals,
context=context)
def write(self, cursor, user, ids, vals, context=None):
if 'code' in vals:
vals['code'] = vals['code'].upper()
return super(Country, self).write(cursor, user, ids, vals,
context=context)
def write(self, cursor, user, ids, vals, context=None):
if 'code' in vals:
vals['code'] = vals['code'].upper()
return super(Country, self).write(cursor, user, ids, vals,
context=context)
Country()
class CountryState(osv.osv):
_description="Country state"
_name = 'res.country.state'
_columns = {
'country_id': fields.many2one('res.country', 'Country',
required=True),
'name': fields.char('State Name', size=64, required=True),
'code': fields.char('State Code', size=3, required=True),
}
def name_search(self, cr, user, name='', args=None, operator='ilike',
context=None, limit=80):
if not args:
args = []
if not context:
context = {}
ids = self.search(cr, user, [('code', '=', name)] + args, limit=limit,
context=context)
if not ids:
ids = self.search(cr, user, [('name', operator, name)] + args,
limit=limit, context=context)
return self.name_get(cr, user, ids, context)
_description="Country state"
_name = 'res.country.state'
_columns = {
'country_id': fields.many2one('res.country', 'Country',
required=True),
'name': fields.char('State Name', size=64, required=True),
'code': fields.char('State Code', size=3, required=True),
}
def name_search(self, cr, user, name='', args=None, operator='ilike',
context=None, limit=80):
if not args:
args = []
if not context:
context = {}
ids = self.search(cr, user, [('code', '=', name)] + args, limit=limit,
context=context)
if not ids:
ids = self.search(cr, user, [('name', operator, name)] + args,
limit=limit, context=context)
return self.name_get(cr, user, ids, context)
_order = 'code'
_order = 'code'
CountryState()

View File

@ -34,52 +34,52 @@ from osv import osv,fields
# -------------------------------------------------------------------------
def _models_get2(self, cr, uid, context={}):
obj = self.pool.get('ir.model.fields')
ids = obj.search(cr, uid, [('view_load','=',1)])
res = []
done = {}
for o in obj.browse(cr, uid, ids, context=context):
if o.relation not in done:
res.append( [o.relation, o.relation])
done[o.relation] = True
return res
obj = self.pool.get('ir.model.fields')
ids = obj.search(cr, uid, [('view_load','=',1)])
res = []
done = {}
for o in obj.browse(cr, uid, ids, context=context):
if o.relation not in done:
res.append( [o.relation, o.relation])
done[o.relation] = True
return res
def _models_get(self, cr, uid, context={}):
obj = self.pool.get('ir.model.fields')
ids = obj.search(cr, uid, [('view_load','=',1)])
res = []
done = {}
for o in obj.browse(cr, uid, ids, context=context):
if o.model_id.id not in done:
res.append( [o.model_id.model, o.model_id.name])
done[o.model_id.id] = True
return res
obj = self.pool.get('ir.model.fields')
ids = obj.search(cr, uid, [('view_load','=',1)])
res = []
done = {}
for o in obj.browse(cr, uid, ids, context=context):
if o.model_id.id not in done:
res.append( [o.model_id.model, o.model_id.name])
done[o.model_id.id] = True
return res
class ir_property(osv.osv):
    """Generic property storage bound to an ir.model.fields record."""
    _name = 'ir.property'
    _columns = {
        'name': fields.char('Name', size=128),
        'value': fields.reference('Value', selection=_models_get2, size=128),
        'res_id': fields.reference('Resource', selection=_models_get, size=128),
        'company_id': fields.many2one('res.company', 'Company'),
        'fields_id': fields.many2one('ir.model.fields', 'Fields', ondelete='cascade', required=True)
    }

    def unlink(self, cr, uid, ids, context={}):
        """Delete properties together with the ir.model.fields rows they
        point to (the field definitions are removed first)."""
        if ids:
            # ids come from the ORM and are integers, so joining them into
            # the SQL string is safe here
            cr.execute('delete from ir_model_fields where id in (select fields_id from ir_property where (fields_id is not null) and (id in ('+','.join(map(str, ids))+')))')
        return super(ir_property, self).unlink(cr, uid, ids, context)

    def get(self, cr, uid, name, model, res_id=False, context={}):
        """Return the numeric id stored in the property value for the given
        field *name* of *model*, restricted to the user's company.

        :return: integer id extracted from the 'model,id' value, or False
        """
        cr.execute('select id from ir_model_fields where name=%s and model=%s', (name, model))
        row = cr.fetchone()
        if row:
            company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id
            prop_ids = self.search(cr, uid, [('fields_id', '=', row[0]), ('res_id', '=', res_id), ('company_id', '=', company_id)])
            if prop_ids:
                value = self.browse(cr, uid, prop_ids[0], context).value
                # value is stored as 'model,id'; return the id part
                return (value and int(value.split(',')[1])) or False
        return False
ir_property()

View File

@ -35,72 +35,72 @@ from osv import fields,osv
# Sale/Purchase Canal, Media
#
class res_partner_canal(osv.osv):
    """A sale/purchase channel (media) through which partners are reached."""
    _name = "res.partner.canal"
    _description = "Channels"
    _columns = {
        'name': fields.char('Channel Name', size=64, required=True),
        'active': fields.boolean('Active'),
    }
    _defaults = {
        # new channels are visible by default
        'active': lambda *a: 1,
    }
res_partner_canal()
#
# Partner: State of Mind
#
class res_partner_som(osv.osv):
    """Partner 'state of mind' with its weighting factor."""
    _name = "res.partner.som"
    _columns = {
        'name': fields.char('State of Mind', size=64, required=True),
        'factor': fields.float('Factor', required=True),
    }
res_partner_som()
def _links_get(self, cr, uid, context={}):
obj = self.pool.get('res.request.link')
ids = obj.search(cr, uid, [])
res = obj.read(cr, uid, ids, ['object', 'name'], context)
return [(r['object'], r['name']) for r in res]
obj = self.pool.get('res.request.link')
ids = obj.search(cr, uid, [])
res = obj.read(cr, uid, ids, ['object', 'name'], context)
return [(r['object'], r['name']) for r in res]
class res_partner_event(osv.osv):
    """An event (sale opportunity, purchase offer, prospect contact, ...)
    logged against a partner."""
    _name = "res.partner.event"
    _columns = {
        'name': fields.char('Events', size=64, required=True),
        'som': fields.many2one('res.partner.som', 'State of Mind'),
        'description': fields.text('Description'),
        'planned_cost': fields.float('Planned Cost'),
        'planned_revenue': fields.float('Planned Revenue'),
        'probability': fields.float('Probability (0.50)'),
        'document': fields.reference('Document', selection=_links_get, size=128),
        'partner_id': fields.many2one('res.partner', 'Partner', select=True),
        'date': fields.datetime('Date', size=16),
        'user_id': fields.many2one('res.users', 'User'),
        'canal_id': fields.many2one('res.partner.canal', 'Channel'),
        'partner_type': fields.selection([('customer', 'Customer'), ('retailer', 'Retailer'), ('prospect', 'Commercial Prospect')], 'Partner Relation'),
        'type': fields.selection([('sale', 'Sale Opportunity'), ('purchase', 'Purchase Offer'), ('prospect', 'Prospect Contact')], 'Type of Event'),
        'event_ical_id': fields.char('iCal id', size=64),
    }
    # newest events first
    _order = 'date desc'
    _defaults = {
        'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
    }
res_partner_event()
class res_partner_event_type(osv.osv):
    """Typology of partner events, looked up by a unique key."""
    _name = "res.partner.event.type"
    _description = "Partner Events"
    _columns = {
        'name': fields.char('Event Type', size=64, required=True),
        'key': fields.char('Key', size=64, required=True),
        'active': fields.boolean('Active'),
    }
    _defaults = {
        'active': lambda *a: 1
    }

    def check(self, cr, uid, key, context={}):
        """Return the ids of the event types whose key equals *key*
        (empty list if none)."""
        return self.search(cr, uid, [('key', '=', key)])
res_partner_event_type()

View File

@ -35,399 +35,399 @@ import ir
import pooler
class res_partner_function(osv.osv):
    """Job/function of a partner contact."""
    _name = 'res.partner.function'
    _description = 'Function of the contact'
    _columns = {
        'name': fields.char('Function name', size=64, required=True),
        'code': fields.char('Code', size=8),
    }
    _order = 'name'
res_partner_function()
class res_payterm(osv.osv):
    """Payment term shortcut attached to partners."""
    _description = 'Payment term'
    _name = 'res.payterm'
    _columns = {
        'name': fields.char('Payment term (short name)', size=64),
    }
res_payterm()
class res_partner_category(osv.osv):
    """Hierarchical partner category ('Parent / Child' display names)."""

    def name_get(self, cr, uid, ids, context={}):
        """Return display names prefixed with the parent category name."""
        if not len(ids):
            return []
        reads = self.read(cr, uid, ids, ['name', 'parent_id'], context)
        res = []
        for record in reads:
            name = record['name']
            if record['parent_id']:
                # parent_id is a (id, name) pair from read()
                name = record['parent_id'][1] + ' / ' + name
            res.append((record['id'], name))
        return res

    def _name_get_fnc(self, cr, uid, ids, prop, unknow_none, unknow_dict):
        """Function-field wrapper: name_get() result as an {id: name} dict."""
        return dict(self.name_get(cr, uid, ids))

    def _check_recursion(self, cr, uid, ids):
        """Return False when a parent cycle is detected.

        Walks up the parent chain at most 100 levels; ids are integers
        from the ORM so joining them into SQL is safe.
        """
        level = 100
        while len(ids):
            cr.execute('select distinct parent_id from res_partner_category where id in ('+','.join(map(str, ids))+')')
            ids = filter(None, map(lambda x: x[0], cr.fetchall()))
            if not level:
                return False
            level -= 1
        return True

    _description = 'Partner Categories'
    _name = 'res.partner.category'
    _columns = {
        'name': fields.char('Category Name', required=True, size=64),
        'parent_id': fields.many2one('res.partner.category', 'Parent Category', select=True),
        'complete_name': fields.function(_name_get_fnc, method=True, type="char", string='Name'),
        'child_ids': fields.one2many('res.partner.category', 'parent_id', 'Childs Category'),
        'active': fields.boolean('Active', help="The active field allows you to hide the category, without removing it."),
    }
    _constraints = [
        (_check_recursion, 'Error ! You can not create recursive categories.', ['parent_id'])
    ]
    _defaults = {
        'active': lambda *a: 1,
    }
    _order = 'parent_id,name'
res_partner_category()
class res_partner_title(osv.osv):
    """Honorific title usable either for partners or for contacts."""
    _name = 'res.partner.title'
    _columns = {
        'name': fields.char('Title', required=True, size=46, translate=True),
        'shortcut': fields.char('Shortcut', required=True, size=16),
        'domain': fields.selection([('partner', 'Partner'), ('contact', 'Contact')], 'Domain', required=True, size=24),
    }
    _order = 'name'
res_partner_title()
def _contact_title_get(self, cr, uid, context={}):
obj = self.pool.get('res.partner.title')
ids = obj.search(cr, uid, [('domain', '=', 'contact')])
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
return [(r['shortcut'], r['name']) for r in res]
obj = self.pool.get('res.partner.title')
ids = obj.search(cr, uid, [('domain', '=', 'contact')])
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
return [(r['shortcut'], r['name']) for r in res]
def _partner_title_get(self, cr, uid, context={}):
obj = self.pool.get('res.partner.title')
ids = obj.search(cr, uid, [('domain', '=', 'partner')])
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
return [(r['shortcut'], r['name']) for r in res]
obj = self.pool.get('res.partner.title')
ids = obj.search(cr, uid, [('domain', '=', 'partner')])
res = obj.read(cr, uid, ids, ['shortcut','name'], context)
return [(r['shortcut'], r['name']) for r in res]
def _lang_get(self, cr, uid, context={}):
obj = self.pool.get('res.lang')
ids = obj.search(cr, uid, [])
res = obj.read(cr, uid, ids, ['code', 'name'], context)
res = [(r['code'], r['name']) for r in res]
return res + [(False, '')]
obj = self.pool.get('res.lang')
ids = obj.search(cr, uid, [])
res = obj.read(cr, uid, ids, ['code', 'name'], context)
res = [(r['code'], r['name']) for r in res]
return res + [(False, '')]
class res_partner(osv.osv):
    """A business partner (customer, supplier, prospect, ...)."""
    _description = 'Partner'
    _name = "res.partner"
    _order = "name"
    _columns = {
        'name': fields.char('Name', size=128, required=True, select=True),
        'date': fields.date('Date', select=1),
        'title': fields.selection(_partner_title_get, 'Title', size=32),
        'parent_id': fields.many2one('res.partner', 'Main Company', select=2),
        'child_ids': fields.one2many('res.partner', 'parent_id', 'Partner Ref.'),
        'ref': fields.char('Code', size=64),
        'lang': fields.selection(_lang_get, 'Language', size=5),
        'user_id': fields.many2one('res.users', 'Dedicated Salesman', help='The internal user that is in charge of communicating with this partner if any.'),
        'responsible': fields.many2one('res.users', 'Users'),
        'vat': fields.char('VAT', size=32, help="Value Added Tax number"),
        'bank_ids': fields.one2many('res.partner.bank', 'partner_id', 'Banks'),
        'website': fields.char('Website', size=64),
        'comment': fields.text('Notes'),
        'address': fields.one2many('res.partner.address', 'partner_id', 'Contacts'),
        'category_id': fields.many2many('res.partner.category', 'res_partner_category_rel', 'partner_id', 'category_id', 'Categories'),
        'events': fields.one2many('res.partner.event', 'partner_id', 'Events'),
        'credit_limit': fields.float(string='Credit Limit'),
        'ean13': fields.char('EAN13', size=13),
        'active': fields.boolean('Active'),
    }
    _defaults = {
        'active': lambda *a: 1,
    }
    _sql_constraints = [
        ('name_uniq', 'unique (name)', 'The name of the partner must be unique !')
    ]

    def copy(self, cr, uid, id, default=None, context={}):
        """Duplicate a partner; the copy's name gets a ' (copy)' suffix.

        BUGFIX: the original called default.update() unconditionally and
        crashed with AttributeError when 'default' was left to None.
        """
        if default is None:
            default = {}
        name = self.read(cr, uid, [id], ['name'])[0]['name']
        default.update({'name': name+' (copy)'})
        return super(res_partner, self).copy(cr, uid, id, default, context)

    def _check_ean_key(self, cr, uid, ids):
        """Validate the EAN13 check digit of each partner's barcode.

        Empty/False barcodes are accepted; a non-empty barcode must be 13
        digits with a valid checksum.
        """
        for partner_o in pooler.get_pool(cr.dbname).get('res.partner').read(cr, uid, ids, ['ean13',]):
            thisean = partner_o['ean13']
            if thisean and thisean != '':
                if len(thisean) != 13:
                    return False
                # weighted sum over the first 12 digits (weights 1,3,1,3,...)
                sum = 0
                for i in range(12):
                    if not (i % 2):
                        sum += int(thisean[i])
                    else:
                        sum += 3 * int(thisean[i])
                if math.ceil(sum/10.0)*10 - sum != int(thisean[12]):
                    return False
        return True
    # EAN validation is deliberately disabled; kept for reference.
#   _constraints = [(_check_ean_key, 'Error: Invalid ean code', ['ean13'])]

    def name_get(self, cr, uid, ids, context={}):
        """Display either the partner reference or the partner name,
        depending on the 'show_ref' context flag."""
        if not len(ids):
            return []
        if context.get('show_ref', False):
            rec_name = 'ref'
        else:
            rec_name = 'name'
        return [(r['id'], r[rec_name]) for r in self.read(cr, uid, ids, [rec_name], context)]

    def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=80):
        """Search partners by exact reference first, then by name."""
        if not args:
            args = []
        if not context:
            context = {}
        if name:
            ids = self.search(cr, uid, [('ref', '=', name)] + args, limit=limit, context=context)
            if not ids:
                ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit, context=context)
        else:
            ids = self.search(cr, uid, args, limit=limit, context=context)
        return self.name_get(cr, uid, ids, context)

    def _email_send(self, cr, uid, ids, email_from, subject, body, on_error=None):
        """Send *body* to the first address of each partner that has an
        email (helper executed by the cron jobs created in email_send)."""
        for partner in self.browse(cr, uid, ids):
            if len(partner.address):
                if partner.address[0].email:
                    tools.email_send(email_from, [partner.address[0].email], subject, body, on_error)
        return True

    def email_send(self, cr, uid, ids, email_from, subject, body, on_error=''):
        """Schedule the mailing asynchronously through ir.cron, in batches
        of 16 partners per job."""
        while len(ids):
            self.pool.get('ir.cron').create(cr, uid, {
                'name': 'Send Partner Emails',
                'user_id': uid,
                'model': 'res.partner',
                'function': '_email_send',
                'args': repr([ids[:16], email_from, subject, body, on_error])
            })
            ids = ids[16:]
        return True

    def address_get(self, cr, uid, ids, adr_pref=None):
        """Map each requested address type to an address id.

        :param adr_pref: list of address types (defaults to ['default']);
            types without a dedicated address fall back to the 'default'
            address, or to the first address found, or False.
        """
        if adr_pref is None:
            # avoid the shared-mutable-default pitfall of the original
            adr_pref = ['default']
        cr.execute('select type,id from res_partner_address where partner_id in ('+','.join(map(str, ids))+')')
        res = cr.fetchall()
        adr = dict(res)
        # get the id of the (first) default address if there is one,
        # otherwise get the id of the first address in the list
        if res:
            default_address = adr.get('default', res[0][1])
        else:
            default_address = False
        result = {}
        for a in adr_pref:
            result[a] = adr.get(a, default_address)
        return result

    def gen_next_ref(self, cr, uid, ids):
        """Assign the next free numeric reference to a single partner.

        BUGFIX: the original had 'except e:', which is invalid (it would
        raise NameError instead of reporting the conversion failure).
        """
        if len(ids) != 1:
            return True
        # compute the next number ref
        cr.execute("select ref from res_partner where ref is not null order by char_length(ref) desc, ref desc limit 1")
        res = cr.dictfetchall()
        ref = res and res[0]['ref'] or '0'
        try:
            nextref = int(ref)+1
        except ValueError:
            raise osv.except_osv(_('Warning'), _("Couldn't generate the next id because some partners have an alphabetic id !"))
        # update the current partner
        cr.execute("update res_partner set ref=%d where id=%d", (nextref, ids[0]))
        return True
res_partner()
class res_partner_address(osv.osv):
    """A contact/address attached to a partner."""
    _description = 'Partner Contact'
    _name = 'res.partner.address'
    _order = 'id'
    _columns = {
        'partner_id': fields.many2one('res.partner', 'Partner', required=True, ondelete='cascade', select=True),
        'type': fields.selection([('default', 'Default'), ('invoice', 'Invoice'), ('delivery', 'Delivery'), ('contact', 'Contact'), ('other', 'Other')], 'Address Type'),
        'function': fields.many2one('res.partner.function', 'Function'),
        'title': fields.selection(_contact_title_get, 'Title', size=32),
        'name': fields.char('Contact Name', size=64),
        'street': fields.char('Street', size=128),
        'street2': fields.char('Street2', size=128),
        'zip': fields.char('Zip', change_default=True, size=24),
        'city': fields.char('City', size=128),
        'state_id': fields.many2one("res.country.state", 'State', change_default=True, domain="[('country_id','=',country_id)]"),
        'country_id': fields.many2one('res.country', 'Country', change_default=True),
        'email': fields.char('E-Mail', size=240),
        'phone': fields.char('Phone', size=64),
        'fax': fields.char('Fax', size=64),
        'mobile': fields.char('Mobile', size=64),
        'birthdate': fields.char('Birthdate', size=64),
        'active': fields.boolean('Active'),
    }
    _defaults = {
        'active': lambda *a: 1,
    }

    def name_get(self, cr, user, ids, context={}):
        """Display either the owning partner's name or a compact
        'name, street zip city' string, depending on the
        'contact_display' context key."""
        if not len(ids):
            return []
        res = []
        for r in self.read(cr, user, ids, ['name', 'zip', 'city', 'partner_id', 'street']):
            if context.get('contact_display', 'contact') == 'partner':
                # partner_id is a (id, name) pair from read()
                res.append((r['id'], r['partner_id'][1]))
            else:
                addr = str(r['name'] or '')
                if r['name'] and (r['zip'] or r['city']):
                    addr += ', '
                addr += str(r['street'] or '') + ' ' + str(r['zip'] or '') + ' ' + str(r['city'] or '')
                res.append((r['id'], addr.strip() or '/'))
        return res

    def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=80):
        """Search addresses by partner, or by zip / city / contact name,
        depending on the 'contact_display' context key."""
        if not args:
            args = []
        if not context:
            context = {}
        if context.get('contact_display', 'contact') == 'partner':
            ids = self.search(cr, user, [('partner_id', operator, name)], limit=limit, context=context)
        else:
            ids = self.search(cr, user, [('zip', '=', name)] + args, limit=limit, context=context)
            if not ids:
                ids = self.search(cr, user, [('city', operator, name)] + args, limit=limit, context=context)
            if name:
                ids += self.search(cr, user, [('name', operator, name)] + args, limit=limit, context=context)
                ids += self.search(cr, user, [('partner_id', operator, name)] + args, limit=limit, context=context)
        return self.name_get(cr, user, ids, context=context)
res_partner_address()
class res_partner_bank_type(osv.osv):
    """A kind of bank account, with its per-type field behaviour."""
    _description = 'Bank Account Type'
    _name = 'res.partner.bank.type'
    _columns = {
        'name': fields.char('Name', size=64, required=True),
        'code': fields.char('Code', size=64, required=True),
        'field_ids': fields.one2many('res.partner.bank.type.field', 'bank_type_id', 'Type fields'),
    }
res_partner_bank_type()
class res_partner_bank_type_fields(osv.osv):
    """Field-level behaviour (required/readonly/size) for a bank type."""
    _description = 'Bank type fields'
    _name = 'res.partner.bank.type.field'
    _columns = {
        'name': fields.char('Field name', size=64, required=True),
        'bank_type_id': fields.many2one('res.partner.bank.type', 'Bank type', required=True, ondelete='cascade'),
        'required': fields.boolean('Required'),
        'readonly': fields.boolean('Readonly'),
        'size': fields.integer('Max. Size'),
    }
res_partner_bank_type_fields()
class res_partner_bank(osv.osv):
    '''Bank Accounts'''
    _name = "res.partner.bank"
    _rec_name = "state"
    _description = __doc__
    _order = 'sequence'

    def _bank_type_get(self, cr, uid, context=None):
        """Selection list of bank types as (code, name) pairs."""
        bank_type_obj = self.pool.get('res.partner.bank.type')
        result = []
        type_ids = bank_type_obj.search(cr, uid, [])
        for bank_type in bank_type_obj.browse(cr, uid, type_ids):
            result.append((bank_type.code, bank_type.name))
        return result

    def _default_value(self, cursor, user, field, context=None):
        """Default a bank-account field from the partner's addresses passed
        through context['address'] (list of (op, id, values) tuples).

        The 'default' typed address wins; otherwise the last untyped
        address seen provides the value.
        """
        if field in ('country_id', 'state_id'):
            value = False
        else:
            value = ''
        # ROBUSTNESS: also guard against context being None (the original
        # crashed on context.get in that case)
        if not context or not context.get('address', False):
            return value
        for ham, spam, address in context['address']:
            if address.get('type', False) == 'default':
                return address.get(field, value)
            elif not address.get('type', False):
                value = address.get(field, value)
        return value

    _columns = {
        'name': fields.char('Description', size=128),
        'acc_number': fields.char('Account number', size=64, required=False),
        'bank': fields.many2one('res.bank', 'Bank'),
        'owner_name': fields.char('Account owner', size=64),
        'street': fields.char('Street', size=128),
        'zip': fields.char('Zip', change_default=True, size=24),
        'city': fields.char('City', size=128),
        'country_id': fields.many2one('res.country', 'Country',
            change_default=True),
        # BUGFIX: 'state_id' appeared twice in the original dict; the second
        # definition (without change_default) silently overrode this one.
        # A single definition is kept.
        'state_id': fields.many2one("res.country.state", 'State',
            change_default=True, domain="[('country_id','=',country_id)]"),
        'partner_id': fields.many2one('res.partner', 'Partner', required=True,
            ondelete='cascade', select=True),
        'state': fields.selection(_bank_type_get, 'Bank type', required=True,
            change_default=True),
        'sequence': fields.integer('Sequence'),
    }
    _defaults = {
        'owner_name': lambda obj, cursor, user, context: obj._default_value(
            cursor, user, 'name', context=context),
        'street': lambda obj, cursor, user, context: obj._default_value(
            cursor, user, 'street', context=context),
        'city': lambda obj, cursor, user, context: obj._default_value(
            cursor, user, 'city', context=context),
        'zip': lambda obj, cursor, user, context: obj._default_value(
            cursor, user, 'zip', context=context),
        'country_id': lambda obj, cursor, user, context: obj._default_value(
            cursor, user, 'country_id', context=context),
        'state_id': lambda obj, cursor, user, context: obj._default_value(
            cursor, user, 'state_id', context=context),
    }

    def fields_get(self, cr, uid, fields=None, context=None):
        """Extend fields_get() with per-bank-type 'states' entries so that
        required/readonly follow the selected bank type in the client."""
        res = super(res_partner_bank, self).fields_get(cr, uid, fields, context)
        bank_type_obj = self.pool.get('res.partner.bank.type')
        type_ids = bank_type_obj.search(cr, uid, [])
        for type in bank_type_obj.browse(cr, uid, type_ids):
            for field in type.field_ids:
                if field.name in res:
                    res[field.name].setdefault('states', {})
                    res[field.name]['states'][type.code] = [
                        ('readonly', field.readonly),
                        ('required', field.required)]
        return res

    def name_get(self, cr, uid, ids, context=None):
        """Display each bank account by its account number."""
        if not len(ids):
            return []
        res = []
        for account in self.browse(cr, uid, ids):
            res.append((account.id, account.acc_number))
        return res
res_partner_bank()

View File

@ -31,16 +31,16 @@ import wizard
import netsvc
class wizard_clear_ids(wizard.interface):
def _clear_ids(self, cr, uid, data, context):
service = netsvc.LocalService("object_proxy")
service.execute(cr.dbname, uid, 'res.partner', 'write', data['ids'], {'ref': False})
return {}
def _clear_ids(self, cr, uid, data, context):
service = netsvc.LocalService("object_proxy")
service.execute(cr.dbname, uid, 'res.partner', 'write', data['ids'], {'ref': False})
return {}
states = {
'init': {
'actions': [_clear_ids],
'result': {'type':'state', 'state':'end'}
}
}
states = {
'init': {
'actions': [_clear_ids],
'result': {'type':'state', 'state':'end'}
}
}
wizard_clear_ids('res.partner.clear_ids')

View File

@ -34,70 +34,70 @@ from tools.misc import UpdateableStr
import pooler
def _is_pair(x):
return not x%2
return not x%2
def _get_ean_key(string):
if not string or string=='':
return '0'
if len(string)!=12:
return '0'
sum=0
for i in range(12):
if _is_pair(i):
sum+=int(string[i])
else:
sum+=3*int(string[i])
return str(int(math.ceil(sum/10.0)*10-sum))
if not string or string=='':
return '0'
if len(string)!=12:
return '0'
sum=0
for i in range(12):
if _is_pair(i):
sum+=int(string[i])
else:
sum+=3*int(string[i])
return str(int(math.ceil(sum/10.0)*10-sum))
#FIXME: this is not concurrency safe !!!!
_check_arch = UpdateableStr()
_check_fields = {}
def _check_key(self, cr, uid, data, context):
partner_table=pooler.get_pool(cr.dbname).get('res.partner')
partners = partner_table.browse(cr, uid, data['ids'])
_check_arch_lst=['<?xml version="1.0"?>', '<form string="Check EAN13">', '<label string=""/>', '<label string=""/>','<label string="Original" />', '<label string="Computed" />']
for partner in partners:
if partner['ean13'] and len(partner['ean13'])>11 and len(partner['ean13'])<14:
_check_arch_lst.append('<label colspan="2" string="%s" />' % partner['ean13']);
key=_get_ean_key(partner['ean13'][:12])
_check_arch_lst.append('<label string=""/>')
if len(partner['ean13'])==12:
_check_arch_lst.append('<label string="" />');
else:
_check_arch_lst.append('<label string="%s" />' % partner['ean13'][12])
_check_arch_lst.append('<label string="%s" />' % key)
_check_arch_lst.append('</form>')
_check_arch.string = '\n'.join(_check_arch_lst)
return {}
partner_table=pooler.get_pool(cr.dbname).get('res.partner')
partners = partner_table.browse(cr, uid, data['ids'])
_check_arch_lst=['<?xml version="1.0"?>', '<form string="Check EAN13">', '<label string=""/>', '<label string=""/>','<label string="Original" />', '<label string="Computed" />']
for partner in partners:
if partner['ean13'] and len(partner['ean13'])>11 and len(partner['ean13'])<14:
_check_arch_lst.append('<label colspan="2" string="%s" />' % partner['ean13']);
key=_get_ean_key(partner['ean13'][:12])
_check_arch_lst.append('<label string=""/>')
if len(partner['ean13'])==12:
_check_arch_lst.append('<label string="" />');
else:
_check_arch_lst.append('<label string="%s" />' % partner['ean13'][12])
_check_arch_lst.append('<label string="%s" />' % key)
_check_arch_lst.append('</form>')
_check_arch.string = '\n'.join(_check_arch_lst)
return {}
def _update_ean(self, cr, uid, data, context):
partner_table = pooler.get_pool(cr.dbname).get('res.partner')
partners = partner_table.browse(cr, uid, data['ids'])
for partner in partners:
partner_table.write(cr, uid, data['ids'], {
'ean13': "%s%s" % (partner['ean13'][:12], _get_ean_key(partner['ean13'][:12]))
})
return {}
partner_table = pooler.get_pool(cr.dbname).get('res.partner')
partners = partner_table.browse(cr, uid, data['ids'])
for partner in partners:
partner_table.write(cr, uid, data['ids'], {
'ean13': "%s%s" % (partner['ean13'][:12], _get_ean_key(partner['ean13'][:12]))
})
return {}
class wiz_ean_check(wizard.interface):
states = {
'init': {
'actions': [_check_key],
'result': {
'type': 'form',
'arch': _check_arch,
'fields': _check_fields,
'state': (('end', 'Ignore'), ('correct', 'Correct EAN13'))
}
},
'correct' : {
'actions': [_update_ean],
'result': {
'type': 'state',
'state': 'end'
}
}
}
states = {
'init': {
'actions': [_check_key],
'result': {
'type': 'form',
'arch': _check_arch,
'fields': _check_fields,
'state': (('end', 'Ignore'), ('correct', 'Correct EAN13'))
}
},
'correct' : {
'actions': [_update_ean],
'result': {
'type': 'state',
'state': 'end'
}
}
}
wiz_ean_check('res.partner.ean13')

View File

@ -33,47 +33,47 @@ import tools
sms_send_form = '''<?xml version="1.0"?>
<form string="%s">
<separator string="%s" colspan="4"/>
<field name="app_id"/>
<newline/>
<field name="user"/>
<field name="password"/>
<newline/>
<field name="text" colspan="4"/>
<separator string="%s" colspan="4"/>
<field name="app_id"/>
<newline/>
<field name="user"/>
<field name="password"/>
<newline/>
<field name="text" colspan="4"/>
</form>''' % ('SMS - Gateway: clickatell','Bulk SMS send')
sms_send_fields = {
'app_id': {'string':'API ID', 'type':'char', 'required':True},
'user': {'string':'Login', 'type':'char', 'required':True},
'password': {'string':'Password', 'type':'char', 'required':True},
'text': {'string':'SMS Message', 'type':'text', 'required':True}
'app_id': {'string':'API ID', 'type':'char', 'required':True},
'user': {'string':'Login', 'type':'char', 'required':True},
'password': {'string':'Password', 'type':'char', 'required':True},
'text': {'string':'SMS Message', 'type':'text', 'required':True}
}
def _sms_send(self, cr, uid, data, context):
service = netsvc.LocalService("object_proxy")
service = netsvc.LocalService("object_proxy")
res_ids = service.execute(cr.dbname, uid, 'res.partner.address', 'search', [('partner_id','in',data['ids']),('type','=','default')])
res = service.execute(cr.dbname, uid, 'res.partner.address', 'read', res_ids, ['mobile'])
res_ids = service.execute(cr.dbname, uid, 'res.partner.address', 'search', [('partner_id','in',data['ids']),('type','=','default')])
res = service.execute(cr.dbname, uid, 'res.partner.address', 'read', res_ids, ['mobile'])
nbr = 0
for r in res:
to = r['mobile']
if to:
tools.sms_send(data['form']['user'], data['form']['password'], data['form']['app_id'], unicode(data['form']['text'], 'utf-8').encode('latin1'), to)
nbr += 1
return {'sms_sent': nbr}
nbr = 0
for r in res:
to = r['mobile']
if to:
tools.sms_send(data['form']['user'], data['form']['password'], data['form']['app_id'], unicode(data['form']['text'], 'utf-8').encode('latin1'), to)
nbr += 1
return {'sms_sent': nbr}
class part_sms(wizard.interface):
states = {
'init': {
'actions': [],
'result': {'type': 'form', 'arch':sms_send_form, 'fields': sms_send_fields, 'state':[('end','Cancel'), ('send','Send SMS')]}
},
'send': {
'actions': [_sms_send],
'result': {'type': 'state', 'state':'end'}
}
}
states = {
'init': {
'actions': [],
'result': {'type': 'form', 'arch':sms_send_form, 'fields': sms_send_fields, 'state':[('end','Cancel'), ('send','Send SMS')]}
},
'send': {
'actions': [_sms_send],
'result': {'type': 'state', 'state':'end'}
}
}
part_sms('res.partner.sms_send')

View File

@ -33,48 +33,48 @@ import tools
email_send_form = '''<?xml version="1.0"?>
<form string="Mass Mailing">
<field name="from"/>
<newline/>
<field name="subject"/>
<newline/>
<field name="text"/>
<field name="from"/>
<newline/>
<field name="subject"/>
<newline/>
<field name="text"/>
</form>'''
email_send_fields = {
'from': {'string':"Sender's email", 'type':'char', 'size':64, 'required':True},
'subject': {'string':'Subject', 'type':'char', 'size':64, 'required':True},
'text': {'string':'Message', 'type':'text_tag', 'required':True}
'from': {'string':"Sender's email", 'type':'char', 'size':64, 'required':True},
'subject': {'string':'Subject', 'type':'char', 'size':64, 'required':True},
'text': {'string':'Message', 'type':'text_tag', 'required':True}
}
# this sends an email to ALL the addresses of the selected partners.
def _mass_mail_send(self, cr, uid, data, context):
nbr = 0
partners = pooler.get_pool(cr.dbname).get('res.partner').browse(cr, uid, data['ids'], context)
for partner in partners:
for adr in partner.address:
if adr.email:
name = adr.name or partner.name
to = '%s <%s>' % (name, adr.email)
nbr = 0
partners = pooler.get_pool(cr.dbname).get('res.partner').browse(cr, uid, data['ids'], context)
for partner in partners:
for adr in partner.address:
if adr.email:
name = adr.name or partner.name
to = '%s <%s>' % (name, adr.email)
#TODO: add some tests to check for invalid email addresses
#CHECKME: maybe we should use res.partner/email_send
tools.email_send(data['form']['from'], [to], data['form']['subject'], data['form']['text'])
nbr += 1
pooler.get_pool(cr.dbname).get('res.partner.event').create(cr, uid,
{'name': 'Email sent through mass mailing',
'partner_id': partner.id,
'description': data['form']['text'], })
tools.email_send(data['form']['from'], [to], data['form']['subject'], data['form']['text'])
nbr += 1
pooler.get_pool(cr.dbname).get('res.partner.event').create(cr, uid,
{'name': 'Email sent through mass mailing',
'partner_id': partner.id,
'description': data['form']['text'], })
#TODO: log number of message sent
return {'email_sent': nbr}
return {'email_sent': nbr}
class part_email(wizard.interface):
states = {
'init': {
'actions': [],
'result': {'type': 'form', 'arch': email_send_form, 'fields': email_send_fields, 'state':[('end','Cancel'), ('send','Send Email')]}
},
'send': {
'actions': [_mass_mail_send],
'result': {'type': 'state', 'state':'end'}
}
}
states = {
'init': {
'actions': [],
'result': {'type': 'form', 'arch': email_send_form, 'fields': email_send_fields, 'state':[('end','Cancel'), ('send','Send Email')]}
},
'send': {
'actions': [_mass_mail_send],
'result': {'type': 'state', 'state':'end'}
}
}
part_email('res.partner.spam_send')

View File

@ -31,152 +31,152 @@ from osv import fields,osv
import tools
class res_company(osv.osv):
_name = "res.company"
_name = "res.company"
_columns = {
'name': fields.char('Company Name', size=64, required=True),
'parent_id': fields.many2one('res.company', 'Parent Company', select=True),
'child_ids': fields.one2many('res.company', 'parent_id', 'Childs Company'),
'partner_id': fields.many2one('res.partner', 'Partner', required=True),
'rml_header1': fields.char('Report Header', size=200),
'rml_footer1': fields.char('Report Footer 1', size=200),
'rml_footer2': fields.char('Report Footer 2', size=200),
'rml_header' : fields.text('RML Header'),
'rml_header2' : fields.text('RML Internal Header'),
'logo' : fields.binary('Logo'),
'currency_id': fields.many2one('res.currency', 'Currency', required=True),
}
_columns = {
'name': fields.char('Company Name', size=64, required=True),
'parent_id': fields.many2one('res.company', 'Parent Company', select=True),
'child_ids': fields.one2many('res.company', 'parent_id', 'Childs Company'),
'partner_id': fields.many2one('res.partner', 'Partner', required=True),
'rml_header1': fields.char('Report Header', size=200),
'rml_footer1': fields.char('Report Footer 1', size=200),
'rml_footer2': fields.char('Report Footer 2', size=200),
'rml_header' : fields.text('RML Header'),
'rml_header2' : fields.text('RML Internal Header'),
'logo' : fields.binary('Logo'),
'currency_id': fields.many2one('res.currency', 'Currency', required=True),
}
def _get_child_ids(self, cr, uid, uid2, context={}):
company = self.pool.get('res.users').company_get(cr, uid, uid2)
ids = self._get_company_children(cr, uid, company)
return ids
def _get_child_ids(self, cr, uid, uid2, context={}):
company = self.pool.get('res.users').company_get(cr, uid, uid2)
ids = self._get_company_children(cr, uid, company)
return ids
def _get_company_children(self, cr, uid=None, company=None):
if not company:
return []
ids = self.search(cr, uid, [('parent_id','child_of',[company])])
return ids
_get_company_children = tools.cache()(_get_company_children)
def _get_company_children(self, cr, uid=None, company=None):
if not company:
return []
ids = self.search(cr, uid, [('parent_id','child_of',[company])])
return ids
_get_company_children = tools.cache()(_get_company_children)
def _get_partner_hierarchy(self, cr, uid, company_id, context={}):
if company_id:
parent_id = self.browse(cr, uid, company_id)['parent_id']
if parent_id:
return self._get_partner_hierarchy(cr, uid, parent_id.id, context)
else:
return self._get_partner_descendance(cr, uid, company_id, [], context)
return []
def _get_partner_hierarchy(self, cr, uid, company_id, context={}):
if company_id:
parent_id = self.browse(cr, uid, company_id)['parent_id']
if parent_id:
return self._get_partner_hierarchy(cr, uid, parent_id.id, context)
else:
return self._get_partner_descendance(cr, uid, company_id, [], context)
return []
def _get_partner_descendance(self, cr, uid, company_id, descendance, context={}):
descendance.append(self.browse(cr, uid, company_id).partner_id.id)
for child_id in self._get_company_children(cr, uid, company_id):
if child_id != company_id:
descendance = self._get_partner_descendance(cr, uid, child_id, descendance)
return descendance
def _get_partner_descendance(self, cr, uid, company_id, descendance, context={}):
descendance.append(self.browse(cr, uid, company_id).partner_id.id)
for child_id in self._get_company_children(cr, uid, company_id):
if child_id != company_id:
descendance = self._get_partner_descendance(cr, uid, child_id, descendance)
return descendance
def __init__(self, *args, **argv):
return super(res_company, self).__init__(*args, **argv)
def __init__(self, *args, **argv):
return super(res_company, self).__init__(*args, **argv)
#
# This function restart the cache on the _get_company_children method
#
def cache_restart(self, uid=None):
self._get_company_children()
#
# This function restart the cache on the _get_company_children method
#
def cache_restart(self, uid=None):
self._get_company_children()
def create(self, *args, **argv):
self.cache_restart()
return super(res_company, self).create(*args, **argv)
def create(self, *args, **argv):
self.cache_restart()
return super(res_company, self).create(*args, **argv)
def write(self, *args, **argv):
self.cache_restart()
# Restart the cache on the company_get method
self.pool.get('ir.rule').domain_get()
return super(res_company, self).write(*args, **argv)
def write(self, *args, **argv):
self.cache_restart()
# Restart the cache on the company_get method
self.pool.get('ir.rule').domain_get()
return super(res_company, self).write(*args, **argv)
def _get_euro(self, cr, uid, context={}):
try:
return self.pool.get('res.currency').search(cr, uid, [('rate', '=', 1.0),])[0]
except:
return 1
def _get_euro(self, cr, uid, context={}):
try:
return self.pool.get('res.currency').search(cr, uid, [('rate', '=', 1.0),])[0]
except:
return 1
def _check_recursion(self, cr, uid, ids):
level = 100
while len(ids):
cr.execute('select distinct parent_id from res_company where id in ('+','.join(map(str,ids))+')')
ids = filter(None, map(lambda x:x[0], cr.fetchall()))
if not level:
return False
level -= 1
return True
def _check_recursion(self, cr, uid, ids):
level = 100
while len(ids):
cr.execute('select distinct parent_id from res_company where id in ('+','.join(map(str,ids))+')')
ids = filter(None, map(lambda x:x[0], cr.fetchall()))
if not level:
return False
level -= 1
return True
def _get_header2(self,cr,uid,ids):
return """
<header>
<pageTemplate>
<frame id="first" x1="1.3cm" y1="1.5cm" width="18.4cm" height="26.5cm"/>
<pageGraphics>
<fill color="black"/>
<stroke color="black"/>
<setFont name="Helvetica" size="8"/>
<drawString x="1.3cm" y="28.3cm"> [[ formatLang(time.strftime("%Y-%m-%d"), date=True) ]] [[ time.strftime("%H:%M") ]]</drawString>
<setFont name="Helvetica-Bold" size="10"/>
<drawString x="9.8cm" y="28.3cm">[[ company.partner_id.name ]]</drawString>
<setFont name="Helvetica" size="8"/>
<drawRightString x="19.7cm" y="28.3cm"><pageNumber/> / </drawRightString>
<drawString x="19.8cm" y="28.3cm"><pageCount/></drawString>
<stroke color="#aaaaaa"/>
<lines>1.3cm 28.1cm 20cm 28.1cm</lines>
</pageGraphics>
</pageTemplate>
def _get_header2(self,cr,uid,ids):
return """
<header>
<pageTemplate>
<frame id="first" x1="1.3cm" y1="1.5cm" width="18.4cm" height="26.5cm"/>
<pageGraphics>
<fill color="black"/>
<stroke color="black"/>
<setFont name="Helvetica" size="8"/>
<drawString x="1.3cm" y="28.3cm"> [[ formatLang(time.strftime("%Y-%m-%d"), date=True) ]] [[ time.strftime("%H:%M") ]]</drawString>
<setFont name="Helvetica-Bold" size="10"/>
<drawString x="9.8cm" y="28.3cm">[[ company.partner_id.name ]]</drawString>
<setFont name="Helvetica" size="8"/>
<drawRightString x="19.7cm" y="28.3cm"><pageNumber/> / </drawRightString>
<drawString x="19.8cm" y="28.3cm"><pageCount/></drawString>
<stroke color="#aaaaaa"/>
<lines>1.3cm 28.1cm 20cm 28.1cm</lines>
</pageGraphics>
</pageTemplate>
</header>"""
def _get_header(self,cr,uid,ids):
try :
return tools.file_open('custom/corporate_rml_header.rml').read()
except:
return """
<header>
<pageTemplate>
<frame id="first" x1="1.3cm" y1="2.5cm" height="23.0cm" width="19cm"/>
<pageGraphics>
<!-- You Logo - Change X,Y,Width and Height -->
<image x="1.3cm" y="27.6cm" height="40.0" >[[company.logo]]</image>
<setFont name="Helvetica" size="8"/>
<fill color="black"/>
<stroke color="black"/>
<lines>1.3cm 27.7cm 20cm 27.7cm</lines>
def _get_header(self,cr,uid,ids):
try :
return tools.file_open('custom/corporate_rml_header.rml').read()
except:
return """
<header>
<pageTemplate>
<frame id="first" x1="1.3cm" y1="2.5cm" height="23.0cm" width="19cm"/>
<pageGraphics>
<!-- You Logo - Change X,Y,Width and Height -->
<image x="1.3cm" y="27.6cm" height="40.0" >[[company.logo]]</image>
<setFont name="Helvetica" size="8"/>
<fill color="black"/>
<stroke color="black"/>
<lines>1.3cm 27.7cm 20cm 27.7cm</lines>
<drawRightString x="20cm" y="27.8cm">[[ company.rml_header1 ]]</drawRightString>
<drawRightString x="20cm" y="27.8cm">[[ company.rml_header1 ]]</drawRightString>
<drawString x="1.3cm" y="27.2cm">[[ company.partner_id.name ]]</drawString>
<drawString x="1.3cm" y="26.8cm">[[ company.partner_id.address and company.partner_id.address[0].street ]]</drawString>
<drawString x="1.3cm" y="26.4cm">[[ company.partner_id.address and company.partner_id.address[0].zip ]] [[ company.partner_id.address and company.partner_id.address[0].city ]] - [[ company.partner_id.address and company.partner_id.address[0].country_id and company.partner_id.address[0].country_id.name ]]</drawString>
<drawString x="1.3cm" y="26.0cm">Phone:</drawString>
<drawRightString x="7cm" y="26.0cm">[[ company.partner_id.address and company.partner_id.address[0].phone ]]</drawRightString>
<drawString x="1.3cm" y="25.6cm">Mail:</drawString>
<drawRightString x="7cm" y="25.6cm">[[ company.partner_id.address and company.partner_id.address[0].email ]]</drawRightString>
<lines>1.3cm 25.5cm 7cm 25.5cm</lines>
<drawString x="1.3cm" y="27.2cm">[[ company.partner_id.name ]]</drawString>
<drawString x="1.3cm" y="26.8cm">[[ company.partner_id.address and company.partner_id.address[0].street ]]</drawString>
<drawString x="1.3cm" y="26.4cm">[[ company.partner_id.address and company.partner_id.address[0].zip ]] [[ company.partner_id.address and company.partner_id.address[0].city ]] - [[ company.partner_id.address and company.partner_id.address[0].country_id and company.partner_id.address[0].country_id.name ]]</drawString>
<drawString x="1.3cm" y="26.0cm">Phone:</drawString>
<drawRightString x="7cm" y="26.0cm">[[ company.partner_id.address and company.partner_id.address[0].phone ]]</drawRightString>
<drawString x="1.3cm" y="25.6cm">Mail:</drawString>
<drawRightString x="7cm" y="25.6cm">[[ company.partner_id.address and company.partner_id.address[0].email ]]</drawRightString>
<lines>1.3cm 25.5cm 7cm 25.5cm</lines>
<!--page bottom-->
<!--page bottom-->
<lines>1.2cm 2.15cm 19.9cm 2.15cm</lines>
<lines>1.2cm 2.15cm 19.9cm 2.15cm</lines>
<drawCentredString x="10.5cm" y="1.7cm">[[ company.rml_footer1 ]]</drawCentredString>
<drawCentredString x="10.5cm" y="1.25cm">[[ company.rml_footer2 ]]</drawCentredString>
<drawCentredString x="10.5cm" y="0.8cm">Contact : [[ user.name ]] - Page: <pageNumber/></drawCentredString>
</pageGraphics>
</pageTemplate>
<drawCentredString x="10.5cm" y="1.7cm">[[ company.rml_footer1 ]]</drawCentredString>
<drawCentredString x="10.5cm" y="1.25cm">[[ company.rml_footer2 ]]</drawCentredString>
<drawCentredString x="10.5cm" y="0.8cm">Contact : [[ user.name ]] - Page: <pageNumber/></drawCentredString>
</pageGraphics>
</pageTemplate>
</header>"""
_defaults = {
'currency_id': _get_euro,
'rml_header':_get_header,
'rml_header2': _get_header2
}
_defaults = {
'currency_id': _get_euro,
'rml_header':_get_header,
'rml_header2': _get_header2
}
_constraints = [
(_check_recursion, 'Error! You can not create recursive companies.', ['parent_id'])
]
_constraints = [
(_check_recursion, 'Error! You can not create recursive companies.', ['parent_id'])
]
res_company()

View File

@ -39,90 +39,90 @@ import mx.DateTime
from mx.DateTime import RelativeDateTime, now, DateTime, localtime
class res_currency(osv.osv):
def _current_rate(self, cr, uid, ids, name, arg, context={}):
res={}
if 'date' in context:
date=context['date']
else:
date=time.strftime('%Y-%m-%d')
for id in ids:
cr.execute("SELECT currency_id, rate FROM res_currency_rate WHERE currency_id = %d AND name <= '%s' ORDER BY name desc LIMIT 1" % (id, date))
if cr.rowcount:
id, rate=cr.fetchall()[0]
res[id]=rate
else:
res[id]=0
return res
_name = "res.currency"
_description = "Currency"
_columns = {
'name': fields.char('Currency', size=32, required=True),
'code': fields.char('Code', size=3),
'rate': fields.function(_current_rate, method=True, string='Current rate', digits=(12,6),
help='The rate of the currency to the currency of rate 1'),
'rate_ids': fields.one2many('res.currency.rate', 'currency_id', 'Rates'),
'accuracy': fields.integer('Computational Accuracy'),
'rounding': fields.float('Rounding factor', digits=(12,6)),
'active': fields.boolean('Active'),
}
_defaults = {
'active': lambda *a: 1,
}
_order = "code"
def _current_rate(self, cr, uid, ids, name, arg, context={}):
res={}
if 'date' in context:
date=context['date']
else:
date=time.strftime('%Y-%m-%d')
for id in ids:
cr.execute("SELECT currency_id, rate FROM res_currency_rate WHERE currency_id = %d AND name <= '%s' ORDER BY name desc LIMIT 1" % (id, date))
if cr.rowcount:
id, rate=cr.fetchall()[0]
res[id]=rate
else:
res[id]=0
return res
_name = "res.currency"
_description = "Currency"
_columns = {
'name': fields.char('Currency', size=32, required=True),
'code': fields.char('Code', size=3),
'rate': fields.function(_current_rate, method=True, string='Current rate', digits=(12,6),
help='The rate of the currency to the currency of rate 1'),
'rate_ids': fields.one2many('res.currency.rate', 'currency_id', 'Rates'),
'accuracy': fields.integer('Computational Accuracy'),
'rounding': fields.float('Rounding factor', digits=(12,6)),
'active': fields.boolean('Active'),
}
_defaults = {
'active': lambda *a: 1,
}
_order = "code"
def round(self, cr, uid, currency, amount):
return round(amount / currency.rounding) * currency.rounding
def round(self, cr, uid, currency, amount):
return round(amount / currency.rounding) * currency.rounding
def is_zero(self, cr, uid, currency, amount):
return abs(self.round(cr, uid, currency, amount)) < currency.rounding
def is_zero(self, cr, uid, currency, amount):
return abs(self.round(cr, uid, currency, amount)) < currency.rounding
def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount, round=True, context={}):
if not from_currency_id:
from_currency_id = to_currency_id
xc=self.browse(cr, uid, [from_currency_id,to_currency_id], context=context)
from_currency = (xc[0].id == from_currency_id and xc[0]) or xc[1]
to_currency = (xc[0].id == to_currency_id and xc[0]) or xc[1]
if from_currency['rate'] == 0 or to_currency['rate'] == 0:
date = context.get('date', time.strftime('%Y-%m-%d'))
if from_currency['rate'] == 0:
code = from_currency.code
else:
code = to_currency.code
raise osv.except_osv(_('Error'), _('No rate found \n' \
'for the currency: %s \n' \
'at the date: %s') % (code, date))
if to_currency_id==from_currency_id:
if round:
return self.round(cr, uid, to_currency, from_amount)
else:
return from_amount
else:
if round:
return self.round(cr, uid, to_currency, from_amount * to_currency.rate/from_currency.rate)
else:
return (from_amount * to_currency.rate/from_currency.rate)
def name_search(self, cr, uid, name, args=[], operator='ilike', context={}, limit=80):
args2 = args[:]
if name:
args += [('name', operator, name)]
args2 += [('code', operator, name)]
ids = self.search(cr, uid, args, limit=limit)
ids += self.search(cr, uid, args2, limit=limit)
res = self.name_get(cr, uid, ids, context)
return res
def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount, round=True, context={}):
if not from_currency_id:
from_currency_id = to_currency_id
xc=self.browse(cr, uid, [from_currency_id,to_currency_id], context=context)
from_currency = (xc[0].id == from_currency_id and xc[0]) or xc[1]
to_currency = (xc[0].id == to_currency_id and xc[0]) or xc[1]
if from_currency['rate'] == 0 or to_currency['rate'] == 0:
date = context.get('date', time.strftime('%Y-%m-%d'))
if from_currency['rate'] == 0:
code = from_currency.code
else:
code = to_currency.code
raise osv.except_osv(_('Error'), _('No rate found \n' \
'for the currency: %s \n' \
'at the date: %s') % (code, date))
if to_currency_id==from_currency_id:
if round:
return self.round(cr, uid, to_currency, from_amount)
else:
return from_amount
else:
if round:
return self.round(cr, uid, to_currency, from_amount * to_currency.rate/from_currency.rate)
else:
return (from_amount * to_currency.rate/from_currency.rate)
def name_search(self, cr, uid, name, args=[], operator='ilike', context={}, limit=80):
args2 = args[:]
if name:
args += [('name', operator, name)]
args2 += [('code', operator, name)]
ids = self.search(cr, uid, args, limit=limit)
ids += self.search(cr, uid, args2, limit=limit)
res = self.name_get(cr, uid, ids, context)
return res
res_currency()
class res_currency_rate(osv.osv):
_name = "res.currency.rate"
_description = "Currency Rate"
_columns = {
'name': fields.date('Date', required=True, select=True),
'rate': fields.float('Rate', digits=(12,6), required=True,
help='The rate of the currency to the currency of rate 1'),
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
}
_defaults = {
'name': lambda *a: time.strftime('%Y-%m-%d'),
}
_order = "name desc"
_name = "res.currency.rate"
_description = "Currency Rate"
_columns = {
'name': fields.date('Date', required=True, select=True),
'rate': fields.float('Rate', digits=(12,6), required=True,
help='The rate of the currency to the currency of rate 1'),
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
}
_defaults = {
'name': lambda *a: time.strftime('%Y-%m-%d'),
}
_order = "name desc"
res_currency_rate()

View File

@ -30,19 +30,19 @@
from osv import fields, osv
class lang(osv.osv):
_name = "res.lang"
_columns = {
'name': fields.char('Name', size=64, required=True),
'code': fields.char('Code', size=5, required=True),
'translatable': fields.boolean('Translatable'),
'active': fields.boolean('Active'),
'direction': fields.selection([('ltr', 'Left-to-right'), ('rtl', 'Right-to-left')], 'Direction',resuired=True),
}
_defaults = {
'active': lambda *a: 1,
'translatable': lambda *a: 0,
'direction': lambda *a: 'ltr',
}
_name = "res.lang"
_columns = {
'name': fields.char('Name', size=64, required=True),
'code': fields.char('Code', size=5, required=True),
'translatable': fields.boolean('Translatable'),
'active': fields.boolean('Active'),
'direction': fields.selection([('ltr', 'Left-to-right'), ('rtl', 'Right-to-left')], 'Direction',resuired=True),
}
_defaults = {
'active': lambda *a: 1,
'translatable': lambda *a: 0,
'direction': lambda *a: 'ltr',
}
lang()

View File

@ -31,97 +31,97 @@ from osv import osv, fields
import time
def _links_get(self, cr, uid, context={}):
obj = self.pool.get('res.request.link')
ids = obj.search(cr, uid, [])
res = obj.read(cr, uid, ids, ['object', 'name'], context)
return [(r['object'], r['name']) for r in res]
obj = self.pool.get('res.request.link')
ids = obj.search(cr, uid, [])
res = obj.read(cr, uid, ids, ['object', 'name'], context)
return [(r['object'], r['name']) for r in res]
class res_request(osv.osv):
_name = 'res.request'
_name = 'res.request'
def request_send(self, cr, uid, ids, *args):
for id in ids:
cr.execute('update res_request set state=%s,date_sent=%s where id=%d', ('waiting', time.strftime('%Y-%m-%d %H:%M:%S'), id))
cr.execute('select act_from,act_to,body,date_sent from res_request where id=%d', (id,))
values = cr.dictfetchone()
if values['body'] and (len(values['body']) > 128):
values['name'] = values['body'][:125] + '...'
else:
values['name'] = values['body'] or '/'
values['req_id'] = id
self.pool.get('res.request.history').create(cr, uid, values)
return True
def request_send(self, cr, uid, ids, *args):
for id in ids:
cr.execute('update res_request set state=%s,date_sent=%s where id=%d', ('waiting', time.strftime('%Y-%m-%d %H:%M:%S'), id))
cr.execute('select act_from,act_to,body,date_sent from res_request where id=%d', (id,))
values = cr.dictfetchone()
if values['body'] and (len(values['body']) > 128):
values['name'] = values['body'][:125] + '...'
else:
values['name'] = values['body'] or '/'
values['req_id'] = id
self.pool.get('res.request.history').create(cr, uid, values)
return True
def request_reply(self, cr, uid, ids, *args):
for id in ids:
cr.execute("update res_request set state='active', act_from=%d, act_to=act_from, trigger_date=NULL, body='' where id=%d", (uid,id))
return True
def request_reply(self, cr, uid, ids, *args):
for id in ids:
cr.execute("update res_request set state='active', act_from=%d, act_to=act_from, trigger_date=NULL, body='' where id=%d", (uid,id))
return True
def request_close(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {'state':'closed', 'active':False})
return True
def request_close(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {'state':'closed', 'active':False})
return True
    def request_get(self, cr, uid):
        """Return the pending requests for a user as a pair of id lists:
        (requests addressed to uid, requests sent by uid to someone else).
        Only active requests whose trigger_date is due or unset count."""
        cr.execute('select id from res_request where act_to=%d and (trigger_date<=%s or trigger_date is null) and active=True', (uid,time.strftime('%Y-%m-%d')))
        ids = map(lambda x:x[0], cr.fetchall())
        cr.execute('select id from res_request where act_from=%d and (act_to<>%d) and (trigger_date<=%s or trigger_date is null) and active=True', (uid,uid,time.strftime('%Y-%m-%d')))
        ids2 = map(lambda x:x[0], cr.fetchall())
        return (ids, ids2)
_columns = {
'create_date': fields.datetime('Created date', readonly=True),
'name': fields.char('Subject', states={'waiting':[('readonly',True)],'active':[('readonly',True)],'closed':[('readonly',True)]}, required=True, size=128),
'active': fields.boolean('Active'),
'priority': fields.selection([('0','Low'),('1','Normal'),('2','High')], 'Priority', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}, required=True),
'act_from': fields.many2one('res.users', 'From', required=True, readonly=True, states={'closed':[('readonly',True)]}),
'act_to': fields.many2one('res.users', 'To', required=True, states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
'body': fields.text('Request', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
'date_sent': fields.datetime('Date', readonly=True),
'trigger_date': fields.datetime('Trigger Date', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
'ref_partner_id':fields.many2one('res.partner', 'Partner Ref.', states={'closed':[('readonly',True)]}),
'ref_doc1':fields.reference('Document Ref 1', selection=_links_get, size=128, states={'closed':[('readonly',True)]}),
'ref_doc2':fields.reference('Document Ref 2', selection=_links_get, size=128, states={'closed':[('readonly',True)]}),
'state': fields.selection([('draft','draft'),('waiting','waiting'),('active','active'),('closed','closed')], 'State', required=True, readonly=True),
'history': fields.one2many('res.request.history','req_id', 'History')
}
_defaults = {
'act_from': lambda obj,cr,uid,context={}: uid,
'state': lambda obj,cr,uid,context={}: 'draft',
'active': lambda obj,cr,uid,context={}: True,
'priority': lambda obj,cr,uid,context={}: '1',
}
_order = 'priority desc, trigger_date, create_date desc'
_table = 'res_request'
_columns = {
'create_date': fields.datetime('Created date', readonly=True),
'name': fields.char('Subject', states={'waiting':[('readonly',True)],'active':[('readonly',True)],'closed':[('readonly',True)]}, required=True, size=128),
'active': fields.boolean('Active'),
'priority': fields.selection([('0','Low'),('1','Normal'),('2','High')], 'Priority', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}, required=True),
'act_from': fields.many2one('res.users', 'From', required=True, readonly=True, states={'closed':[('readonly',True)]}),
'act_to': fields.many2one('res.users', 'To', required=True, states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
'body': fields.text('Request', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
'date_sent': fields.datetime('Date', readonly=True),
'trigger_date': fields.datetime('Trigger Date', states={'waiting':[('readonly',True)],'closed':[('readonly',True)]}),
'ref_partner_id':fields.many2one('res.partner', 'Partner Ref.', states={'closed':[('readonly',True)]}),
'ref_doc1':fields.reference('Document Ref 1', selection=_links_get, size=128, states={'closed':[('readonly',True)]}),
'ref_doc2':fields.reference('Document Ref 2', selection=_links_get, size=128, states={'closed':[('readonly',True)]}),
'state': fields.selection([('draft','draft'),('waiting','waiting'),('active','active'),('closed','closed')], 'State', required=True, readonly=True),
'history': fields.one2many('res.request.history','req_id', 'History')
}
_defaults = {
'act_from': lambda obj,cr,uid,context={}: uid,
'state': lambda obj,cr,uid,context={}: 'draft',
'active': lambda obj,cr,uid,context={}: True,
'priority': lambda obj,cr,uid,context={}: '1',
}
_order = 'priority desc, trigger_date, create_date desc'
_table = 'res_request'
res_request()
class res_request_link(osv.osv):
    """Configurable list of models a request may reference; presumably the
    source of the _links_get selection used for ref_doc1/ref_doc2 on
    res.request — confirm against _links_get (defined elsewhere)."""
    _name = 'res.request.link'
    _columns = {
        'name': fields.char('Name', size=64, required=True, translate=True),
        # Technical model name, e.g. 'res.partner'.
        'object': fields.char('Object', size=64, required=True),
        'priority': fields.integer('Priority'),
    }
    _defaults = {
        'priority': lambda *a: 5,
    }
    _order = 'priority'
res_request_link()
class res_request_history(osv.osv):
    """One log line per send/reply of a res.request (see request_send)."""
    _name = 'res.request.history'
    _columns = {
        'name': fields.char('Summary', size=128, states={'active':[('readonly',True)],'waiting':[('readonly',True)]}, required=True),
        # Deleting the parent request removes its history (ondelete cascade).
        'req_id': fields.many2one('res.request', 'Request', required=True, ondelete='cascade', select=True),
        'act_from': fields.many2one('res.users', 'From', required=True, readonly=True),
        'act_to': fields.many2one('res.users', 'To', required=True, states={'waiting':[('readonly',True)]}),
        'body': fields.text('Body', states={'waiting':[('readonly',True)]}),
        'date_sent': fields.datetime('Date sent', states={'waiting':[('readonly',True)]}, required=True)
    }
    _defaults = {
        'name': lambda *a: 'NoName',
        'act_from': lambda obj,cr,uid,context={}: uid,
        'act_to': lambda obj,cr,uid,context={}: uid,
        'date_sent': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
    }
res_request_history()

View File

@ -32,243 +32,243 @@ import tools
import pytz
class groups(osv.osv):
    """User groups: carry model access rules, record rules and menu access."""
    _name = "res.groups"
    _columns = {
        'name': fields.char('Group Name', size=64, required=True),
        'model_access': fields.one2many('ir.model.access', 'group_id', 'Access Controls'),
        'rule_groups': fields.many2many('ir.rule.group', 'group_rule_group_rel',
            'group_id', 'rule_group_id', 'Rules', domain="[('global', '<>', True)]"),
        'menu_access': fields.many2many('ir.ui.menu', 'ir_ui_menu_group_rel', 'gid', 'menu_id', 'Access Menu'),
        'comment' : fields.text('Comment',size=250),
    }
    _sql_constraints = [
        ('name_uniq', 'unique (name)', 'The name of the group must be unique !')
    ]

    def write(self, cr, uid, ids, vals, context=None):
        # Names starting with '-' are rejected; presumably '-' is reserved
        # syntax elsewhere (e.g. group removal in views) — TODO confirm.
        if 'name' in vals:
            if vals['name'].startswith('-'):
                raise osv.except_osv(_('Error'),
                        _('The name of the group can not start with "-"'))
        res = super(groups, self).write(cr, uid, ids, vals, context=context)
        # Restart the cache on the company_get method
        self.pool.get('ir.rule').domain_get()
        return res

    def create(self, cr, uid, vals, context=None):
        # Same '-' prefix restriction as write().
        if 'name' in vals:
            if vals['name'].startswith('-'):
                raise osv.except_osv(_('Error'),
                        _('The name of the group can not start with "-"'))
        return super(groups, self).create(cr, uid, vals, context=context)
groups()
class roles(osv.osv):
    """Hierarchical user roles (parent/child tree)."""
    _name = "res.roles"
    _columns = {
        'name': fields.char('Role Name', size=64, required=True),
        'parent_id': fields.many2one('res.roles', 'Parent', select=True),
        'child_id': fields.one2many('res.roles', 'parent_id', 'Childs')
    }
    _defaults = {
    }

    def check(self, cr, uid, ids, role_id):
        """Return True when role_id, or any ancestor of it, is in ids.

        Walks the parent chain recursively via direct SQL."""
        if role_id in ids:
            return True
        cr.execute('select parent_id from res_roles where id=%d', (role_id,))
        roles = cr.fetchone()[0]
        if roles:
            return self.check(cr, uid, ids, roles)
        return False
roles()
def _lang_get(self, cr, uid, context={}):
    """Selection helper for users.context_lang: list of installed
    languages as (code, name) pairs."""
    obj = self.pool.get('res.lang')
    ids = obj.search(cr, uid, [])
    res = obj.read(cr, uid, ids, ['code', 'name'], context)
    res = [(r['code'], r['name']) for r in res]
    return res
def _tz_get(self,cr,uid, context={}):
    """Selection helper for users.context_tz: every pytz timezone name."""
    return [(x, x) for x in pytz.all_timezones]
class users(osv.osv):
    """System users.

    Notable behaviours: passwords are masked on read() for callers without
    write access; a user may update his own preference fields without
    write access (write() escalates to uid 1 for those); the root user
    (id 1) can never be deleted.
    """
    _name = "res.users"
    _log_access = False
    _columns = {
        'name': fields.char('Name', size=64, required=True, select=True),
        'login': fields.char('Login', size=64, required=True),
        'password': fields.char('Password', size=64, invisible=True),
        'signature': fields.text('Signature', size=64),
        'address_id': fields.many2one('res.partner.address', 'Address'),
        'active': fields.boolean('Active'),
        'action_id': fields.many2one('ir.actions.actions', 'Home Action'),
        'menu_id': fields.many2one('ir.actions.actions', 'Menu Action'),
        'groups_id': fields.many2many('res.groups', 'res_groups_users_rel', 'uid', 'gid', 'Groups'),
        'roles_id': fields.many2many('res.roles', 'res_roles_users_rel', 'uid', 'rid', 'Roles'),
        'company_id': fields.many2one('res.company', 'Company'),
        # context_* fields are collected by context_get() below.
        'context_lang': fields.selection(_lang_get, 'Language', required=True),
        'context_tz': fields.selection(_tz_get, 'Timezone', size=64)
    }

    def read(self,cr, uid, ids, fields=None, context=None, load='_classic_read'):
        """Standard read, but mask 'password' as '********' for callers
        lacking write access on res.users."""
        result = super(users, self).read(cr, uid, ids, fields, context, load)
        canwrite = self.pool.get('ir.model.access').check(cr, uid, 'res.users', 'write', raise_exception=False)
        if not canwrite:
            for r in result:
                if 'password' in r:
                    r['password'] = '********'
        return result

    _sql_constraints = [
        ('login_key', 'UNIQUE (login)', 'You can not have two users with the same login !')
    ]

    def _get_action(self,cr, uid, context={}):
        # First menu-usage ir.ui.menu id, or False.
        ids = self.pool.get('ir.ui.menu').search(cr, uid, [('usage','=','menu')])
        return ids and ids[0] or False

    def _get_company(self,cr, uid, context={}):
        # Default company: the creating user's own company.
        return self.pool.get('res.users').browse(cr, uid, uid, context).company_id.id

    def _get_menu(self,cr, uid, context={}):
        # First menu-usage window action id, or False.
        ids = self.pool.get('ir.actions.act_window').search(cr, uid, [('usage','=','menu')])
        return ids and ids[0] or False

    _defaults = {
        'password' : lambda obj,cr,uid,context={} : '',
        'context_lang': lambda *args: 'en_US',
        'active' : lambda obj,cr,uid,context={} : True,
        'menu_id': _get_menu,
        'action_id': _get_menu,
        'company_id': _get_company,
    }

    def company_get(self, cr, uid, uid2):
        """Company id of the requesting user; result is memoized by
        tools.cache below."""
        company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id
        return company_id
    company_get = tools.cache()(company_get)

    def write(self, cr, uid, ids, values, *args, **argv):
        # A user editing only his own preference fields is escalated to
        # uid 1 so no write access on res.users is required.
        if (ids == [uid]):
            ok = True
            for k in values.keys():
                if k not in ('password','signature','action_id', 'context_lang', 'context_tz'):
                    ok=False
            if ok:
                uid = 1
        res = super(users, self).write(cr, uid, ids, values, *args, **argv)
        # NOTE(review): presumably invalidates the tools.cache wrapper on
        # company_get — confirm against the tools.cache implementation.
        self.company_get()
        # Restart the cache on the company_get method
        self.pool.get('ir.rule').domain_get()
        return res

    def unlink(self, cr, uid, ids):
        """Delete users, refusing to ever remove the root user (id 1)."""
        if 1 in ids:
            raise osv.except_osv(_('Can not remove root user!'), _('You can not remove the root user as it is used internally for resources created by Tiny ERP (updates, module installation, ...)'))
        return super(users, self).unlink(cr, uid, ids)

    def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=80):
        """Search by exact login first, then fall back to matching the
        name with the given operator."""
        if not args:
            args=[]
        if not context:
            context={}
        ids = []
        if name:
            ids = self.search(cr, user, [('login','=',name)]+ args, limit=limit)
        if not ids:
            ids = self.search(cr, user, [('name',operator,name)]+ args, limit=limit)
        return self.name_get(cr, user, ids)

    def copy(self, cr, uid, id, default=None, context={}):
        """Duplicate a user, suffixing the login with ' (copy)' to keep
        the login unique constraint satisfied."""
        # Bug fix: default is None when the caller omits it, and
        # None.update(...) raised AttributeError.
        if default is None:
            default = {}
        login = self.read(cr, uid, [id], ['login'])[0]['login']
        default.update({'login': login+' (copy)'})
        return super(users, self).copy(cr, uid, id, default, context)

    def context_get(self, cr, uid, context={}):
        """Build the user's session context from the context_* columns,
        keyed without the 'context_' prefix (e.g. 'lang', 'tz')."""
        user = self.browse(cr, uid, uid, context)
        result = {}
        for k in self._columns.keys():
            if k.startswith('context_'):
                result[k[8:]] = getattr(user,k)
        return result

    def action_get(self, cr, uid, context={}):
        # Resolve the 'action_res_users_my' XML id to its action id.
        dataobj = self.pool.get('ir.model.data')
        data_id = dataobj._get_id(cr, 1, 'base', 'action_res_users_my')
        return dataobj.browse(cr, uid, data_id, context).res_id

    def action_next(self,cr,uid,ids,context=None):
        # Open the next step of the module configuration wizard.
        return{
            'view_type': 'form',
            "view_mode": 'form',
            'res_model': 'ir.module.module.configuration.wizard',
            'type': 'ir.actions.act_window',
            'target':'new',
        }

    def action_continue(self,cr,uid,ids,context={}):
        # Same target as action_next: resume the configuration wizard.
        return {
            'view_type': 'form',
            "view_mode": 'form',
            'res_model': 'ir.module.module.configuration.wizard',
            'type': 'ir.actions.act_window',
            'target':'new',
        }

    def action_new(self,cr,uid,ids,context={}):
        # Open the user confirmation form in a new window.
        return {
            'view_type': 'form',
            "view_mode": 'form',
            'res_model': 'res.users',
            'view_id':self.pool.get('ir.ui.view').search(cr,uid,[('name','=','res.users.confirm.form')]),
            'type': 'ir.actions.act_window',
            'target':'new',
        }
users()
class groups2(osv.osv):
    """Second half of the res.groups definition: the inverse side of the
    users<->groups many2many, added after res.users exists."""
    _inherit = 'res.groups'
    _columns = {
        'users': fields.many2many('res.users', 'res_groups_users_rel', 'gid', 'uid', 'Users'),
    }
groups2()
class res_config_view(osv.osv_memory):
    """Setup wizard step: choose between the simple and extended
    interface views."""
    _name='res.config.view'
    _columns = {
        'name':fields.char('Name', size=64),
        'view': fields.selection([('simple','Simple'),('extended','Extended')], 'View', required=True ),
    }
    _defaults={
        'view':lambda *args: 'simple',
    }

    def action_cancel(self,cr,uid,ids,conect=None):
        """Skip this step and return to the configuration wizard."""
        # Bug fix: removed a leftover debug print statement that wrote
        # ' Cancel action' to the server's stdout.
        return {
            'view_type': 'form',
            "view_mode": 'form',
            'res_model': 'ir.module.module.configuration.wizard',
            'type': 'ir.actions.act_window',
            'target':'new',
        }

    def action_set(self, cr, uid, ids, context=None):
        """Apply the choice: when 'extended' is selected, add user id 3 to
        the 'Extended View' group, then continue the wizard.

        NOTE(review): user id 3 is hard-coded — verify which user this is
        meant to target."""
        res=self.read(cr,uid,ids)[0]
        users_obj = self.pool.get('res.users')
        group_obj=self.pool.get('res.groups')
        if 'view' in res and res['view'] and res['view']=='extended':
            group_ids=group_obj.search(cr,uid,[('name','=','Extended View')])
            if group_ids and len(group_ids):
                users_obj.write(cr, uid, [3],{
                    'groups_id':[(4,group_ids[0])]
                }, context=context)
        return {
            'view_type': 'form',
            "view_mode": 'form',
            'res_model': 'ir.module.module.configuration.wizard',
            'type': 'ir.actions.act_window',
            'target':'new',
        }
res_config_view()

View File

@ -45,13 +45,13 @@ done = []
# Emit the addon dependency graph in Graphviz dot format, doing a
# breadth-first walk of each module's __terp__.py 'depends' list.
print 'digraph G {'
while len(modules):
    f = modules.pop(0)
    done.append(f)
    if os.path.isfile(os.path.join(f,"__terp__.py")):
        # NOTE(review): eval() of the __terp__.py content executes
        # arbitrary expressions from the addons directory — trusted
        # input is assumed here.
        info=eval(file(os.path.join(f,"__terp__.py")).read())
        if info.get('installable', True):
            for name in info['depends']:
                if name not in done+modules:
                    modules.append(name)
                print '\t%s -> %s;' % (f, name)
print '}'

View File

@ -31,14 +31,14 @@ import osv
import pooler
def ir_set(cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None):
    """Convenience wrapper around ir.values.set for the current database."""
    obj = pooler.get_pool(cr.dbname).get('ir.values')
    return obj.set(cr, uid, key, key2, name, models, value, replace, isobject, meta)
def ir_del(cr, uid, id):
    """Convenience wrapper: delete one ir.values record by id."""
    obj = pooler.get_pool(cr.dbname).get('ir.values')
    return obj.unlink(cr, uid, [id])
def ir_get(cr, uid, key, key2, models, meta=False, context={}, res_id_req=False):
    """Convenience wrapper around ir.values.get for the current database."""
    obj = pooler.get_pool(cr.dbname).get('ir.values')
    res = obj.get(cr, uid, key, key2, models, meta=meta, context=context, res_id_req=res_id_req)
    return res

View File

@ -42,97 +42,97 @@ _res_id=1
_res={}
class ServiceEndPointCall(object):
    """Callable wrapper for one exported service method: calling it stores
    the result in the module-level _res table under the call id and
    returns that id."""
    def __init__(self,id,method):
        self._id=id
        self._meth=method
    def __call__(self,*args):
        _res[self._id]=self._meth(*args)
        return self._id
class ServiceEndPoint(object):
    """Endpoint bound to one call id: snapshots the exported methods of
    the named service and hands out ServiceEndPointCall wrappers on
    attribute access."""
    def __init__(self, name, id):
        self._id = id
        self._meth={}
        s=_service[name]
        for m in s._method:
            self._meth[m]=s._method[m]
    def __getattr__(self, name):
        # Unknown attributes raise KeyError from self._meth lookup.
        return ServiceEndPointCall(self._id, self._meth[name])
class Service(object):
    """Base service: registers itself by name in the module-level
    _service dict and exposes a chosen subset of methods. Several methods
    (processFailure, resumeResponse, ...) are empty hooks."""
    # Class-wide counter for endpoint call ids (wraps at 2**16).
    _serviceEndPointID = 0

    def __init__(self, name, audience=''):
        _service[name]=self
        self.__name=name
        self._method={}
        self.exportedMethods=None
        self._response_process=None
        self._response_process_id=None
        self._response=None

    def joinGroup(self,name):
        # Register this service under a named group in _group.
        if not name in _group:
            _group[name]={}
        _group[name][self.__name]=self

    def exportMethod(self, m):
        # Expose callable m under its function name.
        if callable(m):
            self._method[m.__name__]=m

    def serviceEndPoint(self,s):
        # Allocate the next (wrapping) call id and bind an endpoint to it.
        if Service._serviceEndPointID >= 2**16:
            Service._serviceEndPointID = 0
        Service._serviceEndPointID += 1
        return ServiceEndPoint(s, self._serviceEndPointID)

    def conversationId(self):
        return 1

    def processResponse(self,s,id):
        # Remember the callback and call id for suspendResponse below.
        self._response_process, self._response_process_id = s, id

    def processFailure(self,s,id):
        pass

    def resumeResponse(self,s):
        pass

    def cancelResponse(self,s):
        pass

    def suspendResponse(self,s):
        # Deliver the stored result to the registered callback (if any),
        # then invoke s with the call id and keep its result.
        if self._response_process:
            self._response_process(self._response_process_id,
                    _res[self._response_process_id])
        self._response_process=None
        self._response=s(self._response_process_id)

    def abortResponse(self, error, description, origin, details):
        # In debug mode re-raise the current exception (preserving the
        # traceback); otherwise wrap the details in a generic Exception.
        import tools
        if not tools.config['debug_mode']:
            raise Exception("%s -- %s\n\n%s"%(origin,description,details))
        else:
            raise

    def currentFailure(self,s):
        pass
class LocalService(Service):
    """In-process proxy to a registered service: each exported method is
    bound directly as an attribute. Note Service.__init__ is deliberately
    not called, so this does not re-register under the name."""
    def __init__(self, name):
        self.__name=name
        s=_service[name]
        self._service=s
        for m in s._method:
            setattr(self,m,s._method[m])
class ServiceUnavailable(Exception):
    # Marker exception for unavailable services; no extra behavior.
    pass
def service_exist(name):
    """True when a service is registered under *name* and is truthy."""
    return bool(_service.get(name))
def get_rpc_paths():
    """One '/xmlrpc/<name>' URL path per registered service."""
    return ['/xmlrpc/%s' % s for s in _service]
LOG_DEBUG='debug'
LOG_INFO='info'
@ -141,269 +141,269 @@ LOG_ERROR='error'
LOG_CRITICAL='critical'
def init_logger():
    """Configure the root logger: write to the configured logfile when
    set (creating its directory if needed), else to stdout; any failure
    to open the logfile falls back to stdout with a note on stderr."""
    from tools import config
    import os

    if config['logfile']:
        logf = config['logfile']
        # test if the directories exist, else create them
        try:
            if not os.path.exists(os.path.dirname(logf)):
                os.makedirs(os.path.dirname(logf))
            try:
                fd = open(logf, 'a')
                handler = logging.StreamHandler(fd)
            except IOError:
                sys.stderr.write("ERROR: couldn't open the logfile\n")
                handler = logging.StreamHandler(sys.stdout)
        except OSError:
            sys.stderr.write("ERROR: couldn't create the logfile directory\n")
            handler = logging.StreamHandler(sys.stdout)
    else:
        handler = logging.StreamHandler(sys.stdout)

    # create a format for log messages and dates
    formatter = logging.Formatter('[%(asctime)s] %(levelname)s:%(name)s:%(message)s', '%a %b %d %H:%M:%S %Y')

    # tell the handler to use this format
    handler.setFormatter(formatter)

    # add the handler to the root logger
    logging.getLogger().addHandler(handler)
    logging.getLogger().setLevel(logging.INFO)
class Logger(object):
    """Thin facade over the stdlib logging module."""
    def notifyChannel(self,name,level,msg):
        # level is the logging method name ('debug', 'info', ...).
        log = logging.getLogger(name)
        getattr(log,level)(msg)
class Agent(object):
    """Simple scheduler built on threading.Timer; timers are tracked in a
    class-wide list so quit() can cancel them all."""
    _timers = []
    _logger = Logger()

    def setAlarm(self, fn, dt, args=None, kwargs=None):
        """Schedule fn(*args, **kwargs) at absolute epoch time dt;
        timestamps already in the past are silently dropped."""
        if not args:
            args=[]
        if not kwargs:
            kwargs={}
        wait = dt - time.time()
        if wait > 0:
            self._logger.notifyChannel('timers', LOG_DEBUG, "Job scheduled in %s seconds for %s.%s" % (wait, fn.im_class.__name__, fn.func_name))
            timer = threading.Timer(wait, fn, args, kwargs)
            timer.start()
            self._timers.append(timer)
        # Garbage-collect finished timers on every call.
        for timer in self._timers[:]:
            if not timer.isAlive():
                self._timers.remove(timer)

    def quit(cls):
        # Cancel every pending timer (called at server shutdown).
        for timer in cls._timers:
            timer.cancel()
    quit=classmethod(quit)
class RpcGateway(object):
    # Named gateway stub; only records its name.
    def __init__(self, name):
        self.name=name
class Dispatcher(object):
    # Interface placeholder: every method is a no-op.
    def __init__(self):
        pass
    def monitor(self,signal):
        pass
    def run(self):
        pass
class xmlrpc(object):
    # Namespace holder wrapping an RpcGateway stub (same shape as the
    # module-level RpcGateway above).
    class RpcGateway(object):
        def __init__(self, name):
            self.name=name
class GenericXMLRPCRequestHandler:
    def _dispatch(self, method, params):
        """Route an XML-RPC call to the LocalService named by the last
        component of the request path; a non-None _service._response set
        during the call overrides the return value. Any exception is
        re-raised as an xmlrpclib.Fault carrying the full traceback (and
        drops into pdb post-mortem in debug mode)."""
        import traceback
        try:
            n=self.path.split("/")[-1]
            s=LocalService(n)
            m=getattr(s,method)
            s._service._response=None
            r=m(*params)
            res=s._service._response
            if res!=None:
                r=res
            return r
        except Exception, e:
            tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
                sys.exc_type, sys.exc_value, sys.exc_traceback))
            s=str(e)
            import tools
            if tools.config['debug_mode']:
                import pdb
                tb = sys.exc_info()[2]
                pdb.post_mortem(tb)
            raise xmlrpclib.Fault(s, tb_s)
class SimpleXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
        SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
    # Restrict accepted URL paths to the registered /xmlrpc/<service>
    # endpoints (note: set on the stdlib base class, so it is global).
    SimpleXMLRPCServer.SimpleXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
class SimpleThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
SimpleXMLRPCServer.SimpleXMLRPCServer):
SimpleXMLRPCServer.SimpleXMLRPCServer):
def server_bind(self):
self.socket.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
SimpleXMLRPCServer.SimpleXMLRPCServer.server_bind(self)
def server_bind(self):
self.socket.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
SimpleXMLRPCServer.SimpleXMLRPCServer.server_bind(self)
class HttpDaemon(threading.Thread):
def __init__(self, interface,port, secure=False):
threading.Thread.__init__(self)
self.__port=port
self.__interface=interface
self.secure = secure
if secure:
from ssl import SecureXMLRPCServer
class SecureXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
SecureXMLRPCServer.SecureXMLRPCRequestHandler):
SecureXMLRPCServer.SecureXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
class SecureThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
SecureXMLRPCServer.SecureXMLRPCServer):
def __init__(self, interface,port, secure=False):
threading.Thread.__init__(self)
self.__port=port
self.__interface=interface
self.secure = secure
if secure:
from ssl import SecureXMLRPCServer
class SecureXMLRPCRequestHandler(GenericXMLRPCRequestHandler,
SecureXMLRPCServer.SecureXMLRPCRequestHandler):
SecureXMLRPCServer.SecureXMLRPCRequestHandler.rpc_paths = get_rpc_paths()
class SecureThreadedXMLRPCServer(SocketServer.ThreadingMixIn,
SecureXMLRPCServer.SecureXMLRPCServer):
def server_bind(self):
self.socket.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
SecureXMLRPCServer.SecureXMLRPCServer.server_bind(self)
def server_bind(self):
self.socket.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
SecureXMLRPCServer.SecureXMLRPCServer.server_bind(self)
self.server = SecureThreadedXMLRPCServer((interface, port),
SecureXMLRPCRequestHandler,0)
else:
self.server = SimpleThreadedXMLRPCServer((interface, port),
SimpleXMLRPCRequestHandler,0)
self.server = SecureThreadedXMLRPCServer((interface, port),
SecureXMLRPCRequestHandler,0)
else:
self.server = SimpleThreadedXMLRPCServer((interface, port),
SimpleXMLRPCRequestHandler,0)
def attach(self,path,gw):
pass
def attach(self,path,gw):
pass
def stop(self):
self.running = False
if os.name <> 'nt':
if hasattr(socket, 'SHUT_RDWR'):
if self.secure:
self.server.socket.sock_shutdown(socket.SHUT_RDWR)
else:
self.server.socket.shutdown(socket.SHUT_RDWR)
else:
if self.secure:
self.server.socket.sock_shutdown(2)
else:
self.server.socket.shutdown(2)
self.server.socket.close()
def stop(self):
self.running = False
if os.name <> 'nt':
if hasattr(socket, 'SHUT_RDWR'):
if self.secure:
self.server.socket.sock_shutdown(socket.SHUT_RDWR)
else:
self.server.socket.shutdown(socket.SHUT_RDWR)
else:
if self.secure:
self.server.socket.sock_shutdown(2)
else:
self.server.socket.shutdown(2)
self.server.socket.close()
def run(self):
self.server.register_introspection_functions()
def run(self):
self.server.register_introspection_functions()
self.running = True
while self.running:
self.server.handle_request()
return True
self.running = True
while self.running:
self.server.handle_request()
return True
# If the server need to be run recursively
#
#signal.signal(signal.SIGALRM, self.my_handler)
#signal.alarm(6)
#while True:
# self.server.handle_request()
#signal.alarm(0) # Disable the alarm
# If the server need to be run recursively
#
#signal.signal(signal.SIGALRM, self.my_handler)
#signal.alarm(6)
#while True:
# self.server.handle_request()
#signal.alarm(0) # Disable the alarm
import tiny_socket
class TinySocketClientThread(threading.Thread):
def __init__(self, sock, threads):
threading.Thread.__init__(self)
self.sock = sock
self.threads = threads
def __init__(self, sock, threads):
threading.Thread.__init__(self)
self.sock = sock
self.threads = threads
def run(self):
import traceback
import time
import select
self.running = True
try:
ts = tiny_socket.mysocket(self.sock)
except:
self.sock.close()
self.threads.remove(self)
return False
while self.running:
try:
msg = ts.myreceive()
except:
self.sock.close()
self.threads.remove(self)
return False
try:
s=LocalService(msg[0])
m=getattr(s,msg[1])
s._service._response=None
r=m(*msg[2:])
res=s._service._response
if res!=None:
r=res
ts.mysend(r)
except Exception, e:
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
s=str(e)
import tools
if tools.config['debug_mode']:
import pdb
tb = sys.exc_info()[2]
pdb.post_mortem(tb)
ts.mysend(e, exception=True, traceback=tb_s)
except:
pass
self.sock.close()
self.threads.remove(self)
return True
def run(self):
import traceback
import time
import select
self.running = True
try:
ts = tiny_socket.mysocket(self.sock)
except:
self.sock.close()
self.threads.remove(self)
return False
while self.running:
try:
msg = ts.myreceive()
except:
self.sock.close()
self.threads.remove(self)
return False
try:
s=LocalService(msg[0])
m=getattr(s,msg[1])
s._service._response=None
r=m(*msg[2:])
res=s._service._response
if res!=None:
r=res
ts.mysend(r)
except Exception, e:
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
s=str(e)
import tools
if tools.config['debug_mode']:
import pdb
tb = sys.exc_info()[2]
pdb.post_mortem(tb)
ts.mysend(e, exception=True, traceback=tb_s)
except:
pass
self.sock.close()
self.threads.remove(self)
return True
def stop(self):
self.running = False
def stop(self):
self.running = False
class TinySocketServerThread(threading.Thread):
def __init__(self, interface, port, secure=False):
threading.Thread.__init__(self)
self.__port=port
self.__interface=interface
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind((self.__interface, self.__port))
self.socket.listen(5)
self.threads = []
def __init__(self, interface, port, secure=False):
threading.Thread.__init__(self)
self.__port=port
self.__interface=interface
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind((self.__interface, self.__port))
self.socket.listen(5)
self.threads = []
def run(self):
import select
try:
self.running = True
while self.running:
(clientsocket, address) = self.socket.accept()
ct = TinySocketClientThread(clientsocket, self.threads)
self.threads.append(ct)
ct.start()
self.socket.close()
except Exception, e:
self.socket.close()
return False
def run(self):
import select
try:
self.running = True
while self.running:
(clientsocket, address) = self.socket.accept()
ct = TinySocketClientThread(clientsocket, self.threads)
self.threads.append(ct)
ct.start()
self.socket.close()
except Exception, e:
self.socket.close()
return False
def stop(self):
self.running=False
for t in self.threads:
t.stop()
try:
if hasattr(socket, 'SHUT_RDWR'):
self.socket.shutdown(socket.SHUT_RDWR)
else:
self.socket.shutdown(2)
self.socket.close()
except:
return False
def stop(self):
self.running=False
for t in self.threads:
t.stop()
try:
if hasattr(socket, 'SHUT_RDWR'):
self.socket.shutdown(socket.SHUT_RDWR)
else:
self.socket.shutdown(2)
self.socket.close()
except:
return False
# vim:noexpandtab:

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -45,315 +45,315 @@ module_class_list = {}
class_pool = {}
class except_osv(Exception):
def __init__(self, name, value, exc_type='warning'):
self.name = name
self.exc_type = exc_type
self.value = value
self.args = (exc_type,name)
def __init__(self, name, value, exc_type='warning'):
self.name = name
self.exc_type = exc_type
self.value = value
self.args = (exc_type,name)
class osv_pool(netsvc.Service):
def __init__(self):
self.obj_pool = {}
self.module_object_list = {}
self.created = []
self._sql_error = {}
self._store_function = {}
netsvc.Service.__init__(self, 'object_proxy', audience='')
self.joinGroup('web-services')
self.exportMethod(self.exportedMethods)
self.exportMethod(self.obj_list)
self.exportMethod(self.exec_workflow)
self.exportMethod(self.execute)
self.exportMethod(self.execute_cr)
def __init__(self):
self.obj_pool = {}
self.module_object_list = {}
self.created = []
self._sql_error = {}
self._store_function = {}
netsvc.Service.__init__(self, 'object_proxy', audience='')
self.joinGroup('web-services')
self.exportMethod(self.exportedMethods)
self.exportMethod(self.obj_list)
self.exportMethod(self.exec_workflow)
self.exportMethod(self.execute)
self.exportMethod(self.execute_cr)
def execute_cr(self, cr, uid, obj, method, *args, **kw):
try:
object = pooler.get_pool(cr.dbname).get(obj)
if not object:
self.abortResponse(1, 'Object Error', 'warning',
'Object %s doesn\'t exist' % str(obj))
return getattr(object,method)(cr, uid, *args, **kw)
except orm.except_orm, inst:
self.abortResponse(1, inst.name, 'warning', inst.value)
except except_osv, inst:
self.abortResponse(1, inst.name, inst.exc_type, inst.value)
except psycopg.IntegrityError, inst:
for key in self._sql_error.keys():
if key in inst[0]:
self.abortResponse(1, 'Constraint Error', 'warning',
self._sql_error[key])
self.abortResponse(1, 'Integrity Error', 'warning', inst[0])
except Exception, e:
import traceback
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
sys.exc_type, sys.exc_value, sys.exc_traceback))
logger = Logger()
logger.notifyChannel("web-services", LOG_ERROR,
'Exception in call: ' + tb_s)
raise
def execute_cr(self, cr, uid, obj, method, *args, **kw):
try:
object = pooler.get_pool(cr.dbname).get(obj)
if not object:
self.abortResponse(1, 'Object Error', 'warning',
'Object %s doesn\'t exist' % str(obj))
return getattr(object,method)(cr, uid, *args, **kw)
except orm.except_orm, inst:
self.abortResponse(1, inst.name, 'warning', inst.value)
except except_osv, inst:
self.abortResponse(1, inst.name, inst.exc_type, inst.value)
except psycopg.IntegrityError, inst:
for key in self._sql_error.keys():
if key in inst[0]:
self.abortResponse(1, 'Constraint Error', 'warning',
self._sql_error[key])
self.abortResponse(1, 'Integrity Error', 'warning', inst[0])
except Exception, e:
import traceback
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
sys.exc_type, sys.exc_value, sys.exc_traceback))
logger = Logger()
logger.notifyChannel("web-services", LOG_ERROR,
'Exception in call: ' + tb_s)
raise
def execute(self, db, uid, obj, method, *args, **kw):
db, pool = pooler.get_db_and_pool(db)
cr = db.cursor()
try:
try:
res = pool.execute_cr(cr, uid, obj, method, *args, **kw)
cr.commit()
except Exception:
cr.rollback()
raise
finally:
cr.close()
return res
def execute(self, db, uid, obj, method, *args, **kw):
db, pool = pooler.get_db_and_pool(db)
cr = db.cursor()
try:
try:
res = pool.execute_cr(cr, uid, obj, method, *args, **kw)
cr.commit()
except Exception:
cr.rollback()
raise
finally:
cr.close()
return res
def exec_workflow_cr(self, cr, uid, obj, method, *args):
wf_service = netsvc.LocalService("workflow")
return wf_service.trg_validate(uid, obj, args[0], method, cr)
def exec_workflow_cr(self, cr, uid, obj, method, *args):
wf_service = netsvc.LocalService("workflow")
return wf_service.trg_validate(uid, obj, args[0], method, cr)
def exec_workflow(self, db, uid, obj, method, *args):
cr = pooler.get_db(db).cursor()
try:
try:
res = self.exec_workflow_cr(cr, uid, obj, method, *args)
cr.commit()
except orm.except_orm, inst:
cr.rollback()
self.abortResponse(1, inst.name, 'warning', inst.value)
except except_osv, inst:
cr.rollback()
self.abortResponse(1, inst.name, inst[0], inst.value)
finally:
cr.close()
return res
def exec_workflow(self, db, uid, obj, method, *args):
cr = pooler.get_db(db).cursor()
try:
try:
res = self.exec_workflow_cr(cr, uid, obj, method, *args)
cr.commit()
except orm.except_orm, inst:
cr.rollback()
self.abortResponse(1, inst.name, 'warning', inst.value)
except except_osv, inst:
cr.rollback()
self.abortResponse(1, inst.name, inst[0], inst.value)
finally:
cr.close()
return res
def obj_list(self):
return self.obj_pool.keys()
def obj_list(self):
return self.obj_pool.keys()
# adds a new object instance to the object pool.
# if it already existed, the instance is replaced
def add(self, name, obj_inst):
if self.obj_pool.has_key(name):
del self.obj_pool[name]
self.obj_pool[name] = obj_inst
# adds a new object instance to the object pool.
# if it already existed, the instance is replaced
def add(self, name, obj_inst):
if self.obj_pool.has_key(name):
del self.obj_pool[name]
self.obj_pool[name] = obj_inst
module = str(obj_inst.__class__)[6:]
module = module[:len(module)-1]
module = module.split('.')[0][2:]
self.module_object_list.setdefault(module, []).append(obj_inst)
module = str(obj_inst.__class__)[6:]
module = module[:len(module)-1]
module = module.split('.')[0][2:]
self.module_object_list.setdefault(module, []).append(obj_inst)
def get(self, name):
obj = self.obj_pool.get(name, None)
def get(self, name):
obj = self.obj_pool.get(name, None)
# We cannot uncomment this line because it breaks initialisation since objects do not initialize
# in the correct order and the ORM doesnt support correctly when some objets do not exist yet
# assert obj, "object %s does not exist !" % name
return obj
# assert obj, "object %s does not exist !" % name
return obj
#TODO: pass a list of modules to load
def instanciate(self, module, cr):
# print "module list:", module_list
# for module in module_list:
res = []
class_list = module_class_list.get(module, [])
# if module not in self.module_object_list:
# print "%s class_list:" % module, class_list
for klass in class_list:
res.append(klass.createInstance(self, module, cr))
return res
# else:
# print "skipping module", module
#TODO: pass a list of modules to load
def instanciate(self, module, cr):
# print "module list:", module_list
# for module in module_list:
res = []
class_list = module_class_list.get(module, [])
# if module not in self.module_object_list:
# print "%s class_list:" % module, class_list
for klass in class_list:
res.append(klass.createInstance(self, module, cr))
return res
# else:
# print "skipping module", module
#pooler.get_pool(cr.dbname) = osv_pool()
class osv_memory(orm.orm_memory):
#__metaclass__ = inheritor
def __new__(cls):
module = str(cls)[6:]
module = module[:len(module)-1]
module = module.split('.')[0][2:]
if not hasattr(cls, '_module'):
cls._module = module
module_class_list.setdefault(cls._module, []).append(cls)
class_pool[cls._name] = cls
if module not in module_list:
module_list.append(cls._module)
return None
#__metaclass__ = inheritor
def __new__(cls):
module = str(cls)[6:]
module = module[:len(module)-1]
module = module.split('.')[0][2:]
if not hasattr(cls, '_module'):
cls._module = module
module_class_list.setdefault(cls._module, []).append(cls)
class_pool[cls._name] = cls
if module not in module_list:
module_list.append(cls._module)
return None
#
# Goal: try to apply inheritancy at the instanciation level and
# put objects in the pool var
#
def createInstance(cls, pool, module, cr):
name = hasattr(cls,'_name') and cls._name or cls._inherit
parent_name = hasattr(cls, '_inherit') and cls._inherit
if parent_name:
print 'Inherit not supported in osv_memory object !'
obj = object.__new__(cls)
obj.__init__(pool, cr)
return obj
createInstance = classmethod(createInstance)
#
# Goal: try to apply inheritancy at the instanciation level and
# put objects in the pool var
#
def createInstance(cls, pool, module, cr):
name = hasattr(cls,'_name') and cls._name or cls._inherit
parent_name = hasattr(cls, '_inherit') and cls._inherit
if parent_name:
print 'Inherit not supported in osv_memory object !'
obj = object.__new__(cls)
obj.__init__(pool, cr)
return obj
createInstance = classmethod(createInstance)
def __init__(self, pool, cr):
pool.add(self._name, self)
self.pool = pool
orm.orm_memory.__init__(self, cr)
def __init__(self, pool, cr):
pool.add(self._name, self)
self.pool = pool
orm.orm_memory.__init__(self, cr)
class osv(orm.orm):
#__metaclass__ = inheritor
def __new__(cls):
module = str(cls)[6:]
module = module[:len(module)-1]
module = module.split('.')[0][2:]
if not hasattr(cls, '_module'):
cls._module = module
module_class_list.setdefault(cls._module, []).append(cls)
class_pool[cls._name] = cls
if module not in module_list:
module_list.append(cls._module)
return None
#__metaclass__ = inheritor
def __new__(cls):
module = str(cls)[6:]
module = module[:len(module)-1]
module = module.split('.')[0][2:]
if not hasattr(cls, '_module'):
cls._module = module
module_class_list.setdefault(cls._module, []).append(cls)
class_pool[cls._name] = cls
if module not in module_list:
module_list.append(cls._module)
return None
#
# Goal: try to apply inheritancy at the instanciation level and
# put objects in the pool var
#
def createInstance(cls, pool, module, cr):
parent_name = hasattr(cls, '_inherit') and cls._inherit
if parent_name:
parent_class = pool.get(parent_name).__class__
assert parent_class, "parent class %s does not exist !" % parent_name
nattr = {}
for s in ('_columns', '_defaults', '_inherits', '_constraints', '_sql_constraints'):
new = copy.copy(getattr(pool.get(parent_name), s))
if hasattr(new, 'update'):
new.update(cls.__dict__.get(s, {}))
else:
new.extend(cls.__dict__.get(s, []))
nattr[s] = new
name = hasattr(cls,'_name') and cls._name or cls._inherit
cls = type(name, (cls, parent_class), nattr)
obj = object.__new__(cls)
obj.__init__(pool, cr)
return obj
createInstance = classmethod(createInstance)
#
# Goal: try to apply inheritancy at the instanciation level and
# put objects in the pool var
#
def createInstance(cls, pool, module, cr):
parent_name = hasattr(cls, '_inherit') and cls._inherit
if parent_name:
parent_class = pool.get(parent_name).__class__
assert parent_class, "parent class %s does not exist !" % parent_name
nattr = {}
for s in ('_columns', '_defaults', '_inherits', '_constraints', '_sql_constraints'):
new = copy.copy(getattr(pool.get(parent_name), s))
if hasattr(new, 'update'):
new.update(cls.__dict__.get(s, {}))
else:
new.extend(cls.__dict__.get(s, []))
nattr[s] = new
name = hasattr(cls,'_name') and cls._name or cls._inherit
cls = type(name, (cls, parent_class), nattr)
obj = object.__new__(cls)
obj.__init__(pool, cr)
return obj
createInstance = classmethod(createInstance)
def __init__(self, pool, cr):
pool.add(self._name, self)
self.pool = pool
orm.orm.__init__(self, cr)
def __init__(self, pool, cr):
pool.add(self._name, self)
self.pool = pool
orm.orm.__init__(self, cr)
class Cacheable(object):
_cache = UpdateableDict()
_cache = UpdateableDict()
def add(self, key, value):
self._cache[key] = value
def add(self, key, value):
self._cache[key] = value
def invalidate(self, key):
del self._cache[key]
def invalidate(self, key):
del self._cache[key]
def get(self, key):
try:
w = self._cache[key]
return w
except KeyError:
return None
def get(self, key):
try:
w = self._cache[key]
return w
except KeyError:
return None
def clear(self):
self._cache.clear()
self._items = []
def clear(self):
self._cache.clear()
self._items = []
def filter_dict(d, fields):
res = {}
for f in fields + ['id']:
if f in d:
res[f] = d[f]
return res
res = {}
for f in fields + ['id']:
if f in d:
res[f] = d[f]
return res
class cacheable_osv(osv, Cacheable):
_relevant = ['lang']
_relevant = ['lang']
def __init__(self):
super(cacheable_osv, self).__init__()
def __init__(self):
super(cacheable_osv, self).__init__()
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
if not fields:
fields=[]
if not context:
context={}
fields = fields or self._columns.keys()
ctx = [context.get(x, False) for x in self._relevant]
result, tofetch = [], []
for id in ids:
res = self.get(self._name, id, ctx)
if not res:
tofetch.append(id)
else:
result.append(filter_dict(res, fields))
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
if not fields:
fields=[]
if not context:
context={}
fields = fields or self._columns.keys()
ctx = [context.get(x, False) for x in self._relevant]
result, tofetch = [], []
for id in ids:
res = self.get(self._name, id, ctx)
if not res:
tofetch.append(id)
else:
result.append(filter_dict(res, fields))
# gen the list of "local" (ie not inherited) fields which are classic or many2one
nfields = filter(lambda x: x[1]._classic_write, self._columns.items())
# gen the list of inherited fields
inherits = map(lambda x: (x[0], x[1][2]), self._inherit_fields.items())
# complete the field list with the inherited fields which are classic or many2one
nfields += filter(lambda x: x[1]._classic_write, inherits)
nfields = [x[0] for x in nfields]
# gen the list of "local" (ie not inherited) fields which are classic or many2one
nfields = filter(lambda x: x[1]._classic_write, self._columns.items())
# gen the list of inherited fields
inherits = map(lambda x: (x[0], x[1][2]), self._inherit_fields.items())
# complete the field list with the inherited fields which are classic or many2one
nfields += filter(lambda x: x[1]._classic_write, inherits)
nfields = [x[0] for x in nfields]
res = super(cacheable_osv, self).read(cr, user, tofetch, nfields, context, load)
for r in res:
self.add((self._name, r['id'], ctx), r)
result.append(filter_dict(r, fields))
res = super(cacheable_osv, self).read(cr, user, tofetch, nfields, context, load)
for r in res:
self.add((self._name, r['id'], ctx), r)
result.append(filter_dict(r, fields))
# Appel de fonction si necessaire
tofetch = []
for f in fields:
if f not in nfields:
tofetch.append(f)
for f in tofetch:
fvals = self._columns[f].get(cr, self, ids, f, user, context=context)
for r in result:
r[f] = fvals[r['id']]
# Appel de fonction si necessaire
tofetch = []
for f in fields:
if f not in nfields:
tofetch.append(f)
for f in tofetch:
fvals = self._columns[f].get(cr, self, ids, f, user, context=context)
for r in result:
r[f] = fvals[r['id']]
# TODO: tri par self._order !!
return result
# TODO: tri par self._order !!
return result
def invalidate(self, key):
del self._cache[key[0]][key[1]]
def invalidate(self, key):
del self._cache[key[0]][key[1]]
def write(self, cr, user, ids, values, context=None):
if not context:
context={}
for id in ids:
self.invalidate((self._name, id))
return super(cacheable_osv, self).write(cr, user, ids, values, context)
def write(self, cr, user, ids, values, context=None):
if not context:
context={}
for id in ids:
self.invalidate((self._name, id))
return super(cacheable_osv, self).write(cr, user, ids, values, context)
def unlink(self, cr, user, ids):
self.clear()
return super(cacheable_osv, self).unlink(cr, user, ids)
def unlink(self, cr, user, ids):
self.clear()
return super(cacheable_osv, self).unlink(cr, user, ids)
#cacheable_osv = osv
# vim:noexpandtab:
#class FakePool(object):
# def __init__(self, module):
# self.preferred_module = module
# def __init__(self, module):
# self.preferred_module = module
# def get(self, name):
# localpool = module_objects_dict.get(self.preferred_module, {'dict': {}})['dict']
# if name in localpool:
# obj = localpool[name]
# else:
# obj = pooler.get_pool(cr.dbname).get(name)
# return obj
# def get(self, name):
# localpool = module_objects_dict.get(self.preferred_module, {'dict': {}})['dict']
# if name in localpool:
# obj = localpool[name]
# else:
# obj = pooler.get_pool(cr.dbname).get(name)
# return obj
# fake_pool = self
# class fake_class(obj.__class__):
# def __init__(self):
# super(fake_class, self).__init__()
# self.pool = fake_pool
# fake_pool = self
# class fake_class(obj.__class__):
# def __init__(self):
# super(fake_class, self).__init__()
# self.pool = fake_pool
# return fake_class()
# return fake_class()

View File

@ -36,65 +36,65 @@ db_dic = {}
pool_dic = {}
def get_db_and_pool(db_name, force_demo=False, status=None, update_module=False):
if not status:
status={}
if db_name in db_dic:
db = db_dic[db_name]
else:
logger = netsvc.Logger()
logger.notifyChannel('pooler', netsvc.LOG_INFO, 'Connecting to %s' % (db_name))
db = sql_db.db_connect(db_name)
db_dic[db_name] = db
if not status:
status={}
if db_name in db_dic:
db = db_dic[db_name]
else:
logger = netsvc.Logger()
logger.notifyChannel('pooler', netsvc.LOG_INFO, 'Connecting to %s' % (db_name))
db = sql_db.db_connect(db_name)
db_dic[db_name] = db
if db_name in pool_dic:
pool = pool_dic[db_name]
else:
pool = osv.osv.osv_pool()
pool_dic[db_name] = pool
addons.load_modules(db, force_demo, status, update_module)
if db_name in pool_dic:
pool = pool_dic[db_name]
else:
pool = osv.osv.osv_pool()
pool_dic[db_name] = pool
addons.load_modules(db, force_demo, status, update_module)
if not update_module:
import report
report.interface.register_all(db)
pool.get('ir.cron')._poolJobs(db.dbname)
return db, pool
if not update_module:
import report
report.interface.register_all(db)
pool.get('ir.cron')._poolJobs(db.dbname)
return db, pool
def restart_pool(db_name, force_demo=False, update_module=False):
# del db_dic[db_name]
del pool_dic[db_name]
return get_db_and_pool(db_name, force_demo, update_module=update_module)
# del db_dic[db_name]
del pool_dic[db_name]
return get_db_and_pool(db_name, force_demo, update_module=update_module)
def close_db(db_name):
if db_name in db_dic:
db_dic[db_name].truedb.close()
del db_dic[db_name]
if db_name in pool_dic:
del pool_dic[db_name]
if db_name in db_dic:
db_dic[db_name].truedb.close()
del db_dic[db_name]
if db_name in pool_dic:
del pool_dic[db_name]
def get_db_only(db_name):
if db_name in db_dic:
db = db_dic[db_name]
else:
db = sql_db.db_connect(db_name)
db_dic[db_name] = db
return db
if db_name in db_dic:
db = db_dic[db_name]
else:
db = sql_db.db_connect(db_name)
db_dic[db_name] = db
return db
def get_db(db_name):
# print "get_db", db_name
return get_db_and_pool(db_name)[0]
# print "get_db", db_name
return get_db_and_pool(db_name)[0]
def get_pool(db_name, force_demo=False, status=None, update_module=False):
# print "get_pool", db_name
pool = get_db_and_pool(db_name, force_demo, status, update_module)[1]
# addons.load_modules(db_name, False)
# if not pool.obj_list():
# pool.instanciate()
# print "pool", pool
return pool
# return get_db_and_pool(db_name)[1]
# print "get_pool", db_name
pool = get_db_and_pool(db_name, force_demo, status, update_module)[1]
# addons.load_modules(db_name, False)
# if not pool.obj_list():
# pool.instanciate()
# print "pool", pool
return pool
# return get_db_and_pool(db_name)[1]
def init():
global db
# db = get_db_only(tools.config['db_name'])
sql_db.init()
global db
# db = get_db_only(tools.config['db_name'])
sql_db.init()

View File

@ -28,6 +28,6 @@
##############################################################################
pageSize = {
'A4': (210,297),
'A5': (148.5,105)
'A4': (210,297),
'A5': (148.5,105)
}

File diff suppressed because it is too large Load Diff

View File

@ -28,55 +28,55 @@
##############################################################################
unites = {
0: '', 1:'un', 2:'deux', 3:'trois', 4:'quatre', 5:'cinq', 6:'six', 7:'sept', 8:'huit', 9:'neuf',
10:'dix', 11:'onze', 12:'douze', 13:'treize', 14:'quatorze', 15:'quinze', 16:'seize',
21:'vingt et un', 31:'trente et un', 41:'quarante et un', 51:'cinquante et un', 61:'soixante et un',
71:'septante et un', 91:'nonante et un', 80:'quatre-vingts'
0: '', 1:'un', 2:'deux', 3:'trois', 4:'quatre', 5:'cinq', 6:'six', 7:'sept', 8:'huit', 9:'neuf',
10:'dix', 11:'onze', 12:'douze', 13:'treize', 14:'quatorze', 15:'quinze', 16:'seize',
21:'vingt et un', 31:'trente et un', 41:'quarante et un', 51:'cinquante et un', 61:'soixante et un',
71:'septante et un', 91:'nonante et un', 80:'quatre-vingts'
}
dizaine = {
1: 'dix', 2:'vingt', 3:'trente',4:'quarante', 5:'cinquante', 6:'soixante', 7:'septante', 8:'quatre-vingt', 9:'nonante'
1: 'dix', 2:'vingt', 3:'trente',4:'quarante', 5:'cinquante', 6:'soixante', 7:'septante', 8:'quatre-vingt', 9:'nonante'
}
centaine = {
0:'', 1: 'cent', 2:'deux cent', 3:'trois cent',4:'quatre cent', 5:'cinq cent', 6:'six cent', 7:'sept cent', 8:'huit cent', 9:'neuf cent'
0:'', 1: 'cent', 2:'deux cent', 3:'trois cent',4:'quatre cent', 5:'cinq cent', 6:'six cent', 7:'sept cent', 8:'huit cent', 9:'neuf cent'
}
mille = {
0:'', 1:'mille'
0:'', 1:'mille'
}
def _100_to_text(chiffre):
if chiffre in unites:
return unites[chiffre]
else:
if chiffre%10>0:
return dizaine[chiffre / 10]+'-'+unites[chiffre % 10]
else:
return dizaine[chiffre / 10]
if chiffre in unites:
return unites[chiffre]
else:
if chiffre%10>0:
return dizaine[chiffre / 10]+'-'+unites[chiffre % 10]
else:
return dizaine[chiffre / 10]
def _1000_to_text(chiffre):
d = _100_to_text(chiffre % 100)
d2 = chiffre/100
if d2>0 and d:
return centaine[d2]+' '+d
elif d2>1 and not(d):
return centaine[d2]+'s'
else:
return centaine[d2] or d
d = _100_to_text(chiffre % 100)
d2 = chiffre/100
if d2>0 and d:
return centaine[d2]+' '+d
elif d2>1 and not(d):
return centaine[d2]+'s'
else:
return centaine[d2] or d
def _10000_to_text(chiffre):
if chiffre==0:
return 'zero'
part1 = _1000_to_text(chiffre % 1000)
part2 = mille.get(chiffre / 1000, _1000_to_text(chiffre / 1000)+' mille')
if part2 and part1:
part1 = ' '+part1
return part2+part1
if chiffre==0:
return 'zero'
part1 = _1000_to_text(chiffre % 1000)
part2 = mille.get(chiffre / 1000, _1000_to_text(chiffre / 1000)+' mille')
if part2 and part1:
part1 = ' '+part1
return part2+part1
def int_to_text(i):
return _10000_to_text(i)
return _10000_to_text(i)
if __name__=='__main__':
for i in range(1,999999,139):
print int_to_text(i)
for i in range(1,999999,139):
print int_to_text(i)

View File

@ -46,187 +46,187 @@ import urllib
# encode a value to a string in utf8 and converts XML entities
#
def toxml(val):
if isinstance(val, str):
str_utf8 = val
elif isinstance(val, unicode):
str_utf8 = val.encode('utf-8')
else:
str_utf8 = str(val)
return str_utf8.replace('&', '&amp;').replace('<','&lt;').replace('>','&gt;')
if isinstance(val, str):
str_utf8 = val
elif isinstance(val, unicode):
str_utf8 = val.encode('utf-8')
else:
str_utf8 = str(val)
return str_utf8.replace('&', '&amp;').replace('<','&lt;').replace('>','&gt;')
class report_int(netsvc.Service):
def __init__(self, name, audience='*'):
assert not netsvc.service_exist(name), 'The report "%s" already exist!' % name
super(report_int, self).__init__(name, audience)
if name[0:7]<>'report.':
raise Exception, 'ConceptionError, bad report name, should start with "report."'
self.name = name
self.id = 0
self.name2 = '.'.join(name.split('.')[1:])
self.joinGroup('report')
self.exportMethod(self.create)
def __init__(self, name, audience='*'):
assert not netsvc.service_exist(name), 'The report "%s" already exist!' % name
super(report_int, self).__init__(name, audience)
if name[0:7]<>'report.':
raise Exception, 'ConceptionError, bad report name, should start with "report."'
self.name = name
self.id = 0
self.name2 = '.'.join(name.split('.')[1:])
self.joinGroup('report')
self.exportMethod(self.create)
def create(self, cr, uid, ids, datas, context=None):
return False
def create(self, cr, uid, ids, datas, context=None):
return False
"""
Class to automatically build a document using the transformation process:
XML -> DATAS -> RML -> PDF
-> HTML
using a XSL:RML transformation
Class to automatically build a document using the transformation process:
XML -> DATAS -> RML -> PDF
-> HTML
using a XSL:RML transformation
"""
class report_rml(report_int):
def __init__(self, name, table, tmpl, xsl):
super(report_rml, self).__init__(name)
self.table = table
self.tmpl = tmpl
self.xsl = xsl
self.bin_datas = {}
self.generators = {
'pdf': self.create_pdf,
'html': self.create_html,
'raw': self.create_raw,
'sxw': self.create_sxw,
}
def __init__(self, name, table, tmpl, xsl):
super(report_rml, self).__init__(name)
self.table = table
self.tmpl = tmpl
self.xsl = xsl
self.bin_datas = {}
self.generators = {
'pdf': self.create_pdf,
'html': self.create_html,
'raw': self.create_raw,
'sxw': self.create_sxw,
}
def create(self, cr, uid, ids, datas, context):
xml = self.create_xml(cr, uid, ids, datas, context)
# file('/tmp/terp.xml','wb+').write(xml)
if datas.get('report_type', 'pdf') == 'raw':
return xml
rml = self.create_rml(cr, xml, uid, context)
# file('/tmp/terp.rml','wb+').write(rml)
report_type = datas.get('report_type', 'pdf')
create_doc = self.generators[report_type]
pdf = create_doc(rml)
return (pdf, report_type)
def create(self, cr, uid, ids, datas, context):
xml = self.create_xml(cr, uid, ids, datas, context)
# file('/tmp/terp.xml','wb+').write(xml)
if datas.get('report_type', 'pdf') == 'raw':
return xml
rml = self.create_rml(cr, xml, uid, context)
# file('/tmp/terp.rml','wb+').write(rml)
report_type = datas.get('report_type', 'pdf')
create_doc = self.generators[report_type]
pdf = create_doc(rml)
return (pdf, report_type)
def create_xml(self, cr, uid, ids, datas, context=None):
if not context:
context={}
doc = print_xml.document(cr, uid, datas, {})
self.bin_datas.update( doc.bin_datas or {})
doc.parse(self.tmpl, ids, self.table, context)
xml = doc.xml_get()
doc.close()
return self.post_process_xml_data(cr, uid, xml, context)
def create_xml(self, cr, uid, ids, datas, context=None):
if not context:
context={}
doc = print_xml.document(cr, uid, datas, {})
self.bin_datas.update( doc.bin_datas or {})
doc.parse(self.tmpl, ids, self.table, context)
xml = doc.xml_get()
doc.close()
return self.post_process_xml_data(cr, uid, xml, context)
def post_process_xml_data(self, cr, uid, xml, context=None):
if not context:
context={}
# find the position of the 3rd tag
# (skip the <?xml ...?> and the "root" tag)
iter = re.finditer('<[^>]*>', xml)
i = iter.next()
i = iter.next()
pos_xml = i.end()
def post_process_xml_data(self, cr, uid, xml, context=None):
if not context:
context={}
# find the position of the 3rd tag
# (skip the <?xml ...?> and the "root" tag)
iter = re.finditer('<[^>]*>', xml)
i = iter.next()
i = iter.next()
pos_xml = i.end()
doc = print_xml.document(cr, uid, {}, {})
tmpl_path = addons.get_module_resource('custom', 'corporate_defaults.xml')
doc.parse(tmpl_path, [uid], 'res.users', context)
corporate_header = doc.xml_get()
doc.close()
doc = print_xml.document(cr, uid, {}, {})
tmpl_path = addons.get_module_resource('custom', 'corporate_defaults.xml')
doc.parse(tmpl_path, [uid], 'res.users', context)
corporate_header = doc.xml_get()
doc.close()
# find the position of the tag after the <?xml ...?> tag
iter = re.finditer('<[^>]*>', corporate_header)
i = iter.next()
pos_header = i.end()
# find the position of the tag after the <?xml ...?> tag
iter = re.finditer('<[^>]*>', corporate_header)
i = iter.next()
pos_header = i.end()
return xml[:pos_xml] + corporate_header[pos_header:] + xml[pos_xml:]
return xml[:pos_xml] + corporate_header[pos_header:] + xml[pos_xml:]
#
# TODO: The translation doesn't work for "<tag t="1">textext<tag> tex</tag>text</tag>"
#
def create_rml(self, cr, xml, uid, context=None):
if not context:
context={}
service = netsvc.LocalService("object_proxy")
#
# TODO: The translation doesn't work for "<tag t="1">textext<tag> tex</tag>text</tag>"
#
def create_rml(self, cr, xml, uid, context=None):
if not context:
context={}
service = netsvc.LocalService("object_proxy")
# In some case we might not use xsl ...
if not self.xsl:
return xml
# In some case we might not use xsl ...
if not self.xsl:
return xml
# load XSL (parse it to the XML level)
styledoc = libxml2.parseDoc(tools.file_open(self.xsl).read())
xsl_path, tail = os.path.split(self.xsl)
for child in styledoc.children:
if child.name == 'import':
if child.hasProp('href'):
imp_file = child.prop('href')
_x, imp_file = tools.file_open(imp_file, subdir=xsl_path, pathinfo=True)
child.setProp('href', urllib.quote(str(imp_file)))
# load XSL (parse it to the XML level)
styledoc = libxml2.parseDoc(tools.file_open(self.xsl).read())
xsl_path, tail = os.path.split(self.xsl)
for child in styledoc.children:
if child.name == 'import':
if child.hasProp('href'):
imp_file = child.prop('href')
_x, imp_file = tools.file_open(imp_file, subdir=xsl_path, pathinfo=True)
child.setProp('href', urllib.quote(str(imp_file)))
#TODO: get all the translation in one query. That means we have to:
# * build a list of items to translate,
# * issue the query to translate them,
# * (re)build/update the stylesheet with the translated items
#TODO: get all the translation in one query. That means we have to:
# * build a list of items to translate,
# * issue the query to translate them,
# * (re)build/update the stylesheet with the translated items
# translate the XSL stylesheet
def look_down(child, lang):
while child is not None:
if (child.type == "element") and child.hasProp('t'):
#FIXME: use cursor
res = service.execute(cr.dbname, uid, 'ir.translation',
'_get_source', self.name2, 'xsl', lang, child.content)
if res:
child.setContent(res)
look_down(child.children, lang)
child = child.next
# translate the XSL stylesheet
def look_down(child, lang):
while child is not None:
if (child.type == "element") and child.hasProp('t'):
#FIXME: use cursor
res = service.execute(cr.dbname, uid, 'ir.translation',
'_get_source', self.name2, 'xsl', lang, child.content)
if res:
child.setContent(res)
look_down(child.children, lang)
child = child.next
if context.get('lang', False):
look_down(styledoc.children, context['lang'])
if context.get('lang', False):
look_down(styledoc.children, context['lang'])
# parse XSL
style = libxslt.parseStylesheetDoc(styledoc)
# load XML (data)
doc = libxml2.parseMemory(xml,len(xml))
# create RML (apply XSL to XML data)
result = style.applyStylesheet(doc, None)
# save result to string
xml = style.saveResultToString(result)
# parse XSL
style = libxslt.parseStylesheetDoc(styledoc)
# load XML (data)
doc = libxml2.parseMemory(xml,len(xml))
# create RML (apply XSL to XML data)
result = style.applyStylesheet(doc, None)
# save result to string
xml = style.saveResultToString(result)
style.freeStylesheet()
doc.freeDoc()
result.freeDoc()
return xml
style.freeStylesheet()
doc.freeDoc()
result.freeDoc()
return xml
def create_pdf(self, xml, logo=None):
if logo:
self.bin_datas['logo'] = logo
else:
if 'logo' in self.bin_datas:
del self.bin_datas['logo']
obj = render.rml(xml, self.bin_datas, tools.config['root_path'])
obj.render()
return obj.get()
def create_pdf(self, xml, logo=None):
if logo:
self.bin_datas['logo'] = logo
else:
if 'logo' in self.bin_datas:
del self.bin_datas['logo']
obj = render.rml(xml, self.bin_datas, tools.config['root_path'])
obj.render()
return obj.get()
def create_html(self, xml, logo=None):
obj = render.rml2html(xml, self.bin_datas)
obj.render()
return obj.get()
def create_html(self, xml, logo=None):
obj = render.rml2html(xml, self.bin_datas)
obj.render()
return obj.get()
def create_raw(self, xml, logo=None):
return xml
def create_raw(self, xml, logo=None):
return xml
def create_sxw(self, path, logo=None):
return path
def create_sxw(self, path, logo=None):
return path
from report_sxw import report_sxw
def register_all(db):
opj = os.path.join
cr = db.cursor()
cr.execute("SELECT * FROM ir_act_report_xml WHERE auto ORDER BY id")
result = cr.dictfetchall()
cr.close()
for r in result:
if netsvc.service_exist('report.'+r['report_name']):
continue
if r['report_rml'] or r['report_rml_content_data']:
report_sxw('report.'+r['report_name'], r['model'],
opj('addons',r['report_rml'] or '/'), header=r['header'])
if r['report_xsl']:
report_rml('report.'+r['report_name'], r['model'],
opj('addons',r['report_xml']),
r['report_xsl'] and opj('addons',r['report_xsl']))
opj = os.path.join
cr = db.cursor()
cr.execute("SELECT * FROM ir_act_report_xml WHERE auto ORDER BY id")
result = cr.dictfetchall()
cr.close()
for r in result:
if netsvc.service_exist('report.'+r['report_name']):
continue
if r['report_rml'] or r['report_rml_content_data']:
report_sxw('report.'+r['report_name'], r['model'],
opj('addons',r['report_rml'] or '/'), header=r['header'])
if r['report_xsl']:
report_rml('report.'+r['report_name'], r['model'],
opj('addons',r['report_xml']),
r['report_xsl'] and opj('addons',r['report_xsl']))

View File

@ -28,14 +28,14 @@
from pychart import *
colorline = [color.T(r=((r+3) % 11)/10.0,
g=((g+6) % 11)/10.0,
b=((b+9) % 11)/10.0)
for r in range(11) for g in range(11) for b in range(11)]
g=((g+6) % 11)/10.0,
b=((b+9) % 11)/10.0)
for r in range(11) for g in range(11) for b in range(11)]
def choice_colors(n):
if n:
return colorline[0:-1:len(colorline)/n]
return []
if n:
return colorline[0:-1:len(colorline)/n]
return []
if __name__=='__main__':
print choice_colors(10)
print choice_colors(10)

View File

@ -30,13 +30,13 @@
import time
functions = {
'today': lambda x: time.strftime('%d/%m/%Y', time.localtime()).decode('latin1')
'today': lambda x: time.strftime('%d/%m/%Y', time.localtime()).decode('latin1')
}
#
# TODO: call an object internal function too
#
def print_fnc(fnc, arg):
if fnc in functions:
return functions[fnc](arg)
return ''
if fnc in functions:
return functions[fnc](arg)
return ''

View File

@ -38,85 +38,85 @@ from osv.orm import browse_null, browse_record
import pooler
class InheritDict(dict):
# Might be usefull when we're doing name lookup for call or eval.
# Might be usefull when we're doing name lookup for call or eval.
def __init__(self, parent=None):
self.parent = parent
def __init__(self, parent=None):
self.parent = parent
def __getitem__(self, name):
if name in self:
return super(InheritDict, self).__getitem__(name)
else:
if not self.parent:
raise KeyError
else:
return self.parent[name]
def __getitem__(self, name):
if name in self:
return super(InheritDict, self).__getitem__(name)
else:
if not self.parent:
raise KeyError
else:
return self.parent[name]
def tounicode(val):
if isinstance(val, str):
unicode_val = unicode(val, 'utf-8')
elif isinstance(val, unicode):
unicode_val = val
else:
unicode_val = unicode(val)
return unicode_val
if isinstance(val, str):
unicode_val = unicode(val, 'utf-8')
elif isinstance(val, unicode):
unicode_val = val
else:
unicode_val = unicode(val)
return unicode_val
class document(object):
def __init__(self, cr, uid, datas, func=False):
# create a new document
self.cr = cr
self.pool = pooler.get_pool(cr.dbname)
self.doc = minidom.Document()
self.func = func or {}
self.datas = datas
self.uid = uid
self.bin_datas = {}
def __init__(self, cr, uid, datas, func=False):
# create a new document
self.cr = cr
self.pool = pooler.get_pool(cr.dbname)
self.doc = minidom.Document()
self.func = func or {}
self.datas = datas
self.uid = uid
self.bin_datas = {}
def node_attrs_get(self, node):
attrs = {}
nattr = node.attributes
for i in range(nattr.length):
attr = nattr.item(i)
attrs[attr.localName] = attr.nodeValue
# attrs[attr.name] = attr.nodeValue
return attrs
def node_attrs_get(self, node):
attrs = {}
nattr = node.attributes
for i in range(nattr.length):
attr = nattr.item(i)
attrs[attr.localName] = attr.nodeValue
# attrs[attr.name] = attr.nodeValue
return attrs
def get_value(self, browser, field_path):
fields = field_path.split('.')
def get_value(self, browser, field_path):
fields = field_path.split('.')
if not len(fields):
print "WARNING: field name is empty!"
return ''
if not len(fields):
print "WARNING: field name is empty!"
return ''
value = browser
for f in fields:
if isinstance(value, list):
if len(value)==0:
print "WARNING: empty list found!"
return ''
# elif len(value)>1:
# print "WARNING:", len(value), "possibilities for", value[0]._table_name , "picking first..."
value = value[0]
if isinstance(value, browse_null):
return ''
else:
value = value[f]
value = browser
for f in fields:
if isinstance(value, list):
if len(value)==0:
print "WARNING: empty list found!"
return ''
# elif len(value)>1:
# print "WARNING:", len(value), "possibilities for", value[0]._table_name , "picking first..."
value = value[0]
if isinstance(value, browse_null):
return ''
else:
value = value[f]
if isinstance(value, browse_null) or (type(value)==bool and not value):
return ''
else:
return value
if isinstance(value, browse_null) or (type(value)==bool and not value):
return ''
else:
return value
def get_value2(self, browser, field_path):
value = self.get_value(browser, field_path)
if isinstance(value, browse_record):
return value.id
elif isinstance(value, browse_null):
return False
else:
return value
def get_value2(self, browser, field_path):
value = self.get_value(browser, field_path)
if isinstance(value, browse_record):
return value.id
elif isinstance(value, browse_null):
return False
else:
return value
def eval(self, record, expr):
def eval(self, record, expr):
#TODO: support remote variables (eg address.title) in expr
# how to do that: parse the string, find dots, replace those dotted variables by temporary
# "simple ones", fetch the value of those variables and add them (temporarily) to the _data
@ -126,239 +126,239 @@ class document(object):
# happen if the eval node is the first one using this browse_record
# the next line is a workaround for the problem: it causes the resource to be loaded
#Pinky: Why not this ? eval(expr, browser) ?
# name = browser.name
# data_dict = browser._data[self.get_value(browser, 'id')]
return eval(expr)
# name = browser.name
# data_dict = browser._data[self.get_value(browser, 'id')]
return eval(expr)
def parse_node(self, node, parent, browser, datas=None):
# node is the node of the xml template to be parsed
# parent = the parent node in the xml data tree we are creating
def parse_node(self, node, parent, browser, datas=None):
# node is the node of the xml template to be parsed
# parent = the parent node in the xml data tree we are creating
if node.nodeType == node.ELEMENT_NODE:
# print '-'*60
# print "parse_node", node
# print "parent: ", parent
# print "ids:", ids
# print "model:", model
# print "datas:", datas
if node.nodeType == node.ELEMENT_NODE:
# print '-'*60
# print "parse_node", node
# print "parent: ", parent
# print "ids:", ids
# print "model:", model
# print "datas:", datas
# convert the attributes of the node to a dictionary
# convert the attributes of the node to a dictionary
attrs = self.node_attrs_get(node)
if 'type' in attrs:
if attrs['type']=='field':
value = self.get_value(browser, attrs['name'])
attrs = self.node_attrs_get(node)
if 'type' in attrs:
if attrs['type']=='field':
value = self.get_value(browser, attrs['name'])
#TODO: test this
if value == '' and 'default' in attrs:
value = attrs['default']
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_txt = self.doc.createTextNode(tounicode(value))
el.appendChild(el_txt)
if value == '' and 'default' in attrs:
value = attrs['default']
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_txt = self.doc.createTextNode(tounicode(value))
el.appendChild(el_txt)
#TODO: test this
for key, value in attrs.iteritems():
if key not in ('type', 'name', 'default'):
el.setAttribute(key, value)
for key, value in attrs.iteritems():
if key not in ('type', 'name', 'default'):
el.setAttribute(key, value)
elif attrs['type']=='attachment':
if isinstance(browser, list):
model = browser[0]._table_name
else:
model = browser._table_name
elif attrs['type']=='attachment':
if isinstance(browser, list):
model = browser[0]._table_name
else:
model = browser._table_name
value = self.get_value(browser, attrs['name'])
value = self.get_value(browser, attrs['name'])
service = netsvc.LocalService("object_proxy")
ids = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'search', [('res_model','=',model),('res_id','=',int(value))])
datas = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'read', ids)
service = netsvc.LocalService("object_proxy")
ids = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'search', [('res_model','=',model),('res_id','=',int(value))])
datas = service.execute(self.cr.dbname, self.uid, 'ir.attachment', 'read', ids)
if len(datas):
# if there are several, pick first
datas = datas[0]
fname = str(datas['datas_fname'])
ext = fname.split('.')[-1].lower()
if ext in ('jpg','jpeg', 'png'):
import base64, StringIO
dt = base64.decodestring(datas['datas'])
fp = StringIO.StringIO(dt)
i = str(len(self.bin_datas))
self.bin_datas[i] = fp
if len(datas):
# if there are several, pick first
datas = datas[0]
fname = str(datas['datas_fname'])
ext = fname.split('.')[-1].lower()
if ext in ('jpg','jpeg', 'png'):
import base64, StringIO
dt = base64.decodestring(datas['datas'])
fp = StringIO.StringIO(dt)
i = str(len(self.bin_datas))
self.bin_datas[i] = fp
el = self.doc.createElement(node.localName)
parent.appendChild(el)
# node content is the length of the image
el_txt = self.doc.createTextNode(i)
el.appendChild(el_txt)
el = self.doc.createElement(node.localName)
parent.appendChild(el)
# node content is the length of the image
el_txt = self.doc.createTextNode(i)
el.appendChild(el_txt)
elif attrs['type']=='data':
elif attrs['type']=='data':
#TODO: test this
el = self.doc.createElement(node.localName)
parent.appendChild(el)
txt = self.datas.get('form', {}).get(attrs['name'], '')
el_txt = self.doc.createTextNode(tounicode(txt))
el.appendChild(el_txt)
el = self.doc.createElement(node.localName)
parent.appendChild(el)
txt = self.datas.get('form', {}).get(attrs['name'], '')
el_txt = self.doc.createTextNode(tounicode(txt))
el.appendChild(el_txt)
elif attrs['type']=='function':
el = self.doc.createElement(node.localName)
parent.appendChild(el)
if attrs['name'] in self.func:
txt = self.func[attrs['name']](node)
else:
txt = print_fnc.print_fnc(attrs['name'], node)
el_txt = self.doc.createTextNode(txt)
el.appendChild(el_txt)
elif attrs['type']=='function':
el = self.doc.createElement(node.localName)
parent.appendChild(el)
if attrs['name'] in self.func:
txt = self.func[attrs['name']](node)
else:
txt = print_fnc.print_fnc(attrs['name'], node)
el_txt = self.doc.createTextNode(txt)
el.appendChild(el_txt)
elif attrs['type']=='eval':
elif attrs['type']=='eval':
#TODO: faire ca plus proprement
if isinstance(browser, list):
print "ERROR: EVAL!"
el = self.doc.createElement(node.localName)
parent.appendChild(el)
value = self.eval(browser, attrs['expr'])
el_txt = self.doc.createTextNode(str(value))
el.appendChild(el_txt)
if isinstance(browser, list):
print "ERROR: EVAL!"
el = self.doc.createElement(node.localName)
parent.appendChild(el)
value = self.eval(browser, attrs['expr'])
el_txt = self.doc.createTextNode(str(value))
el.appendChild(el_txt)
elif attrs['type']=='fields':
fields = attrs['name'].split(',')
vals = {}
for b in browser:
value = tuple([self.get_value2(b, f) for f in fields])
if not value in vals:
vals[value]=[]
vals[value].append(b)
keys = vals.keys()
keys.sort()
elif attrs['type']=='fields':
fields = attrs['name'].split(',')
vals = {}
for b in browser:
value = tuple([self.get_value2(b, f) for f in fields])
if not value in vals:
vals[value]=[]
vals[value].append(b)
keys = vals.keys()
keys.sort()
if 'order' in attrs and attrs['order']=='desc':
keys.reverse()
if 'order' in attrs and attrs['order']=='desc':
keys.reverse()
v_list = [vals[k] for k in keys]
for v in v_list:
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_cld = node.firstChild
while el_cld:
self.parse_node(el_cld, el, v)
el_cld = el_cld.nextSibling
v_list = [vals[k] for k in keys]
for v in v_list:
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_cld = node.firstChild
while el_cld:
self.parse_node(el_cld, el, v)
el_cld = el_cld.nextSibling
elif attrs['type']=='call':
if len(attrs['args']):
elif attrs['type']=='call':
if len(attrs['args']):
#TODO: test this
# fetches the values of the variables which names where passed in the args attribute
args = [self.eval(browser, arg) for arg in attrs['args'].split(',')]
else:
args = []
# fetches the values of the variables which names where passed in the args attribute
args = [self.eval(browser, arg) for arg in attrs['args'].split(',')]
else:
args = []
# get the object
if attrs.has_key('model'):
obj = self.pool.get(attrs['model'])
else:
if isinstance(browser, list):
obj = browser[0]._table
else:
obj = browser._table
# get the object
if attrs.has_key('model'):
obj = self.pool.get(attrs['model'])
else:
if isinstance(browser, list):
obj = browser[0]._table
else:
obj = browser._table
# get the ids
if attrs.has_key('ids'):
ids = self.eval(browser, attrs['ids'])
else:
if isinstance(browser, list):
ids = [b.id for b in browser]
else:
ids = [browser.id]
# get the ids
if attrs.has_key('ids'):
ids = self.eval(browser, attrs['ids'])
else:
if isinstance(browser, list):
ids = [b.id for b in browser]
else:
ids = [browser.id]
# call the method itself
newdatas = getattr(obj, attrs['name'])(self.cr, self.uid, ids, *args)
# call the method itself
newdatas = getattr(obj, attrs['name'])(self.cr, self.uid, ids, *args)
def parse_result_tree(node, parent, datas):
if node.nodeType == node.ELEMENT_NODE:
el = self.doc.createElement(node.localName)
parent.appendChild(el)
atr = self.node_attrs_get(node)
if 'value' in atr:
#print "type=>",type(datas[atr['value']])
#print "value=>",datas[atr['value']]
if not isinstance(datas[atr['value']], (str, unicode)):
txt = self.doc.createTextNode(str(datas[atr['value']]))
else:
txt = self.doc.createTextNode(datas[atr['value']].decode('utf-8'))
el.appendChild(txt)
else:
el_cld = node.firstChild
while el_cld:
parse_result_tree(el_cld, el, datas)
el_cld = el_cld.nextSibling
elif node.nodeType==node.TEXT_NODE:
el = self.doc.createTextNode(node.nodeValue)
parent.appendChild(el)
else:
pass
def parse_result_tree(node, parent, datas):
if node.nodeType == node.ELEMENT_NODE:
el = self.doc.createElement(node.localName)
parent.appendChild(el)
atr = self.node_attrs_get(node)
if 'value' in atr:
#print "type=>",type(datas[atr['value']])
#print "value=>",datas[atr['value']]
if not isinstance(datas[atr['value']], (str, unicode)):
txt = self.doc.createTextNode(str(datas[atr['value']]))
else:
txt = self.doc.createTextNode(datas[atr['value']].decode('utf-8'))
el.appendChild(txt)
else:
el_cld = node.firstChild
while el_cld:
parse_result_tree(el_cld, el, datas)
el_cld = el_cld.nextSibling
elif node.nodeType==node.TEXT_NODE:
el = self.doc.createTextNode(node.nodeValue)
parent.appendChild(el)
else:
pass
if not isinstance(newdatas, list):
newdatas = [newdatas]
for newdata in newdatas:
parse_result_tree(node, parent, newdata)
if not isinstance(newdatas, list):
newdatas = [newdatas]
for newdata in newdatas:
parse_result_tree(node, parent, newdata)
elif attrs['type']=='zoom':
value = self.get_value(browser, attrs['name'])
elif attrs['type']=='zoom':
value = self.get_value(browser, attrs['name'])
if value:
if not isinstance(value, list):
v_list = [value]
else:
v_list = value
for v in v_list:
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_cld = node.firstChild
while el_cld:
self.parse_node(el_cld, el, v)
el_cld = el_cld.nextSibling
else:
# if there is no "type" attribute in the node, copy it to the xml data and parse its childs
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_cld = node.firstChild
while el_cld:
self.parse_node(el_cld, el, browser)
el_cld = el_cld.nextSibling
if value:
if not isinstance(value, list):
v_list = [value]
else:
v_list = value
for v in v_list:
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_cld = node.firstChild
while el_cld:
self.parse_node(el_cld, el, v)
el_cld = el_cld.nextSibling
else:
# if there is no "type" attribute in the node, copy it to the xml data and parse its childs
el = self.doc.createElement(node.localName)
parent.appendChild(el)
el_cld = node.firstChild
while el_cld:
self.parse_node(el_cld, el, browser)
el_cld = el_cld.nextSibling
elif node.nodeType==node.TEXT_NODE:
# if it's a text node, copy it to the xml data
el = self.doc.createTextNode(node.nodeValue)
parent.appendChild(el)
else:
pass
elif node.nodeType==node.TEXT_NODE:
# if it's a text node, copy it to the xml data
el = self.doc.createTextNode(node.nodeValue)
parent.appendChild(el)
else:
pass
def xml_get(self):
return self.doc.toxml('utf-8')
def xml_get(self):
return self.doc.toxml('utf-8')
def parse_tree(self, ids, model, context=None):
if not context:
context={}
browser = self.pool.get(model).browse(self.cr, self.uid, ids, context)
self.parse_node(self.dom.documentElement, self.doc, browser)
def parse_tree(self, ids, model, context=None):
if not context:
context={}
browser = self.pool.get(model).browse(self.cr, self.uid, ids, context)
self.parse_node(self.dom.documentElement, self.doc, browser)
def parse_string(self, xml, ids, model, context=None):
if not context:
context={}
# parses the xml template to memory
self.dom = minidom.parseString(xml)
def parse_string(self, xml, ids, model, context=None):
if not context:
context={}
# parses the xml template to memory
self.dom = minidom.parseString(xml)
# create the xml data from the xml template
self.parse_tree(ids, model, context)
# create the xml data from the xml template
self.parse_tree(ids, model, context)
def parse(self, filename, ids, model, context=None):
if not context:
context={}
# parses the xml template to memory
self.dom = minidom.parseString(tools.file_open(filename).read())
def parse(self, filename, ids, model, context=None):
if not context:
context={}
# parses the xml template to memory
self.dom = minidom.parseString(tools.file_open(filename).read())
# create the xml data from the xml template
self.parse_tree(ids, model, context)
# create the xml data from the xml template
self.parse_tree(ids, model, context)
def close(self):
self.doc = None
self.dom = None
def close(self):
self.doc = None
self.dom = None

View File

@ -40,123 +40,123 @@ import libxslt
import time, os
class report_printscreen_list(report_int):
def __init__(self, name):
report_int.__init__(self, name)
def __init__(self, name):
report_int.__init__(self, name)
def _parse_node(self, root_node):
result = []
for node in root_node.childNodes:
if node.localName == 'field':
attrsa = node.attributes
attrs = {}
if not attrsa is None:
for i in range(attrsa.length):
attrs[attrsa.item(i).localName] = attrsa.item(i).nodeValue
result.append(attrs['name'])
else:
result.extend(self._parse_node(node))
return result
def _parse_node(self, root_node):
result = []
for node in root_node.childNodes:
if node.localName == 'field':
attrsa = node.attributes
attrs = {}
if not attrsa is None:
for i in range(attrsa.length):
attrs[attrsa.item(i).localName] = attrsa.item(i).nodeValue
result.append(attrs['name'])
else:
result.extend(self._parse_node(node))
return result
def _parse_string(self, view):
dom = minidom.parseString(view)
return self._parse_node(dom)
def _parse_string(self, view):
dom = minidom.parseString(view)
return self._parse_node(dom)
def create(self, cr, uid, ids, datas, context=None):
if not context:
context={}
datas['ids'] = ids
pool = pooler.get_pool(cr.dbname)
model_id = pool.get('ir.model').search(cr, uid, [('model','=',model._name)])
if model_id:
model_desc = pool.get('ir.model').browse(cr, uid, model_id, context).name
else:
model_desc = model._description
def create(self, cr, uid, ids, datas, context=None):
if not context:
context={}
datas['ids'] = ids
pool = pooler.get_pool(cr.dbname)
model_id = pool.get('ir.model').search(cr, uid, [('model','=',model._name)])
if model_id:
model_desc = pool.get('ir.model').browse(cr, uid, model_id, context).name
else:
model_desc = model._description
model = pool.get(datas['model'])
result = model.fields_view_get(cr, uid, view_type='tree', context=context)
model = pool.get(datas['model'])
result = model.fields_view_get(cr, uid, view_type='tree', context=context)
fields_order = self._parse_string(result['arch'])
rows = model.read(cr, uid, datas['ids'], result['fields'].keys() )
res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model._description)
return (self.obj.get(), 'pdf')
fields_order = self._parse_string(result['arch'])
rows = model.read(cr, uid, datas['ids'], result['fields'].keys() )
res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model._description)
return (self.obj.get(), 'pdf')
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
pageSize=[297.0,210.0]
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
pageSize=[297.0,210.0]
impl = minidom.getDOMImplementation()
new_doc = impl.createDocument(None, "report", None)
impl = minidom.getDOMImplementation()
new_doc = impl.createDocument(None, "report", None)
# build header
config = new_doc.createElement("config")
# build header
config = new_doc.createElement("config")
def _append_node(name, text):
n = new_doc.createElement(name)
t = new_doc.createTextNode(text)
n.appendChild(t)
config.appendChild(n)
def _append_node(name, text):
n = new_doc.createElement(name)
t = new_doc.createTextNode(text)
n.appendChild(t)
config.appendChild(n)
_append_node('date', time.strftime('%d/%m/%Y'))
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
_append_node('date', time.strftime('%d/%m/%Y'))
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
_append_node('report-header', title)
_append_node('report-header', title)
l = []
t = 0
strmax = (pageSize[0]-40) * 2.8346
for f in fields_order:
s = 0
if fields[f]['type'] in ('date','time','float','integer'):
s = 60
strmax -= s
else:
t += fields[f].get('size', 56) / 28 + 1
l.append(s)
for pos in range(len(l)):
if not l[pos]:
s = fields[fields_order[pos]].get('size', 56) / 28 + 1
l[pos] = strmax * s / t
_append_node('tableSize', ','.join(map(str,l)) )
new_doc.childNodes[0].appendChild(config)
header = new_doc.createElement("header")
l = []
t = 0
strmax = (pageSize[0]-40) * 2.8346
for f in fields_order:
s = 0
if fields[f]['type'] in ('date','time','float','integer'):
s = 60
strmax -= s
else:
t += fields[f].get('size', 56) / 28 + 1
l.append(s)
for pos in range(len(l)):
if not l[pos]:
s = fields[fields_order[pos]].get('size', 56) / 28 + 1
l[pos] = strmax * s / t
_append_node('tableSize', ','.join(map(str,l)) )
new_doc.childNodes[0].appendChild(config)
header = new_doc.createElement("header")
for f in fields_order:
field = new_doc.createElement("field")
field_txt = new_doc.createTextNode(str(fields[f]['string']))
field.appendChild(field_txt)
header.appendChild(field)
for f in fields_order:
field = new_doc.createElement("field")
field_txt = new_doc.createTextNode(str(fields[f]['string']))
field.appendChild(field_txt)
header.appendChild(field)
new_doc.childNodes[0].appendChild(header)
new_doc.childNodes[0].appendChild(header)
lines = new_doc.createElement("lines")
for line in results:
node_line = new_doc.createElement("row")
for f in fields_order:
if fields[f]['type']=='many2one' and line[f]:
line[f] = line[f][1]
if fields[f]['type'] in ('one2many','many2many') and line[f]:
line[f] = '( '+str(len(line[f])) + ' )'
col = new_doc.createElement("col")
col.setAttribute('tree','no')
if line[f] != None:
txt = new_doc.createTextNode(str(line[f] or ''))
else:
txt = new_doc.createTextNode('/')
col.appendChild(txt)
node_line.appendChild(col)
lines.appendChild(node_line)
new_doc.childNodes[0].appendChild(lines)
lines = new_doc.createElement("lines")
for line in results:
node_line = new_doc.createElement("row")
for f in fields_order:
if fields[f]['type']=='many2one' and line[f]:
line[f] = line[f][1]
if fields[f]['type'] in ('one2many','many2many') and line[f]:
line[f] = '( '+str(len(line[f])) + ' )'
col = new_doc.createElement("col")
col.setAttribute('tree','no')
if line[f] != None:
txt = new_doc.createTextNode(str(line[f] or ''))
else:
txt = new_doc.createTextNode('/')
col.appendChild(txt)
node_line.appendChild(col)
lines.appendChild(node_line)
new_doc.childNodes[0].appendChild(lines)
styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
style = libxslt.parseStylesheetDoc(styledoc)
doc = libxml2.parseDoc(new_doc.toxml())
rml_obj = style.applyStylesheet(doc, None)
rml = style.saveResultToString(rml_obj)
styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
style = libxslt.parseStylesheetDoc(styledoc)
doc = libxml2.parseDoc(new_doc.toxml())
rml_obj = style.applyStylesheet(doc, None)
rml = style.saveResultToString(rml_obj)
self.obj = render.rml(rml)
self.obj.render()
return True
self.obj = render.rml(rml)
self.obj.render()
return True
report_printscreen_list('report.printscreen.form')

View File

@ -40,166 +40,166 @@ import libxslt
import time, os
class report_printscreen_list(report_int):
def __init__(self, name):
report_int.__init__(self, name)
def __init__(self, name):
report_int.__init__(self, name)
def _parse_node(self, root_node):
result = []
for node in root_node.childNodes:
if node.localName == 'field':
attrsa = node.attributes
attrs = {}
if not attrsa is None:
for i in range(attrsa.length):
attrs[attrsa.item(i).localName] = attrsa.item(i).nodeValue
result.append(attrs['name'])
else:
result.extend(self._parse_node(node))
return result
def _parse_node(self, root_node):
result = []
for node in root_node.childNodes:
if node.localName == 'field':
attrsa = node.attributes
attrs = {}
if not attrsa is None:
for i in range(attrsa.length):
attrs[attrsa.item(i).localName] = attrsa.item(i).nodeValue
result.append(attrs['name'])
else:
result.extend(self._parse_node(node))
return result
def _parse_string(self, view):
dom = minidom.parseString(unicode(view, 'utf-8').encode('utf-8'))
return self._parse_node(dom)
def _parse_string(self, view):
dom = minidom.parseString(unicode(view, 'utf-8').encode('utf-8'))
return self._parse_node(dom)
def create(self, cr, uid, ids, datas, context=None):
if not context:
context={}
pool = pooler.get_pool(cr.dbname)
model = pool.get(datas['model'])
model_id = pool.get('ir.model').search(cr, uid, [('model','=',model._name)])
if model_id:
model_desc = pool.get('ir.model').browse(cr, uid, model_id[0], context).name
else:
model_desc = model._description
def create(self, cr, uid, ids, datas, context=None):
if not context:
context={}
pool = pooler.get_pool(cr.dbname)
model = pool.get(datas['model'])
model_id = pool.get('ir.model').search(cr, uid, [('model','=',model._name)])
if model_id:
model_desc = pool.get('ir.model').browse(cr, uid, model_id[0], context).name
else:
model_desc = model._description
datas['ids'] = ids
model = pooler.get_pool(cr.dbname).get(datas['model'])
datas['ids'] = ids
model = pooler.get_pool(cr.dbname).get(datas['model'])
result = model.fields_view_get(cr, uid, view_type='tree', context=context)
fields_order = self._parse_string(result['arch'])
rows = model.read(cr, uid, datas['ids'], result['fields'].keys(), context )
res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model_desc)
return (self.obj.get(), 'pdf')
result = model.fields_view_get(cr, uid, view_type='tree', context=context)
fields_order = self._parse_string(result['arch'])
rows = model.read(cr, uid, datas['ids'], result['fields'].keys(), context )
res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model_desc)
return (self.obj.get(), 'pdf')
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
pageSize=[297.0, 210.0]
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
pageSize=[297.0, 210.0]
impl = minidom.getDOMImplementation()
new_doc = impl.createDocument(None, "report", None)
impl = minidom.getDOMImplementation()
new_doc = impl.createDocument(None, "report", None)
# build header
config = new_doc.createElement("config")
# build header
config = new_doc.createElement("config")
def _append_node(name, text):
n = new_doc.createElement(name)
t = new_doc.createTextNode(text)
n.appendChild(t)
config.appendChild(n)
def _append_node(name, text):
n = new_doc.createElement(name)
t = new_doc.createTextNode(text)
n.appendChild(t)
config.appendChild(n)
_append_node('date', time.strftime('%d/%m/%Y'))
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
_append_node('date', time.strftime('%d/%m/%Y'))
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
_append_node('report-header', title)
l = []
t = 0
rowcount=0;
strmax = (pageSize[0]-40) * 2.8346
temp = []
count = len(fields_order)
for i in range(0,count):
temp.append(0)
_append_node('report-header', title)
l = []
t = 0
rowcount=0;
strmax = (pageSize[0]-40) * 2.8346
temp = []
count = len(fields_order)
for i in range(0,count):
temp.append(0)
ince = -1;
for f in fields_order:
s = 0
ince += 1
if fields[f]['type'] in ('date','time','float','integer'):
s = 60
strmax -= s
if fields[f]['type'] in ('float','integer'):
temp[ince]=1;
else:
t += fields[f].get('size', 80) / 28 + 1
ince = -1;
for f in fields_order:
s = 0
ince += 1
if fields[f]['type'] in ('date','time','float','integer'):
s = 60
strmax -= s
if fields[f]['type'] in ('float','integer'):
temp[ince]=1;
else:
t += fields[f].get('size', 80) / 28 + 1
l.append(s)
l.append(s)
for pos in range(len(l)):
if not l[pos]:
s = fields[fields_order[pos]].get('size', 80) / 28 + 1
l[pos] = strmax * s / t
for pos in range(len(l)):
if not l[pos]:
s = fields[fields_order[pos]].get('size', 80) / 28 + 1
l[pos] = strmax * s / t
_append_node('tableSize', ','.join(map(str,l)) )
new_doc.childNodes[0].appendChild(config)
header = new_doc.createElement("header")
_append_node('tableSize', ','.join(map(str,l)) )
new_doc.childNodes[0].appendChild(config)
header = new_doc.createElement("header")
for f in fields_order:
field = new_doc.createElement("field")
field_txt = new_doc.createTextNode(str(fields[f]['string'] or ''))
field.appendChild(field_txt)
header.appendChild(field)
for f in fields_order:
field = new_doc.createElement("field")
field_txt = new_doc.createTextNode(str(fields[f]['string'] or ''))
field.appendChild(field_txt)
header.appendChild(field)
new_doc.childNodes[0].appendChild(header)
new_doc.childNodes[0].appendChild(header)
lines = new_doc.createElement("lines")
lines = new_doc.createElement("lines")
tsum = []
count = len(fields_order)
for i in range(0,count):
tsum.append(0)
tsum = []
count = len(fields_order)
for i in range(0,count):
tsum.append(0)
for line in results:
node_line = new_doc.createElement("row")
for line in results:
node_line = new_doc.createElement("row")
count = -1
for f in fields_order:
count += 1
if fields[f]['type']=='many2one' and line[f]:
line[f] = line[f][1]
if fields[f]['type'] in ('one2many','many2many') and line[f]:
line[f] = '( '+str(len(line[f])) + ' )'
col = new_doc.createElement("col")
col.setAttribute('para','yes')
col.setAttribute('tree','no')
if line[f] != None:
txt = new_doc.createTextNode(str(line[f] or ''))
if temp[count] == 1:
tsum[count] = tsum[count] + line[f];
count = -1
for f in fields_order:
count += 1
if fields[f]['type']=='many2one' and line[f]:
line[f] = line[f][1]
if fields[f]['type'] in ('one2many','many2many') and line[f]:
line[f] = '( '+str(len(line[f])) + ' )'
col = new_doc.createElement("col")
col.setAttribute('para','yes')
col.setAttribute('tree','no')
if line[f] != None:
txt = new_doc.createTextNode(str(line[f] or ''))
if temp[count] == 1:
tsum[count] = tsum[count] + line[f];
else:
txt = new_doc.createTextNode('/')
col.appendChild(txt)
node_line.appendChild(col)
lines.appendChild(node_line)
node_line = new_doc.createElement("row")
lines.appendChild(node_line)
node_line = new_doc.createElement("row")
for f in range(0,count+1):
col = new_doc.createElement("col")
col.setAttribute('para','yes')
col.setAttribute('tree','no')
if tsum[f] != None:
txt = new_doc.createTextNode(str(tsum[f] or ''))
else:
txt = new_doc.createTextNode('/')
if f == 0:
txt = new_doc.createTextNode('Total')
else:
txt = new_doc.createTextNode('/')
col.appendChild(txt)
node_line.appendChild(col)
lines.appendChild(node_line)
node_line = new_doc.createElement("row")
lines.appendChild(node_line)
node_line = new_doc.createElement("row")
for f in range(0,count+1):
col = new_doc.createElement("col")
col.setAttribute('para','yes')
col.setAttribute('tree','no')
if tsum[f] != None:
txt = new_doc.createTextNode(str(tsum[f] or ''))
else:
txt = new_doc.createTextNode('/')
if f == 0:
txt = new_doc.createTextNode('Total')
col.appendChild(txt)
node_line.appendChild(col)
lines.appendChild(node_line)
col.appendChild(txt)
node_line.appendChild(col)
lines.appendChild(node_line)
new_doc.childNodes[0].appendChild(lines)
new_doc.childNodes[0].appendChild(lines)
styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
style = libxslt.parseStylesheetDoc(styledoc)
doc = libxml2.parseDoc(new_doc.toxml())
rml_obj = style.applyStylesheet(doc, None)
rml = style.saveResultToString(rml_obj)
self.obj = render.rml(rml)
self.obj.render()
return True
styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
style = libxslt.parseStylesheetDoc(styledoc)
doc = libxml2.parseDoc(new_doc.toxml())
rml_obj = style.applyStylesheet(doc, None)
rml = style.saveResultToString(rml_obj)
self.obj = render.rml(rml)
self.obj.render()
return True
report_printscreen_list('report.printscreen.list')

View File

@ -33,6 +33,6 @@ from rml import rml, rml2html
from render import render
try:
import Image
import Image
except:
print 'WARNING; Python Imaging not installed, you can use only .JPG pictures !'
print 'WARNING; Python Imaging not installed, you can use only .JPG pictures !'

View File

@ -44,38 +44,38 @@ import threading
# _render
#
class render(object):
def __init__(self, bin_datas={}, path='.'):
self.done = False
self.bin_datas = bin_datas
self.path = path
def __init__(self, bin_datas={}, path='.'):
self.done = False
self.bin_datas = bin_datas
self.path = path
def _render(self):
return None
def _render(self):
return None
def render(self):
self.done = False
result = self._render()
self._result = result
self.done = True
return True
def render(self):
self.done = False
result = self._render()
self._result = result
self.done = True
return True
def is_done(self):
res = self.done
return res
def is_done(self):
res = self.done
return res
def get(self):
if self.is_done():
return self._result
else:
return None
def get(self):
if self.is_done():
return self._result
else:
return None
if __name__=='__main__':
import time
print 'Multi-thread code !'
r = render()
r.render()
while not r.is_done():
print 'not yet!'
time.sleep(1)
print 'done!'
import time
print 'Multi-thread code !'
r = render()
r.render()
while not r.is_done():
print 'not yet!'
time.sleep(1)
print 'done!'

View File

@ -32,20 +32,20 @@ import rml2pdf
import rml2html as htmlizer
class rml(render.render):
def __init__(self, xml, datas={}, path='.'):
render.render.__init__(self, datas)
self.xml = xml
self.output_type = 'pdf'
self.path = path
def __init__(self, xml, datas={}, path='.'):
render.render.__init__(self, datas)
self.xml = xml
self.output_type = 'pdf'
self.path = path
def _render(self):
return rml2pdf.parseString(self.xml, images=self.bin_datas, path=self.path)
def _render(self):
return rml2pdf.parseString(self.xml, images=self.bin_datas, path=self.path)
class rml2html(render.render):
def __init__(self, xml, datas={}):
super(rml2html, self).__init__(datas)
self.xml = xml
self.output_type = 'html'
def __init__(self, xml, datas={}):
super(rml2html, self).__init__(datas)
self.xml = xml
self.output_type = 'html'
def _render(self):
return htmlizer.parseString(self.xml)
def _render(self):
return htmlizer.parseString(self.xml)

578
bin/report/render/rml2html/rml2html.py Executable file → Normal file
View File

@ -52,341 +52,341 @@ import copy
import utils
class _flowable(object):
def __init__(self, template, doc):
self._tags = {
'title': self._tag_title,
'spacer': self._tag_spacer,
'para': self._tag_para,
'nextFrame': self._tag_next_frame,
'blockTable': self._tag_table,
'pageBreak': self._tag_page_break,
'setNextTemplate': self._tag_next_template,
}
self.template = template
self.doc = doc
def __init__(self, template, doc):
self._tags = {
'title': self._tag_title,
'spacer': self._tag_spacer,
'para': self._tag_para,
'nextFrame': self._tag_next_frame,
'blockTable': self._tag_table,
'pageBreak': self._tag_page_break,
'setNextTemplate': self._tag_next_template,
}
self.template = template
self.doc = doc
def _tag_page_break(self, node):
return '<br/>'*3
def _tag_page_break(self, node):
return '<br/>'*3
def _tag_next_template(self, node):
return ''
def _tag_next_template(self, node):
return ''
def _tag_next_frame(self, node):
result=self.template.frame_stop()
result+='<br/>'
result+=self.template.frame_start()
return result
def _tag_next_frame(self, node):
result=self.template.frame_stop()
result+='<br/>'
result+=self.template.frame_start()
return result
def _tag_title(self, node):
node.tagName='h1'
return node.toxml()
def _tag_title(self, node):
node.tagName='h1'
return node.toxml()
def _tag_spacer(self, node):
length = 1+int(utils.unit_get(node.getAttribute('length')))/35
return "<br/>"*length
def _tag_spacer(self, node):
length = 1+int(utils.unit_get(node.getAttribute('length')))/35
return "<br/>"*length
def _tag_table(self, node):
node.tagName='table'
if node.hasAttribute('colWidths'):
sizes = map(lambda x: utils.unit_get(x), node.getAttribute('colWidths').split(','))
tr = self.doc.createElement('tr')
for s in sizes:
td = self.doc.createElement('td')
td.setAttribute("width", str(s))
tr.appendChild(td)
node.appendChild(tr)
return node.toxml()
def _tag_table(self, node):
node.tagName='table'
if node.hasAttribute('colWidths'):
sizes = map(lambda x: utils.unit_get(x), node.getAttribute('colWidths').split(','))
tr = self.doc.createElement('tr')
for s in sizes:
td = self.doc.createElement('td')
td.setAttribute("width", str(s))
tr.appendChild(td)
node.appendChild(tr)
return node.toxml()
def _tag_para(self, node):
node.tagName='p'
if node.hasAttribute('style'):
node.setAttribute('class', node.getAttribute('style'))
return node.toxml()
def _tag_para(self, node):
node.tagName='p'
if node.hasAttribute('style'):
node.setAttribute('class', node.getAttribute('style'))
return node.toxml()
def render(self, node):
result = self.template.start()
result += self.template.frame_start()
for n in node.childNodes:
if n.nodeType==node.ELEMENT_NODE:
if n.localName in self._tags:
result += self._tags[n.localName](n)
else:
pass
#print 'tag', n.localName, 'not yet implemented!'
result += self.template.frame_stop()
result += self.template.end()
return result
def render(self, node):
result = self.template.start()
result += self.template.frame_start()
for n in node.childNodes:
if n.nodeType==node.ELEMENT_NODE:
if n.localName in self._tags:
result += self._tags[n.localName](n)
else:
pass
#print 'tag', n.localName, 'not yet implemented!'
result += self.template.frame_stop()
result += self.template.end()
return result
class _rml_tmpl_tag(object):
def __init__(self, *args):
pass
def tag_start(self):
return ''
def tag_end(self):
return False
def tag_stop(self):
return ''
def tag_mergeable(self):
return True
def __init__(self, *args):
pass
def tag_start(self):
return ''
def tag_end(self):
return False
def tag_stop(self):
return ''
def tag_mergeable(self):
return True
class _rml_tmpl_frame(_rml_tmpl_tag):
def __init__(self, posx, width):
self.width = width
self.posx = posx
def tag_start(self):
return '<table border="0" width="%d"><tr><td width="%d">&nbsp;</td><td>' % (self.width+self.posx,self.posx)
def tag_end(self):
return True
def tag_stop(self):
return '</td></tr></table><br/>'
def tag_mergeable(self):
return False
def __init__(self, posx, width):
self.width = width
self.posx = posx
def tag_start(self):
return '<table border="0" width="%d"><tr><td width="%d">&nbsp;</td><td>' % (self.width+self.posx,self.posx)
def tag_end(self):
return True
def tag_stop(self):
return '</td></tr></table><br/>'
def tag_mergeable(self):
return False
# An awfull workaround since I don't really understand the semantic behind merge.
def merge(self, frame):
pass
# An awfull workaround since I don't really understand the semantic behind merge.
def merge(self, frame):
pass
class _rml_tmpl_draw_string(_rml_tmpl_tag):
def __init__(self, node, style):
self.posx = utils.unit_get(node.getAttribute('x'))
self.posy = utils.unit_get(node.getAttribute('y'))
aligns = {
'drawString': 'left',
'drawRightString': 'right',
'drawCentredString': 'center'
}
align = aligns[node.localName]
self.pos = [(self.posx, self.posy, align, utils.text_get(node), style.get('td'), style.font_size_get('td'))]
def __init__(self, node, style):
self.posx = utils.unit_get(node.getAttribute('x'))
self.posy = utils.unit_get(node.getAttribute('y'))
aligns = {
'drawString': 'left',
'drawRightString': 'right',
'drawCentredString': 'center'
}
align = aligns[node.localName]
self.pos = [(self.posx, self.posy, align, utils.text_get(node), style.get('td'), style.font_size_get('td'))]
def tag_start(self):
self.pos.sort()
res = '<table border="0" cellpadding="0" cellspacing="0"><tr>'
posx = 0
i = 0
for (x,y,align,txt, style, fs) in self.pos:
if align=="left":
pos2 = len(txt)*fs
res+='<td width="%d"></td><td style="%s" width="%d">%s</td>' % (x - posx, style, pos2, txt)
posx = x+pos2
if align=="right":
res+='<td width="%d" align="right" style="%s">%s</td>' % (x - posx, style, txt)
posx = x
if align=="center":
res+='<td width="%d" align="center" style="%s">%s</td>' % ((x - posx)*2, style, txt)
posx = 2*x-posx
i+=1
res+='</tr></table>'
return res
def merge(self, ds):
self.pos+=ds.pos
def tag_start(self):
self.pos.sort()
res = '<table border="0" cellpadding="0" cellspacing="0"><tr>'
posx = 0
i = 0
for (x,y,align,txt, style, fs) in self.pos:
if align=="left":
pos2 = len(txt)*fs
res+='<td width="%d"></td><td style="%s" width="%d">%s</td>' % (x - posx, style, pos2, txt)
posx = x+pos2
if align=="right":
res+='<td width="%d" align="right" style="%s">%s</td>' % (x - posx, style, txt)
posx = x
if align=="center":
res+='<td width="%d" align="center" style="%s">%s</td>' % ((x - posx)*2, style, txt)
posx = 2*x-posx
i+=1
res+='</tr></table>'
return res
def merge(self, ds):
self.pos+=ds.pos
class _rml_tmpl_draw_lines(_rml_tmpl_tag):
def __init__(self, node, style):
coord = [utils.unit_get(x) for x in utils.text_get(node).split(' ')]
self.ok = False
self.posx = coord[0]
self.posy = coord[1]
self.width = coord[2]-coord[0]
self.ok = coord[1]==coord[3]
self.style = style
self.style = style.get('hr')
def __init__(self, node, style):
coord = [utils.unit_get(x) for x in utils.text_get(node).split(' ')]
self.ok = False
self.posx = coord[0]
self.posy = coord[1]
self.width = coord[2]-coord[0]
self.ok = coord[1]==coord[3]
self.style = style
self.style = style.get('hr')
def tag_start(self):
if self.ok:
return '<table border="0" cellpadding="0" cellspacing="0" width="%d"><tr><td width="%d"></td><td><hr width="100%%" style="margin:0px; %s"></td></tr></table>' % (self.posx+self.width,self.posx,self.style)
else:
return ''
def tag_start(self):
if self.ok:
return '<table border="0" cellpadding="0" cellspacing="0" width="%d"><tr><td width="%d"></td><td><hr width="100%%" style="margin:0px; %s"></td></tr></table>' % (self.posx+self.width,self.posx,self.style)
else:
return ''
class _rml_stylesheet(object):
def __init__(self, stylesheet, doc):
self.doc = doc
self.attrs = {}
self._tags = {
'fontSize': lambda x: ('font-size',str(utils.unit_get(x))+'px'),
'alignment': lambda x: ('text-align',str(x))
}
result = ''
for ps in stylesheet.getElementsByTagName('paraStyle'):
attr = {}
attrs = ps.attributes
for i in range(attrs.length):
name = attrs.item(i).localName
attr[name] = ps.getAttribute(name)
attrs = []
for a in attr:
if a in self._tags:
attrs.append("%s:%s" % self._tags[a](attr[a]))
if len(attrs):
result += "p."+attr['name']+" {"+'; '.join(attrs)+"}\n"
self.result = result
def __init__(self, stylesheet, doc):
self.doc = doc
self.attrs = {}
self._tags = {
'fontSize': lambda x: ('font-size',str(utils.unit_get(x))+'px'),
'alignment': lambda x: ('text-align',str(x))
}
result = ''
for ps in stylesheet.getElementsByTagName('paraStyle'):
attr = {}
attrs = ps.attributes
for i in range(attrs.length):
name = attrs.item(i).localName
attr[name] = ps.getAttribute(name)
attrs = []
for a in attr:
if a in self._tags:
attrs.append("%s:%s" % self._tags[a](attr[a]))
if len(attrs):
result += "p."+attr['name']+" {"+'; '.join(attrs)+"}\n"
self.result = result
def render(self):
return self.result
def render(self):
return self.result
class _rml_draw_style(object):
def __init__(self):
self.style = {}
self._styles = {
'fill': lambda x: {'td': {'color':x.getAttribute('color')}},
'setFont': lambda x: {'td': {'font-size':x.getAttribute('size')+'px'}},
'stroke': lambda x: {'hr': {'color':x.getAttribute('color')}},
}
def update(self, node):
if node.localName in self._styles:
result = self._styles[node.localName](node)
for key in result:
if key in self.style:
self.style[key].update(result[key])
else:
self.style[key] = result[key]
def font_size_get(self,tag):
size = utils.unit_get(self.style.get('td', {}).get('font-size','16'))
return size
def __init__(self):
self.style = {}
self._styles = {
'fill': lambda x: {'td': {'color':x.getAttribute('color')}},
'setFont': lambda x: {'td': {'font-size':x.getAttribute('size')+'px'}},
'stroke': lambda x: {'hr': {'color':x.getAttribute('color')}},
}
def update(self, node):
if node.localName in self._styles:
result = self._styles[node.localName](node)
for key in result:
if key in self.style:
self.style[key].update(result[key])
else:
self.style[key] = result[key]
def font_size_get(self,tag):
size = utils.unit_get(self.style.get('td', {}).get('font-size','16'))
return size
def get(self,tag):
if not tag in self.style:
return ""
return ';'.join(['%s:%s' % (x[0],x[1]) for x in self.style[tag].items()])
def get(self,tag):
if not tag in self.style:
return ""
return ';'.join(['%s:%s' % (x[0],x[1]) for x in self.style[tag].items()])
class _rml_template(object):
def __init__(self, template):
self.frame_pos = -1
self.frames = []
self.template_order = []
self.page_template = {}
self.loop = 0
self._tags = {
'drawString': _rml_tmpl_draw_string,
'drawRightString': _rml_tmpl_draw_string,
'drawCentredString': _rml_tmpl_draw_string,
'lines': _rml_tmpl_draw_lines
}
self.style = _rml_draw_style()
for pt in template.getElementsByTagName('pageTemplate'):
frames = {}
id = pt.getAttribute('id')
self.template_order.append(id)
for tmpl in pt.getElementsByTagName('frame'):
posy = int(utils.unit_get(tmpl.getAttribute('y1'))) #+utils.unit_get(tmpl.getAttribute('height')))
posx = int(utils.unit_get(tmpl.getAttribute('x1')))
frames[(posy,posx,tmpl.getAttribute('id'))] = _rml_tmpl_frame(posx, utils.unit_get(tmpl.getAttribute('width')))
for tmpl in template.getElementsByTagName('pageGraphics'):
for n in tmpl.childNodes:
if n.nodeType==n.ELEMENT_NODE:
if n.localName in self._tags:
t = self._tags[n.localName](n, self.style)
frames[(t.posy,t.posx,n.localName)] = t
else:
self.style.update(n)
keys = frames.keys()
keys.sort()
keys.reverse()
self.page_template[id] = []
for key in range(len(keys)):
if key>0 and keys[key-1][0] == keys[key][0]:
if type(self.page_template[id][-1]) == type(frames[keys[key]]):
if self.page_template[id][-1].tag_mergeable():
self.page_template[id][-1].merge(frames[keys[key]])
continue
self.page_template[id].append(frames[keys[key]])
self.template = self.template_order[0]
def __init__(self, template):
self.frame_pos = -1
self.frames = []
self.template_order = []
self.page_template = {}
self.loop = 0
self._tags = {
'drawString': _rml_tmpl_draw_string,
'drawRightString': _rml_tmpl_draw_string,
'drawCentredString': _rml_tmpl_draw_string,
'lines': _rml_tmpl_draw_lines
}
self.style = _rml_draw_style()
for pt in template.getElementsByTagName('pageTemplate'):
frames = {}
id = pt.getAttribute('id')
self.template_order.append(id)
for tmpl in pt.getElementsByTagName('frame'):
posy = int(utils.unit_get(tmpl.getAttribute('y1'))) #+utils.unit_get(tmpl.getAttribute('height')))
posx = int(utils.unit_get(tmpl.getAttribute('x1')))
frames[(posy,posx,tmpl.getAttribute('id'))] = _rml_tmpl_frame(posx, utils.unit_get(tmpl.getAttribute('width')))
for tmpl in template.getElementsByTagName('pageGraphics'):
for n in tmpl.childNodes:
if n.nodeType==n.ELEMENT_NODE:
if n.localName in self._tags:
t = self._tags[n.localName](n, self.style)
frames[(t.posy,t.posx,n.localName)] = t
else:
self.style.update(n)
keys = frames.keys()
keys.sort()
keys.reverse()
self.page_template[id] = []
for key in range(len(keys)):
if key>0 and keys[key-1][0] == keys[key][0]:
if type(self.page_template[id][-1]) == type(frames[keys[key]]):
if self.page_template[id][-1].tag_mergeable():
self.page_template[id][-1].merge(frames[keys[key]])
continue
self.page_template[id].append(frames[keys[key]])
self.template = self.template_order[0]
def _get_style(self):
return self.style
def _get_style(self):
return self.style
def set_next_template(self):
self.template = self.template_order[(self.template_order.index(name)+1) % self.template_order]
self.frame_pos = -1
def set_next_template(self):
self.template = self.template_order[(self.template_order.index(name)+1) % self.template_order]
self.frame_pos = -1
def set_template(self, name):
self.template = name
self.frame_pos = -1
def set_template(self, name):
self.template = name
self.frame_pos = -1
def frame_start(self):
result = ''
frames = self.page_template[self.template]
ok = True
while ok:
self.frame_pos += 1
if self.frame_pos>=len(frames):
self.frame_pos=0
self.loop=1
ok = False
continue
f = frames[self.frame_pos]
result+=f.tag_start()
ok = not f.tag_end()
if ok:
result+=f.tag_stop()
return result
def frame_start(self):
result = ''
frames = self.page_template[self.template]
ok = True
while ok:
self.frame_pos += 1
if self.frame_pos>=len(frames):
self.frame_pos=0
self.loop=1
ok = False
continue
f = frames[self.frame_pos]
result+=f.tag_start()
ok = not f.tag_end()
if ok:
result+=f.tag_stop()
return result
def frame_stop(self):
frames = self.page_template[self.template]
f = frames[self.frame_pos]
result=f.tag_stop()
return result
def frame_stop(self):
frames = self.page_template[self.template]
f = frames[self.frame_pos]
result=f.tag_stop()
return result
def start(self):
return ''
def start(self):
return ''
def end(self):
result = ''
while not self.loop:
result += self.frame_start()
result += self.frame_stop()
return result
def end(self):
result = ''
while not self.loop:
result += self.frame_start()
result += self.frame_stop()
return result
class _rml_doc(object):
def __init__(self, data):
self.dom = xml.dom.minidom.parseString(data)
self.filename = self.dom.documentElement.getAttribute('filename')
self.result = ''
def __init__(self, data):
self.dom = xml.dom.minidom.parseString(data)
self.filename = self.dom.documentElement.getAttribute('filename')
self.result = ''
def render(self, out):
self.result += '''<!DOCTYPE HTML PUBLIC "-//w3c//DTD HTML 4.0 Frameset//EN">
def render(self, out):
self.result += '''<!DOCTYPE HTML PUBLIC "-//w3c//DTD HTML 4.0 Frameset//EN">
<html>
<head>
<style type="text/css">
p {margin:0px; font-size:12px;}
td {font-size:14px;}
<style type="text/css">
p {margin:0px; font-size:12px;}
td {font-size:14px;}
'''
style = self.dom.documentElement.getElementsByTagName('stylesheet')[0]
s = _rml_stylesheet(style, self.dom)
self.result += s.render()
self.result+='''
</style>
style = self.dom.documentElement.getElementsByTagName('stylesheet')[0]
s = _rml_stylesheet(style, self.dom)
self.result += s.render()
self.result+='''
</style>
</head>
<body>'''
template = _rml_template(self.dom.documentElement.getElementsByTagName('template')[0])
f = _flowable(template, self.dom)
self.result += f.render(self.dom.documentElement.getElementsByTagName('story')[0])
del f
self.result += '</body></html>'
out.write( self.result)
template = _rml_template(self.dom.documentElement.getElementsByTagName('template')[0])
f = _flowable(template, self.dom)
self.result += f.render(self.dom.documentElement.getElementsByTagName('story')[0])
del f
self.result += '</body></html>'
out.write( self.result)
def parseString(data, fout=None):
r = _rml_doc(data)
if fout:
fp = file(fout,'wb')
r.render(fp)
fp.close()
return fout
else:
fp = StringIO.StringIO()
r.render(fp)
return fp.getvalue()
r = _rml_doc(data)
if fout:
fp = file(fout,'wb')
r.render(fp)
fp.close()
return fout
else:
fp = StringIO.StringIO()
r.render(fp)
return fp.getvalue()
def trml2pdf_help():
print 'Usage: rml2html input.rml >output.html'
print 'Render the standard input (RML) and output an HTML file'
sys.exit(0)
print 'Usage: rml2html input.rml >output.html'
print 'Render the standard input (RML) and output an HTML file'
sys.exit(0)
if __name__=="__main__":
if len(sys.argv)>1:
if sys.argv[1]=='--help':
trml2pdf_help()
print parseString(file(sys.argv[1], 'r').read()),
else:
print 'Usage: trml2pdf input.rml >output.pdf'
print 'Try \'trml2pdf --help\' for more information.'
if len(sys.argv)>1:
if sys.argv[1]=='--help':
trml2pdf_help()
print parseString(file(sys.argv[1], 'r').read()),
else:
print 'Usage: trml2pdf input.rml >output.pdf'
print 'Try \'trml2pdf --help\' for more information.'

View File

@ -47,48 +47,48 @@ import reportlab
import reportlab.lib.units
def text_get(node):
rc = ''
for node in node.childNodes:
if node.nodeType == node.TEXT_NODE:
rc = rc + node.data
return rc
rc = ''
for node in node.childNodes:
if node.nodeType == node.TEXT_NODE:
rc = rc + node.data
return rc
units = [
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
(re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
(re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
(re.compile('^(-?[0-9\.]+)\s*px$'), 0.7),
(re.compile('^(-?[0-9\.]+)\s*$'), 1)
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
(re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
(re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
(re.compile('^(-?[0-9\.]+)\s*px$'), 0.7),
(re.compile('^(-?[0-9\.]+)\s*$'), 1)
]
def unit_get(size):
global units
for unit in units:
res = unit[0].search(size, 0)
if res:
return int(unit[1]*float(res.group(1))*1.3)
return False
global units
for unit in units:
res = unit[0].search(size, 0)
if res:
return int(unit[1]*float(res.group(1))*1.3)
return False
def tuple_int_get(node, attr_name, default=None):
if not node.hasAttribute(attr_name):
return default
res = [int(x) for x in node.getAttribute(attr_name).split(',')]
return res
if not node.hasAttribute(attr_name):
return default
res = [int(x) for x in node.getAttribute(attr_name).split(',')]
return res
def bool_get(value):
return (str(value)=="1") or (value.lower()=='yes')
return (str(value)=="1") or (value.lower()=='yes')
def attr_get(node, attrs, dict={}):
res = {}
for name in attrs:
if node.hasAttribute(name):
res[name] = unit_get(node.getAttribute(name))
for key in dict:
if node.hasAttribute(key):
if dict[key]=='str':
res[key] = str(node.getAttribute(key))
elif dict[key]=='bool':
res[key] = bool_get(node.getAttribute(key))
elif dict[key]=='int':
res[key] = int(node.getAttribute(key))
return res
res = {}
for name in attrs:
if node.hasAttribute(name):
res[name] = unit_get(node.getAttribute(name))
for key in dict:
if node.hasAttribute(key):
if dict[key]=='str':
res[key] = str(node.getAttribute(key))
elif dict[key]=='bool':
res[key] = bool_get(node.getAttribute(key))
elif dict[key]=='int':
res[key] = int(node.getAttribute(key))
return res

View File

@ -51,13 +51,13 @@ regex_t = re.compile('\(([0-9\.]*),([0-9\.]*),([0-9\.]*)\)')
regex_h = re.compile('#([0-9a-zA-Z][0-9a-zA-Z])([0-9a-zA-Z][0-9a-zA-Z])([0-9a-zA-Z][0-9a-zA-Z])')
def get(col_str):
global allcols
if col_str in allcols.keys():
return allcols[col_str]
res = regex_t.search(col_str, 0)
if res:
return (float(res.group(1)),float(res.group(2)),float(res.group(3)))
res = regex_h.search(col_str, 0)
if res:
return tuple([ float(int(res.group(i),16))/255 for i in range(1,4)])
return colors.red
global allcols
if col_str in allcols.keys():
return allcols[col_str]
res = regex_t.search(col_str, 0)
if res:
return (float(res.group(1)),float(res.group(2)),float(res.group(3)))
res = regex_h.search(col_str, 0)
if res:
return tuple([ float(int(res.group(i),16))/255 for i in range(1,4)])
return colors.red

1334
bin/report/render/rml2pdf/trml2pdf.py Executable file → Normal file

File diff suppressed because it is too large Load Diff

View File

@ -48,47 +48,47 @@ import re
import reportlab
def text_get(node):
rc = ''
for node in node.childNodes:
if node.nodeType == node.TEXT_NODE:
rc = rc + node.data
return rc
rc = ''
for node in node.childNodes:
if node.nodeType == node.TEXT_NODE:
rc = rc + node.data
return rc
units = [
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
(re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
(re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
(re.compile('^(-?[0-9\.]+)\s*$'), 1)
(re.compile('^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
(re.compile('^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
(re.compile('^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
(re.compile('^(-?[0-9\.]+)\s*$'), 1)
]
def unit_get(size):
global units
for unit in units:
res = unit[0].search(size, 0)
if res:
return unit[1]*float(res.group(1))
return False
global units
for unit in units:
res = unit[0].search(size, 0)
if res:
return unit[1]*float(res.group(1))
return False
def tuple_int_get(node, attr_name, default=None):
if not node.hasAttribute(attr_name):
return default
res = [int(x) for x in node.getAttribute(attr_name).split(',')]
return res
if not node.hasAttribute(attr_name):
return default
res = [int(x) for x in node.getAttribute(attr_name).split(',')]
return res
def bool_get(value):
return (str(value)=="1") or (value.lower()=='yes')
return (str(value)=="1") or (value.lower()=='yes')
def attr_get(node, attrs, dict={}):
res = {}
for name in attrs:
if node.hasAttribute(name):
res[name] = unit_get(node.getAttribute(name))
for key in dict:
if node.hasAttribute(key):
if dict[key]=='str':
res[key] = str(node.getAttribute(key))
elif dict[key]=='bool':
res[key] = bool_get(node.getAttribute(key))
elif dict[key]=='int':
res[key] = int(node.getAttribute(key))
return res
res = {}
for name in attrs:
if node.hasAttribute(name):
res[name] = unit_get(node.getAttribute(name))
for key in dict:
if node.hasAttribute(key):
if dict[key]=='str':
res[key] = str(node.getAttribute(key))
elif dict[key]=='bool':
res[key] = bool_get(node.getAttribute(key))
elif dict[key]=='int':
res[key] = int(node.getAttribute(key))
return res

View File

@ -40,52 +40,52 @@ import reportlab.lib
import copy
class simple(render.render):
def _render(self):
self.result = StringIO()
parser = xml.dom.minidom.parseString(self.xml)
def _render(self):
self.result = StringIO()
parser = xml.dom.minidom.parseString(self.xml)
title = parser.documentElement.tagName
doc = SimpleDocTemplate(self.result, pagesize=A4, title=title,
author='Tiny ERP, Fabien Pinckaers', leftmargin=10*mm, rightmargin=10*mm)
title = parser.documentElement.tagName
doc = SimpleDocTemplate(self.result, pagesize=A4, title=title,
author='Tiny ERP, Fabien Pinckaers', leftmargin=10*mm, rightmargin=10*mm)
styles = reportlab.lib.styles.getSampleStyleSheet()
title_style = copy.deepcopy(styles["Heading1"])
title_style.alignment = reportlab.lib.enums.TA_CENTER
story = [ Paragraph(title, title_style) ]
style_level = {}
nodes = [ (parser.documentElement,0) ]
while len(nodes):
node = nodes.pop(0)
value = ''
n=len(node[0].childNodes)-1
while n>=0:
if node[0].childNodes[n].nodeType==3:
value += node[0].childNodes[n].nodeValue
else:
nodes.insert( 0, (node[0].childNodes[n], node[1]+1) )
n-=1
if not node[1] in style_level:
style = copy.deepcopy(styles["Normal"])
style.leftIndent=node[1]*6*mm
style.firstLineIndent=-3*mm
style_level[node[1]] = style
story.append( Paragraph('<b>%s</b>: %s' % (node[0].tagName, value), style_level[node[1]]))
doc.build(story)
return self.result.getvalue()
styles = reportlab.lib.styles.getSampleStyleSheet()
title_style = copy.deepcopy(styles["Heading1"])
title_style.alignment = reportlab.lib.enums.TA_CENTER
story = [ Paragraph(title, title_style) ]
style_level = {}
nodes = [ (parser.documentElement,0) ]
while len(nodes):
node = nodes.pop(0)
value = ''
n=len(node[0].childNodes)-1
while n>=0:
if node[0].childNodes[n].nodeType==3:
value += node[0].childNodes[n].nodeValue
else:
nodes.insert( 0, (node[0].childNodes[n], node[1]+1) )
n-=1
if not node[1] in style_level:
style = copy.deepcopy(styles["Normal"])
style.leftIndent=node[1]*6*mm
style.firstLineIndent=-3*mm
style_level[node[1]] = style
story.append( Paragraph('<b>%s</b>: %s' % (node[0].tagName, value), style_level[node[1]]))
doc.build(story)
return self.result.getvalue()
if __name__=='__main__':
import time
s = simple('''<test>
<author-list>
<author>
<name>Fabien Pinckaers</name>
<age>23</age>
</author>
<author>
<name>Michel Pinckaers</name>
<age>53</age>
</author>
No other
</author-list>
</test>''')
print s.render()
import time
s = simple('''<test>
<author-list>
<author>
<name>Fabien Pinckaers</name>
<age>23</age>
</author>
<author>
<name>Michel Pinckaers</name>
<age>53</age>
</author>
No other
</author-list>
</test>''')
print s.render()

File diff suppressed because it is too large Load Diff

View File

@ -32,44 +32,44 @@ import tools
_uid_cache = {}
def login(db, login, password):
cr = pooler.get_db(db).cursor()
cr.execute('select id from res_users where login=%s and password=%s and active', (login.encode('utf-8'), password.encode('utf-8')))
res = cr.fetchone()
cr.close()
if res:
return res[0]
else:
return False
cr = pooler.get_db(db).cursor()
cr.execute('select id from res_users where login=%s and password=%s and active', (login.encode('utf-8'), password.encode('utf-8')))
res = cr.fetchone()
cr.close()
if res:
return res[0]
else:
return False
def check_super(passwd):
if passwd == tools.config['admin_passwd']:
return True
else:
raise Exception('AccessDenied')
if passwd == tools.config['admin_passwd']:
return True
else:
raise Exception('AccessDenied')
def check(db, uid, passwd):
if _uid_cache.get(db, {}).get(uid) == passwd:
return True
if _uid_cache.get(db, {}).get(uid) == passwd:
return True
cr = pooler.get_db(db).cursor()
cr.execute('select count(*) from res_users where id=%d and password=%s', (int(uid), passwd))
res = cr.fetchone()[0]
cr.close()
if not bool(res):
raise Exception('AccessDenied')
if res:
if _uid_cache.has_key(db):
ulist = _uid_cache[db]
ulist[uid] = passwd
else:
_uid_cache[db] = {uid:passwd}
return bool(res)
cr = pooler.get_db(db).cursor()
cr.execute('select count(*) from res_users where id=%d and password=%s', (int(uid), passwd))
res = cr.fetchone()[0]
cr.close()
if not bool(res):
raise Exception('AccessDenied')
if res:
if _uid_cache.has_key(db):
ulist = _uid_cache[db]
ulist[uid] = passwd
else:
_uid_cache[db] = {uid:passwd}
return bool(res)
def access(db, uid, passwd, sec_level, ids):
cr = pooler.get_db(db).cursor()
cr.execute('select id from res_users where id=%s and password=%s', (uid, passwd))
res = cr.fetchone()
cr.close()
if not res:
raise Exception('Bad username or password')
return res[0]
cr = pooler.get_db(db).cursor()
cr.execute('select id from res_users where id=%s and password=%s', (uid, passwd))
res = cr.fetchone()
cr.close()
if not res:
raise Exception('Bad username or password')
return res[0]

View File

@ -45,292 +45,292 @@ from tools.translate import _
logging.basicConfig()
class db(netsvc.Service):
def __init__(self, name="db"):
netsvc.Service.__init__(self, name)
self.joinGroup("web-services")
self.exportMethod(self.create)
self.exportMethod(self.get_progress)
self.exportMethod(self.drop)
self.exportMethod(self.dump)
self.exportMethod(self.restore)
self.exportMethod(self.list)
self.exportMethod(self.list_lang)
self.exportMethod(self.change_admin_password)
self.actions = {}
self.id = 0
self.id_protect = threading.Semaphore()
def __init__(self, name="db"):
netsvc.Service.__init__(self, name)
self.joinGroup("web-services")
self.exportMethod(self.create)
self.exportMethod(self.get_progress)
self.exportMethod(self.drop)
self.exportMethod(self.dump)
self.exportMethod(self.restore)
self.exportMethod(self.list)
self.exportMethod(self.list_lang)
self.exportMethod(self.change_admin_password)
self.actions = {}
self.id = 0
self.id_protect = threading.Semaphore()
def create(self, password, db_name, demo, lang):
security.check_super(password)
self.id_protect.acquire()
self.id += 1
id = self.id
self.id_protect.release()
def create(self, password, db_name, demo, lang):
security.check_super(password)
self.id_protect.acquire()
self.id += 1
id = self.id
self.id_protect.release()
self.actions[id] = {'clean': False}
self.actions[id] = {'clean': False}
db = sql_db.db_connect('template1', serialize=1)
db.truedb.autocommit()
cr = db.cursor()
cr.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
cr.close()
class DBInitialize(object):
def __call__(self, serv, id, db_name, demo, lang):
try:
serv.actions[id]['progress'] = 0
clean = False
cr = sql_db.db_connect(db_name).cursor()
tools.init_db(cr)
cr.commit()
cr.close()
cr = None
pool = pooler.get_pool(db_name, demo,serv.actions[id],
update_module=True)
if lang and lang != 'en_US':
filename = tools.config["root_path"] + "/i18n/" + lang + ".csv"
tools.trans_load(db_name, filename, lang)
serv.actions[id]['clean'] = True
cr = sql_db.db_connect(db_name).cursor()
cr.execute('select login, password, name ' \
'from res_users ' \
'where login <> \'root\' order by login')
serv.actions[id]['users'] = cr.dictfetchall()
cr.close()
except Exception, e:
serv.actions[id]['clean'] = False
serv.actions[id]['exception'] = e
from StringIO import StringIO
import traceback
e_str = StringIO()
traceback.print_exc(file=e_str)
traceback_str = e_str.getvalue()
e_str.close()
print traceback_str
serv.actions[id]['traceback'] = traceback_str
if cr:
cr.close()
logger = netsvc.Logger()
logger.notifyChannel("web-services", netsvc.LOG_INFO,
'CREATE DB: %s' % (db_name))
dbi = DBInitialize()
create_thread = threading.Thread(target=dbi,
args=(self, id, db_name, demo, lang))
create_thread.start()
self.actions[id]['thread'] = create_thread
return id
db = sql_db.db_connect('template1', serialize=1)
db.truedb.autocommit()
cr = db.cursor()
cr.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
cr.close()
class DBInitialize(object):
def __call__(self, serv, id, db_name, demo, lang):
try:
serv.actions[id]['progress'] = 0
clean = False
cr = sql_db.db_connect(db_name).cursor()
tools.init_db(cr)
cr.commit()
cr.close()
cr = None
pool = pooler.get_pool(db_name, demo,serv.actions[id],
update_module=True)
if lang and lang != 'en_US':
filename = tools.config["root_path"] + "/i18n/" + lang + ".csv"
tools.trans_load(db_name, filename, lang)
serv.actions[id]['clean'] = True
cr = sql_db.db_connect(db_name).cursor()
cr.execute('select login, password, name ' \
'from res_users ' \
'where login <> \'root\' order by login')
serv.actions[id]['users'] = cr.dictfetchall()
cr.close()
except Exception, e:
serv.actions[id]['clean'] = False
serv.actions[id]['exception'] = e
from StringIO import StringIO
import traceback
e_str = StringIO()
traceback.print_exc(file=e_str)
traceback_str = e_str.getvalue()
e_str.close()
print traceback_str
serv.actions[id]['traceback'] = traceback_str
if cr:
cr.close()
logger = netsvc.Logger()
logger.notifyChannel("web-services", netsvc.LOG_INFO,
'CREATE DB: %s' % (db_name))
dbi = DBInitialize()
create_thread = threading.Thread(target=dbi,
args=(self, id, db_name, demo, lang))
create_thread.start()
self.actions[id]['thread'] = create_thread
return id
def get_progress(self, password, id):
security.check_super(password)
if self.actions[id]['thread'].isAlive():
# return addons.init_progress[db_name]
return (min(self.actions[id].get('progress', 0),0.95), [])
else:
clean = self.actions[id]['clean']
if clean:
users = self.actions[id]['users']
del self.actions[id]
return (1.0, users)
else:
e = self.actions[id]['exception']
del self.actions[id]
raise Exception, e
def get_progress(self, password, id):
security.check_super(password)
if self.actions[id]['thread'].isAlive():
# return addons.init_progress[db_name]
return (min(self.actions[id].get('progress', 0),0.95), [])
else:
clean = self.actions[id]['clean']
if clean:
users = self.actions[id]['users']
del self.actions[id]
return (1.0, users)
else:
e = self.actions[id]['exception']
del self.actions[id]
raise Exception, e
def drop(self, password, db_name):
security.check_super(password)
pooler.close_db(db_name)
logger = netsvc.Logger()
def drop(self, password, db_name):
security.check_super(password)
pooler.close_db(db_name)
logger = netsvc.Logger()
db = sql_db.db_connect('template1', serialize=1)
db.truedb.autocommit()
cr = db.cursor()
try:
try:
cr.execute('DROP DATABASE ' + db_name)
except:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'DROP DB: %s failed' % (db_name,))
raise
else:
logger.notifyChannel("web-services", netsvc.LOG_INFO,
'DROP DB: %s' % (db_name))
finally:
cr.close()
return True
db = sql_db.db_connect('template1', serialize=1)
db.truedb.autocommit()
cr = db.cursor()
try:
try:
cr.execute('DROP DATABASE ' + db_name)
except:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'DROP DB: %s failed' % (db_name,))
raise
else:
logger.notifyChannel("web-services", netsvc.LOG_INFO,
'DROP DB: %s' % (db_name))
finally:
cr.close()
return True
def dump(self, password, db_name):
security.check_super(password)
logger = netsvc.Logger()
def dump(self, password, db_name):
security.check_super(password)
logger = netsvc.Logger()
if tools.config['db_password']:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'DUMP DB: %s doesn\'t work with password' % (db_name,))
raise Exception, "Couldn't dump database with password"
if tools.config['db_password']:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'DUMP DB: %s doesn\'t work with password' % (db_name,))
raise Exception, "Couldn't dump database with password"
cmd = ['pg_dump', '--format=c']
if tools.config['db_user']:
cmd.append('--username=' + tools.config['db_user'])
if tools.config['db_host']:
cmd.append('--host=' + tools.config['db_host'])
if tools.config['db_port']:
cmd.append('--port=' + tools.config['db_port'])
cmd.append(db_name)
cmd = ['pg_dump', '--format=c']
if tools.config['db_user']:
cmd.append('--username=' + tools.config['db_user'])
if tools.config['db_host']:
cmd.append('--host=' + tools.config['db_host'])
if tools.config['db_port']:
cmd.append('--port=' + tools.config['db_port'])
cmd.append(db_name)
stdin, stdout = tools.exec_pg_command_pipe(*tuple(cmd))
stdin.close()
data = stdout.read()
res = stdout.close()
if res:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'DUMP DB: %s failed\n%s' % (db_name, data))
raise Exception, "Couldn't dump database"
logger.notifyChannel("web-services", netsvc.LOG_INFO,
'DUMP DB: %s' % (db_name))
return base64.encodestring(data)
stdin, stdout = tools.exec_pg_command_pipe(*tuple(cmd))
stdin.close()
data = stdout.read()
res = stdout.close()
if res:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'DUMP DB: %s failed\n%s' % (db_name, data))
raise Exception, "Couldn't dump database"
logger.notifyChannel("web-services", netsvc.LOG_INFO,
'DUMP DB: %s' % (db_name))
return base64.encodestring(data)
def restore(self, password, db_name, data):
security.check_super(password)
logger = netsvc.Logger()
def restore(self, password, db_name, data):
security.check_super(password)
logger = netsvc.Logger()
if self.db_exist(db_name):
logger.notifyChannel("web-service", netsvc.LOG_WARNING,
'RESTORE DB: %s already exists' % (db_name,))
raise Exception, "Database already exists"
if self.db_exist(db_name):
logger.notifyChannel("web-service", netsvc.LOG_WARNING,
'RESTORE DB: %s already exists' % (db_name,))
raise Exception, "Database already exists"
if tools.config['db_password']:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'RESTORE DB: %s doesn\'t work with password' % (db_name,))
raise Exception, "Couldn't restore database with password"
if tools.config['db_password']:
logger.notifyChannel("web-service", netsvc.LOG_ERROR,
'RESTORE DB: %s doesn\'t work with password' % (db_name,))
raise Exception, "Couldn't restore database with password"
db = sql_db.db_connect('template1', serialize=1)
db.truedb.autocommit()
cr = db.cursor()
cr.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
cr.close()
db = sql_db.db_connect('template1', serialize=1)
db.truedb.autocommit()
cr = db.cursor()
cr.execute('CREATE DATABASE ' + db_name + ' ENCODING \'unicode\'')
cr.close()
cmd = ['pg_restore']
if tools.config['db_user']:
cmd.append('--username=' + tools.config['db_user'])
if tools.config['db_host']:
cmd.append('--host=' + tools.config['db_host'])
if tools.config['db_port']:
cmd.append('--port=' + tools.config['db_port'])
cmd.append('--dbname=' + db_name)
args2 = tuple(cmd)
cmd = ['pg_restore']
if tools.config['db_user']:
cmd.append('--username=' + tools.config['db_user'])
if tools.config['db_host']:
cmd.append('--host=' + tools.config['db_host'])
if tools.config['db_port']:
cmd.append('--port=' + tools.config['db_port'])
cmd.append('--dbname=' + db_name)
args2 = tuple(cmd)
buf=base64.decodestring(data)
if os.name == "nt":
tmpfile = (os.environ['TMP'] or 'C:\\') + os.tmpnam()
file(tmpfile, 'wb').write(buf)
args2=list(args2)
args2.append(' ' + tmpfile)
args2=tuple(args2)
stdin, stdout = tools.exec_pg_command_pipe(*args2)
if not os.name == "nt":
stdin.write(base64.decodestring(data))
stdin.close()
res = stdout.close()
if res:
raise Exception, "Couldn't restore database"
logger.notifyChannel("web-services", netsvc.LOG_INFO,
'RESTORE DB: %s' % (db_name))
return True
buf=base64.decodestring(data)
if os.name == "nt":
tmpfile = (os.environ['TMP'] or 'C:\\') + os.tmpnam()
file(tmpfile, 'wb').write(buf)
args2=list(args2)
args2.append(' ' + tmpfile)
args2=tuple(args2)
stdin, stdout = tools.exec_pg_command_pipe(*args2)
if not os.name == "nt":
stdin.write(base64.decodestring(data))
stdin.close()
res = stdout.close()
if res:
raise Exception, "Couldn't restore database"
logger.notifyChannel("web-services", netsvc.LOG_INFO,
'RESTORE DB: %s' % (db_name))
return True
def db_exist(self, db_name):
try:
db = sql_db.db_connect(db_name)
db.truedb.close()
return True
except:
return False
def db_exist(self, db_name):
try:
db = sql_db.db_connect(db_name)
db.truedb.close()
return True
except:
return False
def list(self):
db = sql_db.db_connect('template1')
try:
cr = db.cursor()
db_user = tools.config["db_user"]
if not db_user and os.name == 'posix':
import pwd
db_user = pwd.getpwuid(os.getuid())[0]
if not db_user:
cr.execute("select usename from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
res = cr.fetchone()
db_user = res and res[0]
if db_user:
cr.execute("select datname from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in ('template0', 'template1', 'postgres')", (db_user,))
else:
cr.execute("select datname from pg_database where datname not in('template0', 'template1','postgres')")
res = [name for (name,) in cr.fetchall()]
cr.close()
except:
res = []
db.truedb.close()
return res
def list(self):
db = sql_db.db_connect('template1')
try:
cr = db.cursor()
db_user = tools.config["db_user"]
if not db_user and os.name == 'posix':
import pwd
db_user = pwd.getpwuid(os.getuid())[0]
if not db_user:
cr.execute("select usename from pg_user where usesysid=(select datdba from pg_database where datname=%s)", (tools.config["db_name"],))
res = cr.fetchone()
db_user = res and res[0]
if db_user:
cr.execute("select datname from pg_database where datdba=(select usesysid from pg_user where usename=%s) and datname not in ('template0', 'template1', 'postgres')", (db_user,))
else:
cr.execute("select datname from pg_database where datname not in('template0', 'template1','postgres')")
res = [name for (name,) in cr.fetchall()]
cr.close()
except:
res = []
db.truedb.close()
return res
def change_admin_password(self, old_password, new_password):
security.check_super(old_password)
tools.config['admin_passwd'] = new_password
tools.config.save()
return True
def change_admin_password(self, old_password, new_password):
security.check_super(old_password)
tools.config['admin_passwd'] = new_password
tools.config.save()
return True
def list_lang(self):
return tools.scan_languages()
import glob
file_list = glob.glob(os.path.join(tools.config['root_path'], 'i18n', '*.csv'))
def lang_tuple(fname):
lang_dict=tools.get_languages()
lang = os.path.basename(fname).split(".")[0]
return (lang, lang_dict.get(lang, lang))
return [lang_tuple(fname) for fname in file_list]
def list_lang(self):
return tools.scan_languages()
import glob
file_list = glob.glob(os.path.join(tools.config['root_path'], 'i18n', '*.csv'))
def lang_tuple(fname):
lang_dict=tools.get_languages()
lang = os.path.basename(fname).split(".")[0]
return (lang, lang_dict.get(lang, lang))
return [lang_tuple(fname) for fname in file_list]
db()
class common(netsvc.Service):
def __init__(self,name="common"):
netsvc.Service.__init__(self,name)
self.joinGroup("web-services")
self.exportMethod(self.ir_get)
self.exportMethod(self.ir_set)
self.exportMethod(self.ir_del)
self.exportMethod(self.about)
self.exportMethod(self.login)
self.exportMethod(self.timezone_get)
def __init__(self,name="common"):
netsvc.Service.__init__(self,name)
self.joinGroup("web-services")
self.exportMethod(self.ir_get)
self.exportMethod(self.ir_set)
self.exportMethod(self.ir_del)
self.exportMethod(self.about)
self.exportMethod(self.login)
self.exportMethod(self.timezone_get)
def ir_set(self, db, uid, password, keys, args, name, value, replace=True, isobject=False):
security.check(db, uid, password)
cr = pooler.get_db(db).cursor()
res = ir.ir_set(cr,uid, keys, args, name, value, replace, isobject)
cr.commit()
cr.close()
return res
def ir_set(self, db, uid, password, keys, args, name, value, replace=True, isobject=False):
security.check(db, uid, password)
cr = pooler.get_db(db).cursor()
res = ir.ir_set(cr,uid, keys, args, name, value, replace, isobject)
cr.commit()
cr.close()
return res
def ir_del(self, db, uid, password, id):
security.check(db, uid, password)
cr = pooler.get_db(db).cursor()
res = ir.ir_del(cr,uid, id)
cr.commit()
cr.close()
return res
def ir_del(self, db, uid, password, id):
security.check(db, uid, password)
cr = pooler.get_db(db).cursor()
res = ir.ir_del(cr,uid, id)
cr.commit()
cr.close()
return res
def ir_get(self, db, uid, password, keys, args=None, meta=None, context=None):
if not args:
args=[]
if not context:
context={}
security.check(db, uid, password)
cr = pooler.get_db(db).cursor()
res = ir.ir_get(cr,uid, keys, args, meta, context)
cr.commit()
cr.close()
return res
def ir_get(self, db, uid, password, keys, args=None, meta=None, context=None):
if not args:
args=[]
if not context:
context={}
security.check(db, uid, password)
cr = pooler.get_db(db).cursor()
res = ir.ir_get(cr,uid, keys, args, meta, context)
cr.commit()
cr.close()
return res
def login(self, db, login, password):
res = security.login(db, login, password)
logger = netsvc.Logger()
msg = res and 'successful login' or 'bad login or password'
logger.notifyChannel("web-service", netsvc.LOG_INFO, "%s from '%s' using database '%s'" % (msg, login, db))
return res or False
def login(self, db, login, password):
res = security.login(db, login, password)
logger = netsvc.Logger()
msg = res and 'successful login' or 'bad login or password'
logger.notifyChannel("web-service", netsvc.LOG_INFO, "%s from '%s' using database '%s'" % (msg, login, db))
return res or False
def about(self):
return _('''
def about(self):
return _('''
OpenERP is an ERP+CRM program for small and medium businesses.
@ -339,35 +339,35 @@ GNU Public Licence.
(c) 2003-TODAY, Fabien Pinckaers - Tiny sprl''')
def timezone_get(self, db, login, password):
return time.tzname[0]
def timezone_get(self, db, login, password):
return time.tzname[0]
common()
class objects_proxy(netsvc.Service):
def __init__(self, name="object"):
netsvc.Service.__init__(self,name)
self.joinGroup('web-services')
self.exportMethod(self.execute)
self.exportMethod(self.exec_workflow)
self.exportMethod(self.obj_list)
def __init__(self, name="object"):
netsvc.Service.__init__(self,name)
self.joinGroup('web-services')
self.exportMethod(self.execute)
self.exportMethod(self.exec_workflow)
self.exportMethod(self.obj_list)
def exec_workflow(self, db, uid, passwd, object, method, id):
security.check(db, uid, passwd)
service = netsvc.LocalService("object_proxy")
res = service.exec_workflow(db, uid, object, method, id)
return res
def exec_workflow(self, db, uid, passwd, object, method, id):
security.check(db, uid, passwd)
service = netsvc.LocalService("object_proxy")
res = service.exec_workflow(db, uid, object, method, id)
return res
def execute(self, db, uid, passwd, object, method, *args):
security.check(db, uid, passwd)
service = netsvc.LocalService("object_proxy")
res = service.execute(db, uid, object, method, *args)
return res
def execute(self, db, uid, passwd, object, method, *args):
security.check(db, uid, passwd)
service = netsvc.LocalService("object_proxy")
res = service.execute(db, uid, object, method, *args)
return res
def obj_list(self, db, uid, passwd):
security.check(db, uid, passwd)
service = netsvc.LocalService("object_proxy")
res = service.obj_list()
return res
def obj_list(self, db, uid, passwd):
security.check(db, uid, passwd)
service = netsvc.LocalService("object_proxy")
res = service.obj_list()
return res
objects_proxy()
@ -383,44 +383,44 @@ objects_proxy()
# TODO: change local request to OSE request/reply pattern
#
class wizard(netsvc.Service):
def __init__(self, name='wizard'):
netsvc.Service.__init__(self,name)
self.joinGroup('web-services')
self.exportMethod(self.execute)
self.exportMethod(self.create)
self.id = 0
self.wiz_datas = {}
self.wiz_name = {}
self.wiz_uid = {}
def __init__(self, name='wizard'):
netsvc.Service.__init__(self,name)
self.joinGroup('web-services')
self.exportMethod(self.execute)
self.exportMethod(self.create)
self.id = 0
self.wiz_datas = {}
self.wiz_name = {}
self.wiz_uid = {}
def _execute(self, db, uid, wiz_id, datas, action, context):
self.wiz_datas[wiz_id].update(datas)
wiz = netsvc.LocalService('wizard.'+self.wiz_name[wiz_id])
return wiz.execute(db, uid, self.wiz_datas[wiz_id], action, context)
def _execute(self, db, uid, wiz_id, datas, action, context):
self.wiz_datas[wiz_id].update(datas)
wiz = netsvc.LocalService('wizard.'+self.wiz_name[wiz_id])
return wiz.execute(db, uid, self.wiz_datas[wiz_id], action, context)
def create(self, db, uid, passwd, wiz_name, datas=None):
if not datas:
datas={}
security.check(db, uid, passwd)
def create(self, db, uid, passwd, wiz_name, datas=None):
if not datas:
datas={}
security.check(db, uid, passwd)
#FIXME: this is not thread-safe
self.id += 1
self.wiz_datas[self.id] = {}
self.wiz_name[self.id] = wiz_name
self.wiz_uid[self.id] = uid
return self.id
self.id += 1
self.wiz_datas[self.id] = {}
self.wiz_name[self.id] = wiz_name
self.wiz_uid[self.id] = uid
return self.id
def execute(self, db, uid, passwd, wiz_id, datas, action='init', context=None):
if not context:
context={}
security.check(db, uid, passwd)
def execute(self, db, uid, passwd, wiz_id, datas, action='init', context=None):
if not context:
context={}
security.check(db, uid, passwd)
if wiz_id in self.wiz_uid:
if self.wiz_uid[wiz_id] == uid:
return self._execute(db, uid, wiz_id, datas, action, context)
else:
raise Exception, 'AccessDenied'
else:
raise Exception, 'WizardNotFound'
if wiz_id in self.wiz_uid:
if self.wiz_uid[wiz_id] == uid:
return self._execute(db, uid, wiz_id, datas, action, context)
else:
raise Exception, 'AccessDenied'
else:
raise Exception, 'WizardNotFound'
wizard()
#
@ -430,85 +430,85 @@ wizard()
# False -> True
#
class report_spool(netsvc.Service):
def __init__(self, name='report'):
netsvc.Service.__init__(self, name)
self.joinGroup('web-services')
self.exportMethod(self.report)
self.exportMethod(self.report_get)
self._reports = {}
self.id = 0
self.id_protect = threading.Semaphore()
def __init__(self, name='report'):
netsvc.Service.__init__(self, name)
self.joinGroup('web-services')
self.exportMethod(self.report)
self.exportMethod(self.report_get)
self._reports = {}
self.id = 0
self.id_protect = threading.Semaphore()
def report(self, db, uid, passwd, object, ids, datas=None, context=None):
if not datas:
datas={}
if not context:
context={}
security.check(db, uid, passwd)
def report(self, db, uid, passwd, object, ids, datas=None, context=None):
if not datas:
datas={}
if not context:
context={}
security.check(db, uid, passwd)
self.id_protect.acquire()
self.id += 1
id = self.id
self.id_protect.release()
self.id_protect.acquire()
self.id += 1
id = self.id
self.id_protect.release()
self._reports[id] = {'uid': uid, 'result': False, 'state': False, 'exception': None}
self._reports[id] = {'uid': uid, 'result': False, 'state': False, 'exception': None}
def go(id, uid, ids, datas, context):
try:
cr = pooler.get_db(db).cursor()
obj = netsvc.LocalService('report.'+object)
(result, format) = obj.create(cr, uid, ids, datas, context)
cr.close()
self._reports[id]['result'] = result
self._reports[id]['format'] = format
self._reports[id]['state'] = True
except Exception, exception:
import traceback
import sys
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
sys.exc_type, sys.exc_value, sys.exc_traceback))
logger = netsvc.Logger()
logger.notifyChannel('web-service', netsvc.LOG_ERROR,
'Exception: %s\n%s' % (str(exception), tb_s))
self._reports[id]['exception'] = exception
self._reports[id]['state'] = True
return True
def go(id, uid, ids, datas, context):
try:
cr = pooler.get_db(db).cursor()
obj = netsvc.LocalService('report.'+object)
(result, format) = obj.create(cr, uid, ids, datas, context)
cr.close()
self._reports[id]['result'] = result
self._reports[id]['format'] = format
self._reports[id]['state'] = True
except Exception, exception:
import traceback
import sys
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
sys.exc_type, sys.exc_value, sys.exc_traceback))
logger = netsvc.Logger()
logger.notifyChannel('web-service', netsvc.LOG_ERROR,
'Exception: %s\n%s' % (str(exception), tb_s))
self._reports[id]['exception'] = exception
self._reports[id]['state'] = True
return True
thread.start_new_thread(go, (id, uid, ids, datas, context))
return id
thread.start_new_thread(go, (id, uid, ids, datas, context))
return id
def _check_report(self, report_id):
result = self._reports[report_id]
if result['exception']:
raise result['exception']
res = {'state': result['state']}
if res['state']:
if tools.config['reportgz']:
import zlib
res2 = zlib.compress(result['result'])
res['code'] = 'zlib'
else:
#CHECKME: why is this needed???
if isinstance(result['result'], unicode):
res2 = result['result'].encode('latin1', 'replace')
else:
res2 = result['result']
if res2:
res['result'] = base64.encodestring(res2)
res['format'] = result['format']
del self._reports[report_id]
return res
def _check_report(self, report_id):
result = self._reports[report_id]
if result['exception']:
raise result['exception']
res = {'state': result['state']}
if res['state']:
if tools.config['reportgz']:
import zlib
res2 = zlib.compress(result['result'])
res['code'] = 'zlib'
else:
#CHECKME: why is this needed???
if isinstance(result['result'], unicode):
res2 = result['result'].encode('latin1', 'replace')
else:
res2 = result['result']
if res2:
res['result'] = base64.encodestring(res2)
res['format'] = result['format']
del self._reports[report_id]
return res
def report_get(self, db, uid, passwd, report_id):
security.check(db, uid, passwd)
def report_get(self, db, uid, passwd, report_id):
security.check(db, uid, passwd)
if report_id in self._reports:
if self._reports[report_id]['uid'] == uid:
return self._check_report(report_id)
else:
raise Exception, 'AccessDenied'
else:
raise Exception, 'ReportNotFound'
if report_id in self._reports:
if self._reports[report_id]['uid'] == uid:
return self._check_report(report_id)
else:
raise Exception, 'AccessDenied'
else:
raise Exception, 'ReportNotFound'
report_spool()

View File

@ -31,9 +31,9 @@ import tools
import sys,os
#try:
# import decimal
# import decimal
#except ImportError:
# from tools import decimal
# from tools import decimal
import re
@ -43,112 +43,112 @@ re_from = re.compile('.* from "?([a-zA-Z_0-9]+)"? .*$');
re_into = re.compile('.* into "?([a-zA-Z_0-9]+)"? .*$');
class fake_cursor:
nbr = 0
_tables = {}
sql_from_log = {}
sql_into_log = {}
sql_log = False
count = 0
nbr = 0
_tables = {}
sql_from_log = {}
sql_into_log = {}
sql_log = False
count = 0
def __init__(self, db, con, dbname):
self.db = db
self.obj = db.cursor()
self.con = con
self.dbname = dbname
def __init__(self, db, con, dbname):
self.db = db
self.obj = db.cursor()
self.con = con
self.dbname = dbname
def execute(self, sql, params=None):
if not params:
params=()
def base_string(s):
if isinstance(s, unicode):
return s.encode('utf-8')
return s
p=map(base_string, params)
if isinstance(sql, unicode):
sql = sql.encode('utf-8')
if self.sql_log:
now = mdt.now()
if p:
res = self.obj.execute(sql, p)
else:
res = self.obj.execute(sql)
if self.sql_log:
print "SQL LOG query:", sql
print "SQL LOG params:", repr(p)
self.count+=1
res_from = re_from.match(sql.lower())
if res_from:
self.sql_from_log.setdefault(res_from.group(1), [0, 0])
self.sql_from_log[res_from.group(1)][0] += 1
self.sql_from_log[res_from.group(1)][1] += mdt.now() - now
res_into = re_into.match(sql.lower())
if res_into:
self.sql_into_log.setdefault(res_into.group(1), [0, 0])
self.sql_into_log[res_into.group(1)][0] += 1
self.sql_into_log[res_into.group(1)][1] += mdt.now() - now
return res
def execute(self, sql, params=None):
if not params:
params=()
def base_string(s):
if isinstance(s, unicode):
return s.encode('utf-8')
return s
p=map(base_string, params)
if isinstance(sql, unicode):
sql = sql.encode('utf-8')
if self.sql_log:
now = mdt.now()
if p:
res = self.obj.execute(sql, p)
else:
res = self.obj.execute(sql)
if self.sql_log:
print "SQL LOG query:", sql
print "SQL LOG params:", repr(p)
self.count+=1
res_from = re_from.match(sql.lower())
if res_from:
self.sql_from_log.setdefault(res_from.group(1), [0, 0])
self.sql_from_log[res_from.group(1)][0] += 1
self.sql_from_log[res_from.group(1)][1] += mdt.now() - now
res_into = re_into.match(sql.lower())
if res_into:
self.sql_into_log.setdefault(res_into.group(1), [0, 0])
self.sql_into_log[res_into.group(1)][0] += 1
self.sql_into_log[res_into.group(1)][1] += mdt.now() - now
return res
def print_log(self, type='from'):
print "SQL LOG %s:" % (type,)
if type == 'from':
logs = self.sql_from_log.items()
else:
logs = self.sql_into_log.items()
logs.sort(lambda x, y: cmp(x[1][1], y[1][1]))
sum=0
for r in logs:
print "table:", r[0], ":", str(r[1][1]), "/", r[1][0]
sum+= r[1][1]
print "SUM:%s/%d"% (sum, self.count)
def print_log(self, type='from'):
print "SQL LOG %s:" % (type,)
if type == 'from':
logs = self.sql_from_log.items()
else:
logs = self.sql_into_log.items()
logs.sort(lambda x, y: cmp(x[1][1], y[1][1]))
sum=0
for r in logs:
print "table:", r[0], ":", str(r[1][1]), "/", r[1][0]
sum+= r[1][1]
print "SUM:%s/%d"% (sum, self.count)
def close(self):
if self.sql_log:
self.print_log('from')
self.print_log('into')
self.obj.close()
def close(self):
if self.sql_log:
self.print_log('from')
self.print_log('into')
self.obj.close()
# This force the cursor to be freed, and thus, available again. It is
# important because otherwise we can overload the server very easily
# because of a cursor shortage (because cursors are not garbage
# collected as fast as they should). The problem is probably due in
# part because browse records keep a reference to the cursor.
del self.obj
# This force the cursor to be freed, and thus, available again. It is
# important because otherwise we can overload the server very easily
# because of a cursor shortage (because cursors are not garbage
# collected as fast as they should). The problem is probably due in
# part because browse records keep a reference to the cursor.
del self.obj
def __getattr__(self, name):
return getattr(self.obj, name)
def __getattr__(self, name):
return getattr(self.obj, name)
class fakedb:
def __init__(self, truedb, dbname):
self.truedb = truedb
self.dbname = dbname
def __init__(self, truedb, dbname):
self.truedb = truedb
self.dbname = dbname
def cursor(self):
return fake_cursor(self.truedb, {}, self.dbname)
def cursor(self):
return fake_cursor(self.truedb, {}, self.dbname)
def decimalize(symb):
if symb is None: return None
if isinstance(symb, float):
return decimal.Decimal('%f' % symb)
return decimal.Decimal(symb)
if symb is None: return None
if isinstance(symb, float):
return decimal.Decimal('%f' % symb)
return decimal.Decimal(symb)
def db_connect(db_name, serialize=0):
host = tools.config['db_host'] and "host=%s" % tools.config['db_host'] or ''
port = tools.config['db_port'] and "port=%s" % tools.config['db_port'] or ''
name = "dbname=%s" % db_name
user = tools.config['db_user'] and "user=%s" % tools.config['db_user'] or ''
password = tools.config['db_password'] and "password=%s" % tools.config['db_password'] or ''
maxconn = int(tools.config['db_maxconn']) or 64
tdb = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password),
serialize=serialize, maxconn=maxconn)
fdb = fakedb(tdb, db_name)
return fdb
host = tools.config['db_host'] and "host=%s" % tools.config['db_host'] or ''
port = tools.config['db_port'] and "port=%s" % tools.config['db_port'] or ''
name = "dbname=%s" % db_name
user = tools.config['db_user'] and "user=%s" % tools.config['db_user'] or ''
password = tools.config['db_password'] and "password=%s" % tools.config['db_password'] or ''
maxconn = int(tools.config['db_maxconn']) or 64
tdb = psycopg.connect('%s %s %s %s %s' % (host, port, name, user, password),
serialize=serialize, maxconn=maxconn)
fdb = fakedb(tdb, db_name)
return fdb
def init():
#define DATEOID 1082, define TIMESTAMPOID 1114 see pgtypes.h
psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))
psycopg.register_type(psycopg.new_type((1114,), "datetime", lambda x:x))
#psycopg.register_type(psycopg.new_type((700, 701, 1700), 'decimal', decimalize))
#define DATEOID 1082, define TIMESTAMPOID 1114 see pgtypes.h
psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))
psycopg.register_type(psycopg.new_type((1114,), "datetime", lambda x:x))
#psycopg.register_type(psycopg.new_type((700, 701, 1700), 'decimal', decimalize))
psycopg.register_type(psycopg.new_type((1082,), "date", lambda x:x))
psycopg.register_type(psycopg.new_type((1083,), "time", lambda x:x))

View File

@ -30,61 +30,61 @@ import cPickle
import marshal
class Myexception(Exception):
def __init__(self, faultCode, faultString):
self.faultCode = faultCode
self.faultString = faultString
self.args = (faultCode, faultString)
def __init__(self, faultCode, faultString):
self.faultCode = faultCode
self.faultString = faultString
self.args = (faultCode, faultString)
class mysocket:
def __init__(self, sock=None):
if sock is None:
self.sock = socket.socket(
socket.AF_INET, socket.SOCK_STREAM)
else:
self.sock = sock
self.sock.settimeout(120)
def connect(self, host, port=False):
if not port:
protocol, buf = host.split('//')
host, port = buf.split(':')
self.sock.connect((host, int(port)))
def disconnect(self):
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
def mysend(self, msg, exception=False, traceback=None):
msg = cPickle.dumps([msg,traceback])
size = len(msg)
self.sock.send('%8d' % size)
self.sock.send(exception and "1" or "0")
totalsent = 0
while totalsent < size:
sent = self.sock.send(msg[totalsent:])
if sent == 0:
raise RuntimeError, "socket connection broken"
totalsent = totalsent + sent
def myreceive(self):
buf=''
while len(buf) < 8:
chunk = self.sock.recv(8 - len(buf))
if chunk == '':
raise RuntimeError, "socket connection broken"
buf += chunk
size = int(buf)
buf = self.sock.recv(1)
if buf != "0":
exception = buf
else:
exception = False
msg = ''
while len(msg) < size:
chunk = self.sock.recv(size-len(msg))
if chunk == '':
raise RuntimeError, "socket connection broken"
msg = msg + chunk
res = cPickle.loads(msg)
if isinstance(res[0],Exception):
if exception:
raise Myexception(str(res[0]), str(res[1]))
raise res[0]
else:
return res[0]
def __init__(self, sock=None):
if sock is None:
self.sock = socket.socket(
socket.AF_INET, socket.SOCK_STREAM)
else:
self.sock = sock
self.sock.settimeout(120)
def connect(self, host, port=False):
if not port:
protocol, buf = host.split('//')
host, port = buf.split(':')
self.sock.connect((host, int(port)))
def disconnect(self):
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
def mysend(self, msg, exception=False, traceback=None):
msg = cPickle.dumps([msg,traceback])
size = len(msg)
self.sock.send('%8d' % size)
self.sock.send(exception and "1" or "0")
totalsent = 0
while totalsent < size:
sent = self.sock.send(msg[totalsent:])
if sent == 0:
raise RuntimeError, "socket connection broken"
totalsent = totalsent + sent
def myreceive(self):
buf=''
while len(buf) < 8:
chunk = self.sock.recv(8 - len(buf))
if chunk == '':
raise RuntimeError, "socket connection broken"
buf += chunk
size = int(buf)
buf = self.sock.recv(1)
if buf != "0":
exception = buf
else:
exception = False
msg = ''
while len(msg) < size:
chunk = self.sock.recv(size-len(msg))
if chunk == '':
raise RuntimeError, "socket connection broken"
msg = msg + chunk
res = cPickle.loads(msg)
if isinstance(res[0],Exception):
if exception:
raise Myexception(str(res[0]), str(res[1]))
raise res[0]
else:
return res[0]

180
bin/tinyerp-server.py Executable file → Normal file
View File

@ -75,8 +75,8 @@ import tools
import time
if sys.platform=='win32':
import mx.DateTime
mx.DateTime.strptime = lambda x,y: mx.DateTime.mktime(time.strptime(x, y))
import mx.DateTime
mx.DateTime.strptime = lambda x,y: mx.DateTime.mktime(time.strptime(x, y))
#os.chdir(tools.file_path_root)
@ -98,18 +98,18 @@ import pooler
# try to connect to the database
try:
# pooler.init()
pass
# pooler.init()
pass
except psycopg.OperationalError, err:
logger.notifyChannel("init", netsvc.LOG_ERROR, "could not connect to database '%s'!" % (tools.config["db_name"],))
logger.notifyChannel("init", netsvc.LOG_ERROR, "could not connect to database '%s'!" % (tools.config["db_name"],))
msg = str(err).replace("FATAL:","").strip()
db_msg = "database \"%s\" does not exist" % (tools.config["db_name"],)
msg = str(err).replace("FATAL:","").strip()
db_msg = "database \"%s\" does not exist" % (tools.config["db_name"],)
# Note: this is ugly but since psycopg only uses one exception for all errors
# I don't think it's possible to do differently
if msg == db_msg:
print """
# Note: this is ugly but since psycopg only uses one exception for all errors
# I don't think it's possible to do differently
if msg == db_msg:
print """
this database does not exist
You need to create it using the command:
@ -126,42 +126,42 @@ Two accounts will be created by default:
2. login: demo password : demo
""" % (tools.config["db_name"])
else:
print "\n "+msg+"\n"
sys.exit(1)
else:
print "\n "+msg+"\n"
sys.exit(1)
db_name = tools.config["db_name"]
# test whether it is needed to initialize the db (the db is empty)
try:
cr = pooler.get_db_only(db_name).cursor()
cr = pooler.get_db_only(db_name).cursor()
except psycopg.OperationalError:
logger.notifyChannel("init", netsvc.LOG_INFO, "could not connect to database '%s'!" % db_name,)
cr = None
logger.notifyChannel("init", netsvc.LOG_INFO, "could not connect to database '%s'!" % db_name,)
cr = None
if cr:
cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname='ir_ui_menu'")
if len(cr.fetchall())==0:
cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname='ir_ui_menu'")
if len(cr.fetchall())==0:
#if False:
logger.notifyChannel("init", netsvc.LOG_INFO, "init db")
tools.init_db(cr)
# in that case, force --init=all
tools.config["init"]["all"] = 1
tools.config['update']['all'] = 1
if not tools.config['without_demo']:
tools.config["demo"]['all'] = 1
cr.close()
logger.notifyChannel("init", netsvc.LOG_INFO, "init db")
tools.init_db(cr)
# in that case, force --init=all
tools.config["init"]["all"] = 1
tools.config['update']['all'] = 1
if not tools.config['without_demo']:
tools.config["demo"]['all'] = 1
cr.close()
#----------------------------------------------------------
# launch modules install/upgrade/removes if needed
#----------------------------------------------------------
if tools.config['upgrade']:
print 'Upgrading new modules...'
import tools.upgrade
(toinit, toupdate) = tools.upgrade.upgrade()
for m in toinit:
tools.config['init'][m] = 1
for m in toupdate:
tools.config['update'][m] = 1
print 'Upgrading new modules...'
import tools.upgrade
(toinit, toupdate) = tools.upgrade.upgrade()
for m in toinit:
tools.config['init'][m] = 1
for m in toupdate:
tools.config['update'][m] = 1
#----------------------------------------------------------
# import basic modules
@ -175,27 +175,27 @@ import addons
addons.register_classes()
if tools.config['init'] or tools.config['update']:
pooler.get_db_and_pool(tools.config['db_name'], update_module=True)
pooler.get_db_and_pool(tools.config['db_name'], update_module=True)
#----------------------------------------------------------
# translation stuff
#----------------------------------------------------------
if tools.config["translate_out"]:
import csv
import csv
logger.notifyChannel("init", netsvc.LOG_INFO, 'writing translation file for language %s to %s' % (tools.config["language"], tools.config["translate_out"]))
logger.notifyChannel("init", netsvc.LOG_INFO, 'writing translation file for language %s to %s' % (tools.config["language"], tools.config["translate_out"]))
fileformat = os.path.splitext(tools.config["translate_out"])[-1][1:].lower()
buf = file(tools.config["translate_out"], "w")
tools.trans_export(tools.config["language"], tools.config["translate_modules"], buf, fileformat)
buf.close()
fileformat = os.path.splitext(tools.config["translate_out"])[-1][1:].lower()
buf = file(tools.config["translate_out"], "w")
tools.trans_export(tools.config["language"], tools.config["translate_modules"], buf, fileformat)
buf.close()
logger.notifyChannel("init", netsvc.LOG_INFO, 'translation file written succesfully')
sys.exit(0)
logger.notifyChannel("init", netsvc.LOG_INFO, 'translation file written succesfully')
sys.exit(0)
if tools.config["translate_in"]:
tools.trans_load(tools.config["db_name"], tools.config["translate_in"], tools.config["language"])
sys.exit(0)
tools.trans_load(tools.config["db_name"], tools.config["translate_in"], tools.config["language"])
sys.exit(0)
#----------------------------------------------------------------------------------
# if we don't want the server to continue to run after initialization, we quit here
@ -209,69 +209,69 @@ if tools.config["stop_after_init"]:
#----------------------------------------------------------
if tools.config['xmlrpc']:
try:
port = int(tools.config["port"])
except Exception:
logger.notifyChannel("init", netsvc.LOG_ERROR, "invalid port '%s'!" % (tools.config["port"],))
sys.exit(1)
interface = tools.config["interface"]
secure = tools.config["secure"]
try:
port = int(tools.config["port"])
except Exception:
logger.notifyChannel("init", netsvc.LOG_ERROR, "invalid port '%s'!" % (tools.config["port"],))
sys.exit(1)
interface = tools.config["interface"]
secure = tools.config["secure"]
httpd = netsvc.HttpDaemon(interface,port, secure)
httpd = netsvc.HttpDaemon(interface,port, secure)
if tools.config["xmlrpc"]:
xml_gw = netsvc.xmlrpc.RpcGateway('web-services')
httpd.attach("/xmlrpc", xml_gw )
logger.notifyChannel("web-services", netsvc.LOG_INFO,
"starting XML-RPC" + \
(tools.config['secure'] and ' Secure' or '') + \
" services, port " + str(port))
if tools.config["xmlrpc"]:
xml_gw = netsvc.xmlrpc.RpcGateway('web-services')
httpd.attach("/xmlrpc", xml_gw )
logger.notifyChannel("web-services", netsvc.LOG_INFO,
"starting XML-RPC" + \
(tools.config['secure'] and ' Secure' or '') + \
" services, port " + str(port))
#
#if tools.config["soap"]:
# soap_gw = netsvc.xmlrpc.RpcGateway('web-services')
# httpd.attach("/soap", soap_gw )
# logger.notifyChannel("web-services", netsvc.LOG_INFO, 'starting SOAP services, port '+str(port))
#
#
#if tools.config["soap"]:
# soap_gw = netsvc.xmlrpc.RpcGateway('web-services')
# httpd.attach("/soap", soap_gw )
# logger.notifyChannel("web-services", netsvc.LOG_INFO, 'starting SOAP services, port '+str(port))
#
if tools.config['netrpc']:
try:
netport = int(tools.config["netport"])
except Exception:
logger.notifyChannel("init", netsvc.LOG_ERROR, "invalid port '%s'!" % (tools.config["netport"],))
sys.exit(1)
netinterface = tools.config["netinterface"]
try:
netport = int(tools.config["netport"])
except Exception:
logger.notifyChannel("init", netsvc.LOG_ERROR, "invalid port '%s'!" % (tools.config["netport"],))
sys.exit(1)
netinterface = tools.config["netinterface"]
tinySocket = netsvc.TinySocketServerThread(netinterface, netport, False)
logger.notifyChannel("web-services", netsvc.LOG_INFO, "starting netrpc service, port "+str(netport))
tinySocket = netsvc.TinySocketServerThread(netinterface, netport, False)
logger.notifyChannel("web-services", netsvc.LOG_INFO, "starting netrpc service, port "+str(netport))
def handler(signum, frame):
from tools import config
if tools.config['netrpc']:
tinySocket.stop()
if tools.config['xmlrpc']:
httpd.stop()
netsvc.Agent.quit()
if config['pidfile']:
os.unlink(config['pidfile'])
sys.exit(0)
from tools import config
if tools.config['netrpc']:
tinySocket.stop()
if tools.config['xmlrpc']:
httpd.stop()
netsvc.Agent.quit()
if config['pidfile']:
os.unlink(config['pidfile'])
sys.exit(0)
from tools import config
if config['pidfile']:
fd=open(config['pidfile'], 'w')
pidtext="%d" % (os.getpid())
fd.write(pidtext)
fd.close()
fd=open(config['pidfile'], 'w')
pidtext="%d" % (os.getpid())
fd.write(pidtext)
fd.close()
signal.signal(signal.SIGINT, handler)
signal.signal(signal.SIGTERM, handler)
logger.notifyChannel("web-services", netsvc.LOG_INFO, 'the server is running, waiting for connections...')
if tools.config['netrpc']:
tinySocket.start()
tinySocket.start()
if tools.config['xmlrpc']:
httpd.start()
httpd.start()
#dispatcher.run()
while True:
time.sleep(1)
time.sleep(1)

View File

@ -32,146 +32,146 @@
#-------------------------------------------------------------
unites = {
0: '', 1:'un', 2:'deux', 3:'trois', 4:'quatre', 5:'cinq', 6:'six', 7:'sept', 8:'huit', 9:'neuf',
10:'dix', 11:'onze', 12:'douze', 13:'treize', 14:'quatorze', 15:'quinze', 16:'seize',
21:'vingt et un', 31:'trente et un', 41:'quarante et un', 51:'cinquante et un', 61:'soixante et un',
71:'septante et un', 91:'nonante et un', 80:'quatre-vingts'
0: '', 1:'un', 2:'deux', 3:'trois', 4:'quatre', 5:'cinq', 6:'six', 7:'sept', 8:'huit', 9:'neuf',
10:'dix', 11:'onze', 12:'douze', 13:'treize', 14:'quatorze', 15:'quinze', 16:'seize',
21:'vingt et un', 31:'trente et un', 41:'quarante et un', 51:'cinquante et un', 61:'soixante et un',
71:'septante et un', 91:'nonante et un', 80:'quatre-vingts'
}
dizaine = {
1: 'dix', 2:'vingt', 3:'trente',4:'quarante', 5:'cinquante', 6:'soixante', 7:'septante', 8:'quatre-vingt', 9:'nonante'
1: 'dix', 2:'vingt', 3:'trente',4:'quarante', 5:'cinquante', 6:'soixante', 7:'septante', 8:'quatre-vingt', 9:'nonante'
}
centaine = {
0:'', 1: 'cent', 2:'deux cent', 3:'trois cent',4:'quatre cent', 5:'cinq cent', 6:'six cent', 7:'sept cent', 8:'huit cent', 9:'neuf cent'
0:'', 1: 'cent', 2:'deux cent', 3:'trois cent',4:'quatre cent', 5:'cinq cent', 6:'six cent', 7:'sept cent', 8:'huit cent', 9:'neuf cent'
}
mille = {
0:'', 1:'mille'
0:'', 1:'mille'
}
def _100_to_text_fr(chiffre):
if chiffre in unites:
return unites[chiffre]
else:
if chiffre%10>0:
return dizaine[chiffre / 10]+'-'+unites[chiffre % 10]
else:
return dizaine[chiffre / 10]
if chiffre in unites:
return unites[chiffre]
else:
if chiffre%10>0:
return dizaine[chiffre / 10]+'-'+unites[chiffre % 10]
else:
return dizaine[chiffre / 10]
def _1000_to_text_fr(chiffre):
d = _100_to_text_fr(chiffre % 100)
d2 = chiffre/100
if d2>0 and d:
return centaine[d2]+' '+d
elif d2>1 and not(d):
return centaine[d2]+'s'
else:
return centaine[d2] or d
d = _100_to_text_fr(chiffre % 100)
d2 = chiffre/100
if d2>0 and d:
return centaine[d2]+' '+d
elif d2>1 and not(d):
return centaine[d2]+'s'
else:
return centaine[d2] or d
def _10000_to_text_fr(chiffre):
if chiffre==0:
return 'zero'
part1 = _1000_to_text_fr(chiffre % 1000)
part2 = mille.get(chiffre / 1000, _1000_to_text_fr(chiffre / 1000)+' mille')
if part2 and part1:
part1 = ' '+part1
return part2+part1
if chiffre==0:
return 'zero'
part1 = _1000_to_text_fr(chiffre % 1000)
part2 = mille.get(chiffre / 1000, _1000_to_text_fr(chiffre / 1000)+' mille')
if part2 and part1:
part1 = ' '+part1
return part2+part1
def amount_to_text_fr(number, currency):
units_number = int(number)
units_name = currency
if units_number > 1:
units_name += 's'
units = _10000_to_text_fr(units_number)
units = units_number and '%s %s' % (units, units_name) or ''
units_number = int(number)
units_name = currency
if units_number > 1:
units_name += 's'
units = _10000_to_text_fr(units_number)
units = units_number and '%s %s' % (units, units_name) or ''
cents_number = int(number * 100) % 100
cents_name = (cents_number > 1) and 'cents' or 'cent'
cents = _100_to_text_fr(cents_number)
cents = cents_number and '%s %s' % (cents, cents_name) or ''
cents_number = int(number * 100) % 100
cents_name = (cents_number > 1) and 'cents' or 'cent'
cents = _100_to_text_fr(cents_number)
cents = cents_number and '%s %s' % (cents, cents_name) or ''
if units and cents:
cents = ' '+cents
if units and cents:
cents = ' '+cents
return units + cents
return units + cents
#-------------------------------------------------------------
# Dutch
#-------------------------------------------------------------
units_nl = {
0:'', 1:'een', 2:'twee', 3:'drie', 4:'vier', 5:'vijf', 6:'zes', 7:'zeven', 8:'acht', 9:'negen',
10:'tien', 11:'elf', 12:'twaalf', 13:'dertien', 14:'veertien'
0:'', 1:'een', 2:'twee', 3:'drie', 4:'vier', 5:'vijf', 6:'zes', 7:'zeven', 8:'acht', 9:'negen',
10:'tien', 11:'elf', 12:'twaalf', 13:'dertien', 14:'veertien'
}
tens_nl = {
1: 'tien', 2:'twintig', 3:'dertig',4:'veertig', 5:'vijftig', 6:'zestig', 7:'zeventig', 8:'tachtig', 9:'negentig'
1: 'tien', 2:'twintig', 3:'dertig',4:'veertig', 5:'vijftig', 6:'zestig', 7:'zeventig', 8:'tachtig', 9:'negentig'
}
hundreds_nl = {
0:'', 1: 'honderd',
0:'', 1: 'honderd',
}
thousands_nl = {
0:'', 1:'duizend'
0:'', 1:'duizend'
}
def _100_to_text_nl(number):
if number in units_nl:
return units_nl[number]
else:
if number%10 > 0:
if number>10 and number<20:
return units_nl[number % 10]+tens_nl[number / 10]
else:
units = units_nl[number % 10]
if units[-1] == 'e':
joinword = 'ën'
else:
joinword = 'en'
return units+joinword+tens_nl[number / 10]
else:
return tens_nl[number / 10]
if number in units_nl:
return units_nl[number]
else:
if number%10 > 0:
if number>10 and number<20:
return units_nl[number % 10]+tens_nl[number / 10]
else:
units = units_nl[number % 10]
if units[-1] == 'e':
joinword = 'ën'
else:
joinword = 'en'
return units+joinword+tens_nl[number / 10]
else:
return tens_nl[number / 10]
def _1000_to_text_nl(number):
part1 = _100_to_text_nl(number % 100)
part2 = hundreds_nl.get(number / 100, units_nl[number/100] + hundreds_nl[1])
if part2 and part1:
part1 = ' ' + part1
return part2 + part1
part1 = _100_to_text_nl(number % 100)
part2 = hundreds_nl.get(number / 100, units_nl[number/100] + hundreds_nl[1])
if part2 and part1:
part1 = ' ' + part1
return part2 + part1
def _10000_to_text_nl(number):
if number==0:
return 'nul'
part1 = _1000_to_text_nl(number % 1000)
if thousands_nl.has_key(number / 1000):
part2 = thousands_nl[number / 1000]
else:
if (number / 1000 % 100 > 0) and (number / 1000 > 100):
space = ' '
else:
space = ''
part2 = _1000_to_text_nl(number / 1000) + space + thousands_nl[1]
if part2 and part1:
part1 = ' ' + part1
return part2 + part1
if number==0:
return 'nul'
part1 = _1000_to_text_nl(number % 1000)
if thousands_nl.has_key(number / 1000):
part2 = thousands_nl[number / 1000]
else:
if (number / 1000 % 100 > 0) and (number / 1000 > 100):
space = ' '
else:
space = ''
part2 = _1000_to_text_nl(number / 1000) + space + thousands_nl[1]
if part2 and part1:
part1 = ' ' + part1
return part2 + part1
def amount_to_text_nl(number, currency):
units_number = int(number)
units_name = currency
units = _10000_to_text_nl(units_number)
units = units_number and '%s %s' % (units, units_name) or ''
units_number = int(number)
units_name = currency
units = _10000_to_text_nl(units_number)
units = units_number and '%s %s' % (units, units_name) or ''
cents_number = int(number * 100) % 100
cents_name = 'cent'
cents = _100_to_text_nl(cents_number)
cents = cents_number and '%s %s' % (cents, cents_name) or ''
cents_number = int(number * 100) % 100
cents_name = 'cent'
cents = _100_to_text_nl(cents_number)
cents = cents_number and '%s %s' % (cents, cents_name) or ''
if units and cents:
cents = ' ' + cents
if units and cents:
cents = ' ' + cents
return units + cents
return units + cents
#-------------------------------------------------------------
# Generic functions
@ -182,32 +182,32 @@ _translate_funcs = {'fr' : amount_to_text_fr, 'nl' : amount_to_text_nl}
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='fr', currency='euro'):
"""
Converts an integer to its textual representation, using the language set in the context if any.
Example:
1654: mille six cent cinquante-quatre.
"""
if nbr > 1000000:
"""
Converts an integer to its textual representation, using the language set in the context if any.
Example:
1654: mille six cent cinquante-quatre.
"""
if nbr > 1000000:
#TODO: use logger
print "WARNING: number too large '%d', can't translate it!" % (nbr,)
return str(nbr)
print "WARNING: number too large '%d', can't translate it!" % (nbr,)
return str(nbr)
if not _translate_funcs.has_key(lang):
if not _translate_funcs.has_key(lang):
#TODO: use logger
print "WARNING: no translation function found for lang: '%s'" % (lang,)
print "WARNING: no translation function found for lang: '%s'" % (lang,)
#TODO: (default should be en) same as above
lang = 'fr'
return _translate_funcs[lang](nbr, currency)
lang = 'fr'
return _translate_funcs[lang](nbr, currency)
if __name__=='__main__':
from sys import argv
from sys import argv
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", amount_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", amount_to_text(i, lang)
else:
print amount_to_text(int(argv[1]), lang)
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", amount_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", amount_to_text(i, lang)
else:
print amount_to_text(int(argv[1]), lang)

View File

@ -31,208 +31,208 @@ import netsvc,logging
class configmanager(object):
def __init__(self, fname=None):
self.options = {
'verbose': False,
'interface': '', # this will bind the server to all interfaces
'port': '8069',
'netinterface': '',
'netport': '8070',
'db_host': False,
'db_port': False,
'db_name': 'terp',
'db_user': False,
'db_password': False,
'db_maxconn': 64,
'reportgz': False,
'netrpc': True,
'xmlrpc': True,
'soap': False,
'translate_in': None,
'translate_out': None,
'language': None,
'pg_path': None,
'admin_passwd': 'admin',
'addons_path': None,
'root_path': None,
'debug_mode': False,
'commit_mode': False,
'pidfile': None,
'logfile': None,
'secure': False,
'smtp_server': 'localhost',
'smtp_user': False,
'smtp_password': False,
'stop_after_init': False, # this will stop the server after initialization
'price_accuracy': 2,
def __init__(self, fname=None):
self.options = {
'verbose': False,
'interface': '', # this will bind the server to all interfaces
'port': '8069',
'netinterface': '',
'netport': '8070',
'db_host': False,
'db_port': False,
'db_name': 'terp',
'db_user': False,
'db_password': False,
'db_maxconn': 64,
'reportgz': False,
'netrpc': True,
'xmlrpc': True,
'soap': False,
'translate_in': None,
'translate_out': None,
'language': None,
'pg_path': None,
'admin_passwd': 'admin',
'addons_path': None,
'root_path': None,
'debug_mode': False,
'commit_mode': False,
'pidfile': None,
'logfile': None,
'secure': False,
'smtp_server': 'localhost',
'smtp_user': False,
'smtp_password': False,
'stop_after_init': False, # this will stop the server after initialization
'price_accuracy': 2,
'assert_exit_level': logging.WARNING, # level above which a failed assert will
}
'assert_exit_level': logging.WARNING, # level above which a failed assert will
}
assert_exit_levels = (netsvc.LOG_CRITICAL, netsvc.LOG_ERROR, netsvc.LOG_WARNING, netsvc.LOG_INFO, netsvc.LOG_DEBUG)
assert_exit_levels = (netsvc.LOG_CRITICAL, netsvc.LOG_ERROR, netsvc.LOG_WARNING, netsvc.LOG_INFO, netsvc.LOG_DEBUG)
parser = optparse.OptionParser(version=tinyerp_version_string)
parser = optparse.OptionParser(version=tinyerp_version_string)
parser.add_option("-c", "--config", dest="config", help="specify alternate config file")
parser.add_option("-s", "--save", action="store_true", dest="save", default=False, help="save configuration to ~/.terp_serverrc")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="enable debugging")
parser.add_option("--pidfile", dest="pidfile", help="file where the server pid will be stored")
parser.add_option("--logfile", dest="logfile", help="file where the server log will be stored")
parser.add_option("-c", "--config", dest="config", help="specify alternate config file")
parser.add_option("-s", "--save", action="store_true", dest="save", default=False, help="save configuration to ~/.terp_serverrc")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="enable debugging")
parser.add_option("--pidfile", dest="pidfile", help="file where the server pid will be stored")
parser.add_option("--logfile", dest="logfile", help="file where the server log will be stored")
parser.add_option("-n", "--interface", dest="interface", help="specify the TCP IP address")
parser.add_option("-p", "--port", dest="port", help="specify the TCP port")
parser.add_option("--net_interface", dest="netinterface", help="specify the TCP IP address for netrpc")
parser.add_option("--net_port", dest="netport", help="specify the TCP port for netrpc")
parser.add_option("--no-netrpc", dest="netrpc", action="store_false", default=True, help="disable netrpc")
parser.add_option("--no-xmlrpc", dest="xmlrpc", action="store_false", default=True, help="disable xmlrpc")
parser.add_option("-n", "--interface", dest="interface", help="specify the TCP IP address")
parser.add_option("-p", "--port", dest="port", help="specify the TCP port")
parser.add_option("--net_interface", dest="netinterface", help="specify the TCP IP address for netrpc")
parser.add_option("--net_port", dest="netport", help="specify the TCP port for netrpc")
parser.add_option("--no-netrpc", dest="netrpc", action="store_false", default=True, help="disable netrpc")
parser.add_option("--no-xmlrpc", dest="xmlrpc", action="store_false", default=True, help="disable xmlrpc")
parser.add_option("-i", "--init", dest="init", help="init a module (use \"all\" for all modules)")
parser.add_option("--without-demo", dest="without_demo", help="load demo data for a module (use \"all\" for all modules)", default=False)
parser.add_option("-u", "--update", dest="update", help="update a module (use \"all\" for all modules)")
# stops the server from launching after initialization
parser.add_option("--stop-after-init", action="store_true", dest="stop_after_init", default=False, help="stop the server after it initializes")
parser.add_option('--debug', dest='debug_mode', action='store_true', default=False, help='enable debug mode')
parser.add_option("--assert-exit-level", dest='assert_exit_level', help="specify the level at which a failed assertion will stop the server " + str(assert_exit_levels))
parser.add_option("-S", "--secure", dest="secure", action="store_true", help="launch server over https instead of http", default=False)
parser.add_option('--smtp', dest='smtp_server', default='', help='specify the SMTP server for sending email')
parser.add_option('--smtp-user', dest='smtp_user', default='', help='specify the SMTP username for sending email')
parser.add_option('--smtp-password', dest='smtp_password', default='', help='specify the SMTP password for sending email')
parser.add_option('--price_accuracy', dest='price_accuracy', default='2', help='specify the price accuracy')
parser.add_option("-i", "--init", dest="init", help="init a module (use \"all\" for all modules)")
parser.add_option("--without-demo", dest="without_demo", help="load demo data for a module (use \"all\" for all modules)", default=False)
parser.add_option("-u", "--update", dest="update", help="update a module (use \"all\" for all modules)")
# stops the server from launching after initialization
parser.add_option("--stop-after-init", action="store_true", dest="stop_after_init", default=False, help="stop the server after it initializes")
parser.add_option('--debug', dest='debug_mode', action='store_true', default=False, help='enable debug mode')
parser.add_option("--assert-exit-level", dest='assert_exit_level', help="specify the level at which a failed assertion will stop the server " + str(assert_exit_levels))
parser.add_option("-S", "--secure", dest="secure", action="store_true", help="launch server over https instead of http", default=False)
parser.add_option('--smtp', dest='smtp_server', default='', help='specify the SMTP server for sending email')
parser.add_option('--smtp-user', dest='smtp_user', default='', help='specify the SMTP username for sending email')
parser.add_option('--smtp-password', dest='smtp_password', default='', help='specify the SMTP password for sending email')
parser.add_option('--price_accuracy', dest='price_accuracy', default='2', help='specify the price accuracy')
group = optparse.OptionGroup(parser, "Modules related options")
group.add_option("-g", "--upgrade", action="store_true", dest="upgrade", default=False, help="Upgrade/install/uninstall modules")
group = optparse.OptionGroup(parser, "Modules related options")
group.add_option("-g", "--upgrade", action="store_true", dest="upgrade", default=False, help="Upgrade/install/uninstall modules")
group = optparse.OptionGroup(parser, "Database related options")
group.add_option("-d", "--database", dest="db_name", help="specify the database name")
group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name")
group.add_option("-w", "--db_password", dest="db_password", help="specify the database password")
group.add_option("--pg_path", dest="pg_path", help="specify the pg executable path")
group.add_option("--db_host", dest="db_host", help="specify the database host")
group.add_option("--db_port", dest="db_port", help="specify the database port")
group.add_option("--db_maxconn", dest="db_maxconn", default='64', help="specify the the maximum number of physical connections to posgresql")
group.add_option("-C", "--commit-mode", dest="commit_mode", action="store_true", help="Several commit during one file importation. Use this for big data importation.", default=False)
parser.add_option_group(group)
group = optparse.OptionGroup(parser, "Database related options")
group.add_option("-d", "--database", dest="db_name", help="specify the database name")
group.add_option("-r", "--db_user", dest="db_user", help="specify the database user name")
group.add_option("-w", "--db_password", dest="db_password", help="specify the database password")
group.add_option("--pg_path", dest="pg_path", help="specify the pg executable path")
group.add_option("--db_host", dest="db_host", help="specify the database host")
group.add_option("--db_port", dest="db_port", help="specify the database port")
group.add_option("--db_maxconn", dest="db_maxconn", default='64', help="specify the the maximum number of physical connections to posgresql")
group.add_option("-C", "--commit-mode", dest="commit_mode", action="store_true", help="Several commit during one file importation. Use this for big data importation.", default=False)
parser.add_option_group(group)
group = optparse.OptionGroup(parser, "Internationalisation options",
"Use these options to translate Tiny ERP to another language."
"See i18n section of the user manual. Options '-l' and '-d' are mandatory.")
group = optparse.OptionGroup(parser, "Internationalisation options",
"Use these options to translate Tiny ERP to another language."
"See i18n section of the user manual. Options '-l' and '-d' are mandatory.")
group.add_option('-l', "--language", dest="language", help="specify the language of the translation file. Use it with --i18n-export and --i18n-import")
group.add_option("--i18n-export", dest="translate_out", help="export all sentences to be translated to a CSV or a PO file and exit")
group.add_option("--i18n-import", dest="translate_in", help="import a CSV or a PO file with translations and exit")
group.add_option("--modules", dest="translate_modules", help="specify modules to export. Use in combination with --i18n-export")
group.add_option("--addons-path", dest="addons_path", help="specify an alternative addons path.")
parser.add_option_group(group)
group.add_option('-l', "--language", dest="language", help="specify the language of the translation file. Use it with --i18n-export and --i18n-import")
group.add_option("--i18n-export", dest="translate_out", help="export all sentences to be translated to a CSV or a PO file and exit")
group.add_option("--i18n-import", dest="translate_in", help="import a CSV or a PO file with translations and exit")
group.add_option("--modules", dest="translate_modules", help="specify modules to export. Use in combination with --i18n-export")
group.add_option("--addons-path", dest="addons_path", help="specify an alternative addons path.")
parser.add_option_group(group)
(opt, args) = parser.parse_args()
(opt, args) = parser.parse_args()
assert not ((opt.translate_in or opt.translate_out) and (not opt.language or not opt.db_name)), "the i18n-import and i18n-export options cannot be used without the language (-l) and database (-d) options"
assert not ((opt.translate_in or opt.translate_out) and (not opt.language or not opt.db_name)), "the i18n-import and i18n-export options cannot be used without the language (-l) and database (-d) options"
# place/search the config file on Win32 near the server installation
# (../etc from the server)
# if the server is run by an unprivileged user, he has to specify location of a config file where he has the rights to write,
# else he won't be able to save the configurations, or even to start the server...
if os.name == 'nt':
rcfilepath = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'tinyerp-server.conf')
else:
rcfilepath = os.path.expanduser('~/.terp_serverrc')
# place/search the config file on Win32 near the server installation
# (../etc from the server)
# if the server is run by an unprivileged user, he has to specify location of a config file where he has the rights to write,
# else he won't be able to save the configurations, or even to start the server...
if os.name == 'nt':
rcfilepath = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'tinyerp-server.conf')
else:
rcfilepath = os.path.expanduser('~/.terp_serverrc')
self.rcfile = fname or opt.config or os.environ.get('TERP_SERVER') or rcfilepath
self.load()
self.rcfile = fname or opt.config or os.environ.get('TERP_SERVER') or rcfilepath
self.load()
# Verify that we want to log or not, if not the output will go to stdout
if self.options['logfile'] in ('None', 'False'):
self.options['logfile'] = False
# the same for the pidfile
if self.options['pidfile'] in ('None', 'False'):
self.options['pidfile'] = False
# Verify that we want to log or not, if not the output will go to stdout
if self.options['logfile'] in ('None', 'False'):
self.options['logfile'] = False
# the same for the pidfile
if self.options['pidfile'] in ('None', 'False'):
self.options['pidfile'] = False
for arg in ('interface', 'port', 'db_name', 'db_user', 'db_password', 'db_host',
'db_port', 'logfile', 'pidfile', 'secure', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy', 'netinterface', 'netport', 'db_maxconn', 'commit_mode', 'addons_path'):
if getattr(opt, arg):
self.options[arg] = getattr(opt, arg)
for arg in ('interface', 'port', 'db_name', 'db_user', 'db_password', 'db_host',
'db_port', 'logfile', 'pidfile', 'secure', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy', 'netinterface', 'netport', 'db_maxconn', 'commit_mode', 'addons_path'):
if getattr(opt, arg):
self.options[arg] = getattr(opt, arg)
for arg in ('language', 'translate_out', 'translate_in',
'upgrade', 'verbose', 'debug_mode',
'stop_after_init', 'without_demo', 'netrpc', 'xmlrpc'):
self.options[arg] = getattr(opt, arg)
for arg in ('language', 'translate_out', 'translate_in',
'upgrade', 'verbose', 'debug_mode',
'stop_after_init', 'without_demo', 'netrpc', 'xmlrpc'):
self.options[arg] = getattr(opt, arg)
if opt.assert_exit_level:
assert opt.assert_exit_level in assert_exit_levels, 'ERROR: The assert-exit-level must be one of those values: '+str(assert_exit_levels)
self.options['assert_exit_level'] = getattr(logging, opt.assert_exit_level.upper())
if opt.assert_exit_level:
assert opt.assert_exit_level in assert_exit_levels, 'ERROR: The assert-exit-level must be one of those values: '+str(assert_exit_levels)
self.options['assert_exit_level'] = getattr(logging, opt.assert_exit_level.upper())
if not self.options['root_path'] or self.options['root_path']=='None':
self.options['root_path'] = os.path.abspath(os.path.dirname(sys.argv[0]))
if not self.options['addons_path'] or self.options['addons_path']=='None':
self.options['addons_path'] = os.path.join(self.options['root_path'], 'addons')
if not self.options['root_path'] or self.options['root_path']=='None':
self.options['root_path'] = os.path.abspath(os.path.dirname(sys.argv[0]))
if not self.options['addons_path'] or self.options['addons_path']=='None':
self.options['addons_path'] = os.path.join(self.options['root_path'], 'addons')
init = {}
if opt.init:
for i in opt.init.split(','):
init[i] = 1
self.options['init'] = init
self.options["demo"] = not opt.without_demo and self.options['init'] or {}
init = {}
if opt.init:
for i in opt.init.split(','):
init[i] = 1
self.options['init'] = init
self.options["demo"] = not opt.without_demo and self.options['init'] or {}
update = {}
if opt.update:
for i in opt.update.split(','):
update[i] = 1
self.options['update'] = update
update = {}
if opt.update:
for i in opt.update.split(','):
update[i] = 1
self.options['update'] = update
self.options['translate_modules'] = opt.translate_modules and map(lambda m: m.strip(), opt.translate_modules.split(',')) or ['all']
self.options['translate_modules'].sort()
self.options['translate_modules'] = opt.translate_modules and map(lambda m: m.strip(), opt.translate_modules.split(',')) or ['all']
self.options['translate_modules'].sort()
if opt.pg_path:
self.options['pg_path'] = opt.pg_path
if opt.pg_path:
self.options['pg_path'] = opt.pg_path
if self.options.get('language', False):
assert len(self.options['language'])<=5, 'ERROR: The Lang name must take max 5 chars, Eg: -lfr_BE'
if opt.save:
self.save()
if self.options.get('language', False):
assert len(self.options['language'])<=5, 'ERROR: The Lang name must take max 5 chars, Eg: -lfr_BE'
if opt.save:
self.save()
def load(self):
p = ConfigParser.ConfigParser()
try:
p.read([self.rcfile])
for (name,value) in p.items('options'):
if value=='True' or value=='true':
value = True
if value=='False' or value=='false':
value = False
self.options[name] = value
except IOError:
pass
except ConfigParser.NoSectionError:
pass
def load(self):
p = ConfigParser.ConfigParser()
try:
p.read([self.rcfile])
for (name,value) in p.items('options'):
if value=='True' or value=='true':
value = True
if value=='False' or value=='false':
value = False
self.options[name] = value
except IOError:
pass
except ConfigParser.NoSectionError:
pass
def save(self):
p = ConfigParser.ConfigParser()
p.add_section('options')
for o in [opt for opt in self.options.keys() if opt not in ('version','language','translate_out','translate_in','init','update')]:
p.set('options', o, self.options[o])
def save(self):
p = ConfigParser.ConfigParser()
p.add_section('options')
for o in [opt for opt in self.options.keys() if opt not in ('version','language','translate_out','translate_in','init','update')]:
p.set('options', o, self.options[o])
# try to create the directories and write the file
try:
if not os.path.exists(os.path.dirname(self.rcfile)):
os.makedirs(os.path.dirname(self.rcfile))
try:
p.write(file(self.rcfile, 'w'))
except IOError:
sys.stderr.write("ERROR: couldn't write the config file\n")
# try to create the directories and write the file
try:
if not os.path.exists(os.path.dirname(self.rcfile)):
os.makedirs(os.path.dirname(self.rcfile))
try:
p.write(file(self.rcfile, 'w'))
except IOError:
sys.stderr.write("ERROR: couldn't write the config file\n")
except OSError:
# what to do if impossible?
sys.stderr.write("ERROR: couldn't create the config directory\n")
except OSError:
# what to do if impossible?
sys.stderr.write("ERROR: couldn't create the config directory\n")
def get(self, key, default=None):
return self.options.get(key, default)
def get(self, key, default=None):
return self.options.get(key, default)
def __setitem__(self, key, value):
self.options[key] = value
def __setitem__(self, key, value):
self.options[key] = value
def __getitem__(self, key):
return self.options[key]
def __getitem__(self, key):
return self.options[key]
config = configmanager()

File diff suppressed because it is too large Load Diff

View File

@ -28,306 +28,306 @@
###############################################################################
class graph(object):
def __init__(self, nodes, transitions):
self.nodes = nodes
self.links = transitions
trans = {}
for t in transitions:
trans.setdefault(t[0], [])
trans[t[0]].append(t[1])
self.transitions = trans
self.result = {}
self.levels = {}
def __init__(self, nodes, transitions):
self.nodes = nodes
self.links = transitions
trans = {}
for t in transitions:
trans.setdefault(t[0], [])
trans[t[0]].append(t[1])
self.transitions = trans
self.result = {}
self.levels = {}
def get_parent(self,node):
count = 0
for item in self.transitions:
if self.transitions[item].__contains__(node):
count +=1
return count
def get_parent(self,node):
count = 0
for item in self.transitions:
if self.transitions[item].__contains__(node):
count +=1
return count
def init_rank(self):
self.temp = {}
for link in self.links:
self.temp[link] = self.result[link[1]]['y'] - self.result[link[0]]['y']
def init_rank(self):
self.temp = {}
for link in self.links:
self.temp[link] = self.result[link[1]]['y'] - self.result[link[0]]['y']
cnt = 0
list_node = []
list_edge = []
cnt = 0
list_node = []
list_edge = []
while self.tight_tree()<self.result.__len__():
cnt+=1
list_node = []
while self.tight_tree()<self.result.__len__():
cnt+=1
list_node = []
for node in self.nodes:
if node not in self.reachable_nodes:
list_node.append(node)
list_edge = []
for node in self.nodes:
if node not in self.reachable_nodes:
list_node.append(node)
list_edge = []
for link in self.temp:
if link not in self.tree_edges:
list_edge.append(link)
for link in self.temp:
if link not in self.tree_edges:
list_edge.append(link)
slack = 100
slack = 100
for edge in list_edge:
if (self.reachable_nodes.__contains__(edge[0]) and edge[1] not in self.reachable_nodes) or ( self.reachable_nodes.__contains__(edge[1]) and edge[0] not in self.reachable_nodes):
if(slack>self.temp[edge]-1):
slack = self.temp[edge]-1
new_edge = edge
for edge in list_edge:
if (self.reachable_nodes.__contains__(edge[0]) and edge[1] not in self.reachable_nodes) or ( self.reachable_nodes.__contains__(edge[1]) and edge[0] not in self.reachable_nodes):
if(slack>self.temp[edge]-1):
slack = self.temp[edge]-1
new_edge = edge
if new_edge[0] not in self.reachable_nodes:
delta = -(self.temp[new_edge]-1)
else:
delta = self.temp[new_edge]-1
if new_edge[0] not in self.reachable_nodes:
delta = -(self.temp[new_edge]-1)
else:
delta = self.temp[new_edge]-1
for node in self.result:
if node in self.reachable_nodes:
self.result[node]['y'] += delta
for node in self.result:
if node in self.reachable_nodes:
self.result[node]['y'] += delta
for link in self.temp:
self.temp[link] = self.result[link[1]]['y'] - self.result[link[0]]['y']
for link in self.temp:
self.temp[link] = self.result[link[1]]['y'] - self.result[link[0]]['y']
self.init_cutvalues()
self.init_cutvalues()
def tight_tree(self,):
self.reachable_nodes = []
self.tree_edges = []
self.reachable_node(self.start)
return self.reachable_nodes.__len__()
def tight_tree(self,):
self.reachable_nodes = []
self.tree_edges = []
self.reachable_node(self.start)
return self.reachable_nodes.__len__()
def reachable_node(self,node):
if node not in self.reachable_nodes:
self.reachable_nodes.append(node)
for link in self.temp:
if link[0]==node:
# print link[0]
if self.temp[link]==1:
self.tree_edges.append(link)
if link[1] not in self.reachable_nodes:
self.reachable_nodes.append(link[1])
self.reachable_node(link[1])
def reachable_node(self,node):
if node not in self.reachable_nodes:
self.reachable_nodes.append(node)
for link in self.temp:
if link[0]==node:
# print link[0]
if self.temp[link]==1:
self.tree_edges.append(link)
if link[1] not in self.reachable_nodes:
self.reachable_nodes.append(link[1])
self.reachable_node(link[1])
def init_cutvalues(self):
self.cut_edges = {}
self.head_nodes = []
i=0;
for edge in self.tree_edges:
self.head_nodes = []
rest_edges = []
rest_edges += self.tree_edges
rest_edges.__delitem__(i)
self.head_component(self.start,rest_edges)
i+=1
positive = 0
negative = 0
for source_node in self.transitions:
if source_node in self.head_nodes:
for dest_node in self.transitions[source_node]:
if dest_node not in self.head_nodes:
negative+=1
else:
for dest_node in self.transitions[source_node]:
if dest_node in self.head_nodes:
positive+=1
def init_cutvalues(self):
self.cut_edges = {}
self.head_nodes = []
i=0;
for edge in self.tree_edges:
self.head_nodes = []
rest_edges = []
rest_edges += self.tree_edges
rest_edges.__delitem__(i)
self.head_component(self.start,rest_edges)
i+=1
positive = 0
negative = 0
for source_node in self.transitions:
if source_node in self.head_nodes:
for dest_node in self.transitions[source_node]:
if dest_node not in self.head_nodes:
negative+=1
else:
for dest_node in self.transitions[source_node]:
if dest_node in self.head_nodes:
positive+=1
self.cut_edges[edge] = positive - negative
self.cut_edges[edge] = positive - negative
def head_component(self, node, rest_edges):
if node not in self.head_nodes:
self.head_nodes.append(node)
for link in rest_edges:
if link[0]==node:
self.head_component(link[1],rest_edges)
def head_component(self, node, rest_edges):
if node not in self.head_nodes:
self.head_nodes.append(node)
for link in rest_edges:
if link[0]==node:
self.head_component(link[1],rest_edges)
def process_ranking(self, node, level=0):
if node not in self.result:
self.result[node] = {'x': None, 'y':level, 'mark':0}
else:
if level > self.result[node]['y']:
self.result[node]['y'] = level
if self.result[node]['mark']==0:
self.result[node]['mark'] = 1
for t in self.transitions.get(node, []):
self.process_ranking(t, level+1)
def process_ranking(self, node, level=0):
if node not in self.result:
self.result[node] = {'x': None, 'y':level, 'mark':0}
else:
if level > self.result[node]['y']:
self.result[node]['y'] = level
if self.result[node]['mark']==0:
self.result[node]['mark'] = 1
for t in self.transitions.get(node, []):
self.process_ranking(t, level+1)
def preprocess_order(self):
levels = {}
for r in self.result:
l = self.result[r]['y']
levels.setdefault(l,[])
levels[l].append(r)
self.levels = levels
def preprocess_order(self):
levels = {}
for r in self.result:
l = self.result[r]['y']
levels.setdefault(l,[])
levels[l].append(r)
self.levels = levels
def process_order(self, level):
self.levels[level].sort(lambda x,y: cmp(self.result[x]['x'], self.result[y]['x']))
for nodepos in range(len(self.levels[level])):
node = self.levels[level][nodepos]
if nodepos == 0:
left = self.result[node]['x']- 0.5
else:
left = (self.result[node]['x'] + self.result[self.levels[level][nodepos-1]]['x']) / 2.0
def process_order(self, level):
self.levels[level].sort(lambda x,y: cmp(self.result[x]['x'], self.result[y]['x']))
for nodepos in range(len(self.levels[level])):
node = self.levels[level][nodepos]
if nodepos == 0:
left = self.result[node]['x']- 0.5
else:
left = (self.result[node]['x'] + self.result[self.levels[level][nodepos-1]]['x']) / 2.0
if nodepos == (len(self.levels[level])-1):
right = self.result[node]['x'] + 0.5
else:
right = (self.result[node]['x'] + self.result[self.levels[level][nodepos+1]]['x']) / 2.0
if nodepos == (len(self.levels[level])-1):
right = self.result[node]['x'] + 0.5
else:
right = (self.result[node]['x'] + self.result[self.levels[level][nodepos+1]]['x']) / 2.0
if self.transitions.get(node, False):
if len(self.transitions[node])==1:
pos = (left+right)/2.0
step = 0
else:
pos = left
step = (-left+right) / (len(self.transitions[node])-1)
if self.transitions.get(node, False):
if len(self.transitions[node])==1:
pos = (left+right)/2.0
step = 0
else:
pos = left
step = (-left+right) / (len(self.transitions[node])-1)
for n2 in self.transitions[node]:
self.result[n2]['x'] = pos
pos += step
for n2 in self.transitions[node]:
self.result[n2]['x'] = pos
pos += step
def exchange(self,e,f):
self.tree_edges.__delitem__(self.tree_edges.index(e))
self.tree_edges.append(f)
self.init_cutvalues()
def exchange(self,e,f):
self.tree_edges.__delitem__(self.tree_edges.index(e))
self.tree_edges.append(f)
self.init_cutvalues()
def enter_edge(self,edge):
self.head_nodes = []
rest_edges = []
rest_edges += self.tree_edges
rest_edges.__delitem__(rest_edges.index(edge))
self.head_component(self.start,rest_edges)
slack = 100
for source_node in self.transitions:
if source_node in self.head_nodes:
for dest_node in self.transitions[source_node]:
if dest_node not in self.head_nodes:
if(slack>(self.temp[edge]-1)):
slack = self.temp[edge]-1
new_edge = (source_node,dest_node)
return new_edge
def enter_edge(self,edge):
self.head_nodes = []
rest_edges = []
rest_edges += self.tree_edges
rest_edges.__delitem__(rest_edges.index(edge))
self.head_component(self.start,rest_edges)
slack = 100
for source_node in self.transitions:
if source_node in self.head_nodes:
for dest_node in self.transitions[source_node]:
if dest_node not in self.head_nodes:
if(slack>(self.temp[edge]-1)):
slack = self.temp[edge]-1
new_edge = (source_node,dest_node)
return new_edge
def leave_edge(self):
for edge in self.cut_edges:
if self.cut_edges[edge]<0:
return edge
return ()
def leave_edge(self):
for edge in self.cut_edges:
if self.cut_edges[edge]<0:
return edge
return ()
def process(self, starting_node):
pos = (len(starting_node) - 1.0)/2.0
self.start = starting_node[0]
for s in starting_node:
self.process_ranking(s)
self.result[s]['x'] = pos
pos += 1.0
self.init_rank()
#normalize
least_rank=100
def process(self, starting_node):
pos = (len(starting_node) - 1.0)/2.0
self.start = starting_node[0]
for s in starting_node:
self.process_ranking(s)
self.result[s]['x'] = pos
pos += 1.0
self.init_rank()
#normalize
least_rank=100
#normalization
for node in self.result:
if least_rank>self.result[node]['y']:
least_rank = self.result[node]['y']
#normalization
for node in self.result:
if least_rank>self.result[node]['y']:
least_rank = self.result[node]['y']
if(least_rank!=0):
diff = least_rank
for node in self.result:
self.result[node]['y']-=least_rank
if(least_rank!=0):
diff = least_rank
for node in self.result:
self.result[node]['y']-=least_rank
e = self.leave_edge()
#while e:
f = self.enter_edge(e)
self.exchange(e,f)
e = self.leave_edge()
e = self.leave_edge()
#while e:
f = self.enter_edge(e)
self.exchange(e,f)
e = self.leave_edge()
self.preprocess_order()
for n in self.levels:
self.process_order(n)
self.preprocess_order()
for n in self.levels:
self.process_order(n)
def __str__(self):
result = ''
for l in self.levels:
result += 'PosY: ' + str(l) + '\n'
for node in self.levels[l]:
result += '\tPosX: '+ str(self.result[node]['x']) + ' - Node:' + node + "\n"
return result
def __str__(self):
result = ''
for l in self.levels:
result += 'PosY: ' + str(l) + '\n'
for node in self.levels[l]:
result += '\tPosX: '+ str(self.result[node]['x']) + ' - Node:' + node + "\n"
return result
def scale(self, maxx, maxy, plusx2=0, plusy2=0):
plusx = - min(map(lambda x: x['x'],self.result.values()))
plusy = - min(map(lambda x: x['y'],self.result.values()))
def scale(self, maxx, maxy, plusx2=0, plusy2=0):
plusx = - min(map(lambda x: x['x'],self.result.values()))
plusy = - min(map(lambda x: x['y'],self.result.values()))
maxcurrent = 1.0
diff = 1.0
for l in self.levels:
for n in range(1, len(self.levels[l])):
n1 = self.levels[l][n]
n2 = self.levels[l][n-1]
diff = abs(self.result[n2]['x']-self.result[n1]['x'])
if diff<maxcurrent:
maxcurrent=diff
factor = maxx / diff
for r in self.result:
self.result[r]['x'] = (self.result[r]['x']+plusx) * factor + plusx2
self.result[r]['y'] = (self.result[r]['y']+plusy) * maxy + plusy2
maxcurrent = 1.0
diff = 1.0
for l in self.levels:
for n in range(1, len(self.levels[l])):
n1 = self.levels[l][n]
n2 = self.levels[l][n-1]
diff = abs(self.result[n2]['x']-self.result[n1]['x'])
if diff<maxcurrent:
maxcurrent=diff
factor = maxx / diff
for r in self.result:
self.result[r]['x'] = (self.result[r]['x']+plusx) * factor + plusx2
self.result[r]['y'] = (self.result[r]['y']+plusy) * maxy + plusy2
def result_get(self):
return self.result
def result_get(self):
return self.result
if __name__=='__main__':
starting_node = ['profile'] # put here nodes with flow_start=True
nodes = ['project','account','hr','base','product','mrp','test','profile']
transitions = [
('profile','mrp'),
('mrp','project'),
('project','product'),
('mrp','hr'),
('mrp','test'),
('project','account'),
('project','hr'),
('product','base'),
('account','product'),
('account','test'),
('account','base'),
('hr','base'),
('test','base')
]
starting_node = ['profile'] # put here nodes with flow_start=True
nodes = ['project','account','hr','base','product','mrp','test','profile']
transitions = [
('profile','mrp'),
('mrp','project'),
('project','product'),
('mrp','hr'),
('mrp','test'),
('project','account'),
('project','hr'),
('product','base'),
('account','product'),
('account','test'),
('account','base'),
('hr','base'),
('test','base')
]
radius = 20
g = graph(nodes, transitions)
g.process(starting_node)
g.scale(radius*3,radius*3, radius, radius)
radius = 20
g = graph(nodes, transitions)
g.process(starting_node)
g.scale(radius*3,radius*3, radius, radius)
print g
print g
import Image
import ImageDraw
img = Image.new("RGB", (800, 600), "#ffffff")
draw = ImageDraw.Draw(img)
import Image
import ImageDraw
img = Image.new("RGB", (800, 600), "#ffffff")
draw = ImageDraw.Draw(img)
for name,node in g.result.items():
draw.arc( (int(node['x']-radius), int(node['y']-radius),int(node['x']+radius), int(node['y']+radius) ), 0, 360, (128,128,128))
draw.text( (int(node['x']), int(node['y'])), name, (128,128,128))
for name,node in g.result.items():
draw.arc( (int(node['x']-radius), int(node['y']-radius),int(node['x']+radius), int(node['y']+radius) ), 0, 360, (128,128,128))
draw.text( (int(node['x']), int(node['y'])), name, (128,128,128))
for nodefrom in g.transitions:
for nodeto in g.transitions[nodefrom]:
draw.line( (int(g.result[nodefrom]['x']), int(g.result[nodefrom]['y']),int(g.result[nodeto]['x']),int(g.result[nodeto]['y'])),(128,128,128) )
for nodefrom in g.transitions:
for nodeto in g.transitions[nodefrom]:
draw.line( (int(g.result[nodefrom]['x']), int(g.result[nodefrom]['y']),int(g.result[nodeto]['x']),int(g.result[nodeto]['y'])),(128,128,128) )
img.save("graph.png", "PNG")
img.save("graph.png", "PNG")

View File

@ -43,131 +43,131 @@ Le message avec le sujet "%s" n'a pu être archivé dans l'ERP.
class EmailParser(object):
    """Feed an incoming email message, plus a fixed list of interesting
    header names, to a dispatcher callable.
    """

    def __init__(self, headers, dispatcher):
        # headers: list of header names forwarded alongside each message
        # dispatcher: callable taking one (headers, msg) tuple
        self.headers = headers
        self.dispatcher = dispatcher

    def parse(self, msg):
        # Bug fix: the original called the *global* name `dispatcher`
        # instead of the instance attribute, so EmailParser only worked
        # when a module-level `dispatcher` happened to exist.
        self.dispatcher((self.headers, msg))
class CommandDispatcher(object):
    """Trivial forwarding callable: wraps a receiver and relays each
    request to it unchanged.  (Deduplicated: both methods appeared twice
    after a mangled whitespace-conversion commit.)
    """

    def __init__(self, receiver):
        self.receiver = receiver

    def __call__(self, request):
        return self.receiver(request)
class RPCProxy(object):
    """XML-RPC proxy bound to a fixed uid/password pair.

    Calling the proxy with a request tuple forwards it to the server's
    `execute` method with the credentials prepended.
    """

    def __init__(self, uid, passwd, host='localhost', port=8069, path='object'):
        self.rpc = xmlrpclib.ServerProxy('http://%s:%s/%s' % (host, port, path))
        self.user_id = uid
        self.passwd = passwd

    def __call__(self, request):
        return self.rpc.execute(self.user_id, self.passwd, *request)
class ReceiverEmail2Event(object):
    """Turn an incoming email into res.partner.event records through RPC.

    Partners are matched on the sender/recipient addresses; attachments
    are dumped under /tmp/emails/<message-id>/.  When no partner matches,
    a warning mail is sent back to the sender.
    (Deduplicated: every method appeared twice after a mangled
    whitespace-conversion commit.)
    """

    # Matches email addresses embedded in header values.
    email_re = re.compile(r"""
        ([a-zA-Z][\w\.-]*[a-zA-Z0-9] # username part
        @                            # mandatory @ sign
        [a-zA-Z0-9][\w\.-]*          # domain must start with a letter
        \.
        [a-z]{2,3}                   # TLD
        )
        """, re.VERBOSE)

    # Matches a leading project code like "[2008.123]" in the subject.
    project_re = re.compile(r"^ *\[?(\d{4}\.?\d{0,3})\]?", re.UNICODE)

    def __init__(self, rpc):
        # rpc: callable taking a (model, method, *args) tuple
        self.rpc = rpc

    def get_addresses(self, headers, msg):
        """Extract every email address found in the given headers of msg."""
        hcontent = ''
        for header in [h for h in headers if msg.has_key(h)]:
            hcontent += msg[header]
        return self.email_re.findall(hcontent)

    def get_partners(self, headers, msg):
        """Return the partner ids whose addresses appear in the headers."""
        alladdresses = self.get_addresses(headers, msg)
        address_ids = self.rpc(('res.partner.address', 'search', [('email', 'in', alladdresses)]))
        addresses = self.rpc(('res.partner.address', 'read', address_ids))
        return [x['partner_id'][0] for x in addresses]

    def __call__(self, request):
        headers, msg = request
        partners = self.get_partners(headers, msg)

        # Decode the (possibly multi-part, multi-charset) Subject header.
        subject = u''
        for string, charset in decode_header(msg['Subject']):
            if charset:
                subject += string.decode(charset)
            else:
                subject += unicode(string)

        if partners:
            self.save_mail(msg, subject, partners)
        else:
            # No matching partner: bounce a warning back to the sender.
            warning = MIMEText((warn_msg % (subject,)).encode('utf-8'), 'plain', 'utf-8')
            warning['Subject'] = 'Message de TinyERP'
            warning['From'] = 'erp@steel-sa.com'
            warning['To'] = msg['From']
            s = smtplib.SMTP()
            s.connect()
            s.sendmail('erp@steel-sa.com', self.email_re.findall(msg['From']), warning.as_string())
            s.close()

        # Recurse into attached rfc822 messages.
        if msg.is_multipart():
            for message in [m for m in msg.get_payload() if m.get_content_type() == 'message/rfc822']:
                self((headers, message.get_payload()[0]))

    def save_mail(self, msg, subject, partners):
        """Store the mail body as partner events; dump attachments to disk."""
        counter, description = 1, u''
        if msg.is_multipart():
            for part in msg.get_payload():
                stockdir = os.path.join('emails', msg['Message-Id'][1:-1])
                newdir = os.path.join('/tmp', stockdir)
                filename = part.get_filename()
                if not filename:
                    ext = mimetypes.guess_extension(part.get_type())
                    if not ext:
                        ext = '.bin'
                    filename = 'part-%03d%s' % (counter, ext)

                if part.get_content_maintype() == 'multipart':
                    continue
                elif part.get_content_maintype() == 'text':
                    if part.get_content_subtype() == 'plain':
                        # Plain text becomes the event description.
                        description += part.get_payload(decode=1).decode(part.get_charsets()[0])
                        description += u'\n\nVous trouverez les éventuels fichiers dans le répertoire: %s' % stockdir
                        continue
                    else:
                        description += u'\n\nCe message est en "%s", vous trouverez ce texte dans le répertoire: %s' % (part.get_content_type(), stockdir)
                elif part.get_content_type() == 'message/rfc822':
                    continue
                if not os.path.isdir(newdir):
                    os.mkdir(newdir)

                counter += 1
                fd = file(os.path.join(newdir, filename), 'w')
                fd.write(part.get_payload(decode=1))
                fd.close()
        else:
            description = msg.get_payload(decode=1).decode(msg.get_charsets()[0])

        project = self.project_re.search(subject)
        if project:
            project = project.groups()[0]
        else:
            project = ''

        for partner in partners:
            self.rpc(('res.partner.event', 'create', {'name': subject, 'partner_id': partner, 'description': description, 'project': project}))
if __name__ == '__main__':
    # Read one email from stdin and archive it as partner events.
    # (Deduplicated: every statement appeared twice after a mangled
    # whitespace-conversion commit.)
    rpc_dispatcher = CommandDispatcher(RPCProxy(4, 'admin'))
    dispatcher = CommandDispatcher(ReceiverEmail2Event(rpc_dispatcher))
    parser = EmailParser(['To', 'Cc', 'From'], dispatcher)
    parser.parse(email.message_from_file(sys.stdin))

View File

@ -44,123 +44,123 @@ import release
import socket
# threading.local only exists from Python 2.4; fall back to the
# backported pure-Python implementation on older interpreters.
# (Deduplicated: each import line appeared twice after a mangled
# whitespace-conversion commit.)
if sys.version_info[:2] < (2, 4):
    from threadinglocal import local
else:
    from threading import local
# initialize a database with base/base.sql
def init_db(cr):
    """Bootstrap a fresh database on cursor `cr`.

    Runs base/base.sql, then scans every addon's __terp__.py (plain or
    zipped) and fills ir_module_category / ir_module_module /
    ir_module_module_dependency accordingly.
    (Deduplicated: the whole body appeared twice after a mangled
    whitespace-conversion commit.)
    """
    import addons
    f = addons.get_module_resource('base', 'base.sql')
    for line in file(f).read().split(';'):
        if (len(line) > 0) and (not line.isspace()):
            cr.execute(line)
    cr.commit()

    for i in addons.get_modules():
        terp_file = addons.get_module_resource(i, '__terp__.py')
        mod_path = addons.get_module_path(i)
        info = False
        if os.path.isfile(terp_file) and not os.path.isfile(mod_path+'.zip'):
            info = eval(file(terp_file).read())
        elif zipfile.is_zipfile(mod_path+'.zip'):
            zfile = zipfile.ZipFile(mod_path+'.zip')
            i = os.path.splitext(i)[0]
            info = eval(zfile.read(os.path.join(i, '__terp__.py')))
        if info:
            # Create the (possibly nested) module category path.
            categs = info.get('category', 'Uncategorized').split('/')
            p_id = None
            while categs:
                if p_id is not None:
                    cr.execute('select id from ir_module_category \
                            where name=%s and parent_id=%d', (categs[0], p_id))
                else:
                    cr.execute('select id from ir_module_category \
                            where name=%s and parent_id is NULL', (categs[0],))
                c_id = cr.fetchone()
                if not c_id:
                    cr.execute('select nextval(\'ir_module_category_id_seq\')')
                    c_id = cr.fetchone()[0]
                    cr.execute('insert into ir_module_category \
                            (id, name, parent_id) values (%d, %s, %d)', (c_id, categs[0], p_id))
                else:
                    c_id = c_id[0]
                p_id = c_id
                categs = categs[1:]

            # Initial state depends on the addon's flags.
            active = info.get('active', False)
            installable = info.get('installable', True)
            if installable:
                if active:
                    state = 'to install'
                else:
                    state = 'uninstalled'
            else:
                state = 'uninstallable'
            cr.execute('select nextval(\'ir_module_module_id_seq\')')
            id = cr.fetchone()[0]
            cr.execute('insert into ir_module_module \
                    (id, author, latest_version, website, name, shortdesc, description, \
                    category_id, state) \
                    values (%d, %s, %s, %s, %s, %s, %s, %d, %s)', (
                id, info.get('author', ''),
                release.version.rsplit('.', 1)[0] + '.' + info.get('version', ''),
                info.get('website', ''), i, info.get('name', False),
                info.get('description', ''), p_id, state))
            dependencies = info.get('depends', [])
            for d in dependencies:
                cr.execute('insert into ir_module_module_dependency \
                        (module_id,name) values (%s, %s)', (id, d))
            cr.commit()
def find_in_path(name):
    """Return the full path of executable `name` found on $PATH, else None.

    A file or symlink with that name in any existing PATH directory counts.
    """
    # os.pathsep gives ';' on NT and ':' on POSIX, replacing the manual
    # os.name test of the original; also avoids shadowing builtin `dir`.
    path = [d for d in os.environ['PATH'].split(os.pathsep)
            if os.path.isdir(d)]
    for d in path:
        val = os.path.join(d, name)
        if os.path.isfile(val) or os.path.islink(val):
            return val
    return None
def find_pg_tool(name):
    """Locate a PostgreSQL client tool.

    Honours the `pg_path` config option when set (and not the literal
    string 'None'); otherwise falls back to searching $PATH.
    """
    if config['pg_path'] and config['pg_path'] != 'None':
        return os.path.join(config['pg_path'], name)
    else:
        return find_in_path(name)
def exec_pg_command(name, *args):
    """Run PostgreSQL tool `name` with `args`; return its exit status.

    @raise Exception: when the tool cannot be located.
    """
    prog = find_pg_tool(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # argv[0] must be the program name itself for spawnv.
    args2 = (os.path.basename(prog),) + args
    return os.spawnv(os.P_WAIT, prog, args2)
def exec_pg_command_pipe(name, *args):
    """Run PostgreSQL tool `name` and return its (stdin, stdout) pipe pair
    in binary mode, as produced by os.popen2.

    @raise Exception: when the tool cannot be located.
    """
    prog = find_pg_tool(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # Quote the program path on NT (it may contain spaces).
    if os.name == "nt":
        cmd = '"' + prog + '" ' + ' '.join(args)
    else:
        cmd = prog + ' ' + ' '.join(args)
    return os.popen2(cmd, 'b')
def exec_command_pipe(name, *args):
    """Run executable `name` (searched on $PATH) and return its
    (stdin, stdout) pipe pair in binary mode, as produced by os.popen2.

    @raise Exception: when the executable cannot be located.
    """
    prog = find_in_path(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # Quote the program path on NT (it may contain spaces).
    if os.name == "nt":
        cmd = '"'+prog+'" '+' '.join(args)
    else:
        cmd = prog+' '+' '.join(args)
    return os.popen2(cmd, 'b')
#----------------------------------------------------------
# File paths
@ -169,89 +169,89 @@ def exec_command_pipe(name, *args):
#file_path_addons = os.path.join(file_path_root, 'addons')
def file_open(name, mode="r", subdir='addons', pathinfo=False):
"""Open a file from the Tiny ERP root, using a subdir folder.
"""Open a file from the Tiny ERP root, using a subdir folder.
>>> file_open('hr/report/timesheer.xsl')
>>> file_open('addons/hr/report/timesheet.xsl')
>>> file_open('../../base/report/rml_template.xsl', subdir='addons/hr/report', pathinfo=True)
>>> file_open('hr/report/timesheer.xsl')
>>> file_open('addons/hr/report/timesheet.xsl')
>>> file_open('../../base/report/rml_template.xsl', subdir='addons/hr/report', pathinfo=True)
@param name: name of the file
@param mode: file open mode
@param subdir: subdirectory
@param pathinfo: if True returns tupple (fileobject, filepath)
@param name: name of the file
@param mode: file open mode
@param subdir: subdirectory
@param pathinfo: if True returns tupple (fileobject, filepath)
@return: fileobject if pathinfo is False else (fileobject, filepath)
"""
@return: fileobject if pathinfo is False else (fileobject, filepath)
"""
adp = os.path.normcase(os.path.abspath(config['addons_path']))
rtp = os.path.normcase(os.path.abspath(config['root_path']))
adp = os.path.normcase(os.path.abspath(config['addons_path']))
rtp = os.path.normcase(os.path.abspath(config['root_path']))
if name.replace(os.path.sep, '/').startswith('addons/'):
subdir = 'addons'
name = name[7:]
if name.replace(os.path.sep, '/').startswith('addons/'):
subdir = 'addons'
name = name[7:]
# First try to locate in addons_path
if subdir:
subdir2 = subdir
if subdir2.replace(os.path.sep, '/').startswith('addons/'):
subdir2 = subdir2[7:]
# First try to locate in addons_path
if subdir:
subdir2 = subdir
if subdir2.replace(os.path.sep, '/').startswith('addons/'):
subdir2 = subdir2[7:]
subdir2 = (subdir2 != 'addons' or None) and subdir2
subdir2 = (subdir2 != 'addons' or None) and subdir2
try:
if subdir2:
fn = os.path.join(adp, subdir2, name)
else:
fn = os.path.join(adp, name)
fn = os.path.normpath(fn)
fo = file_open(fn, mode=mode, subdir=None, pathinfo=pathinfo)
if pathinfo:
return fo, fn
return fo
except IOError, e:
pass
try:
if subdir2:
fn = os.path.join(adp, subdir2, name)
else:
fn = os.path.join(adp, name)
fn = os.path.normpath(fn)
fo = file_open(fn, mode=mode, subdir=None, pathinfo=pathinfo)
if pathinfo:
return fo, fn
return fo
except IOError, e:
pass
if subdir:
name = os.path.join(rtp, subdir, name)
else:
name = os.path.join(rtp, name)
if subdir:
name = os.path.join(rtp, subdir, name)
else:
name = os.path.join(rtp, name)
name = os.path.normpath(name)
name = os.path.normpath(name)
# Check for a zipfile in the path
head = name
zipname = False
name2 = False
while True:
head, tail = os.path.split(head)
if not tail:
break
if zipname:
zipname = os.path.join(tail, zipname)
else:
zipname = tail
if zipfile.is_zipfile(head+'.zip'):
import StringIO
zfile = zipfile.ZipFile(head+'.zip')
try:
fo = StringIO.StringIO(zfile.read(os.path.join(
os.path.basename(head), zipname).replace(
os.sep, '/')))
# Check for a zipfile in the path
head = name
zipname = False
name2 = False
while True:
head, tail = os.path.split(head)
if not tail:
break
if zipname:
zipname = os.path.join(tail, zipname)
else:
zipname = tail
if zipfile.is_zipfile(head+'.zip'):
import StringIO
zfile = zipfile.ZipFile(head+'.zip')
try:
fo = StringIO.StringIO(zfile.read(os.path.join(
os.path.basename(head), zipname).replace(
os.sep, '/')))
if pathinfo:
return fo, name
return fo
except:
name2 = os.path.normpath(os.path.join(head + '.zip', zipname))
pass
for i in (name2, name):
if i and os.path.isfile(i):
fo = file(i, mode)
if pathinfo:
return fo, i
return fo
if pathinfo:
return fo, name
return fo
except:
name2 = os.path.normpath(os.path.join(head + '.zip', zipname))
pass
for i in (name2, name):
if i and os.path.isfile(i):
fo = file(i, mode)
if pathinfo:
return fo, i
return fo
raise IOError, 'File not found : '+str(name)
raise IOError, 'File not found : '+str(name)
#----------------------------------------------------------
@ -294,244 +294,244 @@ def flatten(list):
# Emails
#----------------------------------------------------------
def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=None, on_error=False, reply_to=False, tinycrm=False):
"""Send an email."""
if not email_cc:
email_cc=[]
if not email_bcc:
email_bcc=[]
import smtplib
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
from email.Header import Header
from email.Utils import formatdate, COMMASPACE
"""Send an email."""
if not email_cc:
email_cc=[]
if not email_bcc:
email_bcc=[]
import smtplib
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
from email.Header import Header
from email.Utils import formatdate, COMMASPACE
msg = MIMEText(body or '', _charset='utf-8')
msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
msg['From'] = email_from
del msg['Reply-To']
if reply_to:
msg['Reply-To'] = msg['From']+', '+reply_to
msg['To'] = COMMASPACE.join(email_to)
if email_cc:
msg['Cc'] = COMMASPACE.join(email_cc)
if email_bcc:
msg['Bcc'] = COMMASPACE.join(email_bcc)
msg['Date'] = formatdate(localtime=True)
if tinycrm:
msg['Message-Id'] = '<'+str(time.time())+'-tinycrm-'+str(tinycrm)+'@'+socket.gethostname()+'>'
try:
s = smtplib.SMTP()
s.connect(config['smtp_server'])
if config['smtp_user'] or config['smtp_password']:
s.login(config['smtp_user'], config['smtp_password'])
s.sendmail(email_from, flatten([email_to, email_cc, email_bcc]), msg.as_string())
s.quit()
except Exception, e:
import logging
logging.getLogger().error(str(e))
return True
msg = MIMEText(body or '', _charset='utf-8')
msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
msg['From'] = email_from
del msg['Reply-To']
if reply_to:
msg['Reply-To'] = msg['From']+', '+reply_to
msg['To'] = COMMASPACE.join(email_to)
if email_cc:
msg['Cc'] = COMMASPACE.join(email_cc)
if email_bcc:
msg['Bcc'] = COMMASPACE.join(email_bcc)
msg['Date'] = formatdate(localtime=True)
if tinycrm:
msg['Message-Id'] = '<'+str(time.time())+'-tinycrm-'+str(tinycrm)+'@'+socket.gethostname()+'>'
try:
s = smtplib.SMTP()
s.connect(config['smtp_server'])
if config['smtp_user'] or config['smtp_password']:
s.login(config['smtp_user'], config['smtp_password'])
s.sendmail(email_from, flatten([email_to, email_cc, email_bcc]), msg.as_string())
s.quit()
except Exception, e:
import logging
logging.getLogger().error(str(e))
return True
#----------------------------------------------------------
# Emails
#----------------------------------------------------------
def email_send_attach(email_from, email_to, subject, body, email_cc=None, email_bcc=None, on_error=False, reply_to=False, attach=None, tinycrm=False):
"""Send an email."""
if not email_cc:
email_cc=[]
if not email_bcc:
email_bcc=[]
if not attach:
attach=[]
import smtplib
from email.MIMEText import MIMEText
from email.MIMEBase import MIMEBase
from email.MIMEMultipart import MIMEMultipart
from email.Header import Header
from email.Utils import formatdate, COMMASPACE
from email import Encoders
"""Send an email."""
if not email_cc:
email_cc=[]
if not email_bcc:
email_bcc=[]
if not attach:
attach=[]
import smtplib
from email.MIMEText import MIMEText
from email.MIMEBase import MIMEBase
from email.MIMEMultipart import MIMEMultipart
from email.Header import Header
from email.Utils import formatdate, COMMASPACE
from email import Encoders
msg = MIMEMultipart()
msg = MIMEMultipart()
msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
msg['From'] = email_from
del msg['Reply-To']
if reply_to:
msg['Reply-To'] = reply_to
msg['To'] = COMMASPACE.join(email_to)
if email_cc:
msg['Cc'] = COMMASPACE.join(email_cc)
if email_bcc:
msg['Bcc'] = COMMASPACE.join(email_bcc)
if tinycrm:
msg['Message-Id'] = '<'+str(time.time())+'-tinycrm-'+str(tinycrm)+'@'+socket.gethostname()+'>'
msg['Date'] = formatdate(localtime=True)
msg.attach( MIMEText(body or '', _charset='utf-8') )
for (fname,fcontent) in attach:
part = MIMEBase('application', "octet-stream")
part.set_payload( fcontent )
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % (fname,))
msg.attach(part)
try:
s = smtplib.SMTP()
s.connect(config['smtp_server'])
if config['smtp_user'] or config['smtp_password']:
s.login(config['smtp_user'], config['smtp_password'])
s.sendmail(email_from, flatten([email_to, email_cc, email_bcc]), msg.as_string())
s.quit()
except Exception, e:
import logging
logging.getLogger().error(str(e))
return True
msg['Subject'] = Header(subject.decode('utf8'), 'utf-8')
msg['From'] = email_from
del msg['Reply-To']
if reply_to:
msg['Reply-To'] = reply_to
msg['To'] = COMMASPACE.join(email_to)
if email_cc:
msg['Cc'] = COMMASPACE.join(email_cc)
if email_bcc:
msg['Bcc'] = COMMASPACE.join(email_bcc)
if tinycrm:
msg['Message-Id'] = '<'+str(time.time())+'-tinycrm-'+str(tinycrm)+'@'+socket.gethostname()+'>'
msg['Date'] = formatdate(localtime=True)
msg.attach( MIMEText(body or '', _charset='utf-8') )
for (fname,fcontent) in attach:
part = MIMEBase('application', "octet-stream")
part.set_payload( fcontent )
Encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="%s"' % (fname,))
msg.attach(part)
try:
s = smtplib.SMTP()
s.connect(config['smtp_server'])
if config['smtp_user'] or config['smtp_password']:
s.login(config['smtp_user'], config['smtp_password'])
s.sendmail(email_from, flatten([email_to, email_cc, email_bcc]), msg.as_string())
s.quit()
except Exception, e:
import logging
logging.getLogger().error(str(e))
return True
#----------------------------------------------------------
# SMS
#----------------------------------------------------------
# text must be latin-1 encoded
def sms_send(user, password, api_id, text, to):
import urllib
params = urllib.urlencode({'user': user, 'password': password, 'api_id': api_id, 'text': text, 'to':to})
#f = urllib.urlopen("http://api.clickatell.com/http/sendmsg", params)
f = urllib.urlopen("http://196.7.150.220/http/sendmsg", params)
print f.read()
return True
import urllib
params = urllib.urlencode({'user': user, 'password': password, 'api_id': api_id, 'text': text, 'to':to})
#f = urllib.urlopen("http://api.clickatell.com/http/sendmsg", params)
f = urllib.urlopen("http://196.7.150.220/http/sendmsg", params)
print f.read()
return True
#---------------------------------------------------------
# Class that stores an updateable string (used in wizards)
#---------------------------------------------------------
class UpdateableStr(local):
    """Thread-local mutable string holder, used in wizards.

    (Deduplicated: every method appeared twice after a mangled
    whitespace-conversion commit.)
    """

    def __init__(self, string=''):
        self.string = string

    def __str__(self):
        return str(self.string)

    def __repr__(self):
        return str(self.string)

    def __nonzero__(self):
        # Python 2 truth-value hook: empty string means False.
        return bool(self.string)
class UpdateableDict(local):
    '''Stores an updateable dict to use in wizards

    Thread-local wrapper delegating the whole dict protocol to an inner
    dict.  (Deduplicated: every method appeared twice after a mangled
    whitespace-conversion commit; a redundant second __getitem__
    definition was dropped.)
    '''

    def __init__(self, dict=None):
        if dict is None:
            dict = {}
        self.dict = dict

    def __str__(self):
        return str(self.dict)

    def __repr__(self):
        return str(self.dict)

    def clear(self):
        return self.dict.clear()

    def keys(self):
        return self.dict.keys()

    def __setitem__(self, i, y):
        self.dict.__setitem__(i, y)

    def __getitem__(self, i):
        return self.dict.__getitem__(i)

    def copy(self):
        return self.dict.copy()

    def iteritems(self):
        return self.dict.iteritems()

    def iterkeys(self):
        return self.dict.iterkeys()

    def itervalues(self):
        return self.dict.itervalues()

    def pop(self, k, d=None):
        return self.dict.pop(k, d)

    def popitem(self):
        return self.dict.popitem()

    def setdefault(self, k, d=None):
        return self.dict.setdefault(k, d)

    def update(self, E, **F):
        # Bug fix: was `self.dict.update(E, F)`, which passed the keyword
        # dict as a second positional argument and raised TypeError.
        return self.dict.update(E, **F)

    def values(self):
        return self.dict.values()

    def get(self, k, d=None):
        return self.dict.get(k, d)

    def has_key(self, k):
        return self.dict.has_key(k)

    def items(self):
        return self.dict.items()

    def __cmp__(self, y):
        return self.dict.__cmp__(y)

    def __contains__(self, k):
        return self.dict.__contains__(k)

    def __delitem__(self, y):
        return self.dict.__delitem__(y)

    def __eq__(self, y):
        return self.dict.__eq__(y)

    def __ge__(self, y):
        return self.dict.__ge__(y)

    def __gt__(self, y):
        return self.dict.__gt__(y)

    def __hash__(self):
        return self.dict.__hash__()

    def __iter__(self):
        return self.dict.__iter__()

    def __le__(self, y):
        return self.dict.__le__(y)

    def __len__(self):
        return self.dict.__len__()

    def __lt__(self, y):
        return self.dict.__lt__(y)

    def __ne__(self, y):
        return self.dict.__ne__(y)
# Don't use ! Use res.currency.round()
class currency(float):
    """Float rounded to `accuracy` decimals at construction time.

    Deprecated (see comment above): use res.currency.round() instead.
    `rounding` defaults to 10**-accuracy and is kept as an attribute for
    callers; the stored float value itself only uses `accuracy`.
    """

    def __init__(self, value, accuracy=2, rounding=None):
        if rounding is None:
            rounding = 10**-accuracy
        self.rounding = rounding
        self.accuracy = accuracy

    def __new__(cls, value, accuracy=2, rounding=None):
        return float.__new__(cls, round(value, accuracy))

    #def __str__(self):
    #   display_value = int(self*(10**(-self.accuracy))/self.rounding)*self.rounding/(10**(-self.accuracy))
    #   return str(display_value)
#
@ -539,99 +539,99 @@ class currency(float):
# Timeout: 0 = no timeout, otherwise in seconds
#
class cache(object):
    """Decorator caching a method's result per (dbname, kwargs) key.

    Timeout: 0 = no timeout, otherwise in seconds.  Calling the wrapped
    method with cr=None flushes the whole cache.
    (Deduplicated: the body appeared twice after a mangled
    whitespace-conversion commit.  `skiparg` is kept for interface
    compatibility; the skip count is effectively hard-coded to 2.)
    """

    def __init__(self, timeout=10000, skiparg=2):
        self.timeout = timeout
        self.cache = {}

    def __call__(self, fn):
        # Names of the cached arguments (skip self and cr).
        arg_names = inspect.getargspec(fn)[0][2:]
        def cached_result(self2, cr=None, *args, **kwargs):
            if cr is None:
                self.cache = {}
                return True

            # Update named arguments with positional argument values
            kwargs.update(dict(zip(arg_names, args)))
            kwargs = kwargs.items()
            kwargs.sort()

            # Work out key as a tuple of ('argname', value) pairs
            key = (('dbname', cr.dbname),) + tuple(kwargs)

            # Check cache and return cached value if possible
            if key in self.cache:
                (value, last_time) = self.cache[key]
                mintime = time.time() - self.timeout
                if self.timeout <= 0 or mintime <= last_time:
                    return value

            # Work out new value, cache it and return it
            # Should copy() this value to avoid futur modf of the cacle ?
            result = fn(self2, cr, **dict(kwargs))

            self.cache[key] = (result, time.time())
            return result
        return cached_result
def to_xml(s):
    """Escape the XML-reserved characters ``&``, ``<`` and ``>`` in *s*.

    ``&`` is replaced first so already-escaped entities are not produced twice.
    """
    return s.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')
def get_languages():
    """Return the mapping of supported locale codes to display names."""
    return {
        'zh_CN': 'Chinese (CN)',
        'zh_TW': 'Chinese (TW)',
        'cs_CZ': 'Czech',
        'de_DE': 'Deutsch',
        'es_AR': 'Español (Argentina)',
        'es_ES': 'Español (España)',
        'fr_FR': 'Français',
        'fr_CH': 'Français (Suisse)',
        'en_EN': 'English (default)',
        'hu_HU': 'Hungarian',
        'it_IT': 'Italiano',
        'pt_BR': 'Portugese (Brasil)',
        'pt_PT': 'Portugese (Portugal)',
        'nl_NL': 'Nederlands',
        'ro_RO': 'Romanian',
        'ru_RU': 'Russian',
        'sv_SE': 'Swedish',
    }
def scan_languages():
    """Return (code, display name) pairs for languages having an i18n CSV file.

    Scans ``<root_path>/i18n/*.csv``; codes absent from get_languages() are
    shown under their raw code.
    """
    import glob
    lang_dict = get_languages()
    csv_paths = glob.glob(os.path.join(config['root_path'], 'i18n', '*.csv'))
    file_list = [os.path.splitext(os.path.basename(path))[0] for path in csv_paths]
    return [(lang, lang_dict.get(lang, lang)) for lang in file_list]
def get_user_companies(cr, user):
    """Return the id of *user*'s company followed by all descendant company ids.

    :param cr: database cursor
    :param user: id of the res.users record
    """
    def _get_company_children(cr, ids):
        # recursively collect ids of companies whose parent is in `ids`
        if not ids:
            return []
        cr.execute('SELECT id FROM res_company WHERE parent_id = any(array[%s])' % (','.join([str(x) for x in ids]),))
        res = [x[0] for x in cr.fetchall()]
        res.extend(_get_company_children(cr, res))
        return res
    cr.execute('SELECT comp.id FROM res_company AS comp, res_users AS u WHERE u.id = %d AND comp.id = u.company_id' % (user,))
    compids = [cr.fetchone()[0]]
    compids.extend(_get_company_children(cr, compids))
    return compids
def mod10r(number):
    """
    Input number : account or invoice number
    Output return: the same number completed with the recursive mod10
    key (Swiss ESR check-digit scheme; non-digit characters are copied
    through without affecting the running carry)
    """
    codec = [0, 9, 4, 6, 8, 2, 7, 1, 3, 5]
    report = 0
    result = ""
    for digit in number:
        result += digit
        if digit.isdigit():
            report = codec[(int(digit) + report) % 10]
    return result + str((10 - report) % 10)

View File

@ -38,7 +38,7 @@ import inspect
import mx.DateTime as mxdt
class UNIX_LINE_TERMINATOR(csv.excel):
    """csv.excel dialect variant whose rows end with a bare LF instead of CRLF."""
    lineterminator = '\n'

csv.register_dialect("UNIX", UNIX_LINE_TERMINATOR)
@ -46,15 +46,15 @@ csv.register_dialect("UNIX", UNIX_LINE_TERMINATOR)
# TODO: a caching method
#
def translate(cr, name, source_type, lang, source=None):
    """Look up one translation in ir_translation.

    Matches on as many of (name, source) as were provided; returns the
    translated value, or False when nothing is found.
    """
    if source and name:
        cr.execute('select value from ir_translation where lang=%s and type=%s and name=%s and src=%s', (lang, source_type, str(name), source))
    elif name:
        cr.execute('select value from ir_translation where lang=%s and type=%s and name=%s', (lang, source_type, str(name)))
    elif source:
        cr.execute('select value from ir_translation where lang=%s and type=%s and src=%s', (lang, source_type, source))
    res_trans = cr.fetchone()
    res = res_trans and res_trans[0] or False
    return res
class GettextAlias(object):
def __call__(self, source):
@ -71,469 +71,469 @@ _ = GettextAlias()
# class to handle po files
class TinyPoFile(object):
def __init__(self, buffer):
self.buffer = buffer
def __init__(self, buffer):
self.buffer = buffer
def __iter__(self):
self.buffer.seek(0)
self.lines = self.buffer.readlines()
self.first = True
return self
def __iter__(self):
self.buffer.seek(0)
self.lines = self.buffer.readlines()
self.first = True
return self
def next(self):
def unquote(str):
return str[1:-1].replace("\\n", "\n") \
.replace('\\"', "\"")
def next(self):
def unquote(str):
return str[1:-1].replace("\\n", "\n") \
.replace('\\"', "\"")
type = name = res_id = source = trad = None
type = name = res_id = source = trad = None
line = None
while not line:
if 0 == len(self.lines):
raise StopIteration()
line = self.lines.pop(0).strip()
line = None
while not line:
if 0 == len(self.lines):
raise StopIteration()
line = self.lines.pop(0).strip()
while line.startswith('#'):
if line.startswith('#:'):
type, name, res_id = line[2:].strip().split(':')
line = self.lines.pop(0).strip()
if not line.startswith('msgid'):
raise Exception("malformed file")
source = line[7:-1]
line = self.lines.pop(0).strip()
if not source and self.first:
# if the source is "" and it's the first msgid, it's the special
# msgstr with the informations about the traduction and the
# traductor; we skip it
while line:
line = self.lines.pop(0).strip()
return next()
while line.startswith('#'):
if line.startswith('#:'):
type, name, res_id = line[2:].strip().split(':')
line = self.lines.pop(0).strip()
if not line.startswith('msgid'):
raise Exception("malformed file")
source = line[7:-1]
line = self.lines.pop(0).strip()
if not source and self.first:
# if the source is "" and it's the first msgid, it's the special
# msgstr with the informations about the traduction and the
# traductor; we skip it
while line:
line = self.lines.pop(0).strip()
return next()
while not line.startswith('msgstr'):
if not line:
raise Exception('malformed file')
source += unquote(line)
line = self.lines.pop(0).strip()
while not line.startswith('msgstr'):
if not line:
raise Exception('malformed file')
source += unquote(line)
line = self.lines.pop(0).strip()
trad = line[8:-1]
line = self.lines.pop(0).strip()
while line:
trad += unquote(line)
line = self.lines.pop(0).strip()
trad = line[8:-1]
line = self.lines.pop(0).strip()
while line:
trad += unquote(line)
line = self.lines.pop(0).strip()
self.first = False
return type, name, res_id, source, trad
self.first = False
return type, name, res_id, source, trad
def write_infos(self, modules):
import release
self.buffer.write("# Translation of %(project)s.\n" \
"# This file containt the translation of the following modules:\n" \
"%(modules)s" \
"#\n" \
"msgid \"\"\n" \
"msgstr \"\"\n" \
"\"Project-Id-Version: %(project)s %(version)s\"\n" \
"\"Report-Msgid-Bugs-To: %(bugmail)s\"\n" \
"\"POT-Creation-Date: %(now)s\"\n" \
"\"PO-Revision-Date: %(now)s\"\n" \
"\"Last-Translator: <>\"\n" \
"\"Language-Team: \"\n" \
"\"MIME-Version: 1.0\"\n" \
"\"Content-Type: text/plain; charset=UTF-8\"\n" \
"\"Content-Transfer-Encoding: \"\n" \
"\"Plural-Forms: \"\n" \
"\n"
def write_infos(self, modules):
import release
self.buffer.write("# Translation of %(project)s.\n" \
"# This file containt the translation of the following modules:\n" \
"%(modules)s" \
"#\n" \
"msgid \"\"\n" \
"msgstr \"\"\n" \
"\"Project-Id-Version: %(project)s %(version)s\"\n" \
"\"Report-Msgid-Bugs-To: %(bugmail)s\"\n" \
"\"POT-Creation-Date: %(now)s\"\n" \
"\"PO-Revision-Date: %(now)s\"\n" \
"\"Last-Translator: <>\"\n" \
"\"Language-Team: \"\n" \
"\"MIME-Version: 1.0\"\n" \
"\"Content-Type: text/plain; charset=UTF-8\"\n" \
"\"Content-Transfer-Encoding: \"\n" \
"\"Plural-Forms: \"\n" \
"\n"
% { 'project': release.description,
'version': release.version,
'modules': reduce(lambda s, m: s + "#\t* %s\n" % m, modules, ""),
'bugmail': 'support@tinyerp.com', #TODO: use variable from release
'now': mxdt.ISO.strUTC(mxdt.ISO.DateTime.utc()),
}
)
% { 'project': release.description,
'version': release.version,
'modules': reduce(lambda s, m: s + "#\t* %s\n" % m, modules, ""),
'bugmail': 'support@tinyerp.com', #TODO: use variable from release
'now': mxdt.ISO.strUTC(mxdt.ISO.DateTime.utc()),
}
)
def write(self, module, type, name, res_id, source, trad):
def quote(str):
return '"%s"' % str.replace('"','\\"') \
.replace('\n', '\\n"\n"')
def write(self, module, type, name, res_id, source, trad):
def quote(str):
return '"%s"' % str.replace('"','\\"') \
.replace('\n', '\\n"\n"')
self.buffer.write("#. module: %s\n" \
"#, python-format\n" \
"#: %s:%s:%s\n" \
"msgid %s\n" \
"msgstr %s\n\n" \
% (module, type, name, str(res_id), quote(source), quote(trad))
)
self.buffer.write("#. module: %s\n" \
"#, python-format\n" \
"#: %s:%s:%s\n" \
"msgid %s\n" \
"msgstr %s\n\n" \
% (module, type, name, str(res_id), quote(source), quote(trad))
)
# Methods to export the translation file
def trans_export(lang, modules, buffer, format, dbname=None):
    """Export the translations of *modules* for *lang* into *buffer*.

    :param format: 'csv' or 'po'; anything else raises.
    """
    trans = trans_generate(lang, modules, dbname)
    if format == 'csv':
        writer = csv.writer(buffer, 'UNIX')
        for row in trans:
            writer.writerow(row)
    elif format == 'po':
        trans.pop(0)  # drop the csv-style header row
        # use the local class directly instead of going through the
        # tools package attribute (same object, no circular lookup)
        writer = TinyPoFile(buffer)
        writer.write_infos(modules)
        for module, type, name, res_id, src, trad in trans:
            writer.write(module, type, name, res_id, src, trad)
    else:
        raise Exception(_('Bad file format'))
    del trans
def trans_parse_xsl(de):
    """Recursively collect translatable text from an XSL DOM element.

    Only text under elements carrying a ``t`` attribute is exported;
    strings are returned utf-8 encoded.
    """
    res = []
    for n in [i for i in de.childNodes if (i.nodeType == i.ELEMENT_NODE)]:
        if n.hasAttribute("t"):
            for m in [j for j in n.childNodes if (j.nodeType == j.TEXT_NODE)]:
                l = m.data.strip().replace('\n', ' ')
                if len(l):
                    res.append(l.encode("utf8"))
        res.extend(trans_parse_xsl(n))
    return res
def trans_parse_rml(de):
    """Recursively collect translatable text from an RML DOM element.

    ``[[ ... ]]`` expression placeholders are stripped out; the remaining
    fragments are returned utf-8 encoded.
    """
    res = []
    for n in [i for i in de.childNodes if (i.nodeType == i.ELEMENT_NODE)]:
        for m in [j for j in n.childNodes if (j.nodeType == j.TEXT_NODE)]:
            string_list = [s.replace('\n', ' ').strip() for s in re.split(r'\[\[.+?\]\]', m.data)]
            for s in string_list:
                if s:
                    res.append(s.encode("utf8"))
        res.extend(trans_parse_rml(n))
    return res
def trans_parse_view(de):
    """Recursively collect translatable attributes from a view DOM element.

    Exports non-empty ``string`` and ``sum`` attributes, utf-8 encoded,
    in document order.
    """
    res = []
    if de.hasAttribute("string"):
        s = de.getAttribute('string')
        if s:
            res.append(s.encode("utf8"))
    if de.hasAttribute("sum"):
        s = de.getAttribute('sum')
        if s:
            res.append(s.encode("utf8"))
    for n in [i for i in de.childNodes if (i.nodeType == i.ELEMENT_NODE)]:
        res.extend(trans_parse_view(n))
    return res
# tests whether an object is in a list of modules
def in_modules(object_name, modules):
    """Return True when the object named *object_name* belongs to *modules*.

    'all' in *modules* matches everything; the 'ir', 'res' and 'workflow'
    model prefixes are owned by the 'base' module.
    """
    if 'all' in modules:
        return True

    module_dict = {
        'ir': 'base',
        'res': 'base',
        'workflow': 'base',
    }
    module = object_name.split('.')[0]
    module = module_dict.get(module, module)
    return module in modules
def trans_generate(lang, modules, dbname=None):
    """Collect every translatable string of *modules* for language *lang*.

    Walks ir_model_data records (views, wizards, fields, reports,
    constraints, translatable columns) and the python sources for _()
    calls.  Returns a list of rows
    [module, type, name, res_id, src, value] preceded by a header row.
    """
    logger = netsvc.Logger()
    if not dbname:
        dbname = tools.config['db_name']
    pool = pooler.get_pool(dbname)
    trans_obj = pool.get('ir.translation')
    model_data_obj = pool.get('ir.model.data')
    cr = pooler.get_db(dbname).cursor()
    uid = 1
    l = pool.obj_pool.items()
    l.sort()

    query = 'SELECT name, model, res_id, module' \
            ' FROM ir_model_data'
    if not 'all' in modules:
        query += ' WHERE module IN (%s)' % ','.join(['%s'] * len(modules))
    query += ' ORDER BY module, model, name'

    query_param = not 'all' in modules and modules or None
    cr.execute(query, query_param)

    _to_translate = []
    def push_translation(module, type, name, id, source):
        # keep insertion order but skip exact duplicates
        tuple = (module, type, name, id, source)
        if not tuple in _to_translate:
            _to_translate.append(tuple)

    for (xml_name, model, res_id, module) in cr.fetchall():
        xml_name = module + '.' + xml_name
        obj = pool.get(model).browse(cr, uid, res_id)
        if model == 'ir.ui.view':
            d = xml.dom.minidom.parseString(obj.arch)
            for t in trans_parse_view(d.documentElement):
                push_translation(module, 'view', obj.model, 0, t)
        elif model == 'ir.actions.wizard':
            service_name = 'wizard.' + obj.wiz_name
            obj2 = netsvc._service[service_name]
            for state_name, state_def in obj2.states.iteritems():
                if 'result' in state_def:
                    result = state_def['result']
                    if result['type'] != 'form':
                        continue
                    name = obj.wiz_name + ',' + state_name

                    # export fields
                    for field_name, field_def in result['fields'].iteritems():
                        if 'string' in field_def:
                            source = field_def['string']
                            res_name = name + ',' + field_name
                            push_translation(module, 'wizard_field', res_name, 0, source)

                    # export arch
                    arch = result['arch']
                    if not isinstance(arch, UpdateableStr):
                        d = xml.dom.minidom.parseString(arch)
                        for t in trans_parse_view(d.documentElement):
                            push_translation(module, 'wizard_view', name, 0, t)

                    # export button labels
                    for but_args in result['state']:
                        button_name = but_args[0]
                        button_label = but_args[1]
                        res_name = name + ',' + button_name
                        push_translation(module, 'wizard_button', res_name, 0, button_label)
        elif model == 'ir.model.fields':
            field_name = obj.name
            field_def = pool.get(obj.model)._columns[field_name]

            name = obj.model + "," + field_name
            push_translation(module, 'field', name, 0, field_def.string.encode('utf8'))

            if field_def.help:
                push_translation(module, 'help', name, 0, field_def.help.encode('utf8'))

            if field_def.translate:
                ids = pool.get(obj.model).search(cr, uid, [])
                obj_values = pool.get(obj.model).read(cr, uid, ids, [field_name])
                for obj_value in obj_values:
                    res_id = obj_value['id']
                    if obj.name in ('ir.model', 'ir.ui.menu'):
                        res_id = 0
                    model_data_ids = model_data_obj.search(cr, uid, [
                        ('model', '=', model),
                        ('res_id', '=', res_id),
                        ])
                    if not model_data_ids:
                        push_translation(module, 'model', name, 0, obj_value[field_name])

            if hasattr(field_def, 'selection') and isinstance(field_def.selection, (list, tuple)):
                for key, val in field_def.selection:
                    push_translation(module, 'selection', name, 0, val.encode('utf8'))
        elif model == 'ir.actions.report.xml':
            name = obj.report_name
            fname = ""
            if obj.report_rml:
                fname = obj.report_rml
                parse_func = trans_parse_rml
                report_type = "rml"
            elif obj.report_xsl:
                fname = obj.report_xsl
                parse_func = trans_parse_xsl
                report_type = "xsl"
            try:
                xmlstr = tools.file_open(fname).read()
                d = xml.dom.minidom.parseString(xmlstr)
                for t in parse_func(d.documentElement):
                    push_translation(module, report_type, name, 0, t)
            except IOError:
                if fname:
                    logger.notifyChannel("init", netsvc.LOG_WARNING, "couldn't export translation for report %s %s %s" % (name, report_type, fname))

        for constraint in pool.get(model)._constraints:
            msg = constraint[1]
            push_translation(module, 'constraint', model, 0, msg.encode('utf8'))

        for field_name, field_def in pool.get(model)._columns.items():
            if field_def.translate:
                name = model + "," + field_name
                trad = getattr(obj, field_name) or ''
                push_translation(module, 'model', name, xml_name, trad)

    # parse source code for _() calls
    def get_module_from_path(path):
        relative_addons_path = tools.config['addons_path'][len(tools.config['root_path'])+1:]
        if path.startswith(relative_addons_path):
            path = path[len(relative_addons_path)+1:]
            return path.split(os.path.sep)[0]
        return 'base' # files that are not in a module are considered as being in 'base' module

    modobj = pool.get('ir.module.module')
    for root, dirs, files in os.walk(tools.config['root_path']):
        for fname in fnmatch.filter(files, '*.py'):
            fabsolutepath = join(root, fname)
            frelativepath = fabsolutepath[len(tools.config['root_path'])+1:]
            module = get_module_from_path(frelativepath)
            # (was `<> []`; != is identical and forward-compatible)
            is_mod_installed = modobj.search(cr, uid, [('state', '=', 'installed'), ('name', '=', module)]) != []
            if (('all' in modules) or (module in modules)) and is_mod_installed:
                code_string = tools.file_open(fabsolutepath, subdir='').read()
                iter = re.finditer(
                    r'[^a-zA-Z0-9_]_\([\s]*["\'](.+?)["\'][\s]*\)',
                    code_string)
                for i in iter:
                    push_translation(module, 'code', frelativepath, 0, i.group(1).encode('utf8'))

    out = [["module", "type", "name", "res_id", "src", "value"]] # header
    # translate strings marked as to be translated
    for module, type, name, id, source in _to_translate:
        trans = trans_obj._get_source(cr, uid, name, type, lang, source)
        out.append([module, type, name, id, source, trans or ''])

    cr.close()
    return out
def trans_load(db_name, filename, lang, strict=False):
    """Load the translation file *filename* into database *db_name*.

    The file format is inferred from the extension (csv or po).
    Returns trans_load_data's result, or None when the file is unreadable.
    """
    logger = netsvc.Logger()
    try:
        fileobj = open(filename, 'r')
        fileformat = os.path.splitext(filename)[-1][1:].lower()
        # BUGFIX: forward the caller's strict flag (was hard-coded to False)
        r = trans_load_data(db_name, fileobj, fileformat, lang, strict=strict)
        fileobj.close()
        return r
    except IOError:
        logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read file")
        return None
def trans_load_data(db_name, fileobj, fileformat, lang, strict=False, lang_name=None):
    """Import translations from *fileobj* into database *db_name*.

    Ensures the res.lang record for *lang* exists and is translatable,
    then creates or updates one ir.translation record per non-empty row.

    :param fileformat: 'csv' or 'po'; anything else raises.
    :param strict: when False, 'model' entries are propagated to every
        record of the model sharing the same source string.
    """
    logger = netsvc.Logger()
    logger.notifyChannel("init", netsvc.LOG_INFO,
            'loading translation file for language %s' % (lang))
    pool = pooler.get_pool(db_name)
    lang_obj = pool.get('res.lang')
    trans_obj = pool.get('ir.translation')
    model_data_obj = pool.get('ir.model.data')
    try:
        uid = 1
        cr = pooler.get_db(db_name).cursor()

        # make sure the language exists and is marked translatable
        ids = lang_obj.search(cr, uid, [('code', '=', lang)])
        if not ids:
            if not lang_name:
                lang_name = lang
                languages = tools.get_languages()
                if lang in languages:
                    lang_name = languages[lang]
            ids = lang_obj.create(cr, uid, {
                'code': lang,
                'name': lang_name,
                'translatable': 1,
                })
        else:
            lang_obj.write(cr, uid, ids, {'translatable': 1})
        # (dead code removed: a list of (code, name) pairs of all languages
        # was computed here but never used)

        fileobj.seek(0)

        if fileformat == 'csv':
            reader = csv.reader(fileobj, quotechar='"', delimiter=',')
            # read the first line of the file (it contains columns titles)
            for row in reader:
                f = row
                break
        elif fileformat == 'po':
            reader = TinyPoFile(fileobj)
            f = ['type', 'name', 'res_id', 'src', 'value']
        else:
            raise Exception(_('Bad file format'))

        # read the rest of the file
        line = 1
        for row in reader:
            line += 1
            # skip empty rows and rows where the translation field (=last field) is empty
            if (not row) or (not row[-1]):
                continue

            # dictionary which holds values for this line of the csv file
            # {'lang': ..., 'type': ..., 'name': ..., 'res_id': ...,
            #  'src': ..., 'value': ...}
            dic = {'lang': lang}
            for i in range(len(f)):
                if f[i] in ('module',):
                    continue
                dic[f[i]] = row[i]

            try:
                dic['res_id'] = int(dic['res_id'])
            except:
                # res_id is an xml id ("module.name"): resolve it to a db id
                model_data_ids = model_data_obj.search(cr, uid, [
                    ('model', '=', dic['name'].split(',')[0]),
                    ('module', '=', dic['res_id'].split('.', 1)[0]),
                    ('name', '=', dic['res_id'].split('.', 1)[1]),
                    ])
                if model_data_ids:
                    dic['res_id'] = model_data_obj.browse(cr, uid,
                            model_data_ids[0]).res_id
                else:
                    dic['res_id'] = False

            if dic['type'] == 'model' and not strict:
                (model, field) = dic['name'].split(',')

                # get the ids of the resources of this model which share
                # the same source
                obj = pool.get(model)
                if obj:
                    ids = obj.search(cr, uid, [(field, '=', dic['src'])])

                    # if the resource id (res_id) is in that list, use it,
                    # otherwise use the whole list
                    ids = (dic['res_id'] in ids) and [dic['res_id']] or ids
                    for id in ids:
                        dic['res_id'] = id
                        ids = trans_obj.search(cr, uid, [
                            ('lang', '=', lang),
                            ('type', '=', dic['type']),
                            ('name', '=', dic['name']),
                            ('src', '=', dic['src']),
                            ('res_id', '=', dic['res_id'])
                            ])
                        if ids:
                            trans_obj.write(cr, uid, ids, {'value': dic['value']})
                        else:
                            trans_obj.create(cr, uid, dic)
            else:
                ids = trans_obj.search(cr, uid, [
                    ('lang', '=', lang),
                    ('type', '=', dic['type']),
                    ('name', '=', dic['name']),
                    ('src', '=', dic['src'])
                    ])
                if ids:
                    trans_obj.write(cr, uid, ids, {'value': dic['value']})
                else:
                    trans_obj.create(cr, uid, dic)
            cr.commit()
            # NOTE(review): a per-row exception handler (log + rollback +
            # fresh cursor) was commented out in the original; row errors
            # currently abort the whole import
        cr.close()
        logger.notifyChannel("init", netsvc.LOG_INFO,
                "translation file loaded succesfully")
    except IOError:
        logger.notifyChannel("init", netsvc.LOG_ERROR, "couldn't read file")

View File

@ -36,60 +36,60 @@ import tools
# remove an existing version of modules if it exist
def remove(name):
adp = tools.config['addons_path']
addons = os.listdir(adp)
if name in addons:
try:
shutil.rmtree(os.path.join(adp, name))
except:
print "Unable to remove module %s !" % name
adp = tools.config['addons_path']
addons = os.listdir(adp)
if name in addons:
try:
shutil.rmtree(os.path.join(adp, name))
except:
print "Unable to remove module %s !" % name
def install(name, url):
    """Download the .tar.gz module archive at *url* and unpack it into the addons path.

    Streams the archive ("r|gz") straight from the HTTP response, so it is
    never written to a temporary file.
    """
    tar = tarfile.open(mode="r|gz", fileobj=urllib2.urlopen(url))
    for tarinfo in tar:
        tar.extract(tarinfo, tools.config['addons_path'])
def upgrade():
import pooler
cr = pooler.db.cursor()
import pooler
cr = pooler.db.cursor()
toinit = []
toupdate = []
toinit = []
toupdate = []
# print 'Check for correct rights (create and unlink on addons)...'
# todo: touch addons/test.txt
# todo: rm addons/test.txt
# print 'Check for correct rights (create and unlink on addons)...'
# todo: touch addons/test.txt
# todo: rm addons/test.txt
print 'Check for modules to remove...'
cr.execute('select id,name,url from ir_module_module where state=%s', ('to remove',))
for module_id,name,url in cr.fetchall():
print '\tremoving module %s' % name
remove(name)
cr.execute('update ir_module_module set state=%s where id=%d', ('uninstalled', module_id))
cr.commit()
print 'Check for modules to remove...'
cr.execute('select id,name,url from ir_module_module where state=%s', ('to remove',))
for module_id,name,url in cr.fetchall():
print '\tremoving module %s' % name
remove(name)
cr.execute('update ir_module_module set state=%s where id=%d', ('uninstalled', module_id))
cr.commit()
print 'Check for modules to upgrade...'
cr.execute('select id,name,url from ir_module_module where state=%s', ('to upgrade',))
for module_id,name,url in cr.fetchall():
print '\tupgrading module %s' % name
remove(name)
install(name, url)
cr.execute('update ir_module_module set state=%s where id=%d', ('installed', module_id))
cr.commit()
toupdate.append(name)
print 'Check for modules to upgrade...'
cr.execute('select id,name,url from ir_module_module where state=%s', ('to upgrade',))
for module_id,name,url in cr.fetchall():
print '\tupgrading module %s' % name
remove(name)
install(name, url)
cr.execute('update ir_module_module set state=%s where id=%d', ('installed', module_id))
cr.commit()
toupdate.append(name)
print 'Check for modules to install...'
cr.execute('select id,name,url from ir_module_module where state=%s', ('to install',))
for module_id,name,url in cr.fetchall():
print '\tinstalling module %s' % name
install(name, url)
cr.execute('update ir_module_module set state=%s where id=%d', ('installed', module_id))
cr.commit()
toinit.append(name)
print 'Check for modules to install...'
cr.execute('select id,name,url from ir_module_module where state=%s', ('to install',))
for module_id,name,url in cr.fetchall():
print '\tinstalling module %s' % name
install(name, url)
cr.execute('update ir_module_module set state=%s where id=%d', ('installed', module_id))
cr.commit()
toinit.append(name)
print 'Initializing all datas...'
print 'Initializing all datas...'
cr.commit()
cr.close()
return (toinit, toupdate)
cr.commit()
cr.close()
return (toinit, toupdate)

View File

@ -41,138 +41,138 @@ from netsvc import Logger, LOG_ERROR
import sys
class except_wizard(Exception):
    """Exception raised by wizard code.

    Carries a short title (*name*) and a longer message (*value*); both are
    also exposed through the standard ``args`` tuple.
    """

    def __init__(self, name, value):
        self.name, self.value = name, value
        self.args = (name, value)
class interface(netsvc.Service):
states = {}
states = {}
def __init__(self, name):
assert not netsvc.service_exist('wizard.'+name), 'The wizard "%s" already exists!'%name
super(interface, self).__init__('wizard.'+name)
self.exportMethod(self.execute)
self.wiz_name = name
def __init__(self, name):
assert not netsvc.service_exist('wizard.'+name), 'The wizard "%s" already exists!'%name
super(interface, self).__init__('wizard.'+name)
self.exportMethod(self.execute)
self.wiz_name = name
def translate_view(self, cr, node, state, lang):
if node.nodeType == node.ELEMENT_NODE:
if node.hasAttribute('string') and node.getAttribute('string'):
trans = translate(cr, self.wiz_name+','+state, 'wizard_view', lang, node.getAttribute('string').encode('utf8'))
if trans:
node.setAttribute('string', trans.decode('utf8'))
for n in node.childNodes:
self.translate_view(cr, n, state, lang)
def translate_view(self, cr, node, state, lang):
if node.nodeType == node.ELEMENT_NODE:
if node.hasAttribute('string') and node.getAttribute('string'):
trans = translate(cr, self.wiz_name+','+state, 'wizard_view', lang, node.getAttribute('string').encode('utf8'))
if trans:
node.setAttribute('string', trans.decode('utf8'))
for n in node.childNodes:
self.translate_view(cr, n, state, lang)
def execute_cr(self, cr, uid, data, state='init', context=None):
if not context:
context={}
res = {}
try:
state_def = self.states[state]
result_def = state_def.get('result', {})
def execute_cr(self, cr, uid, data, state='init', context=None):
if not context:
context={}
res = {}
try:
state_def = self.states[state]
result_def = state_def.get('result', {})
actions_res = {}
# iterate through the list of actions defined for this state
for action in state_def.get('actions', []):
# execute them
action_res = action(self, cr, uid, data, context)
assert isinstance(action_res, dict), 'The return value of wizard actions should be a dictionary'
actions_res.update(action_res)
actions_res = {}
# iterate through the list of actions defined for this state
for action in state_def.get('actions', []):
# execute them
action_res = action(self, cr, uid, data, context)
assert isinstance(action_res, dict), 'The return value of wizard actions should be a dictionary'
actions_res.update(action_res)
res = copy.copy(result_def)
res['datas'] = actions_res
res = copy.copy(result_def)
res['datas'] = actions_res
lang = context.get('lang', False)
if result_def['type'] == 'action':
res['action'] = result_def['action'](self, cr, uid, data, context)
elif result_def['type'] == 'form':
fields = copy.copy(result_def['fields'])
arch = copy.copy(result_def['arch'])
button_list = copy.copy(result_def['state'])
lang = context.get('lang', False)
if result_def['type'] == 'action':
res['action'] = result_def['action'](self, cr, uid, data, context)
elif result_def['type'] == 'form':
fields = copy.copy(result_def['fields'])
arch = copy.copy(result_def['arch'])
button_list = copy.copy(result_def['state'])
if isinstance(fields, UpdateableDict):
fields = fields.dict
if isinstance(arch, UpdateableStr):
arch = arch.string
if isinstance(fields, UpdateableDict):
fields = fields.dict
if isinstance(arch, UpdateableStr):
arch = arch.string
# fetch user-set defaut values for the field... shouldn't we pass it the uid?
defaults = ir.ir_get(cr, uid, 'default', False, [('wizard.'+self.wiz_name, False)])
default_values = dict([(x[1], x[2]) for x in defaults])
for val in fields.keys():
if 'default' in fields[val]:
# execute default method for this field
if callable(fields[val]['default']):
fields[val]['value'] = fields[val]['default'](uid, data, state)
else:
fields[val]['value'] = fields[val]['default']
del fields[val]['default']
else:
# if user has set a default value for the field, use it
if val in default_values:
fields[val]['value'] = default_values[val]
if 'selection' in fields[val]:
if not isinstance(fields[val]['selection'], (tuple, list)):
fields[val] = copy.copy(fields[val])
fields[val]['selection'] = fields[val]['selection'](self, cr, uid, context)
# fetch user-set defaut values for the field... shouldn't we pass it the uid?
defaults = ir.ir_get(cr, uid, 'default', False, [('wizard.'+self.wiz_name, False)])
default_values = dict([(x[1], x[2]) for x in defaults])
for val in fields.keys():
if 'default' in fields[val]:
# execute default method for this field
if callable(fields[val]['default']):
fields[val]['value'] = fields[val]['default'](uid, data, state)
else:
fields[val]['value'] = fields[val]['default']
del fields[val]['default']
else:
# if user has set a default value for the field, use it
if val in default_values:
fields[val]['value'] = default_values[val]
if 'selection' in fields[val]:
if not isinstance(fields[val]['selection'], (tuple, list)):
fields[val] = copy.copy(fields[val])
fields[val]['selection'] = fields[val]['selection'](self, cr, uid, context)
if lang:
# translate fields
for field in fields:
trans = translate(cr, self.wiz_name+','+state+','+field, 'wizard_field', lang)
if trans:
fields[field]['string'] = trans
if lang:
# translate fields
for field in fields:
trans = translate(cr, self.wiz_name+','+state+','+field, 'wizard_field', lang)
if trans:
fields[field]['string'] = trans
# translate arch
if not isinstance(arch, UpdateableStr):
doc = dom.minidom.parseString(arch)
self.translate_view(cr, doc, state, lang)
arch = doc.toxml()
# translate arch
if not isinstance(arch, UpdateableStr):
doc = dom.minidom.parseString(arch)
self.translate_view(cr, doc, state, lang)
arch = doc.toxml()
# translate buttons
button_list = list(button_list)
for i, aa in enumerate(button_list):
button_name = aa[0]
trans = translate(cr, self.wiz_name+','+state+','+button_name, 'wizard_button', lang)
if trans:
aa = list(aa)
aa[1] = trans
button_list[i] = aa
# translate buttons
button_list = list(button_list)
for i, aa in enumerate(button_list):
button_name = aa[0]
trans = translate(cr, self.wiz_name+','+state+','+button_name, 'wizard_button', lang)
if trans:
aa = list(aa)
aa[1] = trans
button_list[i] = aa
res['fields'] = fields
res['arch'] = arch
res['state'] = button_list
res['fields'] = fields
res['arch'] = arch
res['state'] = button_list
except Exception, e:
if isinstance(e, except_wizard) \
or isinstance(e, except_osv) \
or isinstance(e, except_orm):
self.abortResponse(2, e.name, 'warning', e.value)
else:
import traceback
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
sys.exc_type, sys.exc_value, sys.exc_traceback))
logger = Logger()
logger.notifyChannel("web-services", LOG_ERROR,
'Exception in call: ' + tb_s)
raise
except Exception, e:
if isinstance(e, except_wizard) \
or isinstance(e, except_osv) \
or isinstance(e, except_orm):
self.abortResponse(2, e.name, 'warning', e.value)
else:
import traceback
tb_s = reduce(lambda x, y: x+y, traceback.format_exception(
sys.exc_type, sys.exc_value, sys.exc_traceback))
logger = Logger()
logger.notifyChannel("web-services", LOG_ERROR,
'Exception in call: ' + tb_s)
raise
if result_def['type'] == 'choice':
next_state = result_def['next_state'](self, cr, uid, data, context)
return self.execute_cr(cr, uid, data, next_state, context)
return res
if result_def['type'] == 'choice':
next_state = result_def['next_state'](self, cr, uid, data, context)
return self.execute_cr(cr, uid, data, next_state, context)
return res
def execute(self, db, uid, data, state='init', context=None):
if not context:
context={}
cr = pooler.get_db(db).cursor()
try:
try:
res = self.execute_cr(cr, uid, data, state, context)
cr.commit()
except Exception:
cr.rollback()
raise
finally:
cr.close()
return res
def execute(self, db, uid, data, state='init', context=None):
if not context:
context={}
cr = pooler.get_db(db).cursor()
try:
try:
res = self.execute_cr(cr, uid, data, state, context)
cr.commit()
except Exception:
cr.rollback()
raise
finally:
cr.close()
return res

View File

@ -26,6 +26,6 @@
##############################################################################
class except_wkf(Exception):
    """Exception raised by workflow code; carries a title and a message."""

    def __init__(self, name, value):
        self.name, self.value = name, value

View File

@ -33,54 +33,54 @@ import netsvc
import pooler
def create(cr, ident, wkf_id):
    """Instantiate workflow *wkf_id* for the resource described by *ident*.

    *ident* is the ``(uid, res_type, res_id)`` triple.  Creates the
    wkf_instance row, spawns a workitem on every flow_start activity, runs a
    first update pass and returns the new instance id.
    """
    uid, res_type, res_id = ident
    cr.execute("select nextval('wkf_instance_id_seq')")
    inst_id = cr.fetchone()[0]
    cr.execute('insert into wkf_instance (id,res_type,res_id,uid,wkf_id) values (%d,%s,%s,%s,%s)', (inst_id, res_type, res_id, uid, wkf_id))
    cr.execute('select * from wkf_activity where flow_start=True and wkf_id=%d', (wkf_id,))
    start_activities = cr.dictfetchall()
    stack = []
    workitem.create(cr, start_activities, inst_id, ident, stack=stack)
    update(cr, inst_id, ident)
    return inst_id
def delete(cr, ident):
    """Remove every workflow instance attached to the resource in *ident*
    (``(uid, res_type, res_id)``)."""
    uid, res_type, res_id = ident
    cr.execute('delete from wkf_instance where res_id=%d and res_type=%s', (res_id, res_type))
def validate(cr, inst_id, ident, signal, force_running=False):
    """Send *signal* to workflow instance *inst_id*.

    Processes every workitem of the instance, then re-checks instance
    completion.  Returns the client action pushed by the last processed
    workitem, or False when none was produced.
    """
    # Initialize before the loop: the original code only assigned `stack`
    # inside the loop body, so an instance with no workitems raised a
    # NameError at the return statement below.
    stack = []
    cr.execute("select * from wkf_workitem where inst_id=%d", (inst_id,))
    for witem in cr.dictfetchall():
        stack = []
        workitem.process(cr, witem, ident, signal, force_running, stack=stack)
    # An action is returned
    _update_end(cr, inst_id, ident)
    return stack and stack[0] or False
def update(cr, inst_id, ident):
    """Re-process every workitem of instance *inst_id* without a signal and
    return whether the instance is now complete (see _update_end)."""
    cr.execute("select * from wkf_workitem where inst_id=%d", (inst_id,))
    for witem in cr.dictfetchall():
        workitem.process(cr, witem, ident, stack=[])
    return _update_end(cr, inst_id, ident)
def _update_end(cr, inst_id, ident):
    """Check whether instance *inst_id* is finished and propagate completion.

    The instance is complete when every remaining workitem sits on a
    flow_stop activity in state 'complete'.  On completion the instance and
    any parent workitems waiting on it (as a subflow) are marked complete,
    and each parent instance receives a 'subflow.<activity>' signal for every
    activity name still held by this instance's workitems.

    :return: True when the instance completed, False otherwise.
    """
    cr.execute('select wkf_id from wkf_instance where id=%d', (inst_id,))
    wkf_id = cr.fetchone()[0]
    cr.execute('select state,flow_stop from wkf_workitem w left join wkf_activity a on (a.id=w.act_id) where w.inst_id=%d', (inst_id,))
    ok = True
    for state, flow_stop in cr.fetchall():
        if (state != 'complete') or not flow_stop:
            ok = False
            break
    if ok:
        cr.execute('select distinct a.name from wkf_activity a left join wkf_workitem w on (a.id=w.act_id) where w.inst_id=%d', (inst_id,))
        act_names = cr.fetchall()
        cr.execute("update wkf_instance set state='complete' where id=%d", (inst_id,))
        cr.execute("update wkf_workitem set state='complete' where subflow_id=%d", (inst_id,))
        cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id in (select inst_id from wkf_workitem where subflow_id=%d)", (inst_id,))
        for parent in cr.fetchall():
            # wake every waiting parent with one signal per activity name
            for act_name in act_names:
                validate(cr, parent[0], (ident[0], parent[1], parent[2]), 'subflow.'+act_name[0])
    return ok

View File

@ -34,77 +34,77 @@ import pooler
class EnvCall(object):
    """Callable proxy that prepends a fixed argument tuple to every call of
    ``wf_service.execute_cr`` (used to expose object methods to workflow
    expressions)."""

    def __init__(self, wf_service, d_arg):
        self.wf_service = wf_service
        self.d_arg = d_arg

    def __call__(self, *args):
        return self.wf_service.execute_cr(*(self.d_arg + args))
class Env(dict):
    """Evaluation namespace for workflow condition expressions.

    Name lookups first try the target record: any model column (including
    inherited ones) or model attribute is read from a fresh browse record of
    the first id; everything else falls back to the plain dict contents.
    """

    def __init__(self, wf_service, cr, uid, model, ids):
        self.wf_service = wf_service
        self.cr = cr
        self.uid = uid
        self.model = model
        self.ids = ids
        self.obj = pooler.get_pool(cr.dbname).get(model)
        self.columns = self.obj._columns.keys() + self.obj._inherit_fields.keys()

    def __getitem__(self, key):
        if (key in self.columns) or (key in dir(self.obj)):
            record = self.obj.browse(self.cr, self.uid, self.ids[0])
            return record[key]
        else:
            return super(Env, self).__getitem__(key)
def _eval_expr(cr, ident, workitem, action):
    """Evaluate the expression lines of *action* against the record in *ident*.

    Each line is evaluated in an Env namespace built on the record; the value
    of the last line is returned.  'True'/'False' literal lines short-circuit
    the eval.  NOTE(review): this eval()s workflow-defined expressions —
    trusted configuration data, not end-user input.
    """
    assert action, 'You used a NULL action in a workflow, use dummy node instead.'
    result = False
    # the record identity does not change between lines, unpack it once
    uid = ident[0]
    model = ident[1]
    ids = [ident[2]]
    for expr in action.split('\n'):
        if expr == 'True':
            result = True
        elif expr == 'False':
            result = False
        else:
            wf_service = netsvc.LocalService("object_proxy")
            env = Env(wf_service, cr, uid, model, ids)
            result = eval(expr, env)
    return result
def execute_action(cr, ident, workitem, activity):
    """Run the ir.actions.server action attached to *activity* for the
    record in *ident* and return the (possible) client action it produced."""
    wf_service = netsvc.LocalService("object_proxy")
    server_actions = pooler.get_pool(cr.dbname).get('ir.actions.server')
    ctx = {'active_id': ident[2], 'active_ids': [ident[2]]}
    return server_actions.run(cr, ident[0], [activity['action_id']], ctx)
def execute(cr, ident, workitem, activity):
    """Evaluate the python expression stored on *activity* ('action' field)."""
    expression = activity['action']
    return _eval_expr(cr, ident, workitem, expression)
def check(cr, workitem, ident, transition, signal):
    """Decide whether *transition* may fire for *workitem*.

    A transition fires when (in order, short-circuiting): its signal, if any,
    matches *signal*; its role, if any, is held by the acting user; and its
    condition expression evaluates truthy on the record.
    """
    ok = True
    if transition['signal']:
        ok = (signal == transition['signal'])
    if transition['role_id']:
        uid = ident[0]
        proxy = netsvc.LocalService('object_proxy')
        user_roles = proxy.execute_cr(cr, uid, 'res.users', 'read', [uid], ['roles_id'])[0]['roles_id']
        ok = ok and proxy.execute_cr(cr, uid, 'res.roles', 'check', user_roles, transition['role_id'])
    ok = ok and _eval_expr(cr, ident, workitem, transition['condition'])
    return ok

View File

@ -31,5 +31,5 @@
#
def log(cr, ident, act_id, info=''):
    """Workflow audit-log hook — intentionally a no-op.

    The historical implementation recorded each activity transition:
    # cr.execute('insert into wkf_logs (res_type, res_id, uid, act_id, time, info) values (%s,%d,%d,%d,current_time,%s)', (ident[1],int(ident[2]),int(ident[0]),int(act_id),info))
    """
    pass

View File

@ -34,79 +34,79 @@ import netsvc
import pooler
class workflow_service(netsvc.Service):
    """netsvc service 'workflow': entry points (trg_*) that fire workflow
    events for a resource identified by ``(uid, res_type, res_id)``."""

    def __init__(self, name='workflow', audience='*'):
        netsvc.Service.__init__(self, name, audience)
        self.exportMethod(self.trg_write)
        self.exportMethod(self.trg_delete)
        self.exportMethod(self.trg_create)
        self.exportMethod(self.trg_validate)
        self.exportMethod(self.trg_redirect)
        self.exportMethod(self.trg_trigger)
        self.exportMethod(self.clear_cache)
        # per-database cache of "workflows started on create" (see trg_create)
        self.wkf_on_create_cache = {}

    def clear_cache(self, cr, uid):
        """Drop the on-create workflow cache for the cursor's database."""
        self.wkf_on_create_cache[cr.dbname] = {}

    def trg_write(self, uid, res_type, res_id, cr):
        """Re-evaluate every active workflow instance after a record write."""
        ident = (uid, res_type, res_id)
        cr.execute('select id from wkf_instance where res_id=%d and res_type=%s and state=%s', (res_id, res_type, 'active'))
        for (inst_id,) in cr.fetchall():
            instance.update(cr, inst_id, ident)

    def trg_trigger(self, uid, res_type, res_id, cr):
        """Wake every instance registered (in wkf_triggers) on this record."""
        cr.execute('select instance_id from wkf_triggers where res_id=%d and model=%s', (res_id, res_type))
        for (instance_id,) in cr.fetchall():
            cr.execute('select uid,res_type,res_id from wkf_instance where id=%d', (instance_id,))
            ident = cr.fetchone()
            instance.update(cr, instance_id, ident)

    def trg_delete(self, uid, res_type, res_id, cr):
        """Drop all workflow instances of a deleted record."""
        instance.delete(cr, (uid, res_type, res_id))

    def trg_create(self, uid, res_type, res_id, cr):
        """Start, for a freshly created record, every workflow of its model
        flagged on_create (workflow ids are cached per database)."""
        ident = (uid, res_type, res_id)
        self.wkf_on_create_cache.setdefault(cr.dbname, {})
        if res_type in self.wkf_on_create_cache[cr.dbname]:
            wkf_ids = self.wkf_on_create_cache[cr.dbname][res_type]
        else:
            cr.execute('select id from wkf where osv=%s and on_create=True', (res_type,))
            wkf_ids = cr.fetchall()
            self.wkf_on_create_cache[cr.dbname][res_type] = wkf_ids
        for (wkf_id,) in wkf_ids:
            instance.create(cr, ident, wkf_id)

    def trg_validate(self, uid, res_type, res_id, signal, cr):
        """Send *signal* to every active instance of the record; return the
        last client action produced, or False."""
        result = False
        ident = (uid, res_type, res_id)
        # ids of all active workflow instances for a corresponding resource (id, model_nam)
        cr.execute('select id from wkf_instance where res_id=%d and res_type=%s and state=%s', (res_id, res_type, 'active'))
        for (inst_id,) in cr.fetchall():
            res2 = instance.validate(cr, inst_id, ident, signal)
            result = result or res2
        return result

    def trg_redirect(self, uid, res_type, res_id, new_rid, cr):
        """Make all workitems which are waiting for a (subflow) workflow
        instance of the old resource point to the (first active) workflow
        instance of the new resource *new_rid*."""
        # get ids of wkf instances for the old resource (res_id)
        #CHECKME: shouldn't we get only active instances?
        cr.execute('select id, wkf_id from wkf_instance where res_id=%d and res_type=%s', (res_id, res_type))
        for old_inst_id, wkf_id in cr.fetchall():
            # first active instance for new resource (new_rid), using same wkf
            cr.execute(
                'SELECT id '\
                'FROM wkf_instance '\
                'WHERE res_id=%d AND res_type=%s AND wkf_id=%d AND state=%s',
                (new_rid, res_type, wkf_id, 'active'))
            new_id = cr.fetchone()
            if new_id:
                # select all workitems which "wait" for the old instance
                cr.execute('select id from wkf_workitem where subflow_id=%d', (old_inst_id,))
                for (item_id,) in cr.fetchall():
                    # redirect all those workitems to the wkf instance of the new resource
                    cr.execute('update wkf_workitem set subflow_id=%d where id=%d', (new_id[0], item_id))

workflow_service()

View File

@ -38,158 +38,158 @@ import wkf_expr
import wkf_logs
def create(cr, act_datas, inst_id, ident, stack):
    """Create one 'active' workitem per activity in *act_datas* for instance
    *inst_id*, log it, and immediately process it (pushing any client action
    onto *stack*)."""
    for act in act_datas:
        cr.execute("select nextval('wkf_workitem_id_seq')")
        item_id = cr.fetchone()[0]
        cr.execute("insert into wkf_workitem (id,act_id,inst_id,state) values (%d,%s,%s,'active')", (item_id, act['id'], inst_id))
        cr.execute('select * from wkf_workitem where id=%d', (item_id,))
        item = cr.dictfetchone()
        wkf_logs.log(cr, ident, act['id'], 'active')
        process(cr, item, ident, stack=stack)
def process(cr, workitem, ident, signal=None, force_running=False, stack=None):
    """Advance one workitem through its activity.

    Executes the activity when the item is 'active', attempts the outgoing
    split when it is 'complete' (or *force_running*), and — when the item
    executed but did not move on — registers wkf_triggers rows so trigger
    records can wake it later.

    :param stack: required list; client actions produced by activities are
                  appended to it by the callees.
    :return: the result of the activity execution (False aborts processing).
    """
    if stack is None:
        # The original `raise 'Error !!!'` was a string exception, which is a
        # TypeError on Python >= 2.6; raise a real exception instead.
        raise ValueError('process() requires a stack list')
    result = True
    cr.execute('select * from wkf_activity where id=%d', (workitem['act_id'],))
    activity = cr.dictfetchone()

    triggers = False
    if workitem['state']=='active':
        triggers = True
        result = _execute(cr, workitem, activity, ident, stack)
        if not result:
            return False

    if workitem['state']=='running':
        pass

    if workitem['state']=='complete' or force_running:
        ok = _split_test(cr, workitem, activity['split_mode'], ident, signal, stack)
        # if the split fired, the workitem is gone: no triggers to register
        triggers = triggers and not ok

    if triggers:
        cr.execute('select * from wkf_transition where act_from=%d', (workitem['act_id'],))
        alltrans = cr.dictfetchall()
        for trans in alltrans:
            if trans['trigger_model']:
                ids = wkf_expr._eval_expr(cr,ident,workitem,trans['trigger_expr_id'])
                for res_id in ids:
                    cr.execute('select nextval(\'wkf_triggers_id_seq\')')
                    # renamed from `id` to avoid shadowing the builtin
                    trigger_id = cr.fetchone()[0]
                    cr.execute('insert into wkf_triggers (model,res_id,instance_id,workitem_id,id) values (%s,%d,%d,%d,%d)', (trans['trigger_model'],res_id,workitem['inst_id'], workitem['id'], trigger_id))

    return result
# ---------------------- PRIVATE FUNCS --------------------------------
def _state_set(cr, workitem, activity, state, ident):
    """Move *workitem* to *state*, both in the database and in the in-memory
    dict, and record the change in the workflow log."""
    cr.execute('update wkf_workitem set state=%s where id=%d', (state, workitem['id']))
    workitem['state'] = state
    wkf_logs.log(cr, ident, activity['id'], state)
def _execute(cr, workitem, activity, ident, stack):
result = True
#
# send a signal to parent workflow (signal: subflow.signal_name)
#
if (workitem['state']=='active') and activity['signal_send']:
cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id in (select inst_id from wkf_workitem where subflow_id=%d)", (workitem['inst_id'],))
for i in cr.fetchall():
instance.validate(cr, i[0], (ident[0],i[1],i[2]), activity['signal_send'], force_running=True)
result = True
#
# send a signal to parent workflow (signal: subflow.signal_name)
#
if (workitem['state']=='active') and activity['signal_send']:
cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id in (select inst_id from wkf_workitem where subflow_id=%d)", (workitem['inst_id'],))
for i in cr.fetchall():
instance.validate(cr, i[0], (ident[0],i[1],i[2]), activity['signal_send'], force_running=True)
if activity['kind']=='dummy':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'complete', ident)
elif activity['kind']=='function':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)
wkf_expr.execute(cr, ident, workitem, activity)
if activity['action_id']:
res2 = wkf_expr.execute_action(cr, ident, workitem, activity)
# A client action has been returned
if res2:
stack.append(res2)
result=res2
_state_set(cr, workitem, activity, 'complete', ident)
elif activity['kind']=='stopall':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)
cr.execute('delete from wkf_workitem where inst_id=%d and id<>%d', (workitem['inst_id'], workitem['id']))
if activity['action']:
wkf_expr.execute(cr, ident, workitem, activity)
_state_set(cr, workitem, activity, 'complete', ident)
elif activity['kind']=='subflow':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)
if activity.get('action', False):
id_new = wkf_expr.execute(cr, ident, workitem, activity)
if not (id_new):
cr.execute('delete from wkf_workitem where id=%s', (workitem['id'],))
return False
assert type(id_new)==type(1) or type(id_new)==type(1L), 'Wrong return value: '+str(id_new)+' '+str(type(id_new))
cr.execute('select id from wkf_instance where res_id=%d and wkf_id=%d', (id_new,activity['subflow_id']))
id_new = cr.fetchone()[0]
else:
id_new = instance.create(cr, ident, activity['subflow_id'])
cr.execute('update wkf_workitem set subflow_id=%d where id=%s', (id_new, workitem['id']))
workitem['subflow_id'] = id_new
if workitem['state']=='running':
cr.execute("select state from wkf_instance where id=%d", (workitem['subflow_id'],))
state= cr.fetchone()[0]
if state=='complete':
_state_set(cr, workitem, activity, 'complete', ident)
return result
if activity['kind']=='dummy':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'complete', ident)
elif activity['kind']=='function':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)
wkf_expr.execute(cr, ident, workitem, activity)
if activity['action_id']:
res2 = wkf_expr.execute_action(cr, ident, workitem, activity)
# A client action has been returned
if res2:
stack.append(res2)
result=res2
_state_set(cr, workitem, activity, 'complete', ident)
elif activity['kind']=='stopall':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)
cr.execute('delete from wkf_workitem where inst_id=%d and id<>%d', (workitem['inst_id'], workitem['id']))
if activity['action']:
wkf_expr.execute(cr, ident, workitem, activity)
_state_set(cr, workitem, activity, 'complete', ident)
elif activity['kind']=='subflow':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)
if activity.get('action', False):
id_new = wkf_expr.execute(cr, ident, workitem, activity)
if not (id_new):
cr.execute('delete from wkf_workitem where id=%s', (workitem['id'],))
return False
assert type(id_new)==type(1) or type(id_new)==type(1L), 'Wrong return value: '+str(id_new)+' '+str(type(id_new))
cr.execute('select id from wkf_instance where res_id=%d and wkf_id=%d', (id_new,activity['subflow_id']))
id_new = cr.fetchone()[0]
else:
id_new = instance.create(cr, ident, activity['subflow_id'])
cr.execute('update wkf_workitem set subflow_id=%d where id=%s', (id_new, workitem['id']))
workitem['subflow_id'] = id_new
if workitem['state']=='running':
cr.execute("select state from wkf_instance where id=%d", (workitem['subflow_id'],))
state= cr.fetchone()[0]
if state=='complete':
_state_set(cr, workitem, activity, 'complete', ident)
return result
def _split_test(cr, workitem, split_mode, ident, signal=None, stack=None):
if stack is None:
raise 'Error !!!'
cr.execute('select * from wkf_transition where act_from=%d', (workitem['act_id'],))
test = False
transitions = []
alltrans = cr.dictfetchall()
if split_mode=='XOR' or split_mode=='OR':
for transition in alltrans:
if wkf_expr.check(cr, workitem, ident, transition,signal):
test = True
transitions.append((transition['id'], workitem['inst_id']))
if split_mode=='XOR':
break
else:
test = True
for transition in alltrans:
if not wkf_expr.check(cr, workitem, ident, transition,signal):
test = False
break
cr.execute('select count(*) from wkf_witm_trans where trans_id=%d and inst_id=%d', (transition['id'], workitem['inst_id']))
if not cr.fetchone()[0]:
transitions.append((transition['id'], workitem['inst_id']))
if test and len(transitions):
cr.executemany('insert into wkf_witm_trans (trans_id,inst_id) values (%d,%d)', transitions)
cr.execute('delete from wkf_workitem where id=%d', (workitem['id'],))
for t in transitions:
_join_test(cr, t[0], t[1], ident, stack)
return True
return False
if stack is None:
raise 'Error !!!'
cr.execute('select * from wkf_transition where act_from=%d', (workitem['act_id'],))
test = False
transitions = []
alltrans = cr.dictfetchall()
if split_mode=='XOR' or split_mode=='OR':
for transition in alltrans:
if wkf_expr.check(cr, workitem, ident, transition,signal):
test = True
transitions.append((transition['id'], workitem['inst_id']))
if split_mode=='XOR':
break
else:
test = True
for transition in alltrans:
if not wkf_expr.check(cr, workitem, ident, transition,signal):
test = False
break
cr.execute('select count(*) from wkf_witm_trans where trans_id=%d and inst_id=%d', (transition['id'], workitem['inst_id']))
if not cr.fetchone()[0]:
transitions.append((transition['id'], workitem['inst_id']))
if test and len(transitions):
cr.executemany('insert into wkf_witm_trans (trans_id,inst_id) values (%d,%d)', transitions)
cr.execute('delete from wkf_workitem where id=%d', (workitem['id'],))
for t in transitions:
_join_test(cr, t[0], t[1], ident, stack)
return True
return False
def _join_test(cr, trans_id, inst_id, ident, stack):
cr.execute('select * from wkf_activity where id=(select act_to from wkf_transition where id=%d)', (trans_id,))
activity = cr.dictfetchone()
if activity['join_mode']=='XOR':
create(cr,[activity], inst_id, ident, stack)
cr.execute('delete from wkf_witm_trans where inst_id=%d and trans_id=%d', (inst_id,trans_id))
else:
cr.execute('select id from wkf_transition where act_to=%d', (activity['id'],))
trans_ids = cr.fetchall()
ok = True
for (id,) in trans_ids:
cr.execute('select count(*) from wkf_witm_trans where trans_id=%d and inst_id=%d', (id,inst_id))
res = cr.fetchone()[0]
if not res:
ok = False
break
if ok:
for (id,) in trans_ids:
cr.execute('delete from wkf_witm_trans where trans_id=%d and inst_id=%d', (id,inst_id))
create(cr, [activity], inst_id, ident, stack)
cr.execute('select * from wkf_activity where id=(select act_to from wkf_transition where id=%d)', (trans_id,))
activity = cr.dictfetchone()
if activity['join_mode']=='XOR':
create(cr,[activity], inst_id, ident, stack)
cr.execute('delete from wkf_witm_trans where inst_id=%d and trans_id=%d', (inst_id,trans_id))
else:
cr.execute('select id from wkf_transition where act_to=%d', (activity['id'],))
trans_ids = cr.fetchall()
ok = True
for (id,) in trans_ids:
cr.execute('select count(*) from wkf_witm_trans where trans_id=%d and inst_id=%d', (id,inst_id))
res = cr.fetchone()[0]
if not res:
ok = False
break
if ok:
for (id,) in trans_ids:
cr.execute('delete from wkf_witm_trans where trans_id=%d and inst_id=%d', (id,inst_id))
create(cr, [activity], inst_id, ident, stack)

View File

@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
parser.parse_args(values=options)
if hasattr(options, 'config'):
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
# -----
@ -91,23 +91,23 @@ cr.execute("SELECT c.relname FROM pg_class c, pg_attribute a WHERE c.relname='re
partners=[]
drop_payment_term=False
if cr.rowcount:
drop_payment_term=True
cr.execute("select id, payment_term from res_partner where payment_term is not null")
partners = cr.dictfetchall()
drop_payment_term=True
cr.execute("select id, payment_term from res_partner where payment_term is not null")
partners = cr.dictfetchall()
# loop over them
for partner in partners:
value = 'account.payment.term,%d' % partner['payment_term']
res_id = 'res.partner,%d' % partner['id']
cr.execute(
"insert into ir_property(name, value, res_id, company_id, fields_id) "\
"values(%s, %s, %s, %d, %d)",
('property_payment_term', value, res_id, company_id, fields_id))
value = 'account.payment.term,%d' % partner['payment_term']
res_id = 'res.partner,%d' % partner['id']
cr.execute(
"insert into ir_property(name, value, res_id, company_id, fields_id) "\
"values(%s, %s, %s, %d, %d)",
('property_payment_term', value, res_id, company_id, fields_id))
# remove the field
if drop_payment_term:
cr.execute("alter table res_partner drop column payment_term")
cr.execute("alter table res_partner drop column payment_term")
cr.execute("delete from ir_model_fields where model = 'res.partner' and name = 'payment_term'")
cr.commit()
@ -124,10 +124,10 @@ registered_reports = cr.fetchall()
reg_reports_ids = ','.join([str(id) for (id,) in registered_reports])
for report in reports_wh_duplicates:
cr.execute("select id from ir_act_report_xml where model=%s and report_name=%s and id not in ("+reg_reports_ids+")", (report['model'], report['report_name']))
(id,) = cr.fetchone()
cr.execute("delete from ir_act_report_xml where id=%d", (id,))
cr.execute("delete from ir_values where value='ir.actions.report.xml,%d'", (id,))
cr.execute("select id from ir_act_report_xml where model=%s and report_name=%s and id not in ("+reg_reports_ids+")", (report['model'], report['report_name']))
(id,) = cr.fetchone()
cr.execute("delete from ir_act_report_xml where id=%d", (id,))
cr.execute("delete from ir_values where value='ir.actions.report.xml,%d'", (id,))
cr.commit()

View File

@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
parser.parse_args(values=options)
if hasattr(options, 'config'):
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
# -----
@ -79,18 +79,18 @@ cr = db.cursor()
# ------------------------- #
def change_column(cr, table, column, new_type, copy):
commands = [
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
"ALTER TABLE %s DROP COLUMN temp_column" % table
]
if copy:
commands.insert(
2,
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
commands = [
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
"ALTER TABLE %s DROP COLUMN temp_column" % table
]
if copy:
commands.insert(
2,
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
for command in commands:
cr.execute(command)
for command in commands:
cr.execute(command)
change_column(cr, 'account_account_type', 'code_from', 'varchar(10)', False)
change_column(cr, 'account_account_type', 'code_to', 'varchar(10)', False)
@ -101,17 +101,17 @@ cr.commit()
# ----------------------------------------------------- #
for line in (
"alter table ir_model_fields add group_name varchar(64)",
"alter table ir_model_fields add view_load boolean",
"alter table ir_model_fields alter group_name set default ''",
"alter table ir_model_fields alter view_load set default False",
"delete from ir_values where value like '%,False'",
):
try:
cr.execute(line)
except psycopg.ProgrammingError, e:
cr.commit()
print e
"alter table ir_model_fields add group_name varchar(64)",
"alter table ir_model_fields add view_load boolean",
"alter table ir_model_fields alter group_name set default ''",
"alter table ir_model_fields alter view_load set default False",
"delete from ir_values where value like '%,False'",
):
try:
cr.execute(line)
except psycopg.ProgrammingError, e:
cr.commit()
print e
cr.commit()
cr.close()

View File

@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
parser.parse_args(values=options)
if hasattr(options, 'config'):
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
# -----

View File

@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
parser.parse_args(values=options)
if hasattr(options, 'config'):
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
# -----
@ -86,10 +86,10 @@ cr.commit()
# --------------- #
while True:
cr.execute("select id from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))")
if not cr.rowcount:
break
cr.execute("delete from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))")
cr.execute("select id from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))")
if not cr.rowcount:
break
cr.execute("delete from ir_ui_menu where (id not in (select parent_id from ir_ui_menu where parent_id is not null)) and (id not in (select res_id from ir_values where model='ir.ui.menu'))")
cr.commit()
# ----------------------------------------- #
@ -114,7 +114,7 @@ It is not possible to migrate the data automatically so you need to create the o
And then update the field uos_id of the table account_invoice to match the new id of product_uom.
EXAMPLE:
UPDATE account_invoice SET uos_id = new_id WHERE uos_id = old_id;
UPDATE account_invoice SET uos_id = new_id WHERE uos_id = old_id;
"""
cr.close()

View File

@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
parser.parse_args(values=options)
if hasattr(options, 'config'):
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
# -----
@ -79,18 +79,18 @@ cr = db.cursor()
# ------------------------- #
def change_column(cr, table, column, new_type, copy):
commands = [
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
"ALTER TABLE %s DROP COLUMN temp_column" % table
]
if copy:
commands.insert(
2,
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
commands = [
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
"ALTER TABLE %s DROP COLUMN temp_column" % table
]
if copy:
commands.insert(
2,
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
for command in commands:
cr.execute(command)
for command in commands:
cr.execute(command)
#change_column(cr, 'crm_case', 'date_closed', 'timestamp', True)
cr.commit()
@ -101,7 +101,7 @@ cr.commit()
cr.execute("SELECT name FROM ir_module_module")
if not cr.rowcount:
for module in set(['base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project',
for module in set(['base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project',
'account_followup',
'account',
'audittrail',
@ -131,8 +131,8 @@ if not cr.rowcount:
'sandwich',
'scrum',
'stock']):
cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module)
cr.commit()
cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module)
cr.commit()
# ----------------------------------------------------- #
@ -140,11 +140,11 @@ if not cr.rowcount:
# ----------------------------------------------------- #
for line in (
"ALTER TABLE ir_module_module ADD demo BOOLEAN DEFAULT False",
"delete from ir_values where value like '%,False'",
"""UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""",
):
cr.execute(line)
"ALTER TABLE ir_module_module ADD demo BOOLEAN DEFAULT False",
"delete from ir_values where value like '%,False'",
"""UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""",
):
cr.execute(line)
cr.commit()
cr.close()

View File

@ -53,15 +53,15 @@ options.db_name = 'terp' # default value
parser.parse_args(values=options)
if hasattr(options, 'config'):
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
# -----
@ -79,18 +79,18 @@ cr = db.cursor()
# ------------------------- #
def change_column(cr, table, column, new_type, copy):
commands = [
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
"ALTER TABLE %s DROP COLUMN temp_column" % table
]
if copy:
commands.insert(
2,
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
commands = [
"ALTER TABLE %s RENAME COLUMN %s TO temp_column" % (table, column),
"ALTER TABLE %s ADD COLUMN %s %s" % (table, column, new_type),
"ALTER TABLE %s DROP COLUMN temp_column" % table
]
if copy:
commands.insert(
2,
"UPDATE %s SET %s=temp_column::%s" % (table, column, new_type))
for command in commands:
cr.execute(command)
for command in commands:
cr.execute(command)
change_column(cr, 'crm_case', 'date_closed', 'timestamp', True)
cr.commit()
@ -101,19 +101,19 @@ cr.commit()
cr.execute("SELECT name FROM ir_module_module")
if not cr.rowcount:
for module in ('base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project'):
cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module)
cr.commit()
for module in ('base', 'marketing', 'subscription', 'account', 'base_partner_relation', 'audittrail', 'account_followup', 'product', 'hr', 'l10n_simple', 'crm', 'stock', 'hr_timesheet', 'purchase', 'report_purchase', 'mrp', 'sale', 'report_sale', 'delivery', 'project', 'sale_crm', 'hr_timesheet_project', 'scrum', 'report_project'):
cr.execute("INSERT INTO ir_module_module (name, state) VALUES ('%s', 'installed')" % module)
cr.commit()
# --------------- #
# remove old menu #
# --------------- #
while True:
cr.execute("select id from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')")
if not cr.rowcount:
break
cr.execute("delete from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')")
cr.execute("select id from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')")
if not cr.rowcount:
break
cr.execute("delete from ir_ui_menu where id not in (select parent_id from ir_ui_menu where parent_id is not null) and id not in (select res_id from ir_model_data where model='ir.ui.menu')")
cr.commit()
# ----------------------------------------------------- #
@ -121,12 +121,12 @@ cr.commit()
# ----------------------------------------------------- #
for line in (
"ALTER TABLE ir_module_module ADD demo BOOLEAN",
"ALTER TABLE ir_module_module SET demo DEFAULT False",
"DELETE FROM ir_values WHERE VALUE LIKE '%,False'",
"""UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""",
):
cr.execute(line)
"ALTER TABLE ir_module_module ADD demo BOOLEAN",
"ALTER TABLE ir_module_module SET demo DEFAULT False",
"DELETE FROM ir_values WHERE VALUE LIKE '%,False'",
"""UPDATE ir_ui_view set arch='<?xml version="1.0"?><tree string="Menu" toolbar="1"><field icon="icon" name="name"/></tree>' where name='ir.ui.menu.tree' and type='tree' and field_parent='child_id'""",
):
cr.execute(line)
cr.commit()
cr.close()

View File

@ -54,15 +54,15 @@ options.db_name = 'terp' # default value
parser.parse_args(values=options)
if hasattr(options, 'config'):
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
# -----
@ -82,13 +82,13 @@ cr = db.cursor()
cr.execute("""SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN a.atttypmod-4 ELSE a.attlen END as size FROM pg_class c,pg_attribute a,pg_type t WHERE c.relname='res_currency' AND a.attname='rounding' AND c.oid=a.attrelid AND a.atttypid=t.oid""")
res = cr.dictfetchall()
if res[0]['typname'] != 'numeric':
for line in (
"ALTER TABLE res_currency RENAME rounding TO rounding_bak",
"ALTER TABLE res_currency ADD rounding NUMERIC(12,6)",
"UPDATE res_currency SET rounding = power(10, - rounding_bak)",
"ALTER TABLE res_currency DROP rounding_bak",
):
cr.execute(line)
for line in (
"ALTER TABLE res_currency RENAME rounding TO rounding_bak",
"ALTER TABLE res_currency ADD rounding NUMERIC(12,6)",
"UPDATE res_currency SET rounding = power(10, - rounding_bak)",
"ALTER TABLE res_currency DROP rounding_bak",
):
cr.execute(line)
cr.commit()
# ----------------------------- #
@ -97,7 +97,7 @@ cr.commit()
cr.execute('SELECT conname FROM pg_constraint where conname = \'ir_ui_view_type\'')
if cr.fetchall():
cr.execute('ALTER TABLE ir_ui_view DROP CONSTRAINT ir_ui_view_type')
cr.execute('ALTER TABLE ir_ui_view DROP CONSTRAINT ir_ui_view_type')
cr.commit()
# ------------------------ #
@ -106,7 +106,7 @@ cr.commit()
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'res_partner_bank\' AND a.attname = \'iban\' AND c.oid = a.attrelid')
if cr.fetchall():
cr.execute('ALTER TABLE res_partner_bank RENAME iban TO acc_number')
cr.execute('ALTER TABLE res_partner_bank RENAME iban TO acc_number')
cr.commit()
# ------------------------------------------- #
@ -115,12 +115,12 @@ cr.commit()
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'ir_model\' AND a.attname = \'perm_id\' AND c.oid = a.attrelid')
if not cr.fetchall():
cr.execute("ALTER TABLE ir_model ADD perm_id int references perm on delete set null")
cr.execute("ALTER TABLE ir_model ADD perm_id int references perm on delete set null")
cr.commit()
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'ir_model_fields\' AND a.attname = \'perm_id\' AND c.oid = a.attrelid')
if not cr.fetchall():
cr.execute("ALTER TABLE ir_model_fields ADD perm_id int references perm on delete set null")
cr.execute("ALTER TABLE ir_model_fields ADD perm_id int references perm on delete set null")
cr.commit()
@ -138,9 +138,9 @@ cr.commit()
cr.execute('SELECT model_id FROM ir_model_access')
res= cr.fetchall()
for r in res:
cr.execute('SELECT id FROM ir_model_access WHERE model_id = %d AND group_id IS NULL', (r[0],))
if not cr.fetchall():
cr.execute("INSERT into ir_model_access (name,model_id,group_id) VALUES ('Auto-generated access by migration',%d,NULL)",(r[0],))
cr.execute('SELECT id FROM ir_model_access WHERE model_id = %d AND group_id IS NULL', (r[0],))
if not cr.fetchall():
cr.execute("INSERT into ir_model_access (name,model_id,group_id) VALUES ('Auto-generated access by migration',%d,NULL)",(r[0],))
cr.commit()
# ------------------------------------------------- #
@ -149,7 +149,7 @@ cr.commit()
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_line_to_invoice\'')
if cr.fetchall():
cr.execute('DROP VIEW report_account_analytic_line_to_invoice')
cr.execute('DROP VIEW report_account_analytic_line_to_invoice')
cr.commit()
# --------------------------- #
@ -158,7 +158,7 @@ cr.commit()
cr.execute('SELECT * FROM pg_class c, pg_attribute a WHERE c.relname=\'hr_employee\' AND a.attname=\'state\' AND c.oid=a.attrelid')
if cr.fetchall():
cr.execute('ALTER TABLE hr_employee DROP state')
cr.execute('ALTER TABLE hr_employee DROP state')
cr.commit()
# ------------ #
@ -167,10 +167,10 @@ cr.commit()
cr.execute('SELECT id FROM ir_values where model=\'res.users\' AND key=\'meta\' AND name=\'tz\'')
if not cr.fetchall():
import pytz, pickle
meta = pickle.dumps({'type':'selection', 'string':'Timezone', 'selection': [(x, x) for x in pytz.all_timezones]})
value = pickle.dumps(False)
cr.execute('INSERT INTO ir_values (name, key, model, meta, key2, object, value) VALUES (\'tz\', \'meta\', \'res.users\', %s, \'tz\', %s, %s)', (meta,False, value))
import pytz, pickle
meta = pickle.dumps({'type':'selection', 'string':'Timezone', 'selection': [(x, x) for x in pytz.all_timezones]})
value = pickle.dumps(False)
cr.execute('INSERT INTO ir_values (name, key, model, meta, key2, object, value) VALUES (\'tz\', \'meta\', \'res.users\', %s, \'tz\', %s, %s)', (meta,False, value))
cr.commit()
# ------------------------- #
@ -179,36 +179,36 @@ cr.commit()
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a, pg_type t WHERE c.relname = \'product_uom\' AND a.attname = \'factor\' AND c.oid = a.attrelid AND a.atttypid = t.oid AND t.typname = \'float8\'')
if cr.fetchall():
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_account\'')
if cr.fetchall():
cr.execute('DROP VIEW report_account_analytic_planning_stat_account')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat\'')
if cr.fetchall():
cr.execute('DROP VIEW report_account_analytic_planning_stat')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_user\'')
if cr.fetchall():
cr.execute('DROP VIEW report_account_analytic_planning_stat_user')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_product\'')
if cr.fetchall():
cr.execute('DROP VIEW report_purchase_order_product')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_category\'')
if cr.fetchall():
cr.execute('DROP VIEW report_purchase_order_category')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_product\'')
if cr.fetchall():
cr.execute('DROP VIEW report_sale_order_product')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_category\'')
if cr.fetchall():
cr.execute('DROP VIEW report_sale_order_category')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_hr_timesheet_invoice_journal\'')
if cr.fetchall():
cr.execute('DROP VIEW report_hr_timesheet_invoice_journal')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_account\'')
if cr.fetchall():
cr.execute('DROP VIEW report_account_analytic_planning_stat_account')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat\'')
if cr.fetchall():
cr.execute('DROP VIEW report_account_analytic_planning_stat')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat_user\'')
if cr.fetchall():
cr.execute('DROP VIEW report_account_analytic_planning_stat_user')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_product\'')
if cr.fetchall():
cr.execute('DROP VIEW report_purchase_order_product')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_category\'')
if cr.fetchall():
cr.execute('DROP VIEW report_purchase_order_category')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_product\'')
if cr.fetchall():
cr.execute('DROP VIEW report_sale_order_product')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_category\'')
if cr.fetchall():
cr.execute('DROP VIEW report_sale_order_category')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_hr_timesheet_invoice_journal\'')
if cr.fetchall():
cr.execute('DROP VIEW report_hr_timesheet_invoice_journal')
cr.execute('ALTER TABLE product_uom RENAME COLUMN factor to temp_column')
cr.execute('ALTER TABLE product_uom ADD COLUMN factor NUMERIC(12,6)')
cr.execute('UPDATE product_uom SET factor = temp_column')
cr.execute('ALTER TABLE product_uom ALTER factor SET NOT NULL')
cr.execute('ALTER TABLE product_uom DROP COLUMN temp_column')
cr.execute('ALTER TABLE product_uom RENAME COLUMN factor to temp_column')
cr.execute('ALTER TABLE product_uom ADD COLUMN factor NUMERIC(12,6)')
cr.execute('UPDATE product_uom SET factor = temp_column')
cr.execute('ALTER TABLE product_uom ALTER factor SET NOT NULL')
cr.execute('ALTER TABLE product_uom DROP COLUMN temp_column')
cr.commit()
@ -218,7 +218,7 @@ cr.commit()
cr.execute('SELECT conname FROM pg_constraint where conname = \'stock_production_lot_name_uniq\'')
if cr.fetchall():
cr.execute('ALTER TABLE stock_production_lot DROP CONSTRAINT stock_production_lot_name_uniq')
cr.execute('ALTER TABLE stock_production_lot DROP CONSTRAINT stock_production_lot_name_uniq')
cr.commit()
# ------------------------------------ #
@ -235,19 +235,19 @@ cr.commit()
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_report_xml_pkey\' and tablename = \'ir_act_report_xml\'')
if not cr.fetchall():
cr.execute('ALTER TABLE ir_act_report_xml ADD PRIMARY KEY (id)')
cr.execute('ALTER TABLE ir_act_report_xml ADD PRIMARY KEY (id)')
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_report_custom_pkey\' and tablename = \'ir_act_report_custom\'')
if not cr.fetchall():
cr.execute('ALTER TABLE ir_act_report_custom ADD PRIMARY KEY (id)')
cr.execute('ALTER TABLE ir_act_report_custom ADD PRIMARY KEY (id)')
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_group_pkey\' and tablename = \'ir_act_group\'')
if not cr.fetchall():
cr.execute('ALTER TABLE ir_act_group ADD PRIMARY KEY (id)')
cr.execute('ALTER TABLE ir_act_group ADD PRIMARY KEY (id)')
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_execute_pkey\' and tablename = \'ir_act_execute\'')
if not cr.fetchall():
cr.execute('ALTER TABLE ir_act_execute ADD PRIMARY KEY (id)')
cr.execute('ALTER TABLE ir_act_execute ADD PRIMARY KEY (id)')
cr.execute('SELECT indexname FROm pg_indexes WHERE indexname = \'ir_act_wizard_pkey\' and tablename = \'ir_act_wizard\'')
if not cr.fetchall():
cr.execute('ALTER TABLE ir_act_wizard ADD PRIMARY KEY (id)')
cr.execute('ALTER TABLE ir_act_wizard ADD PRIMARY KEY (id)')
cr.commit()
cr.close

View File

@ -54,15 +54,15 @@ options.db_name = 'terp' # default value
parser.parse_args(values=options)
if hasattr(options, 'config'):
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
configparser = ConfigParser.ConfigParser()
configparser.read([options.config])
for name, value in configparser.items('options'):
if not (hasattr(options, name) and getattr(options, name)):
if value in ('true', 'True'):
value = True
if value in ('false', 'False'):
value = False
setattr(options, name, value)
raise Exception('This script is provided as an example, you must custom it before')
@ -84,18 +84,18 @@ cr.execute('SELECT code from res_country where code is not null group by code')
res = cr.fetchall()
for c in res:
cr.execute('SELECT max(id) from res_country where code = %s group by code', (c[0],))
res2 = cr.fetchone()
cr.execute('SELECT id from res_country where code = %s', (c[0],))
ids = ','.join(map(lambda x: str(x[0]), cr.fetchall()))
cr.execute('UPDATE res_partner_address set country_id = %d where country_id in ('+ids+')', (res2[0],))
cr.execute('DELETE FROM res_country WHERE code = %s and id <> %d', (c[0], res2[0],))
cr.execute('SELECT max(id) from res_country where code = %s group by code', (c[0],))
res2 = cr.fetchone()
cr.execute('SELECT id from res_country where code = %s', (c[0],))
ids = ','.join(map(lambda x: str(x[0]), cr.fetchall()))
cr.execute('UPDATE res_partner_address set country_id = %d where country_id in ('+ids+')', (res2[0],))
cr.execute('DELETE FROM res_country WHERE code = %s and id <> %d', (c[0], res2[0],))
cr.commit()
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_planning_stat\'')
if cr.fetchall():
cr.execute('DROP VIEW report_account_analytic_planning_stat')
cr.execute('DROP VIEW report_account_analytic_planning_stat')
cr.commit()
@ -104,59 +104,59 @@ res = cr.fetchall()
for p in res:
cr.execute('SELECT max(id) FROM res_partner WHERE name = %s GROUP BY name', (p[0],))
res2 = cr.fetchone()
cr.execute('UPDATE res_partner set active = False WHERE name = %s and id <> %d', (p[0], res2[0],))
cr.execute('SELECT id FROM res_partner WHERE name = %s AND id <> %d', (p[0], res2[0],))
res3 = cr.fetchall()
i = 0
for id in res3:
name = p[0]+' old'
if i:
name = name + ' ' + str(i)
cr.execute('UPDATE res_partner set name = %s WHERE id = %d', (name, id[0]))
i += 1
cr.execute('SELECT max(id) FROM res_partner WHERE name = %s GROUP BY name', (p[0],))
res2 = cr.fetchone()
cr.execute('UPDATE res_partner set active = False WHERE name = %s and id <> %d', (p[0], res2[0],))
cr.execute('SELECT id FROM res_partner WHERE name = %s AND id <> %d', (p[0], res2[0],))
res3 = cr.fetchall()
i = 0
for id in res3:
name = p[0]+' old'
if i:
name = name + ' ' + str(i)
cr.execute('UPDATE res_partner set name = %s WHERE id = %d', (name, id[0]))
i += 1
cr.commit()
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_account_analytic_line_to_invoice\'')
if cr.fetchall():
cr.execute('DROP VIEW report_account_analytic_line_to_invoice')
cr.execute('DROP VIEW report_account_analytic_line_to_invoice')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_timesheet_invoice\'')
if cr.fetchall():
cr.execute('drop VIEW report_timesheet_invoice')
cr.execute('drop VIEW report_timesheet_invoice')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_category\'')
if cr.fetchall():
cr.execute('drop VIEW report_purchase_order_category')
cr.execute('drop VIEW report_purchase_order_category')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_purchase_order_product\'')
if cr.fetchall():
cr.execute('drop VIEW report_purchase_order_product')
cr.execute('drop VIEW report_purchase_order_product')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_category\'')
if cr.fetchall():
cr.execute('drop VIEW report_sale_order_category')
cr.execute('drop VIEW report_sale_order_category')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_sale_order_product\'')
if cr.fetchall():
cr.execute('drop VIEW report_sale_order_product')
cr.execute('drop VIEW report_sale_order_product')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_timesheet_user\'')
if cr.fetchall():
cr.execute('drop VIEW report_timesheet_user')
cr.execute('drop VIEW report_timesheet_user')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'report_task_user_pipeline_open\'')
if cr.fetchall():
cr.execute('drop VIEW report_task_user_pipeline_open')
cr.execute('drop VIEW report_task_user_pipeline_open')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'hr_timesheet_sheet_sheet_day\'')
if cr.fetchall():
cr.execute('drop VIEW hr_timesheet_sheet_sheet_day')
cr.execute('drop VIEW hr_timesheet_sheet_sheet_day')
cr.execute('SELECT viewname FROM pg_views WHERE viewname = \'hr_timesheet_sheet_sheet_account\'')
if cr.fetchall():
cr.execute('drop VIEW hr_timesheet_sheet_sheet_account')
cr.execute('drop VIEW hr_timesheet_sheet_sheet_account')
cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_sale_stats\'')
if cr.fetchall():
cr.execute('drop VIEW sale_journal_sale_stats')
cr.execute('drop VIEW sale_journal_sale_stats')
cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_picking_stats\'')
if cr.fetchall():
cr.execute('drop VIEW sale_journal_picking_stats')
cr.execute('drop VIEW sale_journal_picking_stats')
cr.execute('SELECT viewname from pg_views where viewname = \'sale_journal_invoice_type_stats\'')
if cr.fetchall():
cr.execute('drop VIEW sale_journal_invoice_type_stats')
cr.execute('drop VIEW sale_journal_invoice_type_stats')
cr.execute('ALTER TABLE product_template ALTER list_price TYPE numeric(16,2)')
cr.execute('ALTER TABLE product_template ALTER standard_price TYPE numeric(16,2)')
@ -171,13 +171,13 @@ cr.commit()
cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_document_fields\'')
if cr.fetchall():
cr.execute('DROP TABLE subscription_document_fields')
cr.execute('DROP TABLE subscription_document_fields')
cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_document\'')
if cr.fetchall():
cr.execute('DROP TABLE subscription_document')
cr.execute('DROP TABLE subscription_document')
cr.execute('SELECT tablename FROM pg_tables WHERE tablename = \'subscription_subscription_history\'')
if cr.fetchall():
cr.execute('DROP TABLE subscription_subscription_history')
cr.execute('DROP TABLE subscription_subscription_history')
cr.commit()
# -------------------- #
@ -186,9 +186,9 @@ cr.commit()
cr.execute('SELECT a.attname FROM pg_class c, pg_attribute a WHERE c.relname = \'res_currency_rate\' AND a.attname = \'rate_old\' AND c.oid = a.attrelid')
if not cr.fetchall():
cr.execute('ALTER TABLE res_currency_rate ADD rate_old NUMERIC(12,6)')
cr.execute('UPDATE res_currency_rate SET rate_old = rate')
cr.execute('UPDATE res_currency_rate SET rate = (1 / rate_old)')
cr.execute('ALTER TABLE res_currency_rate ADD rate_old NUMERIC(12,6)')
cr.execute('UPDATE res_currency_rate SET rate_old = rate')
cr.execute('UPDATE res_currency_rate SET rate = (1 / rate_old)')
cr.commit()
cr.close

Some files were not shown because too many files have changed in this diff Show More