2011-03-02 18:56:06 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2013-04-23 12:41:51 +00:00
|
|
|
|
2011-10-05 17:58:26 +00:00
|
|
|
import ast
|
2011-08-04 09:20:43 +00:00
|
|
|
import base64
|
|
|
|
import csv
|
|
|
|
import glob
|
2011-09-05 13:05:38 +00:00
|
|
|
import itertools
|
2013-04-23 12:41:51 +00:00
|
|
|
import logging
|
2011-08-04 09:20:43 +00:00
|
|
|
import operator
|
2012-01-23 10:07:44 +00:00
|
|
|
import datetime
|
2012-02-10 14:00:21 +00:00
|
|
|
import hashlib
|
2011-08-04 09:20:43 +00:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import simplejson
|
2011-08-29 15:52:56 +00:00
|
|
|
import time
|
2013-02-07 17:34:39 +00:00
|
|
|
import urllib
|
2011-12-16 00:44:02 +00:00
|
|
|
import urllib2
|
2013-02-28 16:15:37 +00:00
|
|
|
import urlparse
|
2011-10-05 17:58:26 +00:00
|
|
|
import xmlrpclib
|
2011-09-07 07:28:18 +00:00
|
|
|
import zlib
|
2011-03-10 15:53:45 +00:00
|
|
|
from xml.etree import ElementTree
|
2011-03-24 20:11:25 +00:00
|
|
|
from cStringIO import StringIO
|
2011-03-02 18:56:06 +00:00
|
|
|
|
2012-09-18 13:51:11 +00:00
|
|
|
import babel.messages.pofile
|
2011-12-20 15:05:56 +00:00
|
|
|
import werkzeug.utils
|
2012-02-10 14:00:21 +00:00
|
|
|
import werkzeug.wrappers
|
2012-01-13 09:06:11 +00:00
|
|
|
try:
|
|
|
|
import xlwt
|
|
|
|
except ImportError:
|
|
|
|
xlwt = None
|
2011-10-05 17:58:26 +00:00
|
|
|
|
2012-10-10 17:24:18 +00:00
|
|
|
import openerp
|
2013-04-23 12:41:51 +00:00
|
|
|
import openerp.modules.registry
|
2012-11-29 00:22:00 +00:00
|
|
|
from openerp.tools.translate import _
|
2013-06-11 14:50:28 +00:00
|
|
|
from openerp.tools import config
|
2012-10-10 17:24:18 +00:00
|
|
|
|
2012-10-10 20:37:53 +00:00
|
|
|
from .. import http
|
|
|
|
openerpweb = http
|
2011-03-03 14:55:52 +00:00
|
|
|
|
2011-03-02 18:56:06 +00:00
|
|
|
#----------------------------------------------------------
|
2012-08-12 21:48:27 +00:00
|
|
|
# OpenERP Web helpers
|
2011-03-02 18:56:06 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
|
2012-08-18 17:34:37 +00:00
|
|
|
def rjsmin(script):
    """ Minify js with a clever regex.

    Taken from http://opensource.perlig.de/rjsmin
    Apache License, Version 2.0 """
    def subber(match):
        """ Substitution callback """
        groups = match.groups()
        # groups 0-3 are runs kept verbatim (presumably code tokens, string
        # and regex literals per the rjsmin docs -- TODO confirm against
        # upstream); groups 4-7 are separator runs collapsed to a single
        # newline or space; anything else is dropped entirely
        return (
            groups[0] or
            groups[1] or
            groups[2] or
            groups[3] or
            (groups[4] and '\n') or
            (groups[5] and ' ') or
            (groups[6] and ' ') or
            (groups[7] and ' ') or
            ''
        )

    # NOTE: the pattern below is copied verbatim from rjsmin -- do not
    # reformat or "fix" it.  The script is wrapped in newlines so the
    # boundary lookbehinds always have a character to match against; the
    # surrounding whitespace is stripped again from the result.
    result = re.sub(
        r'([^\047"/\000-\040]+)|((?:(?:\047[^\047\\\r\n]*(?:\\(?:[^\r\n]|\r?'
        r'\n|\r)[^\047\\\r\n]*)*\047)|(?:"[^"\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|'
        r'\r)[^"\\\r\n]*)*"))[^\047"/\000-\040]*)|(?:(?<=[(,=:\[!&|?{};\r\n]'
        r')(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/'
        r'))*((?:/(?![\r\n/*])[^/\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*'
        r'(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/\\\[\r\n]*)*/)[^\047"/\000-\040]*'
        r'))|(?:(?<=[\000-#%-,./:-@\[-^`{-~-]return)(?:[\000-\011\013\014\01'
        r'6-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*((?:/(?![\r\n/*])[^/'
        r'\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]'
        r'*)*\]))[^/\\\[\r\n]*)*/)[^\047"/\000-\040]*))|(?<=[^\000-!#%&(*,./'
        r':-@\[\\^`{|~])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/'
        r'*][^*]*\*+)*/))*(?:((?:(?://[^\r\n]*)?[\r\n]))(?:[\000-\011\013\01'
        r'4\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[^\000-\040"#'
        r'%-\047)*,./:-@\\-^`|-~])|(?<=[^\000-#%-,./:-@\[-^`{-~-])((?:[\000-'
        r'\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=[^'
        r'\000-#%-,./:-@\[-^`{-~-])|(?<=\+)((?:[\000-\011\013\014\016-\040]|'
        r'(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=\+)|(?<=-)((?:[\000-\011\0'
        r'13\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=-)|(?:[\0'
        r'00-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))+|(?:'
        r'(?:(?://[^\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*'
        r']*\*+(?:[^/*][^*]*\*+)*/))*)+', subber, '\n%s\n' % script
    ).strip()
    return result
|
|
|
|
|
2013-06-11 11:03:27 +00:00
|
|
|
def db_list(req, force=False):
    """Return the database names visible for this request.

    The raw list obtained from the "db" service is narrowed down by the
    server's ``dbfilter`` option, in which ``%h`` is replaced by the
    request's host name and ``%d`` by the first dot-separated component
    of that host name.

    :param req: OpenERP request
    :param bool force: forwarded to the db service's ``list()``
    :returns: list of database names matching the filter
    """
    all_dbs = req.session.proxy("db").list(force)
    host = req.httprequest.environ['HTTP_HOST'].split(':')[0]
    domain = host.split('.')[0]
    pattern = openerp.tools.config['dbfilter'].replace('%h', host).replace('%d', domain)
    return [name for name in all_dbs if re.match(pattern, name)]
|
|
|
|
|
2013-03-06 00:57:55 +00:00
|
|
|
def db_monodb_redirect(req):
    # Delegate to db_redirect(); when database listing is disabled
    # (list_db=False) only pick the first database if it is the unique one.
    return db_redirect(req, not config['list_db'])
|
|
|
|
|
|
|
|
def db_redirect(req, match_first_only_if_unique):
    """Pick a database for the request and compute an optional redirect.

    Selection order: explicit ``db`` URL parameter, then the
    ``last_used_database`` cookie, then the first listed database (only
    when unique if *match_first_only_if_unique* is true).

    :param req: OpenERP request
    :param bool match_first_only_if_unique: when true, fall back to the
        first database only if exactly one database is available
    :returns: ``(db, redirect)`` where ``db`` is the chosen database name
        or False, and ``redirect`` is a URL (current path with an explicit
        ``db`` query parameter) or False when no redirect is needed
    """
    db = False
    redirect = False

    dbs = db_list(req, True)

    # 1 try the db in the url
    db_url = req.params.get('db')
    if db_url and db_url in dbs:
        # the URL already names a valid db: no redirect necessary
        return (db_url, False)

    # 2 use the database from the cookie if it's listable and still listed
    cookie_db = req.httprequest.cookies.get('last_used_database')
    if cookie_db in dbs:
        db = cookie_db

    # 3 use the first db if user can list databases
    if dbs and not db and (not match_first_only_if_unique or len(dbs) == 1):
        db = dbs[0]

    # redirect to the chosen db if multiple are available
    if db and len(dbs) > 1:
        # rebuild the query string with the chosen db made explicit
        query = dict(urlparse.parse_qsl(req.httprequest.query_string, keep_blank_values=True))
        query.update({'db': db})
        redirect = req.httprequest.path + '?' + urllib.urlencode(query)
    return (db, redirect)
|
2012-11-11 16:32:43 +00:00
|
|
|
|
2013-03-05 21:14:33 +00:00
|
|
|
def db_monodb(req):
    """Return the single database to use for this request, or False.

    Delegates to :func:`db_redirect` in strict mode: a database is only
    returned when the URL/cookie designates one or exactly one is listed.
    """
    # if only one db exists, return it else return False
    return db_redirect(req, True)[0]
|
2013-03-05 21:14:33 +00:00
|
|
|
|
2013-03-07 09:59:23 +00:00
|
|
|
def redirect_with_hash(req, url, code=303):
    """Redirect the browser to *url* while preserving ``location.hash``.

    Most IE and Safari versions decided not to preserve location.hash upon
    redirect. And even if IE10 pretends to support it, it still fails
    inexplicably in case of multiple redirects (and we do have some).
    See extensive test page at http://greenbytes.de/tech/tc/httpredirects/

    Instead of an HTTP redirect, a tiny HTML page is returned whose script
    performs the redirection client-side, appending the current fragment.

    :param req: OpenERP request (unused, kept for API compatibility)
    :param str url: target URL
    :param int code: intended status code (unused, kept for API compatibility)
    :returns: str, HTML payload performing the client-side redirect
    """
    # Escape characters that would let a crafted url break out of the
    # single-quoted JS string or close the <script> tag (script injection).
    url = url.replace("'", "%27").replace("<", "%3C")
    return "<html><head><script>window.location = '%s' + location.hash;</script></head></html>" % url
|
2012-11-11 16:32:43 +00:00
|
|
|
|
2012-08-12 21:48:27 +00:00
|
|
|
def module_topological_sort(modules):
    """ Return a list of module names sorted so that their dependencies of the
    modules are listed before the module itself

    modules is a dict of {module_name: dependencies}

    :param modules: modules to sort
    :type modules: dict
    :returns: list(str)
    """
    # every module some other module depends on.
    # NOTE: dict.values() (not the Python-2-only itervalues()) so the helper
    # keeps working on both Python 2 and 3 -- identical behaviour.
    dependencies = set(itertools.chain.from_iterable(modules.values()))

    # [Tarjan 1976], http://en.wikipedia.org/wiki/Topological_sorting#Algorithms
    # L: sorted output; S: roots, i.e. modules no other module depends on
    L = []
    S = set(module for module in modules if module not in dependencies)

    visited = set()

    def visit(n):
        # depth-first post-order walk: a module's dependencies are appended
        # to L before the module itself
        if n not in visited:
            visited.add(n)
            if n not in modules:
                # n not a web module, can not be resolved: ignore it
                return
            for m in modules[n]:
                visit(m)
            L.append(n)

    for n in S:
        visit(n)

    return L
|
|
|
|
|
|
|
|
def module_installed(req):
    """Return the installed modules loadable by the web process, sorted so
    that each module's dependencies come before it.

    :param req: OpenERP request whose session is used for model access
    :returns: list(str) of module names in dependency order
    """
    # Candidates module the current heuristic is the /static dir
    loadable = openerpweb.addons_manifest.keys()
    modules = {}

    # Retrieve database installed modules
    # TODO The following code should move to ir.module.module.list_installed_modules()
    Modules = req.session.model('ir.module.module')
    domain = [('state','=','installed'), ('name','in', loadable)]
    for module in Modules.search_read(domain, ['name', 'dependencies_id']):
        modules[module['name']] = []
        deps = module.get('dependencies_id')
        if deps:
            # resolve dependency record ids to the depended-on module names
            deps_read = req.session.model('ir.module.module.dependency').read(deps, ['name'])
            dependencies = [i['name'] for i in deps_read]
            modules[module['name']] = dependencies

    sorted_modules = module_topological_sort(modules)
    return sorted_modules
|
|
|
|
|
2012-10-02 22:02:58 +00:00
|
|
|
def module_installed_bypass_session(dbname):
    """Return installed, loadable modules of *dbname* in dependency order.

    Same contract as :func:`module_installed`, but queries the registry
    directly instead of going through a web session. Best effort: any
    error (e.g. unreachable or uninitialized database) yields an empty
    module set, but is logged instead of silently discarded.

    :param str dbname: name of the database to inspect
    :returns: list(str) of module names, dependencies first
    """
    loadable = openerpweb.addons_manifest.keys()
    modules = {}
    try:
        registry = openerp.modules.registry.RegistryManager.get(dbname)
        with registry.cursor() as cr:
            m = registry.get('ir.module.module')
            # TODO The following code should move to ir.module.module.list_installed_modules()
            domain = [('state','=','installed'), ('name','in', loadable)]
            ids = m.search(cr, 1, domain)
            for module in m.read(cr, 1, ids, ['name', 'dependencies_id']):
                modules[module['name']] = []
                deps = module.get('dependencies_id')
                if deps:
                    # resolve dependency record ids to module names
                    deps_read = registry.get('ir.module.module.dependency').read(cr, 1, deps, ['name'])
                    dependencies = [i['name'] for i in deps_read]
                    modules[module['name']] = dependencies
    except Exception:
        # deliberate best effort, but leave a trace instead of hiding the
        # failure entirely (was a bare `except Exception,e: pass`)
        logging.getLogger(__name__).exception(
            "Cannot fetch installed modules from database %r", dbname)
    sorted_modules = module_topological_sort(modules)
    return sorted_modules
|
|
|
|
|
2012-11-19 17:00:42 +00:00
|
|
|
def module_boot(req, db=None):
    """List the modules the web client should load for this request.

    Server-wide modules known to the web process come first, followed by
    the modules installed in the relevant database (deduplicated against
    the server-wide ones).

    :param req: OpenERP request
    :param db: database name; when absent, the request's single usable
        database (if any) is used
    :returns: list(str) of module names
    """
    wide_modules = openerp.conf.server_wide_modules or ['web']
    serverside = [mod for mod in wide_modules
                  if mod in openerpweb.addons_manifest]

    dbside = []
    monodb = db or db_monodb(req)
    if monodb:
        dbside = module_installed_bypass_session(monodb)
    # keep only database modules not already provided server-side
    dbside = [mod for mod in dbside if mod not in serverside]
    return serverside + dbside
|
2011-11-02 11:11:05 +00:00
|
|
|
|
|
|
|
def concat_xml(file_list):
    """Concatenate xml files

    The direct children of every file's root element are re-parented under
    a single root whose tag is taken from the first file.

    :param list(str) file_list: list of files to check
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    checksum = hashlib.new('sha1')
    if not file_list:
        return '', checksum.hexdigest()

    root = None
    for fname in file_list:
        with open(fname, 'rb') as fp:
            contents = fp.read()
            # checksum covers the raw bytes, not the merged serialization
            checksum.update(contents)
            fp.seek(0)
            xml = ElementTree.parse(fp).getroot()

        if root is None:
            root = ElementTree.Element(xml.tag)
        #elif root.tag != xml.tag:
        #    raise ValueError("Root tags missmatch: %r != %r" % (root.tag, xml.tag))

        # Element.getchildren() is deprecated and removed in Python 3.9;
        # iterating the element yields the same direct children.
        for child in list(xml):
            root.append(child)
    return ElementTree.tostring(root, 'utf-8'), checksum.hexdigest()
|
2011-11-02 11:11:05 +00:00
|
|
|
|
2012-01-13 15:01:10 +00:00
|
|
|
def concat_files(file_list, reader=None, intersperse=""):
    """ Concatenates contents of all provided files

    :param list(str) file_list: list of files to check
    :param function reader: reading procedure for each file; the default
        reader decodes each file as utf-8 (transparently skipping a BOM)
        and re-encodes it to utf-8 bytes
    :param str intersperse: string to intersperse between file contents
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    digest = hashlib.new('sha1')
    if not file_list:
        return '', digest.hexdigest()

    if reader is None:
        def reader(path):
            import codecs
            # utf-8-sig strips a leading byte-order mark when present
            with codecs.open(path, 'rb', "utf-8-sig") as stream:
                return stream.read().encode("utf-8")

    chunks = []
    for path in file_list:
        data = reader(path)
        # the checksum covers the raw contents, not the interspersed glue
        digest.update(data)
        chunks.append(data)

    return intersperse.join(chunks), digest.hexdigest()
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2012-11-16 13:58:22 +00:00
|
|
|
# minified payloads keyed by the sha1 checksum of the raw concatenation
concat_js_cache = {}


def concat_js(file_list):
    """Concatenate and minify the given javascript files.

    Minification is memoized in ``concat_js_cache`` on the checksum of the
    unminified concatenation, so an unchanged file set is minified once.

    :param list(str) file_list: javascript files to bundle
    :returns: (minified_content, checksum)
    """
    raw, checksum = concat_files(file_list, intersperse=';')
    try:
        minified = concat_js_cache[checksum]
    except KeyError:
        minified = rjsmin(raw)
        concat_js_cache[checksum] = minified
    return minified, checksum
|
2012-08-18 17:34:37 +00:00
|
|
|
|
2012-11-15 15:12:28 +00:00
|
|
|
def fs2web(path):
    """Convert a filesystem path into a web (forward-slash) path."""
    return path.replace(os.path.sep, '/')
|
|
|
|
|
2012-11-19 17:00:42 +00:00
|
|
|
def manifest_glob(req, extension, addons=None, db=None):
    """Expand the manifest glob patterns of *extension* for some addons.

    :param req: OpenERP request
    :param str extension: manifest key to expand ('js', 'css', 'qweb', ...)
    :param addons: comma-separated addon names; when None, every bootable
        module (see :func:`module_boot`) is considered
    :param db: database name forwarded to module_boot when addons is None
    :returns: list of (filesystem_path, web_path) pairs
    """
    if addons is None:
        addon_names = module_boot(req, db=db)
    else:
        addon_names = addons.split(',')

    matches = []
    for addon in addon_names:
        manifest = openerpweb.addons_manifest.get(addon, None)
        if not manifest:
            continue
        # normalize the addons path so it never ends with a separator
        addons_path = os.path.join(manifest['addons_path'], '')[:-1]
        for pattern in manifest.get(extension, []):
            full_pattern = os.path.normpath(os.path.join(addons_path, addon, pattern))
            for path in glob.glob(full_pattern):
                # web path = filesystem path relative to the addons dir
                matches.append((path, fs2web(path[len(addons_path):])))
    return matches
|
|
|
|
|
2012-11-19 17:00:42 +00:00
|
|
|
def manifest_list(req, extension, mods=None, db=None):
    """ list ressources to load specifying either:
    mods: a comma separated string listing modules
    db: a database name (return all installed modules in that database)

    Outside of debug mode a single bundling URL is returned; in debug
    mode each individual file is listed.
    """
    if not req.debug:
        # let the /web/webclient/<ext> controller serve one concatenated file
        bundle = '/web/webclient/' + extension
        if mods is not None:
            bundle += '?' + urllib.urlencode({'mods': mods})
        elif db:
            bundle += '?' + urllib.urlencode({'db': db})
        return [bundle]
    # debug mode: list every file so the browser can load them separately
    return [web_path for _fs_path, web_path
            in manifest_glob(req, extension, addons=mods, db=db)]
|
2011-09-30 20:10:18 +00:00
|
|
|
|
2012-08-08 11:41:10 +00:00
|
|
|
def get_last_modified(files):
    """ Returns the modification time of the most recently modified
    file provided

    :param list(str) files: names of files to check
    :return: most recent modification time amongst the fileset
    :rtype: datetime.datetime
    """
    files = list(files)
    if not files:
        # empty fileset: the epoch, older than any real file
        return datetime.datetime(1970, 1, 1)
    mtimes = (os.path.getmtime(fname) for fname in files)
    return max(datetime.datetime.fromtimestamp(mtime) for mtime in mtimes)
|
|
|
|
|
|
|
|
def make_conditional(req, response, last_modified=None, etag=None):
    """ Makes the provided response conditional based upon the request,
    and mandates revalidation from clients

    Uses Werkzeug's own :meth:`ETagResponseMixin.make_conditional`, after
    setting ``last_modified`` and ``etag`` correctly on the response object

    :param req: OpenERP request
    :type req: web.common.http.WebRequest
    :param response: Werkzeug response
    :type response: werkzeug.wrappers.Response
    :param datetime.datetime last_modified: last modification date of the response content
    :param str etag: some sort of checksum of the content (deep etag)
    :return: the response object provided
    :rtype: werkzeug.wrappers.Response
    """
    # force clients to revalidate on every request; the 304 path is cheap
    response.cache_control.must_revalidate = True
    response.cache_control.max_age = 0
    if last_modified:
        response.last_modified = last_modified
    if etag:
        response.set_etag(etag)
    # werkzeug turns this into a 304 itself when the validators match
    return response.make_conditional(req.httprequest)
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2012-08-12 21:48:27 +00:00
|
|
|
def login_and_redirect(req, db, login, key, redirect_url='/'):
    """Authenticate the request's session, then redirect with the session
    cookie set.

    :param req: OpenERP request
    :param db: database to authenticate against
    :param login: user login
    :param key: user password
    :param redirect_url: location to redirect to after authentication
    :returns: werkzeug redirect response
    """
    wsgi_env = req.httprequest.environ
    # subset of the WSGI environment forwarded to authenticate()
    auth_env = {
        'base_location': req.httprequest.url_root.rstrip('/'),
        'HTTP_HOST': wsgi_env['HTTP_HOST'],
        'REMOTE_ADDR': wsgi_env['REMOTE_ADDR'],
    }
    req.session.authenticate(db, login, key, auth_env)
    return set_cookie_and_redirect(req, redirect_url)
|
|
|
|
|
|
|
|
def set_cookie_and_redirect(req, redirect_url):
    """Build a 303 redirect response carrying the session-id cookie.

    :param req: OpenERP request, provides the session id
    :param redirect_url: location to redirect to
    :returns: werkzeug redirect response
    """
    response = werkzeug.utils.redirect(redirect_url, 303)
    # keep the Location header exactly as provided, even when relative
    response.autocorrect_location_header = False
    session_payload = urllib2.quote(simplejson.dumps(req.session_id))
    response.set_cookie('instance0|session_id', session_payload)
    return response
|
|
|
|
|
|
|
|
def load_actions_from_ir_values(req, key, key2, models, meta):
    """Fetch the actions bound through ir.values and normalize each one.

    :returns: list of (id, name, cleaned_action) triples
    """
    ir_values = req.session.model('ir.values')
    bindings = ir_values.get(key, key2, models, meta, req.context)

    result = []
    for action_id, name, action in bindings:
        result.append((action_id, name, clean_action(req, action)))
    return result
|
|
|
|
|
2012-11-26 10:54:50 +00:00
|
|
|
def clean_action(req, action):
    """Normalize an action descriptor in place.

    Guarantees the ``flags`` and ``type`` keys exist (``type`` defaults to
    ``ir.actions.act_window_close``); window actions are additionally run
    through :func:`fix_view_modes`.

    :param req: OpenERP request (unused here, kept for API compatibility)
    :param dict action: action descriptor to normalize
    :returns: the normalized action
    """
    action.setdefault('flags', {})
    if action.setdefault('type', 'ir.actions.act_window_close') == 'ir.actions.act_window':
        return fix_view_modes(action)
    return action
|
|
|
|
|
|
|
|
# I think generate_views,fix_view_modes should go into js ActionManager
|
|
|
|
def generate_views(action):
    """
    While the server generates a sequence called "views" computing dependencies
    between a bunch of stuff for views coming directly from the database
    (the ``ir.actions.act_window model``), it's also possible for e.g. buttons
    to return custom view dictionaries generated on the fly.

    In that case, there is no ``views`` key available on the action.

    Since the web client relies on ``action['views']``, generate it here from
    ``view_mode`` and ``view_id``.

    Currently handles two different cases:

    * no view_id, multiple view_mode
    * single view_id, single view_mode

    :param dict action: action descriptor dictionary to generate a views key for
    """
    view_id = action.get('view_id') or False
    if isinstance(view_id, (list, tuple)):
        # many2one-style value: (id, display_name) -> keep only the id
        view_id = view_id[0]

    # providing at least one view mode is a requirement, not an option
    view_modes = action['view_mode'].split(',')

    if len(view_modes) == 1:
        action['views'] = [(view_id, view_modes[0])]
        return

    if view_id:
        raise ValueError('Non-db action dictionaries should provide '
                         'either multiple view modes or a single view '
                         'mode and an optional view id.\n\n Got view '
                         'modes %r and view id %r for action %r' % (
                         view_modes, view_id, action))
    action['views'] = [(False, mode) for mode in view_modes]
|
|
|
|
|
|
|
|
def fix_view_modes(action):
    """ For historical reasons, OpenERP has weird dealings in relation to
    view_mode and the view_type attribute (on window actions):

    * one of the view modes is ``tree``, which stands for both list views
      and tree views
    * the choice is made by checking ``view_type``, which is either
      ``form`` for a list view or ``tree`` for an actual tree view

    This methods simply folds the view_type into view_mode by adding a
    new view mode ``list`` which is the result of the ``tree`` view_mode
    in conjunction with the ``form`` view_type.

    TODO: this should go into the doc, some kind of "peculiarities" section

    :param dict action: an action descriptor
    :returns: the same action descriptor, modified in place
    """
    if not action.get('views'):
        generate_views(action)

    # with a non-form view_type, 'tree' really is a tree: nothing to fold
    if action.pop('view_type', 'form') != 'form':
        return action

    def fold(mode):
        return 'list' if mode == 'tree' else mode

    if 'view_mode' in action:
        action['view_mode'] = ','.join(
            fold(mode) for mode in action['view_mode'].split(','))
    action['views'] = [
        [view, fold(mode)] for view, mode in action['views']
    ]

    return action
|
|
|
|
|
2012-09-18 13:51:11 +00:00
|
|
|
def _local_web_translations(trans_file):
    """Extract the openerp-web messages from a PO translation file.

    :param trans_file: path to a PO file
    :returns: list of ``{'id', 'string'}`` dicts, or None when the file
        cannot be opened or parsed
    """
    try:
        with open(trans_file) as stream:
            catalog = babel.messages.pofile.read_po(stream)
    except Exception:
        # missing or unreadable file: no local translations
        return
    return [{'id': entry.id, 'string': entry.string}
            for entry in catalog
            if entry.id and entry.string and "openerp-web" in entry.auto_comments]
|
2012-09-18 07:28:02 +00:00
|
|
|
|
2012-11-10 21:13:43 +00:00
|
|
|
def xml2json_from_elementtree(el, preserve_whitespaces=False):
    """ xml2json-direct
    Simple and straightforward XML-to-JSON converter in Python
    New BSD Licensed
    http://code.google.com/p/xml2json-direct/
    """
    node = {}
    tag = el.tag
    if tag[0] == "{":
        # Clark notation: "{namespace-uri}localname"
        ns, local = tag.rsplit("}", 1)
        node["tag"] = local
        node["namespace"] = ns[1:]
    else:
        node["tag"] = tag
    node["attrs"] = dict(el.items())

    def keep(text):
        # whitespace-only text nodes are dropped unless explicitly preserved
        return text and (preserve_whitespaces or text.strip() != '')

    children = []
    if keep(el.text):
        children.append(el.text)
    for child in el:
        children.append(xml2json_from_elementtree(child, preserve_whitespaces))
        if keep(child.tail):
            children.append(child.tail)
    node["children"] = children
    return node
|
2012-09-18 07:28:02 +00:00
|
|
|
|
2012-10-30 11:51:13 +00:00
|
|
|
def content_disposition(filename, req):
    """Build a Content-Disposition header value for *filename*, working
    around per-browser differences in filename encoding support.

    :param filename: suggested download name (unicode)
    :param req: OpenERP request, used to sniff the requesting browser
    :returns: header value string
    """
    raw = filename.encode('utf8')
    quoted = urllib2.quote(raw)
    agent = req.httprequest.user_agent
    major = int((agent.version or '0').split('.')[0])
    if agent.browser == 'msie' and major < 9:
        # old IE: bare percent-encoded filename, no quotes
        return "attachment; filename=%s" % quoted
    elif agent.browser == 'safari':
        # Safari: send the raw utf-8 name, double-quoted
        return "attachment; filename=\"%s\"" % raw
    else:
        # RFC 5987/6266 extended notation for everyone else
        return "attachment; filename*=UTF-8''%s" % quoted
|
|
|
|
|
|
|
|
|
2012-08-12 21:48:27 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# OpenERP Web web Controllers
|
|
|
|
#----------------------------------------------------------
|
|
|
|
|
2012-08-12 15:15:32 +00:00
|
|
|
html_template = """<!DOCTYPE html>
|
|
|
|
<html style="height: 100%%">
|
|
|
|
<head>
|
|
|
|
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/>
|
|
|
|
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
|
|
|
|
<title>OpenERP</title>
|
|
|
|
<link rel="shortcut icon" href="/web/static/src/img/favicon.ico" type="image/x-icon"/>
|
|
|
|
<link rel="stylesheet" href="/web/static/src/css/full.css" />
|
|
|
|
%(css)s
|
|
|
|
%(js)s
|
|
|
|
<script type="text/javascript">
|
|
|
|
$(function() {
|
|
|
|
var s = new openerp.init(%(modules)s);
|
|
|
|
%(init)s
|
|
|
|
});
|
|
|
|
</script>
|
|
|
|
</head>
|
2012-10-08 10:37:00 +00:00
|
|
|
<body>
|
|
|
|
<!--[if lte IE 8]>
|
2013-01-29 14:26:38 +00:00
|
|
|
<script src="//ajax.googleapis.com/ajax/libs/chrome-frame/1/CFInstall.min.js"></script>
|
2012-12-11 02:13:38 +00:00
|
|
|
<script>CFInstall.check({mode: "overlay"});</script>
|
2012-10-08 10:37:00 +00:00
|
|
|
<![endif]-->
|
|
|
|
</body>
|
2012-08-12 15:15:32 +00:00
|
|
|
</html>
|
|
|
|
"""
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2012-08-08 11:41:10 +00:00
|
|
|
class Home(openerpweb.Controller):
    """Root HTTP controller: serves the web client page and raw login."""
    _cp_path = '/'

    @openerpweb.httprequest
    def index(self, req, s_action=None, db=None, **kw):
        # choose a database (url param, cookie, or single available db);
        # redirect first if the url must be rewritten with an explicit db
        db, redir = db_monodb_redirect(req)
        if redir:
            return redirect_with_hash(req, redir)

        # script/link tags for every resource of the chosen database
        js = "\n ".join('<script type="text/javascript" src="%s"></script>' % i for i in manifest_list(req, 'js', db=db))
        css = "\n ".join('<link rel="stylesheet" href="%s">' % i for i in manifest_list(req, 'css', db=db))

        r = html_template % {
            'js': js,
            'css': css,
            'modules': simplejson.dumps(module_boot(req, db=db)),
            'init': 'var wc = new s.web.WebClient();wc.appendTo($(document.body));'
        }
        return r

    @openerpweb.httprequest
    def login(self, req, db, login, key):
        # refuse authentication against databases hidden by dbfilter
        if db not in db_list(req, True):
            return werkzeug.utils.redirect('/', 303)
        return login_and_redirect(req, db, login, key)
|
2012-08-08 11:41:10 +00:00
|
|
|
|
|
|
|
class WebClient(openerpweb.Controller):
|
|
|
|
_cp_path = "/web/webclient"
|
2011-09-30 20:10:18 +00:00
|
|
|
|
2011-07-22 14:28:24 +00:00
|
|
|
    @openerpweb.jsonrequest
    def csslist(self, req, mods=None):
        # JSON endpoint: css resources to load for the given modules
        return manifest_list(req, 'css', mods=mods)
|
2011-07-22 14:28:24 +00:00
|
|
|
|
|
|
|
    @openerpweb.jsonrequest
    def jslist(self, req, mods=None):
        # JSON endpoint: javascript resources to load for the given modules
        return manifest_list(req, 'js', mods=mods)
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2011-11-02 11:11:05 +00:00
|
|
|
    @openerpweb.jsonrequest
    def qweblist(self, req, mods=None):
        # JSON endpoint: qweb template files to load for the given modules
        return manifest_list(req, 'qweb', mods=mods)
|
2012-02-10 14:13:38 +00:00
|
|
|
|
2011-07-22 14:28:24 +00:00
|
|
|
@openerpweb.httprequest
|
2012-11-19 17:00:42 +00:00
|
|
|
def css(self, req, mods=None, db=None):
|
|
|
|
files = list(manifest_glob(req, 'css', addons=mods, db=db))
|
2012-08-08 11:41:10 +00:00
|
|
|
last_modified = get_last_modified(f[0] for f in files)
|
2012-02-10 14:00:21 +00:00
|
|
|
if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
|
|
|
|
return werkzeug.wrappers.Response(status=304)
|
|
|
|
|
2011-11-03 14:47:38 +00:00
|
|
|
file_map = dict(files)
|
|
|
|
|
|
|
|
rx_import = re.compile(r"""@import\s+('|")(?!'|"|/|https?://)""", re.U)
|
2012-07-16 14:29:18 +00:00
|
|
|
rx_url = re.compile(r"""url\s*\(\s*('|"|)(?!'|"|/|https?://|data:)""", re.U)
|
2011-11-03 14:47:38 +00:00
|
|
|
|
|
|
|
def reader(f):
|
|
|
|
"""read the a css file and absolutify all relative uris"""
|
[FIX] encoding issues when concatenating CSS or paths which may contain non-ascii
The concatenator tries to only work with bytes without ever wondering
what is in the byte bucket: files are read to `str`, concatenated with
`str` (via join) and returned as `str`, usually considered to be utf-8
encoded. It's the author's job to correctly encode files to utf-8.
So far so good.
On runbot, there's apparently an issue in some CSS files in some cases
on the runbot: `web_dir` finds itself to be typed `unicode` (because
it contains non-ascii characters? Not sure at all), as a result
`re.sub` will decode the corresponding file data when trying to inject
the dir as replacement and the CSS reader will return a `unicode`
object.
Then, when concat_files try to compute the checksum it will need bytes
thus re-encode everything using the default codec (ascii) and the
non-ascii character(s) will blow up the encoding with a
UnicodeEncodeError.
Solution:
* Assume CSS files can contain non-ascii characters (they can, and
do), decode them using `utf-8` to get `unicode` strings in the CSS
reader
* Inject web_dir as usual via replacement, this still yields a
`unicode` object (a `str` web_dir will simply be decoded using the
ASCII codec, a non-ascii web_dir should have been decoded to
`unicode` using sys.getfilesystemencoding)
* Cleanly re-encode evrything to utf-8, so that the code outside the
reader only ever manipulates 8-bit "byte" strings
bzr revid: xmo@openerp.com-20120405070711-vjyw8g4mge2goyik
2012-04-05 07:07:11 +00:00
|
|
|
with open(f, 'rb') as fp:
|
|
|
|
data = fp.read().decode('utf-8')
|
2011-11-03 14:47:38 +00:00
|
|
|
|
2012-01-27 11:30:33 +00:00
|
|
|
path = file_map[f]
|
2012-11-15 15:12:28 +00:00
|
|
|
web_dir = os.path.dirname(path)
|
2011-11-03 14:47:38 +00:00
|
|
|
|
|
|
|
data = re.sub(
|
|
|
|
rx_import,
|
|
|
|
r"""@import \1%s/""" % (web_dir,),
|
|
|
|
data,
|
|
|
|
)
|
|
|
|
|
|
|
|
data = re.sub(
|
|
|
|
rx_url,
|
|
|
|
r"""url(\1%s/""" % (web_dir,),
|
|
|
|
data,
|
|
|
|
)
|
[FIX] encoding issues when concatenating CSS or paths which may contain non-ascii
The concatenator tries to only work with bytes without ever wondering
what is in the byte bucket: files are read to `str`, concatenated with
`str` (via join) and returned as `str`, usually considered to be utf-8
encoded. It's the author's job to correctly encode files to utf-8.
So far so good.
On runbot, there's apparently an issue in some CSS files in some cases
on the runbot: `web_dir` finds itself to be typed `unicode` (because
it contains non-ascii characters? Not sure at all), as a result
`re.sub` will decode the corresponding file data when trying to inject
the dir as replacement and the CSS reader will return a `unicode`
object.
Then, when concat_files try to compute the checksum it will need bytes
thus re-encode everything using the default codec (ascii) and the
non-ascii character(s) will blow up the encoding with a
UnicodeEncodeError.
Solution:
* Assume CSS files can contain non-ascii characters (they can, and
do), decode them using `utf-8` to get `unicode` strings in the CSS
reader
* Inject web_dir as usual via replacement, this still yields a
`unicode` object (a `str` web_dir will simply be decoded using the
ASCII codec, a non-ascii web_dir should have been decoded to
`unicode` using sys.getfilesystemencoding)
* Cleanly re-encode evrything to utf-8, so that the code outside the
reader only ever manipulates 8-bit "byte" strings
bzr revid: xmo@openerp.com-20120405070711-vjyw8g4mge2goyik
2012-04-05 07:07:11 +00:00
|
|
|
return data.encode('utf-8')
|
2011-11-03 14:47:38 +00:00
|
|
|
|
2012-02-10 14:00:21 +00:00
|
|
|
content, checksum = concat_files((f[0] for f in files), reader)
|
|
|
|
|
2013-03-19 15:44:33 +00:00
|
|
|
# move up all @import and @charset rules to the top
|
|
|
|
matches = []
|
|
|
|
def push(matchobj):
|
|
|
|
matches.append(matchobj.group(0))
|
|
|
|
return ''
|
|
|
|
|
|
|
|
content = re.sub(re.compile("(@charset.+;$)", re.M), push, content)
|
|
|
|
content = re.sub(re.compile("(@import.+;$)", re.M), push, content)
|
|
|
|
|
|
|
|
matches.append(content)
|
|
|
|
content = '\n'.join(matches)
|
|
|
|
|
2012-08-08 11:41:10 +00:00
|
|
|
return make_conditional(
|
2012-02-10 14:13:38 +00:00
|
|
|
req, req.make_response(content, [('Content-Type', 'text/css')]),
|
|
|
|
last_modified, checksum)
|
2011-07-22 14:28:24 +00:00
|
|
|
|
|
|
|
@openerpweb.httprequest
|
2012-11-19 17:00:42 +00:00
|
|
|
def js(self, req, mods=None, db=None):
|
|
|
|
files = [f[0] for f in manifest_glob(req, 'js', addons=mods, db=db)]
|
2012-08-08 11:41:10 +00:00
|
|
|
last_modified = get_last_modified(files)
|
2012-02-10 14:00:21 +00:00
|
|
|
if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
|
|
|
|
return werkzeug.wrappers.Response(status=304)
|
|
|
|
|
2012-08-18 17:34:37 +00:00
|
|
|
content, checksum = concat_js(files)
|
2012-02-10 14:00:21 +00:00
|
|
|
|
2012-08-08 11:41:10 +00:00
|
|
|
return make_conditional(
|
2012-02-10 14:13:38 +00:00
|
|
|
req, req.make_response(content, [('Content-Type', 'application/javascript')]),
|
|
|
|
last_modified, checksum)
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2011-11-02 11:11:05 +00:00
|
|
|
@openerpweb.httprequest
|
2012-11-19 17:00:42 +00:00
|
|
|
def qweb(self, req, mods=None, db=None):
|
|
|
|
files = [f[0] for f in manifest_glob(req, 'qweb', addons=mods, db=db)]
|
2012-08-08 11:41:10 +00:00
|
|
|
last_modified = get_last_modified(files)
|
2012-02-10 14:00:21 +00:00
|
|
|
if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
|
|
|
|
return werkzeug.wrappers.Response(status=304)
|
|
|
|
|
2012-08-08 11:41:10 +00:00
|
|
|
content, checksum = concat_xml(files)
|
2011-11-02 11:11:05 +00:00
|
|
|
|
2012-08-08 11:41:10 +00:00
|
|
|
return make_conditional(
|
2012-02-10 14:13:38 +00:00
|
|
|
req, req.make_response(content, [('Content-Type', 'text/xml')]),
|
|
|
|
last_modified, checksum)
|
2011-11-02 11:11:05 +00:00
|
|
|
|
2012-09-18 07:28:02 +00:00
|
|
|
@openerpweb.jsonrequest
|
|
|
|
def bootstrap_translations(self, req, mods):
|
|
|
|
""" Load local translations from *.po files, as a temporary solution
|
|
|
|
until we have established a valid session. This is meant only
|
|
|
|
for translating the login page and db management chrome, using
|
|
|
|
the browser's language. """
|
|
|
|
# For performance reasons we only load a single translation, so for
|
|
|
|
# sub-languages (that should only be partially translated) we load the
|
|
|
|
# main language PO instead - that should be enough for the login screen.
|
2012-11-12 18:10:33 +00:00
|
|
|
lang = req.lang.split('_')[0]
|
2012-09-18 07:28:02 +00:00
|
|
|
|
|
|
|
translations_per_module = {}
|
|
|
|
for addon_name in mods:
|
2012-11-10 21:13:43 +00:00
|
|
|
if openerpweb.addons_manifest[addon_name].get('bootstrap'):
|
|
|
|
addons_path = openerpweb.addons_manifest[addon_name]['addons_path']
|
|
|
|
f_name = os.path.join(addons_path, addon_name, "i18n", lang + ".po")
|
|
|
|
if not os.path.exists(f_name):
|
|
|
|
continue
|
|
|
|
translations_per_module[addon_name] = {'messages': _local_web_translations(f_name)}
|
|
|
|
|
2012-09-18 07:28:02 +00:00
|
|
|
return {"modules": translations_per_module,
|
|
|
|
"lang_parameters": None}
|
|
|
|
|
2011-08-11 16:39:33 +00:00
|
|
|
@openerpweb.jsonrequest
|
2011-08-12 15:17:36 +00:00
|
|
|
def translations(self, req, mods, lang):
|
2012-09-18 07:28:02 +00:00
|
|
|
res_lang = req.session.model('res.lang')
|
|
|
|
ids = res_lang.search([("code", "=", lang)])
|
|
|
|
lang_params = None
|
2011-08-16 12:24:50 +00:00
|
|
|
if ids:
|
2012-09-18 07:28:02 +00:00
|
|
|
lang_params = res_lang.read(ids[0], ["direction", "date_format", "time_format",
|
2011-08-16 12:24:50 +00:00
|
|
|
"grouping", "decimal_point", "thousands_sep"])
|
2011-08-25 05:49:30 +00:00
|
|
|
|
2012-09-18 07:28:02 +00:00
|
|
|
# Regional languages (ll_CC) must inherit/override their parent lang (ll), but this is
|
|
|
|
# done server-side when the language is loaded, so we only need to load the user's lang.
|
|
|
|
ir_translation = req.session.model('ir.translation')
|
|
|
|
translations_per_module = {}
|
|
|
|
messages = ir_translation.search_read([('module','in',mods),('lang','=',lang),
|
|
|
|
('comments','like','openerp-web'),('value','!=',False),
|
|
|
|
('value','!=','')],
|
2012-12-26 07:45:39 +00:00
|
|
|
['module','src','value','lang'], order='module')
|
2012-09-18 07:28:02 +00:00
|
|
|
for mod, msg_group in itertools.groupby(messages, key=operator.itemgetter('module')):
|
|
|
|
translations_per_module.setdefault(mod,{'messages':[]})
|
|
|
|
translations_per_module[mod]['messages'].extend({'id': m['src'],
|
|
|
|
'string': m['value']} \
|
|
|
|
for m in msg_group)
|
|
|
|
return {"modules": translations_per_module,
|
|
|
|
"lang_parameters": lang_params}
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2011-09-05 15:14:20 +00:00
|
|
|
@openerpweb.jsonrequest
|
|
|
|
def version_info(self, req):
|
2012-12-19 15:45:05 +00:00
|
|
|
return openerp.service.web_services.RPC_VERSION_1
|
2011-09-05 15:14:20 +00:00
|
|
|
|
2011-12-15 12:07:32 +00:00
|
|
|
class Proxy(openerpweb.Controller):
    _cp_path = '/web/proxy'

    @openerpweb.jsonrequest
    def load(self, req, path):
        """ Proxies an HTTP request through a JSON request.

        It is strongly recommended to not request binary files through this,
        as the result will be a binary data blob as well.

        :param req: OpenERP request
        :param path: actual request path
        :return: file content
        """
        from werkzeug.test import Client
        from werkzeug.wrappers import BaseResponse

        # issue the request internally against our own WSGI app, no network
        test_client = Client(req.httprequest.app, BaseResponse)
        return test_client.get(path).data
|
2011-12-15 12:07:32 +00:00
|
|
|
|
2011-07-13 10:26:12 +00:00
|
|
|
class Database(openerpweb.Controller):
    # Controller exposing database administration endpoints (list, create,
    # duplicate, drop, backup, restore, master password change). All calls
    # are forwarded to the server's "db" RPC service.
    _cp_path = "/web/database"

    @openerpweb.jsonrequest
    def get_list(self, req):
        # TODO change js to avoid calling this method if in monodb mode
        try:
            return db_list(req)
        except xmlrpclib.Fault:
            # listing may be forbidden server-side; fall back to the single
            # configured database when one exists, otherwise re-raise
            monodb = db_monodb(req)
            if monodb:
                return [monodb]
            raise

    @openerpweb.jsonrequest
    def create(self, req, fields):
        # `fields` is a list of {'name': ..., 'value': ...} form entries;
        # flatten it into a plain dict before forwarding to the RPC service
        params = dict(map(operator.itemgetter('name', 'value'), fields))
        return req.session.proxy("db").create_database(
            params['super_admin_pwd'],
            params['db_name'],
            bool(params.get('demo_data')),
            params['db_lang'],
            params['create_admin_pwd'])

    @openerpweb.jsonrequest
    def duplicate(self, req, fields):
        # same form-entry flattening as create()
        params = dict(map(operator.itemgetter('name', 'value'), fields))
        duplicate_attrs = (
            params['super_admin_pwd'],
            params['db_original_name'],
            params['db_name'],
        )
        return req.session.proxy("db").duplicate_database(*duplicate_attrs)

    @openerpweb.jsonrequest
    def drop(self, req, fields):
        # extract the master password and target database from form entries
        password, db = operator.itemgetter(
            'drop_pwd', 'drop_db')(
                dict(map(operator.itemgetter('name', 'value'), fields)))

        try:
            if req.session.proxy("db").drop(password, db):return True
        except xmlrpclib.Fault, e:
            # wrong master password: report the server fault to the client
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                return {'error': e.faultCode, 'title': 'Drop Database'}
        # drop() returned falsy or an unexpected fault occurred
        return {'error': _('Could not drop database !'), 'title': _('Drop Database')}

    @openerpweb.httprequest
    def backup(self, req, backup_db, backup_pwd, token):
        # dump() returns the database as base64; decode to raw bytes for
        # the file download response
        try:
            db_dump = base64.b64decode(
                req.session.proxy("db").dump(backup_pwd, backup_db))
            filename = "%(db)s_%(timestamp)s.dump" % {
                'db': backup_db,
                'timestamp': datetime.datetime.utcnow().strftime(
                    "%Y-%m-%d_%H-%M-%SZ")
            }
            # the fileToken cookie lets the client JS detect download completion
            return req.make_response(db_dump,
               [('Content-Type', 'application/octet-stream; charset=binary'),
               ('Content-Disposition', content_disposition(filename, req))],
               {'fileToken': token}
            )
        except xmlrpclib.Fault, e:
            # this is an httprequest, so errors are serialized manually for
            # the client-side form handler
            return simplejson.dumps([[],[{'error': e.faultCode, 'title': _('Backup Database')}]])

    @openerpweb.httprequest
    def restore(self, req, db_file, restore_pwd, new_db):
        try:
            # restore() expects the dump re-encoded as base64
            data = base64.b64encode(db_file.read())
            req.session.proxy("db").restore(restore_pwd, new_db, data)
            return ''
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                raise Exception("AccessDenied")

    @openerpweb.jsonrequest
    def change_password(self, req, fields):
        # change the *master* (super admin) password, not a user password
        old_password, new_password = operator.itemgetter(
            'old_pwd', 'new_pwd')(
                dict(map(operator.itemgetter('name', 'value'), fields)))
        try:
            return req.session.proxy("db").change_admin_password(old_password, new_password)
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                return {'error': e.faultCode, 'title': _('Change Password')}
        return {'error': _('Error, password not changed !'), 'title': _('Change Password')}
|
2011-07-13 10:26:12 +00:00
|
|
|
|
2011-03-10 11:51:23 +00:00
|
|
|
class Session(openerpweb.Controller):
    # Controller exposing session lifecycle endpoints: authentication,
    # session introspection, user password change, and a small per-session
    # action store used by the client to survive page reloads.
    _cp_path = "/web/session"

    def session_info(self, req):
        # not an endpoint itself: shared helper used by get_session_info()
        # and authenticate() to describe the current session to the client
        req.session.ensure_valid()
        return {
            "session_id": req.session_id,
            "uid": req.session._uid,
            # no context is available before login, hence the uid guard
            "user_context": req.session.get_context() if req.session._uid else {},
            "db": req.session._db,
            "username": req.session._login,
        }

    @openerpweb.jsonrequest
    def get_session_info(self, req):
        # JSON-RPC wrapper around session_info()
        return self.session_info(req)

    @openerpweb.jsonrequest
    def authenticate(self, req, db, login, password, base_location=None):
        # pass along enough of the WSGI environment for the server to build
        # absolute URLs and log the client address
        wsgienv = req.httprequest.environ
        env = dict(
            base_location=base_location,
            HTTP_HOST=wsgienv['HTTP_HOST'],
            REMOTE_ADDR=wsgienv['REMOTE_ADDR'],
        )
        req.session.authenticate(db, login, password, env)

        return self.session_info(req)

    @openerpweb.jsonrequest
    def change_password (self,req,fields):
        # change the *current user's* password (contrast with
        # Database.change_password which changes the master password)
        old_password, new_password,confirm_password = operator.itemgetter('old_pwd', 'new_password','confirm_pwd')(
                dict(map(operator.itemgetter('name', 'value'), fields)))
        if not (old_password.strip() and new_password.strip() and confirm_password.strip()):
            return {'error':_('You cannot leave any password empty.'),'title': _('Change Password')}
        if new_password != confirm_password:
            return {'error': _('The new password and its confirmation must be identical.'),'title': _('Change Password')}
        try:
            if req.session.model('res.users').change_password(
                old_password, new_password):
                return {'new_password':new_password}
        except Exception:
            # the server raises when the old password does not match
            return {'error': _('The old password you provided is incorrect, your password was not changed.'), 'title': _('Change Password')}
        return {'error': _('Error, password not changed !'), 'title': _('Change Password')}

    @openerpweb.jsonrequest
    def sc_list(self, req):
        # list the current user's menu shortcuts
        return req.session.model('ir.ui.view_sc').get_sc(
            req.session._uid, "ir.ui.menu", req.context)

    @openerpweb.jsonrequest
    def get_lang_list(self, req):
        # languages installable on a new database, served by the db service
        try:
            return req.session.proxy("db").list_lang() or []
        except Exception, e:
            return {"error": e, "title": _("Languages")}

    @openerpweb.jsonrequest
    def modules(self, req):
        # return all installed modules. Web client is smart enough to not load a module twice
        return module_installed(req)

    @openerpweb.jsonrequest
    def save_session_action(self, req, the_action):
        """
        This method store an action object in the session object and returns an integer
        identifying that action. The method get_session_action() can be used to get
        back the action.

        :param the_action: The action to save in the session.
        :type the_action: anything
        :return: A key identifying the saved action.
        :rtype: integer
        """
        saved_actions = req.httpsession.get('saved_actions')
        if not saved_actions:
            saved_actions = {"next":1, "actions":{}}
            req.httpsession['saved_actions'] = saved_actions
        # we don't allow more than 10 stored actions
        if len(saved_actions["actions"]) >= 10:
            # evict the oldest entry (keys are monotonically increasing)
            del saved_actions["actions"][min(saved_actions["actions"])]
        key = saved_actions["next"]
        saved_actions["actions"][key] = the_action
        saved_actions["next"] = key + 1
        return key

    @openerpweb.jsonrequest
    def get_session_action(self, req, key):
        """
        Gets back a previously saved action. This method can return None if the action
        was saved since too much time (this case should be handled in a smart way).

        :param key: The key given by save_session_action()
        :type key: integer
        :return: The saved action or None.
        :rtype: anything
        """
        saved_actions = req.httpsession.get('saved_actions')
        if not saved_actions:
            return None
        return saved_actions["actions"].get(key)

    @openerpweb.jsonrequest
    def check(self, req):
        # raise if the session is no longer valid; used as a keepalive probe
        req.session.assert_valid()
        return None

    @openerpweb.jsonrequest
    def destroy(self, req):
        # flag the session for deletion; actual cleanup happens in the
        # request dispatching layer
        req.session._suicide = True
2011-03-02 18:56:06 +00:00
|
|
|
class Menu(openerpweb.Controller):
    # Controller serving the application menu tree and its needaction
    # counters to the web client.
    _cp_path = "/web/menu"

    @openerpweb.jsonrequest
    def get_user_roots(self, req):
        """ Return all root menu ids visible for the session user.

        :param req: A request object, with an OpenERP session attribute
        :type req: < session -> OpenERPSession >
        :return: the root menu ids
        :rtype: list(int)
        """
        s = req.session
        Menus = s.model('ir.ui.menu')
        # If a menu action is defined use its domain to get the root menu items
        user_menu_id = s.model('res.users').read([s._uid], ['menu_id'],
                                                 req.context)[0]['menu_id']

        # default: top-level menus are those without a parent
        menu_domain = [('parent_id', '=', False)]
        if user_menu_id:
            domain_string = s.model('ir.actions.act_window').read(
                [user_menu_id[0]], ['domain'],req.context)[0]['domain']
            if domain_string:
                # the domain is stored as a python literal string
                menu_domain = ast.literal_eval(domain_string)

        return Menus.search(menu_domain, 0, False, False, req.context)

    @openerpweb.jsonrequest
    def load(self, req):
        """ Loads all menu items (all applications and their sub-menus).

        :param req: A request object, with an OpenERP session attribute
        :type req: < session -> OpenERPSession >
        :return: the menu root
        :rtype: dict('children': menu_nodes)
        """
        Menus = req.session.model('ir.ui.menu')

        fields = ['name', 'sequence', 'parent_id', 'action']
        menu_root_ids = self.get_user_roots(req)
        menu_roots = Menus.read(menu_root_ids, fields, req.context) if menu_root_ids else []
        # synthetic root node wrapping the visible applications
        menu_root = {
            'id': False,
            'name': 'root',
            'parent_id': [-1, ''],
            'children': menu_roots,
            'all_menu_ids': menu_root_ids,
        }
        if not menu_roots:
            return menu_root

        # menus are loaded fully unlike a regular tree view, cause there are a
        # limited number of items (752 when all 6.1 addons are installed)
        menu_ids = Menus.search([('id', 'child_of', menu_root_ids)], 0, False, False, req.context)
        menu_items = Menus.read(menu_ids, fields, req.context)
        # adds roots at the end of the sequence, so that they will overwrite
        # equivalent menu items from full menu read when put into id:item
        # mapping, resulting in children being correctly set on the roots.
        menu_items.extend(menu_roots)
        menu_root['all_menu_ids'] = menu_ids # includes menu_root_ids!

        # make a tree using parent_id
        menu_items_map = dict(
            (menu_item["id"], menu_item) for menu_item in menu_items)
        for menu_item in menu_items:
            # parent_id reads as [id, display_name] for set many2ones
            if menu_item['parent_id']:
                parent = menu_item['parent_id'][0]
            else:
                parent = False
            if parent in menu_items_map:
                menu_items_map[parent].setdefault(
                    'children', []).append(menu_item)

        # sort by sequence a tree using parent_id
        for menu_item in menu_items:
            menu_item.setdefault('children', []).sort(
                key=operator.itemgetter('sequence'))

        return menu_root

    @openerpweb.jsonrequest
    def load_needaction(self, req, menu_ids):
        """ Loads needaction counters for specific menu ids.

        :return: needaction data
        :rtype: dict(menu_id: {'needaction_enabled': boolean, 'needaction_counter': int})
        """
        return req.session.model('ir.ui.menu').get_needaction_data(menu_ids, req.context)

    @openerpweb.jsonrequest
    def action(self, req, menu_id):
        # still used by web_shortcut
        actions = load_actions_from_ir_values(req,'action', 'tree_but_open',
                                              [('ir.ui.menu', menu_id)], False)
        return {"action": actions}
|
2011-03-02 18:56:06 +00:00
|
|
|
|
|
|
|
class DataSet(openerpweb.Controller):
    # Generic model RPC gateway for the web client: search/read, generic
    # method invocation (call/call_kw/call_button), workflow signals and
    # record resequencing.
    _cp_path = "/web/dataset"

    @openerpweb.jsonrequest
    def search_read(self, req, model, fields=False, offset=0, limit=False, domain=None, sort=None):
        # thin JSON-RPC wrapper around do_search_read()
        return self.do_search_read(req, model, fields, offset, limit, domain, sort)
    def do_search_read(self, req, model, fields=False, offset=0, limit=False, domain=None
                       , sort=None):
        """ Performs a search() followed by a read() (if needed) using the
        provided search criteria

        :param req: a JSON-RPC request object
        :type req: openerpweb.JsonRequest
        :param str model: the name of the model to search on
        :param fields: a list of the fields to return in the result records
        :type fields: [str]
        :param int offset: from which index should the results start being returned
        :param int limit: the maximum number of records to return
        :param list domain: the search domain for the query
        :param list sort: sorting directives
        :returns: A structure (dict) with two keys: ids (all the ids matching
                  the (domain, context) pair) and records (paginated records
                  matching fields selection set)
        :rtype: list
        """
        Model = req.session.model(model)

        ids = Model.search(domain, offset or 0, limit or False, sort or False,
                           req.context)
        # when the page is full there may be more records: count them all;
        # otherwise the total length can be derived from the page itself
        if limit and len(ids) == limit:
            length = Model.search_count(domain, req.context)
        else:
            length = len(ids) + (offset or 0)
        if fields and fields == ['id']:
            # shortcut read if we only want the ids
            return {
                'length': length,
                'records': [{'id': id} for id in ids]
            }

        records = Model.read(ids, fields or False, req.context)

        # read() does not guarantee the search order; re-sort the records
        # to match the order of `ids` (dropping any that vanished meanwhile)
        index = dict((r['id'], r) for r in records)
        records = [index[x] for x in ids if x in index]

        return {
            'length': length,
            'records': records
        }

    @openerpweb.jsonrequest
    def load(self, req, model, id, fields):
        # read a single record (all fields); `fields` is accepted but unused
        m = req.session.model(model)
        value = {}
        r = m.read([id], False, req.context)
        if r:
            value = r[0]
        return {'value': value}

    def call_common(self, req, model, method, args, domain_id=None, context_id=None):
        # legacy positional-call entry point, delegates to _call_kw
        return self._call_kw(req, model, method, args, {})

    def _call_kw(self, req, model, method, args, kwargs):
        # Temporary implements future display_name special field for model#read()
        if method in ('read', 'search_read') and kwargs.get('context', {}).get('future_display_name'):
            if 'display_name' in args[1]:
                # resolve display names through name_get/name_search, then
                # strip the pseudo-field before performing the real call
                if method == 'read':
                    names = dict(req.session.model(model).name_get(args[0], **kwargs))
                else:
                    names = dict(req.session.model(model).name_search('', args[0], **kwargs))
                args[1].remove('display_name')
                records = getattr(req.session.model(model), method)(*args, **kwargs)
                for record in records:
                    record['display_name'] = \
                        names.get(record['id']) or "%s#%d" % (model, (record['id']))
                return records

        return getattr(req.session.model(model), method)(*args, **kwargs)

    @openerpweb.jsonrequest
    def call(self, req, model, method, args, domain_id=None, context_id=None):
        # positional-arguments method call
        return self._call_kw(req, model, method, args, {})

    @openerpweb.jsonrequest
    def call_kw(self, req, model, method, args, kwargs):
        # positional + keyword arguments method call
        return self._call_kw(req, model, method, args, kwargs)

    @openerpweb.jsonrequest
    def call_button(self, req, model, method, args, domain_id=None, context_id=None):
        # invoke a button handler; if it returns an action descriptor,
        # normalize it for the client, otherwise signal "no action"
        action = self._call_kw(req, model, method, args, {})
        if isinstance(action, dict) and action.get('type') != '':
            return clean_action(req, action)
        return False

    @openerpweb.jsonrequest
    def exec_workflow(self, req, model, id, signal):
        # send a workflow signal to a single record
        return req.session.exec_workflow(model, id, signal)

    @openerpweb.jsonrequest
    def resequence(self, req, model, ids, field='sequence', offset=0):
        """ Re-sequences a number of records in the model, by their ids

        The re-sequencing starts at the first model of ``ids``, the sequence
        number is incremented by one after each record and starts at ``offset``

        :param ids: identifiers of the records to resequence, in the new sequence order
        :type ids: list(id)
        :param str field: field used for sequence specification, defaults to
                          "sequence"
        :param int offset: sequence number for first record in ``ids``, allows
                           starting the resequencing from an arbitrary number,
                           defaults to ``0``
        """
        m = req.session.model(model)
        # no-op when the model has no such sequence field
        if not m.fields_get([field]):
            return False
        # python 2.6 has no start parameter
        for i, id in enumerate(ids):
            m.write(id, { field: i + offset })
        return True
|
|
|
|
|
2011-03-24 20:11:25 +00:00
|
|
|
class View(openerpweb.Controller):
    _cp_path = "/web/view"

    @openerpweb.jsonrequest
    def add_custom(self, req, view_id, arch):
        """ Store a customized arch of view ``view_id`` for the current
        user. """
        CustomView = req.session.model('ir.ui.view.custom')
        values = {
            'user_id': req.session._uid,
            'ref_id': view_id,
            'arch': arch
        }
        CustomView.create(values, req.context)
        return {'result': True}

    @openerpweb.jsonrequest
    def undo_custom(self, req, view_id, reset=False):
        """ Discard the current user's customizations of view ``view_id``:
        only the most recent one by default, all of them when ``reset``
        is set. """
        CustomView = req.session.model('ir.ui.view.custom')
        custom_ids = CustomView.search(
            [('user_id', '=', req.session._uid), ('ref_id', '=', view_id)],
            0, False, False, req.context)
        if not custom_ids:
            return {'result': False}
        if reset:
            CustomView.unlink(custom_ids, req.context)
        else:
            CustomView.unlink([custom_ids[0]], req.context)
        return {'result': True}
|
|
|
|
|
2011-09-06 20:54:38 +00:00
|
|
|
class TreeView(View):
    _cp_path = "/web/treeview"

    @openerpweb.jsonrequest
    def action(self, req, model, id):
        """ Return the 'tree_but_open' actions bound to the given record. """
        bindings = [(model, id)]
        return load_actions_from_ir_values(
            req, 'action', 'tree_but_open', bindings, False)
|
|
|
|
|
2011-05-23 14:52:19 +00:00
|
|
|
class Binary(openerpweb.Controller):
|
2011-09-05 11:03:09 +00:00
|
|
|
_cp_path = "/web/binary"
|
2011-05-23 14:52:19 +00:00
|
|
|
|
|
|
|
@openerpweb.httprequest
|
2011-08-18 18:51:45 +00:00
|
|
|
def image(self, req, model, id, field, **kw):
|
2012-06-18 15:47:10 +00:00
|
|
|
last_update = '__last_update'
|
2011-08-18 18:51:45 +00:00
|
|
|
Model = req.session.model(model)
|
2012-06-18 15:47:10 +00:00
|
|
|
headers = [('Content-Type', 'image/png')]
|
|
|
|
etag = req.httprequest.headers.get('If-None-Match')
|
|
|
|
hashed_session = hashlib.md5(req.session_id).hexdigest()
|
2012-08-13 13:56:53 +00:00
|
|
|
id = None if not id else simplejson.loads(id)
|
|
|
|
if type(id) is list:
|
|
|
|
id = id[0] # m2o
|
2012-06-18 15:47:10 +00:00
|
|
|
if etag:
|
|
|
|
if not id and hashed_session == etag:
|
|
|
|
return werkzeug.wrappers.Response(status=304)
|
|
|
|
else:
|
2012-11-26 10:54:50 +00:00
|
|
|
date = Model.read([id], [last_update], req.context)[0].get(last_update)
|
2012-06-18 15:47:10 +00:00
|
|
|
if hashlib.md5(date).hexdigest() == etag:
|
|
|
|
return werkzeug.wrappers.Response(status=304)
|
2011-09-06 11:11:57 +00:00
|
|
|
|
2012-06-18 15:47:10 +00:00
|
|
|
retag = hashed_session
|
2011-05-23 14:52:19 +00:00
|
|
|
try:
|
|
|
|
if not id:
|
2012-11-26 10:54:50 +00:00
|
|
|
res = Model.default_get([field], req.context).get(field)
|
2012-11-08 10:24:23 +00:00
|
|
|
image_base64 = res
|
2011-05-23 14:52:19 +00:00
|
|
|
else:
|
2012-11-26 10:54:50 +00:00
|
|
|
res = Model.read([id], [last_update, field], req.context)[0]
|
2012-06-18 15:47:10 +00:00
|
|
|
retag = hashlib.md5(res.get(last_update)).hexdigest()
|
2012-11-08 10:24:23 +00:00
|
|
|
image_base64 = res.get(field)
|
|
|
|
|
|
|
|
if kw.get('resize'):
|
2012-11-26 10:54:50 +00:00
|
|
|
resize = kw.get('resize').split(',')
|
2012-11-08 10:24:23 +00:00
|
|
|
if len(resize) == 2 and int(resize[0]) and int(resize[1]):
|
|
|
|
width = int(resize[0])
|
|
|
|
height = int(resize[1])
|
|
|
|
# resize maximum 500*500
|
|
|
|
if width > 500: width = 500
|
|
|
|
if height > 500: height = 500
|
|
|
|
image_base64 = openerp.tools.image_resize_image(base64_source=image_base64, size=(width, height), encoding='base64', filetype='PNG')
|
2012-12-26 07:45:39 +00:00
|
|
|
|
2012-11-08 10:24:23 +00:00
|
|
|
image_data = base64.b64decode(image_base64)
|
|
|
|
|
2011-09-06 11:12:26 +00:00
|
|
|
except (TypeError, xmlrpclib.Fault):
|
2011-09-06 11:11:57 +00:00
|
|
|
image_data = self.placeholder(req)
|
2012-06-18 15:47:10 +00:00
|
|
|
headers.append(('ETag', retag))
|
|
|
|
headers.append(('Content-Length', len(image_data)))
|
2012-06-19 09:06:42 +00:00
|
|
|
try:
|
|
|
|
ncache = int(kw.get('cache'))
|
|
|
|
headers.append(('Cache-Control', 'no-cache' if ncache == 0 else 'max-age=%s' % (ncache)))
|
|
|
|
except:
|
|
|
|
pass
|
2012-06-18 15:47:10 +00:00
|
|
|
return req.make_response(image_data, headers)
|
2012-12-11 11:18:24 +00:00
|
|
|
|
|
|
|
def placeholder(self, req, image='placeholder.png'):
|
2011-10-05 15:57:40 +00:00
|
|
|
addons_path = openerpweb.addons_manifest['web']['addons_path']
|
2012-12-11 11:18:24 +00:00
|
|
|
return open(os.path.join(addons_path, 'web', 'static', 'src', 'img', image), 'rb').read()
|
2011-05-23 14:52:19 +00:00
|
|
|
|
|
|
|
@openerpweb.httprequest
|
2012-01-10 14:35:18 +00:00
|
|
|
def saveas(self, req, model, field, id=None, filename_field=None, **kw):
|
|
|
|
""" Download link for files stored as binary fields.
|
|
|
|
|
|
|
|
If the ``id`` parameter is omitted, fetches the default value for the
|
|
|
|
binary field (via ``default_get``), otherwise fetches the field for
|
|
|
|
that precise record.
|
|
|
|
|
|
|
|
:param req: OpenERP request
|
|
|
|
:type req: :class:`web.common.http.HttpRequest`
|
|
|
|
:param str model: name of the model to fetch the binary from
|
|
|
|
:param str field: binary field
|
|
|
|
:param str id: id of the record from which to fetch the binary
|
|
|
|
:param str filename_field: field holding the file's name, if any
|
|
|
|
:returns: :class:`werkzeug.wrappers.Response`
|
|
|
|
"""
|
2011-08-18 18:51:45 +00:00
|
|
|
Model = req.session.model(model)
|
2012-01-10 15:39:05 +00:00
|
|
|
fields = [field]
|
|
|
|
if filename_field:
|
|
|
|
fields.append(filename_field)
|
2011-10-17 14:58:49 +00:00
|
|
|
if id:
|
2012-11-26 10:54:50 +00:00
|
|
|
res = Model.read([int(id)], fields, req.context)[0]
|
2011-10-17 14:58:49 +00:00
|
|
|
else:
|
2012-11-26 10:54:50 +00:00
|
|
|
res = Model.default_get(fields, req.context)
|
2011-10-14 11:09:42 +00:00
|
|
|
filecontent = base64.b64decode(res.get(field, ''))
|
2011-05-23 14:52:19 +00:00
|
|
|
if not filecontent:
|
2011-09-02 08:58:53 +00:00
|
|
|
return req.not_found()
|
2011-05-23 14:52:19 +00:00
|
|
|
else:
|
|
|
|
filename = '%s_%s' % (model.replace('.', '_'), id)
|
2012-01-10 14:35:18 +00:00
|
|
|
if filename_field:
|
|
|
|
filename = res.get(filename_field, '') or filename
|
2011-09-02 08:58:53 +00:00
|
|
|
return req.make_response(filecontent,
|
|
|
|
[('Content-Type', 'application/octet-stream'),
|
2012-10-30 11:51:13 +00:00
|
|
|
('Content-Disposition', content_disposition(filename, req))])
|
2011-05-23 14:52:19 +00:00
|
|
|
|
2012-02-08 10:39:35 +00:00
|
|
|
@openerpweb.httprequest
|
|
|
|
def saveas_ajax(self, req, data, token):
|
|
|
|
jdata = simplejson.loads(data)
|
|
|
|
model = jdata['model']
|
|
|
|
field = jdata['field']
|
2012-12-11 10:33:57 +00:00
|
|
|
data = jdata['data']
|
2012-02-08 10:39:35 +00:00
|
|
|
id = jdata.get('id', None)
|
|
|
|
filename_field = jdata.get('filename_field', None)
|
2012-11-26 10:54:50 +00:00
|
|
|
context = jdata.get('context', {})
|
2012-02-08 10:39:35 +00:00
|
|
|
|
|
|
|
Model = req.session.model(model)
|
|
|
|
fields = [field]
|
|
|
|
if filename_field:
|
|
|
|
fields.append(filename_field)
|
2012-12-11 10:33:57 +00:00
|
|
|
if data:
|
2015-01-29 11:47:29 +00:00
|
|
|
res = {field: data, filename_field: jdata.get('filename', None)}
|
2012-12-11 10:33:57 +00:00
|
|
|
elif id:
|
2012-02-08 10:39:35 +00:00
|
|
|
res = Model.read([int(id)], fields, context)[0]
|
|
|
|
else:
|
|
|
|
res = Model.default_get(fields, context)
|
|
|
|
filecontent = base64.b64decode(res.get(field, ''))
|
|
|
|
if not filecontent:
|
2012-11-29 00:22:00 +00:00
|
|
|
raise ValueError(_("No content found for field '%s' on '%s:%s'") %
|
2012-02-08 10:39:35 +00:00
|
|
|
(field, model, id))
|
|
|
|
else:
|
|
|
|
filename = '%s_%s' % (model.replace('.', '_'), id)
|
|
|
|
if filename_field:
|
|
|
|
filename = res.get(filename_field, '') or filename
|
|
|
|
return req.make_response(filecontent,
|
|
|
|
headers=[('Content-Type', 'application/octet-stream'),
|
2012-10-30 11:51:13 +00:00
|
|
|
('Content-Disposition', content_disposition(filename, req))],
|
2013-07-16 13:15:48 +00:00
|
|
|
cookies={'fileToken': token})
|
2012-02-08 10:39:35 +00:00
|
|
|
|
2011-05-23 14:52:19 +00:00
|
|
|
@openerpweb.httprequest
|
2011-09-06 11:53:16 +00:00
|
|
|
def upload(self, req, callback, ufile):
|
2011-06-29 13:12:33 +00:00
|
|
|
# TODO: might be useful to have a configuration flag for max-length file uploads
|
2012-12-14 15:29:57 +00:00
|
|
|
out = """<script language="javascript" type="text/javascript">
|
|
|
|
var win = window.top.window;
|
|
|
|
win.jQuery(win).trigger(%s, %s);
|
|
|
|
</script>"""
|
2011-05-23 14:52:19 +00:00
|
|
|
try:
|
2011-09-06 11:53:16 +00:00
|
|
|
data = ufile.read()
|
2011-12-15 10:29:10 +00:00
|
|
|
args = [len(data), ufile.filename,
|
2011-09-06 11:53:16 +00:00
|
|
|
ufile.content_type, base64.b64encode(data)]
|
2011-05-23 14:52:19 +00:00
|
|
|
except Exception, e:
|
|
|
|
args = [False, e.message]
|
|
|
|
return out % (simplejson.dumps(callback), simplejson.dumps(args))
|
|
|
|
|
2011-05-26 21:06:41 +00:00
|
|
|
@openerpweb.httprequest
|
2011-09-06 11:53:16 +00:00
|
|
|
def upload_attachment(self, req, callback, model, id, ufile):
|
2011-08-18 18:51:45 +00:00
|
|
|
Model = req.session.model('ir.attachment')
|
2012-12-14 15:29:57 +00:00
|
|
|
out = """<script language="javascript" type="text/javascript">
|
|
|
|
var win = window.top.window;
|
|
|
|
win.jQuery(win).trigger(%s, %s);
|
|
|
|
</script>"""
|
2011-05-26 21:06:41 +00:00
|
|
|
try:
|
|
|
|
attachment_id = Model.create({
|
|
|
|
'name': ufile.filename,
|
2011-09-06 11:53:16 +00:00
|
|
|
'datas': base64.encodestring(ufile.read()),
|
2012-01-11 12:55:01 +00:00
|
|
|
'datas_fname': ufile.filename,
|
2011-05-26 21:06:41 +00:00
|
|
|
'res_model': model,
|
|
|
|
'res_id': int(id)
|
2012-11-26 10:54:50 +00:00
|
|
|
}, req.context)
|
2011-05-26 21:06:41 +00:00
|
|
|
args = {
|
|
|
|
'filename': ufile.filename,
|
|
|
|
'id': attachment_id
|
|
|
|
}
|
2012-12-14 15:29:57 +00:00
|
|
|
except xmlrpclib.Fault, e:
|
2012-12-07 08:09:46 +00:00
|
|
|
args = {'error':e.faultCode }
|
2011-05-26 21:06:41 +00:00
|
|
|
return out % (simplejson.dumps(callback), simplejson.dumps(args))
|
|
|
|
|
2012-12-11 11:18:24 +00:00
|
|
|
@openerpweb.httprequest
|
|
|
|
def company_logo(self, req, dbname=None):
|
2012-12-15 19:26:59 +00:00
|
|
|
# TODO add etag, refactor to use /image code for etag
|
2012-12-11 11:18:24 +00:00
|
|
|
uid = None
|
|
|
|
if req.session._db:
|
|
|
|
dbname = req.session._db
|
|
|
|
uid = req.session._uid
|
|
|
|
elif dbname is None:
|
|
|
|
dbname = db_monodb(req)
|
|
|
|
|
2013-04-04 14:31:44 +00:00
|
|
|
if not uid:
|
2012-12-11 11:18:24 +00:00
|
|
|
uid = openerp.SUPERUSER_ID
|
|
|
|
|
|
|
|
if not dbname:
|
|
|
|
image_data = self.placeholder(req, 'logo.png')
|
|
|
|
else:
|
2013-04-05 13:47:47 +00:00
|
|
|
try:
|
|
|
|
# create an empty registry
|
2013-05-06 09:27:51 +00:00
|
|
|
registry = openerp.modules.registry.Registry(dbname)
|
2013-04-05 13:47:47 +00:00
|
|
|
with registry.cursor() as cr:
|
|
|
|
cr.execute("""SELECT c.logo_web
|
|
|
|
FROM res_users u
|
|
|
|
LEFT JOIN res_company c
|
|
|
|
ON c.id = u.company_id
|
|
|
|
WHERE u.id = %s
|
|
|
|
""", (uid,))
|
|
|
|
row = cr.fetchone()
|
|
|
|
if row and row[0]:
|
|
|
|
image_data = str(row[0]).decode('base64')
|
|
|
|
else:
|
|
|
|
image_data = self.placeholder(req, 'nologo.png')
|
|
|
|
except Exception:
|
|
|
|
image_data = self.placeholder(req, 'logo.png')
|
|
|
|
|
2012-12-11 11:18:24 +00:00
|
|
|
headers = [
|
|
|
|
('Content-Type', 'image/png'),
|
|
|
|
('Content-Length', len(image_data)),
|
|
|
|
]
|
|
|
|
return req.make_response(image_data, headers)
|
|
|
|
|
2011-03-02 18:56:06 +00:00
|
|
|
class Action(openerpweb.Controller):
    """Controller resolving and running ir.actions.* records."""
    _cp_path = "/web/action"

    @openerpweb.jsonrequest
    def load(self, req, action_id, do_not_eval=False):
        """Read an action given its numeric id or a "module.xmlid" reference.

        :returns: the cleaned action description, or False when it cannot
                  be resolved or read.
        """
        Actions = req.session.model('ir.actions.actions')
        try:
            action_id = int(action_id)
        except ValueError:
            # not a numeric id: try resolving it as an external identifier
            try:
                module, xmlid = action_id.split('.', 1)
                model, action_id = req.session.model('ir.model.data').get_object_reference(module, xmlid)
                assert model.startswith('ir.actions.')
            except Exception:
                action_id = 0   # force failed read

        value = False
        base_action = Actions.read([action_id], ['type'], req.context)
        if base_action:
            action_type = base_action[0]['type']
            ctx = {}
            if action_type == 'ir.actions.report.xml':
                # bin_size set for reports — presumably to avoid fetching the
                # report binaries themselves on this read
                ctx.update({'bin_size': True})
            ctx.update(req.context)
            records = req.session.model(action_type).read([action_id], False, ctx)
            if records:
                value = clean_action(req, records[0])
        return value

    @openerpweb.jsonrequest
    def run(self, req, action_id):
        """Execute a server action; return its follow-up action or False."""
        result = req.session.model('ir.actions.server').run(
            [action_id], req.context)
        return clean_action(req, result) if result else False
|
2011-07-13 10:26:12 +00:00
|
|
|
|
2013-04-23 21:06:44 +00:00
|
|
|
class Export(openerpweb.Controller):
    # JSON-RPC endpoints backing the export dialog: list the available
    # export formats and walk a model's exportable fields (recursively
    # through relational fields).
    _cp_path = "/web/export"

    @openerpweb.jsonrequest
    def formats(self, req):
        """ Returns all valid export formats

        :returns: for each export format, a pair of identifier and printable name
        :rtype: [(str, str)]
        """
        # any controller registered under /web/export/* that exposes a
        # `fmt` attribute is considered an export format
        return sorted([
            controller.fmt
            for path, controller in openerpweb.controllers_path.iteritems()
            if path.startswith(self._cp_path)
            if hasattr(controller, 'fmt')
        ], key=operator.itemgetter("label"))

    def fields_get(self, req, model):
        # Thin wrapper: fields_get() on `model` with the request context.
        Model = req.session.model(model)
        fields = Model.fields_get(False, req.context)
        return fields

    @openerpweb.jsonrequest
    def get_fields(self, req, model, prefix='', parent_name= '',
                   import_compat=True, parent_field_type=None,
                   exclude=None):
        """ Return one level of `model`'s exportable fields as records for
        the export dialog's tree ({id, string, value, children, ...}).

        :param str prefix: slash-separated path of field names leading here
        :param str parent_name: slash-separated path of field labels
        :param bool import_compat: import-compatible mode — hides 'id',
            skips excluded/always-readonly fields, restricts children
        :param parent_field_type: type of the field we recursed through
        :param exclude: field names to omit (import_compat only)
        """
        # in import-compatible mode a many2one is exported by (xml)id only:
        # no sub-fields are offered
        if import_compat and parent_field_type == "many2one":
            fields = {}
        else:
            fields = self.fields_get(req, model)

        if import_compat:
            fields.pop('id', None)
        else:
            # expose the database id under the '.id' pseudo-field
            fields['.id'] = fields.pop('id', {'string': 'ID'})

        # stable, label-ordered listing for the UI
        fields_sequence = sorted(fields.iteritems(),
            key=lambda field: openerp.tools.ustr(field[1].get('string', '')))

        records = []
        for field_name, field in fields_sequence:
            if import_compat:
                if exclude and field_name in exclude:
                    continue
                if field.get('readonly'):
                    # If none of the field's states unsets readonly, skip the field
                    if all(dict(attrs).get('readonly', True)
                           for attrs in field.get('states', {}).values()):
                        continue
            if not field.get('exportable', True):
                continue

            id = prefix + (prefix and '/'or '') + field_name
            name = parent_name + (parent_name and '/' or '') + field['string']
            record = {'id': id, 'string': name,
                      'value': id, 'children': False,
                      'field_type': field.get('type'),
                      'required': field.get('required'),
                      'relation_field': field.get('relation_field')}
            records.append(record)

            # relational fields may be expanded, but only up to 3 levels deep
            if len(name.split('/')) < 3 and 'relation' in field:
                ref = field.pop('relation')
                record['value'] += '/id'
                record['params'] = {'model': ref, 'prefix': id, 'name': name}

                if not import_compat or field['type'] == 'one2many':
                    # m2m field in import_compat is childless
                    record['children'] = True

        return records

    @openerpweb.jsonrequest
    def namelist(self,req, model, export_id):
        """ Return [{'name', 'label'}] for each field of the stored export
        list `export_id` (an ir.exports record).
        """
        # TODO: namelist really has no reason to be in Python (although itertools.groupby helps)
        export = req.session.model("ir.exports").read([export_id])[0]
        export_fields_list = req.session.model("ir.exports.line").read(
            export['export_fields'])

        fields_data = self.fields_info(
            req, model, map(operator.itemgetter('name'), export_fields_list))

        # NOTE(review): raises KeyError if a stored export line references a
        # field that fields_info no longer resolves (e.g. removed field) —
        # confirm whether stale lines can exist
        return [
            {'name': field['name'], 'label': fields_data[field['name']]}
            for field in export_fields_list
        ]

    def fields_info(self, req, model, export_fields):
        """ Map each path in `export_fields` (e.g. 'partner_id/name') to its
        human-readable slash-separated label.
        """
        info = {}
        fields = self.fields_get(req, model)
        if ".id" in export_fields:
            fields['.id'] = fields.pop('id', {'string': 'ID'})

        # To make fields retrieval more efficient, fetch all sub-fields of a
        # given field at the same time. Because the order in the export list is
        # arbitrary, this requires ordering all sub-fields of a given field
        # together so they can be fetched at the same time
        #
        # Works the following way:
        # * sort the list of fields to export, the default sorting order will
        #   put the field itself (if present, for xmlid) and all of its
        #   sub-fields right after it
        # * then, group on: the first field of the path (which is the same for
        #   a field and for its subfields and the length of splitting on the
        #   first '/', which basically means grouping the field on one side and
        #   all of the subfields on the other. This way, we have the field (for
        #   the xmlid) with length 1, and all of the subfields with the same
        #   base but a length "flag" of 2
        # * if we have a normal field (length 1), just add it to the info
        #   mapping (with its string) as-is
        # * otherwise, recursively call fields_info via graft_subfields.
        #   all graft_subfields does is take the result of fields_info (on the
        #   field's model) and prepend the current base (current field), which
        #   rebuilds the whole sub-tree for the field
        #
        # result: because we're not fetching the fields_get for half the
        # database models, fetching a namelist with a dozen fields (including
        # relational data) falls from ~6s to ~300ms (on the leads model).
        # export lists with no sub-fields (e.g. import_compatible lists with
        # no o2m) are even more efficient (from the same 6s to ~170ms, as
        # there's a single fields_get to execute)
        for (base, length), subfields in itertools.groupby(
                sorted(export_fields),
                lambda field: (field.split('/', 1)[0], len(field.split('/', 1)))):
            subfields = list(subfields)
            if length == 2:
                # subfields is a seq of $base/*rest, and not loaded yet
                info.update(self.graft_subfields(
                    req, fields[base]['relation'], base, fields[base]['string'],
                    subfields
                ))
            elif base in fields:
                info[base] = fields[base]['string']

        return info

    def graft_subfields(self, req, model, prefix, prefix_string, fields):
        # Recurse into `model` for the sub-paths of `fields`, re-prefixing
        # each (path, label) pair with the parent field's name and label.
        export_fields = [field.split('/', 1)[1] for field in fields]
        return (
            (prefix + '/' + k, prefix_string + '/' + v)
            for k, v in self.fields_info(req, model, export_fields).iteritems())
|
2011-07-22 12:52:14 +00:00
|
|
|
|
2013-04-23 21:06:44 +00:00
|
|
|
class ExportFormat(object):
    """Common machinery for export controllers: subclasses supply the
    serialization (content type, filename, data conversion) and inherit
    the shared HTTP entry point.
    """

    @property
    def content_type(self):
        """ Provides the format's content type """
        raise NotImplementedError()

    def filename(self, base):
        """ Creates a valid filename for the format (with extension) from the
        provided base name (exension-less)
        """
        raise NotImplementedError()

    def from_data(self, fields, rows):
        """ Conversion method from OpenERP's export data to whatever the
        current export class outputs

        :params list fields: a list of fields to export
        :params list rows: a list of records to export
        :returns:
        :rtype: bytes
        """
        raise NotImplementedError()

    @openerpweb.httprequest
    def index(self, req, data, token):
        """Run the export described by the JSON payload ``data`` and stream
        the serialized result back as a file download.
        """
        params = simplejson.loads(data)
        model = params['model']
        fields = params['fields']
        ids = params['ids']
        domain = params['domain']
        import_compat = params['import_compat']

        Model = req.session.model(model)
        context = dict(req.context or {}, **params.get('context', {}))
        # no explicit ids: export every record matching the domain
        if not ids:
            ids = Model.search(domain, 0, False, False, context)

        field_names = map(operator.itemgetter('name'), fields)
        rows = Model.export_data(ids, field_names, context).get('datas',[])

        if import_compat:
            column_headers = field_names
        else:
            column_headers = [val['label'].strip() for val in fields]

        return req.make_response(
            self.from_data(column_headers, rows),
            headers=[('Content-Disposition',
                      content_disposition(self.filename(model), req)),
                     ('Content-Type', self.content_type)],
            cookies={'fileToken': token})
|
2011-08-30 13:06:32 +00:00
|
|
|
|
2013-04-23 21:06:44 +00:00
|
|
|
class CSVExport(ExportFormat, http.Controller):
    """CSV serialization of export data."""
    _cp_path = '/web/export/csv'
    fmt = {'tag': 'csv', 'label': 'CSV'}

    @property
    def content_type(self):
        return 'text/csv;charset=utf8'

    def filename(self, base):
        return base + '.csv'

    def from_data(self, fields, rows):
        """Serialize header + rows into a fully-quoted, UTF-8 encoded CSV blob."""
        buf = StringIO()
        writer = csv.writer(buf, quoting=csv.QUOTE_ALL)

        writer.writerow([name.encode('utf-8') for name in fields])

        for record in rows:
            cells = []
            for value in record:
                if isinstance(value, basestring):
                    # embedded newlines/tabs would break the tabular layout
                    value = value.replace('\n',' ').replace('\t',' ')
                    try:
                        value = value.encode('utf-8')
                    except UnicodeError:
                        # best effort: keep the value as-is if it won't encode
                        pass
                cells.append(None if value is False else value)
            writer.writerow(cells)

        output = buf.getvalue()
        buf.close()
        return output
|
|
|
|
|
2013-04-23 21:06:44 +00:00
|
|
|
class ExcelExport(ExportFormat, http.Controller):
    """Excel (.xls) serialization of export data, backed by xlwt."""
    _cp_path = '/web/export/xls'
    fmt = {
        'tag': 'xls',
        'label': 'Excel',
        'error': None if xlwt else "XLWT required"
    }

    @property
    def content_type(self):
        return 'application/vnd.ms-excel'

    def filename(self, base):
        return base + '.xls'

    def from_data(self, fields, rows):
        """Render header + rows into a single-sheet xls workbook."""
        book = xlwt.Workbook()
        sheet = book.add_sheet('Sheet 1')

        for col, header in enumerate(fields):
            sheet.write(0, col, header)
            sheet.col(col).width = 8000 # around 220 pixels

        wrap_style = xlwt.easyxf('align: wrap yes')

        for row_idx, record in enumerate(rows):
            for col_idx, value in enumerate(record):
                if isinstance(value, basestring):
                    value = re.sub("\r", " ", value)
                if value is False:
                    value = None
                sheet.write(row_idx + 1, col_idx, value, wrap_style)

        buf = StringIO()
        book.save(buf)
        contents = buf.getvalue()
        buf.close()
        return contents
|
2011-09-06 11:57:54 +00:00
|
|
|
|
2013-04-23 21:06:44 +00:00
|
|
|
class Reports(openerpweb.Controller):
    # HTTP endpoint generating reports through the "report" RPC service and
    # streaming the result back as a file download.
    _cp_path = "/web/report"
    # delay (in seconds) between polls of the report service
    POLLING_DELAY = 0.25
    # report format -> mime type of the HTTP response
    TYPES_MAPPING = {
        'doc': 'application/vnd.ms-word',
        'html': 'text/html',
        'odt': 'application/vnd.oasis.opendocument.text',
        'pdf': 'application/pdf',
        'sxw': 'application/vnd.sun.xml.writer',
        'xls': 'application/vnd.ms-excel',
    }

    @openerpweb.httprequest
    def index(self, req, action, token):
        """ Run the report described by the JSON-encoded ``action`` and
        return it as a downloadable file.

        Blocks (polling every POLLING_DELAY seconds) until the report
        service flags the report as finished. A ``fileToken`` cookie is set
        so the client can detect the end of the download.
        """
        action = simplejson.loads(action)

        report_srv = req.session.proxy("report")
        context = dict(req.context)
        context.update(action["context"])

        # build the data dict passed to the report service; ids default to
        # the action context's active_ids but 'datas' may override them
        report_data = {}
        report_ids = context["active_ids"]
        if 'report_type' in action:
            report_data['report_type'] = action['report_type']
        if 'datas' in action:
            if 'ids' in action['datas']:
                report_ids = action['datas'].pop('ids')
            report_data.update(action['datas'])

        # schedule the report; report() returns a handle to poll on
        report_id = report_srv.report(
            req.session._db, req.session._uid, req.session._password,
            action["report_name"], report_ids,
            report_data, context)

        # poll until the service reports a final state
        report_struct = None
        while True:
            report_struct = report_srv.report_get(
                req.session._db, req.session._uid, req.session._password, report_id)
            if report_struct["state"]:
                break

            time.sleep(self.POLLING_DELAY)

        # result is base64-encoded, optionally zlib-compressed
        report = base64.b64decode(report_struct['result'])
        if report_struct.get('code') == 'zlib':
            report = zlib.decompress(report)
        report_mimetype = self.TYPES_MAPPING.get(
            report_struct['format'], 'octet-stream')
        # pick a download file name: action name, else the name of the
        # matching ir.actions.report.xml record, else the report_name
        file_name = action.get('name', 'report')
        if 'name' not in action:
            reports = req.session.model('ir.actions.report.xml')
            res_id = reports.search([('report_name', '=', action['report_name']),],
                                    0, False, False, context)
            if len(res_id) > 0:
                file_name = reports.read(res_id[0], ['name'], context)['name']
            else:
                file_name = action['report_name']
        file_name = '%s.%s' % (file_name, report_struct['format'])

        return req.make_response(report,
             headers=[
                 ('Content-Disposition', content_disposition(file_name, req)),
                 ('Content-Type', report_mimetype),
                 ('Content-Length', len(report))],
             cookies={'fileToken': token})
|
2011-09-06 11:57:54 +00:00
|
|
|
|
2012-08-12 21:48:27 +00:00
|
|
|
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
|