2011-03-02 18:56:06 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2011-07-13 10:50:58 +00:00
|
|
|
|
2011-10-05 17:58:26 +00:00
|
|
|
import ast
|
2011-08-04 09:20:43 +00:00
|
|
|
import base64
|
|
|
|
import csv
|
|
|
|
import glob
|
2011-09-05 13:05:38 +00:00
|
|
|
import itertools
|
2012-02-09 16:15:42 +00:00
|
|
|
import logging
|
2011-08-04 09:20:43 +00:00
|
|
|
import operator
|
2012-01-23 10:07:44 +00:00
|
|
|
import datetime
|
2012-02-10 14:00:21 +00:00
|
|
|
import hashlib
|
2011-08-04 09:20:43 +00:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import simplejson
|
2011-08-29 15:52:56 +00:00
|
|
|
import time
|
2011-12-16 00:44:02 +00:00
|
|
|
import urllib2
|
2011-10-05 17:58:26 +00:00
|
|
|
import xmlrpclib
|
2011-09-07 07:28:18 +00:00
|
|
|
import zlib
|
2011-03-10 15:53:45 +00:00
|
|
|
from xml.etree import ElementTree
|
2011-03-24 20:11:25 +00:00
|
|
|
from cStringIO import StringIO
|
2011-03-02 18:56:06 +00:00
|
|
|
|
2012-09-18 13:51:11 +00:00
|
|
|
import babel.messages.pofile
|
2011-12-20 15:05:56 +00:00
|
|
|
import werkzeug.utils
|
2012-02-10 14:00:21 +00:00
|
|
|
import werkzeug.wrappers
|
2012-01-13 09:06:11 +00:00
|
|
|
try:
|
|
|
|
import xlwt
|
|
|
|
except ImportError:
|
|
|
|
xlwt = None
|
2011-10-05 17:58:26 +00:00
|
|
|
|
2012-10-10 17:24:18 +00:00
|
|
|
import openerp
|
|
|
|
|
2012-10-10 20:37:53 +00:00
|
|
|
from .. import http
|
|
|
|
from .. import nonliterals
|
|
|
|
openerpweb = http
|
2011-03-03 14:55:52 +00:00
|
|
|
|
2011-03-02 18:56:06 +00:00
|
|
|
#----------------------------------------------------------
|
2012-08-12 21:48:27 +00:00
|
|
|
# OpenERP Web helpers
|
2011-03-02 18:56:06 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
|
2012-08-18 17:34:37 +00:00
|
|
|
def rjsmin(script):
    """ Minify js with a clever regex.

    Taken from http://opensource.perlig.de/rjsmin
    Apache License, Version 2.0 """
    def subber(match):
        """ Substitution callback: pick the replacement for one match.

        Groups 0-3 capture content that must survive verbatim
        (identifier runs, string literals, regex literals); group 4
        collapses to a single newline; groups 5-7 collapse to a single
        space; everything else (comments, removable whitespace) is
        dropped entirely.
        """
        groups = match.groups()
        for keep in groups[:4]:
            if keep:
                return keep
        if groups[4]:
            return '\n'
        if groups[5] or groups[6] or groups[7]:
            return ' '
        return ''

    # The pattern below is rjsmin's single mega-regex; the script is
    # wrapped in newlines so the boundary lookbehinds always apply,
    # then the result is stripped back.
    result = re.sub(
        r'([^\047"/\000-\040]+)|((?:(?:\047[^\047\\\r\n]*(?:\\(?:[^\r\n]|\r?'
        r'\n|\r)[^\047\\\r\n]*)*\047)|(?:"[^"\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|'
        r'\r)[^"\\\r\n]*)*"))[^\047"/\000-\040]*)|(?:(?<=[(,=:\[!&|?{};\r\n]'
        r')(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/'
        r'))*((?:/(?![\r\n/*])[^/\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*'
        r'(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/\\\[\r\n]*)*/)[^\047"/\000-\040]*'
        r'))|(?:(?<=[\000-#%-,./:-@\[-^`{-~-]return)(?:[\000-\011\013\014\01'
        r'6-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*((?:/(?![\r\n/*])[^/'
        r'\\\[\r\n]*(?:(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]'
        r'*)*\]))[^/\\\[\r\n]*)*/)[^\047"/\000-\040]*))|(?<=[^\000-!#%&(*,./'
        r':-@\[\\^`{|~])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/'
        r'*][^*]*\*+)*/))*(?:((?:(?://[^\r\n]*)?[\r\n]))(?:[\000-\011\013\01'
        r'4\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[^\000-\040"#'
        r'%-\047)*,./:-@\\-^`|-~])|(?<=[^\000-#%-,./:-@\[-^`{-~-])((?:[\000-'
        r'\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=[^'
        r'\000-#%-,./:-@\[-^`{-~-])|(?<=\+)((?:[\000-\011\013\014\016-\040]|'
        r'(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=\+)|(?<=-)((?:[\000-\011\0'
        r'13\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=-)|(?:[\0'
        r'00-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))+|(?:'
        r'(?:(?://[^\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*'
        r']*\*+(?:[^/*][^*]*\*+)*/))*)+', subber, '\n%s\n' % script
    ).strip()
    return result
|
|
|
|
|
2012-08-12 21:48:27 +00:00
|
|
|
def db_list(req):
    """ Return the list of databases visible to the current request,
    filtered through the ``dbfilter`` server option.

    In ``dbfilter``, ``%h`` is substituted with the request's host name
    (without port) and ``%d`` with its leftmost domain label.

    :param req: OpenERP request
    :returns: names of the databases matching the filter
    :rtype: list(str)
    """
    proxy = req.session.proxy("db")
    dbs = proxy.list()
    # host without the port, and its first (sub)domain component
    h = req.httprequest.environ['HTTP_HOST'].split(':')[0]
    d = h.split('.')[0]
    r = openerp.tools.config['dbfilter'].replace('%h', h).replace('%d', d)
    dbs = [i for i in dbs if re.match(r, i)]
    return dbs
|
|
|
|
|
|
|
|
def module_topological_sort(modules):
    """ Return a list of module names sorted so that their dependencies of the
    modules are listed before the module itself

    modules is a dict of {module_name: dependencies}

    :param modules: modules to sort
    :type modules: dict
    :returns: list(str)
    """
    # every module appearing in somebody's dependency list
    # (.values() instead of Py2-only .itervalues(): identical result,
    # keeps the helper importable on Python 3)
    dependencies = set(itertools.chain.from_iterable(modules.values()))
    # incoming edge: dependency on other module (if a depends on b, a has an
    # incoming edge from b, aka there's an edge from b to a)
    # outgoing edge: other module depending on this one

    # [Tarjan 1976], http://en.wikipedia.org/wiki/Topological_sorting#Algorithms
    #L <- Empty list that will contain the sorted nodes
    L = []
    #S <- Set of all nodes with no outgoing edges (modules on which no other
    # module depends)
    S = set(module for module in modules if module not in dependencies)

    visited = set()
    #function visit(node n)
    def visit(n):
        #if n has not been visited yet then
        if n not in visited:
            #mark n as visited
            visited.add(n)
            # n is not a web module: its dependencies can not be resolved
            # here, ignore it (note: it is NOT appended to L)
            if n not in modules: return
            #for each node m with an edge from m to n do (dependencies of n)
            for m in modules[n]:
                visit(m)
            #add n to L
            L.append(n)
    #for each node n in S do
    for n in S:
        visit(n)
    return L
|
|
|
|
|
|
|
|
def module_installed(req):
    """ Return the names of the modules installed in the request's database
    and known to the web client, topologically sorted (dependencies first).
    """
    # Candidates module the current heuristic is the /static dir
    loadable = openerpweb.addons_manifest.keys()

    # Retrieve database installed modules
    # TODO The following code should move to ir.module.module.list_installed_modules()
    Modules = req.session.model('ir.module.module')
    domain = [('state','=','installed'), ('name','in', loadable)]
    deps_by_module = {}
    for record in Modules.search_read(domain, ['name', 'dependencies_id']):
        dep_names = []
        dep_ids = record.get('dependencies_id')
        if dep_ids:
            Deps = req.session.model('ir.module.module.dependency')
            dep_names = [d['name'] for d in Deps.read(dep_ids, ['name'])]
        deps_by_module[record['name']] = dep_names

    return module_topological_sort(deps_by_module)
|
|
|
|
|
2012-10-02 22:02:58 +00:00
|
|
|
def module_installed_bypass_session(dbname):
    """ Variant of ``module_installed`` usable without an authenticated
    session: queries the registry of ``dbname`` directly, as superuser.

    Best-effort: on any failure (no local registry, db unreachable, ...)
    the partial/empty result is sorted and returned rather than raising,
    since the web client boot must not fail on this.

    :param str dbname: database to inspect
    :returns: installed, loadable module names, dependencies first
    :rtype: list(str)
    """
    loadable = openerpweb.addons_manifest.keys()
    modules = {}
    try:
        import openerp.modules.registry
        registry = openerp.modules.registry.RegistryManager.get(dbname)
        with registry.cursor() as cr:
            m = registry.get('ir.module.module')
            # TODO The following code should move to ir.module.module.list_installed_modules()
            domain = [('state','=','installed'), ('name','in', loadable)]
            # no session here: run as superuser (uid 1)
            ids = m.search(cr, 1, domain)
            for module in m.read(cr, 1, ids, ['name', 'dependencies_id']):
                modules[module['name']] = []
                deps = module.get('dependencies_id')
                if deps:
                    deps_read = registry.get('ir.module.module.dependency').read(cr, 1, deps, ['name'])
                    dependencies = [i['name'] for i in deps_read]
                    modules[module['name']] = dependencies
    except Exception:
        # deliberate best-effort: swallow registry/db errors and fall
        # through with whatever was collected so far
        pass
    sorted_modules = module_topological_sort(modules)
    return sorted_modules
|
|
|
|
|
2012-08-12 21:48:27 +00:00
|
|
|
def module_boot(req):
    """ Compute the modules the web client must load at startup:
    the server-wide web modules, plus — when exactly one database is
    visible — the modules installed in that database. """
    server_wide_modules = openerp.conf.server_wide_modules or ['web']
    serverside = [m for m in server_wide_modules
                  if m in openerpweb.addons_manifest]

    dbside = []
    # if only one db load every module at boot
    dbs = []
    try:
        dbs = db_list(req)
    except xmlrpclib.Fault:
        # ignore access denied
        pass
    if len(dbs) == 1:
        installed = module_installed_bypass_session(dbs[0])
        dbside = [m for m in installed if m not in serverside]

    return serverside + dbside
|
2011-11-02 11:11:05 +00:00
|
|
|
|
|
|
|
def concat_xml(file_list):
    """Concatenate xml files

    The children of every file's root element are appended under a single
    root element named after the first file's root tag.

    :param list(str) file_list: list of files to check
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    checksum = hashlib.new('sha1')
    if not file_list:
        return '', checksum.hexdigest()

    root = None
    for fname in file_list:
        with open(fname, 'rb') as fp:
            contents = fp.read()
        checksum.update(contents)
        # parse the bytes already read instead of seeking back and
        # re-reading the file through the parser
        xml = ElementTree.fromstring(contents)

        if root is None:
            root = ElementTree.Element(xml.tag)
        #elif root.tag != xml.tag:
        #    raise ValueError("Root tags missmatch: %r != %r" % (root.tag, xml.tag))

        # iterate the element directly rather than via the deprecated
        # getchildren()
        for child in xml:
            root.append(child)
    return ElementTree.tostring(root, 'utf-8'), checksum.hexdigest()
|
2011-11-02 11:11:05 +00:00
|
|
|
|
2012-01-13 15:01:10 +00:00
|
|
|
def concat_files(file_list, reader=None, intersperse=""):
    """ Concatenates contents of all provided files

    :param list(str) file_list: list of files to check
    :param function reader: reading procedure for each file
    :param str intersperse: string to intersperse between file contents
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    checksum = hashlib.new('sha1')
    if not file_list:
        return '', checksum.hexdigest()

    if reader is None:
        # default reader: the raw bytes of the file; any decoding/encoding
        # concern is left to a caller-supplied reader
        def reader(f):
            with open(f, 'rb') as fp:
                return fp.read()

    pieces = []
    for fname in file_list:
        piece = reader(fname)
        checksum.update(piece)
        pieces.append(piece)

    return intersperse.join(pieces), checksum.hexdigest()
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2012-08-18 17:34:37 +00:00
|
|
|
def concat_js(file_list):
    """ Concatenate then minify the provided javascript files.

    :returns: (minified_concatenation, checksum)
    """
    raw, checksum = concat_files(file_list, intersperse=';')
    return rjsmin(raw), checksum
|
|
|
|
|
2012-08-08 11:41:10 +00:00
|
|
|
def manifest_glob(req, addons, key):
    """ Expand the glob patterns declared under ``key`` in the manifest of
    every requested addon.

    :param addons: comma-separated addon names, or None for the boot list
    :param str key: manifest entry to expand ('js', 'css', 'qweb', ...)
    :returns: list of (filesystem_path, web_path) pairs
    """
    if addons is None:
        addons = module_boot(req)
    else:
        addons = addons.split(',')

    result = []
    for addon in addons:
        manifest = openerpweb.addons_manifest.get(addon, None)
        if not manifest:
            continue
        # normalize so the addons path does not end with a separator
        addons_path = os.path.join(manifest['addons_path'], '')[:-1]
        for pattern in manifest.get(key, []):
            for path in glob.glob(os.path.normpath(os.path.join(addons_path, addon, pattern))):
                result.append((path, path[len(addons_path):]))
    return result
|
|
|
|
|
|
|
|
def manifest_list(req, mods, extension):
    """ List the web paths to serve for the given asset ``extension``.

    Outside debug mode a single bundled URL is returned; in debug mode
    each file is listed individually, with a mtime-based cache buster
    unless the request opts out via ``no_sugar``.
    """
    if not req.debug:
        path = '/web/webclient/' + extension
        if mods is not None:
            path += '?mods=' + mods
        return [path]

    files = manifest_glob(req, mods, extension)
    query_string = req.httprequest.environ["QUERY_STRING"]
    referer = req.httprequest.environ.get('HTTP_REFERER', '')
    i_am_diabetic = query_string.count("no_sugar") >= 1 or \
                    referer.count("no_sugar") >= 1
    if i_am_diabetic:
        return [wp for _fp, wp in files]
    return ['%s?debug=%s' % (wp, os.path.getmtime(fp)) for fp, wp in files]
|
2011-09-30 20:10:18 +00:00
|
|
|
|
2012-08-08 11:41:10 +00:00
|
|
|
def get_last_modified(files):
    """ Returns the modification time of the most recently modified
    file provided

    :param list(str) files: names of files to check
    :return: most recent modification time amongst the fileset
    :rtype: datetime.datetime
    """
    # materialize: `files` may be a generator and is tested for emptiness
    files = list(files)
    if not files:
        # epoch fallback so any If-Modified-Since comparison succeeds
        return datetime.datetime(1970, 1, 1)
    return max(datetime.datetime.fromtimestamp(os.path.getmtime(f))
               for f in files)
|
|
|
|
|
|
|
|
def make_conditional(req, response, last_modified=None, etag=None):
    """ Makes the provided response conditional based upon the request,
    and mandates revalidation from clients

    Uses Werkzeug's own :meth:`ETagResponseMixin.make_conditional`, after
    setting ``last_modified`` and ``etag`` correctly on the response object

    :param req: OpenERP request
    :type req: web.common.http.WebRequest
    :param response: Werkzeug response
    :type response: werkzeug.wrappers.Response
    :param datetime.datetime last_modified: last modification date of the response content
    :param str etag: some sort of checksum of the content (deep etag)
    :return: the response object provided
    :rtype: werkzeug.wrappers.Response
    """
    # max-age=0 + must-revalidate: clients may cache the entry but have to
    # check with the server (If-Modified-Since / If-None-Match) before
    # reusing it
    response.cache_control.must_revalidate = True
    response.cache_control.max_age = 0
    if last_modified:
        response.last_modified = last_modified
    if etag:
        response.set_etag(etag)
    return response.make_conditional(req.httprequest)
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2012-08-12 21:48:27 +00:00
|
|
|
def login_and_redirect(req, db, login, key, redirect_url='/'):
    """ Authenticate the request's session against ``db`` with the given
    credentials, then redirect the client to ``redirect_url`` with the
    session cookie set. """
    req.session.authenticate(db, login, key, {})
    return set_cookie_and_redirect(req, redirect_url)
|
|
|
|
|
|
|
|
def set_cookie_and_redirect(req, redirect_url):
    """ Build a 303 See Other redirect to ``redirect_url`` carrying the
    current session id in the ``instance0|session_id`` cookie. """
    redirect = werkzeug.utils.redirect(redirect_url, 303)
    # keep the Location header exactly as given (it may be relative)
    redirect.autocorrect_location_header = False
    # session id is json-encoded then url-quoted to be cookie-safe
    cookie_val = urllib2.quote(simplejson.dumps(req.session_id))
    redirect.set_cookie('instance0|session_id', cookie_val)
    return redirect
|
|
|
|
|
|
|
|
def eval_context_and_domain(session, context, domain=None):
    """ Evaluate the provided (possibly non-literal) context and domain
    through the session.

    :returns: (evaluated_context, evaluated_domain) pair
    """
    evaluated_context = session.eval_context(context)
    # should we give the evaluated context as an evaluation context to the domain?
    evaluated_domain = session.eval_domain(domain or [])
    return evaluated_context, evaluated_domain
|
|
|
|
|
|
|
|
def load_actions_from_ir_values(req, key, key2, models, meta):
    """ Fetch the actions bound to ``models`` through ir.values and clean
    each of them up for the web client.

    :returns: list of (id, name, cleaned_action) triples
    """
    context = req.session.eval_context(req.context)
    values_model = req.session.model('ir.values')
    bound = values_model.get(key, key2, models, meta, context)

    return [(id, name, clean_action(req, action))
            for id, name, action in bound]
|
|
|
|
|
|
|
|
def clean_action(req, action, do_not_eval=False):
    """ Normalize an action descriptor for the web client: ensure ``flags``
    and ``type`` keys exist, resolve string ``context``/``domain`` values,
    and fold view modes for window actions.

    :param req: OpenERP request
    :param dict action: action descriptor, modified in place
    :param bool do_not_eval: if True, wrap string context/domain via
        parse_context/parse_domain instead of evaluating them server-side
    :returns: the cleaned action (possibly with fixed view modes)
    """
    action.setdefault('flags', {})

    context = req.session.eval_context(req.context)
    eval_ctx = req.session.evaluation_context(context)

    if not do_not_eval:
        # values come from the server, we can just eval them
        # (NOTE: eval of server-provided strings — trusted input only)
        if action.get('context') and isinstance(action.get('context'), basestring):
            action['context'] = eval( action['context'], eval_ctx ) or {}

        if action.get('domain') and isinstance(action.get('domain'), basestring):
            action['domain'] = eval( action['domain'], eval_ctx ) or []
    else:
        if 'context' in action:
            action['context'] = parse_context(action['context'], req.session)
        if 'domain' in action:
            action['domain'] = parse_domain(action['domain'], req.session)

    # default action type: close the current window
    action_type = action.setdefault('type', 'ir.actions.act_window_close')
    if action_type == 'ir.actions.act_window':
        return fix_view_modes(action)
    return action
|
|
|
|
|
|
|
|
# I think generate_views,fix_view_modes should go into js ActionManager
|
|
|
|
def generate_views(action):
    """
    While the server generates a sequence called "views" computing dependencies
    between a bunch of stuff for views coming directly from the database
    (the ``ir.actions.act_window model``), it's also possible for e.g. buttons
    to return custom view dictionaries generated on the fly.

    In that case, there is no ``views`` key available on the action.

    Since the web client relies on ``action['views']``, generate it here from
    ``view_mode`` and ``view_id``.

    Currently handles two different cases:

    * no view_id, multiple view_mode
    * single view_id, single view_mode

    :param dict action: action descriptor dictionary to generate a views key for
    """
    view_id = action.get('view_id') or False
    if isinstance(view_id, (list, tuple)):
        # many2one read as (id, display_name): keep only the id
        view_id = view_id[0]

    # providing at least one view mode is a requirement, not an option
    modes = action['view_mode'].split(',')

    if len(modes) == 1:
        action['views'] = [(view_id, modes[0])]
        return

    if view_id:
        raise ValueError('Non-db action dictionaries should provide '
                         'either multiple view modes or a single view '
                         'mode and an optional view id.\n\n Got view '
                         'modes %r and view id %r for action %r' % (
                             modes, view_id, action))
    action['views'] = [(False, mode) for mode in modes]
|
|
|
|
|
|
|
|
def fix_view_modes(action):
    """ For historical reasons, OpenERP has weird dealings in relation to
    view_mode and the view_type attribute (on window actions):

    * one of the view modes is ``tree``, which stands for both list views
      and tree views
    * the choice is made by checking ``view_type``, which is either
      ``form`` for a list view or ``tree`` for an actual tree view

    This methods simply folds the view_type into view_mode by adding a
    new view mode ``list`` which is the result of the ``tree`` view_mode
    in conjunction with the ``form`` view_type.

    TODO: this should go into the doc, some kind of "peculiarities" section

    :param dict action: an action descriptor
    :returns: nothing, the action is modified in place
    """
    if not action.get('views'):
        generate_views(action)

    if action.pop('view_type', 'form') != 'form':
        # a real tree view: nothing to fold
        return action

    if 'view_mode' in action:
        action['view_mode'] = ','.join(
            'list' if mode == 'tree' else mode
            for mode in action['view_mode'].split(','))
    action['views'] = [
        [id, 'list' if mode == 'tree' else mode]
        for id, mode in action['views']
    ]

    return action
|
|
|
|
|
|
|
|
def parse_domain(domain, session):
    """ Parses an arbitrary string containing a domain, transforms it
    to either a literal domain or a :class:`nonliterals.Domain`

    :param domain: the domain to parse, if the domain is not a string it
                   is assumed to be a literal domain and is returned as-is
    :param session: Current OpenERP session
    :type session: openerpweb.OpenERPSession
    """
    if isinstance(domain, basestring):
        try:
            return ast.literal_eval(domain)
        except ValueError:
            # not a literal: defer to server-side evaluation
            return nonliterals.Domain(session, domain)
    return domain
|
2012-08-12 21:48:27 +00:00
|
|
|
|
|
|
|
def parse_context(context, session):
    """ Parses an arbitrary string containing a context, transforms it
    to either a literal context or a :class:`nonliterals.Context`

    :param context: the context to parse, if the context is not a string it
                    is assumed to be a literal domain and is returned as-is
    :param session: Current OpenERP session
    :type session: openerpweb.OpenERPSession
    """
    if isinstance(context, basestring):
        try:
            return ast.literal_eval(context)
        except ValueError:
            # not a literal: defer to server-side evaluation
            return nonliterals.Context(session, context)
    return context
|
2012-09-18 07:28:02 +00:00
|
|
|
|
2012-09-18 13:51:11 +00:00
|
|
|
def _local_web_translations(trans_file):
|
|
|
|
messages = []
|
|
|
|
try:
|
|
|
|
with open(trans_file) as t_file:
|
|
|
|
po = babel.messages.pofile.read_po(t_file)
|
|
|
|
except Exception:
|
|
|
|
return
|
|
|
|
for x in po:
|
|
|
|
if x.id and x.string and "openerp-web" in x.auto_comments:
|
|
|
|
messages.append({'id': x.id, 'string': x.string})
|
|
|
|
return messages
|
2012-09-18 07:28:02 +00:00
|
|
|
|
2012-11-10 21:13:43 +00:00
|
|
|
def xml2json_from_elementtree(el, preserve_whitespaces=False):
    """ xml2json-direct
    Simple and straightforward XML-to-JSON converter in Python
    New BSD Licensed
    http://code.google.com/p/xml2json-direct/
    """
    res = {}
    if el.tag.startswith("{"):
        # namespaced tag, serialized by ElementTree as '{uri}localname'
        ns, local = el.tag.rsplit("}", 1)
        res["tag"] = local
        res["namespace"] = ns[1:]
    else:
        res["tag"] = el.tag
    res["attrs"] = dict(el.items())

    children = []
    # text/tail nodes are kept only when non-blank, unless whitespace
    # preservation was requested
    if el.text and (preserve_whitespaces or el.text.strip() != ''):
        children.append(el.text)
    for child in el:
        children.append(xml2json_from_elementtree(child, preserve_whitespaces))
        if child.tail and (preserve_whitespaces or child.tail.strip() != ''):
            children.append(child.tail)
    res["children"] = children
    return res
|
2012-09-18 07:28:02 +00:00
|
|
|
|
2012-10-30 11:51:13 +00:00
|
|
|
def content_disposition(filename, req):
    """ Craft a Content-Disposition "attachment" header value for
    ``filename``, working around browser-specific encoding quirks. """
    filename = filename.encode('utf8')
    escaped = urllib2.quote(filename)
    browser = req.httprequest.user_agent.browser
    version = int((req.httprequest.user_agent.version or '0').split('.')[0])
    if browser == 'msie' and version < 9:
        # old IE: percent-encoded filename, no RFC 5987 support
        return "attachment; filename=%s" % escaped
    if browser == 'safari':
        # Safari: raw utf-8 bytes
        return "attachment; filename=%s" % filename
    # everyone else: RFC 5987 extended notation
    return "attachment; filename*=UTF-8''%s" % escaped
|
|
|
|
|
|
|
|
|
2012-08-12 21:48:27 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# OpenERP Web web Controllers
|
|
|
|
#----------------------------------------------------------
|
|
|
|
|
2012-08-12 15:15:32 +00:00
|
|
|
html_template = """<!DOCTYPE html>
|
|
|
|
<html style="height: 100%%">
|
|
|
|
<head>
|
|
|
|
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/>
|
|
|
|
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
|
|
|
|
<title>OpenERP</title>
|
|
|
|
<link rel="shortcut icon" href="/web/static/src/img/favicon.ico" type="image/x-icon"/>
|
|
|
|
<link rel="stylesheet" href="/web/static/src/css/full.css" />
|
|
|
|
%(css)s
|
|
|
|
%(js)s
|
|
|
|
<script type="text/javascript">
|
|
|
|
$(function() {
|
|
|
|
var s = new openerp.init(%(modules)s);
|
|
|
|
%(init)s
|
|
|
|
});
|
|
|
|
</script>
|
|
|
|
</head>
|
2012-10-08 10:37:00 +00:00
|
|
|
<body>
|
|
|
|
<!--[if lte IE 8]>
|
2012-11-10 21:13:43 +00:00
|
|
|
<script src="http://ajax.googleapis.com/ajax/libs/chrome-frame/1/CFInstall.min.js"></script>
|
2012-10-08 10:37:00 +00:00
|
|
|
<script>
|
|
|
|
var test = function() {
|
|
|
|
CFInstall.check({
|
|
|
|
mode: "overlay"
|
|
|
|
});
|
|
|
|
};
|
|
|
|
if (window.localStorage && false) {
|
|
|
|
if (! localStorage.getItem("hasShownGFramePopup")) {
|
|
|
|
test();
|
|
|
|
localStorage.setItem("hasShownGFramePopup", true);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
test();
|
|
|
|
}
|
|
|
|
</script>
|
|
|
|
<![endif]-->
|
|
|
|
</body>
|
2012-08-12 15:15:32 +00:00
|
|
|
</html>
|
|
|
|
"""
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2012-08-08 11:41:10 +00:00
|
|
|
class Home(openerpweb.Controller):
    """ Root web controller: serves the web client page and a GET login
    endpoint that authenticates then redirects. """
    _cp_path = '/'

    @openerpweb.httprequest
    def index(self, req, s_action=None, **kw):
        js_tags = ['<script type="text/javascript" src="%s"></script>' % src
                   for src in manifest_list(req, None, 'js')]
        css_tags = ['<link rel="stylesheet" href="%s">' % href
                    for href in manifest_list(req, None, 'css')]

        return html_template % {
            'js': "\n ".join(js_tags),
            'css': "\n ".join(css_tags),
            'modules': simplejson.dumps(module_boot(req)),
            'init': 'var wc = new s.web.WebClient();wc.appendTo($(document.body));'
        }

    @openerpweb.httprequest
    def login(self, req, db, login, key):
        return login_and_redirect(req, db, login, key)
|
2012-08-08 11:41:10 +00:00
|
|
|
|
|
|
|
class WebClient(openerpweb.Controller):
|
|
|
|
_cp_path = "/web/webclient"
|
2011-09-30 20:10:18 +00:00
|
|
|
|
2011-07-22 14:28:24 +00:00
|
|
|
    @openerpweb.jsonrequest
    def csslist(self, req, mods=None):
        """ JSON endpoint: css asset paths for the given modules. """
        return manifest_list(req, mods, 'css')
|
2011-07-22 14:28:24 +00:00
|
|
|
|
|
|
|
    @openerpweb.jsonrequest
    def jslist(self, req, mods=None):
        """ JSON endpoint: javascript asset paths for the given modules. """
        return manifest_list(req, mods, 'js')
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2011-11-02 11:11:05 +00:00
|
|
|
    @openerpweb.jsonrequest
    def qweblist(self, req, mods=None):
        """ JSON endpoint: qweb template paths for the given modules. """
        return manifest_list(req, mods, 'qweb')
|
2012-02-10 14:13:38 +00:00
|
|
|
|
2011-07-22 14:28:24 +00:00
|
|
|
    @openerpweb.httprequest
    def css(self, req, mods=None):
        """ Serve the concatenation of all css assets of ``mods`` as a
        single conditional (304-aware) response, with relative uris in
        each file rewritten to absolute web paths. """
        files = list(manifest_glob(req, mods, 'css'))
        last_modified = get_last_modified(f[0] for f in files)
        if req.httprequest.if_modified_since and req.httprequest.if_modified_since >= last_modified:
            return werkzeug.wrappers.Response(status=304)

        # fs path -> web path, used by the reader to absolutify uris
        file_map = dict(files)

        # relative @import and url(...) references (absolute paths,
        # http(s) urls and data: uris are left untouched)
        rx_import = re.compile(r"""@import\s+('|")(?!'|"|/|https?://)""", re.U)
        rx_url = re.compile(r"""url\s*\(\s*('|"|)(?!'|"|/|https?://|data:)""", re.U)

        def reader(f):
            """read the a css file and absolutify all relative uris"""
            # css files may contain non-ascii: decode to unicode here and
            # re-encode to utf-8 on return, so the concatenation code only
            # ever handles byte strings
            with open(f, 'rb') as fp:
                data = fp.read().decode('utf-8')

            path = file_map[f]
            # convert FS path into web path
            web_dir = '/'.join(os.path.dirname(path).split(os.path.sep))

            data = re.sub(
                rx_import,
                r"""@import \1%s/""" % (web_dir,),
                data,
            )

            data = re.sub(
                rx_url,
                r"""url(\1%s/""" % (web_dir,),
                data,
            )
            return data.encode('utf-8')

        content, checksum = concat_files((f[0] for f in files), reader)

        return make_conditional(
            req, req.make_response(content, [('Content-Type', 'text/css')]),
            last_modified, checksum)
|
2011-07-22 14:28:24 +00:00
|
|
|
|
|
|
|
@openerpweb.httprequest
def js(self, req, mods=None):
    """ Serve the concatenated javascript assets of the requested addons.

    Answers 304 Not Modified when the client's cached copy is still
    current, otherwise returns the full bundle with conditional-caching
    headers attached.
    """
    paths = [entry[0] for entry in manifest_glob(req, mods, 'js')]
    last_modified = get_last_modified(paths)
    since = req.httprequest.if_modified_since
    if since and since >= last_modified:
        return werkzeug.wrappers.Response(status=304)

    content, checksum = concat_js(paths)

    response = req.make_response(
        content, [('Content-Type', 'application/javascript')])
    return make_conditional(req, response, last_modified, checksum)
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2011-11-02 11:11:05 +00:00
|
|
|
@openerpweb.httprequest
def qweb(self, req, mods=None):
    """ Serve the concatenated QWeb XML templates of the requested addons.

    Answers 304 Not Modified when the client's cached copy is still
    current, otherwise returns the merged XML document with
    conditional-caching headers attached.
    """
    paths = [entry[0] for entry in manifest_glob(req, mods, 'qweb')]
    last_modified = get_last_modified(paths)
    since = req.httprequest.if_modified_since
    if since and since >= last_modified:
        return werkzeug.wrappers.Response(status=304)

    content, checksum = concat_xml(paths)

    response = req.make_response(
        content, [('Content-Type', 'text/xml')])
    return make_conditional(req, response, last_modified, checksum)
|
2011-11-02 11:11:05 +00:00
|
|
|
|
2012-09-18 07:28:02 +00:00
|
|
|
@openerpweb.jsonrequest
def bootstrap_translations(self, req, mods):
    """ Load local translations from *.po files, as a temporary solution
        until we have established a valid session. This is meant only
        for translating the login page and db management chrome, using
        the browser's language. """
    lang = req.httprequest.accept_languages.best or 'en'
    # For performance reasons we only load a single translation, so for
    # sub-languages (that should only be partially translated) we load the
    # main language PO instead - that should be enough for the login screen.
    if '-' in lang: # RFC2616 uses '-' separators for sublanguages
        lang = lang.split('-')[0]

    translations_per_module = {}
    for addon_name in mods:
        manifest = openerpweb.addons_manifest[addon_name]
        # only addons explicitly flagged for pre-session ("bootstrap")
        # translation are loaded here
        if not manifest.get('bootstrap'):
            continue
        po_path = os.path.join(
            manifest['addons_path'], addon_name, "i18n", lang + ".po")
        if os.path.exists(po_path):
            translations_per_module[addon_name] = {
                'messages': _local_web_translations(po_path),
            }

    return {"modules": translations_per_module,
            "lang_parameters": None}
|
|
|
|
|
2011-08-11 16:39:33 +00:00
|
|
|
@openerpweb.jsonrequest
def translations(self, req, mods, lang):
    """ Load the web-client translations of ``mods`` for ``lang`` from the
    server's ``ir.translation`` table, along with the formatting
    parameters (direction, date/number formats) of the language itself.
    """
    res_lang = req.session.model('res.lang')
    ids = res_lang.search([("code", "=", lang)])
    lang_params = None
    if ids:
        lang_params = res_lang.read(ids[0], ["direction", "date_format", "time_format",
                                            "grouping", "decimal_point", "thousands_sep"])

    # Regional languages (ll_CC) must inherit/override their parent lang (ll), but this is
    # done server-side when the language is loaded, so we only need to load the user's lang.
    ir_translation = req.session.model('ir.translation')
    translation_domain = [
        ('module', 'in', mods),
        ('lang', '=', lang),
        ('comments', 'like', 'openerp-web'),
        ('value', '!=', False),
        ('value', '!=', ''),
    ]
    messages = ir_translation.search_read(
        translation_domain, ['module', 'src', 'value', 'lang'], order='module')

    # messages come back ordered by module, so groupby yields one run per module
    translations_per_module = {}
    for mod, msg_group in itertools.groupby(messages, key=operator.itemgetter('module')):
        bucket = translations_per_module.setdefault(mod, {'messages': []})
        bucket['messages'].extend(
            {'id': m['src'], 'string': m['value']} for m in msg_group)

    return {"modules": translations_per_module,
            "lang_parameters": lang_params}
|
2011-07-22 14:28:24 +00:00
|
|
|
|
2011-09-05 15:14:20 +00:00
|
|
|
@openerpweb.jsonrequest
def version_info(self, req):
    """ Return the server release version string. """
    return {"version": openerp.release.version}
|
|
|
|
|
2011-12-15 12:07:32 +00:00
|
|
|
class Proxy(openerpweb.Controller):
    """ Controller proxying plain HTTP requests through JSON-RPC calls. """
    _cp_path = '/web/proxy'

    @openerpweb.jsonrequest
    def load(self, req, path):
        """ Proxies an HTTP request through a JSON request.

        It is strongly recommended to not request binary files through this,
        as the result will be a binary data blob as well.

        :param req: OpenERP request
        :param path: actual request path
        :return: file content
        """
        from werkzeug.test import Client
        from werkzeug.wrappers import BaseResponse

        # re-enter the same WSGI application in-process, no network round-trip
        client = Client(req.httprequest.app, BaseResponse)
        return client.get(path).data
|
2011-12-15 12:07:32 +00:00
|
|
|
|
2011-07-13 10:26:12 +00:00
|
|
|
class Database(openerpweb.Controller):
    """ Controller exposing database management operations (list, create,
    drop, backup, restore, admin password change) over the ``db`` XML-RPC
    service of the server.
    """
    _cp_path = "/web/database"

    @openerpweb.jsonrequest
    def get_list(self, req):
        """ Return the list of databases available to this server.

        :return: ``{'db_list': [database names]}``
        """
        dbs = db_list(req)
        return {"db_list": dbs}

    @openerpweb.jsonrequest
    def create(self, req, fields):
        """ Create a new database from a submitted form.

        :param fields: list of ``{'name': ..., 'value': ...}`` form fields;
                       expected names: super_admin_pwd, db_name, demo_data,
                       db_lang, create_admin_pwd
        :return: result of the server-side ``create_database`` call
        """
        # convert the [{'name':..., 'value':...}] form serialization to a dict
        params = dict(map(operator.itemgetter('name', 'value'), fields))
        create_attrs = (
            params['super_admin_pwd'],
            params['db_name'],
            bool(params.get('demo_data')),
            params['db_lang'],
            params['create_admin_pwd']
        )

        return req.session.proxy("db").create_database(*create_attrs)

    @openerpweb.jsonrequest
    def drop(self, req, fields):
        """ Drop an existing database.

        :param fields: form fields holding ``drop_pwd`` (super-admin
                       password) and ``drop_db`` (database name)
        :return: server result on success, or an ``{'error', 'title'}``
                 dict on failure
        """
        password, db = operator.itemgetter(
            'drop_pwd', 'drop_db')(
                dict(map(operator.itemgetter('name', 'value'), fields)))

        try:
            return req.session.proxy("db").drop(password, db)
        except xmlrpclib.Fault, e:
            # the server encodes access errors as "AccessDenied:..." fault codes
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                return {'error': e.faultCode, 'title': 'Drop Database'}
        # any other fault falls through to a generic error payload
        return {'error': 'Could not drop database !', 'title': 'Drop Database'}

    @openerpweb.httprequest
    def backup(self, req, backup_db, backup_pwd, token):
        """ Dump a database and stream it back as a downloadable file.

        :param backup_db: name of the database to dump
        :param backup_pwd: super-admin password
        :param token: opaque integer echoed back in a ``fileToken`` cookie
                      so the client can detect download completion
        """
        try:
            # the db service returns the dump base64-encoded; decode back to bytes
            db_dump = base64.b64decode(
                req.session.proxy("db").dump(backup_pwd, backup_db))
            filename = "%(db)s_%(timestamp)s.dump" % {
                'db': backup_db,
                'timestamp': datetime.datetime.utcnow().strftime(
                    "%Y-%m-%d_%H-%M-%SZ")
            }
            return req.make_response(db_dump,
               [('Content-Type', 'application/octet-stream; charset=binary'),
               ('Content-Disposition', content_disposition(filename, req))],
               {'fileToken': int(token)}
            )
        except xmlrpclib.Fault, e:
            return simplejson.dumps([[],[{'error': e.faultCode, 'title': 'backup Database'}]])

    @openerpweb.httprequest
    def restore(self, req, db_file, restore_pwd, new_db):
        """ Restore an uploaded dump file into a new database.

        :param db_file: uploaded file object (read and base64-encoded for
                        transmission to the db service)
        :param restore_pwd: super-admin password
        :param new_db: name of the database to create
        :return: empty string on success
        """
        try:
            data = base64.b64encode(db_file.read())
            req.session.proxy("db").restore(restore_pwd, new_db, data)
            return ''
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                raise Exception("AccessDenied")
            # NOTE(review): non-AccessDenied faults are swallowed here and the
            # method implicitly returns None — the client gets no error
            # indication. Confirm whether this is intentional.

    @openerpweb.jsonrequest
    def change_password(self, req, fields):
        """ Change the super-admin password of the server.

        :param fields: form fields holding ``old_pwd`` and ``new_pwd``
        :return: server result on success, or an ``{'error', 'title'}``
                 dict on failure
        """
        old_password, new_password = operator.itemgetter(
            'old_pwd', 'new_pwd')(
                dict(map(operator.itemgetter('name', 'value'), fields)))
        try:
            return req.session.proxy("db").change_admin_password(old_password, new_password)
        except xmlrpclib.Fault, e:
            if e.faultCode and e.faultCode.split(':')[0] == 'AccessDenied':
                return {'error': e.faultCode, 'title': 'Change Password'}
        return {'error': 'Error, password not changed !', 'title': 'Change Password'}
|
2011-07-13 10:26:12 +00:00
|
|
|
|
2011-03-10 11:51:23 +00:00
|
|
|
class Session(openerpweb.Controller):
    """ Controller for session lifecycle: authentication, session metadata,
    user password change, language listing, and server-side evaluation of
    non-literal contexts/domains.
    """
    _cp_path = "/web/session"

    def session_info(self, req):
        """ Build the session descriptor sent to the web client.

        :return: dict with session_id, uid, evaluated user context (empty
                 when not logged in), database name and login
        """
        req.session.ensure_valid()
        return {
            "session_id": req.session_id,
            "uid": req.session._uid,
            # only evaluate the user context once authenticated
            "context": req.session.get_context() if req.session._uid else {},
            "db": req.session._db,
            "login": req.session._login,
        }

    @openerpweb.jsonrequest
    def get_session_info(self, req):
        """ JSON-RPC wrapper around :meth:`session_info`. """
        return self.session_info(req)

    @openerpweb.jsonrequest
    def authenticate(self, req, db, login, password, base_location=None):
        """ Authenticate against ``db`` and return the new session info.

        :param base_location: optional base URL forwarded to the server
                              (used e.g. for building absolute links)
        """
        wsgienv = req.httprequest.environ
        # forward a minimal subset of the WSGI environment to the server
        env = dict(
            base_location=base_location,
            HTTP_HOST=wsgienv['HTTP_HOST'],
            REMOTE_ADDR=wsgienv['REMOTE_ADDR'],
        )
        req.session.authenticate(db, login, password, env)

        return self.session_info(req)

    @openerpweb.jsonrequest
    def change_password (self,req,fields):
        """ Change the current user's password.

        :param fields: form fields holding ``old_pwd``, ``new_password``
                       and ``confirm_pwd``
        :return: ``{'new_password': ...}`` on success, otherwise an
                 ``{'error', 'title'}`` dict
        """
        old_password, new_password,confirm_password = operator.itemgetter('old_pwd', 'new_password','confirm_pwd')(
                dict(map(operator.itemgetter('name', 'value'), fields)))
        if not (old_password.strip() and new_password.strip() and confirm_password.strip()):
            return {'error':'All passwords have to be filled.','title': 'Change Password'}
        if new_password != confirm_password:
            return {'error': 'The new password and its confirmation must be identical.','title': 'Change Password'}
        try:
            if req.session.model('res.users').change_password(
                old_password, new_password):
                return {'new_password':new_password}
        except Exception:
            # server raises when the old password does not match
            return {'error': 'Original password incorrect, your password was not changed.', 'title': 'Change Password'}
        return {'error': 'Error, password not changed !', 'title': 'Change Password'}

    @openerpweb.jsonrequest
    def sc_list(self, req):
        """ Return the current user's menu shortcuts (ir.ui.view_sc). """
        return req.session.model('ir.ui.view_sc').get_sc(
            req.session._uid, "ir.ui.menu", req.session.eval_context(req.context))

    @openerpweb.jsonrequest
    def get_lang_list(self, req):
        """ Return the languages installable on the server.

        :return: ``{'lang_list': [...], 'error': ''}`` or an error dict
        """
        try:
            return {
                'lang_list': (req.session.proxy("db").list_lang() or []),
                'error': ""
            }
        except Exception, e:
            # NOTE(review): returns the exception object itself in the JSON
            # payload; serialization depends on the JSON layer — confirm.
            return {"error": e, "title": "Languages"}

    @openerpweb.jsonrequest
    def modules(self, req):
        # return all installed modules. Web client is smart enough to not load a module twice
        return module_installed(req)

    @openerpweb.jsonrequest
    def eval_domain_and_context(self, req, contexts, domains,
                                group_by_seq=None):
        """ Evaluates sequences of domains and contexts, composing them into
        a single context, domain or group_by sequence.

        :param list contexts: list of contexts to merge together. Contexts are
                              evaluated in sequence, all previous contexts
                              are part of their own evaluation context
                              (starting at the session context).
        :param list domains: list of domains to merge together. Domains are
                             evaluated in sequence and appended to one another
                             (implicit AND), their evaluation domain is the
                             result of merging all contexts.
        :param list group_by_seq: list of domains (which may be in a different
                                  order than the ``contexts`` parameter),
                                  evaluated in sequence, their ``'group_by'``
                                  key is extracted if they have one.
        :returns:
            a 3-dict of:

            context (``dict``)
                the global context created by merging all of
                ``contexts``

            domain (``list``)
                the concatenation of all domains

            group_by (``list``)
                a list of fields to group by, potentially empty (in which case
                no group by should be performed)
        """
        context, domain = eval_context_and_domain(req.session,
                                                  nonliterals.CompoundContext(*(contexts or [])),
                                                  nonliterals.CompoundDomain(*(domains or [])))

        group_by_sequence = []
        for candidate in (group_by_seq or []):
            ctx = req.session.eval_context(candidate, context)
            group_by = ctx.get('group_by')
            if not group_by:
                continue
            elif isinstance(group_by, basestring):
                # a single field name groups by that field
                group_by_sequence.append(group_by)
            else:
                # otherwise assume an iterable of field names
                group_by_sequence.extend(group_by)

        return {
            'context': context,
            'domain': domain,
            'group_by': group_by_sequence
        }

    @openerpweb.jsonrequest
    def save_session_action(self, req, the_action):
        """
        This method store an action object in the session object and returns an integer
        identifying that action. The method get_session_action() can be used to get
        back the action.

        :param the_action: The action to save in the session.
        :type the_action: anything
        :return: A key identifying the saved action.
        :rtype: integer
        """
        saved_actions = req.httpsession.get('saved_actions')
        if not saved_actions:
            saved_actions = {"next":0, "actions":{}}
            req.httpsession['saved_actions'] = saved_actions
        # we don't allow more than 10 stored actions
        if len(saved_actions["actions"]) >= 10:
            # evict the oldest entry (smallest key)
            del saved_actions["actions"][min(saved_actions["actions"])]
        key = saved_actions["next"]
        saved_actions["actions"][key] = the_action
        saved_actions["next"] = key + 1
        return key

    @openerpweb.jsonrequest
    def get_session_action(self, req, key):
        """
        Gets back a previously saved action. This method can return None if the action
        was saved since too much time (this case should be handled in a smart way).

        :param key: The key given by save_session_action()
        :type key: integer
        :return: The saved action or None.
        :rtype: anything
        """
        saved_actions = req.httpsession.get('saved_actions')
        if not saved_actions:
            return None
        return saved_actions["actions"].get(key)

    @openerpweb.jsonrequest
    def check(self, req):
        """ Assert the session is still valid; raises server-side otherwise. """
        req.session.assert_valid()
        return None

    @openerpweb.jsonrequest
    def destroy(self, req):
        """ Mark the session for destruction (logout). """
        req.session._suicide = True
|
|
|
|
|
2011-03-02 18:56:06 +00:00
|
|
|
class Menu(openerpweb.Controller):
    """ Controller serving the application menu tree to the web client. """
    _cp_path = "/web/menu"

    @openerpweb.jsonrequest
    def load(self, req):
        """ JSON-RPC endpoint wrapping :meth:`do_load`. """
        return {'data': self.do_load(req)}

    @openerpweb.jsonrequest
    def get_user_roots(self, req):
        """ JSON-RPC endpoint wrapping :meth:`do_get_user_roots`. """
        return self.do_get_user_roots(req)

    def do_get_user_roots(self, req):
        """ Return all root menu ids visible for the session user.

        :param req: A request object, with an OpenERP session attribute
        :type req: < session -> OpenERPSession >
        :return: the root menu ids
        :rtype: list(int)
        """
        s = req.session
        context = s.eval_context(req.context)
        Menus = s.model('ir.ui.menu')
        # If a menu action is defined use its domain to get the root menu items
        user_menu_id = s.model('res.users').read([s._uid], ['menu_id'], context)[0]['menu_id']

        # default: all top-level menus
        menu_domain = [('parent_id', '=', False)]
        if user_menu_id:
            domain_string = s.model('ir.actions.act_window').read([user_menu_id[0]], ['domain'], context)[0]['domain']
            if domain_string:
                # the action domain is stored as a Python literal string
                menu_domain = ast.literal_eval(domain_string)

        return Menus.search(menu_domain, 0, False, False, context)

    def do_load(self, req):
        """ Loads all menu items (all applications and their sub-menus).

        :param req: A request object, with an OpenERP session attribute
        :type req: < session -> OpenERPSession >
        :return: the menu root
        :rtype: dict('children': menu_nodes)
        """
        context = req.session.eval_context(req.context)
        Menus = req.session.model('ir.ui.menu')

        menu_roots = Menus.read(self.do_get_user_roots(req), ['name', 'sequence', 'parent_id', 'action', 'needaction_enabled', 'needaction_counter'], context)
        # synthetic root node holding the top-level applications
        menu_root = {'id': False, 'name': 'root', 'parent_id': [-1, ''], 'children' : menu_roots}

        # menus are loaded fully unlike a regular tree view, cause there are a
        # limited number of items (752 when all 6.1 addons are installed)
        menu_ids = Menus.search([], 0, False, False, context)
        menu_items = Menus.read(menu_ids, ['name', 'sequence', 'parent_id', 'action', 'needaction_enabled', 'needaction_counter'], context)
        # adds roots at the end of the sequence, so that they will overwrite
        # equivalent menu items from full menu read when put into id:item
        # mapping, resulting in children being correctly set on the roots.
        menu_items.extend(menu_roots)

        # make a tree using parent_id
        menu_items_map = dict((menu_item["id"], menu_item) for menu_item in menu_items)
        for menu_item in menu_items:
            if menu_item['parent_id']:
                # parent_id is a (id, display_name) pair when set
                parent = menu_item['parent_id'][0]
            else:
                parent = False
            if parent in menu_items_map:
                menu_items_map[parent].setdefault(
                    'children', []).append(menu_item)

        # sort by sequence a tree using parent_id
        for menu_item in menu_items:
            menu_item.setdefault('children', []).sort(
                key=operator.itemgetter('sequence'))

        return menu_root

    @openerpweb.jsonrequest
    def action(self, req, menu_id):
        """ Return the window action(s) bound to a menu item via ir.values. """
        actions = load_actions_from_ir_values(req,'action', 'tree_but_open',
                                              [('ir.ui.menu', menu_id)], False)
        return {"action": actions}
|
2011-03-02 18:56:06 +00:00
|
|
|
|
|
|
|
class DataSet(openerpweb.Controller):
    """ Generic model access controller: search/read, method calls (with
    non-literal context/domain evaluation), workflow signals and record
    re-sequencing.
    """
    _cp_path = "/web/dataset"

    @openerpweb.jsonrequest
    def search_read(self, req, model, fields=False, offset=0, limit=False, domain=None, sort=None):
        """ JSON-RPC endpoint wrapping :meth:`do_search_read`. """
        return self.do_search_read(req, model, fields, offset, limit, domain, sort)
    def do_search_read(self, req, model, fields=False, offset=0, limit=False, domain=None
                       , sort=None):
        """ Performs a search() followed by a read() (if needed) using the
        provided search criteria

        :param req: a JSON-RPC request object
        :type req: openerpweb.JsonRequest
        :param str model: the name of the model to search on
        :param fields: a list of the fields to return in the result records
        :type fields: [str]
        :param int offset: from which index should the results start being returned
        :param int limit: the maximum number of records to return
        :param list domain: the search domain for the query
        :param list sort: sorting directives
        :returns: A structure (dict) with two keys: ids (all the ids matching
                  the (domain, context) pair) and records (paginated records
                  matching fields selection set)
        :rtype: list
        """
        Model = req.session.model(model)

        context, domain = eval_context_and_domain(
            req.session, req.context, domain)

        ids = Model.search(domain, offset or 0, limit or False, sort or False, context)
        if limit and len(ids) == limit:
            # page is full: a count query is needed to know the total
            length = Model.search_count(domain, context)
        else:
            length = len(ids) + (offset or 0)
        if fields and fields == ['id']:
            # shortcut read if we only want the ids
            return {
                'length': length,
                'records': [{'id': id} for id in ids]
            }

        records = Model.read(ids, fields or False, context)
        # read() does not guarantee order; restore the search() ordering
        records.sort(key=lambda obj: ids.index(obj['id']))
        return {
            'length': length,
            'records': records
        }

    @openerpweb.jsonrequest
    def load(self, req, model, id, fields):
        """ Read a single record (all fields) and return it as ``{'value': record}``.

        Returns an empty ``value`` dict when the record does not exist.
        """
        m = req.session.model(model)
        value = {}
        r = m.read([id], False, req.session.eval_context(req.context))
        if r:
            value = r[0]
        return {'value': value}

    def call_common(self, req, model, method, args, domain_id=None, context_id=None):
        """ Evaluate the (optional) domain and context positional arguments
        in-place, then dispatch to :meth:`_call_kw`.

        :param int domain_id: index in ``args`` of the domain argument, if any
        :param int context_id: index in ``args`` of the context argument, if any
        """
        has_domain = domain_id is not None and domain_id < len(args)
        has_context = context_id is not None and context_id < len(args)

        domain = args[domain_id] if has_domain else []
        context = args[context_id] if has_context else {}
        c, d = eval_context_and_domain(req.session, context, domain)
        # write the evaluated values back into the argument list
        if has_domain:
            args[domain_id] = d
        if has_context:
            args[context_id] = c

        return self._call_kw(req, model, method, args, {})

    def _call_kw(self, req, model, method, args, kwargs):
        """ Call ``method`` on ``model`` after evaluating any non-literal
        context/domain objects found in ``args`` and ``kwargs``.
        """
        for i in xrange(len(args)):
            if isinstance(args[i], nonliterals.BaseContext):
                args[i] = req.session.eval_context(args[i])
            elif isinstance(args[i], nonliterals.BaseDomain):
                args[i] = req.session.eval_domain(args[i])
        for k in kwargs.keys():
            if isinstance(kwargs[k], nonliterals.BaseContext):
                kwargs[k] = req.session.eval_context(kwargs[k])
            elif isinstance(kwargs[k], nonliterals.BaseDomain):
                kwargs[k] = req.session.eval_domain(kwargs[k])

        # Temporary implements future display_name special field for model#read()
        if method == 'read' and kwargs.get('context') and kwargs['context'].get('future_display_name'):
            if 'display_name' in args[1]:
                # fetch display names via name_get, strip the pseudo-field
                # from the requested fields, then graft the names back on
                names = req.session.model(model).name_get(args[0], **kwargs)
                args[1].remove('display_name')
                r = getattr(req.session.model(model), method)(*args, **kwargs)
                for i in range(len(r)):
                    # fall back to "model#id" when name_get returned no name
                    r[i]['display_name'] = names[i][1] or "%s#%d" % (model, names[i][0])
                return r

        return getattr(req.session.model(model), method)(*args, **kwargs)

    @openerpweb.jsonrequest
    def onchange(self, req, model, method, args, context_id=None):
        """ Support method for handling onchange calls: behaves much like call
        with the following differences:

        * Does not take a domain_id
        * Is aware of the return value's structure, and will parse the domains
          if needed in order to return either parsed literal domains (in JSON)
          or non-literal domain instances, allowing those domains to be used
          from JS

        :param req:
        :type req: web.common.http.JsonRequest
        :param str model: object type on which to call the method
        :param str method: name of the onchange handler method
        :param list args: arguments to call the onchange handler with
        :param int context_id: index of the context object in the list of
                               arguments
        :return: result of the onchange call with all domains parsed
        """
        result = self.call_common(req, model, method, args, context_id=context_id)
        if not result or 'domain' not in result:
            return result

        result['domain'] = dict(
            (k, parse_domain(v, req.session))
            for k, v in result['domain'].iteritems())

        return result

    @openerpweb.jsonrequest
    def call(self, req, model, method, args, domain_id=None, context_id=None):
        """ Generic model method call (positional arguments only). """
        return self.call_common(req, model, method, args, domain_id, context_id)

    @openerpweb.jsonrequest
    def call_kw(self, req, model, method, args, kwargs):
        """ Generic model method call with keyword arguments. """
        return self._call_kw(req, model, method, args, kwargs)

    @openerpweb.jsonrequest
    def call_button(self, req, model, method, args, domain_id=None, context_id=None):
        """ Call a button handler; clean and return the resulting action,
        or False when the handler returned no usable action.
        """
        action = self.call_common(req, model, method, args, domain_id, context_id)
        if isinstance(action, dict) and action.get('type') != '':
            return clean_action(req, action)
        return False

    @openerpweb.jsonrequest
    def exec_workflow(self, req, model, id, signal):
        """ Send ``signal`` to the workflow of record ``id`` on ``model``. """
        return req.session.exec_workflow(model, id, signal)

    @openerpweb.jsonrequest
    def resequence(self, req, model, ids, field='sequence', offset=0):
        """ Re-sequences a number of records in the model, by their ids

        The re-sequencing starts at the first model of ``ids``, the sequence
        number is incremented by one after each record and starts at ``offset``

        :param ids: identifiers of the records to resequence, in the new sequence order
        :type ids: list(id)
        :param str field: field used for sequence specification, defaults to
                          "sequence"
        :param int offset: sequence number for first record in ``ids``, allows
                           starting the resequencing from an arbitrary number,
                           defaults to ``0``
        """
        m = req.session.model(model)
        if not m.fields_get([field]):
            # model has no such field: nothing to resequence
            return False
        # python 2.6 has no start parameter
        for i, id in enumerate(ids):
            m.write(id, { field: i + offset })
        return True
|
|
|
|
|
2011-05-10 08:34:20 +00:00
|
|
|
class DataGroup(openerpweb.Controller):
    """ Controller exposing ``read_group`` over JSON-RPC. """
    _cp_path = "/web/group"
    @openerpweb.jsonrequest
    def read(self, req, model, fields, group_by_fields, domain=None, sort=None):
        """ Group the records of ``model`` matching ``domain`` by
        ``group_by_fields`` and return the aggregated groups.
        """
        proxy = req.session.model(model)
        context, domain = eval_context_and_domain(req.session, req.context, domain)

        # read_group expects the grouping fields both as an argument and in
        # the context under the 'group_by' key
        grouping_context = dict(context, group_by=group_by_fields)
        return proxy.read_group(
            domain or [], fields, group_by_fields, 0, False,
            grouping_context, sort or False)
|
2011-05-10 08:34:20 +00:00
|
|
|
|
2011-03-24 20:11:25 +00:00
|
|
|
class View(openerpweb.Controller):
    """ Controller in charge of fetching view descriptions from the server
    (``fields_view_get``) and post-processing them for the web client: the
    view ``arch`` is converted from XML source to a JSON-compatible
    structure, and all domains and contexts found in it are parsed into
    literals (or non-literal placeholders when they reference free
    variables).
    """
    _cp_path = "/web/view"

    def fields_view_get(self, req, model, view_id, view_type,
                        transform=True, toolbar=False, submenu=False):
        """ Fetches and processes a view description for ``model``.

        :param req: OpenERP request
        :param str model: name of the model whose view is requested
        :param view_id: database id of the view, falsy for the model's
                        default view of ``view_type``
        :param str view_type: form, tree, search, kanban, ...
        :param bool transform: whether domains/contexts embedded in the arch
                               should be parsed (see :meth:`process_view`)
        :param bool toolbar: whether the sidebar actions should be fetched
        :param bool submenu: passed through to the server's fields_view_get
        :returns: the processed fields_view_get description (dict)
        """
        Model = req.session.model(model)
        context = req.session.eval_context(req.context)
        fvg = Model.fields_view_get(view_id, view_type, context, toolbar, submenu)
        # todo fme?: check that we should pass the evaluated context here
        # kanban views keep whitespace in their arch, other views do not
        self.process_view(req.session, fvg, context, transform, (view_type == 'kanban'))
        if toolbar and transform:
            self.process_toolbar(req, fvg['toolbar'])
        return fvg

    def process_view(self, session, fvg, context, transform, preserve_whitespaces=False):
        """ Mutates ``fvg`` in place: converts its ``arch`` from XML source
        to a JSON-compatible structure, parses domains/contexts it embeds,
        and recursively processes inline sub-views.

        :param session: OpenERP session, used to store non-literal
                        domains/contexts
        :param dict fvg: a fields_view_get description
        :param context: evaluated context used when transforming the view
        :param bool transform: parse domains/contexts via transform_view when
                               True, otherwise only convert XML to JSON
        :param bool preserve_whitespaces: keep text whitespace in the JSON
                                          output (kanban views)
        """
        # depending on how it feels, xmlrpclib.ServerProxy can translate
        # XML-RPC strings to ``str`` or ``unicode``. ElementTree does not
        # enjoy unicode strings which can not be trivially converted to
        # byte strings, and it blows up during parsing.

        # So ensure parsing always gets byte strings by re-encoding the
        # view xml when needed.
        if isinstance(fvg['arch'], unicode):
            arch = fvg['arch'].encode('utf-8')
        else:
            arch = fvg['arch']
        # keep the raw XML source available to clients alongside the JSON form
        fvg['arch_string'] = arch

        if transform:
            evaluation_context = session.evaluation_context(context or {})
            xml = self.transform_view(arch, session, evaluation_context)
        else:
            xml = ElementTree.fromstring(arch)
        fvg['arch'] = xml2json_from_elementtree(xml, preserve_whitespaces)

        if 'id' in fvg['fields']:
            # Special case for id's: remap to the dedicated 'id' widget type
            # while remembering the original type for round-tripping
            id_field = fvg['fields']['id']
            id_field['original_type'] = id_field['type']
            id_field['type'] = 'id'

        for field in fvg['fields'].itervalues():
            if field.get('views'):
                # inline sub-views of o2m/m2m fields get the same treatment,
                # but with no context of their own
                for view in field["views"].itervalues():
                    self.process_view(session, view, None, transform)
            if field.get('domain'):
                field["domain"] = parse_domain(field["domain"], session)
            if field.get('context'):
                field["context"] = parse_context(field["context"], session)

    def process_toolbar(self, req, toolbar):
        """
        The toolbar is a mapping of section_key: [action_descriptor]

        We need to parse the context and domain of all those actions in
        order to ensure correct round-tripping through the client.
        """
        for actions in toolbar.itervalues():
            for action in actions:
                if 'context' in action:
                    action['context'] = parse_context(
                        action['context'], req.session)
                if 'domain' in action:
                    action['domain'] = parse_domain(
                        action['domain'], req.session)

    @openerpweb.jsonrequest
    def add_custom(self, req, view_id, arch):
        """ Saves a user-customized version (``arch``) of view ``view_id``
        as an ir.ui.view.custom record for the current user.
        """
        CustomView = req.session.model('ir.ui.view.custom')
        CustomView.create({
            'user_id': req.session._uid,
            'ref_id': view_id,
            'arch': arch
        }, req.session.eval_context(req.context))
        return {'result': True}

    @openerpweb.jsonrequest
    def undo_custom(self, req, view_id, reset=False):
        """ Removes the current user's customizations of view ``view_id``:
        only the most recent one by default, all of them when ``reset`` is
        set. Returns whether anything was deleted.
        """
        CustomView = req.session.model('ir.ui.view.custom')
        context = req.session.eval_context(req.context)
        vcustom = CustomView.search([('user_id', '=', req.session._uid), ('ref_id' ,'=', view_id)],
                                    0, False, False, context)
        if vcustom:
            if reset:
                CustomView.unlink(vcustom, context)
            else:
                # search returns newest first, so [0] is the latest custom view
                CustomView.unlink([vcustom[0]], context)
            return {'result': True}
        return {'result': False}

    def transform_view(self, view_string, session, context=None):
        """ Parses ``view_string`` into an ElementTree while rewriting the
        domain/context attributes of each node on the fly.

        :param str view_string: XML source of the view (byte string)
        :returns: the root Element of the parsed tree
        """
        # transform nodes on the fly via iterparse, instead of
        # doing it statically on the parsing result
        parser = ElementTree.iterparse(StringIO(view_string), events=("start",))
        root = None
        for event, elem in parser:
            if event == "start":
                if root is None:
                    root = elem
                self.parse_domains_and_contexts(elem, session)
        return root

    def parse_domains_and_contexts(self, elem, session):
        """ Converts domains and contexts from the view into Python objects,
        either literals if they can be parsed by literal_eval or a special
        placeholder object if the domain or context refers to free variables.

        The original source string is kept in a parallel ``*_string``
        attribute for round-tripping.

        :param elem: the current node being parsed
        :type param: xml.etree.ElementTree.Element
        :param session: OpenERP session object, used to store and retrieve
                        non-literal objects
        :type session: openerpweb.openerpweb.OpenERPSession
        """
        for el in ['domain', 'filter_domain']:
            domain = elem.get(el, '').strip()
            if domain:
                elem.set(el, parse_domain(domain, session))
                elem.set(el + '_string', domain)
        for el in ['context', 'default_get']:
            context_string = elem.get(el, '').strip()
            if context_string:
                elem.set(el, parse_context(context_string, session))
                elem.set(el + '_string', context_string)

    @openerpweb.jsonrequest
    def load(self, req, model, view_id, view_type, toolbar=False):
        """ JSON entry point: fetch & process a view (see fields_view_get). """
        return self.fields_view_get(req, model, view_id, view_type, toolbar=toolbar)
|
2011-03-21 08:13:31 +00:00
|
|
|
|
2011-09-06 20:54:38 +00:00
|
|
|
class TreeView(View):
    """ Endpoint backing the (hierarchical) tree view: resolves the action
    bound to a node through ir.values' ``tree_but_open`` bindings.
    """
    _cp_path = "/web/treeview"

    @openerpweb.jsonrequest
    def action(self, req, model, id):
        """ Returns the actions registered for opening record ``id`` of
        ``model`` from a tree view node.
        """
        bindings = [(model, id)]
        return load_actions_from_ir_values(
            req, 'action', 'tree_but_open', bindings, False)
|
|
|
|
|
2011-03-24 20:11:25 +00:00
|
|
|
class SearchView(View):
    """ Controller serving search view descriptions and the metadata the
    search view needs: raw field definitions and the user's saved filters.
    """
    _cp_path = "/web/searchview"

    @openerpweb.jsonrequest
    def load(self, req, model, view_id):
        """ Fetches and processes the search view ``view_id`` of ``model``. """
        fields_view = self.fields_view_get(req, model, view_id, 'search')
        return {'fields_view': fields_view}

    @openerpweb.jsonrequest
    def fields_get(self, req, model):
        """ Returns the model's raw field definitions, with field-level
        domains and contexts parsed for client consumption.
        """
        Model = req.session.model(model)
        fields = Model.fields_get(False, req.session.eval_context(req.context))
        for field in fields.values():
            # shouldn't convert the views too?
            if field.get('domain'):
                field["domain"] = parse_domain(field["domain"], req.session)
            if field.get('context'):
                field["context"] = parse_context(field["context"], req.session)
        return {'fields': fields}

    @openerpweb.jsonrequest
    def get_filters(self, req, model):
        """ Returns the current user's custom filters for ``model``
        (ir.filters), with their context and domain parsed and — when
        non-literal — evaluated server-side.

        A filter whose context or domain cannot be parsed is not dropped:
        it is flagged ``disabled`` (and stripped of the offending values)
        so the client can still display it.
        """
        logger = logging.getLogger(__name__ + '.SearchView.get_filters')
        Model = req.session.model("ir.filters")
        filters = Model.get_filters(model)
        for filter in filters:
            try:
                parsed_context = parse_context(filter["context"], req.session)
                # literal contexts pass through; non-literals must be
                # evaluated before being sent to the client
                filter["context"] = (parsed_context
                        if not isinstance(parsed_context, nonliterals.BaseContext)
                        else req.session.eval_context(parsed_context))

                parsed_domain = parse_domain(filter["domain"], req.session)
                filter["domain"] = (parsed_domain
                        if not isinstance(parsed_domain, nonliterals.BaseDomain)
                        else req.session.eval_domain(parsed_domain))
            except Exception:
                # broken filters must not break the whole search view:
                # log, disable the filter and strip the unparsable values
                logger.exception("Failed to parse custom filter %s in %s",
                                 filter['name'], model)
                filter['disabled'] = True
                del filter['context']
                del filter['domain']
        return filters
|
2011-11-23 21:53:56 +00:00
|
|
|
|
2011-05-23 14:52:19 +00:00
|
|
|
class Binary(openerpweb.Controller):
|
2011-09-05 11:03:09 +00:00
|
|
|
_cp_path = "/web/binary"
|
2011-05-23 14:52:19 +00:00
|
|
|
|
|
|
|
@openerpweb.httprequest
|
2011-08-18 18:51:45 +00:00
|
|
|
def image(self, req, model, id, field, **kw):
|
2012-06-18 15:47:10 +00:00
|
|
|
last_update = '__last_update'
|
2011-08-18 18:51:45 +00:00
|
|
|
Model = req.session.model(model)
|
|
|
|
context = req.session.eval_context(req.context)
|
2012-06-18 15:47:10 +00:00
|
|
|
headers = [('Content-Type', 'image/png')]
|
|
|
|
etag = req.httprequest.headers.get('If-None-Match')
|
|
|
|
hashed_session = hashlib.md5(req.session_id).hexdigest()
|
2012-08-13 13:56:53 +00:00
|
|
|
id = None if not id else simplejson.loads(id)
|
|
|
|
if type(id) is list:
|
|
|
|
id = id[0] # m2o
|
2012-06-18 15:47:10 +00:00
|
|
|
if etag:
|
|
|
|
if not id and hashed_session == etag:
|
|
|
|
return werkzeug.wrappers.Response(status=304)
|
|
|
|
else:
|
2012-08-13 13:56:53 +00:00
|
|
|
date = Model.read([id], [last_update], context)[0].get(last_update)
|
2012-06-18 15:47:10 +00:00
|
|
|
if hashlib.md5(date).hexdigest() == etag:
|
|
|
|
return werkzeug.wrappers.Response(status=304)
|
2011-09-06 11:11:57 +00:00
|
|
|
|
2012-06-18 15:47:10 +00:00
|
|
|
retag = hashed_session
|
2011-05-23 14:52:19 +00:00
|
|
|
try:
|
|
|
|
if not id:
|
2011-10-05 15:57:40 +00:00
|
|
|
res = Model.default_get([field], context).get(field)
|
2012-06-18 15:47:10 +00:00
|
|
|
image_data = base64.b64decode(res)
|
2011-05-23 14:52:19 +00:00
|
|
|
else:
|
2012-08-07 10:58:13 +00:00
|
|
|
res = Model.read([id], [last_update, field], context)[0]
|
2012-06-18 15:47:10 +00:00
|
|
|
retag = hashlib.md5(res.get(last_update)).hexdigest()
|
|
|
|
image_data = base64.b64decode(res.get(field))
|
2011-09-06 11:12:26 +00:00
|
|
|
except (TypeError, xmlrpclib.Fault):
|
2011-09-06 11:11:57 +00:00
|
|
|
image_data = self.placeholder(req)
|
2012-06-18 15:47:10 +00:00
|
|
|
headers.append(('ETag', retag))
|
|
|
|
headers.append(('Content-Length', len(image_data)))
|
2012-06-19 09:06:42 +00:00
|
|
|
try:
|
|
|
|
ncache = int(kw.get('cache'))
|
|
|
|
headers.append(('Cache-Control', 'no-cache' if ncache == 0 else 'max-age=%s' % (ncache)))
|
|
|
|
except:
|
|
|
|
pass
|
2012-06-18 15:47:10 +00:00
|
|
|
return req.make_response(image_data, headers)
|
2011-09-02 09:44:49 +00:00
|
|
|
def placeholder(self, req):
|
2011-10-05 15:57:40 +00:00
|
|
|
addons_path = openerpweb.addons_manifest['web']['addons_path']
|
|
|
|
return open(os.path.join(addons_path, 'web', 'static', 'src', 'img', 'placeholder.png'), 'rb').read()
|
2011-05-23 14:52:19 +00:00
|
|
|
|
|
|
|
@openerpweb.httprequest
|
2012-01-10 14:35:18 +00:00
|
|
|
def saveas(self, req, model, field, id=None, filename_field=None, **kw):
|
|
|
|
""" Download link for files stored as binary fields.
|
|
|
|
|
|
|
|
If the ``id`` parameter is omitted, fetches the default value for the
|
|
|
|
binary field (via ``default_get``), otherwise fetches the field for
|
|
|
|
that precise record.
|
|
|
|
|
|
|
|
:param req: OpenERP request
|
|
|
|
:type req: :class:`web.common.http.HttpRequest`
|
|
|
|
:param str model: name of the model to fetch the binary from
|
|
|
|
:param str field: binary field
|
|
|
|
:param str id: id of the record from which to fetch the binary
|
|
|
|
:param str filename_field: field holding the file's name, if any
|
|
|
|
:returns: :class:`werkzeug.wrappers.Response`
|
|
|
|
"""
|
2011-08-18 18:51:45 +00:00
|
|
|
Model = req.session.model(model)
|
|
|
|
context = req.session.eval_context(req.context)
|
2012-01-10 15:39:05 +00:00
|
|
|
fields = [field]
|
|
|
|
if filename_field:
|
|
|
|
fields.append(filename_field)
|
2011-10-17 14:58:49 +00:00
|
|
|
if id:
|
2012-01-10 15:39:05 +00:00
|
|
|
res = Model.read([int(id)], fields, context)[0]
|
2011-10-17 14:58:49 +00:00
|
|
|
else:
|
2012-01-10 15:39:05 +00:00
|
|
|
res = Model.default_get(fields, context)
|
2011-10-14 11:09:42 +00:00
|
|
|
filecontent = base64.b64decode(res.get(field, ''))
|
2011-05-23 14:52:19 +00:00
|
|
|
if not filecontent:
|
2011-09-02 08:58:53 +00:00
|
|
|
return req.not_found()
|
2011-05-23 14:52:19 +00:00
|
|
|
else:
|
|
|
|
filename = '%s_%s' % (model.replace('.', '_'), id)
|
2012-01-10 14:35:18 +00:00
|
|
|
if filename_field:
|
|
|
|
filename = res.get(filename_field, '') or filename
|
2011-09-02 08:58:53 +00:00
|
|
|
return req.make_response(filecontent,
|
|
|
|
[('Content-Type', 'application/octet-stream'),
|
2012-10-30 11:51:13 +00:00
|
|
|
('Content-Disposition', content_disposition(filename, req))])
|
2011-05-23 14:52:19 +00:00
|
|
|
|
2012-02-08 10:39:35 +00:00
|
|
|
@openerpweb.httprequest
|
|
|
|
def saveas_ajax(self, req, data, token):
|
|
|
|
jdata = simplejson.loads(data)
|
|
|
|
model = jdata['model']
|
|
|
|
field = jdata['field']
|
|
|
|
id = jdata.get('id', None)
|
|
|
|
filename_field = jdata.get('filename_field', None)
|
|
|
|
context = jdata.get('context', dict())
|
|
|
|
|
|
|
|
context = req.session.eval_context(context)
|
|
|
|
Model = req.session.model(model)
|
|
|
|
fields = [field]
|
|
|
|
if filename_field:
|
|
|
|
fields.append(filename_field)
|
|
|
|
if id:
|
|
|
|
res = Model.read([int(id)], fields, context)[0]
|
|
|
|
else:
|
|
|
|
res = Model.default_get(fields, context)
|
|
|
|
filecontent = base64.b64decode(res.get(field, ''))
|
|
|
|
if not filecontent:
|
|
|
|
raise ValueError("No content found for field '%s' on '%s:%s'" %
|
|
|
|
(field, model, id))
|
|
|
|
else:
|
|
|
|
filename = '%s_%s' % (model.replace('.', '_'), id)
|
|
|
|
if filename_field:
|
|
|
|
filename = res.get(filename_field, '') or filename
|
|
|
|
return req.make_response(filecontent,
|
|
|
|
headers=[('Content-Type', 'application/octet-stream'),
|
2012-10-30 11:51:13 +00:00
|
|
|
('Content-Disposition', content_disposition(filename, req))],
|
2012-02-08 10:39:35 +00:00
|
|
|
cookies={'fileToken': int(token)})
|
|
|
|
|
2011-05-23 14:52:19 +00:00
|
|
|
@openerpweb.httprequest
|
2011-09-06 11:53:16 +00:00
|
|
|
def upload(self, req, callback, ufile):
|
2011-06-29 13:12:33 +00:00
|
|
|
# TODO: might be useful to have a configuration flag for max-length file uploads
|
2011-05-23 14:52:19 +00:00
|
|
|
try:
|
|
|
|
out = """<script language="javascript" type="text/javascript">
|
2012-06-13 10:05:16 +00:00
|
|
|
var win = window.top.window;
|
|
|
|
win.jQuery(win).trigger(%s, %s);
|
2011-05-23 14:52:19 +00:00
|
|
|
</script>"""
|
2011-09-06 11:53:16 +00:00
|
|
|
data = ufile.read()
|
2011-12-15 10:29:10 +00:00
|
|
|
args = [len(data), ufile.filename,
|
2011-09-06 11:53:16 +00:00
|
|
|
ufile.content_type, base64.b64encode(data)]
|
2011-05-23 14:52:19 +00:00
|
|
|
except Exception, e:
|
|
|
|
args = [False, e.message]
|
|
|
|
return out % (simplejson.dumps(callback), simplejson.dumps(args))
|
|
|
|
|
2011-05-26 21:06:41 +00:00
|
|
|
@openerpweb.httprequest
|
2011-09-06 11:53:16 +00:00
|
|
|
def upload_attachment(self, req, callback, model, id, ufile):
|
2011-08-18 18:51:45 +00:00
|
|
|
context = req.session.eval_context(req.context)
|
|
|
|
Model = req.session.model('ir.attachment')
|
2011-05-26 21:06:41 +00:00
|
|
|
try:
|
|
|
|
out = """<script language="javascript" type="text/javascript">
|
2012-06-13 10:05:16 +00:00
|
|
|
var win = window.top.window;
|
|
|
|
win.jQuery(win).trigger(%s, %s);
|
2011-05-26 21:06:41 +00:00
|
|
|
</script>"""
|
|
|
|
attachment_id = Model.create({
|
|
|
|
'name': ufile.filename,
|
2011-09-06 11:53:16 +00:00
|
|
|
'datas': base64.encodestring(ufile.read()),
|
2012-01-11 12:55:01 +00:00
|
|
|
'datas_fname': ufile.filename,
|
2011-05-26 21:06:41 +00:00
|
|
|
'res_model': model,
|
|
|
|
'res_id': int(id)
|
2011-06-17 16:08:49 +00:00
|
|
|
}, context)
|
2011-05-26 21:06:41 +00:00
|
|
|
args = {
|
|
|
|
'filename': ufile.filename,
|
|
|
|
'id': attachment_id
|
|
|
|
}
|
|
|
|
except Exception, e:
|
|
|
|
args = { 'error': e.message }
|
|
|
|
return out % (simplejson.dumps(callback), simplejson.dumps(args))
|
|
|
|
|
2011-03-02 18:56:06 +00:00
|
|
|
class Action(openerpweb.Controller):
    """ Controller loading and executing ir.actions records for the client. """
    _cp_path = "/web/action"

    @openerpweb.jsonrequest
    def load(self, req, action_id, do_not_eval=False):
        """ Loads the action ``action_id`` (a database id or a
        ``module.xmlid`` external identifier) and returns it cleaned for
        the client, or ``False`` when it cannot be read.

        :param action_id: numeric id, numeric string, or external xmlid of
                          an ir.actions.* record
        :param bool do_not_eval: forwarded to clean_action, skips evaluation
                                 of the action's context/domain
        """
        Actions = req.session.model('ir.actions.actions')
        value = False
        context = req.session.eval_context(req.context)

        try:
            action_id = int(action_id)
        except ValueError:
            # not a numeric id: try resolving it as a module.xmlid reference
            try:
                module, xmlid = action_id.split('.', 1)
                model, action_id = req.session.model('ir.model.data').get_object_reference(module, xmlid)
                assert model.startswith('ir.actions.')
            except Exception:
                action_id = 0   # force failed read

        # first read the generic record to learn the concrete action model
        base_action = Actions.read([action_id], ['type'], context)
        if base_action:
            ctx = {}
            action_type = base_action[0]['type']
            if action_type == 'ir.actions.report.xml':
                # avoid fetching report binaries, only their sizes
                ctx.update({'bin_size': True})
            ctx.update(context)
            # then re-read the full action through its concrete model
            action = req.session.model(action_type).read([action_id], False, ctx)
            if action:
                value = clean_action(req, action[0], do_not_eval)
        return value

    @openerpweb.jsonrequest
    def run(self, req, action_id):
        """ Runs the server action ``action_id``; if it returns a follow-up
        action, cleans and returns it, otherwise returns ``False``.
        """
        return_action = req.session.model('ir.actions.server').run(
            [action_id], req.session.eval_context(req.context))
        if return_action:
            return clean_action(req, return_action)
        else:
            return False
|
2011-07-13 10:26:12 +00:00
|
|
|
|
2011-07-22 12:52:14 +00:00
|
|
|
class Export(View):
    """ Abstract base controller for data export. Concrete formats (CSV,
    Excel, ...) subclass it under a sub-path of ``/web/export`` and provide
    ``fmt``, ``content_type``, ``filename`` and ``from_data``.
    """
    _cp_path = "/web/export"

    @openerpweb.jsonrequest
    def formats(self, req):
        """ Returns all valid export formats

        Discovered by scanning the registered controllers mounted below this
        controller's path for a ``fmt`` attribute.

        :returns: for each export format, a pair of identifier and printable name
        :rtype: [(str, str)]
        """
        return sorted([
            controller.fmt
            for path, controller in openerpweb.controllers_path.iteritems()
            if path.startswith(self._cp_path)
            if hasattr(controller, 'fmt')
        ], key=operator.itemgetter("label"))

    def fields_get(self, req, model):
        """ Raw fields_get for ``model`` with the request's evaluated context. """
        Model = req.session.model(model)
        fields = Model.fields_get(False, req.session.eval_context(req.context))
        return fields

    @openerpweb.jsonrequest
    def get_fields(self, req, model, prefix='', parent_name= '',
                   import_compat=True, parent_field_type=None,
                   exclude=None):
        """ Returns the list of exportable fields of ``model`` as records
        for the export dialog's tree widget ({id, string, value, children,
        field_type, required, relation_field}).

        In import-compatible mode, readonly fields, excluded fields and the
        ``id`` field are dropped (records are matched by external id
        instead), and only one2many relations are expandable.

        :param str prefix: slash-separated technical path of the parent field
        :param str parent_name: slash-separated human path of the parent field
        :param bool import_compat: restrict to fields usable for re-import
        :param parent_field_type: type of the parent relational field, if any
        :param exclude: field names to skip (import-compatible mode only)
        """
        # an import-compatible export cannot recurse into m2o targets
        if import_compat and parent_field_type == "many2one":
            fields = {}
        else:
            fields = self.fields_get(req, model)

        if import_compat:
            fields.pop('id', None)
        else:
            # expose the database id under '.id' to avoid clashing with the
            # external-id pseudo-field 'id'
            fields['.id'] = fields.pop('id', {'string': 'ID'})

        fields_sequence = sorted(fields.iteritems(),
            key=lambda field: field[1].get('string', ''))

        records = []
        for field_name, field in fields_sequence:
            if import_compat:
                if exclude and field_name in exclude:
                    continue
                if field.get('readonly'):
                    # If none of the field's states unsets readonly, skip the field
                    if all(dict(attrs).get('readonly', True)
                           for attrs in field.get('states', {}).values()):
                        continue

            id = prefix + (prefix and '/'or '') + field_name
            name = parent_name + (parent_name and '/' or '') + field['string']
            record = {'id': id, 'string': name,
                      'value': id, 'children': False,
                      'field_type': field.get('type'),
                      'required': field.get('required'),
                      'relation_field': field.get('relation_field')}
            records.append(record)

            # only expand relational fields up to two levels deep
            if len(name.split('/')) < 3 and 'relation' in field:
                ref = field.pop('relation')
                record['value'] += '/id'
                record['params'] = {'model': ref, 'prefix': id, 'name': name}

                if not import_compat or field['type'] == 'one2many':
                    # m2m field in import_compat is childless
                    record['children'] = True

        return records

    @openerpweb.jsonrequest
    def namelist(self,req, model, export_id):
        """ Returns, for a saved export list (ir.exports), the ordered
        [{name, label}] of its fields.
        """
        # TODO: namelist really has no reason to be in Python (although itertools.groupby helps)
        export = req.session.model("ir.exports").read([export_id])[0]
        export_fields_list = req.session.model("ir.exports.line").read(
            export['export_fields'])

        fields_data = self.fields_info(
            req, model, map(operator.itemgetter('name'), export_fields_list))

        return [
            {'name': field['name'], 'label': fields_data[field['name']]}
            for field in export_fields_list
        ]

    def fields_info(self, req, model, export_fields):
        """ Maps each (possibly slash-nested) field path in ``export_fields``
        to its human-readable label.
        """
        info = {}
        fields = self.fields_get(req, model)

        # To make fields retrieval more efficient, fetch all sub-fields of a
        # given field at the same time. Because the order in the export list is
        # arbitrary, this requires ordering all sub-fields of a given field
        # together so they can be fetched at the same time
        #
        # Works the following way:
        # * sort the list of fields to export, the default sorting order will
        #   put the field itself (if present, for xmlid) and all of its
        #   sub-fields right after it
        # * then, group on: the first field of the path (which is the same for
        #   a field and for its subfields and the length of splitting on the
        #   first '/', which basically means grouping the field on one side and
        #   all of the subfields on the other. This way, we have the field (for
        #   the xmlid) with length 1, and all of the subfields with the same
        #   base but a length "flag" of 2
        # * if we have a normal field (length 1), just add it to the info
        #   mapping (with its string) as-is
        # * otherwise, recursively call fields_info via graft_subfields.
        #   all graft_subfields does is take the result of fields_info (on the
        #   field's model) and prepend the current base (current field), which
        #   rebuilds the whole sub-tree for the field
        #
        # result: because we're not fetching the fields_get for half the
        # database models, fetching a namelist with a dozen fields (including
        # relational data) falls from ~6s to ~300ms (on the leads model).
        # export lists with no sub-fields (e.g. import_compatible lists with
        # no o2m) are even more efficient (from the same 6s to ~170ms, as
        # there's a single fields_get to execute)
        for (base, length), subfields in itertools.groupby(
                sorted(export_fields),
                lambda field: (field.split('/', 1)[0], len(field.split('/', 1)))):
            subfields = list(subfields)
            if length == 2:
                # subfields is a seq of $base/*rest, and not loaded yet
                info.update(self.graft_subfields(
                    req, fields[base]['relation'], base, fields[base]['string'],
                    subfields
                ))
            else:
                info[base] = fields[base]['string']

        return info

    def graft_subfields(self, req, model, prefix, prefix_string, fields):
        """ Recursively resolves the labels of ``prefix``-rooted sub-field
        paths against the related ``model`` and re-prefixes the results.
        """
        export_fields = [field.split('/', 1)[1] for field in fields]
        return (
            (prefix + '/' + k, prefix_string + '/' + v)
            for k, v in self.fields_info(req, model, export_fields).iteritems())

    #noinspection PyPropertyDefinition
    @property
    def content_type(self):
        """ Provides the format's content type """
        raise NotImplementedError()

    def filename(self, base):
        """ Creates a valid filename for the format (with extension) from the
        provided base name (exension-less)
        """
        raise NotImplementedError()

    def from_data(self, fields, rows):
        """ Conversion method from OpenERP's export data to whatever the
        current export class outputs

        :params list fields: a list of fields to export
        :params list rows: a list of records to export
        :returns: the serialized document
        :rtype: bytes
        """
        raise NotImplementedError()

    @openerpweb.httprequest
    def index(self, req, data, token):
        """ HTTP entry point: exports the requested records in this format
        and streams the result as an attachment, echoing ``token`` in the
        ``fileToken`` cookie so the client can detect completion.

        :param str data: JSON object with model, fields, ids, domain and
                         import_compat keys
        """
        model, fields, ids, domain, import_compat = \
            operator.itemgetter('model', 'fields', 'ids', 'domain',
                                'import_compat')(
                simplejson.loads(data))

        context = req.session.eval_context(req.context)
        Model = req.session.model(model)
        # no explicit ids: export everything matching the domain
        ids = ids or Model.search(domain, 0, False, False, context)

        field_names = map(operator.itemgetter('name'), fields)
        import_data = Model.export_data(ids, field_names, context).get('datas',[])

        # import-compatible exports use technical names as column headers,
        # regular exports use the human-readable labels
        if import_compat:
            columns_headers = field_names
        else:
            columns_headers = [val['label'].strip() for val in fields]

        return req.make_response(self.from_data(columns_headers, import_data),
            headers=[('Content-Disposition',
                            content_disposition(self.filename(model), req)),
                     ('Content-Type', self.content_type)],
            cookies={'fileToken': int(token)})
2011-08-30 13:06:32 +00:00
|
|
|
|
|
|
|
class CSVExport(Export):
    """ Export format serializing records to fully-quoted UTF-8 CSV. """
    _cp_path = '/web/export/csv'
    fmt = {'tag': 'csv', 'label': 'CSV'}

    @property
    def content_type(self):
        """ MIME type of the produced document. """
        return 'text/csv;charset=utf8'

    def filename(self, base):
        """ Appends the csv extension to the base name. """
        return base + '.csv'

    def from_data(self, fields, rows):
        """ Serializes a header row (``fields``) followed by ``rows`` into a
        CSV byte string; newlines/tabs inside cells are flattened to spaces
        and False cells become empty.
        """
        buf = StringIO()
        writer = csv.writer(buf, quoting=csv.QUOTE_ALL)

        writer.writerow([name.encode('utf-8') for name in fields])

        for record in rows:
            cells = []
            for cell in record:
                if isinstance(cell, basestring):
                    cell = cell.replace('\n', ' ').replace('\t', ' ')
                    try:
                        cell = cell.encode('utf-8')
                    except UnicodeError:
                        # already a non-decodable byte string: keep as-is
                        pass
                if cell is False:
                    cell = None
                cells.append(cell)
            writer.writerow(cells)

        buf.seek(0)
        output = buf.read()
        buf.close()
        return output
|
|
|
|
|
|
|
|
class ExcelExport(Export):
    """ Export format serializing records to a legacy .xls workbook via the
    optional xlwt library.
    """
    _cp_path = '/web/export/xls'
    fmt = {
        'tag': 'xls',
        'label': 'Excel',
        'error': None if xlwt else "XLWT required"
    }

    @property
    def content_type(self):
        """ MIME type of the produced document. """
        return 'application/vnd.ms-excel'

    def filename(self, base):
        """ Appends the xls extension to the base name. """
        return base + '.xls'

    def from_data(self, fields, rows):
        """ Builds a single-sheet workbook: ``fields`` as the header row,
        then one wrapped-text row per record; carriage returns are replaced
        by spaces and False cells become empty.
        """
        workbook = xlwt.Workbook()
        sheet = workbook.add_sheet('Sheet 1')

        for col, header in enumerate(fields):
            sheet.write(0, col, header)
            sheet.col(col).width = 8000 # around 220 pixels

        wrapped = xlwt.easyxf('align: wrap yes')

        for line_no, record in enumerate(rows):
            for col, value in enumerate(record):
                if isinstance(value, basestring):
                    value = re.sub("\r", " ", value)
                if value is False:
                    value = None
                sheet.write(line_no + 1, col, value, wrapped)

        buf = StringIO()
        workbook.save(buf)
        buf.seek(0)
        output = buf.read()
        buf.close()
        return output
|
2011-09-06 11:57:54 +00:00
|
|
|
|
|
|
|
class Reports(View):
    """ Controller generating reports through the ``report`` RPC service
    and streaming the result back as a file download.
    """
    _cp_path = "/web/report"
    # delay (seconds) between two report_get() polls
    POLLING_DELAY = 0.25
    # report format -> mime type for the Content-Type header
    TYPES_MAPPING = {
        'doc': 'application/vnd.ms-word',
        'html': 'text/html',
        'odt': 'application/vnd.oasis.opendocument.text',
        'pdf': 'application/pdf',
        'sxw': 'application/vnd.sun.xml.writer',
        'xls': 'application/vnd.ms-excel',
    }

    @openerpweb.httprequest
    def index(self, req, action, token):
        """ Requests a report from the server, waits for it to be rendered
        and returns it as a downloadable HTTP response.

        :param req: request object
        :param str action: JSON-serialized report action (report_name,
                           context, optional datas/report_type/name)
        :param token: echoed back in the ``fileToken`` cookie so the client
                      can detect that the download has started
        """
        action = simplejson.loads(action)

        report_srv = req.session.proxy("report")
        context = req.session.eval_context(
            nonliterals.CompoundContext(
                req.context or {}, action[ "context"]))

        report_data = {}
        # NOTE(review): raises KeyError if the evaluated context has no
        # 'active_ids' -- presumably always set by the caller; confirm
        report_ids = context["active_ids"]
        if 'report_type' in action:
            report_data['report_type'] = action['report_type']
        if 'datas' in action:
            if 'ids' in action['datas']:
                # ids provided in datas override the context's active_ids
                report_ids = action['datas'].pop('ids')
            report_data.update(action['datas'])

        # schedule the report rendering, returns a server-side job id
        report_id = report_srv.report(
            req.session._db, req.session._uid, req.session._password,
            action["report_name"], report_ids,
            report_data, context)

        # NOTE(review): polls forever if the job never reaches a final
        # state -- there is no timeout or iteration cap
        report_struct = None
        while True:
            report_struct = report_srv.report_get(
                req.session._db, req.session._uid, req.session._password, report_id)
            if report_struct["state"]:
                break

            time.sleep(self.POLLING_DELAY)

        # result is base64-encoded, and possibly zlib-compressed on top
        report = base64.b64decode(report_struct['result'])
        if report_struct.get('code') == 'zlib':
            report = zlib.decompress(report)
        report_mimetype = self.TYPES_MAPPING.get(
            report_struct['format'], 'octet-stream')
        file_name = action.get('name', 'report')
        if 'name' not in action:
            # fall back to the report definition's name, then to its
            # technical report_name
            reports = req.session.model('ir.actions.report.xml')
            res_id = reports.search([('report_name', '=', action['report_name']),],
                                    0, False, False, context)
            if len(res_id) > 0:
                file_name = reports.read(res_id[0], ['name'], context)['name']
            else:
                file_name = action['report_name']
        file_name = '%s.%s' % (file_name, report_struct['format'])

        return req.make_response(report,
             headers=[
                 ('Content-Disposition', content_disposition(file_name, req)),
                 ('Content-Type', report_mimetype),
                 ('Content-Length', len(report))],
             cookies={'fileToken': int(token)})
|
2011-09-06 11:57:54 +00:00
|
|
|
|
2011-07-27 06:43:25 +00:00
|
|
|
class Import(View):
|
2011-09-06 13:05:25 +00:00
|
|
|
_cp_path = "/web/import"
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-08-03 12:32:42 +00:00
|
|
|
def fields_get(self, req, model):
|
|
|
|
Model = req.session.model(model)
|
|
|
|
fields = Model.fields_get(False, req.session.eval_context(req.context))
|
|
|
|
return fields
|
|
|
|
|
|
|
|
@openerpweb.httprequest
|
2011-10-24 14:06:58 +00:00
|
|
|
def detect_data(self, req, csvfile, csvsep=',', csvdel='"', csvcode='utf-8', jsonp='callback'):
|
2011-08-03 12:32:42 +00:00
|
|
|
try:
|
2011-09-23 08:59:24 +00:00
|
|
|
data = list(csv.reader(
|
|
|
|
csvfile, quotechar=str(csvdel), delimiter=str(csvsep)))
|
|
|
|
except csv.Error, e:
|
2011-09-08 10:24:00 +00:00
|
|
|
csvfile.seek(0)
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-23 08:59:24 +00:00
|
|
|
jsonp, simplejson.dumps({'error': {
|
|
|
|
'message': 'Error parsing CSV file: %s' % e,
|
|
|
|
# decodes each byte to a unicode character, which may or
|
|
|
|
# may not be printable, but decoding will succeed.
|
|
|
|
# Otherwise simplejson will try to decode the `str` using
|
|
|
|
# utf-8, which is very likely to blow up on characters out
|
|
|
|
# of the ascii range (in range [128, 256))
|
|
|
|
'preview': csvfile.read(200).decode('iso-8859-1')}}))
|
|
|
|
|
2011-09-23 09:23:46 +00:00
|
|
|
try:
|
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
|
|
|
jsonp, simplejson.dumps(
|
|
|
|
{'records': data[:10]}, encoding=csvcode))
|
|
|
|
except UnicodeDecodeError:
|
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
|
|
|
jsonp, simplejson.dumps({
|
|
|
|
'message': u"Failed to decode CSV file using encoding %s, "
|
|
|
|
u"try switching to a different encoding" % csvcode
|
|
|
|
}))
|
2011-08-03 12:32:42 +00:00
|
|
|
|
2011-07-27 06:43:25 +00:00
|
|
|
@openerpweb.httprequest
|
2011-09-23 12:22:02 +00:00
|
|
|
def import_data(self, req, model, csvfile, csvsep, csvdel, csvcode, jsonp,
|
|
|
|
meta):
|
2011-09-09 10:53:07 +00:00
|
|
|
modle_obj = req.session.model(model)
|
2011-09-23 12:22:02 +00:00
|
|
|
skip, indices, fields = operator.itemgetter('skip', 'indices', 'fields')(
|
|
|
|
simplejson.loads(meta))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
error = None
|
2011-09-08 10:24:00 +00:00
|
|
|
if not (csvdel and len(csvdel) == 1):
|
2011-09-23 12:22:02 +00:00
|
|
|
error = u"The CSV delimiter must be a single character"
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
if not indices and fields:
|
|
|
|
error = u"You must select at least one field to import"
|
2011-09-08 10:24:00 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
if error:
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-23 12:22:02 +00:00
|
|
|
jsonp, simplejson.dumps({'error': {'message': error}}))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2012-06-06 10:33:53 +00:00
|
|
|
# skip ignored records (@skip parameter)
|
|
|
|
# then skip empty lines (not valid csv)
|
|
|
|
# nb: should these operations be reverted?
|
|
|
|
rows_to_import = itertools.ifilter(
|
|
|
|
None,
|
|
|
|
itertools.islice(
|
|
|
|
csv.reader(csvfile, quotechar=str(csvdel), delimiter=str(csvsep)),
|
|
|
|
skip, None))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
# if only one index, itemgetter will return an atom rather than a tuple
|
|
|
|
if len(indices) == 1: mapper = lambda row: [row[indices[0]]]
|
|
|
|
else: mapper = operator.itemgetter(*indices)
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
data = None
|
|
|
|
error = None
|
|
|
|
try:
|
|
|
|
# decode each data row
|
|
|
|
data = [
|
|
|
|
[record.decode(csvcode) for record in row]
|
2012-06-06 10:33:53 +00:00
|
|
|
for row in itertools.imap(mapper, rows_to_import)
|
2011-09-23 12:22:02 +00:00
|
|
|
# don't insert completely empty rows (can happen due to fields
|
|
|
|
# filtering in case of e.g. o2m content rows)
|
|
|
|
if any(row)
|
|
|
|
]
|
|
|
|
except UnicodeDecodeError:
|
|
|
|
error = u"Failed to decode CSV file using encoding %s" % csvcode
|
|
|
|
except csv.Error, e:
|
|
|
|
error = u"Could not process CSV file: %s" % e
|
2011-07-27 06:43:25 +00:00
|
|
|
|
|
|
|
# If the file contains nothing,
|
2011-09-23 12:22:02 +00:00
|
|
|
if not data:
|
|
|
|
error = u"File to import is empty"
|
|
|
|
if error:
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-23 12:22:02 +00:00
|
|
|
jsonp, simplejson.dumps({'error': {'message': error}}))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
|
|
|
try:
|
2011-09-23 12:22:02 +00:00
|
|
|
(code, record, message, _nope) = modle_obj.import_data(
|
|
|
|
fields, data, 'init', '', False,
|
|
|
|
req.session.eval_context(req.context))
|
2011-08-16 06:08:35 +00:00
|
|
|
except xmlrpclib.Fault, e:
|
2011-09-23 12:22:02 +00:00
|
|
|
error = {"message": u"%s, %s" % (e.faultCode, e.faultString)}
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
|
|
|
jsonp, simplejson.dumps({'error':error}))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
if code != -1:
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-12 12:59:00 +00:00
|
|
|
jsonp, simplejson.dumps({'success':True}))
|
2011-07-27 06:43:25 +00:00
|
|
|
|
2011-09-23 12:22:02 +00:00
|
|
|
msg = u"Error during import: %s\n\nTrying to import record %r" % (
|
|
|
|
message, record)
|
2011-09-09 10:53:07 +00:00
|
|
|
return '<script>window.top.%s(%s);</script>' % (
|
2011-09-23 12:22:02 +00:00
|
|
|
jsonp, simplejson.dumps({'error': {'message':msg}}))
|
2012-08-12 21:48:27 +00:00
|
|
|
|
|
|
|
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
|