2013-07-26 12:33:17 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2013-10-14 15:10:16 +00:00
|
|
|
import fnmatch
|
2013-08-28 09:17:41 +00:00
|
|
|
import functools
|
2013-11-08 13:26:15 +00:00
|
|
|
import inspect
|
2013-11-04 11:10:21 +00:00
|
|
|
import logging
|
|
|
|
import math
|
2013-11-08 13:26:15 +00:00
|
|
|
import itertools
|
2013-11-04 11:10:21 +00:00
|
|
|
import traceback
|
|
|
|
import urllib
|
2013-11-06 15:17:52 +00:00
|
|
|
import urlparse
|
2013-11-04 11:10:21 +00:00
|
|
|
|
2013-11-08 13:26:15 +00:00
|
|
|
import simplejson
|
2013-11-04 11:10:21 +00:00
|
|
|
import werkzeug
|
|
|
|
import werkzeug.exceptions
|
|
|
|
import werkzeug.wrappers
|
2013-07-26 12:33:17 +00:00
|
|
|
|
|
|
|
import openerp
|
2013-11-04 11:10:21 +00:00
|
|
|
from openerp.exceptions import AccessError, AccessDenied
|
2013-11-08 13:26:15 +00:00
|
|
|
from openerp.osv import orm, osv, fields
|
2013-11-04 11:10:21 +00:00
|
|
|
from openerp.tools.safe_eval import safe_eval
|
2013-11-08 13:26:15 +00:00
|
|
|
|
2013-07-26 12:33:17 +00:00
|
|
|
from openerp.addons.web import http
|
|
|
|
from openerp.addons.web.http import request
|
2013-08-14 15:11:22 +00:00
|
|
|
|
2013-11-08 13:26:15 +00:00
|
|
|
|
2013-08-14 15:11:22 +00:00
|
|
|
# Module-level logger named after this module (standard addon convention).
logger = logging.getLogger(__name__)
|
2013-07-26 12:33:17 +00:00
|
|
|
|
2013-09-23 11:59:05 +00:00
|
|
|
def route(routes, *route_args, **route_kwargs):
    """Website-aware variant of :func:`openerp.addons.web.http.route`.

    Wraps a controller method so that, before it runs, the current website
    is attached to the request, the request is preprocessed (language,
    edit flags, ...) and rendering errors are turned into the website's
    401/500 error pages.

    :param routes: a route string or a list of route strings
    :param route_kwargs: forwarded to ``http.route``; two extra keys are
        consumed here: ``multilang`` (also expose each route under a
        ``/<lang>/`` prefix) and ``methods`` (allowed HTTP methods,
        checked manually below).
    """
    def decorator(f):
        new_routes = routes if isinstance(routes, list) else [routes]
        # mark the endpoint so website.rule_is_enumerable() can find it
        f.cms = True
        f.multilang = route_kwargs.get('multilang', False)
        # methods is enforced in wrap() below instead of being passed to werkzeug
        f.methods = route_kwargs.pop('methods', None)
        if f.multilang:
            # 'multilang' is our own flag, don't forward it to http.route
            route_kwargs.pop('multilang')
            # duplicate every route with a 5-char language prefix,
            # e.g. /page -> /<string(length=5):lang_code>/page
            # (iterate over a copy since we append to new_routes)
            for r in list(new_routes):
                new_routes.append('/<string(length=5):lang_code>' + r)
        @http.route(new_routes, *route_args, **route_kwargs)
        @functools.wraps(f, assigned=functools.WRAPPER_ASSIGNMENTS + ('func_name',))
        def wrap(*args, **kwargs):
            # language extracted from the url prefix, if any
            request.route_lang = kwargs.pop('lang_code', None)
            if not hasattr(request, 'website'):
                request.multilang = f.multilang
                # TODO: Select website, currently hard coded
                request.website = request.registry['website'].browse(
                    request.cr, request.uid, 1, context=request.context)

            if request.route_lang:
                # reject url languages that are not enabled on this website
                lang_ok = [lg.code for lg in request.website.language_ids if lg.code == request.route_lang]
                if not lang_ok:
                    return request.not_found()
            request.website.preprocess_request(request)
            # manual method check; werkzeug's HTTPException instances are
            # themselves valid WSGI responses, hence the return (not raise)
            if f.methods and request.httprequest.method not in f.methods:
                return werkzeug.exceptions.MethodNotAllowed(valid_methods=f.methods)
            try:
                return f(*args, **kwargs)
            except Exception, err:
                logger.exception("Website Rendering Error.")
                # anonymous visitors get a 401 page, logged-in users get the
                # 500 page with the traceback and failing controller name
                if request.context['is_public_user']:
                    return request.website.render("website.401")
                else:
                    return request.website.render("website.500", {
                        'traceback': traceback.format_exc(),
                        'controller': [f.__module__, "%s.%s" % (args[0].__class__.__name__, f.__name__)],
                    })
        return wrap
    return decorator
|
2013-07-26 12:33:17 +00:00
|
|
|
|
2013-11-06 15:17:52 +00:00
|
|
|
def url_for(path_or_uri, lang=None, keep_query=None):
    """Return *path_or_uri* adjusted for the current request.

    For local paths (no scheme/netloc) inside a request:
    - resolve relative paths against the current request path,
    - insert or replace the ``/<lang>/`` url prefix when the website is
      multilingual (more than one enabled language) or *lang* is forced,
    - with *keep_query*, re-append the current request's query parameters
      whose names match one of the given fnmatch patterns.

    External urls and calls outside a request are returned unchanged
    (apart from stripping surrounding whitespace).
    """
    location = path_or_uri.strip()
    url = urlparse.urlparse(location)
    # only rewrite website-local urls, and only when inside a request
    if request and not url.netloc and not url.scheme:
        location = urlparse.urljoin(request.httprequest.path, location)
        # assumes preprocess_request() already filled 'langs' in the
        # context -- TODO confirm all callers go through it
        langs = request.context.get('langs')
        if location[0] == '/' and (len(langs) > 1 or lang):
            ps = location.split('/')
            lang = lang or request.context.get('lang')
            # first path segment is either an existing language code to
            # replace, or the language prefix must be inserted before it
            if ps[1] in langs:
                ps[1] = lang
            else:
                ps.insert(1, lang)
            location = '/'.join(ps)
        if keep_query:
            # re-parse: the language rewrite above may have changed location
            url = urlparse.urlparse(location)
            location = url.path
            params = werkzeug.url_decode(url.query)
            query_params = frozenset(werkzeug.url_decode(request.httprequest.query_string).keys())
            # keep_query entries are fnmatch patterns matched against the
            # current request's query parameter names
            for kq in keep_query:
                for param in fnmatch.filter(query_params, kq):
                    params[param] = request.params[param]
            params = werkzeug.urls.url_encode(params)
            if params:
                location += '?%s' % params
    return location
|
2013-08-01 12:48:50 +00:00
|
|
|
|
2013-08-12 13:43:12 +00:00
|
|
|
def urlplus(url, params):
    """Append *params* to *url* as a query string.

    Values are utf-8 encoded before quoting so non-ascii text survives;
    ``urllib.urlencode`` cannot be used because in python 2 it encodes
    values with (ascii, replace). Returns *url* untouched when *params*
    is empty or None.
    """
    if not params:
        return url

    def quote_value(value):
        # unicode values are utf-8 encoded, everything else is stringified
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        else:
            value = str(value)
        return urllib.quote_plus(value)

    pairs = [key + '=' + quote_value(value) for key, value in params.iteritems()]
    return "%s?%s" % (url, '&'.join(pairs))
|
2013-08-12 13:43:12 +00:00
|
|
|
|
2013-08-05 16:16:00 +00:00
|
|
|
class website(osv.osv):
    """Frontend website definition: domain, company, enabled languages and
    social-network accounts, plus the request-level helpers used by the CMS
    controllers (request preprocessing, template rendering, pagination,
    sitemap enumeration and kanban rendering).
    """
    _name = "website" # Avoid website.website convention for conciseness (for new api). Got a special authorization from xmo and rco
    _description = "Website"
    _columns = {
        'name': fields.char('Domain'),
        'company_id': fields.many2one('res.company', string="Company"),
        # languages published on this website; default_lang_id is the
        # "master" language in which content is originally written
        'language_ids': fields.many2many('res.lang', 'website_lang_rel', 'website_id', 'lang_id', 'Languages'),
        'default_lang_id': fields.many2one('res.lang', string="Default language"),
        'social_twitter': fields.char('Twitter Account'),
        'social_facebook': fields.char('Facebook Account'),
        'social_github': fields.char('GitHub Account'),
        'social_linkedin': fields.char('LinkedIn Account'),
        'social_youtube': fields.char('Youtube Account'),
        'social_googleplus': fields.char('Google+ Account'),
    }

    # Class-level cache of the public user's browse record.
    # NOTE(review): shared by every database of a multi-db server and never
    # invalidated -- confirm this is intended.
    public_user = None

    def get_public_user(self, cr, uid, context=None):
        """Return (and lazily cache) the browse record of the user referenced
        by the ``website.public_user`` xmlid."""
        if not self.public_user:
            # resolve as superuser so the lookup cannot be blocked by ACLs
            uid = openerp.SUPERUSER_ID
            ref = self.pool['ir.model.data'].get_object_reference(cr, uid, 'website', 'public_user')
            self.public_user = self.pool[ref[0]].browse(cr, uid, ref[1])
        return self.public_user

    def preprocess_request(self, cr, uid, ids, request, context=None):
        """Prepare *request* before dispatching to a website controller:
        install a ``redirect`` helper, pick the current language and fill
        ``request.context`` with the flags the templates rely on
        (``editable``, ``translatable``, ``langs``, ...)."""
        def redirect(url):
            # go through url_for so the language prefix is preserved
            return werkzeug.utils.redirect(url_for(url))
        request.redirect = redirect

        is_public_user = request.uid == self.get_public_user(cr, uid, context).id

        # Select current language: url prefix wins over the ?lang= request
        # parameter, which wins over the 'lang' cookie
        if hasattr(request, 'route_lang'):
            lang = request.route_lang
        else:
            lang = request.params.get('lang', None) or request.httprequest.cookies.get('lang', None)
        # fall back to the website default when the requested language is
        # not enabled on this website
        if lang not in [lg.code for lg in request.website.language_ids]:
            lang = request.website.default_lang_id.code

        is_master_lang = lang == request.website.default_lang_id.code
        request.context.update({
            'lang': lang,
            'lang_selected': [lg for lg in request.website.language_ids if lg.code == lang],
            'langs': [lg.code for lg in request.website.language_ids],
            'multilang': request.multilang,
            'is_public_user': is_public_user,
            'is_master_lang': is_master_lang,
            # anonymous visitors cannot edit; translation mode only applies
            # to a non-master language of a multilang page
            'editable': not is_public_user,
            'translatable': not is_public_user and not is_master_lang and request.multilang,
        })

    def render(self, cr, uid, ids, template, values=None, context=None):
        """Render *template* (an ``ir.ui.view`` xmlid, defaulting to the
        ``website`` module when unqualified) with the website qweb engine.

        *values* is merged into the qweb evaluation context on top of
        *context*; rendering failures are routed to the 401/404/500 error
        pages via :meth:`error`.
        """
        view = self.pool.get("ir.ui.view")
        IMD = self.pool.get("ir.model.data")
        user = self.pool.get("res.users")

        if not context:
            context = {}

        # qweb evaluates expressions against this dict; start from the
        # request context and layer caller values plus standard helpers
        qweb_context = context.copy()

        if values:
            qweb_context.update(values)

        qweb_context.update(
            request=request, # TODO maybe rename to _request to mark this attribute as unsafe
            json=simplejson,
            website=request.website,
            url_for=url_for,
            res_company=request.website.company_id,
            user_id=user.browse(cr, uid, uid),
        )

        # propagate edit mode to the view engine so it adds branding
        context.update(
            inherit_branding=qweb_context.setdefault('editable', False),
        )

        view_ref = None
        # check if xmlid of the template exists
        try:
            module, xmlid = template.split('.', 1)
            view_ref = IMD.get_object_reference(cr, uid, module, xmlid)
        except ValueError: # catches both unpack errors and gor errors
            # retry with the implicit 'website' module prefix
            module, xmlid = 'website', template
            try:
                view_ref = IMD.get_object_reference(cr, uid, module, xmlid)
            except ValueError:
                return self.error(cr, uid, 404, qweb_context, context=context)

        # expose the record behind the template as main_object unless the
        # caller already provided one; best-effort only
        if 'main_object' not in qweb_context:
            try:
                main_object = self.pool[view_ref[0]].browse(cr, uid, view_ref[1])
                qweb_context['main_object'] = main_object
            except Exception:
                pass

        try:
            return view.render(
                cr, uid, "%s.%s" % (module, xmlid), qweb_context,
                engine='website.qweb', context=context)
        except (AccessError, AccessDenied), err:
            logger.error(err)
            qweb_context['error'] = err[1]
            logger.warn("Website Rendering Error.\n\n%s" % traceback.format_exc())
            return self.error(cr, uid, 401, qweb_context, context=context)
        except Exception, e:
            # collect the qweb_* diagnostic attributes the qweb engine
            # attaches to rendering exceptions
            qweb_context['template'] = getattr(e, 'qweb_template', '')
            node = getattr(e, 'qweb_node', None)
            qweb_context['node'] = node and node.toxml()
            qweb_context['expr'] = getattr(e, 'qweb_eval', '')
            qweb_context['traceback'] = traceback.format_exc()
            logger.exception("Website Rendering Error.\n%(template)s\n%(expr)s\n%(node)s" % qweb_context)
            # only editors see the 500 page with the traceback
            return self.error(cr, uid, 500 if qweb_context['editable'] else 404,
                              qweb_context, context=context)

    def error(self, cr, uid, code, qweb_context, context=None):
        """Render the ``website.<code>`` error template as an HTTP response
        with the matching status code."""
        View = request.registry['ir.ui.view']
        return werkzeug.wrappers.Response(
            View.render(cr, uid, 'website.%d' % code, qweb_context),
            status=code,
            content_type='text/html;charset=utf-8')

    def pager(self, cr, uid, ids, url, total, page=1, step=30, scope=5, url_args=None, context=None):
        """Build the data used by the pagination template.

        :param url: base url; page urls are ``<url>page/<n>/``
        :param total: total number of records
        :param page: requested page, clamped to [1, page_count]
        :param step: records per page
        :param scope: number of page links shown around the current page
        :param url_args: extra query parameters appended to each page url
        :returns: dict with page_count, offset, current/first/prev/next/last
            page descriptors and the list of visible pages
        """
        # Compute Pager
        page_count = int(math.ceil(float(total) / step))

        page = max(1, min(int(page), page_count))
        scope -= 1

        # window of visible pages, centered on the current one and clamped
        # to the valid range
        pmin = max(page - int(math.floor(scope/2)), 1)
        pmax = min(pmin + scope, page_count)

        if pmax - pmin < scope:
            pmin = pmax - scope if pmax - scope > 0 else 1

        def get_url(page):
            _url = "%spage/%s/" % (url, page)
            if url_args:
                _url = "%s?%s" % (_url, urllib.urlencode(url_args))
            return _url

        return {
            "page_count": page_count,
            "offset": (page - 1) * step,
            "page": {
                'url': get_url(page),
                'num': page
            },
            "page_start": {
                'url': get_url(pmin),
                'num': pmin
            },
            "page_previous": {
                'url': get_url(max(pmin, page - 1)),
                'num': max(pmin, page - 1)
            },
            "page_next": {
                'url': get_url(min(pmax, page + 1)),
                'num': min(pmax, page + 1)
            },
            "page_end": {
                'url': get_url(pmax),
                'num': pmax
            },
            "pages": [
                {'url': get_url(page), 'num': page}
                for page in xrange(pmin, pmax+1)
            ]
        }

    def rule_is_enumerable(self, rule):
        """ Checks that it is possible to generate sensible GET queries for
        a given rule (if the endpoint matches its own requirements)

        :type rule: werkzeug.routing.Rule
        :rtype: bool
        """
        endpoint = rule.endpoint
        methods = rule.methods or ['GET']

        return (
            'GET' in methods
            and endpoint.exposed == 'http'
            and endpoint.auth in ('none', 'public')
            # f.cms set by the @route decorator above
            and getattr(endpoint, 'cms', False)
            # ensure all converters on the rule are able to generate values for
            # themselves
            and all(hasattr(converter, 'generate')
                    for converter in rule._converters.itervalues())
        ) and self.endpoint_is_enumerable(rule)

    def endpoint_is_enumerable(self, rule):
        """ Verifies that it's possible to generate a valid url for the rule's
        endpoint

        :type rule: werkzeug.routing.Rule
        :rtype: bool
        """

        # apparently the decorator package makes getargspec work correctly
        # on functions it decorates. That's not the case for
        # @functools.wraps, so hack around to get the original function
        # (and hope a single decorator was applied or we're hosed)
        # FIXME: this is going to blow up if we want/need to use multiple @route (with various configurations) on a method
        undecorated_func = rule.endpoint.func_closure[0].cell_contents

        # If this is ever ported to py3, use signatures, it doesn't suck as much
        spec = inspect.getargspec(undecorated_func)

        # if *args or **kwargs, just bail the fuck out, only dragons can
        # live there
        if spec.varargs or spec.keywords:
            return False

        # remove all arguments with a default value from the list
        defaults_count = len(spec.defaults or []) # spec.defaults can be None
        # a[:-0] ~ a[:0] ~ [] -> replace defaults_count == 0 by None to get
        # a[:None] ~ a
        args = spec.args[:(-defaults_count or None)]

        # params with defaults were removed, leftover allowed are:
        # * self (technically should be first-parameter-of-instance-method but whatever)
        # * any parameter mapping to a converter
        return all(
            (arg == 'self' or arg in rule._converters)
            for arg in args)

    def list_pages(self, cr, uid, ids, context=None):
        """ Available pages in the website/CMS. This is mostly used for links
        generation and can be overridden by modules setting up new HTML
        controllers for dynamic pages (e.g. blog).

        By default, returns template views marked as pages.

        :returns: a list of mappings with two keys: ``name`` is the displayable
                  name of the resource (page), ``url`` is the absolute URL
                  of the same.
        :rtype: list({name: str, url: str})
        """
        # FIXME: possibility to add custom converters without editing server
        # would allow the creation of a pages converter generating page
        # urls on its own
        View = self.pool['ir.ui.view']
        views = View.search_read(cr, uid, [['page', '=', True]],
                                 fields=['name'], order='name', context=context)
        xids = View.get_external_id(cr, uid, [view['id'] for view in views], context=context)
        # first: static template pages (only those with an external id,
        # since the url is built from the xmlid)
        for view in views:
            if xids[view['id']]:
                yield {
                    'name': view['name'],
                    'url': '/page/' + xids[view['id']],
                }

        # second: enumerable controller routes; every combination of
        # converter-generated values yields one concrete url
        router = request.httprequest.app.get_db_router(request.db)
        for rule in router.iter_rules():
            if not self.rule_is_enumerable(rule):
                continue

            generated = map(dict, itertools.product(*(
                itertools.izip(itertools.repeat(name), converter.generate())
                for name, converter in rule._converters.iteritems()
            )))

            for values in generated:
                domain_part, url = rule.build(values, append_unknown=False)
                yield {'name': url, 'url': url }

    def kanban(self, cr, uid, ids, model, domain, column, template, step=None, scope=None, orderby=None, context=None):
        """Render the ``website.kanban_contain`` template: records of *model*
        matching *domain*, grouped in columns by the many2one field *column*,
        each column paginated independently.

        The per-column page numbers travel in the ``kanban`` query parameter
        as comma-separated ``<column_id>-<page>`` pairs.
        """
        step = step and int(step) or 10
        scope = scope and int(scope) or 5
        orderby = orderby or "name"

        get_args = dict(request.httprequest.args or {})
        model_obj = self.pool[model]
        # comodel of the grouping column, used to browse column headers
        relation = model_obj._columns.get(column)._obj
        relation_obj = self.pool[relation]

        # base url that preserves the other query args; each column appends
        # its own <id>-<page> pairs to it
        get_args.setdefault('kanban', "")
        kanban = get_args.pop('kanban')
        kanban_url = "?%s&kanban=" % urllib.urlencode(get_args)

        # decode current per-column pages: {column_id: page}
        pages = {}
        for col in kanban.split(","):
            if col:
                col = col.split("-")
                pages[int(col[0])] = int(col[1])

        objects = []
        for group in model_obj.read_group(cr, uid, domain, ["id", column], groupby=column):
            obj = {}

            # browse column
            relation_id = group[column][0]
            obj['column_id'] = relation_obj.browse(cr, uid, relation_id)

            # keep the other columns' page selections in this column's url
            obj['kanban_url'] = kanban_url
            for k, v in pages.items():
                if k != relation_id:
                    obj['kanban_url'] += "%s-%s" % (k, v)

            # pager
            number = model_obj.search(cr, uid, group['__domain'], count=True)
            obj['page_count'] = int(math.ceil(float(number) / step))
            obj['page'] = pages.get(relation_id) or 1
            if obj['page'] > obj['page_count']:
                obj['page'] = obj['page_count']
            offset = (obj['page']-1) * step
            obj['page_start'] = max(obj['page'] - int(math.floor((scope-1)/2)), 1)
            obj['page_end'] = min(obj['page_start'] + (scope-1), obj['page_count'])

            # view data
            obj['domain'] = group['__domain']
            obj['model'] = model
            obj['step'] = step
            obj['orderby'] = orderby

            # browse objects
            object_ids = model_obj.search(cr, uid, group['__domain'], limit=step, offset=offset, order=orderby)
            obj['object_ids'] = model_obj.browse(cr, uid, object_ids)

            objects.append(obj)

        values = {
            'objects': objects,
            'range': range,
            'template': template,
        }
        return request.website.render("website.kanban_contain", values)

    def kanban_col(self, cr, uid, ids, model, domain, page, template, step, orderby, context=None):
        """Render one kanban column: the requested page of *model* records
        matching *domain* (a string, evaluated with safe_eval), one
        *template* rendering per record, concatenated."""
        html = ""
        model_obj = self.pool[model]
        domain = safe_eval(domain)
        step = int(step)
        offset = (int(page)-1) * step
        object_ids = model_obj.search(cr, uid, domain, limit=step, offset=offset, order=orderby)
        object_ids = model_obj.browse(cr, uid, object_ids)
        for object_id in object_ids:
            html += request.website.render(template, {'object_id': object_id})
        return html

    def get_menu(self, cr, uid, ids, context=None):
        """Return the root menu (browse record) of the first website in *ids*,
        delegating to website.menu.get_menu()."""
        return self.pool['website.menu'].get_menu(cr, uid, ids[0], context=context)
|
2013-10-24 14:18:40 +00:00
|
|
|
|
|
|
|
class website_menu(osv.osv):
    """Hierarchical website menu entry, stored with parent_left/parent_right
    (nested-set) ordering for fast subtree reads."""
    _name = "website.menu"
    _description = "Website Menu"
    _columns = {
        'name': fields.char('Menu', size=64, required=True, translate=True),
        'url': fields.char('Url', required=True, translate=True),
        # open the link in a new browser window/tab
        'new_window': fields.boolean('New Window'),
        'sequence': fields.integer('Sequence'),
        # TODO: support multiwebsite once done for ir.ui.views
        'website_id': fields.many2one('website', 'Website'),
        'parent_id': fields.many2one('website.menu', 'Parent Menu', select=True, ondelete="cascade"),
        'child_id': fields.one2many('website.menu', 'parent_id', string='Child Menus'),
        # nested-set bounds maintained by the ORM (_parent_store)
        'parent_left': fields.integer('Parent Left', select=True),
        'parent_right': fields.integer('Parent Right', select=True),
    }
    _defaults = {
        'url': '',
        'sequence': 0,
    }
    _parent_store = True
    _parent_order = 'sequence, name'
    _order = "parent_left"

    def get_menu(self, cr, uid, website_id, context=None):
        """Return the browse record of the first root menu (no parent).

        NOTE(review): *website_id* is currently ignored -- the website_id
        domain term is commented out below; confirm before relying on
        per-website menus.
        """
        root_domain = [('parent_id', '=', False)] # ('website_id', '=', website_id),
        menu_ids = self.search(cr, uid, root_domain, context=context)
        menu = self.browse(cr, uid, menu_ids, context=context)
        return menu[0]

    def get_tree(self, cr, uid, website_id, context=None):
        """Serialize the menu hierarchy rooted at get_menu() into nested
        dicts (id, name, url, new_window, sequence, parent_id, children)."""
        def make_tree(node):
            menu_node = dict(
                id=node.id,
                name=node.name,
                url=node.url,
                new_window=node.new_window,
                sequence=node.sequence,
                parent_id=node.parent_id.id,
                children=[],
            )
            for child in node.child_id:
                menu_node['children'].append(make_tree(child))
            return menu_node
        menu = self.get_menu(cr, uid, website_id, context=context)
        return make_tree(menu)

    def save(self, cr, uid, website_id, data, context=None):
        """Apply a batch of menu edits coming from the menu editor.

        *data* is a dict with ``to_delete`` (list of ids to unlink) and
        ``data`` (list of menu value dicts); entries whose id is a string
        are client-side placeholders for new records.
        """
        def replace_id(old_id, new_id):
            # rewrite a client-side placeholder id (and references to it as
            # parent_id) with the real database id
            for menu in data['data']:
                if menu['id'] == old_id:
                    menu['id'] = new_id
                if menu['parent_id'] == old_id:
                    menu['parent_id'] = new_id
        to_delete = data['to_delete']
        if to_delete:
            self.unlink(cr, uid, to_delete, context=context)
        for menu in data['data']:
            mid = menu['id']
            # NOTE(review): checks str only -- JSON-decoded ids may be
            # unicode in python 2, which would skip creation; confirm
            # against what the client actually sends
            if isinstance(mid, str):
                new_id = self.create(cr, uid, {'name': menu['name']}, context=context)
                replace_id(mid, new_id)
        for menu in data['data']:
            self.write(cr, uid, [menu['id']], menu, context=context)
        return True
|
|
|
|
|
2013-09-20 05:10:29 +00:00
|
|
|
class ir_attachment(osv.osv):
    """Extend attachments with a computed public website URL."""
    _inherit = "ir.attachment"

    def _website_url_get(self, cr, uid, ids, name, arg, context=None):
        """Function-field getter: map each attachment id to a public URL.

        URL-type attachments expose their stored url as-is; binary ones go
        through the /website/image controller (resized to 1024x768 max).
        """
        urls = {}
        for attachment in self.browse(cr, uid, ids, context=context):
            if attachment.type == 'url':
                urls[attachment.id] = attachment.url
            else:
                urls[attachment.id] = urlplus('/website/image', {
                    'model': 'ir.attachment',
                    'field': 'datas',
                    'id': attachment.id,
                    'max_width': 1024,
                    'max_height': 768,
                })
        return urls

    _columns = {
        'website_url': fields.function(_website_url_get, string="Attachment URL", type='char')
    }
|
|
|
|
|
2013-08-12 16:31:23 +00:00
|
|
|
class res_partner(osv.osv):
    """Extend partners with Google Maps helpers used by website templates."""
    _inherit = "res.partner"

    def _map_address(self, partner):
        """Format a partner's address the way the Google Maps APIs expect it."""
        country_name = partner.country_id and partner.country_id.name_get()[0][1] or ''
        return '%s, %s %s, %s' % (partner.street, partner.city, partner.zip, country_name)

    def google_map_img(self, cr, uid, ids, zoom=8, width=298, height=298, context=None):
        """Return a Google Static Maps image url centered on the first
        partner's address."""
        partner = self.browse(cr, uid, ids[0], context=context)
        query = {
            'center': self._map_address(partner),
            # NOTE(review): height comes before width here -- confirm
            # against the Static Maps 'size' parameter (widthxheight)
            'size': "%sx%s" % (height, width),
            'zoom': zoom,
            'sensor': 'false',
        }
        return urlplus('http://maps.googleapis.com/maps/api/staticmap' , query)

    def google_map_link(self, cr, uid, ids, zoom=8, context=None):
        """Return a Google Maps search link for the first partner's address."""
        partner = self.browse(cr, uid, ids[0], context=context)
        query = {
            'q': self._map_address(partner),
        }
        return urlplus('https://maps.google.be/maps' , query)
|
2013-10-09 12:09:04 +00:00
|
|
|
|
2013-11-04 16:59:54 +00:00
|
|
|
class res_company(osv.osv):
    """Delegate the Google Maps helpers to the company's partner record."""
    _inherit = "res.company"

    def google_map_img(self, cr, uid, ids, zoom=8, width=298, height=298, context=None):
        """Static map image url for the first company's address, or None
        when the company has no partner."""
        # browse as superuser: public visitors may not read res.company
        company = self.browse(cr, openerp.SUPERUSER_ID, ids[0], context=context)
        partner = company.partner_id
        if partner:
            return partner.google_map_img(zoom, width, height, context=context)
        return None

    def google_map_link(self, cr, uid, ids, zoom=8, context=None):
        """Google Maps link for the first company's address, or None when
        the company has no partner."""
        company = self.browse(cr, openerp.SUPERUSER_ID, ids[0], context=context)
        partner = company.partner_id
        if partner:
            return partner.google_map_link(zoom, context=context)
        return None
|
|
|
|
|
2013-10-09 12:09:04 +00:00
|
|
|
class base_language_install(osv.osv):
    """Extend the language-install wizard so the new language can also be
    enabled on selected websites."""
    _inherit = "base.language.install"
    _columns = {
        'website_ids': fields.many2many('website', string='Websites to translate'),
    }

    def default_get(self, cr, uid, fields, context=None):
        """Preselect the website given in context['params']['website_id'],
        on top of the standard wizard defaults."""
        if context is None:
            context = {}
        values = super(base_language_install, self).default_get(cr, uid, fields, context)
        website_id = context.get('params', {}).get('website_id')
        if website_id:
            values.setdefault('website_ids', []).append(website_id)
        return values

    def lang_install(self, cr, uid, ids, context=None):
        """Install the language, link it to the selected websites, then
        either redirect to context['params']['url_return'] (with '[lang]'
        substituted) or return the standard wizard action."""
        if context is None:
            context = {}
        action = super(base_language_install, self).lang_install(cr, uid, ids, context)
        wizard = self.browse(cr, uid, ids)[0]
        selected_websites = [website.id for website in wizard['website_ids']]
        lang_id = self.pool['res.lang'].search(cr, uid, [('code', '=', wizard['lang'])])
        if selected_websites and lang_id:
            # (4, id) -> add the language to each website's language_ids
            self.pool['website'].write(cr, uid, selected_websites,
                                       {'language_ids': [(4, lang_id[0])]})
        params = context.get('params', {})
        if 'url_return' in params:
            return {
                'url': params['url_return'].replace('[lang]', wizard['lang']),
                'type': 'ir.actions.act_url',
                'target': 'self'
            }
        return action
|
2013-10-18 09:28:20 +00:00
|
|
|
|
|
|
|
class SeoMetadata(osv.Model):
    """Mixin adding translatable SEO meta tags (title, description,
    keywords) to any model that inherits it; rendered into the page <head>
    by the website layout."""
    _name = 'website.seo.metadata'
    _description = 'SEO metadata'

    _columns = {
        # sizes follow common search-engine snippet limits (70 / 160 chars)
        'website_meta_title': fields.char("Website meta title", size=70, translate=True),
        'website_meta_description': fields.text("Website meta description", size=160, translate=True),
        'website_meta_keywords': fields.char("Website meta keywords", translate=True),
    }
|