2013-06-24 13:36:54 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2013-09-03 14:32:37 +00:00
|
|
|
import cStringIO
|
2014-05-16 13:31:26 +00:00
|
|
|
import datetime
|
2014-05-15 14:58:16 +00:00
|
|
|
from itertools import islice
|
2013-07-31 12:13:43 +00:00
|
|
|
import json
|
2014-08-04 10:25:59 +00:00
|
|
|
import xml.etree.ElementTree as ET
|
|
|
|
|
2013-07-31 12:13:43 +00:00
|
|
|
import logging
|
2014-04-25 08:35:21 +00:00
|
|
|
import re
|
2013-10-02 09:59:17 +00:00
|
|
|
|
2013-09-03 14:32:37 +00:00
|
|
|
import werkzeug.utils
|
2014-08-04 10:25:59 +00:00
|
|
|
import urllib2
|
2013-09-03 14:32:37 +00:00
|
|
|
import werkzeug.wrappers
|
2013-08-14 08:45:05 +00:00
|
|
|
from PIL import Image
|
2013-07-24 08:59:21 +00:00
|
|
|
|
2013-06-24 13:36:54 +00:00
|
|
|
import openerp
|
2015-02-22 20:14:15 +00:00
|
|
|
from openerp.addons.web.controllers.main import WebClient
|
2013-06-24 13:36:54 +00:00
|
|
|
from openerp.addons.web import http
|
2014-09-05 16:20:55 +00:00
|
|
|
from openerp.http import request, STATIC_CACHE
|
2014-09-08 13:54:38 +00:00
|
|
|
from openerp.tools import image_save_for_web
|
2013-06-24 13:36:54 +00:00
|
|
|
|
2013-11-15 13:26:26 +00:00
|
|
|
logger = logging.getLogger(__name__)

# Completely arbitrary limits
MAX_IMAGE_WIDTH, MAX_IMAGE_HEIGHT = IMAGE_LIMITS = (1024, 768)

# Number of location entries per generated sitemap file.
LOC_PER_SITEMAP = 45000
# How long a generated sitemap attachment stays valid before regeneration.
SITEMAP_CACHE_TIME = datetime.timedelta(hours=12)
|
2014-02-05 09:04:47 +00:00
|
|
|
|
2013-06-24 13:36:54 +00:00
|
|
|
class Website(openerp.addons.web.controllers.main.Home):
|
2014-01-29 16:38:09 +00:00
|
|
|
#------------------------------------------------------
|
|
|
|
# View
|
|
|
|
#------------------------------------------------------
|
2014-05-13 09:35:45 +00:00
|
|
|
@http.route('/', type='http', auth="public", website=True)
|
2013-06-24 13:36:54 +00:00
|
|
|
def index(self, **kw):
|
2014-05-11 15:32:35 +00:00
|
|
|
page = 'homepage'
|
2013-11-27 10:43:07 +00:00
|
|
|
try:
|
|
|
|
main_menu = request.registry['ir.model.data'].get_object(request.cr, request.uid, 'website', 'main_menu')
|
2014-06-01 12:30:15 +00:00
|
|
|
except Exception:
|
|
|
|
pass
|
|
|
|
else:
|
2013-11-27 10:43:07 +00:00
|
|
|
first_menu = main_menu.child_id and main_menu.child_id[0]
|
2014-05-11 15:32:35 +00:00
|
|
|
if first_menu:
|
2014-05-11 15:40:58 +00:00
|
|
|
if not (first_menu.url.startswith(('/page/', '/?', '/#')) or (first_menu.url=='/')):
|
2014-05-11 15:32:35 +00:00
|
|
|
return request.redirect(first_menu.url)
|
2014-05-11 15:40:58 +00:00
|
|
|
if first_menu.url.startswith('/page/'):
|
2014-07-09 08:39:00 +00:00
|
|
|
return request.registry['ir.http'].reroute(first_menu.url)
|
2014-05-11 15:32:35 +00:00
|
|
|
return self.page(page)
|
2013-06-24 13:36:54 +00:00
|
|
|
|
2014-05-13 09:35:45 +00:00
|
|
|
@http.route(website=True, auth="public")
|
2014-01-16 18:17:46 +00:00
|
|
|
def web_login(self, *args, **kw):
|
2014-02-27 10:17:25 +00:00
|
|
|
# TODO: can't we just put auth=public, ... in web client ?
|
2014-02-11 11:23:02 +00:00
|
|
|
return super(Website, self).web_login(*args, **kw)
|
2014-01-16 18:17:46 +00:00
|
|
|
|
2015-03-26 09:24:56 +00:00
|
|
|
@http.route('/website/lang/<lang>', type='http', auth="public", website=True, multilang=False)
|
2015-04-08 18:14:25 +00:00
|
|
|
def change_lang(self, lang, r='/', **kwargs):
|
|
|
|
if lang == 'default':
|
|
|
|
lang = request.website.default_lang_code
|
|
|
|
r = '/%s%s' % (lang, r or '/')
|
|
|
|
redirect = werkzeug.utils.redirect(r or ('/%s' % lang), 303)
|
2015-03-26 09:24:56 +00:00
|
|
|
redirect.set_cookie('website_lang', lang)
|
|
|
|
return redirect
|
|
|
|
|
2014-06-17 11:43:57 +00:00
|
|
|
@http.route('/page/<page:page>', type='http', auth="public", website=True)
|
2014-01-29 16:38:09 +00:00
|
|
|
def page(self, page, **opt):
|
|
|
|
values = {
|
|
|
|
'path': page,
|
|
|
|
}
|
2014-08-26 08:29:18 +00:00
|
|
|
# /page/website.XXX --> /page/XXX
|
|
|
|
if page.startswith('website.'):
|
2014-08-26 09:48:21 +00:00
|
|
|
return request.redirect('/page/' + page[8:], code=301)
|
2014-08-26 08:29:18 +00:00
|
|
|
elif '.' not in page:
|
2014-01-29 16:38:09 +00:00
|
|
|
page = 'website.%s' % page
|
|
|
|
|
|
|
|
try:
|
|
|
|
request.website.get_template(page)
|
|
|
|
except ValueError, e:
|
|
|
|
# page not found
|
2014-05-05 16:38:41 +00:00
|
|
|
if request.website.is_publisher():
|
2014-01-29 16:38:09 +00:00
|
|
|
page = 'website.page_404'
|
|
|
|
else:
|
|
|
|
return request.registry['ir.http']._handle_exception(e, 404)
|
|
|
|
|
2014-02-19 10:30:32 +00:00
|
|
|
return request.render(page, values)
|
2014-01-29 16:38:09 +00:00
|
|
|
|
2014-05-11 11:52:31 +00:00
|
|
|
@http.route(['/robots.txt'], type='http', auth="public")
|
2014-01-29 16:38:09 +00:00
|
|
|
def robots(self):
|
2014-02-19 10:30:32 +00:00
|
|
|
return request.render('website.robots', {'url_root': request.httprequest.url_root}, mimetype='text/plain')
|
2014-01-29 16:38:09 +00:00
|
|
|
|
|
|
|
@http.route('/sitemap.xml', type='http', auth="public", website=True)
|
2014-05-11 11:52:31 +00:00
|
|
|
def sitemap_xml_index(self):
|
2014-05-16 13:31:26 +00:00
|
|
|
cr, uid, context = request.cr, openerp.SUPERUSER_ID, request.context
|
|
|
|
ira = request.registry['ir.attachment']
|
|
|
|
iuv = request.registry['ir.ui.view']
|
|
|
|
mimetype ='application/xml;charset=utf-8'
|
|
|
|
content = None
|
|
|
|
|
2014-05-19 11:59:38 +00:00
|
|
|
def create_sitemap(url, content):
|
2014-05-16 13:31:26 +00:00
|
|
|
ira.create(cr, uid, dict(
|
|
|
|
datas=content.encode('base64'),
|
|
|
|
mimetype=mimetype,
|
|
|
|
type='binary',
|
|
|
|
name=url,
|
|
|
|
url=url,
|
|
|
|
), context=context)
|
|
|
|
|
|
|
|
sitemap = ira.search_read(cr, uid, [('url', '=' , '/sitemap.xml'), ('type', '=', 'binary')], ('datas', 'create_date'), context=context)
|
|
|
|
if sitemap:
|
|
|
|
# Check if stored version is still valid
|
|
|
|
server_format = openerp.tools.misc.DEFAULT_SERVER_DATETIME_FORMAT
|
|
|
|
create_date = datetime.datetime.strptime(sitemap[0]['create_date'], server_format)
|
|
|
|
delta = datetime.datetime.now() - create_date
|
|
|
|
if delta < SITEMAP_CACHE_TIME:
|
|
|
|
content = sitemap[0]['datas'].decode('base64')
|
|
|
|
|
|
|
|
if not content:
|
|
|
|
# Remove all sitemaps in ir.attachments as we're going to regenerated them
|
|
|
|
sitemap_ids = ira.search(cr, uid, [('url', '=like' , '/sitemap%.xml'), ('type', '=', 'binary')], context=context)
|
|
|
|
if sitemap_ids:
|
|
|
|
ira.unlink(cr, uid, sitemap_ids, context=context)
|
|
|
|
|
|
|
|
pages = 0
|
|
|
|
first_page = None
|
2015-07-08 12:14:11 +00:00
|
|
|
locs = request.website.sudo(user=request.website.user_id.id).enumerate_pages()
|
2014-05-16 13:31:26 +00:00
|
|
|
while True:
|
|
|
|
start = pages * LOC_PER_SITEMAP
|
2014-05-26 12:58:30 +00:00
|
|
|
values = {
|
|
|
|
'locs': islice(locs, start, start + LOC_PER_SITEMAP),
|
|
|
|
'url_root': request.httprequest.url_root[:-1],
|
|
|
|
}
|
|
|
|
urls = iuv.render(cr, uid, 'website.sitemap_locs', values, context=context)
|
2014-05-16 13:31:26 +00:00
|
|
|
if urls.strip():
|
|
|
|
page = iuv.render(cr, uid, 'website.sitemap_xml', dict(content=urls), context=context)
|
|
|
|
if not first_page:
|
|
|
|
first_page = page
|
|
|
|
pages += 1
|
2014-05-19 11:59:38 +00:00
|
|
|
create_sitemap('/sitemap-%d.xml' % pages, page)
|
2014-05-16 13:31:26 +00:00
|
|
|
else:
|
|
|
|
break
|
|
|
|
if not pages:
|
|
|
|
return request.not_found()
|
|
|
|
elif pages == 1:
|
|
|
|
content = first_page
|
|
|
|
else:
|
|
|
|
# Sitemaps must be split in several smaller files with a sitemap index
|
|
|
|
content = iuv.render(cr, uid, 'website.sitemap_index_xml', dict(
|
|
|
|
pages=range(1, pages + 1),
|
|
|
|
url_root=request.httprequest.url_root,
|
|
|
|
), context=context)
|
2014-05-19 11:59:38 +00:00
|
|
|
create_sitemap('/sitemap.xml', content)
|
2014-05-11 11:52:31 +00:00
|
|
|
|
2014-05-16 13:31:26 +00:00
|
|
|
return request.make_response(content, [('Content-Type', mimetype)])
|
2014-01-29 16:38:09 +00:00
|
|
|
|
2014-08-26 19:40:18 +00:00
|
|
|
@http.route('/website/info', type='http', auth="public", website=True)
|
|
|
|
def website_info(self):
|
|
|
|
try:
|
|
|
|
request.website.get_template('website.info').name
|
|
|
|
except Exception, e:
|
|
|
|
return request.registry['ir.http']._handle_exception(e, 404)
|
|
|
|
irm = request.env()['ir.module.module'].sudo()
|
|
|
|
apps = irm.search([('state','=','installed'),('application','=',True)])
|
|
|
|
modules = irm.search([('state','=','installed'),('application','=',False)])
|
|
|
|
values = {
|
|
|
|
'apps': apps,
|
|
|
|
'modules': modules,
|
|
|
|
'version': openerp.service.common.exp_version()
|
|
|
|
}
|
|
|
|
return request.render('website.info', values)
|
|
|
|
|
2014-01-29 16:38:09 +00:00
|
|
|
#------------------------------------------------------
|
|
|
|
# Edit
|
|
|
|
#------------------------------------------------------
|
|
|
|
@http.route('/website/add/<path:path>', type='http', auth="user", website=True)
|
2014-02-13 15:27:28 +00:00
|
|
|
def pagenew(self, path, noredirect=False, add_menu=None):
|
2014-01-28 14:00:17 +00:00
|
|
|
xml_id = request.registry['website'].new_page(request.cr, request.uid, path, context=request.context)
|
2014-02-13 15:27:28 +00:00
|
|
|
if add_menu:
|
|
|
|
model, id = request.registry["ir.model.data"].get_object_reference(request.cr, request.uid, 'website', 'main_menu')
|
|
|
|
request.registry['website.menu'].create(request.cr, request.uid, {
|
|
|
|
'name': path,
|
2015-08-14 14:57:53 +00:00
|
|
|
'url': "/page/" + xml_id[8:],
|
2014-02-13 15:27:28 +00:00
|
|
|
'parent_id': id,
|
|
|
|
}, context=request.context)
|
2014-04-25 08:35:21 +00:00
|
|
|
# Reverse action in order to allow shortcut for /page/<website_xml_id>
|
|
|
|
url = "/page/" + re.sub(r"^website\.", '', xml_id)
|
|
|
|
|
2014-01-28 14:00:17 +00:00
|
|
|
if noredirect:
|
2013-09-03 14:32:37 +00:00
|
|
|
return werkzeug.wrappers.Response(url, mimetype='text/plain')
|
|
|
|
return werkzeug.utils.redirect(url)
|
2013-07-02 19:11:30 +00:00
|
|
|
|
2014-01-29 16:08:39 +00:00
|
|
|
@http.route('/website/theme_change', type='http', auth="user", website=True)
|
2013-08-27 09:56:58 +00:00
|
|
|
def theme_change(self, theme_id=False, **kwargs):
|
|
|
|
imd = request.registry['ir.model.data']
|
2014-05-27 09:54:01 +00:00
|
|
|
Views = request.registry['ir.ui.view']
|
2013-08-27 09:56:58 +00:00
|
|
|
|
2014-05-27 09:54:01 +00:00
|
|
|
_, theme_template_id = imd.get_object_reference(
|
2013-09-09 10:00:59 +00:00
|
|
|
request.cr, request.uid, 'website', 'theme')
|
2014-05-27 09:54:01 +00:00
|
|
|
views = Views.search(request.cr, request.uid, [
|
|
|
|
('inherit_id', '=', theme_template_id),
|
|
|
|
], context=request.context)
|
|
|
|
Views.write(request.cr, request.uid, views, {
|
2014-08-31 14:56:44 +00:00
|
|
|
'active': False,
|
|
|
|
}, context=dict(request.context or {}, active_test=True))
|
2013-08-27 09:56:58 +00:00
|
|
|
|
|
|
|
if theme_id:
|
|
|
|
module, xml_id = theme_id.split('.')
|
2014-05-27 09:54:01 +00:00
|
|
|
_, view_id = imd.get_object_reference(
|
2013-09-09 10:00:59 +00:00
|
|
|
request.cr, request.uid, module, xml_id)
|
2014-05-27 09:54:01 +00:00
|
|
|
Views.write(request.cr, request.uid, [view_id], {
|
2014-08-31 14:56:44 +00:00
|
|
|
'active': True
|
|
|
|
}, context=dict(request.context or {}, active_test=True))
|
2013-08-27 09:56:58 +00:00
|
|
|
|
2014-02-19 10:30:32 +00:00
|
|
|
return request.render('website.themes', {'theme_changed': True})
|
2013-08-27 09:56:58 +00:00
|
|
|
|
2014-01-20 15:37:33 +00:00
|
|
|
@http.route(['/website/snippets'], type='json', auth="public", website=True)
|
2013-10-04 10:41:33 +00:00
|
|
|
def snippets(self):
|
2013-11-21 09:59:59 +00:00
|
|
|
return request.website._render('website.snippets')
|
2013-10-04 10:41:33 +00:00
|
|
|
|
2014-01-20 15:37:33 +00:00
|
|
|
@http.route('/website/reset_templates', type='http', auth='user', methods=['POST'], website=True)
|
2013-12-17 13:46:00 +00:00
|
|
|
def reset_template(self, templates, redirect='/'):
|
|
|
|
templates = request.httprequest.form.getlist('templates')
|
|
|
|
modules_to_update = []
|
|
|
|
for temp_id in templates:
|
|
|
|
view = request.registry['ir.ui.view'].browse(request.cr, request.uid, int(temp_id), context=request.context)
|
2014-07-30 13:10:22 +00:00
|
|
|
if view.page:
|
|
|
|
continue
|
2013-12-17 13:46:00 +00:00
|
|
|
view.model_data_id.write({
|
|
|
|
'noupdate': False
|
|
|
|
})
|
|
|
|
if view.model_data_id.module not in modules_to_update:
|
|
|
|
modules_to_update.append(view.model_data_id.module)
|
2014-07-30 13:10:22 +00:00
|
|
|
|
|
|
|
if modules_to_update:
|
|
|
|
module_obj = request.registry['ir.module.module']
|
|
|
|
module_ids = module_obj.search(request.cr, request.uid, [('name', 'in', modules_to_update)], context=request.context)
|
|
|
|
if module_ids:
|
|
|
|
module_obj.button_immediate_upgrade(request.cr, request.uid, module_ids, context=request.context)
|
2013-12-17 13:46:00 +00:00
|
|
|
return request.redirect(redirect)
|
|
|
|
|
2014-01-20 15:37:33 +00:00
|
|
|
@http.route('/website/customize_template_get', type='json', auth='user', website=True)
|
2014-05-27 09:34:49 +00:00
|
|
|
def customize_template_get(self, xml_id, full=False):
|
2015-02-13 12:30:53 +00:00
|
|
|
return request.registry["ir.ui.view"].customize_template_get(
|
|
|
|
request.cr, request.uid, xml_id, full=full, context=request.context)
|
2014-01-15 11:05:47 +00:00
|
|
|
|
2015-04-08 16:31:43 +00:00
|
|
|
@http.route('/website/get_view_translations', type='json', auth='public', website=True)
|
2013-09-24 19:06:37 +00:00
|
|
|
def get_view_translations(self, xml_id, lang=None):
|
|
|
|
lang = lang or request.context.get('lang')
|
2015-02-13 12:30:53 +00:00
|
|
|
return request.registry["ir.ui.view"].get_view_translations(
|
|
|
|
request.cr, request.uid, xml_id, lang=lang, context=request.context)
|
|
|
|
|
2015-04-08 16:31:43 +00:00
|
|
|
@http.route('/website/set_translations', type='json', auth='public', website=True)
|
2013-09-23 12:00:58 +00:00
|
|
|
def set_translations(self, data, lang):
|
|
|
|
irt = request.registry.get('ir.translation')
|
|
|
|
for view_id, trans in data.items():
|
|
|
|
view_id = int(view_id)
|
|
|
|
for t in trans:
|
|
|
|
initial_content = t['initial_content'].strip()
|
|
|
|
new_content = t['new_content'].strip()
|
2013-09-24 19:06:37 +00:00
|
|
|
tid = t['translation_id']
|
|
|
|
if not tid:
|
|
|
|
old_trans = irt.search_read(
|
|
|
|
request.cr, request.uid,
|
|
|
|
[
|
|
|
|
('type', '=', 'view'),
|
|
|
|
('res_id', '=', view_id),
|
|
|
|
('lang', '=', lang),
|
|
|
|
('src', '=', initial_content),
|
|
|
|
])
|
|
|
|
if old_trans:
|
|
|
|
tid = old_trans[0]['id']
|
|
|
|
if tid:
|
2013-09-23 12:00:58 +00:00
|
|
|
vals = {'value': new_content}
|
2013-09-24 19:06:37 +00:00
|
|
|
irt.write(request.cr, request.uid, [tid], vals)
|
2013-09-23 12:00:58 +00:00
|
|
|
else:
|
|
|
|
new_trans = {
|
|
|
|
'name': 'website',
|
|
|
|
'res_id': view_id,
|
|
|
|
'lang': lang,
|
|
|
|
'type': 'view',
|
|
|
|
'source': initial_content,
|
|
|
|
'value': new_content,
|
|
|
|
}
|
2014-03-18 13:30:36 +00:00
|
|
|
if t.get('gengo_translation'):
|
|
|
|
new_trans['gengo_translation'] = t.get('gengo_translation')
|
2014-03-24 11:12:53 +00:00
|
|
|
new_trans['gengo_comment'] = t.get('gengo_comment')
|
2013-09-23 12:00:58 +00:00
|
|
|
irt.create(request.cr, request.uid, new_trans)
|
|
|
|
return True
|
|
|
|
|
2015-04-08 16:31:43 +00:00
|
|
|
@http.route('/website/translations', type='json', auth="public", website=True)
|
2015-02-22 20:14:15 +00:00
|
|
|
def get_website_translations(self, lang):
|
|
|
|
module_obj = request.registry['ir.module.module']
|
|
|
|
module_ids = module_obj.search(request.cr, request.uid, [('name', 'ilike', 'website'), ('state', '=', 'installed')], context=request.context)
|
|
|
|
modules = [x['name'] for x in module_obj.read(request.cr, request.uid, module_ids, ['name'], context=request.context)]
|
|
|
|
return WebClient().translations(mods=modules, lang=lang)
|
|
|
|
|
2014-01-29 16:38:09 +00:00
|
|
|
@http.route('/website/attach', type='http', auth='user', methods=['POST'], website=True)
|
2014-09-08 13:54:38 +00:00
|
|
|
def attach(self, func, upload=None, url=None, disable_optimization=None):
|
2014-03-27 16:03:24 +00:00
|
|
|
Attachments = request.registry['ir.attachment']
|
2013-07-31 12:13:43 +00:00
|
|
|
|
2014-03-27 16:03:24 +00:00
|
|
|
website_url = message = None
|
|
|
|
if not upload:
|
|
|
|
website_url = url
|
|
|
|
name = url.split("/").pop()
|
2014-03-03 14:24:01 +00:00
|
|
|
attachment_id = Attachments.create(request.cr, request.uid, {
|
2015-03-26 09:24:56 +00:00
|
|
|
'name': name,
|
2014-03-27 16:03:24 +00:00
|
|
|
'type': 'url',
|
|
|
|
'url': url,
|
2013-07-31 12:13:43 +00:00
|
|
|
'res_model': 'ir.ui.view',
|
|
|
|
}, request.context)
|
2014-03-27 16:03:24 +00:00
|
|
|
else:
|
|
|
|
try:
|
|
|
|
image_data = upload.read()
|
|
|
|
image = Image.open(cStringIO.StringIO(image_data))
|
|
|
|
w, h = image.size
|
|
|
|
if w*h > 42e6: # Nokia Lumia 1020 photo resolution
|
|
|
|
raise ValueError(
|
|
|
|
u"Image size excessive, uploaded images must be smaller "
|
|
|
|
u"than 42 million pixel")
|
|
|
|
|
2014-09-08 13:54:38 +00:00
|
|
|
if not disable_optimization and image.format in ('PNG', 'JPEG'):
|
|
|
|
image_data = image_save_for_web(image)
|
|
|
|
|
2014-03-27 16:03:24 +00:00
|
|
|
attachment_id = Attachments.create(request.cr, request.uid, {
|
|
|
|
'name': upload.filename,
|
|
|
|
'datas': image_data.encode('base64'),
|
|
|
|
'datas_fname': upload.filename,
|
|
|
|
'res_model': 'ir.ui.view',
|
|
|
|
}, request.context)
|
|
|
|
|
|
|
|
[attachment] = Attachments.read(
|
|
|
|
request.cr, request.uid, [attachment_id], ['website_url'],
|
|
|
|
context=request.context)
|
|
|
|
website_url = attachment['website_url']
|
|
|
|
except Exception, e:
|
|
|
|
logger.exception("Failed to upload image to attachment")
|
|
|
|
message = unicode(e)
|
2013-07-31 12:13:43 +00:00
|
|
|
|
|
|
|
return """<script type='text/javascript'>
|
2013-09-02 07:52:16 +00:00
|
|
|
window.parent['%s'](%s, %s);
|
2014-03-27 16:03:24 +00:00
|
|
|
</script>""" % (func, json.dumps(website_url), json.dumps(message))
|
2013-07-31 12:13:43 +00:00
|
|
|
|
2015-04-08 16:31:43 +00:00
|
|
|
@http.route(['/website/publish'], type='json', auth="public", website=True)
|
2013-09-26 11:18:54 +00:00
|
|
|
def publish(self, id, object):
|
|
|
|
_id = int(id)
|
|
|
|
_object = request.registry[object]
|
2013-08-14 08:45:05 +00:00
|
|
|
obj = _object.browse(request.cr, request.uid, _id)
|
2013-10-11 12:54:29 +00:00
|
|
|
|
|
|
|
values = {}
|
[IMP] use model._fields instead of model._all_columns to cover all fields
The old-api model._all_columns contains information about model._columns and
inherited columns. This dictionary is missing new-api computed non-stored
fields, and the new field objects provide a more readable api...
This commit contains the following changes:
- adapt several methods of BaseModel to use fields instead of columns and
_all_columns
- copy all semantic-free attributes of related fields from their source
- add attribute 'group_operator' on integer and float fields
- base, base_action_rule, crm, edi, hr, mail, mass_mailing, pad,
payment_acquirer, share, website, website_crm, website_mail: simply use
_fields instead of _all_columns
- base, decimal_precision, website: adapt qweb rendering methods to use fields
instead of columns
2014-11-03 15:00:50 +00:00
|
|
|
if 'website_published' in _object._fields:
|
2013-10-11 12:54:29 +00:00
|
|
|
values['website_published'] = not obj.website_published
|
2013-09-05 15:18:03 +00:00
|
|
|
_object.write(request.cr, request.uid, [_id],
|
2013-10-11 12:54:29 +00:00
|
|
|
values, context=request.context)
|
|
|
|
|
2013-08-14 08:45:05 +00:00
|
|
|
obj = _object.browse(request.cr, request.uid, _id)
|
2013-11-16 15:09:27 +00:00
|
|
|
return bool(obj.website_published)
|
2013-08-14 08:45:05 +00:00
|
|
|
|
2014-08-04 10:25:59 +00:00
|
|
|
@http.route(['/website/seo_suggest/<keywords>'], type='http', auth="public", website=True)
|
|
|
|
def seo_suggest(self, keywords):
|
2014-08-04 12:41:55 +00:00
|
|
|
url = "http://google.com/complete/search"
|
2014-08-11 08:32:08 +00:00
|
|
|
try:
|
|
|
|
req = urllib2.Request("%s?%s" % (url, werkzeug.url_encode({
|
|
|
|
'ie': 'utf8', 'oe': 'utf8', 'output': 'toolbar', 'q': keywords})))
|
|
|
|
request = urllib2.urlopen(req)
|
|
|
|
except (urllib2.HTTPError, urllib2.URLError):
|
|
|
|
return []
|
2014-08-04 10:25:59 +00:00
|
|
|
xmlroot = ET.fromstring(request.read())
|
|
|
|
return json.dumps([sugg[0].attrib['data'] for sugg in xmlroot if len(sugg) and sugg[0].attrib['data']])
|
|
|
|
|
2014-01-29 16:38:09 +00:00
|
|
|
#------------------------------------------------------
|
|
|
|
# Helpers
|
|
|
|
#------------------------------------------------------
|
2014-03-14 16:23:46 +00:00
|
|
|
@http.route(['/website/kanban'], type='http', auth="public", methods=['POST'], website=True)
|
2013-08-21 16:06:32 +00:00
|
|
|
def kanban(self, **post):
|
2013-09-10 13:41:33 +00:00
|
|
|
return request.website.kanban_col(**post)
|
2013-08-21 16:06:32 +00:00
|
|
|
|
2013-10-09 15:40:46 +00:00
|
|
|
def placeholder(self, response):
|
2014-05-14 14:32:36 +00:00
|
|
|
return request.registry['website']._image_placeholder(response)
|
2013-10-09 15:40:46 +00:00
|
|
|
|
2014-01-29 16:38:09 +00:00
|
|
|
@http.route([
|
|
|
|
'/website/image',
|
2014-09-17 14:54:08 +00:00
|
|
|
'/website/image/<model>/<id>/<field>',
|
|
|
|
'/website/image/<model>/<id>/<field>/<int:max_width>x<int:max_height>'
|
2014-01-29 16:38:09 +00:00
|
|
|
], auth="public", website=True)
|
2014-05-01 11:40:03 +00:00
|
|
|
def website_image(self, model, id, field, max_width=None, max_height=None):
|
2014-03-17 10:42:51 +00:00
|
|
|
""" Fetches the requested field and ensures it does not go above
|
|
|
|
(max_width, max_height), resizing it if necessary.
|
|
|
|
|
|
|
|
If the record is not found or does not have the requested field,
|
|
|
|
returns a placeholder image via :meth:`~.placeholder`.
|
|
|
|
|
|
|
|
Sets and checks conditional response parameters:
|
|
|
|
* :mailheader:`ETag` is always set (and checked)
|
|
|
|
* :mailheader:`Last-Modified is set iif the record has a concurrency
|
|
|
|
field (``__last_update``)
|
|
|
|
|
|
|
|
The requested field is assumed to be base64-encoded image data in
|
|
|
|
all cases.
|
|
|
|
"""
|
2014-08-06 14:20:22 +00:00
|
|
|
try:
|
2014-09-05 16:20:55 +00:00
|
|
|
idsha = id.split('_')
|
|
|
|
id = idsha[0]
|
2014-08-06 14:20:22 +00:00
|
|
|
response = werkzeug.wrappers.Response()
|
|
|
|
return request.registry['website']._image(
|
2014-09-05 16:20:55 +00:00
|
|
|
request.cr, request.uid, model, id, field, response, max_width, max_height,
|
|
|
|
cache=STATIC_CACHE if len(idsha) > 1 else None)
|
2014-08-06 14:20:22 +00:00
|
|
|
except Exception:
|
|
|
|
logger.exception("Cannot render image field %r of record %s[%s] at size(%s,%s)",
|
|
|
|
field, model, id, max_width, max_height)
|
|
|
|
response = werkzeug.wrappers.Response()
|
|
|
|
return self.placeholder(response)
|
2013-10-02 09:59:17 +00:00
|
|
|
|
2014-02-05 09:04:47 +00:00
|
|
|
#------------------------------------------------------
|
|
|
|
# Server actions
|
|
|
|
#------------------------------------------------------
|
2015-03-18 12:10:07 +00:00
|
|
|
@http.route([
|
|
|
|
'/website/action/<path_or_xml_id_or_id>',
|
|
|
|
'/website/action/<path_or_xml_id_or_id>/<path:path>',
|
|
|
|
], type='http', auth="public", website=True)
|
2014-02-07 12:19:21 +00:00
|
|
|
def actions_server(self, path_or_xml_id_or_id, **post):
|
2014-02-05 09:04:47 +00:00
|
|
|
cr, uid, context = request.cr, request.uid, request.context
|
|
|
|
res, action_id, action = None, None, None
|
|
|
|
ServerActions = request.registry['ir.actions.server']
|
|
|
|
|
2014-02-07 12:19:21 +00:00
|
|
|
# find the action_id: either an xml_id, the path, or an ID
|
|
|
|
if isinstance(path_or_xml_id_or_id, basestring) and '.' in path_or_xml_id_or_id:
|
|
|
|
action_id = request.registry['ir.model.data'].xmlid_to_res_id(request.cr, request.uid, path_or_xml_id_or_id, raise_if_not_found=False)
|
|
|
|
if not action_id:
|
2014-02-12 10:39:12 +00:00
|
|
|
action_ids = ServerActions.search(cr, uid, [('website_path', '=', path_or_xml_id_or_id), ('website_published', '=', True)], context=context)
|
2014-02-07 12:19:21 +00:00
|
|
|
action_id = action_ids and action_ids[0] or None
|
|
|
|
if not action_id:
|
2014-02-05 09:04:47 +00:00
|
|
|
try:
|
2014-02-07 12:19:21 +00:00
|
|
|
action_id = int(path_or_xml_id_or_id)
|
2014-02-05 09:04:47 +00:00
|
|
|
except ValueError:
|
|
|
|
pass
|
2014-02-07 12:19:21 +00:00
|
|
|
|
2014-02-05 09:04:47 +00:00
|
|
|
# check it effectively exists
|
|
|
|
if action_id:
|
|
|
|
action_ids = ServerActions.exists(cr, uid, [action_id], context=context)
|
|
|
|
action_id = action_ids and action_ids[0] or None
|
2014-02-19 10:30:32 +00:00
|
|
|
# run it, return only if we got a Response object
|
2014-02-05 09:04:47 +00:00
|
|
|
if action_id:
|
2014-02-05 09:45:30 +00:00
|
|
|
action = ServerActions.browse(cr, uid, action_id, context=context)
|
|
|
|
if action.state == 'code' and action.website_published:
|
|
|
|
action_res = ServerActions.run(cr, uid, [action_id], context=context)
|
2014-06-18 11:39:30 +00:00
|
|
|
if isinstance(action_res, werkzeug.wrappers.Response):
|
2014-02-05 09:45:30 +00:00
|
|
|
res = action_res
|
2014-02-05 09:04:47 +00:00
|
|
|
if res:
|
|
|
|
return res
|
|
|
|
return request.redirect('/')
|
2014-02-17 01:28:27 +00:00
|
|
|
|