[MERGE] forward merge 7.0 until revision 4919.

bzr revid: vmt@openerp.com-20130404130704-24vsmczw34cssytd
Vo Minh Thu 2013-04-04 15:07:04 +02:00
commit 307ca374d6
39 changed files with 305 additions and 177 deletions

debian/control

@@ -19,6 +19,7 @@ Depends:
  python-docutils,
  python-feedparser,
  python-gdata,
+ python-imaging,
  python-jinja2,
  python-ldap,
  python-libxslt1,
@@ -46,7 +47,7 @@ Depends:
 Conflicts: tinyerp-server, openerp-server, openerp-web
 Replaces: tinyerp-server, openerp-server, openerp-web
 Recommends:
- graphviz, ghostscript, postgresql, python-imaging, python-matplotlib
+ graphviz, ghostscript, postgresql, python-matplotlib, poppler-utils
 Description: OpenERP Enterprise Resource Management
  OpenERP, previously known as TinyERP, is a complete ERP and CRM. The main
  features are accounting (analytic and financial), stock management, sales and

debian/openerp.init

@@ -17,55 +17,46 @@ DAEMON=/usr/bin/openerp-server
 NAME=openerp-server
 DESC=openerp-server
 CONFIG=/etc/openerp/openerp-server.conf
-LOGFILE=/var/log/openerp-server.log
+LOGFILE=/var/log/openerp/openerp-server.log
 USER=openerp
 test -x ${DAEMON} || exit 0
 set -e
+do_start () {
+    echo -n "Starting ${DESC}: "
+    start-stop-daemon --start --quiet --pidfile /var/run/${NAME}.pid --chuid ${USER} --background --make-pidfile --exec ${DAEMON} -- --config=${CONFIG} --logfile=${LOGFILE}
+    echo "${NAME}."
+}
+
+do_stop () {
+    echo -n "Stopping ${DESC}: "
+    start-stop-daemon --stop --quiet --pidfile /var/run/${NAME}.pid --oknodo
+    echo "${NAME}."
+}
+
 case "${1}" in
         start)
-                echo -n "Starting ${DESC}: "
-                start-stop-daemon --start --quiet --pidfile /var/run/${NAME}.pid \
-                        --chuid ${USER} --background --make-pidfile \
-                        --exec ${DAEMON} -- --config=${CONFIG} \
-                        --logfile=${LOGFILE}
-                echo "${NAME}."
+                do_start
                 ;;
         stop)
-                echo -n "Stopping ${DESC}: "
-                start-stop-daemon --stop --quiet --pidfile /var/run/${NAME}.pid \
-                        --oknodo
-                echo "${NAME}."
+                do_stop
                 ;;
         restart|force-reload)
                 echo -n "Restarting ${DESC}: "
-                start-stop-daemon --stop --quiet --pidfile /var/run/${NAME}.pid \
-                        --oknodo
+                do_stop
                 sleep 1
-                start-stop-daemon --start --quiet --pidfile /var/run/${NAME}.pid \
-                        --chuid ${USER} --background --make-pidfile \
-                        --exec ${DAEMON} -- --config=${CONFIG} \
-                        --logfile=${LOGFILE}
-                echo "${NAME}."
+                do_start
                 ;;
         *)
                 N=/etc/init.d/${NAME}
                 echo "Usage: ${NAME} {start|stop|restart|force-reload}" >&2
                 exit 1
                 ;;
 esac
 exit 0

View File

@@ -12,9 +12,9 @@ case "${1}" in
         chown openerp:openerp /etc/openerp/openerp-server.conf
         chmod 0640 /etc/openerp/openerp-server.conf
         # Creating log file
-        touch /var/log/openerp-server.log
-        chown openerp:openerp /var/log/openerp-server.log
-        chmod 0640 /var/log/openerp-server.log
+        mkdir -p /var/log/openerp/
+        chown openerp:openerp /var/log/openerp
+        chmod 0750 /var/log/openerp
         # Creating local storage directory
         mkdir -p /var/lib/openerp/filestore
         chown openerp:openerp -R /var/lib/openerp

View File

@@ -22,6 +22,17 @@
 """ OpenERP core library.
 """
 
+# Make sure the OpenERP server runs in UTC. This is especially necessary
+# under Windows as under Linux it seems the real import of time is
+# sufficiently deferred so that setting the TZ environment variable
+# in openerp.cli.server was working.
+import os
+os.environ['TZ'] = 'UTC' # Set the timezone...
+import time              # ... *then* import time.
+del os
+del time
+
 # The hard-coded super-user id (a.k.a. administrator, or root user).
 SUPERUSER_ID = 1
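
Note (editor's illustration, not part of the commit): a minimal standalone sketch of the trick above, under the assumption that TZ is only honoured if it is exported before the time module is first loaded; the time.tzset() call is the POSIX way to re-read TZ and does not exist on Windows.

    # illustration only: force a process to report UTC regardless of the host timezone
    import os
    os.environ['TZ'] = 'UTC'      # must be set before `time` is first imported (esp. on Windows)
    import time
    if hasattr(time, 'tzset'):    # POSIX: re-read TZ explicitly; Windows has no tzset()
        time.tzset()
    print(time.strftime('%Z'))    # expect: UTC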

View File

@@ -632,7 +632,7 @@ class actions_server(osv.osv):
                     .read(cr, uid, action.action_id.id, context=context)
             if action.state=='code':
-                eval(action.code, cxt, mode="exec", nocopy=True) # nocopy allows to return 'action'
+                eval(action.code.strip(), cxt, mode="exec", nocopy=True) # nocopy allows to return 'action'
                 if 'action' in cxt:
                     return cxt['action']

View File

@@ -83,7 +83,7 @@ class ir_attachment(osv.osv):
             if bin_size:
                 r = os.path.getsize(full_path)
             else:
-                r = open(full_path).read().encode('base64')
+                r = open(full_path,'rb').read().encode('base64')
         except IOError:
             _logger.error("_read_file reading %s",full_path)
         return r

View File

@@ -18,8 +18,9 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 ##############################################################################
-import time
 import logging
+import threading
+import time
 import psycopg2
 from datetime import datetime
 from dateutil.relativedelta import relativedelta
@@ -188,6 +189,7 @@ class ir_cron(osv.osv):
         If a job was processed, returns True, otherwise returns False.
         """
         db = openerp.sql_db.db_connect(db_name)
+        threading.current_thread().dbname = db_name
         cr = db.cursor()
         jobs = []
         try:
@@ -242,6 +244,9 @@
                 # we're exiting due to an exception while acquiring the lock
                 lock_cr.close()
 
+        if hasattr(threading.current_thread(), 'dbname'): # cron job could have removed it as side-effect
+            del threading.current_thread().dbname
+
     def _try_lock(self, cr, uid, ids, context=None):
         """Try to grab a dummy exclusive write-lock to the rows with the given ids,
            to make sure a following write() or unlink() will not block due

View File

@@ -28,7 +28,7 @@ class ir_filters(osv.osv):
     _description = 'Filters'

     def _list_all_models(self, cr, uid, context=None):
-        cr.execute("SELECT model, name from ir_model")
+        cr.execute("SELECT model, name FROM ir_model ORDER BY name")
         return cr.fetchall()

     def copy(self, cr, uid, id, default=None, context=None):

View File

@@ -25,6 +25,7 @@ import time
 import types

 import openerp
+import openerp.modules.registry
 from openerp import SUPERUSER_ID
 from openerp import tools
 from openerp.osv import fields,osv
@@ -168,7 +169,9 @@ class ir_model(osv.osv):
         if not context.get(MODULE_UNINSTALL_FLAG):
             # only reload pool for normal unlink. For module uninstall the
             # reload is done independently in openerp.modules.loading
+            cr.commit() # must be committed before reloading registry in new cursor
             openerp.modules.registry.RegistryManager.new(cr.dbname)
+            openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)

         return res
@@ -194,6 +197,7 @@
             field_state='manual',
             select=vals.get('select_level', '0'))
         self.pool[vals['model']]._auto_init(cr, ctx)
+        openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
         return res

     def instanciate(self, cr, user, model, context=None):
@@ -259,7 +263,6 @@ class ir_model_fields(osv.osv):
         'state': lambda self,cr,uid,ctx=None: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
         'on_delete': 'set null',
         'select_level': '0',
-        'size': 64,
         'field_description': '',
         'selectable': 1,
     }
@@ -289,10 +292,10 @@
         return True

     def _size_gt_zero_msg(self, cr, user, ids, context=None):
-        return _('Size of the field can never be less than 1 !')
+        return _('Size of the field can never be less than 0 !')

     _sql_constraints = [
-        ('size_gt_zero', 'CHECK (size>0)',_size_gt_zero_msg ),
+        ('size_gt_zero', 'CHECK (size>=0)',_size_gt_zero_msg ),
     ]

     def _drop_column(self, cr, uid, ids, context=None):
@@ -318,6 +321,9 @@
         self._drop_column(cr, user, ids, context)
         res = super(ir_model_fields, self).unlink(cr, user, ids, context)
+        if not context.get(MODULE_UNINSTALL_FLAG):
+            cr.commit()
+            openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
         return res

     def create(self, cr, user, vals, context=None):
@@ -349,6 +355,7 @@
                 select=vals.get('select_level', '0'),
                 update_custom_fields=True)
             self.pool[vals['model']]._auto_init(cr, ctx)
+            openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)

         return res
@@ -465,6 +472,7 @@
                 for col_name, col_prop, val in patch_struct[1]:
                     setattr(obj._columns[col_name], col_prop, val)
                 obj._auto_init(cr, ctx)
+                openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
         return res

 class ir_model_constraint(Model):

View File

@@ -151,7 +151,7 @@
             'readonly': [('ttype','not in', ['many2one','one2many','many2many'])]}"/>
         <field name="relation_field" attrs="{'required': [('ttype','=','one2many')], 'readonly': [('ttype','!=','one2many')]}"/>
         <field name="selection" attrs="{'required': [('ttype','in',['selection','reference'])], 'readonly': [('ttype','not in',['selection','reference'])]}"/>
-        <field name="size" attrs="{'required': [('ttype','in',['char','reference'])], 'readonly': [('ttype','not in',['char','reference'])]}"/>
+        <field name="size" attrs="{'invisible': [('ttype','not in',['char','text','reference'])]}"/>
         <field name="domain" attrs="{'readonly': [('relation','=','')]}"/>
         <field name="serialization_field_id" attrs="{'readonly': [('state','=','base')]}" domain="[('ttype','=','serialized'), ('model_id', '=', model_id)]"/>
         <field name="on_delete" attrs="{'readonly': [('ttype','!=','many2one')]}"/>

View File

@@ -83,7 +83,8 @@ class view(osv.osv):
     }
     _defaults = {
         'arch': '<?xml version="1.0"?>\n<tree string="My view">\n\t<field name="name"/>\n</tree>',
-        'priority': 16
+        'priority': 16,
+        'type': 'tree',
     }
     _order = "priority,name"

View File

@@ -411,7 +411,6 @@ class module(osv.osv):
         if to_install_ids:
             self.button_install(cr, uid, to_install_ids, context=context)
-        openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
         return dict(ACTION_DICT, name=_('Install'))

     def button_immediate_install(self, cr, uid, ids, context=None):
@@ -500,7 +499,6 @@
             raise orm.except_orm(_('Error'), _("The `base` module cannot be uninstalled"))
         dep_ids = self.downstream_dependencies(cr, uid, ids, context=context)
         self.write(cr, uid, ids + dep_ids, {'state': 'to remove'})
-        openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
         return dict(ACTION_DICT, name=_('Uninstall'))

     def button_uninstall_cancel(self, cr, uid, ids, context=None):

View File

@@ -305,7 +305,7 @@ class res_company(osv.osv):
         <frame id="first" x1="1.3cm" y1="3.0cm" height="%s" width="19.0cm"/>
         <stylesheet>
             <paraStyle name="main_footer" fontName="DejaVu Sans" fontSize="8.0" alignment="CENTER"/>
             <paraStyle name="main_header" fontName="DejaVu Sans" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
         </stylesheet>
         <pageGraphics>
             <!-- You Logo - Change X,Y,Width and Height -->
@@ -344,8 +344,8 @@ class res_company(osv.osv):
         </pageTemplate>
 </header>"""
-    _header_a4 = _header_main % ('23.0cm', '27.6cm', '27.7cm', '27.7cm', '27.8cm', '27.3cm', '25.3cm', '25.0cm', '25.0cm', '24.6cm', '24.6cm', '24.5cm', '24.5cm')
-    _header_letter = _header_main % ('21.3cm', '25.9cm', '26.0cm', '26.0cm', '26.1cm', '25.6cm', '23.6cm', '23.3cm', '23.3cm', '22.9cm', '22.9cm', '22.8cm', '22.8cm')
+    _header_a4 = _header_main % ('21.7cm', '27.7cm', '27.7cm', '27.7cm', '27.8cm', '27.3cm', '25.3cm', '25.0cm', '25.0cm', '24.6cm', '24.6cm', '24.5cm', '24.5cm')
+    _header_letter = _header_main % ('20cm', '26.0cm', '26.0cm', '26.0cm', '26.1cm', '25.6cm', '23.6cm', '23.3cm', '23.3cm', '22.9cm', '22.9cm', '22.8cm', '22.8cm')

     def onchange_paper_format(self, cr, uid, ids, paper_format, context=None):
         if paper_format == 'us_letter':

View File

@@ -49,7 +49,7 @@ class res_currency(osv.osv):
                 id, rate = cr.fetchall()[0]
                 res[id] = rate
             else:
-                res[id] = 0
+                raise osv.except_osv(_('Error!'),_("No currency rate associated for currency %d for the given period" % (id)))
         return res
     _name = "res.currency"
     _description = "Currency"

View File

@@ -375,16 +375,30 @@ class res_partner(osv.osv, format_address):
     def create(self, cr, uid, vals, context=None):
         if context is None:
-            context={}
+            context = {}
         # Update parent and siblings records
-        if vals.get('parent_id') and vals.get('use_parent_address'):
-            domain_siblings = [('parent_id', '=', vals['parent_id']), ('use_parent_address', '=', True)]
-            update_ids = [vals['parent_id']] + self.search(cr, uid, domain_siblings, context=context)
-            self.update_address(cr, uid, update_ids, vals, context)
-        return super(res_partner,self).create(cr, uid, vals, context=context)
+        if vals.get('parent_id'):
+            if 'use_parent_address' in vals:
+                use_parent_address = vals['use_parent_address']
+            else:
+                use_parent_address = self.default_get(cr, uid, ['use_parent_address'], context=context)['use_parent_address']
+            if use_parent_address:
+                domain_siblings = [('parent_id', '=', vals['parent_id']), ('use_parent_address', '=', True)]
+                update_ids = [vals['parent_id']] + self.search(cr, uid, domain_siblings, context=context)
+                self.update_address(cr, uid, update_ids, vals, context)
+            # add missing address keys
+            onchange_values = self.onchange_address(cr, uid, [], use_parent_address,
+                                                    vals['parent_id'], context=context).get('value') or {}
+            vals.update(dict((key, value)
+                             for key, value in onchange_values.iteritems()
+                             if key in ADDRESS_FIELDS and key not in vals))
+        return super(res_partner, self).create(cr, uid, vals, context=context)

     def update_address(self, cr, uid, ids, vals, context=None):
-        addr_vals = dict((key, vals[key]) for key in POSTAL_ADDRESS_FIELDS if vals.get(key))
+        addr_vals = dict((key, vals[key]) for key in POSTAL_ADDRESS_FIELDS if key in vals)
         if addr_vals:
             return super(res_partner, self).write(cr, uid, ids, addr_vals, context)
@@ -411,10 +425,10 @@ class res_partner(osv.osv, format_address):
         """ Supported syntax:
             - 'Raoul <raoul@grosbedon.fr>': will find name and email address
             - otherwise: default, everything is set as the name """
-        match = re.search(r'([^\s,<@]+@[^>\s,]+)', text)
-        if match:
-            email = match.group(1)
-            name = text[:text.index(email)].replace('"','').replace('<','').strip()
+        emails = tools.email_split(text)
+        if emails:
+            email = emails[0]
+            name = text[:text.index(email)].replace('"', '').replace('<', '').strip()
         else:
             name, email = text, ''
         return name, email
@@ -457,8 +471,7 @@ class res_partner(osv.osv, format_address):
                           OR partner.name || ' (' || COALESCE(company.name,'') || ')'
                       ''' + operator + ' %(name)s ' + limit_str, query_args)
             ids = map(lambda x: x[0], cr.fetchall())
-            if args:
-                ids = self.search(cr, uid, [('id', 'in', ids)] + args, limit=limit, context=context)
+            ids = self.search(cr, uid, [('id', 'in', ids)] + args, limit=limit, context=context)
             if ids:
                 return self.name_get(cr, uid, ids, context)
         return super(res_partner,self).name_search(cr, uid, name, args, operator=operator, context=context, limit=limit)

View File

@@ -305,15 +305,15 @@
                     filter_domain="['|','|',('name','ilike',self),('parent_id','ilike',self),('ref','=',self)]"/>
                 <filter help="My Partners" icon="terp-personal+" domain="[('user_id','=',uid)]"/>
                 <separator/>
-                <filter string="Persons" name="type_person" icon="terp-personal" domain="[('is_company','=',0)]"/>
-                <filter string="Companies" name="type_company" icon="terp-partner" domain="[('is_company','=',1)]"/>
+                <filter string="Persons" name="type_person" domain="[('is_company','=',0)]"/>
+                <filter string="Companies" name="type_company" domain="[('is_company','=',1)]"/>
                 <separator/>
-                <filter string="Customers" name="customer" icon="terp-personal" domain="[('customer','=',1)]" help="Customer Partners"/>
+                <filter string="Customers" name="customer" domain="[('customer','=',1)]" help="Customer Partners"/>
+                <filter string="Suppliers" name="supplier" domain="[('supplier','=',1)]" help="Supplier Partners"/>
                 <separator/>
-                <filter string="Suppliers" name="supplier" icon="terp-personal" domain="[('supplier','=',1)]" help="Supplier Partners"/>
                 <field name="category_id" string="Tag" filter_domain="[('category_id','ilike', self)]"/>
                 <field name="user_id"/>
-                <field name="parent_id" filter_domain="[('parent_id','child_of',[self])]"/>
+                <field name="parent_id" domain="[('is_company','=',1)]" filter_domain="[('parent_id','child_of',[self])]"/>
                 <group expand="0" string="Group By...">
                     <filter string="Salesperson" icon="terp-personal" domain="[]" context="{'group_by' : 'user_id'}" />
                     <filter string="Company" context="{'group_by': 'parent_id'}"/>

View File

@@ -3,7 +3,7 @@
 #
 #    OpenERP, Open Source Management Solution
 #    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
-#    Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
+#    Copyright (C) 2010-2013 OpenERP s.a. (<http://openerp.com>).
 #
 #    This program is free software: you can redistribute it and/or modify
 #    it under the terms of the GNU Affero General Public License as
@@ -172,6 +172,10 @@ class res_users(osv.osv):
             }
         }

+    def onchange_state(self, cr, uid, ids, state_id, context=None):
+        partner_ids = [user.partner_id.id for user in self.browse(cr, uid, ids, context=context)]
+        return self.pool.get('res.partner').onchange_state(cr, uid, partner_ids, state_id, context=context)
+
     def onchange_type(self, cr, uid, ids, is_company, context=None):
         """ Wrapper on the user.partner onchange_type, because some calls to the
             partner form view applied to the user may trigger the
@@ -426,7 +430,9 @@ class res_users(osv.osv):
         cr = self.pool.db.cursor()
         try:
             base = user_agent_env['base_location']
-            self.pool['ir.config_parameter'].set_param(cr, uid, 'web.base.url', base)
+            ICP = self.pool['ir.config_parameter']
+            if not ICP.get_param(cr, uid, 'web.base.url.freeze'):
+                ICP.set_param(cr, uid, 'web.base.url', base)
             cr.commit()
         except Exception:
             _logger.exception("Failed to update web.base.url configuration parameter")

View File

@@ -56,7 +56,8 @@ openerp.base = function(instance) {
             });
         };

-        i.src = _.str.sprintf('%s/web/static/src/img/sep-a.gif', client.origin);
+        var ts = new Date().getTime();
+        i.src = _.str.sprintf('%s/web/static/src/img/sep-a.gif?%s', client.origin, ts);
         return d.promise();
     };

     if (instance.base.apps_client) {
@@ -96,7 +97,7 @@ openerp.base = function(instance) {
             client.replace(self.$el).
                 done(function() {
                     client.$el.removeClass('openerp');
-                    client.do_action(self.remote_action_id);
+                    client.do_action(self.remote_action_id, {hide_breadcrumb: true});
                 });
             }).
             fail(function(client) {

View File

@@ -114,6 +114,12 @@ class test_expression(common.TransactionCase):
         # Test2: inheritance + relational fields
         user_ids = users_obj.search(cr, uid, [('child_ids.name', 'like', 'test_B')])
         self.assertEqual(set(user_ids), set([b1]), 'searching through inheritance failed')

+        # Special =? operator mean "is equal if right is set, otherwise always True"
+        user_ids = users_obj.search(cr, uid, [('name', 'like', 'test'), ('parent_id', '=?', False)])
+        self.assertEqual(set(user_ids), set([a, b1, b2]), '(x =? False) failed')
+        user_ids = users_obj.search(cr, uid, [('name', 'like', 'test'), ('parent_id', '=?', b1_user.partner_id.id)])
+        self.assertEqual(set(user_ids), set([b2]), '(x =? id) failed')
+
     def test_20_auto_join(self):
         registry, cr, uid = self.registry, self.cr, self.uid
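
Note (editor's illustration, not part of the commit): the '=?' operator exercised in the test above means "equal, but only if a value is actually given"; when the right-hand side is False/None the leaf is simply ignored. A rough pure-Python sketch of that semantics, using made-up record dicts rather than the ORM:

    def optional_equals(records, field, value):
        # '=?' leaf: a falsy right operand makes the condition a no-op (always True)
        if value in (False, None):
            return list(records)
        return [rec for rec in records if rec.get(field) == value]

    partners = [{'id': 1, 'parent_id': None}, {'id': 2, 'parent_id': 1}]
    assert [r['id'] for r in optional_equals(partners, 'parent_id', False)] == [1, 2]
    assert [r['id'] for r in optional_equals(partners, 'parent_id', 1)] == [2]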

View File

@@ -220,15 +220,7 @@ def quit_on_signals():
         os.unlink(config['pidfile'])
     sys.exit(0)

-def configure_babel_localedata_path():
-    # Workaround: py2exe and babel.
-    if hasattr(sys, 'frozen'):
-        import babel
-        babel.localedata._dirname = os.path.join(os.path.dirname(sys.executable), 'localedata')
-
 def main(args):
-    os.environ["TZ"] = "UTC"
-
     check_root_user()
     openerp.tools.config.parse_config(args)
@@ -246,8 +238,6 @@ def main(args):
     config = openerp.tools.config

-    configure_babel_localedata_path()
-
     setup_signal_handlers(signal_handler)

     if config["test_file"]:

View File

@@ -34,6 +34,7 @@ import openerp
 import openerp.modules.db
 import openerp.modules.graph
 import openerp.modules.migration
+import openerp.modules.registry
 import openerp.osv as osv
 import openerp.tools as tools
 from openerp import SUPERUSER_ID
@@ -131,7 +132,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules=
     loaded_modules = []
     registry = openerp.registry(cr.dbname)
     migrations = openerp.modules.migration.MigrationManager(cr, graph)
-    _logger.debug('loading %d packages...', len(graph))
+    _logger.info('loading %d modules...', len(graph))

     # Query manual fields for all models at once and save them on the registry
     # so the initialization code for each model does not have to do it
@@ -149,7 +150,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules=
         if skip_modules and module_name in skip_modules:
             continue

-        _logger.info('module %s: loading objects', package.name)
+        _logger.debug('module %s: loading objects', package.name)
         migrations.migrate_module(package, 'pre')
         load_openerp_module(package.name)

View File

@@ -190,6 +190,10 @@ class RegistryManager(object):
         except KeyError:
             return cls.new(db_name, force_demo, status,
                            update_module)
+        finally:
+            # set db tracker - cleaned up at the WSGI
+            # dispatching phase in openerp.service.wsgi_server.application
+            threading.current_thread().dbname = db_name

     @classmethod
     def new(cls, db_name, force_demo=False, status=None,
@@ -231,6 +235,9 @@
             registry.ready = True

+        if update_module:
+            # only in case of update, otherwise we'll have an infinite reload loop!
+            cls.signal_registry_change(db_name)
         return registry

     @classmethod

View File

@@ -198,7 +198,7 @@ def normalize_domain(domain):
             expected -= 1
         else:
             expected += op_arity.get(token, 0) - 1
-    assert expected == 0
+    assert expected == 0, 'This domain is syntactically not correct: %s' % (domain)
     return result
@@ -597,6 +597,15 @@ class ExtendedLeaf(object):
         self.leaf = normalize_leaf(self.leaf)
         return True

+def create_substitution_leaf(leaf, new_elements, new_model=None):
+    """ From a leaf, create a new leaf (based on the new_elements tuple
+        and new_model), that will have the same join context. Used to
+        insert equivalent leafs in the processing stack. """
+    if new_model is None:
+        new_model = leaf.model
+    new_join_context = [tuple(context) for context in leaf.join_context]
+    new_leaf = ExtendedLeaf(new_elements, new_model, join_context=new_join_context)
+    return new_leaf

 class expression(object):
     """ Parse a domain expression
@@ -714,16 +723,6 @@ class expression(object):
                 return ids + recursive_children(ids2, model, parent_field)
             return [(left, 'in', recursive_children(ids, left_model, parent or left_model._parent_name))]

-        def create_substitution_leaf(leaf, new_elements, new_model=None):
-            """ From a leaf, create a new leaf (based on the new_elements tuple
-                and new_model), that will have the same join context. Used to
-                insert equivalent leafs in the processing stack. """
-            if new_model is None:
-                new_model = leaf.model
-            new_join_context = [tuple(context) for context in leaf.join_context]
-            new_leaf = ExtendedLeaf(new_elements, new_model, join_context=new_join_context)
-            return new_leaf
-
         def pop():
             """ Pop a leaf to process. """
             return self.stack.pop()
@@ -1152,7 +1151,8 @@ class expression(object):
                 params = []
             else:
                 # '=?' behaves like '=' in other cases
-                query, params = self.__leaf_to_sql((left, '=', right), model)
+                query, params = self.__leaf_to_sql(
+                    create_substitution_leaf(eleaf, (left, '=', right), model))

         elif left == 'id':
             query = '%s.id %s %%s' % (table_alias, operator)

View File

@@ -1028,7 +1028,7 @@ class BaseModel(object):
                 'required': bool(field['required']),
                 'readonly': bool(field['readonly']),
                 'domain': eval(field['domain']) if field['domain'] else None,
-                'size': field['size'],
+                'size': field['size'] or None,
                 'ondelete': field['on_delete'],
                 'translate': (field['translate']),
                 'manual': True,
@@ -4451,7 +4451,6 @@ class BaseModel(object):
             upd1 += ",%s,(now() at time zone 'UTC'),%s,(now() at time zone 'UTC')"
             upd2.extend((user, user))
         cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
-        self.check_access_rule(cr, user, [id_new], 'create', context=context)
         upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)

         if self._parent_store and not context.get('defer_parent_store_computation'):
@@ -4504,6 +4503,7 @@ class BaseModel(object):
                 self.name_get(cr, user, [id_new], context=context)[0][1] + \
                 "' " + _("created.")
             self.log(cr, user, id_new, message, True, context=context)
+        self.check_access_rule(cr, user, [id_new], 'create', context=context)
         self.create_workflow(cr, user, [id_new], context=context)
         return id_new

View File

@@ -49,6 +49,19 @@ _logger = logging.getLogger(__name__)
 encoding = 'utf-8'

+def select_fontname(fontname, default_fontname):
+    if fontname not in pdfmetrics.getRegisteredFontNames()\
+         or fontname not in pdfmetrics.standardFonts:
+        # let reportlab attempt to find it
+        try:
+            pdfmetrics.getFont(fontname)
+        except Exception:
+            _logger.warning('Could not locate font %s, substituting default: %s',
+                fontname, default_fontname)
+            fontname = default_fontname
+    return fontname
+
 def _open_image(filename, path=None):
     """Attempt to open a binary file and return the descriptor
     """
@@ -159,7 +172,12 @@ class _rml_styles(object,):
         for attr in ['textColor', 'backColor', 'bulletColor', 'borderColor']:
             if node.get(attr):
                 data[attr] = color.get(node.get(attr))
-        for attr in ['fontName', 'bulletFontName', 'bulletText']:
+        for attr in ['bulletFontName', 'fontName']:
+            if node.get(attr):
+                fontname= select_fontname(node.get(attr), None)
+                if fontname is not None:
+                    data['fontName'] = fontname
+        for attr in ['bulletText']:
             if node.get(attr):
                 data[attr] = node.get(attr)
         for attr in ['fontSize', 'leftIndent', 'rightIndent', 'spaceBefore', 'spaceAfter',
@@ -537,17 +555,7 @@ class _rml_canvas(object):
         self.canvas.drawPath(self.path, **utils.attr_get(node, [], {'fill':'bool','stroke':'bool'}))

     def setFont(self, node):
-        fontname = node.get('name')
-        if fontname not in pdfmetrics.getRegisteredFontNames()\
-             or fontname not in pdfmetrics.standardFonts:
-            # let reportlab attempt to find it
-            try:
-                pdfmetrics.getFont(fontname)
-            except Exception:
-                _logger.debug('Could not locate font %s, substituting default: %s',
-                    fontname,
-                    self.canvas._fontname)
-                fontname = self.canvas._fontname
+        fontname = select_fontname(node.get('name'), self.canvas._fontname)
         return self.canvas.setFont(fontname, utils.unit_get(node.get('size')))

     def render(self, node):

View File

@@ -30,6 +30,7 @@ cron jobs, for all databases of a single OpenERP server instance.
 import logging
 import threading
 import time
+from datetime import datetime

 import openerp
@@ -56,6 +57,12 @@ def start_service():
     threads it spawns are not marked daemon).

     """
+    # Force call to strptime just before starting the cron thread
+    # to prevent time.strptime AttributeError within the thread.
+    # See: http://bugs.python.org/issue7980
+    datetime.strptime('2012-01-01', '%Y-%m-%d')
+
     for i in range(openerp.tools.config['max_cron_threads']):
         def target():
             cron_runner(i)
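
Note (editor's illustration, not part of the commit): the strptime pre-warm above works around http://bugs.python.org/issue7980, where the first time.strptime call made from a worker thread on Python 2 can raise AttributeError because of a lazy import race. A small standalone sketch of the same pattern:

    import threading
    from datetime import datetime

    # Prime strptime in the main thread so its lazy _strptime import is done
    # before any worker thread needs it (CPython issue 7980).
    datetime.strptime('2012-01-01', '%Y-%m-%d')

    def worker():
        # safe now, even if this is the thread's very first strptime call
        print(datetime.strptime('2013-04-04', '%Y-%m-%d'))

    t = threading.Thread(target=worker)
    t.start()
    t.join()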

View File

@@ -197,18 +197,26 @@ def exp_drop(db_name):
     return True

 @contextlib.contextmanager
-def _set_pg_password_in_environment():
-    """ On Win32, pg_dump (and pg_restore) require that
-    :envvar:`PGPASSWORD` be set
+def _set_pg_password_in_environment(self):
+    """ On systems where pg_restore/pg_dump require an explicit
+    password (i.e. when not connecting via unix sockets, and most
+    importantly on Windows), it is necessary to pass the PG user
+    password in the environment or in a special .pgpass file.

     This context management method handles setting
-    :envvar:`PGPASSWORD` iif win32 and the envvar is not already
+    :envvar:`PGPASSWORD` if it is not already
     set, and removing it afterwards.
+
+    See also http://www.postgresql.org/docs/8.4/static/libpq-envars.html
+
+    .. note:: This is not thread-safe, and should never be enabled for
+         SaaS (giving SaaS users the super-admin password is not a good idea
+         anyway)
     """
-    if os.name != 'nt' or os.environ.get('PGPASSWORD'):
+    if os.environ.get('PGPASSWORD') or not tools.config['db_password']:
         yield
     else:
-        os.environ['PGPASSWORD'] = openerp.tools.config['db_password']
+        os.environ['PGPASSWORD'] = tools.config['db_password']
         try:
             yield
         finally:
@@ -234,7 +242,7 @@ def exp_dump(db_name):
         if not data or res:
             _logger.error(
                     'DUMP DB: %s failed! Please verify the configuration of the database password on the server. '
-                    'It should be provided as a -w <PASSWD> command-line option, or as `db_password` in the '
+                    'You may need to create a .pgpass file for authentication, or specify `db_password` in the '
                     'server configuration file.\n %s', db_name, data)
             raise Exception, "Couldn't dump database"
         _logger.info('DUMP DB successful: %s', db_name)

View File

@@ -3,7 +3,9 @@
 from functools import wraps
 import logging
 from psycopg2 import IntegrityError, errorcodes
+import random
 import threading
+import time

 import openerp
 from openerp.tools.translate import translate
@@ -13,9 +15,16 @@ import security

 _logger = logging.getLogger(__name__)

+PG_CONCURRENCY_ERRORS_TO_RETRY = (errorcodes.LOCK_NOT_AVAILABLE, errorcodes.SERIALIZATION_FAILURE, errorcodes.DEADLOCK_DETECTED)
+MAX_TRIES_ON_CONCURRENCY_FAILURE = 5
+
 def dispatch(method, params):
     (db, uid, passwd ) = params[0:3]
+
+    # set uid tracker - cleaned up at the WSGI
+    # dispatching phase in openerp.service.wsgi_server.application
     threading.current_thread().uid = uid
+
     params = params[3:]
     if method == 'obj_list':
         raise NameError("obj_list has been discontinued via RPC as of 6.0, please query ir.model directly!")
@@ -94,37 +103,50 @@ def check(f):
         def _(src):
             return tr(src, 'code')

-        try:
-            if openerp.registry(dbname)._init:
-                raise openerp.exceptions.Warning('Currently, this database is not fully loaded and can not be used.')
-            return f(dbname, *args, **kwargs)
-        except IntegrityError, inst:
-            registry = openerp.registry(dbname)
-            for key in registry._sql_error.keys():
-                if key in inst[0]:
-                    raise openerp.osv.orm.except_orm(_('Constraint Error'), tr(registry._sql_error[key], 'sql_constraint') or inst[0])
-            if inst.pgcode in (errorcodes.NOT_NULL_VIOLATION, errorcodes.FOREIGN_KEY_VIOLATION, errorcodes.RESTRICT_VIOLATION):
-                msg = _('The operation cannot be completed, probably due to the following:\n- deletion: you may be trying to delete a record while other records still reference it\n- creation/update: a mandatory field is not correctly set')
-                _logger.debug("IntegrityError", exc_info=True)
-                try:
-                    errortxt = inst.pgerror.replace('«','"').replace('»','"')
-                    if '"public".' in errortxt:
-                        context = errortxt.split('"public".')[1]
-                        model_name = table = context.split('"')[1]
-                    else:
-                        last_quote_end = errortxt.rfind('"')
-                        last_quote_begin = errortxt.rfind('"', 0, last_quote_end)
-                        model_name = table = errortxt[last_quote_begin+1:last_quote_end].strip()
-                    model = table.replace("_",".")
-                    model_obj = registry.get(model)
-                    if model_obj:
-                        model_name = model_obj._description or model_obj._name
-                    msg += _('\n\n[object with reference: %s - %s]') % (model_name, model)
-                except Exception:
-                    pass
-                raise openerp.osv.orm.except_orm(_('Integrity Error'), msg)
-            else:
-                raise openerp.osv.orm.except_orm(_('Integrity Error'), inst[0])
+        tries = 0
+        while True:
+            try:
+                if openerp.registry(dbname)._init:
+                    raise openerp.exceptions.Warning('Currently, this database is not fully loaded and can not be used.')
+                return f(dbname, *args, **kwargs)
+            except OperationalError, e:
+                # Automatically retry the typical transaction serialization errors
+                if e.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY:
+                    raise
+                if tries >= MAX_TRIES_ON_CONCURRENCY_FAILURE:
+                    _logger.warning("%s, maximum number of tries reached" % errorcodes.lookup(e.pgcode))
+                    raise
+                wait_time = random.uniform(0.0, 2 ** tries)
+                tries += 1
+                _logger.info("%s, retry %d/%d in %.04f sec..." % (errorcodes.lookup(e.pgcode), tries, MAX_TRIES_ON_CONCURRENCY_FAILURE, wait_time))
+                time.sleep(wait_time)
+            except IntegrityError, inst:
+                registry = openerp.registry(dbname)
+                for key in registry._sql_error.keys():
+                    if key in inst[0]:
+                        raise openerp.osv.orm.except_orm(_('Constraint Error'), tr(registry._sql_error[key], 'sql_constraint') or inst[0])
+                if inst.pgcode in (errorcodes.NOT_NULL_VIOLATION, errorcodes.FOREIGN_KEY_VIOLATION, errorcodes.RESTRICT_VIOLATION):
+                    msg = _('The operation cannot be completed, probably due to the following:\n- deletion: you may be trying to delete a record while other records still reference it\n- creation/update: a mandatory field is not correctly set')
+                    _logger.debug("IntegrityError", exc_info=True)
+                    try:
+                        errortxt = inst.pgerror.replace('«','"').replace('»','"')
+                        if '"public".' in errortxt:
+                            context = errortxt.split('"public".')[1]
+                            model_name = table = context.split('"')[1]
+                        else:
+                            last_quote_end = errortxt.rfind('"')
+                            last_quote_begin = errortxt.rfind('"', 0, last_quote_end)
+                            model_name = table = errortxt[last_quote_begin+1:last_quote_end].strip()
+                        model = table.replace("_",".")
+                        model_obj = registry.get(model)
+                        if model_obj:
+                            model_name = model_obj._description or model_obj._name
+                        msg += _('\n\n[object with reference: %s - %s]') % (model_name, model)
+                    except Exception:
+                        pass
+                    raise openerp.osv.orm.except_orm(_('Integrity Error'), msg)
+                else:
+                    raise openerp.osv.orm.except_orm(_('Integrity Error'), inst[0])

     return wrapper
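
Note (editor's illustration, not part of the commit): the loop above is a bounded retry with randomized exponential backoff on PostgreSQL concurrency errors. A generic sketch of the same idea, with names of my own choosing rather than OpenERP API:

    import random
    import time
    from psycopg2 import OperationalError, errorcodes

    RETRYABLE = (errorcodes.LOCK_NOT_AVAILABLE,
                 errorcodes.SERIALIZATION_FAILURE,
                 errorcodes.DEADLOCK_DETECTED)

    def run_with_retry(func, max_tries=5):
        tries = 0
        while True:
            try:
                return func()
            except OperationalError as exc:
                if exc.pgcode not in RETRYABLE or tries >= max_tries:
                    raise
                wait = random.uniform(0.0, 2 ** tries)  # randomized exponential backoff
                tries += 1
                time.sleep(wait)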

View File

@@ -388,9 +388,19 @@ class WorkerBaseWSGIServer(werkzeug.serving.BaseWSGIServer):
 class WorkerCron(Worker):
     """ Cron workers """

+    def __init__(self, multi):
+        super(WorkerCron, self).__init__(multi)
+        # process_work() below process a single database per call.
+        # The variable db_index is keeping track of the next database to
+        # process.
+        self.db_index = 0
+
     def sleep(self):
-        interval = 60 + self.pid % 10 # chorus effect
-        time.sleep(interval)
+        # Really sleep once all the databases have been processed.
+        if self.db_index == 0:
+            interval = 60 + self.pid % 10 # chorus effect
+            time.sleep(interval)

     def process_work(self):
         rpc_request = logging.getLogger('openerp.netsvc.rpc.request')
@@ -400,7 +410,9 @@ class WorkerCron(Worker):
             db_names = config['db_name'].split(',')
         else:
             db_names = openerp.service.db.exp_list(True)
-        for db_name in db_names:
+        if len(db_names):
+            self.db_index = (self.db_index + 1) % len(db_names)
+            db_name = db_names[self.db_index]
             if rpc_request_flag:
                 start_time = time.time()
                 start_rss, start_vms = psutil.Process(os.getpid()).get_memory_info()
@@ -419,8 +431,14 @@ class WorkerCron(Worker):
                 end_rss, end_vms = psutil.Process(os.getpid()).get_memory_info()
                 logline = '%s time:%.3fs mem: %sk -> %sk (diff: %sk)' % (db_name, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
                 _logger.debug("WorkerCron (%s) %s", self.pid, logline)
+
+            # TODO Each job should be considered as one request instead of each run
             self.request_count += 1
+            if self.request_count >= self.request_max and self.request_max < len(db_names):
+                _logger.error("There are more dabatases to process than allowed "
+                    "by the `limit_request` configuration variable: %s more.",
+                    len(db_names) - self.request_max)
+        else:
+            self.db_index = 0

     def start(self):
         Worker.start(self)
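
Note (editor's illustration, not part of the commit): the db_index bookkeeping above is a plain round-robin cursor over the configured databases, with the worker only sleeping once a full cycle has wrapped back to index 0. In isolation the pattern looks like this (plain Python, no OpenERP objects):

    def round_robin_steps(names, cycles=2):
        # yields one name per call, wrapping around; index 0 marks "cycle finished"
        index = 0
        for _ in range(cycles * len(names)):
            index = (index + 1) % len(names)
            yield names[index], index == 0   # (name to process, sleep afterwards?)

    for db, sleep_now in round_robin_steps(['db_a', 'db_b', 'db_c']):
        print(db, 'sleep' if sleep_now else 'continue')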

View File

@@ -390,6 +390,16 @@ def register_rpc_endpoint(endpoint, handler):

 def application_unproxied(environ, start_response):
     """ WSGI entry point."""
+    # cleanup db/uid trackers - they're set at HTTP dispatch in
+    # web.session.OpenERPSession.send() and at RPC dispatch in
+    # openerp.service.web_services.objects_proxy.dispatch().
+    # /!\ The cleanup cannot be done at the end of this `application`
+    # method because werkzeug still produces relevant logging afterwards
+    if hasattr(threading.current_thread(), 'uid'):
+        del threading.current_thread().uid
+    if hasattr(threading.current_thread(), 'dbname'):
+        del threading.current_thread().dbname
+
     openerp.service.start_internal()

     # Try all handlers until one returns some result (i.e. not None).
@@ -401,7 +411,6 @@ def application_unproxied(environ, start_response):
             continue
         return result

     # We never returned from the loop.
     response = 'No handler found.\n'
     start_response('404 Not Found', [('Content-Type', 'text/plain'), ('Content-Length', str(len(response)))])

View File

@@ -3,7 +3,7 @@
 #
 #    OpenERP, Open Source Management Solution
 #    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
-#    Copyright (C) 2010-2011 OpenERP s.a. (<http://openerp.com>).
+#    Copyright (C) 2010-2013 OpenERP s.a. (<http://openerp.com>).
 #
 #    This program is free software: you can redistribute it and/or modify
 #    it under the terms of the GNU Affero General Public License as
@@ -36,7 +36,6 @@ import psycopg2.extensions
 from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_REPEATABLE_READ
 from psycopg2.pool import PoolError
 from psycopg2.psycopg1 import cursor as psycopg1cursor
-from threading import currentThread

 psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
@@ -393,7 +392,7 @@ class ConnectionPool(object):
     def borrow(self, dsn):
         self._debug('Borrow connection to %r', dsn)

-        # free leaked connections
+        # free dead and leaked connections
         for i, (cnx, _) in tools.reverse_enumerate(self._connections):
             if cnx.closed:
                 self._connections.pop(i)
@@ -407,6 +406,14 @@ class ConnectionPool(object):
         for i, (cnx, used) in enumerate(self._connections):
             if not used and dsn_are_equals(cnx.dsn, dsn):
+                try:
+                    cnx.reset()
+                except psycopg2.OperationalError:
+                    self._debug('Cannot reset connection at index %d: %r', i, cnx.dsn)
+                    # psycopg2 2.4.4 and earlier do not allow closing a closed connection
+                    if not cnx.closed:
+                        cnx.close()
+                    continue
                 self._connections.pop(i)
                 self._connections.append((cnx, True))
                 self._debug('Existing connection found at index %d', i)
@@ -507,7 +514,6 @@ def db_connect(db_name):
     global _Pool
     if _Pool is None:
         _Pool = ConnectionPool(int(tools.config['db_maxconn']))
-    currentThread().dbname = db_name
     return Connection(_Pool, db_name)

 def close_db(db_name):
@@ -515,9 +521,6 @@ def close_db(db_name):
     global _Pool
     if _Pool:
         _Pool.close_all(dsn(db_name))
-    ct = currentThread()
-    if hasattr(ct, 'dbname'):
-        delattr(ct, 'dbname')

 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@@ -83,6 +83,8 @@ def image_resize_image(base64_source, size=(1024, 1024), encoding='base64', file
     if image.size != size:
         # If you need faster thumbnails you may use use Image.NEAREST
         image = ImageOps.fit(image, size, Image.ANTIALIAS)
+    if image.mode not in ["1", "L", "P", "RGB", "RGBA"]:
+        image = image.convert("RGB")

     background_stream = StringIO.StringIO()
     image.save(background_stream, filetype)
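
Note (editor's illustration, not part of the commit): the mode check above guards against image modes that common output encoders cannot write directly (CMYK being the usual offender). A minimal PIL/Pillow sketch of the same guard, using an in-memory buffer:

    from io import BytesIO
    from PIL import Image

    image = Image.new('CMYK', (32, 32))           # a mode many writers reject or mangle
    if image.mode not in ('1', 'L', 'P', 'RGB', 'RGBA'):
        image = image.convert('RGB')              # normalise before encoding

    out = BytesIO()
    image.save(out, 'PNG')
    print(image.mode, len(out.getvalue()))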

View File

@@ -50,7 +50,7 @@ def html_sanitize(src):
     src = ustr(src, errors='replace')

     # html encode email tags
-    part = re.compile(r"(<[^<>]+@[^<>]+>)", re.IGNORECASE | re.DOTALL)
+    part = re.compile(r"(<(([^a<>]|a[^<>\s])[^<>]*)@[^<>]+>)", re.IGNORECASE | re.DOTALL)
     src = part.sub(lambda m: cgi.escape(m.group(1)), src)

     # some corner cases make the parser crash (such as <SCRIPT/XSS SRC=\"http://ha.ckers.org/xss.js\"></SCRIPT> in test_mail)
@@ -185,6 +185,8 @@ def html2plaintext(html, body_id=None, encoding='utf-8'):
         url_index.append(url)

     html = ustr(etree.tostring(tree, encoding=encoding))
+    # \r char is converted into &#13;, must remove it
+    html = html.replace('&#13;', '')

     html = html.replace('<strong>', '*').replace('</strong>', '*')
     html = html.replace('<b>', '*').replace('</b>', '*')

View File

@@ -138,6 +138,7 @@ def file_open(name, mode="r", subdir='addons', pathinfo=False):
     # Is it below 'addons_path' or 'root_path'?
     name = os.path.normcase(os.path.normpath(name))
     for root in adps + [rtp]:
+        root = os.path.normcase(os.path.normpath(root)) + os.sep
         if name.startswith(root):
             base = root.rstrip(os.sep)
             name = name[len(base) + 1:]

View File

@@ -550,6 +550,8 @@ def trans_parse_view(de):
         res.append(de.get('sum').encode("utf8"))
     if de.get("confirm"):
         res.append(de.get('confirm').encode("utf8"))
+    if de.get("placeholder"):
+        res.append(de.get('placeholder').encode("utf8"))
     for n in de:
         res.extend(trans_parse_view(n))
     return res

View File

@@ -25,7 +25,6 @@ def run(args):
     openerp.cli.server.check_root_user()
     openerp.netsvc.init_logger()
     #openerp.cli.server.report_configuration()
-    openerp.cli.server.configure_babel_localedata_path()

     openerp.cli.server.setup_signal_handlers(openerp.cli.server.signal_handler)
     import openerp.addons.base
     if args.database:

View File

@@ -49,7 +49,6 @@ def run(args):
     openerp.cli.server.check_root_user()
     openerp.netsvc.init_logger()
     #openerp.cli.server.report_configuration()
-    openerp.cli.server.configure_babel_localedata_path()

     target = openerp.service.wsgi_server.serve
     if not args.gevent:

View File

@@ -291,9 +291,10 @@ Function .onInit
     !insertmacro MUI_LANGDLL_DISPLAY

     ClearErrors
-    EnumRegKey $0 HKLM "SOFTWARE\PostgreSQL" 0
+    EnumRegKey $0 HKLM "SOFTWARE\PostgreSQL\Installations" 0
     IfErrors DoInstallPostgreSQL 0
-    StrCpy $HasPostgreSQL 1
+    StrCmp $0 "" DoInstallPostgreSQL
+    StrCpy $HasPostgreSQL 1
     DoInstallPostgreSQL:
 FunctionEnd

View File

@@ -35,7 +35,15 @@ def data():
     r["Microsoft.VC90.CRT"] = glob.glob('C:\Microsoft.VC90.CRT\*.*')

     import babel
-    r["localedata"] = glob.glob(os.path.join(os.path.dirname(babel.__file__), "localedata", '*'))
+    # Add data, but also some .py files py2exe won't include automatically.
+    # TODO This should probably go under `packages`, instead of `data`,
+    # but this will work fine (especially since we don't use the ZIP file
+    # approach).
+    r["babel/localedata"] = glob.glob(os.path.join(os.path.dirname(babel.__file__), "localedata", '*'))
+    others = ['global.dat', 'numbers.py', 'support.py']
+    r["babel"] = map(lambda f: os.path.join(os.path.dirname(babel.__file__), f), others)
+    others = ['frontend.py', 'mofile.py']
+    r["babel/messages"] = map(lambda f: os.path.join(os.path.dirname(babel.__file__), "messages", f), others)

     import pytz
     tzdir = os.path.dirname(pytz.__file__)
@@ -66,7 +74,7 @@ def py2exe_options():
             'options' : {
                 "py2exe": {
                     "skip_archive": 1,
-                    "optimize": 2,
+                    "optimize": 0, # keep the assert running, because the integrated tests rely on them.
                     "dist_dir": 'dist',
                     "packages": [ "DAV", "HTMLParser", "PIL", "asynchat", "asyncore", "commands", "dateutil", "decimal", "docutils", "email", "encodings", "imaplib", "jinja2", "lxml", "lxml._elementpath", "lxml.builder", "lxml.etree", "lxml.objectify", "mako", "openerp", "poplib", "pychart", "pydot", "pyparsing", "pytz", "reportlab", "select", "simplejson", "smtplib", "uuid", "vatnumber", "vobject", "xml", "xml.dom", "yaml", ],
                     "excludes" : ["Tkconstants","Tkinter","tcl"],
@@ -118,7 +126,7 @@ setuptools.setup(
         'mock',
         'PIL', # windows binary http://www.lfd.uci.edu/~gohlke/pythonlibs/
         'psutil', # windows binary code.google.com/p/psutil/downloads/list
-        'psycopg2',
+        'psycopg2 >= 2.2',
         'pydot',
         'pyparsing < 2',
         'python-dateutil < 2',