niv-openerp 2012-12-06 11:18:00 +01:00
commit 36cc0540cb
90 changed files with 24490 additions and 21996 deletions

67 file diffs suppressed because they are too large to display.

View File

@@ -502,6 +502,19 @@
<menuitem id="menu_ir_actions_todo" name="Configuration Wizards" parent="menu_custom" sequence="20" groups="base.group_no_one"/>
<menuitem id="menu_ir_actions_todo_form" action="act_ir_actions_todo_form" parent="menu_ir_actions_todo"/>
<record id="action_run_ir_action_todo" model="ir.actions.server">
<field name="name">Run Remaining Action Todo</field>
<field name="condition">True</field>
<field name="type">ir.actions.server</field>
<field name="model_id" ref="model_res_config"/>
<field name="state">code</field>
<field name="code">
config = self.next(cr, uid, [], context=context) or {}
if config.get('type') not in ('ir.actions.act_window_close',):
action = config
</field>
</record>
</data>
</openerp>

View File

@@ -64,10 +64,18 @@ class ir_attachment(osv.osv):
return 0
return []
# Work with a set, as list.remove() is prohibitive for large lists of documents
# (takes 20+ seconds on a db with 100k docs during search_count()!)
orig_ids = ids
ids = set(ids)
# For attachments, the permissions of the document they are attached to
# apply, so we must remove attachments for which the user cannot access
# the linked document.
targets = super(ir_attachment,self).read(cr, uid, ids, ['id', 'res_model', 'res_id'])
# Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs),
# and the permissions are checked in super() and below anyway.
cr.execute("""SELECT id, res_model, res_id FROM ir_attachment WHERE id = ANY(%s)""", (list(ids),))
targets = cr.dictfetchall()
model_attachments = {}
for target_dict in targets:
if not (target_dict['res_id'] and target_dict['res_model']):
@@ -92,9 +100,10 @@ class ir_attachment(osv.osv):
for res_id in disallowed_ids:
for attach_id in targets[res_id]:
ids.remove(attach_id)
if count:
return len(ids)
return ids
# sort result according to the original sort ordering
result = [id for id in orig_ids if id in ids]
return len(result) if count else list(result)
def read(self, cr, uid, ids, fields_to_read=None, context=None, load='_classic_read'):
self.check(cr, uid, ids, 'read', context=context)
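
The hunk above replaces a read() round-trip with a single SQL query (the id list is passed through ANY(%s), which psycopg2 adapts to a PostgreSQL array), switches the id filtering to a set, and rebuilds the result in the caller's original search order. A minimal standalone sketch of the filtering/ordering pattern; the helper name and simplified signature are illustrative, not part of the commit:

    def restore_search_order(orig_ids, allowed_ids, count=False):
        # Membership tests on a set are O(1), unlike list.remove(), which the
        # old code relied on and which crawls on databases with 100k+ docs.
        allowed = set(allowed_ids)
        # Iterating over orig_ids keeps the ordering of the original search,
        # which neither read() nor a plain SQL fetch guarantees.
        result = [id_ for id_ in orig_ids if id_ in allowed]
        return len(result) if count else result

    # restore_search_order([7, 3, 9], {3, 9, 12})        -> [3, 9]
    # restore_search_order([7, 3, 9], {3, 9, 12}, True)  -> 2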

View File

@@ -70,7 +70,10 @@ class ir_model(osv.osv):
models = self.browse(cr, uid, ids, context=context)
res = dict.fromkeys(ids)
for model in models:
res[model.id] = self.pool.get(model.model).is_transient()
if self.pool.get(model.model):
res[model.id] = self.pool.get(model.model).is_transient()
else:
_logger.error('Missing model %s' % (model.model, ))
return res
def _search_osv_memory(self, cr, uid, model, name, domain, context=None):
@@ -206,6 +209,7 @@ class ir_model(osv.osv):
else:
x_name = a._columns.keys()[0]
x_custom_model._rec_name = x_name
a._rec_name = x_name
class ir_model_fields(osv.osv):
_name = 'ir.model.fields'

View File

@@ -44,7 +44,7 @@
<field name="selection" attrs="{'required': [('ttype','in',['selection','reference'])], 'readonly': [('ttype','not in',['selection','reference'])]}"/>
<field name="size" attrs="{'required': [('ttype','in',['char','reference'])], 'readonly': [('ttype','not in',['char','reference'])]}"/>
<field name="domain" attrs="{'readonly': [('relation','=','')]}"/>
<field name="serialization_field_id" attrs="{'readonly': [('state','=','base')]}" domain="[('ttype','=','serialized'), ('model_id', '=', model_id)]"/>
<field name="serialization_field_id" attrs="{'readonly': [('state','=','base')]}" domain="[('ttype','=','serialized'), ('model_id', '=', parent.model)]"/>
</group>
<group>
<field name="required"/>

View File

@@ -202,7 +202,7 @@ class res_partner(osv.osv, format_address):
help="The partner's timezone, used to output proper date and time values inside printed reports. "
"It is important to set a value for this field. You should use the same timezone "
"that is otherwise used to pick and render date and time values: your computer's timezone."),
'tz_offset': fields.function(_get_tz_offset, type='char', size=5, string='Timezone offset', store=True),
'tz_offset': fields.function(_get_tz_offset, type='char', size=5, string='Timezone offset', invisible=True),
'user_id': fields.many2one('res.users', 'Salesperson', help='The internal user that is in charge of communicating with this contact if any.'),
'vat': fields.char('TIN', size=32, help="Tax Identification Number. Check the box if this contact is subjected to taxes. Used by the some of the legal statements."),
'bank_ids': fields.one2many('res.partner.bank', 'partner_id', 'Banks'),

View File

@@ -153,7 +153,7 @@ class res_users(osv.osv):
help="Specify a value only when creating a user or if you're "\
"changing the user's password, otherwise leave empty. After "\
"a change of password, the user has to login again."),
'signature': fields.text('Signature', size=64),
'signature': fields.text('Signature'),
'active': fields.boolean('Active'),
'action_id': fields.many2one('ir.actions.actions', 'Home Action', help="If specified, this action will be opened at logon for this user, in addition to the standard menu."),
'menu_id': fields.many2one('ir.actions.actions', 'Menu Action', help="If specified, the action will replace the standard menu for this user."),
@@ -343,7 +343,7 @@ class res_users(osv.osv):
for k in self._all_columns.keys():
if k.startswith('context_'):
context_key = k[8:]
elif k in ['lang', 'tz', 'tz_offset']:
elif k in ['lang', 'tz']:
context_key = k
else:
context_key = False
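
The second res.users hunk above derives the keys of a user's session context from column names: every column named context_<key> contributes <key>, while 'lang' and 'tz' pass through unchanged ('tz_offset' is removed from that list by this commit). A rough sketch of that mapping, with an invented column name for illustration:

    def derive_context_keys(column_names):
        keys = {}
        for name in column_names:
            if name.startswith('context_'):
                keys[name] = name[len('context_'):]   # e.g. context_department_id -> department_id
            elif name in ('lang', 'tz'):
                keys[name] = name
        return keys

    # derive_context_keys(['context_department_id', 'lang', 'tz', 'login'])
    # -> {'context_department_id': 'department_id', 'lang': 'lang', 'tz': 'tz'}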

View File

@@ -50,20 +50,25 @@ class test_ir_values(common.TransactionCase):
# Create some action bindings for a non-existing model.
act_id_1 = self.ref('base.act_values_form_action')
act_id_2 = self.ref('base.act_values_form_defaults')
act_id_3 = self.ref('base.action_res_company_form')
act_id_4 = self.ref('base.action_res_company_tree')
ir_values = self.registry('ir.values')
ir_values.set(self.cr, self.uid, 'action', 'tree_but_open', 'OnDblClick Action', ['unexisting_model'], 'ir.actions.act_window,10', isobject=True)
ir_values.set(self.cr, self.uid, 'action', 'tree_but_open', 'OnDblClick Action 2', ['unexisting_model'], 'ir.actions.act_window,11', isobject=True)
ir_values.set(self.cr, self.uid, 'action', 'client_action_multi', 'Side Wizard', ['unexisting_model'], 'ir.actions.act_window,12', isobject=True)
ir_values.set(self.cr, self.uid, 'action', 'tree_but_open', 'OnDblClick Action', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_1, isobject=True)
ir_values.set(self.cr, self.uid, 'action', 'tree_but_open', 'OnDblClick Action 2', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_2, isobject=True)
ir_values.set(self.cr, self.uid, 'action', 'client_action_multi', 'Side Wizard', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_3, isobject=True)
report_ids = self.registry('ir.actions.report.xml').search(self.cr, self.uid, [], {})
reports = self.registry('ir.actions.report.xml').browse(self.cr, self.uid, report_ids, {})
report_id = [report.id for report in reports if not report.groups_id][0] # assume at least one
ir_values.set(self.cr, self.uid, 'action', 'client_print_multi', 'Nice Report', ['unexisting_model'], 'ir.actions.report.xml,%s'%report_id, isobject=True)
ir_values.set(self.cr, self.uid, 'action', 'client_action_relate', 'Related Stuff', ['unexisting_model'], 'ir.actions.act_window,14', isobject=True)
report_id = [report.id for report in reports if not report.groups_id][0] # assume at least one
ir_values.set(self.cr, self.uid, 'action', 'client_print_multi', 'Nice Report', ['unexisting_model'], 'ir.actions.report.xml,%d' % report_id, isobject=True)
ir_values.set(self.cr, self.uid, 'action', 'client_action_relate', 'Related Stuff', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_4, isobject=True)
# Replace one action binding to set a new name.
ir_values = self.registry('ir.values')
ir_values.set(self.cr, self.uid, 'action', 'tree_but_open', 'OnDblClick Action New', ['unexisting_model'], 'ir.actions.act_window,10', isobject=True)
ir_values.set(self.cr, self.uid, 'action', 'tree_but_open', 'OnDblClick Action New', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_1, isobject=True)
# Retrieve the action bindings and check they're correct
@@ -73,17 +78,17 @@ class test_ir_values(common.TransactionCase):
#first action
assert len(actions[0]) == 3, "Malformed action definition"
assert actions[0][1] == 'OnDblClick Action 2', 'Bound action does not match definition'
assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == 11, 'Bound action does not match definition'
assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == act_id_2, 'Bound action does not match definition'
#second action - this ones comes last because it was re-created with a different name
assert len(actions[1]) == 3, "Malformed action definition"
assert actions[1][1] == 'OnDblClick Action New', 'Re-Registering an action should replace it'
assert isinstance(actions[1][2], dict) and actions[1][2]['id'] == 10, 'Bound action does not match definition'
assert isinstance(actions[1][2], dict) and actions[1][2]['id'] == act_id_1, 'Bound action does not match definition'
actions = ir_values.get(self.cr, self.uid, 'action', 'client_action_multi', ['unexisting_model'])
assert len(actions) == 1, "Mismatching number of bound actions"
assert len(actions[0]) == 3, "Malformed action definition"
assert actions[0][1] == 'Side Wizard', 'Bound action does not match definition'
assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == 12, 'Bound action does not match definition'
assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == act_id_3, 'Bound action does not match definition'
actions = ir_values.get(self.cr, self.uid, 'action', 'client_print_multi', ['unexisting_model'])
assert len(actions) == 1, "Mismatching number of bound actions"
@@ -95,7 +100,7 @@ class test_ir_values(common.TransactionCase):
assert len(actions) == 1, "Mismatching number of bound actions"
assert len(actions[0]) == 3, "Malformed action definition"
assert actions[0][1] == 'Related Stuff', 'Bound action does not match definition'
assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == 14, 'Bound action does not match definition'
assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == act_id_4, 'Bound action does not match definition'
if __name__ == '__main__':
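
The test changes above stop hard-coding action ids (10, 11, 12, 14) and resolve them from XML ids through self.ref() instead, so the assertions hold on any database. A sketch of what such a resolution helper usually boils down to; the helper name is hypothetical, and get_object_reference is the ir.model.data method that ref() shortcuts are commonly built on:

    def resolve_xmlid(registry, cr, uid, xmlid):
        # 'base.act_values_form_action' -> module 'base', name 'act_values_form_action'
        module, name = xmlid.split('.', 1)
        model, res_id = registry('ir.model.data').get_object_reference(cr, uid, module, name)
        return res_id

    # act_id_1 = resolve_xmlid(self.registry, self.cr, self.uid, 'base.act_values_form_action')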

View File

@@ -131,12 +131,13 @@ def ustr(value, hint_encoding='utf-8', errors='strict'):
upstream and should be tried first to decode ``value``.
:param str error: optional `errors` flag to pass to the unicode
built-in to indicate how illegal character values should be
treated when converting a string: 'strict', 'ignore' or 'replace'.
treated when converting a string: 'strict', 'ignore' or 'replace'
(see ``unicode()`` constructor).
Passing anything other than 'strict' means that the first
encoding tried will be used, even if it's not the correct
one to use, so be careful! Ignore if value is not a string/unicode.
:rtype: unicode
one to use, so be careful! Ignored if value is not a string/unicode.
:raise: UnicodeError if value cannot be coerced to unicode
:return: unicode string representing the given value
"""
if isinstance(value, Exception):
return exception_to_unicode(value)

View File

@@ -104,7 +104,10 @@ def load_module_graph(cr, graph, status=None, perform_checks=True, skip_modules=
"""
for filename in package.data[kind]:
_logger.info("module %s: loading %s", module_name, filename)
if kind == 'test':
_logger.log(logging.TEST, "module %s: loading %s", module_name, filename)
else:
_logger.info("module %s: loading %s", module_name, filename)
_, ext = os.path.splitext(filename)
pathname = os.path.join(module_name, filename)
fp = tools.file_open(pathname)

View File

@@ -553,7 +553,7 @@ def run_unit_tests(module_name):
for m in ms:
suite.addTests(unittest2.TestLoader().loadTestsFromModule(m))
if ms:
_logger.info('module %s: executing %s `fast_suite` and/or `checks` sub-modules', module_name, len(ms))
_logger.log(logging.TEST, 'module %s: executing %s `fast_suite` and/or `checks` sub-modules', module_name, len(ms))
# Use a custom stream object to log the test executions.
class MyStream(object):
def __init__(self):
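
The previous two hunks switch test-loading and test-execution messages from _logger.info() to a custom logging.TEST level. The standard library has no such level, so it has to be registered once at startup, presumably elsewhere in the code base; a registration looks roughly like this, where the numeric value 25 is only an assumed placeholder, not taken from this commit:

    import logging

    # Register the custom level once, before any logger uses it.
    logging.TEST = 25  # assumed value; any number between existing levels works
    logging.addLevelName(logging.TEST, 'TEST')

    _logger = logging.getLogger(__name__)
    _logger.log(logging.TEST, "module %s: loading %s", 'base', 'test/base_test.yml')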

View File

@@ -54,7 +54,7 @@ def close_socket(sock):
# of the other side (or something), see
# http://bugs.python.org/issue4397
# note: stdlib fixed test, not behavior
if e.errno != errno.ENOTCONN or platform.system() != 'Darwin':
if e.errno != errno.ENOTCONN or platform.system() not in ['Darwin', 'Windows']:
raise
sock.close()

View File

@@ -706,10 +706,7 @@ class many2many(_column):
if where_c:
where_c = ' AND ' + where_c
if offset or self._limit:
order_by = ' ORDER BY "%s".%s' %(obj._table, obj._order.split(',')[0])
else:
order_by = ''
order_by = ' ORDER BY "%s".%s' %(obj._table, obj._order.split(',')[0])
limit_str = ''
if self._limit is not None:

View File

@@ -916,7 +916,16 @@ class BaseModel(object):
else:
new.extend(cls.__dict__.get(s, []))
nattr[s] = new
# Keep links to non-inherited constraints, e.g. useful when exporting translations
nattr['_local_constraints'] = cls.__dict__.get('_constraints', [])
nattr['_local_sql_constraints'] = cls.__dict__.get('_sql_constraints', [])
cls = type(name, (cls, parent_class), dict(nattr, _register=False))
else:
cls._local_constraints = getattr(cls, '_constraints', [])
cls._local_sql_constraints = getattr(cls, '_sql_constraints', [])
if not getattr(cls, '_original_module', None):
cls._original_module = cls._module
obj = object.__new__(cls)
@@ -1111,7 +1120,7 @@ class BaseModel(object):
if not model_data.search(cr, uid, [('name', '=', n)]):
break
postfix += 1
model_data.create(cr, uid, {
model_data.create(cr, SUPERUSER_ID, {
'name': n,
'model': self._name,
'res_id': r['id'],
@@ -1515,6 +1524,8 @@ class BaseModel(object):
error_msgs = []
for constraint in self._constraints:
fun, msg, fields = constraint
# We don't pass around the context here: validation code
# must always yield the same results.
if not fun(self, cr, uid, ids):
# Check presence of __call__ directly instead of using
# callable() because it will be deprecated as of Python 3.0
@@ -1838,7 +1849,7 @@ class BaseModel(object):
if trans:
node.set('string', trans)
for attr_name in ('confirm', 'sum', 'help', 'placeholder'):
for attr_name in ('confirm', 'sum', 'avg', 'help', 'placeholder'):
attr_value = node.get(attr_name)
if attr_value:
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], attr_value)
@@ -2670,13 +2681,19 @@ class BaseModel(object):
order = orderby or groupby
data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=order, context=context)
# the IDS of records that have groupby field value = False or '' should be sorted too
data_ids += filter(lambda x:x not in data_ids, alldata.keys())
data = self.read(cr, uid, data_ids, groupby and [groupby] or ['id'], context=context)
# restore order of the search as read() uses the default _order (this is only for groups, so the size of data_read shoud be small):
data.sort(lambda x,y: cmp(data_ids.index(x['id']), data_ids.index(y['id'])))
# the IDs of records that have groupby field value = False or '' should be included too
data_ids += set(alldata.keys()).difference(data_ids)
if groupby:
data = self.read(cr, uid, data_ids, [groupby], context=context)
# restore order of the search as read() uses the default _order (this is only for groups, so the footprint of data should be small):
data_dict = dict((d['id'], d[groupby] ) for d in data)
result = [{'id': i, groupby: data_dict[i]} for i in data_ids]
else:
result = [{'id': i} for i in data_ids]
for d in data:
for d in result:
if groupby:
d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
if not isinstance(groupby_list, (str, unicode)):
@@ -2697,11 +2714,11 @@ class BaseModel(object):
del d['id']
if groupby and groupby in self._group_by_full:
data = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
aggregated_fields, data, read_group_order=order,
context=context)
result = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
aggregated_fields, result, read_group_order=order,
context=context)
return data
return result
def _inherits_join_add(self, current_table, parent_model_name, query):
"""
@@ -3898,7 +3915,7 @@ class BaseModel(object):
if isinstance(ids, (int, long)):
ids = [ids]
result_store = self._store_get_values(cr, uid, ids, None, context)
result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context)
self._check_concurrency(cr, ids, context)
@@ -4305,11 +4322,16 @@ class BaseModel(object):
del vals[self._inherits[table]]
record_id = tocreate[table].pop('id', None)
# When linking/creating parent records, force context without 'no_store_function' key that
# defers stored functions computing, as these won't be computed in batch at the end of create().
parent_context = dict(context)
parent_context.pop('no_store_function', None)
if record_id is None or not record_id:
record_id = self.pool.get(table).create(cr, user, tocreate[table], context=context)
record_id = self.pool.get(table).create(cr, user, tocreate[table], context=parent_context)
else:
self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=context)
self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=parent_context)
upd0 += ',' + self._inherits[table]
upd1 += ',%s'
@@ -5198,6 +5220,7 @@ class AbstractModel(BaseModel):
"""
_auto = False # don't create any database backend for AbstractModels
_register = False # not visible in ORM registry, meant to be python-inherited only
_transient = False
def itemgetter_tuple(items):
""" Fixes itemgetter inconsistency (useful in some cases) of not returning

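Among the orm.py changes above, the read_group hunk rebuilds the group rows in the order returned by search(), because read() comes back in the model's default _order. The reordering trick in isolation, with illustrative names:

    def reorder_like_search(search_ids, read_rows, groupby):
        # Index the read() rows by id, then re-emit them in search() order.
        by_id = dict((row['id'], row[groupby]) for row in read_rows)
        return [{'id': id_, groupby: by_id[id_]} for id_ in search_ids]

    # reorder_like_search([3, 1], [{'id': 1, 'state': 'done'}, {'id': 3, 'state': 'open'}], 'state')
    # -> [{'id': 3, 'state': 'open'}, {'id': 1, 'state': 'done'}]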
View File

@@ -163,15 +163,30 @@ class report_rml(report_int):
# * (re)build/update the stylesheet with the translated items
def translate(doc, lang):
for node in doc.xpath('//*[@t]'):
if not node.text:
continue
translation = ir_translation_obj._get_source(cr, uid, self.name2, 'xsl', lang, node.text)
if translation:
node.text = translation
translate_aux(doc, lang, False)
def translate_aux(doc, lang, t):
for node in doc:
t = t or node.get("t")
if t:
text = None
tail = None
if node.text:
text = node.text.strip().replace('\n',' ')
if node.tail:
tail = node.tail.strip().replace('\n',' ')
if text:
translation1 = ir_translation_obj._get_source(cr, uid, self.name2, 'xsl', lang, text)
if translation1:
node.text = node.text.replace(text, translation1)
if tail:
translation2 = ir_translation_obj._get_source(cr, uid, self.name2, 'xsl', lang, tail)
if translation2:
node.tail = node.tail.replace(tail, translation2)
translate_aux(node, lang, t)
if context.get('lang', False):
translate(stylesheet, context['lang'])
translate(stylesheet.iter(), context['lang'])
transform = etree.XSLT(stylesheet)
xml = etree.tostring(
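
The rewritten translate() above walks the stylesheet recursively and translates both node.text and node.tail once an ancestor carries a t attribute. A self-contained sketch of that traversal, with a plain lookup callable standing in for ir.translation._get_source():

    from lxml import etree

    def translate_tree(node, lookup, inherited=False):
        # The t flag propagates down: children of a flagged node are translated too.
        flagged = inherited or bool(node.get('t'))
        if flagged:
            for attr in ('text', 'tail'):
                value = getattr(node, attr)
                if value and value.strip():
                    term = value.strip().replace('\n', ' ')
                    translated = lookup(term)
                    if translated:
                        setattr(node, attr, value.replace(term, translated))
        for child in node:
            translate_tree(child, lookup, flagged)

    # root = etree.fromstring('<xsl t="1"><para>Total</para></xsl>')
    # translate_tree(root, {'Total': 'Totaal'}.get)  # translates the whole subtree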

View File

@@ -99,7 +99,7 @@ class NumberedCanvas(canvas.Canvas):
key = key + 1
self.setFont("Helvetica", 8)
self.drawRightString((self._pagesize[0]-30), (self._pagesize[1]-40),
"Page %(this)i of %(total)i" % {
" %(this)i / %(total)i" % {
'this': self._pageNumber+1,
'total': self.pages.get(key,False),
}
@@ -118,15 +118,19 @@
self._doc.SaveToFile(self._filename, self)
class PageCount(platypus.Flowable):
def __init__(self, story_count=0):
platypus.Flowable.__init__(self)
self.story_count = story_count
def draw(self):
self.canv.beginForm("pageCount")
self.canv.beginForm("pageCount%d" % (self.story_count))
self.canv.setFont("Helvetica", utils.unit_get(str(8)))
self.canv.drawString(0, 0, str(self.canv.getPageNumber()))
self.canv.endForm()
class PageReset(platypus.Flowable):
def draw(self):
self.canv._pageNumber = 0
self.canv._doPageReset = True
class _rml_styles(object,):
def __init__(self, nodes, localcontext):
@@ -343,7 +347,7 @@ class _rml_canvas(object):
if n.tag == 'pageCount':
if x or y:
self.canvas.translate(x,y)
self.canvas.doForm('pageCount')
self.canvas.doForm('pageCount%s' % (self.canvas._storyCount,))
if x or y:
self.canvas.translate(-x,-y)
if n.tag == 'pageNumber':
@@ -878,6 +882,13 @@ class EndFrameFlowable(ActionFlowable):
ActionFlowable.__init__(self,('frameEnd',resume))
class TinyDocTemplate(platypus.BaseDocTemplate):
def beforeDocument(self):
# Store some useful value directly inside canvas, so it's available
# on flowable drawing (needed for proper PageCount handling)
self.canv._doPageReset = False
self.canv._storyCount = 0
def ___handle_pageBegin(self):
self.page = self.page + 1
self.pageTemplate.beforeDrawPage(self.canv,self)
@@ -893,12 +904,24 @@ class TinyDocTemplate(platypus.BaseDocTemplate):
self.frame = f
break
self.handle_frameBegin()
def afterFlowable(self, flowable):
if isinstance(flowable, PageReset):
self.canv._pageCount=self.page
self.page=0
self.canv._flag=True
def afterPage(self):
if self.canv._doPageReset:
# Following a <pageReset/> tag:
# - we reset page number to 0
# - we add a new PageCount flowable (relative to the current
# story number), but not for NumberedCanvas, as it handles page
# count itself
# NOTE: the _rml_template render() method adds a PageReset flowable at the end
# of each story, so we're sure to pass here at least once per story.
if not isinstance(self.canv, NumberedCanvas):
self.handle_flowable([ PageCount(story_count=self.canv._storyCount) ])
self.canv._pageCount = self.page
self.page = 0
self.canv._flag = True
self.canv._pageNumber = 0
self.canv._doPageReset = False
self.canv._storyCount += 1
class _rml_template(object):
def __init__(self, localcontext, out, node, doc, images=None, path='.', title=None):
@@ -965,7 +988,6 @@ class _rml_template(object):
self.doc_tmpl.afterFlowable(fis)
self.doc_tmpl.build(fis,canvasmaker=NumberedCanvas)
else:
fis.append(PageCount())
self.doc_tmpl.build(fis)
def parseNode(rml, localcontext=None, fout=None, images=None, path='.', title=None):

View File

@@ -1,8 +1,9 @@
# -*- coding: utf-8 -*-
##############################################################################
#
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
@@ -15,7 +16,7 @@
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
@@ -23,10 +24,8 @@ import logging
import threading
import time
import http_server
import netrpc_server
import web_services
import websrv_lib
import openerp.cron
import openerp.modules
@@ -61,6 +60,7 @@ Maybe you forgot to add those addons in your addons_path configuration."""
_logger.exception('Failed to load server-wide module `%s`.%s', m, msg)
start_internal_done = False
main_thread_id = threading.currentThread().ident
def start_internal():
global start_internal_done
@@ -99,7 +99,6 @@ def stop_services():
openerp.netsvc.Server.quitAll()
openerp.service.wsgi_server.stop_server()
config = openerp.tools.config
_logger.info("Initiating shutdown")
_logger.info("Hit CTRL-C again or send a second signal to force the shutdown.")
logging.shutdown()
@@ -107,8 +106,9 @@ def stop_services():
# Manually join() all threads before calling sys.exit() to allow a second signal
# to trigger _force_quit() in case some non-daemon threads won't exit cleanly.
# threading.Thread.join() should not mask signals (at least in python 2.5).
me = threading.currentThread()
for thread in threading.enumerate():
if thread != threading.currentThread() and not thread.isDaemon():
if thread != me and not thread.isDaemon() and thread.ident != main_thread_id:
while thread.isAlive():
# Need a busyloop here as thread.join() masks signals
# and would prevent the forced shutdown.
@@ -119,10 +119,9 @@ def stop_services():
def start_services_workers():
import openerp.service.workers
openerp.multi_process = True # Nah!
openerp.multi_process = True
openerp.service.workers.Multicorn(openerp.service.wsgi_server.application).run()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
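
The stop_services() changes above also skip the main thread (by ident) when joining workers, on top of skipping the current thread and daemon threads, and keep a timed busy-loop so a second signal can still force the shutdown. The loop in isolation, with an assumed 0.05s step:

    import threading
    import time

    def join_worker_threads(main_thread_id, step=0.05):
        me = threading.currentThread()
        for thread in threading.enumerate():
            if thread == me or thread.isDaemon() or thread.ident == main_thread_id:
                continue
            while thread.isAlive():
                # Busy-loop instead of join(): join() masks signals and would
                # prevent a second CTRL-C from forcing the shutdown.
                time.sleep(step)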

View File

@@ -58,6 +58,8 @@ def check_ssl():
except:
return False
DEFAULT_LOG_HANDLER = [':INFO']
class configmanager(object):
def __init__(self, fname=None):
# Options not exposed on the command line. Command line options will be added
@@ -180,7 +182,7 @@ class configmanager(object):
group.add_option("--logfile", dest="logfile", help="file where the server log will be stored")
group.add_option("--no-logrotate", dest="logrotate", action="store_false", my_default=True, help="do not rotate the logfile")
group.add_option("--syslog", action="store_true", dest="syslog", my_default=False, help="Send the log to the syslog server")
group.add_option('--log-handler', action="append", default=[':INFO'], my_default=[':INFO'], metavar="PREFIX:LEVEL", help='setup a handler at LEVEL for a given PREFIX. An empty PREFIX indicates the root logger. This option can be repeated. Example: "openerp.orm:DEBUG" or "werkzeug:CRITICAL" (default: ":INFO")')
group.add_option('--log-handler', action="append", default=DEFAULT_LOG_HANDLER, my_default=DEFAULT_LOG_HANDLER, metavar="PREFIX:LEVEL", help='setup a handler at LEVEL for a given PREFIX. An empty PREFIX indicates the root logger. This option can be repeated. Example: "openerp.orm:DEBUG" or "werkzeug:CRITICAL" (default: ":INFO")')
group.add_option('--log-request', action="append_const", dest="log_handler", const="openerp.netsvc.rpc.request:DEBUG", help='shortcut for --log-handler=openerp.netsvc.rpc.request:DEBUG')
group.add_option('--log-response', action="append_const", dest="log_handler", const="openerp.netsvc.rpc.response:DEBUG", help='shortcut for --log-handler=openerp.netsvc.rpc.response:DEBUG')
group.add_option('--log-web', action="append_const", dest="log_handler", const="openerp.addons.web.http:DEBUG", help='shortcut for --log-handler=openerp.addons.web.http:DEBUG')
@@ -300,8 +302,9 @@ class configmanager(object):
# Copy all optparse options (i.e. MyOption) into self.options.
for group in parser.option_groups:
for option in group.option_list:
self.options[option.dest] = option.my_default
self.casts[option.dest] = option
if option.dest not in self.options:
self.options[option.dest] = option.my_default
self.casts[option.dest] = option
self.parse_config(None, False)
@@ -385,13 +388,18 @@ class configmanager(object):
]
for arg in keys:
# Copy the command-line argument...
if getattr(opt, arg):
# Copy the command-line argument (except the special case for log_handler, due to
# action=append requiring a real default, so we cannot use the my_default workaround)
if getattr(opt, arg) and getattr(opt, arg) != DEFAULT_LOG_HANDLER:
self.options[arg] = getattr(opt, arg)
# ... or keep, but cast, the config file value.
elif isinstance(self.options[arg], basestring) and self.casts[arg].type in optparse.Option.TYPE_CHECKER:
self.options[arg] = optparse.Option.TYPE_CHECKER[self.casts[arg].type](self.casts[arg], arg, self.options[arg])
if isinstance(self.options['log_handler'], basestring):
self.options['log_handler'] = self.options['log_handler'].split(',')
# if defined but None take the configfile value
keys = [
'language', 'translate_out', 'translate_in', 'overwrite_existing_translations',
@@ -614,6 +622,9 @@ class configmanager(object):
def __setitem__(self, key, value):
self.options[key] = value
if key in self.options and isinstance(self.options[key], basestring) and \
key in self.casts and self.casts[key].type in optparse.Option.TYPE_CHECKER:
self.options[key] = optparse.Option.TYPE_CHECKER[self.casts[key].type](self.casts[key], key, self.options[key])
def __getitem__(self, key):
return self.options[key]
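
The config hunks above refine option precedence: a command-line value wins over the config file unless it merely equals the built-in default (the DEFAULT_LOG_HANDLER special case forced by action="append"), and values coming from the config file or from __setitem__ are run through optparse's type checker. A sketch of that precedence rule for a single option; the function name and signature are illustrative:

    import optparse

    def resolve_option(name, cli_value, file_value, default, option):
        # Command line wins, except when it only holds the built-in default.
        if cli_value and cli_value != default:
            return cli_value
        # Otherwise keep the config-file value, cast through optparse's checker.
        if isinstance(file_value, basestring) and option.type in optparse.Option.TYPE_CHECKER:
            return optparse.Option.TYPE_CHECKER[option.type](option, name, file_value)
        return file_value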

View File

@@ -358,10 +358,11 @@ def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=Non
"""
# If not cr, get cr from current thread database
local_cr = None
if not cr:
db_name = getattr(threading.currentThread(), 'dbname', None)
if db_name:
cr = pooler.get_db_only(db_name).cursor()
local_cr = cr = pooler.get_db(db_name).cursor()
else:
raise Exception("No database cursor found, please pass one explicitly")
@@ -380,7 +381,8 @@ def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=Non
_logger.exception("tools.email_send failed to deliver email")
return False
finally:
cr.close()
if local_cr:
cr.close()
return res
def email_split(text):
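
The email_send() fix above only closes the cursor it opened itself (local_cr), never one handed in by the caller. The same rule expressed as a context manager; pooler.get_db() is the call used in the hunk, the rest is illustrative:

    from contextlib import contextmanager

    @contextmanager
    def borrowed_cursor(pooler, db_name, cr=None):
        # Yield the caller's cursor if given, otherwise open one from the pool.
        local_cr = None
        if cr is None:
            local_cr = cr = pooler.get_db(db_name).cursor()
        try:
            yield cr
        finally:
            # Close only what we opened ourselves.
            if local_cr is not None:
                local_cr.close()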

View File

@@ -27,6 +27,7 @@ Miscellaneous tools used by OpenERP.
"""
from functools import wraps
import cProfile
import subprocess
import logging
import os
@@ -592,16 +593,10 @@ class profile(object):
def __call__(self, f):
@wraps(f)
def wrapper(*args, **kwargs):
class profile_wrapper(object):
def __init__(self):
self.result = None
def __call__(self):
self.result = f(*args, **kwargs)
pw = profile_wrapper()
import cProfile
fname = self.fname or ("%s.cprof" % (f.func_name,))
cProfile.runctx('pw()', globals(), locals(), filename=fname)
return pw.result
profile = cProfile.Profile()
result = profile.runcall(f, *args, **kwargs)
profile.dump_stats(self.fname or ("%s.cprof" % (f.func_name,)))
return result
return wrapper
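
The simplified profiler above relies on cProfile.Profile.runcall(), which returns the wrapped function's result directly and so removes the old profile_wrapper indirection. A standalone version of the decorator plus a usage example; the @profiled name is illustrative:

    import cProfile
    from functools import wraps

    def profiled(fname=None):
        def decorator(f):
            @wraps(f)
            def wrapper(*args, **kwargs):
                prof = cProfile.Profile()
                result = prof.runcall(f, *args, **kwargs)       # returns f's result
                prof.dump_stats(fname or ('%s.cprof' % f.__name__))
                return result
            return wrapper
        return decorator

    @profiled()
    def busy():
        return sum(i * i for i in range(100000))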

View File

@@ -2,7 +2,7 @@
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 OpenERP s.a. (<http://openerp.com>).
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
@@ -73,8 +73,9 @@ def try_report(cr, uid, rname, ids, data=None, context=None, our_module=None):
os.write(fd, res_data)
os.close(fd)
fp = Popen(['pdftotext', '-enc', 'UTF-8', '-nopgbrk', rfname, '-'], shell=False, stdout=PIPE).stdout
res_text = tools.ustr(fp.read())
proc = Popen(['pdftotext', '-enc', 'UTF-8', '-nopgbrk', rfname, '-'], shell=False, stdout=PIPE)
stdout, stderr = proc.communicate()
res_text = tools.ustr(stdout)
os.unlink(rfname)
except Exception:
_logger.debug("Unable to parse PDF report: install pdftotext to perform automated tests.")
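
The test helper above switches from reading proc.stdout directly to Popen.communicate(), which waits for pdftotext to finish and drains its output instead of leaving the child process behind. A stripped-down sketch of the call, with the same flags as in the hunk and error handling omitted:

    from subprocess import Popen, PIPE

    def pdf_to_text(path):
        proc = Popen(['pdftotext', '-enc', 'UTF-8', '-nopgbrk', path, '-'],
                     shell=False, stdout=PIPE)
        stdout, _ = proc.communicate()   # stderr is not piped, so it comes back as None
        return stdout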

View File

@@ -167,19 +167,22 @@ class GettextAlias(object):
if db_name:
return sql_db.db_connect(db_name)
def _get_cr(self, frame):
def _get_cr(self, frame, allow_create=True):
is_new_cr = False
cr = frame.f_locals.get('cr', frame.f_locals.get('cursor'))
if not cr:
s = frame.f_locals.get('self', {})
cr = getattr(s, 'cr', None)
if not cr:
if not cr and allow_create:
db = self._get_db()
if db:
cr = db.cursor()
is_new_cr = True
return cr, is_new_cr
def _get_uid(self, frame):
return frame.f_locals.get('uid') or frame.f_locals.get('user')
def _get_lang(self, frame):
lang = None
ctx = frame.f_locals.get('context')
@@ -194,11 +197,21 @@ class GettextAlias(object):
ctx = kwargs.get('context')
if ctx:
lang = ctx.get('lang')
s = frame.f_locals.get('self', {})
if not lang:
s = frame.f_locals.get('self', {})
c = getattr(s, 'localcontext', None)
if c:
lang = c.get('lang')
if not lang:
# Last resort: attempt to guess the language of the user
# Pitfall: some operations are performed in sudo mode, and we
# don't know the original uid, so the language may
# be wrong when the admin language differs.
pool = getattr(s, 'pool', None)
(cr, dummy) = self._get_cr(frame, allow_create=False)
uid = self._get_uid(frame)
if pool and cr and uid:
lang = pool.get('res.users').context_get(cr, uid)['lang']
return lang
def __call__(self, source):
@@ -443,12 +456,17 @@ def trans_export(lang, modules, buffer, format, cr):
for module, type, name, res_id, src, trad, comments in rows:
row = grouped_rows.setdefault(src, {})
row.setdefault('modules', set()).add(module)
if ('translation' not in row) or (not row['translation']):
if not row.get('translation') and trad != src:
row['translation'] = trad
row.setdefault('tnrs', []).append((type, name, res_id))
row.setdefault('comments', set()).update(comments)
for src, row in grouped_rows.items():
if not lang:
# translation template, so no translation value
row['translation'] = ''
elif not row.get('translation'):
row['translation'] = src
writer.write(row['modules'], row['tnrs'], src, row['translation'], row['comments'])
elif format == 'tgz':
@@ -484,16 +502,25 @@ def trans_export(lang, modules, buffer, format, cr):
del translations
def trans_parse_xsl(de):
return list(set(trans_parse_xsl_aux(de, False)))
def trans_parse_xsl_aux(de, t):
res = []
for n in de:
if n.get("t"):
for m in n:
if isinstance(m, SKIPPED_ELEMENT_TYPES) or not m.text:
t = t or n.get("t")
if t:
if isinstance(n, SKIPPED_ELEMENT_TYPES) or n.tag.startswith('{http://www.w3.org/1999/XSL/Transform}'):
continue
l = m.text.strip().replace('\n',' ')
if len(l):
res.append(l.encode("utf8"))
res.extend(trans_parse_xsl(n))
if n.text:
l = n.text.strip().replace('\n',' ')
if len(l):
res.append(l.encode("utf8"))
if n.tail:
l = n.tail.strip().replace('\n',' ')
if len(l):
res.append(l.encode("utf8"))
res.extend(trans_parse_xsl_aux(n, t))
return res
def trans_parse_rml(de):
@@ -766,26 +793,33 @@ def trans_generate(lang, modules, cr):
cr.execute(query_models, query_param)
def push_constraint_msg(module, term_type, model, msg):
# Check presence of __call__ directly instead of using
# callable() because it will be deprecated as of Python 3.0
if not hasattr(msg, '__call__'):
push_translation(module, term_type, model, 0, encode(msg))
push_translation(encode(module), term_type, encode(model), 0, encode(msg))
def push_local_constraints(module, model, cons_type='sql_constraints'):
"""Climb up the class hierarchy and ignore inherited constraints
from other modules"""
term_type = 'sql_constraint' if cons_type == 'sql_constraints' else 'constraint'
msg_pos = 2 if cons_type == 'sql_constraints' else 1
for cls in model.__class__.__mro__:
if getattr(cls, '_module', None) != module:
continue
constraints = getattr(cls, '_local_' + cons_type, [])
for constraint in constraints:
push_constraint_msg(module, term_type, model._name, constraint[msg_pos])
for (_, model, module) in cr.fetchall():
module = encode(module)
model = encode(model)
model_obj = pool.get(model)
if not model_obj:
_logger.error("Unable to find object %r", model)
continue
for constraint in getattr(model_obj, '_constraints', []):
push_constraint_msg(module, 'constraint', model, constraint[1])
if model_obj._constraints:
push_local_constraints(module, model_obj, 'constraints')
for constraint in getattr(model_obj, '_sql_constraints', []):
push_constraint_msg(module, 'sql_constraint', model, constraint[2])
if model_obj._sql_constraints:
push_local_constraints(module, model_obj, 'sql_constraints')
def get_module_from_path(path, mod_paths=None):
if not mod_paths:
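
The push_local_constraints() helper added above climbs the model's MRO and keeps only the constraints declared by the module currently being exported, using the _local_constraints / _local_sql_constraints attributes introduced in the orm.py hunk earlier. The core of that walk, stripped of the translation plumbing; names are illustrative:

    def local_constraint_messages(model_cls, module, cons_type='sql_constraints'):
        # SQL constraints keep their message at index 2, python constraints at index 1.
        msg_pos = 2 if cons_type == 'sql_constraints' else 1
        messages = []
        for cls in model_cls.__mro__:
            if getattr(cls, '_module', None) != module:
                continue
            for constraint in cls.__dict__.get('_local_' + cons_type, []):
                messages.append(constraint[msg_pos])
        return messages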

View File

@@ -350,39 +350,41 @@ class YamlInterpreter(object):
return one2many_view
def process_val(key, val):
if fg[key]['type']=='many2one':
if fg[key]['type'] == 'many2one':
if type(val) in (tuple,list):
val = val[0]
elif (fg[key]['type']=='one2many'):
if val is False:
val = []
if len(val) and type(val[0]) == dict:
#we want to return only the fields that aren't readonly
#For that, we need to first get the right tree view to consider for the field `key´
elif fg[key]['type'] == 'one2many':
if val and isinstance(val, (list,tuple)) and isinstance(val[0], dict):
# we want to return only the fields that aren't readonly
# For that, we need to first get the right tree view to consider for the field `key´
one2many_tree_view = _get_right_one2many_view(fg, key, 'tree')
arch = etree.fromstring(one2many_tree_view['arch'].encode('utf-8'))
for rec in val:
#make a copy for the iteration, as we will alterate the size of `rec´ dictionary
# make a copy for the iteration, as we will alter `rec´
rec_copy = rec.copy()
for field_key in rec_copy:
#seek in the view for the field `field_key´ and removing it from `key´ values, as this column is readonly in the tree view
subfield_obj = etree.fromstring(one2many_tree_view['arch'].encode('utf-8')).xpath("//field[@name='%s']"%(field_key))
if subfield_obj and (subfield_obj[0].get('modifiers', '{}').find('"readonly": true') >= 0):
#TODO: currently we only support if readonly is True in the modifiers. Some improvement may be done in
#order to support also modifiers that look like {"readonly": [["state", "not in", ["draft", "confirm"]]]}
del(rec[field_key])
#now that unwanted values have been removed from val, we can encapsulate it in a tuple as returned value
# if field is missing in view or has a readonly modifier, drop it
field_elem = arch.xpath("//field[@name='%s']" % field_key)
if field_elem and (field_elem[0].get('modifiers', '{}').find('"readonly": true') >= 0):
# TODO: currently we only support if readonly is True in the modifiers. Some improvement may be done in
# order to support also modifiers that look like {"readonly": [["state", "not in", ["draft", "confirm"]]]}
del rec[field_key]
# now that unwanted values have been removed from val, we can encapsulate it in a tuple as returned value
val = map(lambda x: (0,0,x), val)
elif fg[key]['type'] == 'many2many':
if val and isinstance(val,(list,tuple)) and isinstance(val[0], (int,long)):
val = [(6,0,val)]
#we want to return only the fields that aren't readonly
# we want to return only the fields that aren't readonly
if el.get('modifiers', '{}').find('"readonly": true') >= 0:
#TODO: currently we only support if readonly is True in the modifiers. Some improvement may be done in
#order to support also modifiers that look like {"readonly": [["state", "not in", ["draft", "confirm"]]]}
# TODO: currently we only support if readonly is True in the modifiers. Some improvement may be done in
# order to support also modifiers that look like {"readonly": [["state", "not in", ["draft", "confirm"]]]}
return False
return val
if view_info:
arch = etree.fromstring(view_info['arch'].encode('utf-8'))
arch = etree.fromstring(view_info['arch'].decode('utf-8'))
view = arch if len(arch) else False
else:
view = False
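
process_val() above drops one2many sub-record fields that the relevant tree view marks read-only, detected by looking for '"readonly": true' in the field's modifiers attribute. A sketch of that check which parses the modifiers JSON instead of doing a substring match, i.e. a slightly stricter variant of the hunk's test:

    import json
    from lxml import etree

    def field_is_readonly_in_view(arch_xml, field_name):
        arch = etree.fromstring(arch_xml)
        nodes = arch.xpath("//field[@name='%s']" % field_name)
        if not nodes:
            return False
        modifiers = json.loads(nodes[0].get('modifiers', '{}'))
        # As the TODOs in the hunk note, only a literal true is handled, not
        # domain-style modifiers like {"readonly": [["state", "=", "done"]]}.
        return modifiers.get('readonly') is True

    # Given a tree arch where <field name="state"> carries modifiers='{"readonly": true}',
    # field_is_readonly_in_view(arch, 'state') returns True.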

View File

@@ -19,9 +19,16 @@
#
##############################################################################
import os
import glob
from distutils.core import setup
import py2exe
def datas():
r = []
if os.name == 'nt':
r.append(("Microsoft.VC90.CRT", glob.glob('C:\Microsoft.VC90.CRT\*.*')))
return r
setup(service=["OpenERPServerService"],
options={"py2exe":{"excludes":["Tkconstants","Tkinter","tcl",
@@ -29,7 +36,8 @@ setup(service=["OpenERPServerService"],
"ImageTk", "PIL.ImageTk",
"FixTk"],
"skip_archive": 1,
"optimize": 2,}}
"optimize": 2,}},
data_files=datas(),
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: