[IMP] improved error messages; constraints must always propose a solution

bzr revid: fp@fp-laptop-20111231075720-4kak7o320td4wxqc
fp 2011-12-31 08:57:20 +01:00
parent 917d171804
commit 8aff5209ea
20 changed files with 179 additions and 180 deletions
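The guideline behind this commit, shown as a minimal, hypothetical sketch (the model, fields and messages below are invented for illustration and are not part of the commit; imports follow the OpenERP 6.x conventions visible in the diff): a constraint or exception should name the problem and then propose a way out.

```python
# Minimal sketch of the message style this commit enforces: the text states
# the problem, then tells the user what to do next.  The model is hypothetical.
from osv import osv, fields
from tools.translate import _

class res_partner_quota(osv.osv):
    _name = 'res.partner.quota'
    _columns = {
        'name': fields.char('Name', size=64, required=True),
        'limit': fields.float('Limit'),
    }

    def _check_limit(self, cr, uid, ids, context=None):
        for record in self.browse(cr, uid, ids, context=context):
            if record.limit < 0:
                return False
        return True

    _constraints = [
        # Before: 'Wrong limit!'  -- names the problem only.
        # After: the message also proposes a solution to the user.
        (_check_limit,
         'The limit can not be negative! You should set a positive limit or leave it at zero.',
         ['limit']),
    ]

    def unlink(self, cr, uid, ids, context=None):
        for record in self.browse(cr, uid, ids, context=context):
            if record.limit:
                raise osv.except_osv(
                    _('Error !'),
                    _('You can not remove a quota that is still in use. You should set its limit to zero first.'))
        return super(res_partner_quota, self).unlink(cr, uid, ids, context=context)
```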

@@ -620,7 +620,7 @@ class account_account(osv.osv):
        if method == 'write':
            raise osv.except_osv(_('Error !'), _('You can not desactivate an account that contains some journal items.'))
        elif method == 'unlink':
-           raise osv.except_osv(_('Error !'), _('You can not remove an account containing journal items!. '))
+           raise osv.except_osv(_('Error !'), _('You can not remove an account containing journal items.'))
    #Checking whether the account is set as a property to any Partner or not
    value = 'account.account,' + str(ids[0])
    partner_prop_acc = self.pool.get('ir.property').search(cr, uid, [('value_reference','=',value)], context=context)
@@ -660,7 +660,7 @@ class account_account(osv.osv):
        # Allow the write if the value is the same
        for i in [i['company_id'][0] for i in self.read(cr,uid,ids,['company_id'])]:
            if vals['company_id']!=i:
-               raise osv.except_osv(_('Warning !'), _('You cannot modify Company of account as its related record exist in Entry Lines'))
+               raise osv.except_osv(_('Warning !'), _('You cannot modify the company as its related to existing journal items.'))
    if 'active' in vals and not vals['active']:
        self._check_moves(cr, uid, ids, "write", context=context)
    if 'type' in vals.keys():
@@ -959,7 +959,7 @@ class account_fiscalyear(osv.osv):
    ids = self.search(cr, uid, args, context=context)
    if not ids:
        if exception:
-           raise osv.except_osv(_('Error !'), _('No fiscal year defined for this date !\nPlease create one.'))
+           raise osv.except_osv(_('Error !'), _('No fiscal year defined for this date !\nPlease create one from the configuration of the accounting menu.'))
        else:
            return []
    return ids

@@ -297,7 +297,7 @@ class account_bank_statement(osv.osv):
            context=context):
        if line.state <> 'valid':
            raise osv.except_osv(_('Error !'),
-               _('Journal Item "%s" is not valid') % line.name)
+               _('Journal item "%s" is not valid.') % line.name)
    # Bank statements will not consider boolean on journal entry_posted
    account_move_obj.post(cr, uid, [move_id], context=context)
@@ -352,7 +352,7 @@ class account_bank_statement(osv.osv):
    for st_line in st.line_ids:
        if st_line.analytic_account_id:
            if not st.journal_id.analytic_journal_id:
-               raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal!") % (st.journal_id.name,))
+               raise osv.except_osv(_('No Analytic Journal !'),_("You have to assign an analytic journal on the '%s' journal!") % (st.journal_id.name,))
        if not st_line.amount:
            continue
        st_line_number = self.get_next_st_line_number(cr, uid, st_number, st_line, context)

@@ -311,7 +311,7 @@ class account_cash_statement(osv.osv):
        if journal_type == 'bank':
            return super(account_cash_statement, self).balance_check(cr, uid, cash_id, journal_type, context)
        if not self._equal_balance(cr, uid, cash_id, context):
-           raise osv.except_osv(_('Error !'), _('The closing balance should be the same than the computed balance !'))
+           raise osv.except_osv(_('Error !'), _('The closing balance should be the same than the computed balance!'))
        return True

    def statement_close(self, cr, uid, ids, journal_type='bank', context=None):

@@ -68,7 +68,7 @@ class account_invoice(osv.osv):
        tt = type2journal.get(type_inv, 'sale')
        result = self.pool.get('account.analytic.journal').search(cr, uid, [('type','=',tt)], context=context)
        if not result:
-           raise osv.except_osv(_('No Analytic Journal !'),_("You must define an analytic journal of type '%s' !") % (tt,))
+           raise osv.except_osv(_('No Analytic Journal !'),_("You must define an analytic journal of type '%s'!") % (tt,))
        return result[0]

    def _get_type(self, cr, uid, context=None):
@@ -261,7 +261,7 @@ class account_invoice(osv.osv):
        'partner_bank_id': fields.many2one('res.partner.bank', 'Bank Account',
            help='Bank Account Number, Company bank account if Invoice is customer or supplier refund, otherwise Partner bank account number.', readonly=True, states={'draft':[('readonly',False)]}),
        'move_lines':fields.function(_get_lines, type='many2many', relation='account.move.line', string='Entry Lines'),
-       'residual': fields.function(_amount_residual, digits_compute=dp.get_precision('Account'), string='To Pay',
+       'residual': fields.function(_amount_residual, digits_compute=dp.get_precision('Account'), string='Balance',
            store={
                'account.invoice': (lambda self, cr, uid, ids, c={}: ids, ['invoice_line','move_id'], 50),
                'account.invoice.tax': (_get_invoice_tax, None, 50),
@@ -414,7 +414,7 @@ class account_invoice(osv.osv):
        pay_res_id = pay_line_data and pay_line_data[0].get('value_reference',False) and int(pay_line_data[0]['value_reference'].split(',')[1]) or False
        if not rec_res_id and not pay_res_id:
            raise osv.except_osv(_('Configuration Error !'),
-               _('Can not find account chart for this company, Please Create account.'))
+               _('Can not find a chart of accounts for this company, you should create one.'))
        account_obj = self.pool.get('account.account')
        rec_obj_acc = account_obj.browse(cr, uid, [rec_res_id])
        pay_obj_acc = account_obj.browse(cr, uid, [pay_res_id])
@@ -481,7 +481,7 @@ class account_invoice(osv.osv):
            pterm_list.sort()
            res = {'value':{'date_due': pterm_list[-1]}}
        else:
-           raise osv.except_osv(_('Data Insufficient !'), _('The Payment Term of Supplier does not have Payment Term Lines(Computation) defined !'))
+           raise osv.except_osv(_('Data Insufficient !'), _('The payment term of supplier does not have a payment term line!'))
        return res

    def onchange_invoice_line(self, cr, uid, ids, lines):
@@ -514,7 +514,7 @@ class account_invoice(osv.osv):
        pay_res_id = pay_line_data and pay_line_data[0].get('value_reference',False) and int(pay_line_data[0]['value_reference'].split(',')[1]) or False
        if not rec_res_id and not pay_res_id:
            raise osv.except_osv(_('Configuration Error !'),
-               _('Can not find account chart for this company, Please Create account.'))
+               _('Can not find a chart of account, you should create one from the configuration of the accounting menu.'))
        if type in ('out_invoice', 'out_refund'):
            acc_id = rec_res_id
        else:
@@ -529,7 +529,7 @@ class account_invoice(osv.osv):
            result_id = account_obj.search(cr, uid, [('name','=',line.account_id.name),('company_id','=',company_id)])
            if not result_id:
                raise osv.except_osv(_('Configuration Error !'),
-                   _('Can not find account chart for this company in invoice line account, Please Create account.'))
+                   _('Can not find a chart of account, you should create one from the configuration of the accounting menu.'))
            inv_line_obj.write(cr, uid, [line.id], {'account_id': result_id[-1]})
    else:
        if invoice_line:
@@ -730,13 +730,13 @@ class account_invoice(osv.osv):
            key = (tax.tax_code_id.id, tax.base_code_id.id, tax.account_id.id)
            tax_key.append(key)
            if not key in compute_taxes:
-               raise osv.except_osv(_('Warning !'), _('Global taxes defined, but are not in invoice lines !'))
+               raise osv.except_osv(_('Warning !'), _('Global taxes defined, but they are not in invoice lines !'))
            base = compute_taxes[key]['base']
            if abs(base - tax.base) > inv.company_id.currency_id.rounding:
-               raise osv.except_osv(_('Warning !'), _('Tax base different !\nClick on compute to update tax base'))
+               raise osv.except_osv(_('Warning !'), _('Tax base different!\nClick on compute to update the tax base.'))
        for key in compute_taxes:
            if not key in tax_key:
-               raise osv.except_osv(_('Warning !'), _('Taxes missing !'))
+               raise osv.except_osv(_('Warning !'), _('Taxes are missing!\nClick on compute button.'))

    def compute_invoice_totals(self, cr, uid, inv, company_currency, ref, invoice_move_lines):
        total = 0
@@ -802,7 +802,7 @@ class account_invoice(osv.osv):
            context = {}
        for inv in self.browse(cr, uid, ids):
            if not inv.journal_id.sequence_id:
-               raise osv.except_osv(_('Error !'), _('Please define sequence on invoice journal'))
+               raise osv.except_osv(_('Error !'), _('Please define sequence on the journal related to this invoice.'))
            if not inv.invoice_line:
                raise osv.except_osv(_('No Invoice Lines !'), _('Please create some invoice lines.'))
            if inv.move_id:
@@ -832,7 +832,7 @@ class account_invoice(osv.osv):
                    total_percent += line.value_amount
                total_fixed = (total_fixed * 100) / (inv.amount_total or 1.0)
                if (total_fixed + total_percent) > 100:
-                   raise osv.except_osv(_('Error !'), _("Can not create the invoice !\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount."))
+                   raise osv.except_osv(_('Error !'), _("Can not create the invoice !\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. The latest line of your payment term must be of type 'balance' to avoid rounding issues."))
            # one move line per tax line
            iml += ait_obj.move_line_get(cr, uid, inv.id)

@@ -94,7 +94,7 @@ class account_move_line(osv.osv):
        if initial_bal and not context.get('periods', False) and not where_move_lines_by_date:
            #we didn't pass any filter in the context, and the initial balance can't be computed using only the fiscalyear otherwise entries will be summed twice
            #so we have to invalidate this query
-           raise osv.except_osv(_('Warning !'),_("You haven't supplied enough argument to compute the initial balance"))
+           raise osv.except_osv(_('Warning !'),_("You haven't supplied enough argument to compute the initial balance, please select a period and journal in the context."))
        if context.get('journal_ids', False):
@@ -581,14 +581,14 @@ class account_move_line(osv.osv):
        lines = self.browse(cr, uid, ids, context=context)
        for l in lines:
            if l.account_id.type == 'view':
-               raise osv.except_osv(_('Error :'), _('You can not create move line on view account %s %s') % (l.account_id.code, l.account_id.name))
+               raise osv.except_osv(_('Error :'), _('You can not create journal items on a "view" account %s %s') % (l.account_id.code, l.account_id.name))
        return True

    def _check_no_closed(self, cr, uid, ids, context=None):
        lines = self.browse(cr, uid, ids, context=context)
        for l in lines:
            if l.account_id.type == 'closed':
-               raise osv.except_osv(_('Error :'), _('You can not create move line on closed account %s %s') % (l.account_id.code, l.account_id.name))
+               raise osv.except_osv(_('Error :'), _('You can not create journal items on a closed account %s %s') % (l.account_id.code, l.account_id.name))
        return True

    def _check_company_id(self, cr, uid, ids, context=None):
@@ -613,11 +613,11 @@ class account_move_line(osv.osv):
        return True

    _constraints = [
-       (_check_no_view, 'You can not create move line on view account.', ['account_id']),
-       (_check_no_closed, 'You can not create move line on closed account.', ['account_id']),
-       (_check_company_id, 'Company must be same for its related account and period.', ['company_id']),
-       (_check_date, 'The date of your Journal Entry is not in the defined period!', ['date']),
-       (_check_currency, 'The selected account of your Journal Entry must receive a value in its secondary currency', ['currency_id']),
+       (_check_no_view, 'You can not create journal items on an account of type view.', ['account_id']),
+       (_check_no_closed, 'You can not create journal items on closed account.', ['account_id']),
+       (_check_company_id, 'Company must be the same for its related account and period.', ['company_id']),
+       (_check_date, 'The date of your Journal Entry is not in the defined period! You should change the date or remove this constraint from the journal.', ['date']),
+       (_check_currency, 'The selected account of your Journal Entry forces to provide a secondary currency. You should remove the secondary currency on the account or select a multi-currency view on the journal.', ['currency_id']),
    ]

    #TODO: ONCHANGE_ACCOUNT_ID: set account_tax_id
@@ -832,7 +832,7 @@ class account_move_line(osv.osv):
            raise osv.except_osv(_('Error'), _('Entry is already reconciled'))
        account = account_obj.browse(cr, uid, account_id, context=context)
        if not context.get('fy_closing', False) and not account.reconcile:
-           raise osv.except_osv(_('Error'), _('The account is not defined to be reconciled !'))
+           raise osv.except_osv(_('Error'), _('This account does not allow reconciliation! You should update the account definition to change this.'))
        if r[0][1] != None:
            raise osv.except_osv(_('Error'), _('Some entries are already reconciled !'))
@@ -1212,9 +1212,9 @@ class account_move_line(osv.osv):
        for line in self.browse(cr, uid, ids, context=context):
            err_msg = _('Move name (id): %s (%s)') % (line.move_id.name, str(line.move_id.id))
            if line.move_id.state <> 'draft' and (not line.journal_id.entry_posted):
-               raise osv.except_osv(_('Error !'), _('You can not do this modification on a confirmed entry ! Please note that you can just change some non important fields ! \n%s') % err_msg)
+               raise osv.except_osv(_('Error !'), _('You can not do this modification on a confirmed entry! You can just change some non legal fields or you must unconfirm the journal entry first! \n%s') % err_msg)
            if line.reconcile_id:
-               raise osv.except_osv(_('Error !'), _('You can not do this modification on a reconciled entry ! Please note that you can just change some non important fields ! \n%s') % err_msg)
+               raise osv.except_osv(_('Error !'), _('You can not do this modification on a reconciled entry! You can just change some non legal fields or you must unreconcile first!\n%s') % err_msg)
            t = (line.journal_id.id, line.period_id.id)
            if t not in done:
                self._update_journal_check(cr, uid, line.journal_id.id, line.period_id.id, context)
@@ -1247,7 +1247,7 @@ class account_move_line(osv.osv):
        if 'period_id' not in context or not isinstance(context.get('period_id', ''), (int, long)):
            period_candidate_ids = self.pool.get('account.period').name_search(cr, uid, name=context.get('period_id',''))
            if len(period_candidate_ids) != 1:
-               raise osv.except_osv(_('Encoding error'), _('No period found or period given is ambigous.'))
+               raise osv.except_osv(_('Encoding error'), _('No period found or more than one period found for the given date.'))
            context['period_id'] = period_candidate_ids[0][0]
        if not context.get('journal_id', False) and context.get('search_default_journal_id', False):
            context['journal_id'] = context.get('search_default_journal_id')

@@ -116,7 +116,7 @@ class crossovered_budget_lines(osv.osv):
        for line in self.browse(cr, uid, ids, context=context):
            acc_ids = [x.id for x in line.general_budget_id.account_ids]
            if not acc_ids:
-               raise osv.except_osv(_('Error!'),_("The General Budget '%s' has no Accounts!") % str(line.general_budget_id.name))
+               raise osv.except_osv(_('Error!'),_("The Budget '%s' has no accounts!") % str(line.general_budget_id.name))
            date_to = line.date_to
            date_from = line.date_from
            if context.has_key('wizard_date_from'):

@@ -441,7 +441,7 @@ class account_voucher(osv.osv):
            tr_type = 'purchase'
        else:
            if not journal.default_credit_account_id or not journal.default_debit_account_id:
-               raise osv.except_osv(_('Error !'), _('Please define default credit/debit account on the %s !') % (journal.name))
+               raise osv.except_osv(_('Error !'), _('Please define default credit/debit accounts on the journal "%s" !') % (journal.name))
            account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
            tr_type = 'receipt'

@@ -427,7 +427,7 @@ class crm_case(crm_base):
            if case.section_id.parent_id.user_id:
                data['user_id'] = case.section_id.parent_id.user_id.id
            else:
-               raise osv.except_osv(_('Error !'), _('You can not escalate, You are already at the top level regarding your sales-team category.'))
+               raise osv.except_osv(_('Error !'), _('You can not escalate, you are already at the top level regarding your sales-team category.'))
            self.write(cr, uid, [case.id], data)
        cases = self.browse(cr, uid, ids)
        self.message_append(cr, uid, cases, _('Escalate'))

@@ -194,7 +194,7 @@ class crm_lead(crm_case, osv.osv):
        # Only used for type opportunity
        'partner_address_id': fields.many2one('res.partner.address', 'Partner Contact', domain="[('partner_id','=',partner_id)]"),
        'probability': fields.float('Probability (%)',group_operator="avg"),
        'planned_revenue': fields.float('Expected Revenue'),
        'ref': fields.reference('Reference', selection=crm._links_get, size=128),
@@ -352,7 +352,7 @@ class crm_lead(crm_case, osv.osv):
        """
        return self.set_priority(cr, uid, ids, '3')

    def _merge_data(self, cr, uid, ids, oldest, fields, context=None):
        # prepare opportunity data into dictionary for merging
        opportunities = self.browse(cr, uid, ids, context=context)
@@ -367,7 +367,7 @@ class crm_lead(crm_case, osv.osv):
        def _get_first_not_null_id(attr):
            res = _get_first_not_null(attr)
            return res and res.id or False

        def _concat_all(attr):
            return ', '.join(filter(lambda x: x, [getattr(opportunity, attr) or '' for opportunity in opportunities if hasattr(opportunity, attr)]))
@@ -378,7 +378,7 @@ class crm_lead(crm_case, osv.osv):
                continue
            field = field_info.column
            if field._type in ('many2many', 'one2many'):
                continue
            elif field._type == 'many2one':
                data[field_name] = _get_first_not_null_id(field_name)  # !!
            elif field._type == 'text':
@@ -437,18 +437,18 @@ class crm_lead(crm_case, osv.osv):
            subject.append(opportunity.name)
            title = "%s : %s" % (merge_message, opportunity.name)
            details.append(self._mail_body_text(cr, uid, opportunity, fields, title=title, context=context))

        subject = subject[0] + ", ".join(subject[1:])
        details = "\n\n".join(details)
        return self.message_append(cr, uid, [opportunity_id], subject, body_text=details, context=context)

    def _merge_opportunity_history(self, cr, uid, opportunity_id, opportunities, context=None):
        message = self.pool.get('mail.message')
        for opportunity in opportunities:
            for history in opportunity.message_ids:
                message.write(cr, uid, history.id, {
                        'res_id': opportunity_id,
                        'subject' : _("From %s : %s") % (opportunity.name, history.subject)
                }, context=context)
        return True
@@ -474,8 +474,8 @@ class crm_lead(crm_case, osv.osv):
                )
                attachment.write(values)
                count+=1
        return True

    def merge_opportunity(self, cr, uid, ids, context=None):
        """
@@ -483,12 +483,12 @@ class crm_lead(crm_case, osv.osv):
        :param ids: list of opportunities ids to merge
        """
        if context is None: context = {}

        #TOCHECK: where pass lead_ids in context?
        lead_ids = context and context.get('lead_ids', []) or []

        if len(ids) <= 1:
-           raise osv.except_osv(_('Warning !'),_('Please select more than one opportunities.'))
+           raise osv.except_osv(_('Warning !'),_('Please select more than one opportunity from the list view.'))

        ctx_opportunities = self.browse(cr, uid, lead_ids, context=context)
        opportunities = self.browse(cr, uid, ids, context=context)
@@ -501,11 +501,11 @@ class crm_lead(crm_case, osv.osv):
        first_opportunity = opportunities_list[0]
        tail_opportunities = opportunities_list[1:]

        fields = ['partner_id', 'title', 'name', 'categ_id', 'channel_id', 'city', 'company_id', 'contact_name', 'country_id',
            'partner_address_id', 'type_id', 'user_id', 'section_id', 'state_id', 'description', 'email', 'fax', 'mobile',
            'partner_name', 'phone', 'probability', 'planned_revenue', 'street', 'street2', 'zip', 'create_date', 'date_action_last',
            'date_action_next', 'email_from', 'email_cc', 'partner_name']

        data = self._merge_data(cr, uid, ids, oldest, fields, context=context)
        # merge data into first opportunity
@@ -513,8 +513,8 @@ class crm_lead(crm_case, osv.osv):
        #copy message and attachements into the first opportunity
        self._merge_opportunity_history(cr, uid, first_opportunity.id, tail_opportunities, context=context)
        self._merge_opportunity_attachments(cr, uid, first_opportunity.id, tail_opportunities, context=context)

        #Notification about loss of information
        self._merge_notification(cr, uid, first_opportunity, opportunities, context=context)
        #delete tail opportunities
@@ -534,7 +534,7 @@ class crm_lead(crm_case, osv.osv):
        if section_id:
            stage_ids = crm_stage.search(cr, uid, [('sequence','>=',1), ('section_ids','=', section_id)])
        else:
            stage_ids = crm_stage.search(cr, uid, [('sequence','>=',1)])

        stage_id = stage_ids and stage_ids[0] or False
        return {
            'planned_revenue': lead.planned_revenue,
@@ -565,10 +565,10 @@ class crm_lead(crm_case, osv.osv):
                continue
            if user_ids or section_id:
                self.allocate_salesman(cr, uid, [lead.id], user_ids, section_id, context=context)

            vals = self._convert_opportunity_data(cr, uid, lead, customer, section_id, context=context)
            self.write(cr, uid, [lead.id], vals, context=context)
            self._convert_opportunity_notification(cr, uid, lead, context=context)
            #TOCHECK: why need to change partner details in all messages of lead ?
            if lead.partner_id:
@@ -596,7 +596,7 @@ class crm_lead(crm_case, osv.osv):
            res_partner.write(cr, uid, partner_id, {'section_id': lead.section_id.id or False})
            contact_id = res_partner.address_get(cr, uid, [partner_id])['default']
            res = lead.write({'partner_id' : partner_id, 'partner_address_id': contact_id}, context=context)
        return res

    def _lead_create_partner_address(self, cr, uid, lead, partner_id, context=None):
@@ -628,7 +628,7 @@ class crm_lead(crm_case, osv.osv):
            context = {}
        partner_ids = {}
        for lead in self.browse(cr, uid, ids, context=context):
            if action == 'create':
                if not partner_id:
                    partner_id = self._lead_create_partner(cr, uid, lead, context=context)
                self._lead_create_partner_address(cr, uid, lead, partner_id, context=context)
@@ -641,12 +641,12 @@ class crm_lead(crm_case, osv.osv):
        Send mail to salesman with updated Lead details.
        @ lead: browse record of 'crm.lead' object.
        """
        #TOFIX: mail template should be used here instead of fix subject, body text.
        message = self.pool.get('mail.message')
        email_to = lead.user_id and lead.user_id.user_email
        if not email_to:
            return False

        email_from = lead.section_id and lead.section_id.user_id and lead.section_id.user_id.user_email or email_to
        partner = lead.partner_id and lead.partner_id.name or lead.partner_name
        subject = "lead %s converted into opportunity" % lead.name
@@ -697,7 +697,7 @@ class crm_lead(crm_case, osv.osv):
            'partner_mobile' : lead.partner_address_id and lead.partner_address_id.mobile or False,
            'priority': lead.priority,
        }
        new_id = phonecall.create(cr, uid, vals, context=context)
        phonecall.case_open(cr, uid, [new_id])
        if action == 'log':
@@ -799,10 +799,10 @@ class crm_lead(crm_case, osv.osv):
        context.update({
            'default_opportunity_id': opp.id,
            'default_partner_id': opp.partner_id and opp.partner_id.id or False,
            'default_user_id': uid,
            'default_section_id': opp.section_id and opp.section_id.id or False,
            'default_email_from': opp.email_from,
            'default_state': 'open',
            'default_name': opp.name
        })
        value = {

@@ -212,7 +212,7 @@ class delivery_grid(osv.osv):
                ok = True
                break
        if not ok:
-           raise osv.except_osv(_('No price available !'), _('No line matched this order in the choosed delivery grids !'))
+           raise osv.except_osv(_('No price available!'), _('No line matched this product or order in the choosed delivery grid.'))
        return price

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
@@ -15,7 +15,7 @@
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
@@ -29,20 +29,20 @@ class NhException(Exception):

class indexer(object):
    """ An indexer knows how to parse the content of some file.

        Typically, one indexer should be instantiated per file
        type.
        Override this class to add more functionality. Note that
        you should only override the Content or the File methods
        that give an optimal result. """

    def _getMimeTypes(self):
        """ Return supported mimetypes """
        return []

    def _getExtensions(self):
        return []

    def _getDefMime(self, ext):
        """ Return a mimetype for this document type, ideally the
            closest to the extension ext. """
@@ -63,23 +63,23 @@ class indexer(object):
            return self._doIndexContent(content)
        except NhException:
            pass

        if realfile != None:
            try:
                return self._doIndexFile(realfile)
            except NhException:
                pass

            fp = open(realfile,'rb')
            try:
                content2 = fp.read()
            finally:
                fp.close()

            # The not-handled exception may be raised here
            return self._doIndexContent(content2)

        # last try, with a tmp file
        if content:
            try:
@@ -94,7 +94,7 @@ class indexer(object):
            pass

        raise NhException('No appropriate method to index file')

    def _doIndexContent(self,content):
        raise NhException("Content not handled here")
@@ -103,7 +103,7 @@ class indexer(object):
    def __repr__(self):
        return "<indexer %s.%s>" %(self.__module__, self.__class__.__name__)

def mime_match(mime, mdict):
    if mdict.has_key(mime):
@@ -112,7 +112,7 @@ def mime_match(mime, mdict):
    mpat = mime.split('/')[0]+'/*'
    if mdict.has_key(mpat):
        return (mime, mdict[mpat])

    return (None, None)

class contentIndex(object):
@@ -120,22 +120,22 @@ class contentIndex(object):
    def __init__(self):
        self.mimes = {}
        self.exts = {}

    def register(self, obj):
        f = False
        for mime in obj._getMimeTypes():
            self.mimes[mime] = obj
            f = True

        for ext in obj._getExtensions():
            self.exts[ext] = obj
            f = True

        if f:
            self.__logger.debug('Register content indexer: %r', obj)
        if not f:
            raise Exception("Your indexer should at least suport a mimetype or extension")

    def doIndex(self, content, filename=None, content_type=None, realfname = None, debug=False):
        fobj = None
        fname = None
@@ -148,10 +148,10 @@ class contentIndex(object):
            if self.exts.has_key(ext):
                fobj = self.exts[ext]
                mime = fobj._getDefMime(ext)

        if content_type and not fobj:
            mime,fobj = mime_match(content_type, self.mimes)

        if not fobj:
            try:
                if realfname :
@@ -164,10 +164,10 @@ class contentIndex(object):
                        fd, fname = tempfile.mkstemp(suffix=ext)
                        os.write(fd, content)
                        os.close(fd)

                    pop = Popen(['file','-b','--mime',fname], shell=False, stdout=PIPE)
                    (result, _) = pop.communicate()
                    mime2 = result.split(';')[0]
                    self.__logger.debug('File gave us: %s', mime2)
                    # Note that the temporary file still exists now.
@@ -176,7 +176,7 @@ class contentIndex(object):
                        mime = mime2
            except Exception:
                self.__logger.exception('Cannot determine mime type')

        try:
            if fobj:
                res = (mime, fobj.indexContent(content,filename,fname or realfname) )
@@ -187,14 +187,14 @@ class contentIndex(object):
            self.__logger.exception("Could not index file %s (%s)",
                                    filename, fname or realfname)
            res = None

        # If we created a tmp file, unlink it now
        if not realfname and fname:
            try:
                os.unlink(fname)
            except Exception:
                self.__logger.exception("Could not unlink %s", fname)

        return res

cntIndex = contentIndex()

@@ -214,7 +214,7 @@ class email_template(osv.osv):
                ir_values_obj = self.pool.get('ir.values')
                ir_values_obj.unlink(cr, uid, template.ref_ir_value.id, context)
            except:
-               raise osv.except_osv(_("Warning"), _("Deletion of Record failed"))
+               raise osv.except_osv(_("Warning"), _("Deletion of the action record failed."))
        return True

    def unlink(self, cr, uid, ids, context=None):

@@ -433,9 +433,8 @@ class hr_applicant(crm.crm_case, osv.osv):
                })
                self.case_close(cr, uid, [applicant.id], *args)
            else:
-               raise osv.except_osv(_('Warning!'),_('You must define Applied Job for Applicant !'))
+               raise osv.except_osv(_('Warning!'),_('You must define Applied Job for this applicant.'))
            action_model, action_id = model_data.get_object_reference(cr, uid, 'hr', 'open_view_employee_list')
            dict_act_window = act_window.read(cr, uid, action_id, [])
            if emp_id:

@@ -271,7 +271,7 @@ class idea_idea(osv.osv):
        if vals.get('my_vote', False):
            if vals.get('state', state) != 'open':
-               raise osv.except_osv(_("Warning !"), _("Draft/Accepted/Cancelled ideas Could not be voted"))
+               raise osv.except_osv(_("Warning !"), _("You can not vote on a Draft/Accepted/Cancelled ideas."))
        res = super(idea_idea, self).write(cr, user, ids, vals, context=context)
        return res

@@ -42,12 +42,12 @@ class import_framework(Thread):
        for advanced purpose get_default_hook can also be extended
        @see dummy import for a minimal exemple
    """

    """
        for import_object, this domain will avoid to find an already existing object
    """
    DO_NOT_FIND_DOMAIN = [('id', '=', 0)]

    #TODO don't use context to pass credential parameters
    def __init__(self, obj, cr, uid, instance_name, module_name, email_to_notify=False, context=None):
        Thread.__init__(self)
@@ -64,34 +64,34 @@ class import_framework(Thread):
        self.initialize()

    """
        Abstract Method to be implemented in
        the real instance
    """
    def initialize(self):
        """
            init before import
            usually for the login
        """
        pass

    def init_run(self):
        """
            call after intialize run in the thread, not in the main process
            TO use for long initialization operation
        """
        pass

    def get_data(self, table):
        """
            @return: a list of dictionaries
                each dictionnaries contains the list of pair external_field_name : value
        """
        return [{}]

    def get_link(self, from_table, ids, to_table):
        """
            @return: a dictionaries that contains the association between the id (from_table)
                and the list (to table) of id linked
        """
        return {}
@@ -103,14 +103,14 @@ class import_framework(Thread):
                of self.external_id_field
        """
        return data[self.external_id_field]

    def get_mapping(self):
        """
            @return: { TABLE_NAME : {
                'model' : 'openerp.model.name',
                #if true import the table if not just resolve dependencies, use for meta package, by default => True
                #Not required
                'import' : True or False,
                #Not required
                'dependencies' : [TABLE_1, TABLE_2],
                #Not required
@@ -127,40 +127,40 @@ class import_framework(Thread):
                    'field' : call(method, val('external_field') interface of method is self, val where val is the value of the field
                    'field' : const(value) #always set this field to value
                    + any custom mapper that you will define
                }
            },
            }
        """
        return {}

    def default_hook(self, val):
        """
            this hook will be apply on each table that don't have hook
            here we define the identity hook
        """
        return val

    def _import_table(self, table):
        data = self.get_data(table)
        map = self.get_mapping()[table]['map']
        hook = self.get_mapping()[table].get('hook', self.default_hook)
        model = self.get_mapping()[table]['model']
        final_data = []
        for val in data:
            res = hook(val)
            if res:
                final_data.append(res)
        return self._save_data(model, dict(map), final_data, table)

    def _save_data(self, model, mapping, datas, table):
        """
            @param model: the model of the object to import
            @param table : the external table where the data come from
            @param mapping : definition of the mapping
                @see: get_mapping
            @param datas : list of dictionnaries
                datas = [data_1, data_2, ..]
                data_i is a map external field_name => value
                and each data_i have a external id => in data_id['id']
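For context, a hypothetical get_mapping() override following the docstring above might look like the sketch below. The table, model and field names are invented; 'model', 'dependencies' and 'map' are the keys the framework reads in _import_table(), and mapper.value/mapper.const are the helpers the docstring and the framework code refer to (the import paths are assumptions, not taken from this commit).

```python
# Hypothetical sketch of a get_mapping() override; names are illustrative only.
# The import paths below are assumed -- the framework file itself references a
# sibling `mapper` module (mapper.value, mapper.dbmapper).
from import_base.import_framework import import_framework
from import_base import mapper

class my_crm_import(import_framework):
    TABLE_USER = 'Users'
    TABLE_LEAD = 'Leads'

    def get_mapping(self):
        return {
            self.TABLE_USER: {
                'model': 'res.users',
                'map': {
                    'name': 'user_name',              # plain string is sugar for mapper.value('user_name')
                    'login': mapper.value('user_login'),
                },
            },
            self.TABLE_LEAD: {
                'model': 'crm.lead',
                'dependencies': [self.TABLE_USER],    # imported before Leads
                'map': {
                    'name': mapper.value('lead_subject'),
                    'type': mapper.const('lead'),     # always set this field to 'lead'
                },
            },
        }
```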
@ -170,21 +170,21 @@ class import_framework(Thread):
return (0, 'No data found') return (0, 'No data found')
mapping['id'] = 'id_new' mapping['id'] = 'id_new'
res = [] res = []
self_dependencies = [] self_dependencies = []
for k in mapping.keys(): for k in mapping.keys():
if '_parent' in k: if '_parent' in k:
self_dependencies.append((k[:-7], mapping.pop(k))) self_dependencies.append((k[:-7], mapping.pop(k)))
for data in datas: for data in datas:
for k, field_name in self_dependencies: for k, field_name in self_dependencies:
data[k] = data.get(field_name) and self._generate_xml_id(data.get(field_name), table) data[k] = data.get(field_name) and self._generate_xml_id(data.get(field_name), table)
data['id_new'] = self._generate_xml_id(self.get_external_id(data), table) data['id_new'] = self._generate_xml_id(self.get_external_id(data), table)
fields, values = self._fields_mapp(data, mapping, table) fields, values = self._fields_mapp(data, mapping, table)
res.append(values) res.append(values)
model_obj = self.obj.pool.get(model) model_obj = self.obj.pool.get(model)
if not model_obj: if not model_obj:
raise ValueError(_("%s is not a valid model name") % model) raise ValueError(_("%s is not a valid model name") % model)
@ -193,13 +193,13 @@ class import_framework(Thread):
for (field, field_name) in self_dependencies: for (field, field_name) in self_dependencies:
self._import_self_dependencies(model_obj, field, datas) self._import_self_dependencies(model_obj, field, datas)
return (len(res), warning) return (len(res), warning)
def _import_self_dependencies(self, obj, parent_field, datas): def _import_self_dependencies(self, obj, parent_field, datas):
""" """
@param parent_field: the name of the field that generate a self_dependencies, we call the object referenced in this @param parent_field: the name of the field that generate a self_dependencies, we call the object referenced in this
field the parent of the object field the parent of the object
@param datas: a list of dictionnaries @param datas: a list of dictionnaries
Dictionnaries need to contains Dictionnaries need to contains
id_new : the xml_id of the object id_new : the xml_id of the object
field_new : the xml_id of the parent field_new : the xml_id of the parent
""" """
@ -207,14 +207,14 @@ class import_framework(Thread):
for data in datas: for data in datas:
if data.get(parent_field): if data.get(parent_field):
values = [data['id_new'], data[parent_field]] values = [data['id_new'], data[parent_field]]
obj.import_data(self.cr, self.uid, fields, [values], mode='update', current_module=self.module_name, noupdate=True, context=self.context) obj.import_data(self.cr, self.uid, fields, [values], mode='update', current_module=self.module_name, noupdate=True, context=self.context)
def _preprocess_mapping(self, mapping): def _preprocess_mapping(self, mapping):
""" """
Preprocess the mapping : Preprocess the mapping :
after the preprocces, everything is after the preprocces, everything is
callable in the val of the dictionary callable in the val of the dictionary
use to allow syntaxical sugar like 'field': 'external_field' use to allow syntaxical sugar like 'field': 'external_field'
instead of 'field' : value('external_field') instead of 'field' : value('external_field')
""" """
@ -223,14 +223,14 @@ class import_framework(Thread):
if isinstance(value, basestring): if isinstance(value, basestring):
map[key] = mapper.value(value) map[key] = mapper.value(value)
#set parent for instance of dbmapper #set parent for instance of dbmapper
elif isinstance(value, mapper.dbmapper): elif isinstance(value, mapper.dbmapper):
value.set_parent(self) value.set_parent(self)
return map return map
def _fields_mapp(self,dict_sugar, openerp_dict, table): def _fields_mapp(self,dict_sugar, openerp_dict, table):
""" """
call all the mapper and transform data call all the mapper and transform data
to be compatible with import_data to be compatible with import_data
""" """
fields=[] fields=[]
@ -242,7 +242,7 @@ class import_framework(Thread):
value = val(dict(dict_sugar)) value = val(dict(dict_sugar))
data_lst.append(value) data_lst.append(value)
return fields, data_lst return fields, data_lst
def _generate_xml_id(self, name, table): def _generate_xml_id(self, name, table):
""" """
@param name: name of the object, has to be unique in for a given table @param name: name of the object, has to be unique in for a given table
@ -253,8 +253,8 @@ class import_framework(Thread):
sugar_instance = self.instance_name sugar_instance = self.instance_name
name = name.replace('.', '_').replace(',', '_') name = name.replace('.', '_').replace(',', '_')
return sugar_instance + "_" + table + "_" + name return sugar_instance + "_" + table + "_" + name
""" """
Public interface of the framework Public interface of the framework
those function can be use in the callable function defined in the mapping those function can be use in the callable function defined in the mapping
@ -268,11 +268,11 @@ class import_framework(Thread):
""" """
if not external_id: if not external_id:
return False return False
xml_id = self._generate_xml_id(external_id, table) xml_id = self._generate_xml_id(external_id, table)
id = self.obj.pool.get('ir.model.data').search(self.cr, self.uid, [('name', '=', xml_id), ('module', '=', self.module_name)]) id = self.obj.pool.get('ir.model.data').search(self.cr, self.uid, [('name', '=', xml_id), ('module', '=', self.module_name)])
return id and xml_id or False return id and xml_id or False
def name_exist(self, table, name, model): def name_exist(self, table, name, model):
""" """
Check if the object with the name exist in the openerp database Check if the object with the name exist in the openerp database
@ -282,21 +282,21 @@ class import_framework(Thread):
fields = ['name'] fields = ['name']
data = [name] data = [name]
return self.import_object(fields, data, model, table, name, [('name', '=', name)]) return self.import_object(fields, data, model, table, name, [('name', '=', name)])
def get_mapped_id(self, table, external_id, context=None): def get_mapped_id(self, table, external_id, context=None):
""" """
@return return the databse id linked with the external_id @return return the databse id linked with the external_id
""" """
if not external_id: if not external_id:
return False return False
xml_id = self._generate_xml_id(external_id, table) xml_id = self._generate_xml_id(external_id, table)
return self.obj.pool.get('ir.model.data').get_object_reference(self.cr, self.uid, self.module_name, xml_id)[1] return self.obj.pool.get('ir.model.data').get_object_reference(self.cr, self.uid, self.module_name, xml_id)[1]
def import_object_mapping(self, mapping, data, model, table, name, domain_search=False): def import_object_mapping(self, mapping, data, model, table, name, domain_search=False):
""" """
same as import_objects but instead of two list fields and data, same as import_objects but instead of two list fields and data,
this method take a dictionnaries : external_field : value this method take a dictionnaries : external_field : value
and the mapping similar to the one define in 'map' key and the mapping similar to the one define in 'map' key
@see import_object, get_mapping @see import_object, get_mapping
""" """
@ -308,7 +308,7 @@ class import_framework(Thread):
    This method will import an object into openerp; it is useful for a field that is only a char in sugar but an object in openerp
    it uses import_data, which takes care of creating/updating the data, or doing nothing
    this method returns the xml_id
    to be used when you want to create an object, or link to it if it already exists
    use DO_NOT_LINK_DOMAIN to always create a new object
    @param fields: list of fields needed to create the object without id
@ -318,21 +318,21 @@ class import_framework(Thread):
    @param table: the table the data comes from in sugarcrm; it does not need to match the real openerp name, it just needs to be unique
    @param unique_name: the name of the object that we want to create/update or get the id of
    @param domain_search : the domain that should find the unique existing record
    @return: the xml_id of the resource
    """
    domain_search = not domain_search and [('name', 'ilike', name)] or domain_search
    obj = self.obj.pool.get(model)
    if not obj: #if the model doesn't exist
        return False
    xml_id = self._generate_xml_id(name, table)
    xml_ref = self.mapped_id_if_exist(model, domain_search, table, name)
    fields.append('id')
    data.append(xml_id)
    obj.import_data(self.cr, self.uid, fields, [data], mode='update', current_module=self.module_name, noupdate=True, context=self.context)
    return xml_ref or xml_id
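A minimal usage sketch, assuming it runs inside this framework; res.partner.category and the values are illustrative. import_object_mapping above behaves the same way but takes a {external_field: value} dict plus a mapping instead of the two lists.

    # create the category only if no record matches [('name', 'ilike', 'Insurance')]
    categ_xml_id = self.import_object(['name'], ['Insurance'],
        'res.partner.category', 'partner_categories', 'Insurance')
    # pass DO_NOT_LINK_DOMAIN as domain_search to force the creation of a new record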
def mapped_id_if_exist(self, model, domain, table, name):
    """
@ -354,8 +354,8 @@ class import_framework(Thread):
            noupdate=True, res_id=ids[0], context=self.context)
        return xml_id
    return False
def set_table_list(self, table_list):
    """
    Set the list of tables to import; this method must be called before run
@ -363,13 +363,13 @@ class import_framework(Thread):
        ['Leads', 'Opportunity']
    """
    self.table_list = table_list
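For example, from the code that drives the import (the subclass name and constructor arguments shown are assumptions):

    imp = my_import(self, cr, uid, 'sugar', 'import_sugarcrm', email, context=context)   # hypothetical subclass, sketched after run() below
    imp.set_table_list(['Leads', 'Opportunity'])
    imp.start()   # Thread.start() executes run() below in a separate thread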
def run(self):
    """
    Import all data into openerp,
    this is the entry point to launch the import process
    """
    self.data_started = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    self.cr = pooler.get_db(self.cr.dbname).cursor()
@ -388,21 +388,21 @@ class import_framework(Thread):
            result.append((table, position, warning))
            imported.add(table)
            self.cr.commit()
    except Exception, err:
        sh = StringIO.StringIO()
        traceback.print_exc(file=sh)
        error = sh.getvalue()
        print error
    self.date_ended = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    self._send_notification_email(result, error)
    self.cr.close()
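A rough sketch of the hooks a concrete importer is expected to provide; the method names come from run_test below and from the docstrings above, but the exact mapping keys (other than 'map') and signatures are assumptions:

    class my_import(import_framework):
        def initialize(self):
            # e.g. log in to the external system
            pass
        def get_data(self, table):
            # one dict per source record of `table`
            return [{'id': 'lead-1', 'last_name': 'Doe'}]
        def get_mapping(self):
            return {
                'Leads': {
                    'model': 'crm.lead',           # assumed key
                    'dependencies': [],            # assumed key, consumed by _resolve_dependencies
                    'map': {'name': 'last_name'},  # 'map' key as referenced above
                },
            }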
def _resolve_dependencies(self, dep, imported):
    """
    import dependencies recursively
    and avoid importing the same table twice
    """
@ -418,13 +418,13 @@ class import_framework(Thread):
        result.append((dependency, position, warning))
        imported.add(dependency)
    return result
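A standalone sketch of the recursion pattern (not the framework code itself): each table declares its dependencies, and the `imported` set prevents importing the same table twice.

    def resolve(table, dependencies, imported):
        for dep in dependencies.get(table, []):
            if dep not in imported:
                resolve(dep, dependencies, imported)
                imported.add(dep)

    deps = {'Opportunity': ['Leads', 'Accounts'], 'Leads': [], 'Accounts': []}
    done = set()
    resolve('Opportunity', deps, done)   # Leads and Accounts end up in `done` before Opportunity is imported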
def _send_notification_email(self, result, error):
    if not self.email:
        return False
    email_obj = self.obj.pool.get('mail.message')
    email_id = email_obj.create(self.cr, self.uid, {
        'email_from' : 'import@module.openerp',
        'email_to' : self.email,
        'body_text' : self.get_email_body(result, error),
        'subject' : self.get_email_subject(result, error),
@ -434,34 +434,34 @@ class import_framework(Thread):
        self.logger.error(_("Import failed due to an unexpected error"))
    else:
        self.logger.info(_("Import finished, notification email sent"))
def get_email_subject(self, result, error=False):
    """
    This method defines the subject of the email sent by openerp at the end of the import
    @param result: a list of tuples
        (table_name, number_of_record_created/updated, warning) for each table
    @return the subject of the mail
    """
    if error:
        return _("Data Import failed at %s due to an unexpected error") % self.date_ended
    return _("Import of your data finished at %s") % self.date_ended
def get_email_body(self, result, error=False):
    """
    This method defines the body of the email sent by openerp at the end of the import. The body is made of two parts:
    the header (@see get_body_header), and the generated body with the list of tables and the number of records imported.
    If you want to keep this part, it is better to override get_body_header
    @param result: a list of tuples
        (table_name, number_of_record_created/updated, warning) for each table
    @return the body of the mail
    """
    body = _("started at %s and finished at %s \n") % (self.data_started, self.date_ended)
    if error:
        body += _("but failed; to keep the database consistent, no data were imported \n error : \n") + error
    for (table, nb, warning) in result:
        if not warning:
            warning = _("with no warning")
@ -469,13 +469,13 @@ class import_framework(Thread):
            warning = _("with warning : %s") % warning
        body += _("%s has been successfully imported from %s %s, %s \n") % (nb, self.instance_name, table, warning)
    return self.get_body_header(result) + "\n\n" + body
def get_body_header(self, result):
    """
    @return the first sentences written in the mail's body
    """
    return _("The import of data \n instance name : %s \n") % self.instance_name
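A minimal override sketch, continuing the hypothetical subclass introduced above:

    class my_import(import_framework):
        def get_body_header(self, result):
            # replace the generic first line of the notification email
            return _("SugarCRM import into instance %s \n") % self.instance_name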
#TODO documentation test
def run_test(self):
@ -489,13 +489,13 @@ class import_framework(Thread):
    self.get_data = back_get_data
    self.get_link = back_get_link
    self.initialize = back_init
def get_data_test(self, table):
    return [{}]
def get_link_test(self, from_table, ids, to_table):
    return {}
def intialize_test(self):
    pass


@ -133,7 +133,7 @@ Normal - the campaign runs normally and automatically sends all emails and repor
campaign = self.browse(cr, uid, ids[0])
if not campaign.activity_ids:
-   raise osv.except_osv(_("Error"), _("The campaign cannot be started: there are no activities in it"))
+   raise osv.except_osv(_("Error"), _("The campaign cannot be started: there are no activities in it."))
has_start = False
has_signal_without_from = False
@ -145,7 +145,7 @@ Normal - the campaign runs normally and automatically sends all emails and repor
        has_signal_without_from = True
if not has_start and not has_signal_without_from:
-   raise osv.except_osv(_("Error"), _("The campaign cannot be started: it doesn't have any starting activity (or any activity with a signal and no previous activity)"))
+   raise osv.except_osv(_("Error"), _("The campaign cannot be started: it doesn't have any starting activity. Modify campaign's activities to mark one as the starting point."))
return self.write(cr, uid, ids, {'state': 'running'})
@ -155,7 +155,7 @@ Normal - the campaign runs normally and automatically sends all emails and repor
    [('campaign_id', 'in', ids),
     ('state', '=', 'running')])
if segment_ids:
-   raise osv.except_osv(_("Error"), _("The campaign cannot be marked as done before all segments are done"))
+   raise osv.except_osv(_("Error"), _("The campaign cannot be marked as done before all segments are closed."))
self.write(cr, uid, ids, {'state': 'done'})
return True
@ -211,7 +211,7 @@ Normal - the campaign runs normally and automatically sends all emails and repor
# prevent duplication until the server properly duplicates several levels of nested o2m
def copy(self, cr, uid, id, default=None, context=None):
-   raise osv.except_osv(_("Operation not supported"), _("Sorry, campaign duplication is not supported at the moment."))
+   raise osv.except_osv(_("Operation not supported"), _("You can not duplicate a campaign, it's not supported yet."))
def _find_duplicate_workitems(self, cr, uid, record, campaign_rec, context=None):
    """Finds possible duplicate workitems for a record in this campaign, based on a uniqueness


@ -600,7 +600,7 @@ class mrp_production(osv.osv):
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
-   raise osv.except_osv(_('Error'), _("Couldn't find bill of material for product"))
+   raise osv.except_osv(_('Error'), _("Couldn't find a bill of material for this product."))
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0]
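A small worked example of the quantity conversion above, with made-up numbers:

    # production order for 2 Dozen, while the BoM is defined for 10 Units of the product
    factor = 2 * 12              # what _compute_qty returns: 24 units in the BoM's unit of measure
    multiplier = factor / 10.0   # bom_point.product_qty == 10, so every BoM line is exploded x 2.4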


@ -278,7 +278,7 @@ class pos_order(osv.osv):
    ('user_id', '=', uid),
    ('state', '=', 'open')], context=context)
if len(statement_id) == 0:
-   raise osv.except_osv(_('Error !'), _('You have to open at least one cashbox'))
+   raise osv.except_osv(_('Error !'), _('You have to open at least one cashbox.'))
if statement_id:
    statement_id = statement_id[0]
args['statement_id'] = statement_id


@ -135,7 +135,7 @@ class procurement_order(osv.osv):
        unlink_ids.append(s['id'])
    else:
        raise osv.except_osv(_('Invalid action !'),
-           _('Cannot delete Procurement Order(s) which are in %s State!') % \
+           _('Cannot delete Procurement Order(s) which are in %s state!') % \
            s['state'])
return osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
@ -325,7 +325,7 @@ class procurement_order(osv.osv):
for procurement in self.browse(cr, uid, ids, context=context):
    if procurement.product_qty <= 0.00:
        raise osv.except_osv(_('Data Insufficient !'),
-           _('Please check the Quantity in Procurement Order(s), it should not be less than 1!'))
+           _('Please check the quantity in procurement order(s), it should not be 0 or less!'))
    if procurement.product_id.type in ('product', 'consu'):
        if not procurement.move_id:
            source = procurement.location_id.id


@ -142,7 +142,7 @@ class account_invoice_line(osv.osv):
if not price_unit and partner_id:
    pricelist = partner_obj.browse(cr, uid, partner_id).property_product_pricelist_purchase.id
    if not pricelist:
-       raise osv.except_osv(_('No Purchase Pricelist Found !'),_("You must first define a pricelist for Supplier !"))
+       raise osv.except_osv(_('No Purchase Pricelist Found!'),_("You must first define a pricelist on the supplier form!"))
    price_unit_res = pricelist_obj.price_get(cr, uid, [pricelist], product.id, qty or 1.0, partner_id, {'uom': uom})
    price_unit = price_unit_res[pricelist]
    real_price = get_real_price(price_unit_res, product.id, qty, uom, pricelist)
@ -150,7 +150,7 @@ class account_invoice_line(osv.osv):
if partner_id:
    pricelist = partner_obj.browse(cr, uid, partner_id).property_product_pricelist.id
    if not pricelist:
-       raise osv.except_osv(_('No Sale Pricelist Found '),_("You must first define a pricelist for Customer !"))
+       raise osv.except_osv(_('No Sale Pricelist Found!'),_("You must first define a pricelist on the customer form!"))
    price_unit_res = pricelist_obj.price_get(cr, uid, [pricelist], product.id, qty or 1.0, partner_id, {'uom': uom})
    price_unit = price_unit_res[pricelist]
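For reference, a hedged sketch of the return shape assumed by the price lookups above (the ids and prices are invented):

    # price_get takes a list of pricelist ids and returns a dict keyed by them
    price_unit_res = pricelist_obj.price_get(cr, uid, [2], product.id, 5.0, partner_id, {'uom': uom})
    # e.g. {2: 15.0}; the unit price is then read back with the same pricelist id
    price_unit = price_unit_res[2]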