[MERGE] exception and warning messages improvement
bzr revid: abo@openerp.com-20120808130608-tnk060lr543nqn2n
This commit is contained in:
commit
54bbe3d78b
|
@ -125,7 +125,7 @@ class account_payment_term_line(osv.osv):
|
|||
return True
|
||||
|
||||
_constraints = [
|
||||
(_check_percent, 'Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2% ', ['value_amount']),
|
||||
(_check_percent, 'Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2%.', ['value_amount']),
|
||||
]
|
||||
|
||||
account_payment_term_line()
|
||||
|
@ -404,12 +404,12 @@ class account_account(osv.osv):
|
|||
journal_obj = self.pool.get('account.journal')
|
||||
jids = journal_obj.search(cr, uid, [('type','=','situation'),('centralisation','=',1),('company_id','=',account.company_id.id)], context=context)
|
||||
if not jids:
|
||||
raise osv.except_osv(_('Error!'),_("You need an Opening journal with centralisation checked to set the initial balance!"))
|
||||
raise osv.except_osv(_('Error!'),_("You need an Opening journal with centralisation checked to set the initial balance."))
|
||||
|
||||
period_obj = self.pool.get('account.period')
|
||||
pids = period_obj.search(cr, uid, [('special','=',True),('company_id','=',account.company_id.id)], context=context)
|
||||
if not pids:
|
||||
raise osv.except_osv(_('Error!'),_("No opening/closing period defined, please create one to set the initial balance!"))
|
||||
raise osv.except_osv(_('Error!'),_("There is no opening/closing period defined, please create one to set the initial balance."))
|
||||
|
||||
move_obj = self.pool.get('account.move.line')
|
||||
move_id = move_obj.search(cr, uid, [
|
||||
|
@ -426,7 +426,7 @@ class account_account(osv.osv):
|
|||
}, context=context)
|
||||
else:
|
||||
if diff<0.0:
|
||||
raise osv.except_osv(_('Error!'),_("Unable to adapt the initial balance (negative value)!"))
|
||||
raise osv.except_osv(_('Error!'),_("Unable to adapt the initial balance (negative value)."))
|
||||
nameinv = (name=='credit' and 'debit') or 'credit'
|
||||
move_id = move_obj.create(cr, uid, {
|
||||
'name': _('Opening Balance'),
|
||||
|
@ -541,9 +541,9 @@ class account_account(osv.osv):
|
|||
return True
|
||||
|
||||
_constraints = [
|
||||
(_check_recursion, 'Error ! You can not create recursive accounts.', ['parent_id']),
|
||||
(_check_type, 'Configuration Error! \nYou can not define children to an account with internal type different of "View"! ', ['type']),
|
||||
(_check_account_type, 'Configuration Error! \nYou can not select an account type with a deferral method different of "Unreconciled" for accounts with internal type "Payable/Receivable"! ', ['user_type','type']),
|
||||
(_check_recursion, 'Error!\nYou cannot create recursive accounts.', ['parent_id']),
|
||||
(_check_type, 'Configuration Error!\nYou cannot define children to an account with internal type different of "View".', ['type']),
|
||||
(_check_account_type, 'Configuration Error!\nYou cannot select an account type with a deferral method different of "Unreconciled" for accounts with internal type "Payable/Receivable".', ['user_type','type']),
|
||||
]
|
||||
_sql_constraints = [
|
||||
('code_company_uniq', 'unique (code,company_id)', 'The code of the account must be unique per company !')
|
||||
|
@ -619,14 +619,14 @@ class account_account(osv.osv):
|
|||
|
||||
if line_obj.search(cr, uid, [('account_id', 'in', account_ids)]):
|
||||
if method == 'write':
|
||||
raise osv.except_osv(_('Error !'), _('You can not desactivate an account that contains some journal items.'))
|
||||
raise osv.except_osv(_('Error!'), _('You cannot deactivate an account that contains journal items.'))
|
||||
elif method == 'unlink':
|
||||
raise osv.except_osv(_('Error !'), _('You can not remove an account containing journal items.'))
|
||||
raise osv.except_osv(_('Error!'), _('You cannot remove an account that contains journal items.'))
|
||||
#Checking whether the account is set as a property to any Partner or not
|
||||
value = 'account.account,' + str(ids[0])
|
||||
partner_prop_acc = self.pool.get('ir.property').search(cr, uid, [('value_reference','=',value)], context=context)
|
||||
if partner_prop_acc:
|
||||
raise osv.except_osv(_('Warning !'), _('You can not remove/desactivate an account which is set on a customer or supplier.'))
|
||||
raise osv.except_osv(_('Warning!'), _('You cannot remove/deactivate an account which is set on a customer or supplier.'))
|
||||
return True
|
||||
|
||||
def _check_allow_type_change(self, cr, uid, ids, new_type, context=None):
|
||||
|
@ -768,7 +768,7 @@ class account_journal(osv.osv):
|
|||
return True
|
||||
|
||||
_constraints = [
|
||||
(_check_currency, 'Configuration error! The currency chosen should be shared by the default accounts too.', ['currency','default_debit_account_id','default_credit_account_id']),
|
||||
(_check_currency, 'Configuration error!\nThe currency chosen should be shared by the default accounts too.', ['currency','default_debit_account_id','default_credit_account_id']),
|
||||
]
|
||||
|
||||
def copy(self, cr, uid, id, default={}, context=None, done_list=[], local=False):
|
||||
|
@ -790,7 +790,7 @@ class account_journal(osv.osv):
|
|||
if 'company_id' in vals and journal.company_id.id != vals['company_id']:
|
||||
move_lines = self.pool.get('account.move.line').search(cr, uid, [('journal_id', 'in', ids)])
|
||||
if move_lines:
|
||||
raise osv.except_osv(_('Warning !'), _('You can not modify the company of this journal as its related record exist in journal items'))
|
||||
raise osv.except_osv(_('Warning!'), _('This journal already contains items, therefore you cannot modify its company field.'))
|
||||
return super(account_journal, self).write(cr, uid, ids, vals, context=context)
|
||||
|
||||
def create_sequence(self, cr, uid, vals, context=None):
|
||||
|
@ -915,7 +915,7 @@ class account_fiscalyear(osv.osv):
|
|||
return True
|
||||
|
||||
_constraints = [
|
||||
(_check_duration, 'Error! The start date of the fiscal year must be before his end date.', ['date_start','date_stop'])
|
||||
(_check_duration, 'Error!\nThe start date of a fiscal year must precede its end date.', ['date_start','date_stop'])
|
||||
]
|
||||
|
||||
def create_period3(self, cr, uid, ids, context=None):
|
||||
|
@ -966,7 +966,7 @@ class account_fiscalyear(osv.osv):
|
|||
ids = self.search(cr, uid, args, context=context)
|
||||
if not ids:
|
||||
if exception:
|
||||
raise osv.except_osv(_('Error !'), _('No fiscal year defined for this date !\nPlease create one from the configuration of the accounting menu.'))
|
||||
raise osv.except_osv(_('Error!'), _('There is no fiscal year defined for this date.\nPlease create one from the configuration of the accounting menu.'))
|
||||
else:
|
||||
return []
|
||||
return ids
|
||||
|
@ -1032,8 +1032,8 @@ class account_period(osv.osv):
|
|||
return True
|
||||
|
||||
_constraints = [
|
||||
(_check_duration, 'Error ! The duration of the Period(s) is/are invalid. ', ['date_stop']),
|
||||
(_check_year_limit, 'Invalid period ! Some periods overlap or the date period is not in the scope of the fiscal year. ', ['date_stop'])
|
||||
(_check_duration, 'Error!\nThe duration of the Period(s) is/are invalid.', ['date_stop']),
|
||||
(_check_year_limit, 'Error!\nThe period is invalid. Either some periods are overlapping or the period\'s dates are not matching the scope of the fiscal year.', ['date_stop'])
|
||||
]
|
||||
|
||||
def next(self, cr, uid, period, step, context=None):
|
||||
|
@ -1055,7 +1055,7 @@ class account_period(osv.osv):
|
|||
args.append(('company_id', '=', company_id))
|
||||
ids = self.search(cr, uid, args, context=context)
|
||||
if not ids:
|
||||
raise osv.except_osv(_('Error !'), _('No period defined for this date: %s !\nPlease create one.')%dt)
|
||||
raise osv.except_osv(_('Error!'), _('There is no period defined for this date: %s.\nPlease create one.')%dt)
|
||||
return ids
|
||||
|
||||
def action_draft(self, cr, uid, ids, *args):
|
||||
|
@ -1080,7 +1080,7 @@ class account_period(osv.osv):
|
|||
if 'company_id' in vals:
|
||||
move_lines = self.pool.get('account.move.line').search(cr, uid, [('period_id', 'in', ids)])
|
||||
if move_lines:
|
||||
raise osv.except_osv(_('Warning !'), _('You can not modify company of this period as some journal items exists.'))
|
||||
raise osv.except_osv(_('Warning!'), _('This journal already contains items for this period, therefore you cannot modify its company field.'))
|
||||
return super(account_period, self).write(cr, uid, ids, vals, context=context)
|
||||
|
||||
def build_ctx_periods(self, cr, uid, period_from_id, period_to_id):
|
||||
|
@ -1093,9 +1093,9 @@ class account_period(osv.osv):
|
|||
period_date_stop = period_to.date_stop
|
||||
company2_id = period_to.company_id.id
|
||||
if company1_id != company2_id:
|
||||
raise osv.except_osv(_('Error'), _('You should have chosen periods that belongs to the same company'))
|
||||
raise osv.except_osv(_('Error!'), _('You should choose the periods that belong to the same company.'))
|
||||
if period_date_start > period_date_stop:
|
||||
raise osv.except_osv(_('Error'), _('Start period should be smaller then End period'))
|
||||
raise osv.except_osv(_('Error!'), _('The start period should precede the end period.'))
|
||||
#for period from = january, we want to exclude the opening period (but it has same date_from, so we have to check if period_from is special or not to include that clause or not in the search).
|
||||
if period_from.special:
|
||||
return self.search(cr, uid, [('date_start', '>=', period_date_start), ('date_stop', '<=', period_date_stop), ('company_id', '=', company1_id)])
|
||||
|
@ -1134,7 +1134,7 @@ class account_journal_period(osv.osv):
|
|||
cr.execute('select * from account_move_line where journal_id=%s and period_id=%s limit 1', (obj.journal_id.id, obj.period_id.id))
|
||||
res = cr.fetchall()
|
||||
if res:
|
||||
raise osv.except_osv(_('Error !'), _('You can not modify/delete a journal with entries for this period !'))
|
||||
raise osv.except_osv(_('Error!'), _('You cannot modify/delete a journal with entries for this period.'))
|
||||
return True
|
||||
|
||||
def write(self, cr, uid, ids, vals, context=None):
|
||||
|
@ -1303,7 +1303,7 @@ class account_move(osv.osv):
|
|||
|
||||
_constraints = [
|
||||
(_check_centralisation,
|
||||
'You can not create more than one move per period on centralized journal',
|
||||
'You cannot create more than one move per period on a centralized journal.',
|
||||
['journal_id']),
|
||||
]
|
||||
|
||||
|
@ -1314,7 +1314,7 @@ class account_move(osv.osv):
|
|||
valid_moves = self.validate(cr, uid, ids, context)
|
||||
|
||||
if not valid_moves:
|
||||
raise osv.except_osv(_('Integrity Error !'), _('You can not validate a non-balanced entry !\nMake sure you have configured payment terms properly !\nThe latest payment term line should be of the type "Balance" !'))
|
||||
raise osv.except_osv(_('Error!'), _('You cannot validate a non-balanced entry.\nMake sure you have configured payment terms properly.\nThe latest payment term line should be of the "Balance" type.'))
|
||||
obj_sequence = self.pool.get('ir.sequence')
|
||||
for move in self.browse(cr, uid, valid_moves, context=context):
|
||||
if move.name =='/':
|
||||
|
@ -1328,7 +1328,7 @@ class account_move(osv.osv):
|
|||
c = {'fiscalyear_id': move.period_id.fiscalyear_id.id}
|
||||
new_name = obj_sequence.next_by_id(cr, uid, journal.sequence_id.id, c)
|
||||
else:
|
||||
raise osv.except_osv(_('Error'), _('No sequence defined on the journal !'))
|
||||
raise osv.except_osv(_('Error!'), _('Please define a sequence on the journal.'))
|
||||
|
||||
if new_name:
|
||||
self.write(cr, uid, [move.id], {'name':new_name})
|
||||
|
@ -1352,13 +1352,13 @@ class account_move(osv.osv):
|
|||
top_common = top_account
|
||||
elif top_account.id != top_common.id:
|
||||
raise osv.except_osv(_('Error!'),
|
||||
_('You cannot validate this journal entry because account "%s" does not belong to chart of accounts "%s"!') % (account.name, top_common.name))
|
||||
_('You cannot validate this journal entry because account "%s" does not belong to chart of accounts "%s".') % (account.name, top_common.name))
|
||||
return self.post(cursor, user, ids, context=context)
|
||||
|
||||
def button_cancel(self, cr, uid, ids, context=None):
|
||||
for line in self.browse(cr, uid, ids, context=context):
|
||||
if not line.journal_id.update_posted:
|
||||
raise osv.except_osv(_('Error !'), _('You can not modify a posted entry of this journal !\nYou should set the journal to allow cancelling entries if you want to do that.'))
|
||||
raise osv.except_osv(_('Error!'), _('You cannot modify a posted entry of this journal.\nFirst you should set the journal to allow cancelling entries.'))
|
||||
if ids:
|
||||
cr.execute('UPDATE account_move '\
|
||||
'SET state=%s '\
|
||||
|
@ -1445,8 +1445,8 @@ class account_move(osv.osv):
|
|||
obj_move_line = self.pool.get('account.move.line')
|
||||
for move in self.browse(cr, uid, ids, context=context):
|
||||
if move['state'] != 'draft':
|
||||
raise osv.except_osv(_('UserError'),
|
||||
_('You can not delete a posted journal entry "%s"!') % \
|
||||
raise osv.except_osv(_('User Error!'),
|
||||
_('You cannot delete a posted journal entry "%s".') % \
|
||||
move['name'])
|
||||
line_ids = map(lambda x: x.id, move.line_id)
|
||||
context['journal_id'] = move.journal_id.id
|
||||
|
@ -1474,16 +1474,16 @@ class account_move(osv.osv):
|
|||
account_id = move.journal_id.default_debit_account_id.id
|
||||
mode2 = 'debit'
|
||||
if not account_id:
|
||||
raise osv.except_osv(_('UserError'),
|
||||
_('There is no default default debit account defined \n' \
|
||||
'on journal "%s"') % move.journal_id.name)
|
||||
raise osv.except_osv(_('User Error!'),
|
||||
_('There is no default debit account defined \n' \
|
||||
'on journal "%s".') % move.journal_id.name)
|
||||
else:
|
||||
account_id = move.journal_id.default_credit_account_id.id
|
||||
mode2 = 'credit'
|
||||
if not account_id:
|
||||
raise osv.except_osv(_('UserError'),
|
||||
_('There is no default default credit account defined \n' \
|
||||
'on journal "%s"') % move.journal_id.name)
|
||||
raise osv.except_osv(_('User Error!'),
|
||||
_('There is no default credit account defined \n' \
|
||||
'on journal "%s".') % move.journal_id.name)
|
||||
|
||||
# find the first line of this move with the current mode
|
||||
# or create it if it doesn't exist
|
||||
|
@ -1577,11 +1577,11 @@ class account_move(osv.osv):
|
|||
if not company_id:
|
||||
company_id = line.account_id.company_id.id
|
||||
if not company_id == line.account_id.company_id.id:
|
||||
raise osv.except_osv(_('Error'), _("Couldn't create move between different companies"))
|
||||
raise osv.except_osv(_('Error!'), _("Cannot create moves for different companies."))
|
||||
|
||||
if line.account_id.currency_id and line.currency_id:
|
||||
if line.account_id.currency_id.id != line.currency_id.id and (line.account_id.currency_id.id != line.account_id.company_id.currency_id.id):
|
||||
raise osv.except_osv(_('Error'), _("""Couldn't create move with currency different from the secondary currency of the account "%s - %s". Clear the secondary currency field of the account definition if you want to accept all currencies.""") % (line.account_id.code, line.account_id.name))
|
||||
raise osv.except_osv(_('Error!'), _("""Cannot create move with currency different from the secondary currency of the account "%s - %s". Clear the secondary currency field of the account definition if you want to accept all currencies.""") % (line.account_id.code, line.account_id.name))
|
||||
|
||||
if abs(amount) < 10 ** -4:
|
||||
# If the move is balanced
|
||||
|
@ -1833,7 +1833,7 @@ class account_tax_code(osv.osv):
|
|||
|
||||
_check_recursion = check_cycle
|
||||
_constraints = [
|
||||
(_check_recursion, 'Error ! You can not create recursive accounts.', ['parent_id'])
|
||||
(_check_recursion, 'Error!\nYou cannot create recursive accounts.', ['parent_id'])
|
||||
]
|
||||
_order = 'code'
|
||||
|
||||
|
@ -2118,7 +2118,7 @@ class account_tax(osv.osv):
|
|||
}
|
||||
|
||||
def compute(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None):
|
||||
_logger.warning("Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included")
|
||||
_logger.warning("Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included.")
|
||||
return self._compute(cr, uid, taxes, price_unit, quantity, product, partner)
|
||||
|
||||
def _compute(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None, precision=None):
|
||||
|
@ -2521,8 +2521,8 @@ class account_account_template(osv.osv):
|
|||
|
||||
_check_recursion = check_cycle
|
||||
_constraints = [
|
||||
(_check_recursion, 'Error ! You can not create recursive account templates.', ['parent_id']),
|
||||
(_check_type, 'Configuration Error!\nYou can not define children to an account with internal type different of "View"! ', ['type']),
|
||||
(_check_recursion, 'Error!\nYou cannot create recursive account templates.', ['parent_id']),
|
||||
(_check_type, 'Configuration Error!\nYou cannot define children to an account that has internal type other than "View".', ['type']),
|
||||
|
||||
]
|
||||
|
||||
|
@ -2623,7 +2623,7 @@ class account_add_tmpl_wizard(osv.osv_memory):
|
|||
ptids = tmpl_obj.read(cr, uid, [tids[0]['parent_id'][0]], ['code'])
|
||||
res = None
|
||||
if not ptids or not ptids[0]['code']:
|
||||
raise osv.except_osv(_('Error !'), _('I can not locate a parent code for the template account!'))
|
||||
raise osv.except_osv(_('Error!'), _('There is no parent code for the template account.'))
|
||||
res = acc_obj.search(cr, uid, [('code','=',ptids[0]['code'])])
|
||||
return res and res[0] or False
|
||||
|
||||
|
@ -2729,7 +2729,7 @@ class account_tax_code_template(osv.osv):
|
|||
|
||||
_check_recursion = check_cycle
|
||||
_constraints = [
|
||||
(_check_recursion, 'Error ! You can not create recursive Tax Codes.', ['parent_id'])
|
||||
(_check_recursion, 'Error!\nYou cannot create recursive Tax Codes.', ['parent_id'])
|
||||
]
|
||||
_order = 'code,name'
|
||||
account_tax_code_template()
|
||||
|
@ -3386,7 +3386,7 @@ class wizard_multi_charts_accounts(osv.osv_memory):
|
|||
if not ids:
|
||||
break
|
||||
else:
|
||||
raise osv.except_osv(_('Error'), _('Cannot generate an unused journal code.'))
|
||||
raise osv.except_osv(_('Error!'), _('Cannot generate an unused journal code.'))
|
||||
|
||||
vals = {
|
||||
'name': line['acc_name'],
|
||||
|
@ -3464,7 +3464,7 @@ class wizard_multi_charts_accounts(osv.osv_memory):
|
|||
journal_data.append(vals)
|
||||
ref_acc_bank = obj_wizard.chart_template_id.bank_account_view_id
|
||||
if journal_data and not ref_acc_bank.code:
|
||||
raise osv.except_osv(_('Configuration Error !'), _('The bank account defined on the selected chart of accounts hasn\'t a code.'))
|
||||
raise osv.except_osv(_('Configuration Error!'), _('You have to set a code for the bank account defined on the selected chart of accounts.'))
|
||||
|
||||
current_num = 1
|
||||
for line in journal_data:
|
||||
|
|
|
@ -89,7 +89,7 @@ class account_analytic_line(osv.osv):
|
|||
if not a:
|
||||
raise osv.except_osv(_('Error!'),
|
||||
_('There is no expense account defined ' \
|
||||
'for this product: "%s" (id:%d)') % \
|
||||
'for this product: "%s" (id:%d).') % \
|
||||
(prod.name, prod.id,))
|
||||
else:
|
||||
a = prod.product_tmpl_id.property_account_income.id
|
||||
|
@ -98,7 +98,7 @@ class account_analytic_line(osv.osv):
|
|||
if not a:
|
||||
raise osv.except_osv(_('Error!'),
|
||||
_('There is no income account defined ' \
|
||||
'for this product: "%s" (id:%d)') % \
|
||||
'for this product: "%s" (id:%d).') % \
|
||||
(prod.name, prod_id,))
|
||||
|
||||
flag = False
|
||||
|
|
|
@ -469,7 +469,7 @@ class account_bank_statement(osv.osv):
|
|||
if t['state'] in ('draft'):
|
||||
unlink_ids.append(t['id'])
|
||||
else:
|
||||
raise osv.except_osv(_('Invalid action !'), _('In order to delete a bank statement, you must first cancel it to delete related journal items.'))
|
||||
raise osv.except_osv(_('Invalid Action!'), _('In order to delete a bank statement, you must first cancel it to delete related journal items.'))
|
||||
osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
|
||||
return True
|
||||
|
||||
|
|
|
@ -283,7 +283,7 @@ class account_cash_statement(osv.osv):
|
|||
for item_label, item_account in TALBES:
|
||||
if getattr(obj.journal_id, item_account):
|
||||
raise osv.except_osv(_('Error!'),
|
||||
_('There is no %s Account on the Journal %s') % (item_label, obj.journal_id.name,))
|
||||
_('There is no %s Account on the journal %s.') % (item_label, obj.journal_id.name,))
|
||||
|
||||
is_profit = obj.difference < 0.0
|
||||
|
||||
|
|
|
@ -364,15 +364,15 @@ class account_invoice(osv.osv):
|
|||
except Exception, e:
|
||||
if '"journal_id" viol' in e.args[0]:
|
||||
raise orm.except_orm(_('Configuration Error!'),
|
||||
_('There is no Accounting Journal of type Sale/Purchase defined!'))
|
||||
_('There is no Sale/Purchase journal defined.'))
|
||||
else:
|
||||
raise orm.except_orm(_('Unknown Error'), str(e))
|
||||
raise orm.except_orm(_('Unknown Error!'), str(e))
|
||||
|
||||
def invoice_print(self, cr, uid, ids, context=None):
|
||||
'''
|
||||
This function prints the invoice and mark it as sent, so that we can see more easily the next step of the workflow
|
||||
'''
|
||||
assert len(ids) == 1, 'This option should only be used for a single id at a time'
|
||||
assert len(ids) == 1, 'This option should only be used for a single id at a time.'
|
||||
self.write(cr, uid, ids, {'sent': True}, context=context)
|
||||
datas = {
|
||||
'ids': ids,
|
||||
|
@ -425,7 +425,7 @@ class account_invoice(osv.osv):
|
|||
if t['state'] in ('draft', 'cancel') and t['internal_number']== False:
|
||||
unlink_ids.append(t['id'])
|
||||
else:
|
||||
raise osv.except_osv(_('Invalid action !'), _('You can not delete an invoice which is open or paid. We suggest you to refund it instead.'))
|
||||
raise osv.except_osv(_('Invalid Action!'), _('You cannot delete an invoice which is open or paid. You should refund it instead.'))
|
||||
osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
|
||||
return True
|
||||
|
||||
|
@ -521,7 +521,7 @@ class account_invoice(osv.osv):
|
|||
pterm_list.sort()
|
||||
res = {'value':{'date_due': pterm_list[-1]}}
|
||||
else:
|
||||
raise osv.except_osv(_('Data Insufficient !'), _('The payment term of supplier does not have a payment term line!'))
|
||||
raise osv.except_osv(_('Insufficient Data!'), _('The payment term of supplier does not have a payment term line.'))
|
||||
return res
|
||||
|
||||
def onchange_invoice_line(self, cr, uid, ids, lines):
|
||||
|
@ -554,7 +554,7 @@ class account_invoice(osv.osv):
|
|||
pay_res_id = pay_line_data and pay_line_data[0].get('value_reference',False) and int(pay_line_data[0]['value_reference'].split(',')[1]) or False
|
||||
if not rec_res_id and not pay_res_id:
|
||||
raise osv.except_osv(_('Configuration Error!'),
|
||||
_('Can not find a chart of account, you should create one from the configuration of the accounting menu.'))
|
||||
_('Cannot find a chart of account, you should create one from Settings\Configuration\Accounting menu.'))
|
||||
if type in ('out_invoice', 'out_refund'):
|
||||
acc_id = rec_res_id
|
||||
else:
|
||||
|
@ -569,7 +569,7 @@ class account_invoice(osv.osv):
|
|||
result_id = account_obj.search(cr, uid, [('name','=',line.account_id.name),('company_id','=',company_id)])
|
||||
if not result_id:
|
||||
raise osv.except_osv(_('Configuration Error!'),
|
||||
_('Can not find a chart of account, you should create one from the configuration of the accounting menu.'))
|
||||
_('Cannot find a chart of account, you should create one from Settings\Configuration\Accounting menu.'))
|
||||
inv_line_obj.write(cr, uid, [line.id], {'account_id': result_id[-1]})
|
||||
else:
|
||||
if invoice_line:
|
||||
|
@ -577,7 +577,7 @@ class account_invoice(osv.osv):
|
|||
obj_l = account_obj.browse(cr, uid, inv_line[2]['account_id'])
|
||||
if obj_l.company_id.id != company_id:
|
||||
raise osv.except_osv(_('Configuration Error!'),
|
||||
_('Invoice line account company does not match with invoice company.'))
|
||||
_('Invoice line account\'s company and invoice\'s company do not match.'))
|
||||
else:
|
||||
continue
|
||||
if company_id and type:
|
||||
|
@ -598,7 +598,7 @@ class account_invoice(osv.osv):
|
|||
if r[1] == 'journal_id' and r[2] in journal_ids:
|
||||
val['journal_id'] = r[2]
|
||||
if not val.get('journal_id', False):
|
||||
raise osv.except_osv(_('Configuration Error !'), (_('Can\'t find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration\Financial Accounting\Accounts\Journals.') % (journal_type)))
|
||||
raise osv.except_osv(_('Configuration Error!'), (_('Cannot find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration\Journals\Journals.') % (journal_type)))
|
||||
dom = {'journal_id': [('id', 'in', journal_ids)]}
|
||||
else:
|
||||
journal_ids = obj_journal.search(cr, uid, [])
|
||||
|
@ -866,7 +866,7 @@ class account_invoice(osv.osv):
|
|||
total_percent += line.value_amount
|
||||
total_fixed = (total_fixed * 100) / (inv.amount_total or 1.0)
|
||||
if (total_fixed + total_percent) > 100:
|
||||
raise osv.except_osv(_('Error !'), _("Can not create the invoice !\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. The latest line of your payment term must be of type 'balance' to avoid rounding issues."))
|
||||
raise osv.except_osv(_('Error!'), _("Cannot create the invoice.\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. In order to avoid rounding issues, the latest line of your payment term must be of type 'balance'."))
|
||||
|
||||
# one move line per tax line
|
||||
iml += ait_obj.move_line_get(cr, uid, inv.id)
|
||||
|
@ -947,8 +947,8 @@ class account_invoice(osv.osv):
|
|||
journal_id = inv.journal_id.id
|
||||
journal = journal_obj.browse(cr, uid, journal_id, context=ctx)
|
||||
if journal.centralisation:
|
||||
raise osv.except_osv(_('UserError'),
|
||||
_('You cannot create an invoice on a centralised journal. Uncheck the centralised counterpart box in the related journal from the configuration menu.'))
|
||||
raise osv.except_osv(_('User Error!'),
|
||||
_('You cannot create an invoice on a centralized journal. Uncheck the centralized counterpart box in the related journal from the configuration menu.'))
|
||||
|
||||
line = self.finalize_invoice_move_lines(cr, uid, inv, line)
|
||||
|
||||
|
@ -1061,7 +1061,7 @@ class account_invoice(osv.osv):
|
|||
pay_ids = account_move_line_obj.browse(cr, uid, i['payment_ids'])
|
||||
for move_line in pay_ids:
|
||||
if move_line.reconcile_partial_id and move_line.reconcile_partial_id.line_partial_ids:
|
||||
raise osv.except_osv(_('Error !'), _('You can not cancel an invoice which is partially paid! You need to unreconcile related payment entries first!'))
|
||||
raise osv.except_osv(_('Error!'), _('You cannot cancel an invoice which is partially paid. You need to unreconcile related payment entries first.'))
|
||||
|
||||
# First, set the invoices as cancelled and detach the move ids
|
||||
self.write(cr, uid, ids, {'state':'cancel', 'move_id':False})
|
||||
|
@ -1187,7 +1187,7 @@ class account_invoice(osv.osv):
|
|||
if context is None:
|
||||
context = {}
|
||||
#TODO check if we can use different period for payment and the writeoff line
|
||||
assert len(ids)==1, "Can only pay one invoice at a time"
|
||||
assert len(ids)==1, "Can only pay one invoice at a time."
|
||||
invoice = self.browse(cr, uid, ids[0], context=context)
|
||||
src_account_id = invoice.account_id.id
|
||||
# Take the seq as name for move
|
||||
|
@ -1273,7 +1273,7 @@ class account_invoice(osv.osv):
|
|||
else:
|
||||
code = invoice.currency_id.symbol
|
||||
# TODO: use currency's formatting function
|
||||
msg = _("Invoice '%s' is paid partially: %s%s of %s%s (%s%s remaining)") % \
|
||||
msg = _("Invoice '%s' is paid partially: %s%s of %s%s (%s%s remaining).") % \
|
||||
(name, pay_amount, code, invoice.amount_total, code, total, code)
|
||||
self.message_append_note(cr, uid, [inv_id], body=msg, context=context)
|
||||
self.pool.get('account.move.line').reconcile_partial(cr, uid, line_ids, 'manual', context)
|
||||
|
@ -1475,7 +1475,7 @@ class account_invoice_line(osv.osv):
|
|||
if prod.uom_id.category_id.id != prod_uom.category_id.id:
|
||||
warning = {
|
||||
'title': _('Warning!'),
|
||||
'message': _('You selected an Unit of Measure which is not compatible with the product.')
|
||||
'message': _('The selected unit of measure is not compatible with the unit of measure of the product.')
|
||||
}
|
||||
return {'value': res['value'], 'warning': warning}
|
||||
return res
|
||||
|
|
|
@ -95,7 +95,7 @@ class account_move_line(osv.osv):
|
|||
if initial_bal and not context.get('periods', False) and not where_move_lines_by_date:
|
||||
#we didn't pass any filter in the context, and the initial balance can't be computed using only the fiscalyear otherwise entries will be summed twice
|
||||
#so we have to invalidate this query
|
||||
raise osv.except_osv(_('Warning !'),_("You haven't supplied enough argument to compute the initial balance, please select a period and journal in the context."))
|
||||
raise osv.except_osv(_('Warning!'),_("You have not supplied enough arguments to compute the initial balance, please select a period and a journal in the context."))
|
||||
|
||||
|
||||
if context.get('journal_ids', False):
|
||||
|
@ -577,14 +577,14 @@ class account_move_line(osv.osv):
|
|||
lines = self.browse(cr, uid, ids, context=context)
|
||||
for l in lines:
|
||||
if l.account_id.type == 'view':
|
||||
raise osv.except_osv(_('Error :'), _('You can not create journal items on a "view" account %s %s') % (l.account_id.code, l.account_id.name))
|
||||
raise osv.except_osv(_('Error!'), _('You cannot create journal items on a "view" type account %s %s.') % (l.account_id.code, l.account_id.name))
|
||||
return True
|
||||
|
||||
def _check_no_closed(self, cr, uid, ids, context=None):
|
||||
lines = self.browse(cr, uid, ids, context=context)
|
||||
for l in lines:
|
||||
if l.account_id.type == 'closed':
|
||||
raise osv.except_osv(_('Error :'), _('You can not create journal items on a closed account %s %s') % (l.account_id.code, l.account_id.name))
|
||||
raise osv.except_osv(_('Error!'), _('You cannot create journal items on a closed account %s %s.') % (l.account_id.code, l.account_id.name))
|
||||
return True
|
||||
|
||||
def _check_company_id(self, cr, uid, ids, context=None):
|
||||
|
@ -611,7 +611,7 @@ class account_move_line(osv.osv):
|
|||
_constraints = [
|
||||
(_check_no_view, 'You cannot create journal items on an account of type view.', ['account_id']),
|
||||
(_check_no_closed, 'You cannot create journal items on closed account.', ['account_id']),
|
||||
(_check_company_id, 'Company must be the same for its related account and period.', ['company_id']),
|
||||
(_check_company_id, 'Account and Period must belong to the same company.', ['company_id']),
|
||||
(_check_date, 'The date of your Journal Entry is not in the defined period! You should change the date or remove this constraint from the journal.', ['date']),
|
||||
(_check_currency, 'The selected account of your Journal Entry forces to provide a secondary currency. You should remove the secondary currency on the account or select a multi-currency view on the journal.', ['currency_id']),
|
||||
]
|
||||
|
@ -743,7 +743,7 @@ class account_move_line(osv.osv):
|
|||
context = {}
|
||||
for line in self.browse(cr, uid, ids, context=context):
|
||||
if company_list and not line.company_id.id in company_list:
|
||||
raise osv.except_osv(_('Warning !'), _('To reconcile the entries company should be the same for all entries'))
|
||||
raise osv.except_osv(_('Warning!'), _('To reconcile the entries company should be the same for all entries.'))
|
||||
company_list.append(line.company_id.id)
|
||||
|
||||
for line in self.browse(cr, uid, ids, context=context):
|
||||
|
@ -752,7 +752,7 @@ class account_move_line(osv.osv):
|
|||
else:
|
||||
currency_id = line.company_id.currency_id
|
||||
if line.reconcile_id:
|
||||
raise osv.except_osv(_('Warning'), _('Already Reconciled!'))
|
||||
raise osv.except_osv(_('Warning!'), _('Already reconciled.'))
|
||||
if line.reconcile_partial_id:
|
||||
for line2 in line.reconcile_partial_id.line_partial_ids:
|
||||
if not line2.reconcile_id:
|
||||
|
@ -796,11 +796,11 @@ class account_move_line(osv.osv):
|
|||
company_list = []
|
||||
for line in self.browse(cr, uid, ids, context=context):
|
||||
if company_list and not line.company_id.id in company_list:
|
||||
raise osv.except_osv(_('Warning !'), _('To reconcile the entries company should be the same for all entries'))
|
||||
raise osv.except_osv(_('Warning!'), _('To reconcile the entries company should be the same for all entries.'))
|
||||
company_list.append(line.company_id.id)
|
||||
for line in unrec_lines:
|
||||
if line.state <> 'valid':
|
||||
raise osv.except_osv(_('Error'),
|
||||
raise osv.except_osv(_('Error!'),
|
||||
_('Entry "%s" is not valid !') % line.name)
|
||||
credit += line['credit']
|
||||
debit += line['debit']
|
||||
|
@ -823,15 +823,15 @@ class account_move_line(osv.osv):
|
|||
r = cr.fetchall()
|
||||
#TODO: move this check to a constraint in the account_move_reconcile object
|
||||
if not unrec_lines:
|
||||
raise osv.except_osv(_('Error'), _('Entry is already reconciled'))
|
||||
raise osv.except_osv(_('Error!'), _('Entry is already reconciled.'))
|
||||
account = account_obj.browse(cr, uid, account_id, context=context)
|
||||
if r[0][1] != None:
|
||||
raise osv.except_osv(_('Error'), _('Some entries are already reconciled !'))
|
||||
raise osv.except_osv(_('Error!'), _('Some entries are already reconciled.'))
|
||||
|
||||
if (not currency_obj.is_zero(cr, uid, account.company_id.currency_id, writeoff)) or \
|
||||
(account.currency_id and (not currency_obj.is_zero(cr, uid, account.currency_id, currency))):
|
||||
if not writeoff_acc_id:
|
||||
raise osv.except_osv(_('Warning'), _('You have to provide an account for the write off/exchange difference entry !'))
|
||||
raise osv.except_osv(_('Warning!'), _('You have to provide an account for the write off/exchange difference entry.'))
|
||||
if writeoff > 0:
|
||||
debit = writeoff
|
||||
credit = 0.0
|
||||
|
@ -1090,9 +1090,9 @@ class account_move_line(osv.osv):
|
|||
res = cr.fetchone()
|
||||
if res:
|
||||
if res[1] != 'draft':
|
||||
raise osv.except_osv(_('UserError'),
|
||||
raise osv.except_osv(_('User Error!'),
|
||||
_('The account move (%s) for centralisation ' \
|
||||
'has been confirmed!') % res[2])
|
||||
'has been confirmed.') % res[2])
|
||||
return res
|
||||
|
||||
def _remove_move_reconcile(self, cr, uid, move_ids=[], context=None):
|
||||
|
@ -1139,9 +1139,9 @@ class account_move_line(osv.osv):
|
|||
if isinstance(ids, (int, long)):
|
||||
ids = [ids]
|
||||
if vals.get('account_tax_id', False):
|
||||
raise osv.except_osv(_('Unable to change tax !'), _('You can not change the tax, you should remove and recreate lines !'))
|
||||
raise osv.except_osv(_('Unable to change tax!'), _('You cannot change the tax, you should remove and recreate lines.'))
|
||||
if ('account_id' in vals) and not account_obj.read(cr, uid, vals['account_id'], ['active'])['active']:
|
||||
raise osv.except_osv(_('Bad account!'), _('You can not use an inactive account!'))
|
||||
raise osv.except_osv(_('Bad Account!'), _('You cannot use an inactive account.'))
|
||||
if update_check:
|
||||
if ('account_id' in vals) or ('journal_id' in vals) or ('period_id' in vals) or ('move_id' in vals) or ('debit' in vals) or ('credit' in vals) or ('date' in vals):
|
||||
self._update_check(cr, uid, ids, context)
|
||||
|
@ -1202,9 +1202,9 @@ class account_move_line(osv.osv):
|
|||
for line in self.browse(cr, uid, ids, context=context):
|
||||
err_msg = _('Move name (id): %s (%s)') % (line.move_id.name, str(line.move_id.id))
|
||||
if line.move_id.state <> 'draft' and (not line.journal_id.entry_posted):
|
||||
raise osv.except_osv(_('Error !'), _('You can not do this modification on a confirmed entry! You can just change some non legal fields or you must unconfirm the journal entry first! \n%s') % err_msg)
|
||||
raise osv.except_osv(_('Error!'), _('You cannot do this modification on a confirmed entry. You can just change some non legal fields or you must unconfirm the journal entry first.\n%s.') % err_msg)
|
||||
if line.reconcile_id:
|
||||
raise osv.except_osv(_('Error !'), _('You can not do this modification on a reconciled entry! You can just change some non legal fields or you must unreconcile first!\n%s') % err_msg)
|
||||
raise osv.except_osv(_('Error!'), _('You cannot do this modification on a reconciled entry. You can just change some non legal fields or you must unreconcile first.\n%s.') % err_msg)
|
||||
t = (line.journal_id.id, line.period_id.id)
|
||||
if t not in done:
|
||||
self._update_journal_check(cr, uid, line.journal_id.id, line.period_id.id, context)
|
||||
|
@ -1224,7 +1224,7 @@ class account_move_line(osv.osv):
|
|||
if company_id:
|
||||
vals['company_id'] = company_id[0]
|
||||
if ('account_id' in vals) and not account_obj.read(cr, uid, vals['account_id'], ['active'])['active']:
|
||||
raise osv.except_osv(_('Bad account!'), _('You can not use an inactive account!'))
|
||||
raise osv.except_osv(_('Bad Account!'), _('You cannot use an inactive account.'))
|
||||
if 'journal_id' in vals:
|
||||
context['journal_id'] = vals['journal_id']
|
||||
if 'period_id' in vals:
|
||||
|
@ -1237,7 +1237,7 @@ class account_move_line(osv.osv):
|
|||
if 'period_id' not in context or not isinstance(context.get('period_id', ''), (int, long)):
|
||||
period_candidate_ids = self.pool.get('account.period').name_search(cr, uid, name=context.get('period_id',''))
|
||||
if len(period_candidate_ids) != 1:
|
||||
raise osv.except_osv(_('Encoding error'), _('No period found or more than one period found for the given date.'))
|
||||
raise osv.except_osv(_('Error!'), _('No period found or more than one period found for the given date.'))
|
||||
context['period_id'] = period_candidate_ids[0][0]
|
||||
if not context.get('journal_id', False) and context.get('search_default_journal_id', False):
|
||||
context['journal_id'] = context.get('search_default_journal_id')
|
||||
|
@ -1263,7 +1263,7 @@ class account_move_line(osv.osv):
|
|||
move_id = move_obj.create(cr, uid, v, context)
|
||||
vals['move_id'] = move_id
|
||||
else:
|
||||
raise osv.except_osv(_('No piece number !'), _('Can not create an automatic sequence for this piece!\nPut a sequence in the journal definition for automatic numbering or create a sequence manually for this piece.'))
|
||||
raise osv.except_osv(_('No piece number !'), _('Cannot create an automatic sequence for this piece.\nPut a sequence in the journal definition for automatic numbering or create a sequence manually for this piece.'))
|
||||
ok = not (journal.type_control_ids or journal.account_control_ids)
|
||||
if ('account_id' in vals):
|
||||
account = account_obj.browse(cr, uid, vals['account_id'], context=context)
|
||||
|
@ -1288,7 +1288,7 @@ class account_move_line(osv.osv):
|
|||
vals['amount_currency'] = cur_obj.compute(cr, uid, account.company_id.currency_id.id,
|
||||
account.currency_id.id, vals.get('debit', 0.0)-vals.get('credit', 0.0), context=ctx)
|
||||
if not ok:
|
||||
raise osv.except_osv(_('Bad account !'), _('You can not use this general account in this journal, check the tab \'Entry Controls\' on the related journal !'))
|
||||
raise osv.except_osv(_('Bad Account!'), _('You cannot use this general account in this journal, check the tab \'Entry Controls\' on the related journal.'))
|
||||
|
||||
if vals.get('analytic_account_id',False):
|
||||
if journal.analytic_journal_id:
|
||||
|
|
|
@ -209,7 +209,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_invoice.py:1241
|
||||
#, python-format
|
||||
msgid "Invoice '%s' is paid partially: %s%s of %s%s (%s%s remaining)"
|
||||
msgid "Invoice '%s' is paid partially: %s%s of %s%s (%s%s remaining)."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -579,7 +579,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_move_line.py:1251
|
||||
#, python-format
|
||||
msgid "No period found or more than one period found for the given date."
|
||||
msgid "No period is found or more than one period found for the given date."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -618,7 +618,7 @@ msgstr ""
|
|||
#: code:addons/account/account_move_line.py:750
|
||||
#: code:addons/account/account_move_line.py:803
|
||||
#, python-format
|
||||
msgid "To reconcile the entries company should be the same for all entries"
|
||||
msgid "To reconcile the entries company should be the same for all entries."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -787,7 +787,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_invoice_refund.py:110
|
||||
#, python-format
|
||||
msgid "Can not %s invoice which is already reconciled, invoice should be unreconciled first. You can only Refund this invoice"
|
||||
msgid "Cannot %s invoice which is already reconciled, invoice should be unreconciled first. You can only Refund this invoice."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -1239,7 +1239,7 @@ msgstr ""
|
|||
#: code:addons/account/wizard/account_financial_report.py:69
|
||||
#: code:addons/account/wizard/account_report_common.py:144
|
||||
#, python-format
|
||||
msgid "Select a starting and an ending period"
|
||||
msgid "Select a starting and an ending period."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -1564,7 +1564,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_invoice.py:1429
|
||||
#, python-format
|
||||
msgid "You selected an Unit of Measure which is not compatible with the product."
|
||||
msgid "Selected Unit of Measure is not compatible with the Unit of Measure of the product."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -1934,8 +1934,8 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:1461
|
||||
#, python-format
|
||||
msgid "There is no default default debit account defined \n"
|
||||
"on journal \"%s\""
|
||||
msgid "No default debit account is defined \n"
|
||||
"on journal \"%s\"."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -1959,7 +1959,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:787
|
||||
#, python-format
|
||||
msgid "You can not modify the company of this journal as its related record exist in journal items"
|
||||
msgid "You cannot modify the company of this journal as its related record exist in journal items."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -2013,7 +2013,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_invoice.py:370
|
||||
#, python-format
|
||||
msgid "There is no Accounting Journal of type Sale/Purchase defined!"
|
||||
msgid "No Sale/Purchase Journal(s) is defined!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -2112,8 +2112,8 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:1468
|
||||
#, python-format
|
||||
msgid "There is no default default credit account defined \n"
|
||||
"on journal \"%s\""
|
||||
msgid "No default credit account is defined \n"
|
||||
"on journal \"%s\"."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -2530,7 +2530,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:1321
|
||||
#, python-format
|
||||
msgid "No sequence defined on the journal !"
|
||||
msgid "Please define sequence on the journal !"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -2538,7 +2538,7 @@ msgstr ""
|
|||
#: code:addons/account/account_invoice.py:688
|
||||
#: code:addons/account/account_move_line.py:173
|
||||
#, python-format
|
||||
msgid "You have to define an analytic journal on the '%s' journal!"
|
||||
msgid "You have to define an analytic journal on the '%s' journal."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -2915,7 +2915,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_open_closed_fiscalyear.py:39
|
||||
#, python-format
|
||||
msgid "No End of year journal defined for the fiscal year"
|
||||
msgid "Please define End of year journal for the fiscal year."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -3216,7 +3216,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_move_line.py:584
|
||||
#, python-format
|
||||
msgid "You can not create journal items on a \"view\" account %s %s"
|
||||
msgid "You cannot create journal items on a \"view\" account %s %s."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -3449,7 +3449,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_invoice.py:921
|
||||
#, python-format
|
||||
msgid "You cannot create an invoice on a centralised journal. Uncheck the centralised counterpart box in the related journal from the configuration menu."
|
||||
msgid "You cannot create an invoice on a centralized journal. Uncheck the centralized counterpart box in the related journal from the configuration menu."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -3473,7 +3473,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_move_line.py:591
|
||||
#, python-format
|
||||
msgid "You can not create journal items on a closed account %s %s"
|
||||
msgid "You cannot create journal items on a closed account %s %s."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -3737,7 +3737,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_move_line.py:97
|
||||
#, python-format
|
||||
msgid "You haven't supplied enough argument to compute the initial balance, please select a period and journal in the context."
|
||||
msgid "You havenot supplied enough argument to compute the initial balance, please select a period and journal in the context."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -3776,7 +3776,7 @@ msgstr ""
|
|||
#: code:addons/account/account_move_line.py:1216
|
||||
#, python-format
|
||||
msgid "You cannot do this modification on a confirmed entry! You can just change some non legal fields or you must unconfirm the journal entry first! \n"
|
||||
"%s"
|
||||
"%s."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -3830,7 +3830,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_analytic_line.py:93
|
||||
#, python-format
|
||||
msgid "There is no expense account defined for this product: \"%s\" (id:%d)"
|
||||
msgid "No expense account is defined for this product: \"%s\" (id:%d)."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -3980,7 +3980,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/report/common_report_header.py:92
|
||||
#, python-format
|
||||
msgid "Not implemented"
|
||||
msgid "Not implemented!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -4032,7 +4032,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:1077
|
||||
#, python-format
|
||||
msgid "You can not modify company of this period as some journal items exists."
|
||||
msgid "You cannot modify company of this period as some journal items exist."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -4065,7 +4065,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_move_line.py:1251
|
||||
#, python-format
|
||||
msgid "Encoding error"
|
||||
msgid "Encoding error!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -4178,7 +4178,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:1567
|
||||
#, python-format
|
||||
msgid "Couldn't create move with currency different from the secondary currency of the account \"%s - %s\". Clear the secondary currency field of the account definition if you want to accept all currencies."
|
||||
msgid "Cannot create move with currency different from the secondary currency of the account \"%s - %s\". Clear the secondary currency field of the account definition if you want to accept all currencies."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -4404,7 +4404,7 @@ msgstr ""
|
|||
#: code:addons/account/wizard/account_report_common.py:144
|
||||
#: code:addons/account/wizard/account_report_common.py:150
|
||||
#, python-format
|
||||
msgid "Error"
|
||||
msgid "Error!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -4727,7 +4727,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_invoice_refund.py:146
|
||||
#, python-format
|
||||
msgid "No Period found on Invoice!"
|
||||
msgid "No Period is found on Invoice!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -4910,7 +4910,7 @@ msgstr ""
|
|||
|
||||
#. module: account
|
||||
#: constraint:account.move:0
|
||||
msgid "You can not create more than one move per period on centralized journal"
|
||||
msgid "You cannot create more than one move per period on centralized journal."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -5024,7 +5024,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_validate_account_move.py:39
|
||||
#, python-format
|
||||
msgid "Specified Journal does not have any account move entries in draft state for this period"
|
||||
msgid "Specified Journal does not have any account move entries in draft state for this period."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -5118,7 +5118,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:963
|
||||
#, python-format
|
||||
msgid "No fiscal year defined for this date !\n"
|
||||
msgid "No fiscal year is defined for this date !\n"
|
||||
"Please create one from the configuration of the accounting menu."
|
||||
msgstr ""
|
||||
|
||||
|
@ -5475,7 +5475,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_report_common.py:150
|
||||
#, python-format
|
||||
msgid "not implemented"
|
||||
msgid "Not implemented!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -5640,7 +5640,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:629
|
||||
#, python-format
|
||||
msgid "You can not remove/desactivate an account which is set on a customer or supplier."
|
||||
msgid "You cannot remove/deactivate an account which is set on a customer or supplier."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -5929,7 +5929,7 @@ msgstr ""
|
|||
|
||||
#. module: account
|
||||
#: constraint:account.payment.term.line:0
|
||||
msgid "Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2% "
|
||||
msgid "Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2% ."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -5956,7 +5956,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:622
|
||||
#, python-format
|
||||
msgid "You can not desactivate an account that contains some journal items."
|
||||
msgid "You cannot deactivate an account that contains some journal items."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -5988,7 +5988,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_state_open.py:37
|
||||
#, python-format
|
||||
msgid "Invoice is already reconciled"
|
||||
msgid "Invoice is already reconciled."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -6035,7 +6035,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:1563
|
||||
#, python-format
|
||||
msgid "Couldn't create move between different companies"
|
||||
msgid "Cannot create move between different companies"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -6183,7 +6183,7 @@ msgstr ""
|
|||
#: code:addons/account/account_move_line.py:584
|
||||
#: code:addons/account/account_move_line.py:591
|
||||
#, python-format
|
||||
msgid "Error :"
|
||||
msgid "Error !"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -6231,7 +6231,7 @@ msgstr ""
|
|||
#: code:addons/account/account_move_line.py:1218
|
||||
#, python-format
|
||||
msgid "You cannot do this modification on a reconciled entry! You can just change some non legal fields or you must unreconcile first!\n"
|
||||
"%s"
|
||||
"%s."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -6548,7 +6548,7 @@ msgstr ""
|
|||
#: code:addons/account/account_invoice.py:528
|
||||
#: code:addons/account/account_invoice.py:543
|
||||
#, python-format
|
||||
msgid "Can not find a chart of account, you should create one from the configuration of the accounting menu."
|
||||
msgid "Cannot find a chart of account, you should create one from Settings\Configuration\Accounting menu."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -6720,7 +6720,7 @@ msgstr ""
|
|||
#: code:addons/account/wizard/account_validate_account_move.py:39
|
||||
#: code:addons/account/wizard/account_validate_account_move.py:61
|
||||
#, python-format
|
||||
msgid "Warning"
|
||||
msgid "Warning !"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -6797,7 +6797,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_fiscalyear_close.py:73
|
||||
#, python-format
|
||||
msgid "The periods to generate opening entries were not found"
|
||||
msgid "The periods to generate opening entries were not found."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -6843,7 +6843,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_fiscalyear_close.py:84
|
||||
#, python-format
|
||||
msgid "The journal must have default credit and debit account"
|
||||
msgid "The journal must have default credit and debit account."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -7056,7 +7056,7 @@ msgstr ""
|
|||
#: code:addons/account/wizard/account_report_aged_partner_balance.py:56
|
||||
#: code:addons/account/wizard/account_report_aged_partner_balance.py:58
|
||||
#, python-format
|
||||
msgid "UserError"
|
||||
msgid "UserError!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -7246,7 +7246,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:3446
|
||||
#, python-format
|
||||
msgid "The bank account defined on the selected chart of accounts hasn't a code."
|
||||
msgid "The bank account defined on the selected chart of accounts hasnot a code."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -7634,7 +7634,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:1052
|
||||
#, python-format
|
||||
msgid "No period defined for this date: %s !\n"
|
||||
msgid "No period is defined for this date: %s !\n"
|
||||
"Please create one."
|
||||
msgstr ""
|
||||
|
||||
|
@ -8427,7 +8427,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_automatic_reconcile.py:152
|
||||
#, python-format
|
||||
msgid "You must select accounts to reconcile"
|
||||
msgid "You must select accounts to reconcile."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -8519,7 +8519,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_fiscalyear_close.py:87
|
||||
#, python-format
|
||||
msgid "The journal must have centralised counterpart without the Skipping draft state option checked!"
|
||||
msgid "The journal must have centralized counterpart without the Skipping draft state option checked!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -8561,7 +8561,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account.py:412
|
||||
#, python-format
|
||||
msgid "No opening/closing period defined, please create one to set the initial balance!"
|
||||
msgid "No opening/closing period is defined, please create one to set the initial balance!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -8726,7 +8726,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_invoice.py:372
|
||||
#, python-format
|
||||
msgid "Unknown Error"
|
||||
msgid "Unknown Error!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -9055,7 +9055,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_analytic_line.py:102
|
||||
#, python-format
|
||||
msgid "There is no income account defined for this product: \"%s\" (id:%d)"
|
||||
msgid "No income account is defined for this product: \"%s\" (id:%d)."
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -9327,7 +9327,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/account_move_line.py:832
|
||||
#, python-format
|
||||
msgid "Entry is already reconciled"
|
||||
msgid "Entry is already reconciled!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
@ -9774,7 +9774,7 @@ msgstr ""
|
|||
#. module: account
|
||||
#: code:addons/account/wizard/account_report_aged_partner_balance.py:56
|
||||
#, python-format
|
||||
msgid "You must enter a period length that cannot be 0 or below !"
|
||||
msgid "You must enter a period length greater than 0 !"
|
||||
msgstr ""
|
||||
|
||||
#. module: account
|
||||
|
|
|
@ -91,7 +91,7 @@ class account_installer(osv.osv_memory):
|
|||
def check_unconfigured_cmp(self, cr, uid, context=None):
|
||||
""" check if there are still unconfigured companies """
|
||||
if not self.get_unconfigured_cmp(cr, uid, context=context):
|
||||
raise osv.except_osv(_('No unconfigured company !'), _("There are currently no company without chart of account. The wizard will therefore not be executed."))
|
||||
raise osv.except_osv(_('No unconfigured company !'), _("There is currently no company without chart of account. The wizard will therefore not be executed."))
|
||||
|
||||
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
|
||||
if context is None:context = {}
|
||||
|
|
|
@ -89,7 +89,7 @@ class common_report_header(object):
|
|||
return ''
|
||||
|
||||
def _get_sortby(self, data):
|
||||
raise (_('Error'), _('Not implemented'))
|
||||
raise (_('Error!'), _('Not implemented.'))
|
||||
|
||||
def _get_filter(self, data):
|
||||
if data.get('form', False) and data['form'].get('filter', False):
|
||||
|
|
|
@ -70,4 +70,4 @@
|
|||
try:
|
||||
self.button_cancel(cr, uid, [ref("account_bank_statement_0")])
|
||||
except Exception, e:
|
||||
assert e[0]=='UserError', 'Another exception has been raised!'
|
||||
assert e[0]=='User Error!', 'Another exception has been raised!'
|
||||
|
|
|
@ -145,7 +145,7 @@ class account_automatic_reconcile(osv.osv_memory):
|
|||
allow_write_off = form.allow_write_off
|
||||
reconciled = unreconciled = 0
|
||||
if not form.account_ids:
|
||||
raise osv.except_osv(_('UserError'), _('You must select accounts to reconcile'))
|
||||
raise osv.except_osv(_('User Error!'), _('You must select accounts to reconcile.'))
|
||||
for account_id in form.account_ids:
|
||||
params = (account_id.id,)
|
||||
if not allow_write_off:
|
||||
|
|
|
@ -35,7 +35,7 @@ class account_change_currency(osv.osv_memory):
|
|||
context = {}
|
||||
if context.get('active_id',False):
|
||||
if obj_inv.browse(cr, uid, context['active_id']).state != 'draft':
|
||||
raise osv.except_osv(_('Error'), _('You can only change currency for Draft Invoice !'))
|
||||
raise osv.except_osv(_('Error!'), _('You can only change currency for Draft Invoice.'))
|
||||
pass
|
||||
|
||||
def change_currency(self, cr, uid, ids, context=None):
|
||||
|
@ -56,18 +56,18 @@ class account_change_currency(osv.osv_memory):
|
|||
if invoice.company_id.currency_id.id == invoice.currency_id.id:
|
||||
new_price = line.price_unit * rate
|
||||
if new_price <= 0:
|
||||
raise osv.except_osv(_('Error'), _('New currency is not configured properly !'))
|
||||
raise osv.except_osv(_('Error!'), _('New currency is not configured properly.'))
|
||||
|
||||
if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id == new_currency:
|
||||
old_rate = invoice.currency_id.rate
|
||||
if old_rate <= 0:
|
||||
raise osv.except_osv(_('Error'), _('Current currency is not configured properly !'))
|
||||
raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
|
||||
new_price = line.price_unit / old_rate
|
||||
|
||||
if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id != new_currency:
|
||||
old_rate = invoice.currency_id.rate
|
||||
if old_rate <= 0:
|
||||
raise osv.except_osv(_('Error'), _('Current currency is not configured properly !'))
|
||||
raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
|
||||
new_price = (line.price_unit / old_rate ) * rate
|
||||
obj_inv_line.write(cr, uid, [line.id], {'price_unit': new_price})
|
||||
obj_inv.write(cr, uid, [invoice.id], {'currency_id': new_currency}, context=context)
|
||||
|
|
|
@ -67,7 +67,7 @@ class accounting_report(osv.osv_memory):
|
|||
result['date_to'] = data['form']['date_to_cmp']
|
||||
elif data['form']['filter_cmp'] == 'filter_period':
|
||||
if not data['form']['period_from_cmp'] or not data['form']['period_to_cmp']:
|
||||
raise osv.except_osv(_('Error'),_('Select a starting and an ending period'))
|
||||
raise osv.except_osv(_('Error!'),_('Select a starting and an ending period'))
|
||||
result['period_from'] = data['form']['period_from_cmp']
|
||||
result['period_to'] = data['form']['period_to_cmp']
|
||||
return result
|
||||
|
|
|
@ -59,7 +59,7 @@ class account_fiscalyear_close(osv.osv_memory):
|
|||
#check that the reconcilation concern journal entries from only one company
|
||||
cr.execute('select distinct(company_id) from account_move_line where id in %s',(tuple(ids),))
|
||||
if len(cr.fetchall()) > 1:
|
||||
raise osv.except_osv(_('Warning !'), _('The entries to reconcile should belong to the same company'))
|
||||
raise osv.except_osv(_('Warning!'), _('The entries to reconcile should belong to the same company.'))
|
||||
r_id = self.pool.get('account.move.reconcile').create(cr, uid, {'type': 'auto'})
|
||||
cr.execute('update account_move_line set reconcile_id = %s where id in %s',(r_id, tuple(ids),))
|
||||
return r_id
|
||||
|
@ -85,7 +85,7 @@ class account_fiscalyear_close(osv.osv_memory):
|
|||
fy2_period_set = ','.join(map(lambda id: str(id[0]), cr.fetchall()))
|
||||
|
||||
if not fy_period_set or not fy2_period_set:
|
||||
raise osv.except_osv(_('UserError'), _('The periods to generate opening entries were not found'))
|
||||
raise osv.except_osv(_('User Error!'), _('The periods to generate opening entries cannot be found.'))
|
||||
|
||||
period = obj_acc_period.browse(cr, uid, data[0].period_id.id, context=context)
|
||||
new_fyear = obj_acc_fiscalyear.browse(cr, uid, data[0].fy2_id.id, context=context)
|
||||
|
@ -96,11 +96,11 @@ class account_fiscalyear_close(osv.osv_memory):
|
|||
company_id = new_journal.company_id.id
|
||||
|
||||
if not new_journal.default_credit_account_id or not new_journal.default_debit_account_id:
|
||||
raise osv.except_osv(_('UserError'),
|
||||
_('The journal must have default credit and debit account'))
|
||||
raise osv.except_osv(_('User Error!'),
|
||||
_('The journal must have default credit and debit account.'))
|
||||
if (not new_journal.centralisation) or new_journal.entry_posted:
|
||||
raise osv.except_osv(_('UserError'),
|
||||
_('The journal must have centralised counterpart without the Skipping draft state option checked!'))
|
||||
raise osv.except_osv(_('User Error!'),
|
||||
_('The journal must have centralized counterpart without the Skipping draft state option checked.'))
|
||||
|
||||
#delete existing move and move lines if any
|
||||
move_ids = obj_acc_move.search(cr, uid, [
|
||||
|
|
|
@ -108,7 +108,7 @@ class account_invoice_refund(osv.osv_memory):
|
|||
if inv.state in ['draft', 'proforma2', 'cancel']:
|
||||
raise osv.except_osv(_('Error!'), _('Cannot %s draft/proforma/cancel invoice.') % (mode))
|
||||
if inv.reconciled and mode in ('cancel', 'modify'):
|
||||
raise osv.except_osv(_('Error !'), _('Can not %s invoice which is already reconciled, invoice should be unreconciled first. You can only Refund this invoice') % (mode))
|
||||
raise osv.except_osv(_('Error!'), _('Cannot %s invoice which is already reconciled, invoice should be unreconciled first. You can only refund this invoice.') % (mode))
|
||||
if form.period.id:
|
||||
period = form.period.id
|
||||
else:
|
||||
|
@ -143,8 +143,8 @@ class account_invoice_refund(osv.osv_memory):
|
|||
description = inv.name
|
||||
|
||||
if not period:
|
||||
raise osv.except_osv(_('Data Insufficient !'), \
|
||||
_('No Period found on Invoice!'))
|
||||
raise osv.except_osv(_('Insufficient Data!'), \
|
||||
_('No period found on the invoice.'))
|
||||
|
||||
refund_id = inv_obj.refund(cr, uid, [inv.id], date, period, description, journal_id)
|
||||
refund = inv_obj.browse(cr, uid, refund_id[0], context=context)
|
||||
|
|
|
@ -41,7 +41,7 @@ class account_invoice_confirm(osv.osv_memory):
|
|||
|
||||
for record in data_inv:
|
||||
if record['state'] not in ('draft','proforma','proforma2'):
|
||||
raise osv.except_osv(_('Warning'), _("Selected Invoice(s) cannot be confirmed as they are not in 'Draft' or 'Pro-Forma' state!"))
|
||||
raise osv.except_osv(_('Warning!'), _("Selected invoice(s) cannot be confirmed as they are not in 'Draft' or 'Pro-Forma' state."))
|
||||
wf_service.trg_validate(uid, 'account.invoice', record['id'], 'invoice_open', cr)
|
||||
return {'type': 'ir.actions.act_window_close'}
|
||||
|
||||
|
@ -65,7 +65,7 @@ class account_invoice_cancel(osv.osv_memory):
|
|||
|
||||
for record in data_inv:
|
||||
if record['state'] in ('cancel','paid'):
|
||||
raise osv.except_osv(_('Warning'), _("Selected Invoice(s) cannot be cancelled as they are already in 'Cancelled' or 'Done' state!"))
|
||||
raise osv.except_osv(_('Warning!'), _("Selected invoice(s) cannot be cancelled as they are already in 'Cancelled' or 'Done' state."))
|
||||
wf_service.trg_validate(uid, 'account.invoice', record['id'], 'invoice_cancel', cr)
|
||||
return {'type': 'ir.actions.act_window_close'}
|
||||
|
||||
|
|
|
@ -46,7 +46,7 @@ class account_move_bank_reconcile(osv.osv_memory):
|
|||
from account_journal where id=%s', (data['journal_id'],))
|
||||
account_id = cr.fetchone()[0]
|
||||
if not account_id:
|
||||
raise osv.except_osv(_('Error'), _('You have to define \
|
||||
raise osv.except_osv(_('Error!'), _('You have to define \
|
||||
the bank account\nin the journal definition for reconciliation.'))
|
||||
return {
|
||||
'domain': "[('journal_id','=',%d), ('account_id','=',%d), ('state','<>','draft')]" % (data['journal_id'], account_id),
|
||||
|
|
|
@ -60,7 +60,7 @@ class account_move_journal(osv.osv_memory):
|
|||
if context.get('journal_type', False):
|
||||
jids = journal_pool.search(cr, uid, [('type','=', context.get('journal_type'))])
|
||||
if not jids:
|
||||
raise osv.except_osv(_('Configuration Error !'), _('Can\'t find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration/Financial Accounting/Accounts/Journals.') % context.get('journal_type'))
|
||||
raise osv.except_osv(_('Configuration Error!'), _('Cannot find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration/Journals/Journals.') % context.get('journal_type'))
|
||||
journal_id = jids[0]
|
||||
|
||||
return journal_id
|
||||
|
@ -158,7 +158,7 @@ class account_move_journal(osv.osv_memory):
|
|||
state = period.state
|
||||
|
||||
if state == 'done':
|
||||
raise osv.except_osv(_('UserError'), _('This period is already closed !'))
|
||||
raise osv.except_osv(_('User Error!'), _('This period is already closed.'))
|
||||
|
||||
company = period.company_id.id
|
||||
res = {
|
||||
|
|
|
@ -36,7 +36,7 @@ class account_open_closed_fiscalyear(osv.osv_memory):
|
|||
data = self.browse(cr, uid, ids, context=context)[0]
|
||||
period_journal = data.fyear_id.end_journal_period_id or False
|
||||
if not period_journal:
|
||||
raise osv.except_osv(_('Error !'), _('No End of year journal defined for the fiscal year'))
|
||||
raise osv.except_osv(_('Error!'), _('You have to set the end of the fiscal year for this journal.'))
|
||||
|
||||
ids_move = move_obj.search(cr, uid, [('journal_id','=',period_journal.journal_id.id),('period_id','=',period_journal.period_id.id)])
|
||||
if ids_move:
|
||||
|
|
|
@ -48,7 +48,7 @@ class account_period_close(osv.osv_memory):
|
|||
for id in context['active_ids']:
|
||||
account_move_ids = account_move_obj.search(cr, uid, [('period_id', '=', id), ('state', '=', "draft")], context=context)
|
||||
if account_move_ids:
|
||||
raise osv.except_osv(_('Invalid action !'), _('In order to close a period, you must first post related journal entries.'))
|
||||
raise osv.except_osv(_('Invalid Action!'), _('In order to close a period, you must first post related journal entries.'))
|
||||
|
||||
cr.execute('update account_journal_period set state=%s where period_id=%s', (mode, id))
|
||||
cr.execute('update account_period set state=%s where id=%s', (mode, id))
|
||||
|
|
|
@ -53,9 +53,9 @@ class account_aged_trial_balance(osv.osv_memory):
|
|||
|
||||
period_length = data['form']['period_length']
|
||||
if period_length<=0:
|
||||
raise osv.except_osv(_('UserError'), _('You must enter a period length that cannot be 0 or below !'))
|
||||
raise osv.except_osv(_('User Error!'), _('You must set a period length greater than 0.'))
|
||||
if not data['form']['date_from']:
|
||||
raise osv.except_osv(_('UserError'), _('Enter a Start date !'))
|
||||
raise osv.except_osv(_('User Error!'), _('You must set a start date.'))
|
||||
|
||||
start = datetime.strptime(data['form']['date_from'], "%Y-%m-%d")
|
||||
|
||||
|
|
|
@ -144,13 +144,13 @@ class account_common_report(osv.osv_memory):
|
|||
result['date_to'] = data['form']['date_to']
|
||||
elif data['form']['filter'] == 'filter_period':
|
||||
if not data['form']['period_from'] or not data['form']['period_to']:
|
||||
raise osv.except_osv(_('Error'),_('Select a starting and an ending period'))
|
||||
raise osv.except_osv(_('Error!'),_('Select a starting and an ending period.'))
|
||||
result['period_from'] = data['form']['period_from']
|
||||
result['period_to'] = data['form']['period_to']
|
||||
return result
|
||||
|
||||
def _print_report(self, cr, uid, ids, data, context=None):
|
||||
raise (_('Error'), _('not implemented'))
|
||||
raise (_('Error!'), _('Not implemented.'))
|
||||
|
||||
def check_report(self, cr, uid, ids, context=None):
|
||||
if context is None:
|
||||
|
|
|
@ -34,7 +34,7 @@ class account_state_open(osv.osv_memory):
|
|||
if 'active_ids' in context:
|
||||
data_inv = obj_invoice.browse(cr, uid, context['active_ids'][0], context=context)
|
||||
if data_inv.reconciled:
|
||||
raise osv.except_osv(_('Warning'), _('Invoice is already reconciled'))
|
||||
raise osv.except_osv(_('Warning!'), _('Invoice is already reconciled.'))
|
||||
wf_service = netsvc.LocalService("workflow")
|
||||
wf_service.trg_validate(uid, 'account.invoice', context['active_ids'][0], 'open_test', cr)
|
||||
return {'type': 'ir.actions.act_window_close'}
|
||||
|
|
|
@ -36,7 +36,7 @@ class validate_account_move(osv.osv_memory):
|
|||
data = self.browse(cr, uid, ids, context=context)[0]
|
||||
ids_move = obj_move.search(cr, uid, [('state','=','draft'),('journal_id','=',data.journal_id.id),('period_id','=',data.period_id.id)])
|
||||
if not ids_move:
|
||||
raise osv.except_osv(_('Warning'), _('Specified Journal does not have any account move entries in draft state for this period'))
|
||||
raise osv.except_osv(_('Warning!'), _('Specified journal does not have any account move entries in draft state for this period.'))
|
||||
obj_move.button_validate(cr, uid, ids_move, context=context)
|
||||
return {'type': 'ir.actions.act_window_close'}
|
||||
|
||||
|
@ -58,7 +58,7 @@ class validate_account_move_lines(osv.osv_memory):
|
|||
move_ids.append(line.move_id.id)
|
||||
move_ids = list(set(move_ids))
|
||||
if not move_ids:
|
||||
raise osv.except_osv(_('Warning'), _('Selected Entry Lines does not have any account move enties in draft state'))
|
||||
raise osv.except_osv(_('Warning!'), _('Selected Entry Lines does not have any account move enties in draft state.'))
|
||||
obj_move.button_validate(cr, uid, move_ids, context)
|
||||
return {'type': 'ir.actions.act_window_close'}
|
||||
validate_account_move_lines()
|
||||
|
|
|
@ -218,7 +218,7 @@ class account_analytic_plan_instance(osv.osv):
|
|||
|
||||
pids = ana_plan_instance_obj.search(cr, uid, [('name','=',vals['name']), ('code','=',vals['code']), ('plan_id','<>',False)], context=context)
|
||||
if pids:
|
||||
raise osv.except_osv(_('Error'), _('A model having this name and code already exists !'))
|
||||
raise osv.except_osv(_('Error!'), _('A model with this name and code already exists.'))
|
||||
|
||||
res = acct_anal_plan_line_obj.search(cr, uid, [('plan_id','=',journal.plan_id.id)], context=context)
|
||||
for i in res:
|
||||
|
@ -231,7 +231,7 @@ class account_analytic_plan_instance(osv.osv):
|
|||
if acct_anal_acct.search(cr, uid, [('parent_id', 'child_of', [item.root_analytic_id.id]), ('id', '=', tempo[2]['analytic_account_id'])], context=context):
|
||||
total_per_plan += tempo[2]['rate']
|
||||
if total_per_plan < item.min_required or total_per_plan > item.max_required:
|
||||
raise osv.except_osv(_('Value Error'),_('The Total Should be Between %s and %s') % (str(item.min_required), str(item.max_required)))
|
||||
raise osv.except_osv(_('Error!'),_('The total should be between %s and %s.') % (str(item.min_required), str(item.max_required)))
|
||||
|
||||
return super(account_analytic_plan_instance, self).create(cr, uid, vals, context=context)
|
||||
|
||||
|
@ -338,7 +338,7 @@ class account_move_line(osv.osv):
|
|||
for line in self.browse(cr, uid, ids, context=context):
|
||||
if line.analytics_id:
|
||||
if not line.journal_id.analytic_journal_id:
|
||||
raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal!") % (line.journal_id.name,))
|
||||
raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal.") % (line.journal_id.name,))
|
||||
|
||||
toremove = analytic_line_obj.search(cr, uid, [('move_id','=',line.id)], context=context)
|
||||
if toremove:
|
||||
|
@ -482,7 +482,7 @@ class account_bank_statement(osv.osv):
|
|||
for st_line in st.line_ids:
|
||||
if st_line.analytics_id:
|
||||
if not st.journal_id.analytic_journal_id:
|
||||
raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal!") % (st.journal_id.name,))
|
||||
raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal.") % (st.journal_id.name,))
|
||||
if not st_line.amount:
|
||||
continue
|
||||
return True
|
||||
|
|
|
@ -64,7 +64,7 @@ msgstr ""
|
|||
#. module: account_analytic_plans
|
||||
#: code:addons/account_analytic_plans/wizard/account_crossovered_analytic.py:61
|
||||
#, python-format
|
||||
msgid "User Error"
|
||||
msgid "User Error!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account_analytic_plans
|
||||
|
@ -189,7 +189,7 @@ msgstr ""
|
|||
#. module: account_analytic_plans
|
||||
#: code:addons/account_analytic_plans/wizard/analytic_plan_create_model.py:41
|
||||
#, python-format
|
||||
msgid "No analytic plan defined !"
|
||||
msgid "Please define analytic plan."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_analytic_plans
|
||||
|
@ -278,7 +278,7 @@ msgstr ""
|
|||
#: code:addons/account_analytic_plans/account_analytic_plans.py:341
|
||||
#: code:addons/account_analytic_plans/account_analytic_plans.py:485
|
||||
#, python-format
|
||||
msgid "You have to define an analytic journal on the '%s' journal!"
|
||||
msgid "You have to define an analytic journal on the '%s' journal."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_analytic_plans
|
||||
|
@ -341,7 +341,7 @@ msgstr ""
|
|||
#: code:addons/account_analytic_plans/wizard/analytic_plan_create_model.py:38
|
||||
#: code:addons/account_analytic_plans/wizard/analytic_plan_create_model.py:41
|
||||
#, python-format
|
||||
msgid "Error"
|
||||
msgid "Error!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account_analytic_plans
|
||||
|
@ -362,7 +362,7 @@ msgstr ""
|
|||
#. module: account_analytic_plans
|
||||
#: code:addons/account_analytic_plans/wizard/analytic_plan_create_model.py:38
|
||||
#, python-format
|
||||
msgid "Please put a name and a code before saving the model !"
|
||||
msgid "Please put a name and a code before saving the model."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_analytic_plans
|
||||
|
@ -426,7 +426,7 @@ msgstr ""
|
|||
#. module: account_analytic_plans
|
||||
#: code:addons/account_analytic_plans/account_analytic_plans.py:234
|
||||
#, python-format
|
||||
msgid "The Total Should be Between %s and %s"
|
||||
msgid "The total should be between %s and %s."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_analytic_plans
|
||||
|
@ -523,6 +523,6 @@ msgstr ""
|
|||
#. module: account_analytic_plans
|
||||
#: code:addons/account_analytic_plans/account_analytic_plans.py:234
|
||||
#, python-format
|
||||
msgid "Value Error"
|
||||
msgid "Value Error!"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
@ -58,7 +58,7 @@ class account_crossovered_analytic(osv.osv_memory):
|
|||
flag = False
|
||||
break
|
||||
if flag:
|
||||
raise osv.except_osv(_('User Error'),_('There are no Analytic lines related to Account %s' % name))
|
||||
raise osv.except_osv(_('User Error!'),_('There are no analytic lines related to account %s.' % name))
|
||||
|
||||
datas = {
|
||||
'ids': [],
|
||||
|
|
|
@ -35,10 +35,10 @@ class analytic_plan_create_model(osv.osv_memory):
|
|||
if 'active_id' in context and context['active_id']:
|
||||
plan = plan_obj.browse(cr, uid, context['active_id'], context=context)
|
||||
if (not plan.name) or (not plan.code):
|
||||
raise osv.except_osv(_('Error'), _('Please put a name and a code before saving the model !'))
|
||||
raise osv.except_osv(_('Error!'), _('Please put a name and a code before saving the model.'))
|
||||
pids = anlytic_plan_obj.search(cr, uid, [], context=context)
|
||||
if not pids:
|
||||
raise osv.except_osv(_('Error'), _('No analytic plan defined !'))
|
||||
raise osv.except_osv(_('Error!'), _('There is no analytic plan defined.'))
|
||||
plan_obj.write(cr, uid, [context['active_id']], {'plan_id':pids[0]}, context=context)
|
||||
|
||||
model_data_ids = mod_obj.search(cr, uid, [('model', '=', 'ir.ui.view'),('name', '=', 'view_analytic_plan_create_model')], context=context)
|
||||
|
|
|
@ -127,8 +127,8 @@ class account_bank_statement_line(osv.osv):
|
|||
if context is None:
|
||||
context = {}
|
||||
if context.get('block_statement_line_delete', False):
|
||||
raise osv.except_osv(_('Warning'), _('Delete operation not allowed ! \
|
||||
Please go to the associated bank statement in order to delete and/or modify this bank statement line'))
|
||||
raise osv.except_osv(_('Warning!'), _('Delete operation not allowed. \
|
||||
Please go to the associated bank statement in order to delete and/or modify bank statement line.'))
|
||||
return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
|
||||
|
||||
account_bank_statement_line()
|
||||
|
|
|
@ -112,7 +112,7 @@ msgstr ""
|
|||
#. module: account_bank_statement_extensions
|
||||
#: code:addons/account_bank_statement_extensions/account_bank_statement.py:130
|
||||
#, python-format
|
||||
msgid "Delete operation not allowed ! Please go to the associated bank statement in order to delete and/or modify this bank statement line"
|
||||
msgid "Delete operation not allowed ! Please go to the associated bank statement in order to delete and/or modify bank statement line."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_bank_statement_extensions
|
||||
|
@ -155,7 +155,7 @@ msgstr ""
|
|||
#. module: account_bank_statement_extensions
|
||||
#: code:addons/account_bank_statement_extensions/account_bank_statement.py:130
|
||||
#, python-format
|
||||
msgid "Warning"
|
||||
msgid "Warning!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account_bank_statement_extensions
|
||||
|
|
|
@ -293,10 +293,10 @@ class coda_bank_statement(osv.osv):
|
|||
# unlink CODA banks statements as well as associated bank statements and CODA files
|
||||
for coda_statement in self.browse(cr, uid, new_ids, context=context):
|
||||
if coda_statement.statement_id.state == 'confirm':
|
||||
raise osv.except_osv(_('Invalid action !'),
|
||||
_("Cannot delete CODA Bank Statement '%s' of Journal '%s'." \
|
||||
"\nThe associated Bank Statement has already been confirmed !" \
|
||||
"\nPlease undo this action first!") \
|
||||
raise osv.except_osv(_('Invalid Action!'),
|
||||
_("Cannot delete CODA Bank Statement '%s' of journal '%s'." \
|
||||
"\nThe associated Bank Statement has already been confirmed." \
|
||||
"\nPlease undo this action first.") \
|
||||
% (coda_statement.name, coda_statement.journal_id.name))
|
||||
else:
|
||||
if not context.get('coda_unlink', False):
|
||||
|
@ -393,7 +393,7 @@ class coda_bank_statement_line(osv.osv):
|
|||
if context is None:
|
||||
context = {}
|
||||
if context.get('block_statement_line_delete', False):
|
||||
raise osv.except_osv('Warning', _('Delete operation not allowed !'))
|
||||
raise osv.except_osv('Warning', _('Delete operation not allowed.'))
|
||||
return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
|
||||
|
||||
coda_bank_statement_line()
|
||||
|
|
|
@ -54,7 +54,7 @@ class account_coda_import(osv.osv_memory):
|
|||
codafile = data.coda_data
|
||||
codafilename = data.coda_fname
|
||||
except:
|
||||
raise osv.except_osv(_('Error!'), _('Wizard in incorrect state. Please hit the Cancel button!'))
|
||||
raise osv.except_osv(_('Error!'), _('Wizard in incorrect state. Please hit the Cancel button.'))
|
||||
return {}
|
||||
|
||||
currency_obj = self.pool.get('res.currency')
|
||||
|
@ -113,7 +113,7 @@ class account_coda_import(osv.osv_memory):
|
|||
coda_statement['version'] = line[127]
|
||||
coda_version = line[127]
|
||||
if coda_version not in ['1','2']:
|
||||
err_string = _('\nCODA V%s statements are not supported, please contact your bank!') % coda_version
|
||||
err_string = _('\nCODA V%s statements are not supported, please contact your bank.') % coda_version
|
||||
err_code = 'R0001'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
|
@ -122,7 +122,7 @@ class account_coda_import(osv.osv_memory):
|
|||
coda_statement['date'] = str2date(line[5:11])
|
||||
period_id = period_obj.search(cr , uid, [('date_start' ,'<=', coda_statement['date']), ('date_stop','>=',coda_statement['date'])])
|
||||
if not period_id:
|
||||
err_string = _("\nThe CODA creation date doesn't fall within a defined Accounting Period!" \
|
||||
err_string = _("\nThe CODA creation date doesn't fall within a defined Accounting Period." \
|
||||
"\nPlease create the Accounting Period for date %s.") % coda_statement['date']
|
||||
err_code = 'R0002'
|
||||
if batch:
|
||||
|
@ -136,7 +136,7 @@ class account_coda_import(osv.osv_memory):
|
|||
('coda_creation_date', '=', coda_statement['date']),
|
||||
])
|
||||
if coda_id:
|
||||
err_string = _("\nCODA File with Filename '%s' and Creation Date '%s' has already been imported !") \
|
||||
err_string = _("\nCODA File with Filename '%s' and Creation Date '%s' has already been imported.") \
|
||||
% (codafilename, coda_statement['date'])
|
||||
err_code = 'W0001'
|
||||
if batch:
|
||||
|
@ -152,7 +152,7 @@ class account_coda_import(osv.osv_memory):
|
|||
coda_statement['acc_number'] = line[5:17]
|
||||
coda_statement['currency'] = line[18:21]
|
||||
elif line[1] == '1': # foreign bank account BBAN structure
|
||||
err_string = _('\nForeign bank accounts with BBAN structure are not supported !')
|
||||
err_string = _('\nForeign bank accounts with BBAN structure are not supported.')
|
||||
err_code = 'R1001'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
|
@ -161,13 +161,13 @@ class account_coda_import(osv.osv_memory):
|
|||
coda_statement['acc_number']=line[5:21]
|
||||
coda_statement['currency'] = line[39:42]
|
||||
elif line[1] == '3': # foreign bank account IBAN structure
|
||||
err_string = _('\nForeign bank accounts with IBAN structure are not supported !')
|
||||
err_string = _('\nForeign bank accounts with IBAN structure are not supported.')
|
||||
err_code = 'R1002'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
raise osv.except_osv(_('Data Error!'), err_string)
|
||||
else:
|
||||
err_string = _('\nUnsupported bank account structure !')
|
||||
err_string = _('\nUnsupported bank account structure.')
|
||||
err_code = 'R1003'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
|
@ -189,8 +189,8 @@ class account_coda_import(osv.osv_memory):
|
|||
find_bbacom = coda_bank['find_bbacom']
|
||||
find_partner = coda_bank['find_partner']
|
||||
else:
|
||||
err_string = _("\nNo matching CODA Bank Account Configuration record found !") + \
|
||||
_("\nPlease check if the 'Bank Account Number', 'Currency' and 'Account Description' fields of your configuration record match with '%s', '%s' and '%s' !") \
|
||||
err_string = _("\nNo matching CODA Bank Account Configuration record found.") + \
|
||||
_("\nPlease check if the 'Bank Account Number', 'Currency' and 'Account Description' fields of your configuration record match with '%s', '%s' and '%s'.") \
|
||||
% (coda_statement['acc_number'], coda_statement['currency'], coda_statement['description'])
|
||||
err_code = 'R1004'
|
||||
if batch:
|
||||
|
@ -265,7 +265,7 @@ class account_coda_import(osv.osv_memory):
|
|||
st_line['trans_type'] = line[53]
|
||||
trans_type = filter(lambda x: st_line['trans_type'] == x['type'], trans_type_table)
|
||||
if not trans_type:
|
||||
err_string = _('\nThe File contains an invalid CODA Transaction Type : %s!') % st_line['trans_type']
|
||||
err_string = _('\nThe File contains an invalid CODA Transaction Type : %s.') % st_line['trans_type']
|
||||
err_code = 'R2001'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
|
@ -274,7 +274,7 @@ class account_coda_import(osv.osv_memory):
|
|||
st_line['trans_family'] = line[54:56]
|
||||
trans_family = filter(lambda x: (x['type'] == 'family') and (st_line['trans_family'] == x['code']), trans_code_table)
|
||||
if not trans_family:
|
||||
err_string = _('\nThe File contains an invalid CODA Transaction Family : %s!') % st_line['trans_family']
|
||||
err_string = _('\nThe File contains an invalid CODA Transaction Family : %s.') % st_line['trans_family']
|
||||
err_code = 'R2002'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
|
@ -298,7 +298,7 @@ class account_coda_import(osv.osv_memory):
|
|||
st_line['struct_comm_type'] = line[62:65]
|
||||
comm_type = filter(lambda x: st_line['struct_comm_type'] == x['code'], comm_type_table)
|
||||
if not comm_type:
|
||||
err_string = _('\nThe File contains an invalid Structured Communication Type : %s!') % st_line['struct_comm_type']
|
||||
err_string = _('\nThe File contains an invalid Structured Communication Type : %s.') % st_line['struct_comm_type']
|
||||
err_code = 'R2003'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
|
@ -317,7 +317,7 @@ class account_coda_import(osv.osv_memory):
|
|||
elif line[1] == '2':
|
||||
# movement data record 2.2
|
||||
if coda_statement['coda_statement_lines'][st_line_seq]['ref'] != line[2:10]:
|
||||
err_string = _('\nCODA parsing error on movement data record 2.2, seq nr %s!' \
|
||||
err_string = _('\nCODA parsing error on movement data record 2.2, seq nr %s.' \
|
||||
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
|
||||
err_code = 'R2004'
|
||||
if batch:
|
||||
|
@ -329,7 +329,7 @@ class account_coda_import(osv.osv_memory):
|
|||
elif line[1] == '3':
|
||||
# movement data record 2.3
|
||||
if coda_statement['coda_statement_lines'][st_line_seq]['ref'] != line[2:10]:
|
||||
err_string = _('\nCODA parsing error on movement data record 2.3, seq nr %s!' \
|
||||
err_string = _('\nCODA parsing error on movement data record 2.3, seq nr %s.' \
|
||||
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
|
||||
err_code = 'R2005'
|
||||
if batch:
|
||||
|
@ -354,7 +354,7 @@ class account_coda_import(osv.osv_memory):
|
|||
st_line['counterparty_currency'] = counterparty_currency
|
||||
st_line['counterparty_name'] = counterparty_name
|
||||
if counterparty_currency not in [coda_bank['currency_name'], '']:
|
||||
err_string = _('\nCODA parsing error on movement data record 2.3, seq nr %s!' \
|
||||
err_string = _('\nCODA parsing error on movement data record 2.3, seq nr %s.' \
|
||||
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
|
||||
err_code = 'R2006'
|
||||
if batch:
|
||||
|
@ -386,7 +386,7 @@ class account_coda_import(osv.osv_memory):
|
|||
match = True
|
||||
else:
|
||||
coda_parsing_note += _("\n Bank Statement '%s' line '%s':" \
|
||||
"\n There is no invoice matching the Structured Communication '%s'!" \
|
||||
"\n There is no invoice matching the Structured Communication '%s'." \
|
||||
"\n Please verify and adjust the invoice and perform the import again or otherwise change the corresponding entry manually in the generated Bank Statement.") \
|
||||
% (coda_statement['name'], st_line['ref'], reference)
|
||||
# lookup partner via counterparty_number
|
||||
|
@ -402,7 +402,7 @@ class account_coda_import(osv.osv_memory):
|
|||
if not match and find_partner and bank_ids:
|
||||
if len(bank_ids) > 1:
|
||||
coda_parsing_note += _("\n Bank Statement '%s' line '%s':" \
|
||||
"\n No partner record assigned: There are multiple partners with the same Bank Account Number '%s'!" \
|
||||
"\n No partner record assigned: There are multiple partners with the same Bank Account Number '%s'." \
|
||||
"\n Please correct the configuration and perform the import again or otherwise change the corresponding entry manually in the generated Bank Statement.") \
|
||||
% (coda_statement['name'], st_line['ref'], counterparty_number)
|
||||
else:
|
||||
|
@ -418,13 +418,13 @@ class account_coda_import(osv.osv_memory):
|
|||
elif not match and find_partner:
|
||||
if counterparty_number:
|
||||
coda_parsing_note += _("\n Bank Statement '%s' line '%s':" \
|
||||
"\n The bank account '%s' is not defined for the partner '%s'!" \
|
||||
"\n The bank account '%s' is not defined for the partner '%s'." \
|
||||
"\n Please correct the configuration and perform the import again or otherwise change the corresponding entry manually in the generated Bank Statement.") \
|
||||
% (coda_statement['name'], st_line['ref'],
|
||||
counterparty_number, counterparty_name)
|
||||
else:
|
||||
coda_parsing_note += _("\n Bank Statement '%s' line '%s':" \
|
||||
"\n No matching partner record found!" \
|
||||
"\n No matching partner record found." \
|
||||
"\n Please adjust the corresponding entry manually in the generated Bank Statement.") \
|
||||
% (coda_statement['name'], st_line['ref'])
|
||||
st_line['account_id'] = awaiting_acc
|
||||
|
@ -433,7 +433,7 @@ class account_coda_import(osv.osv_memory):
|
|||
coda_statement['coda_statement_lines'] = coda_statement_lines
|
||||
else:
|
||||
# movement data record 2.x (x <> 1,2,3)
|
||||
err_string = _('\nMovement data records of type 2.%s are not supported !') % line[1]
|
||||
err_string = _('\nMovement data records of type 2.%s are not supported.') % line[1]
|
||||
err_code = 'R2007'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
|
@ -466,7 +466,7 @@ class account_coda_import(osv.osv_memory):
|
|||
info_line['trans_family'] = line[32:34]
|
||||
trans_family = filter(lambda x: (x['type'] == 'family') and (info_line['trans_family'] == x['code']), trans_code_table)
|
||||
if not trans_family:
|
||||
err_string = _('\nThe File contains an invalid CODA Transaction Family : %s!') % st_line['trans_family']
|
||||
err_string = _('\nThe File contains an invalid CODA Transaction Family : %s.') % st_line['trans_family']
|
||||
err_code = 'R3002'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
|
@ -490,7 +490,7 @@ class account_coda_import(osv.osv_memory):
|
|||
info_line['struct_comm_type'] = line[40:43]
|
||||
comm_type = filter(lambda x: info_line['struct_comm_type'] == x['code'], comm_type_table)
|
||||
if not comm_type:
|
||||
err_string = _('\nThe File contains an invalid Structured Communication Type : %s!') % info_line['struct_comm_type']
|
||||
err_string = _('\nThe File contains an invalid Structured Communication Type : %s.') % info_line['struct_comm_type']
|
||||
err_code = 'R3003'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
|
@ -505,7 +505,7 @@ class account_coda_import(osv.osv_memory):
|
|||
elif line[1] == '2':
|
||||
# information data record 3.2
|
||||
if coda_statement['coda_statement_lines'][st_line_seq]['ref'] != line[2:10]:
|
||||
err_string = _('\nCODA parsing error on information data record 3.2, seq nr %s!' \
|
||||
err_string = _('\nCODA parsing error on information data record 3.2, seq nr %s.' \
|
||||
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
|
||||
err_code = 'R3004'
|
||||
if batch:
|
||||
|
@ -516,7 +516,7 @@ class account_coda_import(osv.osv_memory):
|
|||
elif line[1] == '3':
|
||||
# information data record 3.3
|
||||
if coda_statement['coda_statement_lines'][st_line_seq]['ref'] != line[2:10]:
|
||||
err_string = _('\nCODA parsing error on information data record 3.3, seq nr %s!' \
|
||||
err_string = _('\nCODA parsing error on information data record 3.3, seq nr %s.' \
|
||||
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
|
||||
err_code = 'R3005'
|
||||
if batch:
|
||||
|
@ -585,7 +585,7 @@ class account_coda_import(osv.osv_memory):
|
|||
err_code = 'G0001'
|
||||
if batch:
|
||||
return (err_code, err_string)
|
||||
raise osv.except_osv(_('CODA Import failed !'), err_string)
|
||||
raise osv.except_osv(_('CODA Import failed.'), err_string)
|
||||
|
||||
nb_err = 0
|
||||
err_string = ''
|
||||
|
@ -627,12 +627,11 @@ class account_coda_import(osv.osv_memory):
|
|||
balance_start_check = journal.default_debit_account_id.balance
|
||||
else:
|
||||
nb_err += 1
|
||||
err_string += _('\nConfiguration Error in journal %s!' \
|
||||
'\nPlease verify the Default Debit and Credit Account settings.') % journal.name
|
||||
err_string += _('\nConfiguration Error!\nPlease verify the Default Debit and Credit Account settings in journal %s.') % journal.name
|
||||
break
|
||||
if balance_start_check <> statement['balance_start']:
|
||||
nb_err += 1
|
||||
err_string += _('\nThe CODA Statement %s Starting Balance (%.2f) does not correspond with the previous Closing Balance (%.2f) in journal %s!') \
|
||||
err_string += _('\nThe CODA Statement %s Starting Balance (%.2f) does not correspond with the previous Closing Balance (%.2f) in journal %s.') \
|
||||
% (statement['name'], statement['balance_start'], balance_start_check, journal.name)
|
||||
break
|
||||
|
||||
|
@ -853,7 +852,7 @@ class account_coda_import(osv.osv_memory):
|
|||
coda_st_obj.write(cr, uid, [coda_st_id], {}, context=context) # calculate balance
|
||||
st_balance = coda_st_obj.read(cr, uid, coda_st_id, ['balance_end', 'balance_end_real'], context=context)
|
||||
if st_balance['balance_end'] <> st_balance['balance_end_real']:
|
||||
err_string += _('\nIncorrect ending Balance in CODA Statement %s for Bank Account %s!') \
|
||||
err_string += _('\nIncorrect ending Balance in CODA Statement %s for Bank Account %s.') \
|
||||
% (statement['coda_seq_number'], (statement['acc_number'] + ' (' + statement['currency'] + ') - ' + statement['description']))
|
||||
if statement['type'] == 'normal':
|
||||
nb_err += 1
|
||||
|
@ -920,7 +919,7 @@ class account_coda_import(osv.osv_memory):
|
|||
if batch:
|
||||
err_code = 'G0002'
|
||||
return (err_code, err_string)
|
||||
raise osv.except_osv(_('CODA Import failed !'), err_string)
|
||||
raise osv.except_osv(_('CODA Import failed.'), err_string)
|
||||
|
||||
context.update({ 'bk_st_ids': bk_st_ids})
|
||||
model_data_ids = mod_obj.search(cr, uid, [('model', '=', 'ir.ui.view'), ('name', '=', 'account_coda_import_result_view')], context=context)
|
||||
|
|
|
@ -107,7 +107,7 @@ class account_move_line(osv.osv):
|
|||
if line.id not in line2bank and line.partner_id.bank_ids:
|
||||
line2bank[line.id] = line.partner_id.bank_ids[0].id
|
||||
else:
|
||||
raise osv.except_osv(_('Error !'), _('No partner defined on entry line'))
|
||||
raise osv.except_osv(_('Error!'), _('There is no partner defined on the entry line.'))
|
||||
return line2bank
|
||||
|
||||
_columns = {
|
||||
|
|
|
@ -71,8 +71,8 @@ class payment_order(osv.osv):
|
|||
#dead code
|
||||
def get_wizard(self, type):
|
||||
logger = netsvc.Logger()
|
||||
logger.notifyChannel("warning", netsvc.LOG_WARNING,
|
||||
"No wizard found for the payment type '%s'." % type)
|
||||
logger.notifyChannel("Warning!", netsvc.LOG_WARNING,
|
||||
"No wizard is found for the payment type '%s'." % type)
|
||||
return None
|
||||
|
||||
def _total(self, cursor, user, ids, name, args, context=None):
|
||||
|
|
|
@ -402,7 +402,7 @@ msgstr ""
|
|||
#. module: account_payment
|
||||
#: code:addons/account_payment/account_move_line.py:110
|
||||
#, python-format
|
||||
msgid "No partner defined on entry line"
|
||||
msgid "No partner is defined on entry line."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_payment
|
||||
|
|
|
@ -473,7 +473,7 @@ class account_voucher(osv.osv):
|
|||
tr_type = 'purchase'
|
||||
else:
|
||||
if not journal.default_credit_account_id or not journal.default_debit_account_id:
|
||||
raise osv.except_osv(_('Error !'), _('Please define default credit/debit accounts on the journal "%s" !') % (journal.name))
|
||||
raise osv.except_osv(_('Error!'), _('Please define default credit/debit accounts on the journal "%s".') % (journal.name))
|
||||
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
|
||||
tr_type = 'receipt'
|
||||
|
||||
|
@ -822,7 +822,7 @@ class account_voucher(osv.osv):
|
|||
def unlink(self, cr, uid, ids, context=None):
|
||||
for t in self.read(cr, uid, ids, ['state'], context=context):
|
||||
if t['state'] not in ('draft', 'cancel'):
|
||||
raise osv.except_osv(_('Invalid action !'), _('Cannot delete Voucher(s) which are already opened or paid !'))
|
||||
raise osv.except_osv(_('Invalid Action!'), _('Cannot delete voucher(s) which are already opened or paid.'))
|
||||
return super(account_voucher, self).unlink(cr, uid, ids, context=context)
|
||||
|
||||
def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'):
|
||||
|
@ -920,7 +920,7 @@ class account_voucher(osv.osv):
|
|||
name = seq_obj.next_by_id(cr, uid, voucher_brw.journal_id.sequence_id.id, context=context)
|
||||
else:
|
||||
raise osv.except_osv(_('Error!'),
|
||||
_('Please define a sequence on the journal !'))
|
||||
_('Please define a sequence on the journal.'))
|
||||
if not voucher_brw.reference:
|
||||
ref = name.replace('/','')
|
||||
else:
|
||||
|
@ -952,11 +952,11 @@ class account_voucher(osv.osv):
|
|||
if amount_residual > 0:
|
||||
account_id = line.voucher_id.company_id.expense_currency_exchange_account_id
|
||||
if not account_id:
|
||||
raise osv.except_osv(_('Warning'),_("Unable to create accounting entry for currency rate difference. You have to configure the field 'Income Currency Rate' on the company! "))
|
||||
raise osv.except_osv(_('Warning!'),_("First you have to configure the 'Income Currency Rate' on the company, then create accounting entry for currency rate difference."))
|
||||
else:
|
||||
account_id = line.voucher_id.company_id.income_currency_exchange_account_id
|
||||
if not account_id:
|
||||
raise osv.except_osv(_('Warning'),_("Unable to create accounting entry for currency rate difference. You have to configure the field 'Expense Currency Rate' on the company! "))
|
||||
raise osv.except_osv(_('Warning!'),_("First you have to configure the 'Expense Currency Rate' on the company, then create accounting entry for currency rate difference."))
|
||||
# Even if the amount_currency is never filled, we need to pass the foreign currency because otherwise
|
||||
# the receivable/payable account may have a secondary currency, which render this field mandatory
|
||||
account_currency_id = company_currency <> current_currency and current_currency or False
|
||||
|
@ -1516,7 +1516,7 @@ class account_bank_statement_line(osv.osv):
|
|||
return True
|
||||
|
||||
_constraints = [
|
||||
(_check_amount, 'The amount of the voucher must be the same amount as the one on the statement line', ['amount']),
|
||||
(_check_amount, 'The amount of the voucher must be the same amount as the one on the statement line.', ['amount']),
|
||||
]
|
||||
|
||||
_columns = {
|
||||
|
|
|
@ -81,7 +81,7 @@ msgstr ""
|
|||
#. module: account_voucher
|
||||
#: code:addons/account_voucher/account_voucher.py:797
|
||||
#, python-format
|
||||
msgid "Cannot delete Voucher(s) which are already opened or paid !"
|
||||
msgid "Cannot delete Voucher(s) which are already opened or paid."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_voucher
|
||||
|
@ -299,7 +299,7 @@ msgstr ""
|
|||
#: code:addons/account_voucher/account_voucher.py:927
|
||||
#: code:addons/account_voucher/account_voucher.py:931
|
||||
#, python-format
|
||||
msgid "Warning"
|
||||
msgid "Warning!"
|
||||
msgstr ""
|
||||
|
||||
#. module: account_voucher
|
||||
|
@ -520,7 +520,7 @@ msgstr ""
|
|||
#: constraint:account.bank.statement.line:0
|
||||
msgid ""
|
||||
"The amount of the voucher must be the same amount as the one on the "
|
||||
"statement line"
|
||||
"statement line."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_voucher
|
||||
|
@ -620,8 +620,8 @@ msgstr ""
|
|||
#: code:addons/account_voucher/account_voucher.py:927
|
||||
#, python-format
|
||||
msgid ""
|
||||
"Unable to create accounting entry for currency rate difference. You have to "
|
||||
"configure the field 'Income Currency Rate' on the company! "
|
||||
"First configure the 'Income Currency Rate' on the company,after that create "
|
||||
"accounting entry for currency rate difference."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_voucher
|
||||
|
@ -751,7 +751,7 @@ msgstr ""
|
|||
#. module: account_voucher
|
||||
#: code:addons/account_voucher/account_voucher.py:462
|
||||
#, python-format
|
||||
msgid "Please define default credit/debit accounts on the journal \"%s\" !"
|
||||
msgid "Please define default credit/debit accounts on the journal \"%s\"."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_voucher
|
||||
|
@ -762,7 +762,7 @@ msgstr ""
|
|||
#. module: account_voucher
|
||||
#: code:addons/account_voucher/account_voucher.py:895
|
||||
#, python-format
|
||||
msgid "Please define a sequence on the journal !"
|
||||
msgid "Please define a sequence on the journal."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_voucher
|
||||
|
@ -1059,8 +1059,8 @@ msgstr ""
|
|||
#: code:addons/account_voucher/account_voucher.py:931
|
||||
#, python-format
|
||||
msgid ""
|
||||
"Unable to create accounting entry for currency rate difference. You have to "
|
||||
"configure the field 'Expense Currency Rate' on the company! "
|
||||
"First configure the 'Expense Currency Rate' on the company,after that create "
|
||||
"accounting entry for currency rate difference."
|
||||
msgstr ""
|
||||
|
||||
#. module: account_voucher
|
||||
|
|
|
@ -425,7 +425,7 @@ class ir_model_fields_anonymize_wizard(osv.osv_memory):
|
|||
elif field_type == 'integer':
|
||||
anonymized_value = 0
|
||||
elif field_type in ['binary', 'many2many', 'many2one', 'one2many', 'reference']: # cannot anonymize these kind of fields
|
||||
msg = "Cannot anonymize fields of these types: binary, many2many, many2one, one2many, reference"
|
||||
msg = "Cannot anonymize fields of these types: binary, many2many, many2one, one2many, reference."
|
||||
self._raise_after_history_update(cr, uid, history_id, 'Error !', msg)
|
||||
|
||||
if anonymized_value is None:
|
||||
|
@ -453,9 +453,9 @@ class ir_model_fields_anonymize_wizard(osv.osv_memory):
|
|||
# add a result message in the wizard:
|
||||
msgs = ["Anonymization successful.",
|
||||
"",
|
||||
"Don't forget to save the resulting file to a safe place because you will not be able to revert the anonymization without this file.",
|
||||
"Donot forget to save the resulting file to a safe place because you will not be able to revert the anonymization without this file.",
|
||||
"",
|
||||
"This file is also stored in the %s directory. The absolute file path is: %s",
|
||||
"This file is also stored in the %s directory. The absolute file path is: %s.",
|
||||
]
|
||||
msg = '\n'.join(msgs) % (dirpath, abs_filepath)
|
||||
|
||||
|
@ -515,7 +515,7 @@ class ir_model_fields_anonymize_wizard(osv.osv_memory):
|
|||
wizards = self.browse(cr, uid, ids, context=context)
|
||||
for wizard in wizards:
|
||||
if not wizard.file_import:
|
||||
msg = "The anonymization export file was not supplied. It is not possible to reverse the anonymization process without this file."
|
||||
msg = "It is not possible to reverse the anonymization process without supplying anonymization export file."
|
||||
self._raise_after_history_update(cr, uid, history_id, 'Error !', msg)
|
||||
|
||||
# reverse the anonymization:
|
||||
|
|
|
@ -327,7 +327,7 @@ the rule to mark CC(mail to any other person defined in actions)."),
|
|||
reply_to = emailfrom
|
||||
if not emailfrom:
|
||||
raise osv.except_osv(_('Error!'),
|
||||
_("No Email ID Found for your Company address!"))
|
||||
_("No email ID found for your company address."))
|
||||
return mail_message.schedule_with_attach(cr, uid, emailfrom, emails, name, body, model='base.action.rule', reply_to=reply_to, res_id=obj.id)
|
||||
|
||||
|
||||
|
@ -490,7 +490,7 @@ the rule to mark CC(mail to any other person defined in actions)."),
|
|||
return True
|
||||
|
||||
_constraints = [
|
||||
(_check_mail, 'Error: The mail is not well formated', ['act_mail_body']),
|
||||
(_check_mail, 'Error ! The mail is not well formated.', ['act_mail_body']),
|
||||
]
|
||||
|
||||
base_action_rule()
|
||||
|
|
|
@ -286,7 +286,7 @@ msgstr ""
|
|||
#. module: base_action_rule
|
||||
#: code:addons/base_action_rule/base_action_rule.py:329
|
||||
#, python-format
|
||||
msgid "No Email ID Found for your Company address!"
|
||||
msgid "No Email ID is found for your Company address!"
|
||||
msgstr ""
|
||||
|
||||
#. module: base_action_rule
|
||||
|
@ -332,7 +332,7 @@ msgstr ""
|
|||
|
||||
#. module: base_action_rule
|
||||
#: constraint:base.action.rule:0
|
||||
msgid "Error: The mail is not well formated"
|
||||
msgid "Error ! The mail is not well formated."
|
||||
msgstr ""
|
||||
|
||||
#. module: base_action_rule
|
||||
|
|
|
@ -413,7 +413,7 @@ property or property parameter."),
|
|||
cal = vobject.iCalendar()
|
||||
event = cal.add('vevent')
|
||||
if not event_obj.date_deadline or not event_obj.date:
|
||||
raise osv.except_osv(_('Warning !'),_("Couldn't Invite because date is not specified!"))
|
||||
raise osv.except_osv(_('Warning!'),_("First you have to specify the date of the invitation."))
|
||||
event.add('created').value = ics_datetime(time.strftime('%Y-%m-%d %H:%M:%S'))
|
||||
event.add('dtstart').value = ics_datetime(event_obj.date)
|
||||
event.add('dtend').value = ics_datetime(event_obj.date_deadline)
|
||||
|
@ -1003,9 +1003,9 @@ class calendar_event(osv.osv):
|
|||
for datas in self.read(cr, uid, ids, ['id','byday','recurrency', 'month_list','end_date', 'rrule_type', 'select1', 'interval', 'count', 'end_type', 'mo', 'tu', 'we', 'th', 'fr', 'sa', 'su', 'exrule', 'day', 'week_list' ], context=context):
|
||||
event = datas['id']
|
||||
if datas.get('interval', 0) < 0:
|
||||
raise osv.except_osv(_('Warning!'), _('Interval cannot be negative'))
|
||||
raise osv.except_osv(_('Warning!'), _('Interval cannot be negative.'))
|
||||
if datas.get('count', 0) < 0:
|
||||
raise osv.except_osv(_('Warning!'), _('Count cannot be negative'))
|
||||
raise osv.except_osv(_('Warning!'), _('Count cannot be negative.'))
|
||||
if datas['recurrency']:
|
||||
result[event] = self.compute_rule_string(datas)
|
||||
else:
|
||||
|
@ -1194,7 +1194,7 @@ rule or repeating pattern of time to exclude from the recurring rule."),
|
|||
def get_month_string(freq, datas):
|
||||
if freq == 'monthly':
|
||||
if datas.get('select1')=='date' and (datas.get('day') < 1 or datas.get('day') > 31):
|
||||
raise osv.except_osv(_('Error!'), ("Please select proper Day of month"))
|
||||
raise osv.except_osv(_('Error!'), ("Please select a proper day of the month."))
|
||||
|
||||
if datas.get('select1')=='day':
|
||||
return ';BYDAY=' + datas.get('byday') + datas.get('week_list')
|
||||
|
@ -1411,7 +1411,7 @@ rule or repeating pattern of time to exclude from the recurring rule."),
|
|||
context = {}
|
||||
|
||||
if 'date' in groupby:
|
||||
raise osv.except_osv(_('Warning !'), _('Group by date not supported, use the calendar view instead'))
|
||||
raise osv.except_osv(_('Warning!'), _('Group by date is not supported, use the calendar view instead.'))
|
||||
virtual_id = context.get('virtual_id', True)
|
||||
context.update({'virtual_id': False})
|
||||
res = super(calendar_event, self).read_group(cr, uid, domain, fields, groupby, offset=offset, limit=limit, context=context, orderby=orderby)
|
||||
|
|
|
@ -108,7 +108,7 @@ msgstr ""
|
|||
#. module: base_calendar
|
||||
#: code:addons/base_calendar/base_calendar.py:1006
|
||||
#, python-format
|
||||
msgid "Count cannot be negative"
|
||||
msgid "Count cannot be negative."
|
||||
msgstr ""
|
||||
|
||||
#. module: base_calendar
|
||||
|
@ -261,7 +261,7 @@ msgstr ""
|
|||
#. module: base_calendar
|
||||
#: code:addons/base_calendar/base_calendar.py:1004
|
||||
#, python-format
|
||||
msgid "Interval cannot be negative"
|
||||
msgid "Interval cannot be negative."
|
||||
msgstr ""
|
||||
|
||||
#. module: base_calendar
|
||||
|
@ -273,7 +273,7 @@ msgstr ""
|
|||
#. module: base_calendar
|
||||
#: code:addons/base_calendar/wizard/base_calendar_invite_attendee.py:143
|
||||
#, python-format
|
||||
msgid "%s must have an email address to send mail"
|
||||
msgid "%s must have an email address to send mail."
|
||||
msgstr ""
|
||||
|
||||
#. module: base_calendar
|
||||
|
@ -407,7 +407,7 @@ msgstr ""
|
|||
#. module: base_calendar
|
||||
#: code:addons/base_calendar/base_calendar.py:1411
|
||||
#, python-format
|
||||
msgid "Group by date not supported, use the calendar view instead"
|
||||
msgid "Group by date is not supported, use the calendar view instead."
|
||||
msgstr ""
|
||||
|
||||
#. module: base_calendar
|
||||
|
@ -1405,7 +1405,7 @@ msgstr ""
|
|||
#. module: base_calendar
|
||||
#: code:addons/base_calendar/base_calendar.py:418
|
||||
#, python-format
|
||||
msgid "Couldn't Invite because date is not specified!"
|
||||
msgid "First specified the date for Invitation."
|
||||
msgstr ""
|
||||
|
||||
#. module: base_calendar
|
||||
|
|
|
@ -94,7 +94,7 @@ send an Email to Invited Person')
|
|||
if type == 'internal':
|
||||
|
||||
if not datas.get('user_ids'):
|
||||
raise osv.except_osv(_('Error!'), ("Please select any User"))
|
||||
raise osv.except_osv(_('Error!'), ("Please select any user."))
|
||||
for user_id in datas.get('user_ids'):
|
||||
user = user_obj.browse(cr, uid, user_id)
|
||||
res = {
|
||||
|
@ -141,7 +141,7 @@ send an Email to Invited Person')
|
|||
if not mail_to:
|
||||
name = map(lambda x: x[1], filter(lambda x: type==x[0], \
|
||||
self._columns['type'].selection))
|
||||
raise osv.except_osv(_('Error!'), _("%s must have an email address to send mail") %(name[0]))
|
||||
raise osv.except_osv(_('Error!'), _("%s must have an email address to send mail.") %(name[0]))
|
||||
att_obj._send_mail(cr, uid, attendees, mail_to, \
|
||||
email_from = current_user.user_email or tools.config.get('email_from', False))
|
||||
|
||||
|
|
|
@ -139,7 +139,7 @@ class users(osv.osv):
|
|||
|
||||
def set_pw(self, cr, uid, id, name, value, args, context):
|
||||
if not value:
|
||||
raise osv.except_osv(_('Error'), _("Please specify the password !"))
|
||||
raise osv.except_osv(_('Error!'), _("You have to specify a password."))
|
||||
|
||||
obj = pooler.get_pool(cr.dbname).get('res.users')
|
||||
if not hasattr(obj, "_salt_cache"):
|
||||
|
@ -181,8 +181,8 @@ class users(osv.osv):
|
|||
cr = pooler.get_db(db).cursor()
|
||||
return self._login(cr, db, login, password)
|
||||
except Exception:
|
||||
_logger.exception('Could not authenticate')
|
||||
return Exception('Access Denied')
|
||||
_logger.exception('Cannot authenticate.')
|
||||
return Exception('Access denied.')
|
||||
finally:
|
||||
if cr is not None:
|
||||
cr.close()
|
||||
|
@ -296,4 +296,5 @@ class users(osv.osv):
|
|||
return pw
|
||||
|
||||
users()
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||
|
|
|
@ -39,6 +39,6 @@ msgstr ""
|
|||
#. module: base_crypt
|
||||
#: code:addons/base_crypt/crypt.py:140
|
||||
#, python-format
|
||||
msgid "Error"
|
||||
msgid "Error!"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
@ -33,7 +33,7 @@ def _create_yaml(self, cr, uid, data, context=None):
|
|||
try:
|
||||
res_xml = mod.generate_yaml(cr, uid)
|
||||
except Exception, e:
|
||||
raise osv.except_osv(_('Error'),_(str(e)))
|
||||
raise osv.except_osv(_('Error!'),_(str(e)))
|
||||
return {
|
||||
'yaml_file': base64.encodestring(res_xml),
|
||||
}
|
||||
|
|
|
@ -341,7 +341,7 @@ if __name__ == "__main__":
|
|||
parser.add_option("-o", "--output", dest="output", default='.', help="directory of image output")
|
||||
(opt, args) = parser.parse_args()
|
||||
if len(args) != 1:
|
||||
parser.error("incorrect number of arguments")
|
||||
parser.error("Incorrect number of arguments.")
|
||||
|
||||
import sys
|
||||
|
||||
|
|
|
@ -145,7 +145,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
|
|||
self.aSearchResult =self.sock.execute( database, uid, self.password, self.dModel[modelSelectedItem], 'name_search', self.win.getEditText("txtSearchName"))
|
||||
self.win.removeListBoxItems("lstResource", 0, self.win.getListBoxItemCount("lstResource"))
|
||||
if self.aSearchResult == []:
|
||||
ErrorDialog("No search result found !!!", "", "Search ERROR" )
|
||||
ErrorDialog("No search result found.", "", "Search Error.")
|
||||
return
|
||||
|
||||
for result in self.aSearchResult:
|
||||
|
@ -172,7 +172,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
|
|||
docinfo = oDoc2.getDocumentInfo()
|
||||
|
||||
if oDoc2.getURL() == "":
|
||||
ErrorDialog("Please save your file", "", "Saving ERROR" )
|
||||
ErrorDialog("You should save your file.", "", "Saving Error.")
|
||||
return None
|
||||
|
||||
url = oDoc2.getURL()
|
||||
|
@ -180,7 +180,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
|
|||
url = self.doc2pdf(url[7:])
|
||||
|
||||
if url == None:
|
||||
ErrorDialog( "Ploblem in creating PDF", "", "PDF Error" )
|
||||
ErrorDialog( "Problem in creating PDF.", "", "PDF Error.")
|
||||
return None
|
||||
|
||||
url = url[7:]
|
||||
|
@ -193,7 +193,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
|
|||
docinfo = oDoc2.getDocumentInfo()
|
||||
|
||||
if self.win.getListBoxSelectedItem("lstResourceType") == "":
|
||||
ErrorDialog("Please select resource type", "", "Selection ERROR" )
|
||||
ErrorDialog("You have to select a resource type.", "", "Selection Error." )
|
||||
return
|
||||
|
||||
res = self.send_attachment( docinfo.getUserFieldValue(3), docinfo.getUserFieldValue(2) )
|
||||
|
@ -201,11 +201,11 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
|
|||
|
||||
def btnOkWithInformation_clicked(self,oActionEvent):
|
||||
if self.win.getListBoxSelectedItem("lstResourceType") == "":
|
||||
ErrorDialog( "Please select resource type", "", "Selection ERROR" )
|
||||
ErrorDialog( "You have to select a resource type.", "", "Selection Error." )
|
||||
return
|
||||
|
||||
if self.win.getListBoxSelectedItem("lstResource") == "" or self.win.getListBoxSelectedItem("lstmodel") == "":
|
||||
ErrorDialog("Please select Model and Resource","","Selection ERROR")
|
||||
ErrorDialog("You have to select Model and Resource.", "", "Selection Error.")
|
||||
return
|
||||
|
||||
resourceid = None
|
||||
|
@ -215,7 +215,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
|
|||
break
|
||||
|
||||
if resourceid == None:
|
||||
ErrorDialog("No resource selected !!!", "", "Resource ERROR" )
|
||||
ErrorDialog("No resource is selected.", "", "Resource Error." )
|
||||
return
|
||||
|
||||
res = self.send_attachment( self.dModel[self.win.getListBoxSelectedItem('lstmodel')], resourceid )
|
||||
|
|
|
@ -125,7 +125,7 @@ class Change( unohelper.Base, XJobExecutor ):
|
|||
import traceback,sys
|
||||
info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
|
||||
self.logobj.log_write('ServerParameter', LOG_ERROR, info)
|
||||
ErrorDialog("Connection to server fail. please check your Server Parameter", "", "Error")
|
||||
ErrorDialog("Connection to server is fail. Please check your Server Parameter.", "", "Error!")
|
||||
self.win.endExecute()
|
||||
|
||||
def btnCancel_clicked(self,oActionEvent):
|
||||
|
|
|
@ -106,7 +106,7 @@ class ExportToRML( unohelper.Base, XJobExecutor ):
|
|||
import traceback,sys
|
||||
info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
|
||||
self.logobj.log_write('ExportToRML',LOG_ERROR, info)
|
||||
ErrorDialog("Can't save the file to the hard drive.", "Exception: %s" % e, "Error" )
|
||||
ErrorDialog("Cannot save the file to the hard drive.", "Exception: %s." % e, "Error" )
|
||||
|
||||
def GetAFileName(self):
|
||||
sFilePickerArgs = Array(10)
|
||||
|
|
|
@ -103,7 +103,7 @@ class Expression(unohelper.Base, XJobExecutor ):
|
|||
tableText.insertTextContent(cursor,oInputList,False)
|
||||
self.win.endExecute()
|
||||
else:
|
||||
ErrorDialog("Please Fill appropriate data in Name field or \nExpression field")
|
||||
ErrorDialog("Please fill appropriate data in Name field or in Expression field.")
|
||||
|
||||
def btnCancel_clicked( self, oActionEvent ):
|
||||
self.win.endExecute()
|
||||
|
|
|
@ -279,7 +279,7 @@ class Fields(unohelper.Base, XJobExecutor ):
|
|||
widget.insertTextContent(cursor,oInputList,False)
|
||||
self.win.endExecute()
|
||||
else:
|
||||
ErrorDialog("Please Fill appropriate data in Name field \nor select perticular value from the list of fields")
|
||||
ErrorDialog("Please fill appropriate data in Name field \nor select particular value from the list of fields.")
|
||||
|
||||
def btnCancel_clicked( self, oActionEvent ):
|
||||
self.win.endExecute()
|
||||
|
|
|
@ -64,7 +64,6 @@ if __name__<>'package':
|
|||
database="test"
|
||||
uid = 3
|
||||
|
||||
#
|
||||
class ModifyExistingReport(unohelper.Base, XJobExecutor):
|
||||
def __init__(self,ctx):
|
||||
self.ctx = ctx
|
||||
|
@ -93,7 +92,7 @@ class ModifyExistingReport(unohelper.Base, XJobExecutor):
|
|||
|
||||
ids = self.sock.execute(database, uid, self.password, 'ir.module.module', 'search', [('name','=','base_report_designer'),('state', '=', 'installed')])
|
||||
if not len(ids):
|
||||
ErrorDialog("Please Install base_report_designer module", "", "Module Uninstalled Error")
|
||||
ErrorDialog("Please install base_report_designer module.", "", "Module Uninstalled Error!")
|
||||
exit(1)
|
||||
|
||||
ids = self.sock.execute(database, uid, self.password, 'ir.actions.report.xml', 'search', [('report_xsl', '=', False),('report_xml', '=', False)])
|
||||
|
@ -167,11 +166,11 @@ class ModifyExistingReport(unohelper.Base, XJobExecutor):
|
|||
if oDoc2.hasLocation() and not oDoc2.isReadonly():
|
||||
oDoc2.store()
|
||||
|
||||
ErrorDialog("Download is Completed","Your file has been placed here :\n"+ fp_name,"Download Message")
|
||||
ErrorDialog("Download is completed.","Your file has been placed here :\n ."+ fp_name,"Download Message !")
|
||||
obj=Logger()
|
||||
obj.log_write('Modify Existing Report',LOG_INFO, ':successful download report %s using database %s' % (self.report_with_id[selectedItemPos][2], database))
|
||||
except Exception, e:
|
||||
ErrorDialog("Report has not been downloaded", "Report: %s\nDetails: %s" % ( fp_name, str(e) ),"Download Message")
|
||||
ErrorDialog("The report could not be downloaded.", "Report: %s\nDetails: %s" % ( fp_name, str(e) ),"Download Message !")
|
||||
import traceback,sys
|
||||
info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
|
||||
self.logobj.log_write('ModifyExistingReport', LOG_ERROR, info)
|
||||
|
@ -197,11 +196,11 @@ class ModifyExistingReport(unohelper.Base, XJobExecutor):
|
|||
else :
|
||||
pass
|
||||
if temp:
|
||||
ErrorDialog("Report","Report has been Delete:\n "+name,"Message")
|
||||
self.logobj.log_write('Delete Report',LOG_INFO, ':successful delete report %s using database %s' % (name, database))
|
||||
ErrorDialog("Report", "The report could not be deleted:\n"+name+".", "Message !")
|
||||
self.logobj.log_write('Delete Report', LOG_INFO, ': report %s successfully deleted using database %s.' % (name, database))
|
||||
|
||||
else:
|
||||
ErrorDialog("Report","Report has not Delete:\n"+name," Message")
|
||||
ErrorDialog("Report", "The report could not be deleted:\n"+name+".", "Message !")
|
||||
self.win.endExecute()
|
||||
|
||||
|
||||
|
|
|
@ -288,7 +288,7 @@ class RepeatIn( unohelper.Base, XJobExecutor ):
|
|||
widget.insertTextContent(cursor,oInputList,False)
|
||||
self.win.endExecute()
|
||||
else:
|
||||
ErrorDialog("Please Fill appropriate data in Object Field or Name field \nor select perticular value from the list of fields")
|
||||
ErrorDialog("Please fill appropriate data in Object Field or Name field \nor select particular value from the list of fields.")
|
||||
|
||||
def btnCancel_clicked( self, oActionEvent ):
|
||||
self.win.endExecute()
|
||||
|
|
|
@ -65,8 +65,6 @@ if __name__<>'package':
|
|||
database="report"
|
||||
uid = 3
|
||||
|
||||
#
|
||||
#
|
||||
class SendtoServer(unohelper.Base, XJobExecutor):
|
||||
Kind = {
|
||||
'PDF' : 'pdf',
|
||||
|
@ -93,7 +91,7 @@ class SendtoServer(unohelper.Base, XJobExecutor):
|
|||
|
||||
self.ids = self.sock.execute(database, uid, self.password, 'ir.module.module', 'search', [('name','=','base_report_designer'),('state', '=', 'installed')])
|
||||
if not len(self.ids):
|
||||
ErrorDialog("Please Install base_report_designer module", "", "Module Uninstalled Error")
|
||||
ErrorDialog("Please install base_report_designer module.", "", "Module Uninstalled Error!")
|
||||
exit(1)
|
||||
|
||||
report_name = ""
|
||||
|
@ -172,8 +170,8 @@ class SendtoServer(unohelper.Base, XJobExecutor):
|
|||
}
|
||||
res = self.sock.execute(database, uid, self.password, 'ir.values' , 'create',rec )
|
||||
else :
|
||||
ErrorDialog(" Report Name is all ready given !!!\n\n\n Please specify other Name","","Report Name")
|
||||
self.logobj.log_write('SendToServer',LOG_WARNING, ':Report name all ready given DB %s' % (database))
|
||||
ErrorDialog("This name is already used for another report.\nPlease try with another name.", "", "Error!")
|
||||
self.logobj.log_write('SendToServer',LOG_WARNING, ': report name already used DB %s' % (database))
|
||||
self.win.endExecute()
|
||||
except Exception,e:
|
||||
import traceback,sys
|
||||
|
@ -205,8 +203,8 @@ class SendtoServer(unohelper.Base, XJobExecutor):
|
|||
self.logobj.log_write('SendToServer',LOG_INFO, ':Report %s successfully send using %s'%(params['name'],database))
|
||||
self.win.endExecute()
|
||||
else:
|
||||
ErrorDialog("Either Report Name or Technical Name is blank !!!\nPlease specify appropriate Name","","Blank Field ERROR")
|
||||
self.logobj.log_write('SendToServer',LOG_WARNING, ':Either Report Name or Technical Name is blank')
|
||||
ErrorDialog("Either report name or technical name is empty.\nPlease specify an appropriate name.", "", "Error!")
|
||||
self.logobj.log_write('SendToServer',LOG_WARNING, ': either report name or technical name is empty.')
|
||||
self.win.endExecute()
|
||||
|
||||
def getID(self):
|
||||
|
|
|
@ -100,8 +100,8 @@ class ServerParameter( unohelper.Base, XJobExecutor ):
|
|||
# sValue="Could not connect to the server!"
|
||||
# self.lstDatabase.addItem("Could not connect to the server!",0)
|
||||
elif res == 0:
|
||||
sValue="No Database found !!!"
|
||||
self.lstDatabase.addItem("No Database found !!!",0)
|
||||
sValue="No database found !"
|
||||
self.lstDatabase.addItem("No database found !",0)
|
||||
else:
|
||||
self.win.addComboListBox("lstDatabase", -2,28,123,15, True)
|
||||
self.lstDatabase = self.win.getControl( "lstDatabase" )
|
||||
|
@ -129,12 +129,12 @@ class ServerParameter( unohelper.Base, XJobExecutor ):
|
|||
self.sock=RPCSession(url)
|
||||
UID = self.sock.login(sDatabase,sLogin,sPassword)
|
||||
if not UID or UID==-1 :
|
||||
ErrorDialog("Connection Refuse...","Please enter valid Login/Password")
|
||||
ErrorDialog("Connection denied.", "Please enter valid login/password.")
|
||||
# self.win.endExecute()
|
||||
ids_module =self.sock.execute(sDatabase, UID, sPassword, 'ir.module.module', 'search', [('name','=','base_report_designer'),('state', '=', 'installed')])
|
||||
if not len(ids_module):
|
||||
ErrorDialog("Please Install base_report_designer module", "", "Module Uninstalled Error")
|
||||
self.logobj.log_write('Module Not Found',LOG_WARNING, ':base_report_designer not install in database %s' % (sDatabase))
|
||||
ErrorDialog("Please install base_report_designer module.", "", "Module Uninstalled Error!")
|
||||
self.logobj.log_write('Module not found.',LOG_WARNING, ': base_report_designer not installed in database %s.' % (sDatabase))
|
||||
#self.win.endExecute()
|
||||
else:
|
||||
desktop=getDesktop()
|
||||
|
@ -153,7 +153,7 @@ class ServerParameter( unohelper.Base, XJobExecutor ):
|
|||
#docinfo.setUserFieldValue(2,self.win.getListBoxSelectedItem("lstDatabase"))
|
||||
#docinfo.setUserFieldValue(3,"")
|
||||
|
||||
ErrorDialog(" You can start creating your report in \n \t the current document.","After Creating sending to the server.","Message")
|
||||
ErrorDialog("You can start creating your report in the current document.", "After creating, sending to the server.", "Message !")
|
||||
self.logobj.log_write('successful login',LOG_INFO, ': successful login from %s using database %s' % (sLogin, sDatabase))
|
||||
self.win.endExecute()
|
||||
|
||||
|
|
|
@ -175,7 +175,7 @@ class AddLang(unohelper.Base, XJobExecutor ):
|
|||
res = self.sock.execute(database, uid, self.password, sObject , 'read',[ids[0]])
|
||||
self.win.setEditText("txtUName",res[0][sMain[sMain.rfind("/")+1:]])
|
||||
else:
|
||||
ErrorDialog("Please select the Language Field")
|
||||
ErrorDialog("Please select a language.")
|
||||
|
||||
except:
|
||||
import traceback;traceback.print_exc()
|
||||
|
@ -261,7 +261,7 @@ class AddLang(unohelper.Base, XJobExecutor ):
|
|||
|
||||
self.win.endExecute()
|
||||
else:
|
||||
ErrorDialog("Please Fill appropriate data in Name field \nor select perticular value from the list of fields")
|
||||
ErrorDialog("Please fill appropriate data in name field \nor select particular value from the list of fields.")
|
||||
|
||||
def btnCancel_clicked( self, oActionEvent ):
|
||||
self.win.endExecute()
|
||||
|
|
|
@ -56,14 +56,14 @@ class mysocket:
|
|||
while totalsent < size:
|
||||
sent = self.sock.send(msg[totalsent:])
|
||||
if sent == 0:
|
||||
raise RuntimeError, "socket connection broken"
|
||||
raise RuntimeError, "Socket connection broken."
|
||||
totalsent = totalsent + sent
|
||||
def myreceive(self):
|
||||
buf=''
|
||||
while len(buf) < 8:
|
||||
chunk = self.sock.recv(8 - len(buf))
|
||||
if chunk == '':
|
||||
raise RuntimeError, "socket connection broken"
|
||||
raise RuntimeError, "Socket connection broken."
|
||||
buf += chunk
|
||||
size = int(buf)
|
||||
buf = self.sock.recv(1)
|
||||
|
@ -75,7 +75,7 @@ class mysocket:
|
|||
while len(msg) < size:
|
||||
chunk = self.sock.recv(size-len(msg))
|
||||
if chunk == '':
|
||||
raise RuntimeError, "socket connection broken"
|
||||
raise RuntimeError, "Socket connection broken."
|
||||
msg = msg + chunk
|
||||
msgio = cStringIO.StringIO(msg)
|
||||
unpickler = cPickle.Unpickler(msgio)
|
||||
|
@ -90,6 +90,4 @@ class mysocket:
|
|||
return res[0]
|
||||
|
||||
|
||||
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||
|
|
|
@ -100,8 +100,7 @@ class modify(unohelper.Base, XJobExecutor ):
|
|||
RepeatIn( start_group1, group2, stop_group1, item, True )
|
||||
else:
|
||||
ErrorDialog(
|
||||
"Please place your cursor at begaining of field \n"
|
||||
"which you want to modify",""
|
||||
"Please place your cursor at beginning of field that you want to modify.",""
|
||||
)
|
||||
|
||||
else:
|
||||
|
|
|
@ -202,7 +202,7 @@ class base_stage(object):
|
|||
if case.section_id.parent_id.user_id:
|
||||
data['user_id'] = case.section_id.parent_id.user_id.id
|
||||
else:
|
||||
raise osv.except_osv(_('Error !'), _('You can not escalate, you are already at the top level regarding your sales-team category.'))
|
||||
raise osv.except_osv(_('Error!'), _("You are already at the top level of your sales-team category.\nTherefore you cannot escalate furthermore."))
|
||||
self.write(cr, uid, [case.id], data, context=context)
|
||||
case.case_escalate_send_note(case.section_id.parent_id, context=context)
|
||||
cases = self.browse(cr, uid, ids, context=context)
|
||||
|
|
|
@ -108,7 +108,7 @@ class res_partner(osv.osv):
|
|||
def button_check_vat(self, cr, uid, ids, context=None):
|
||||
if not self.check_vat(cr, uid, ids, context=context):
|
||||
msg = self._construct_constraint_msg(cr, uid, ids, context=context)
|
||||
raise osv.except_osv(_('Error'), msg)
|
||||
raise osv.except_osv(_('Error!'), msg)
|
||||
|
||||
def check_vat(self, cr, uid, ids, context=None):
|
||||
user_company = self.pool.get('res.users').browse(cr, uid, uid).company_id
|
||||
|
|
|
@ -252,20 +252,20 @@ class node_calendar(nodes.node_class):
|
|||
elif cfe.getAttribute('end'):
|
||||
_log.warning("Ignore end.. ")
|
||||
else:
|
||||
_log.debug("Unknown comp-filter: %s", cfe.localName)
|
||||
_log.debug("Unknown comp-filter: %s.", cfe.localName)
|
||||
else:
|
||||
_log.debug("Unknown comp-filter: %s", vevent_filter.localName)
|
||||
_log.debug("Unknown comp-filter: %s.", vevent_filter.localName)
|
||||
else:
|
||||
_log.debug("Unknown filter element: %s", vcalendar_filter.localName)
|
||||
_log.debug("Unknown filter element: %s.", vcalendar_filter.localName)
|
||||
else:
|
||||
_log.debug("Unknown calendar-query element: %s", filter_child.localName)
|
||||
_log.debug("Unknown calendar-query element: %s.", filter_child.localName)
|
||||
return res
|
||||
elif filters.localName == 'calendar-multiget':
|
||||
# this is not the place to process, as it wouldn't support multi-level
|
||||
# hrefs. So, the code is moved to document_webdav/dav_fs.py
|
||||
pass
|
||||
else:
|
||||
_log.debug("Unknown element in REPORT: %s", filters.localName)
|
||||
_log.debug("Unknown element in REPORT: %s.", filters.localName)
|
||||
return res
|
||||
|
||||
def children(self, cr, domain=None):
|
||||
|
@ -341,7 +341,7 @@ class node_calendar(nodes.node_class):
|
|||
if line.name == ourcal.type:
|
||||
line_id = line.id
|
||||
break
|
||||
assert line_id, "Calendar #%d must have at least one %s line" % \
|
||||
assert line_id, "Calendar #%d must have at least one %s line." % \
|
||||
(ourcal.id, ourcal.type)
|
||||
if path.endswith('.ics'):
|
||||
path = path[:-4]
|
||||
|
|
|
@ -729,9 +729,9 @@ class Calendar(CalDAV, osv.osv):
|
|||
elif child.name.upper() in ('PRODID', 'VERSION'):
|
||||
pass
|
||||
elif child.name.upper().startswith('X-'):
|
||||
_logger.debug("skipping custom node %s", child.name)
|
||||
_logger.debug("Skipping custom node %s.", child.name)
|
||||
else:
|
||||
_logger.debug("skipping node %s", child.name)
|
||||
_logger.debug("Skipping node %s.", child.name)
|
||||
|
||||
res = []
|
||||
for obj_name in list(set(objs)):
|
||||
|
@ -786,7 +786,7 @@ class basic_calendar_line(osv.osv):
|
|||
res = cr.fetchone()
|
||||
if res:
|
||||
if res[0] > 0:
|
||||
raise osv.except_osv(_('Warning !'), _('Can not create line "%s" more than once') % (vals.get('name')))
|
||||
raise osv.except_osv(_('Warning!'), _('Cannot create line "%s" more than once.') % (vals.get('name')))
|
||||
return super(basic_calendar_line, self).create(cr, uid, vals, context=context)
|
||||
|
||||
basic_calendar_line()
|
||||
|
@ -856,7 +856,7 @@ class basic_calendar_fields(osv.osv):
|
|||
}
|
||||
|
||||
_sql_constraints = [
|
||||
( 'name_type_uniq', 'UNIQUE(name, type_id)', 'Can not map a field more than once'),
|
||||
( 'name_type_uniq', 'UNIQUE(name, type_id)', 'Cannot map a field more than once.'),
|
||||
]
|
||||
|
||||
def check_line(self, cr, uid, vals, name, context=None):
|
||||
|
@ -876,7 +876,7 @@ class basic_calendar_fields(osv.osv):
|
|||
line = line_obj.browse(cr, uid, l_id, context=context)[0]
|
||||
line_rel = line.object_id.model
|
||||
if (relation != 'NULL') and (not relation == line_rel):
|
||||
raise osv.except_osv(_('Warning !'), _('Please provide proper configuration of "%s" in Calendar Lines') % (name))
|
||||
raise osv.except_osv(_('Warning!'), _('Please provide proper configuration of "%s" in Calendar Lines.') % (name))
|
||||
return True
|
||||
|
||||
def create(self, cr, uid, vals, context=None):
|
||||
|
@ -1192,7 +1192,7 @@ class Alarm(CalDAV, osv.osv_memory):
|
|||
elif isinstance(child.value, datetime):
|
||||
# TODO
|
||||
# remember, spec says this datetime is in UTC
|
||||
raise NotImplementedError("we cannot parse absolute triggers")
|
||||
raise NotImplementedError("Cannot parse absolute triggers.")
|
||||
if not seconds:
|
||||
duration = abs(days)
|
||||
related = days > 0 and 'after' or 'before'
|
||||
|
|
|
@ -87,7 +87,7 @@ msgstr ""
|
|||
|
||||
#. module: caldav
|
||||
#: sql_constraint:basic.calendar.fields:0
|
||||
msgid "Can not map a field more than once"
|
||||
msgid "Cannot map a field more than once."
|
||||
msgstr ""
|
||||
|
||||
#. module: caldav
|
||||
|
@ -164,7 +164,7 @@ msgstr ""
|
|||
#. module: caldav
|
||||
#: code:addons/caldav/calendar.py:879
|
||||
#, python-format
|
||||
msgid "Please provide proper configuration of \"%s\" in Calendar Lines"
|
||||
msgid "Please provide proper configuration of \"%s\" in Calendar Lines."
|
||||
msgstr ""
|
||||
|
||||
#. module: caldav
|
||||
|
@ -207,7 +207,7 @@ msgstr ""
|
|||
#. module: caldav
|
||||
#: code:addons/caldav/calendar.py:789
|
||||
#, python-format
|
||||
msgid "Can not create line \"%s\" more than once"
|
||||
msgid "Cannot create line \"%s\" more than once."
|
||||
msgstr ""
|
||||
|
||||
#. module: caldav
|
||||
|
@ -365,7 +365,7 @@ msgstr ""
|
|||
#. module: caldav
|
||||
#: code:addons/caldav/wizard/calendar_event_import.py:63
|
||||
#, python-format
|
||||
msgid "Invalid format of the ics, file can not be imported"
|
||||
msgid "Invalid format of the ics, file cannot be imported."
|
||||
msgstr ""
|
||||
|
||||
#. module: caldav
|
||||
|
|
|
@ -178,7 +178,7 @@ configuration
|
|||
res = {}
|
||||
host = context.get('host')
|
||||
if not config.get_misc('webdav','enable',True):
|
||||
raise Exception("WebDAV is disabled, cannot continue")
|
||||
raise Exception("WebDAV is disabled, cannot continue.")
|
||||
user_pool = self.pool.get('res.users')
|
||||
current_user = user_pool.browse(cr, uid, uid, context=context)
|
||||
#TODO write documentation
|
||||
|
|
|
@ -60,7 +60,7 @@ class calendar_event_import(osv.osv_memory):
|
|||
try:
|
||||
vals = model_obj.import_cal(cr, uid, base64.decodestring(data['file_path']), context['active_id'], context)
|
||||
except:
|
||||
raise osv.except_osv(_('Warning !'),_('Invalid format of the ics, file can not be imported'))
|
||||
raise osv.except_osv(_('Warning!'),_('Invalid format of the ics, file cannot be imported.'))
|
||||
global cnt
|
||||
if vals:
|
||||
cnt = len(vals)
|
||||
|
|
|
@ -59,7 +59,7 @@ class base_action_rule(osv.osv):
|
|||
else:
|
||||
reply_to = emailfrom
|
||||
if not emailfrom:
|
||||
raise osv.except_osv(_('Error!'), _("No Email Found for your Company address!"))
|
||||
raise osv.except_osv(_('Error!'), _("There is no email for your company address."))
|
||||
return mail_message.schedule_with_attach(cr, uid, emailfrom, emails, name, body, model=obj._name, reply_to=reply_to, res_id=obj.id)
|
||||
|
||||
def do_check(self, cr, uid, action, obj, context=None):
|
||||
|
|
|
@ -795,9 +795,9 @@ class crm_lead(base_stage, osv.osv):
|
|||
def unlink(self, cr, uid, ids, context=None):
|
||||
for lead in self.browse(cr, uid, ids, context):
|
||||
if (not lead.section_id.allow_unlink) and (lead.state != 'draft'):
|
||||
raise osv.except_osv(_('Error'),
|
||||
_("You cannot delete lead '%s'; it must be in state 'Draft' to be deleted. " \
|
||||
"You should better cancel it, instead of deleting it.") % lead.name)
|
||||
raise osv.except_osv(_('Error!'),
|
||||
_("You cannot delete lead '%s' because it is not in 'Draft' state. " \
|
||||
"You can still cancel it, instead of deleting it.") % lead.name)
|
||||
return super(crm_lead, self).unlink(cr, uid, ids, context)
|
||||
|
||||
def write(self, cr, uid, ids, vals, context=None):
|
||||
|
|
|
@ -113,7 +113,7 @@ class res_users(osv.osv):
|
|||
'user_id': user_id}, context=context)
|
||||
except:
|
||||
# Tolerate a missing shortcut. See product/product.py for similar code.
|
||||
_logger.debug('Skipped meetings shortcut for user "%s"', data.get('name','<new'))
|
||||
_logger.debug('Skipped meetings shortcut for user "%s".', data.get('name','<new'))
|
||||
return user_id
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||
|
|
|
@ -285,7 +285,7 @@ class crm_phonecall(base_state, osv.osv):
|
|||
return True
|
||||
|
||||
def _call_set_partner_send_note(self, cr, uid, ids, context=None):
|
||||
return self.message_append_note(cr, uid, ids, body=_("Partner has been <b>created</b>"), context=context)
|
||||
return self.message_append_note(cr, uid, ids, body=_("Partner has been <b>created</b>."), context=context)
|
||||
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
||||
|
|
|
@ -110,7 +110,7 @@ msgstr ""
|
|||
#. module: crm
|
||||
#: code:addons/crm/wizard/crm_add_note.py:28
|
||||
#, python-format
|
||||
msgid "Can not add note!"
|
||||
msgid "Cannot add note."
|
||||
msgstr ""
|
||||
|
||||
#. module: crm
|
||||
|
@ -802,7 +802,7 @@ msgstr ""
|
|||
#: code:addons/crm/crm_lead.py:832
|
||||
#, python-format
|
||||
msgid ""
|
||||
"You cannot delete lead '%s'; it must be in state 'Draft' to be deleted. You "
|
||||
"You cannot delete lead '%s'; because it is not in 'Draft' state. You "
|
||||
"should better cancel it, instead of deleting it."
|
||||
msgstr ""
|
||||
|
||||
|
@ -3184,7 +3184,7 @@ msgstr ""
|
|||
#. module: crm
|
||||
#: code:addons/crm/wizard/crm_lead_to_opportunity.py:104
|
||||
#, python-format
|
||||
msgid "Closed/Cancelled Leads can not be converted into Opportunity"
|
||||
msgid "Closed/Cancelled Leads cannot be converted into Opportunity."
|
||||
msgstr ""
|
||||
|
||||
#. module: crm
|
||||
|
@ -3401,7 +3401,7 @@ msgstr ""
|
|||
#. module: crm
|
||||
#: code:addons/crm/crm_action_rule.py:61
|
||||
#, python-format
|
||||
msgid "No Email Found for your Company address!"
|
||||
msgid "There is no Email for your Company address."
|
||||
msgstr ""
|
||||
|
||||
#. module: crm
|
||||
|
|
|
@ -99,7 +99,7 @@ class crm_lead2opportunity_partner(osv.osv_memory):
|
|||
lead_obj = self.pool.get('crm.lead')
|
||||
for lead in lead_obj.browse(cr, uid, context.get('active_ids', []), context=context):
|
||||
if lead.state in ['done', 'cancel']:
|
||||
raise osv.except_osv(_("Warning !"), _("Closed/Cancelled Leads can not be converted into Opportunity"))
|
||||
raise osv.except_osv(_("Warning !"), _("Closed/Cancelled leads cannot be converted into opportunities."))
|
||||
return False
|
||||
|
||||
def _convert_opportunity(self, cr, uid, ids, vals, context=None):
|
||||
|
|
|
@ -115,7 +115,7 @@ msgstr ""
|
|||
#. module: crm_partner_assign
|
||||
#: code:addons/crm_partner_assign/partner_geo_assign.py:37
|
||||
#, python-format
|
||||
msgid "Could not contact geolocation servers, please make sure you have a working internet connection (%s)"
|
||||
msgid "Cannot contact geolocation servers, please make sure you have a working internet connection (%s)."
|
||||
msgstr ""
|
||||
|
||||
#. module: crm_partner_assign
|
||||
|
|
|
@ -34,7 +34,7 @@ def geo_find(addr):
|
|||
xml = urllib.urlopen(url).read()
|
||||
except Exception, e:
|
||||
raise osv.except_osv(_('Network error'),
|
||||
_('Could not contact geolocation servers, please make sure you have a working internet connection (%s)') % e)
|
||||
_('Cannot contact geolocation servers. Please make sure that your internet connection is up and running (%s).') % e)
|
||||
|
||||
if '<error>' in xml:
|
||||
return None
|
||||
|
|
|
@ -217,7 +217,7 @@ class delivery_grid(osv.osv):
|
|||
ok = True
|
||||
break
|
||||
if not ok:
|
||||
raise osv.except_osv(_('No price available!'), _('No line matched this product or order in the choosed delivery grid.'))
|
||||
raise osv.except_osv(_('No price available!'), _('No line matched this product or order in the chosen delivery grid.'))
|
||||
|
||||
return price
|
||||
|
||||
|
|
|
@ -86,7 +86,7 @@ class stock_picking(osv.osv):
|
|||
grid_id = carrier_obj.grid_get(cr, uid, [picking.carrier_id.id],
|
||||
picking.partner_id.id, context=context)
|
||||
if not grid_id:
|
||||
raise osv.except_osv(_('Warning'),
|
||||
raise osv.except_osv(_('Warning!'),
|
||||
_('The carrier %s (id: %d) has no delivery grid!') \
|
||||
% (picking.carrier_id.name,
|
||||
picking.carrier_id.id))
|
||||
|
|
|
@ -93,13 +93,13 @@ class indexer(object):
|
|||
except NhException:
|
||||
pass
|
||||
|
||||
raise NhException('No appropriate method to index file')
|
||||
raise NhException('No appropriate method to index file.')
|
||||
|
||||
def _doIndexContent(self,content):
|
||||
raise NhException("Content not handled here")
|
||||
raise NhException("Content cannot be handled here.")
|
||||
|
||||
def _doIndexFile(self,fpath):
|
||||
raise NhException("Content not handled here")
|
||||
raise NhException("Content cannot be handled here.")
|
||||
|
||||
def __repr__(self):
|
||||
return "<indexer %s.%s>" %(self.__module__, self.__class__.__name__)
|
||||
|
@ -132,9 +132,9 @@ class contentIndex(object):
|
|||
f = True
|
||||
|
||||
if f:
|
||||
_logger.debug('Register content indexer: %r', obj)
|
||||
_logger.debug('Register content indexer: %r.', obj)
|
||||
if not f:
|
||||
raise Exception("Your indexer should at least suport a mimetype or extension")
|
||||
raise Exception("Your indexer should at least support a mimetype or extension.")
|
||||
|
||||
def doIndex(self, content, filename=None, content_type=None, realfname = None, debug=False):
|
||||
fobj = None
|
||||
|
@ -169,22 +169,22 @@ class contentIndex(object):
|
|||
(result, _) = pop.communicate()
|
||||
|
||||
mime2 = result.split(';')[0]
|
||||
_logger.debug('File gave us: %s', mime2)
|
||||
_logger.debug('File gives us: %s', mime2)
|
||||
# Note that the temporary file still exists now.
|
||||
mime,fobj = mime_match(mime2, self.mimes)
|
||||
if not mime:
|
||||
mime = mime2
|
||||
except Exception:
|
||||
_logger.exception('Cannot determine mime type')
|
||||
_logger.exception('Cannot determine mime type.')
|
||||
|
||||
try:
|
||||
if fobj:
|
||||
res = (mime, fobj.indexContent(content,filename,fname or realfname) )
|
||||
else:
|
||||
_logger.debug("Have no object, return (%s, None)", mime)
|
||||
_logger.debug("Have no object, return (%s, None).", mime)
|
||||
res = (mime, None )
|
||||
except Exception:
|
||||
_logger.exception("Could not index file %s (%s)",
|
||||
_logger.exception("Cannot index file %s (%s).",
|
||||
filename, fname or realfname)
|
||||
res = None
|
||||
|
||||
|
@ -193,7 +193,7 @@ class contentIndex(object):
|
|||
try:
|
||||
os.unlink(fname)
|
||||
except Exception:
|
||||
_logger.exception("Could not unlink %s", fname)
|
||||
_logger.exception("Cannot unlink %s.", fname)
|
||||
return res
|
||||
|
||||
cntIndex = contentIndex()
|
||||
|
|
|
@ -61,7 +61,7 @@ class document_file(osv.osv):
|
|||
return False
|
||||
|
||||
if ids is not None:
|
||||
raise NotImplementedError("Ids is just there by convention! Don't use it yet, please.")
|
||||
raise NotImplementedError("Ids are just there by convention, please do not use it.")
|
||||
|
||||
cr.execute("UPDATE ir_attachment " \
|
||||
"SET parent_id = %s, db_datas = decode(encode(db_datas,'escape'), 'base64') " \
|
||||
|
@ -339,7 +339,7 @@ class document_file(osv.osv):
|
|||
if r:
|
||||
unres.append(r)
|
||||
else:
|
||||
self.loggerdoc.warning("Unlinking attachment #%s %s that has no storage",
|
||||
self.loggerdoc.warning("Unlinking attachment #%s %s that has no storage.",
|
||||
f.id, f.name)
|
||||
res = super(document_file, self).unlink(cr, uid, ids, context)
|
||||
stor.do_unlink(cr, uid, unres)
|
||||
|
|
|
@ -108,7 +108,7 @@ class document_directory(osv.osv):
|
|||
_sql_constraints = [
|
||||
('dirname_uniq', 'unique (name,parent_id,ressource_id,ressource_parent_type_id)', 'The directory name must be unique !'),
|
||||
('no_selfparent', 'check(parent_id <> id)', 'Directory cannot be parent of itself!'),
|
||||
('dir_parented', 'check(parent_id IS NOT NULL OR storage_id IS NOT NULL)', 'Directory must have a parent or a storage')
|
||||
('dir_parented', 'check(parent_id IS NOT NULL OR storage_id IS NOT NULL)', 'Directory must have a parent or a storage.')
|
||||
]
|
||||
def name_get(self, cr, uid, ids, context=None):
|
||||
res = []
|
||||
|
@ -153,7 +153,7 @@ class document_directory(osv.osv):
|
|||
return True
|
||||
|
||||
_constraints = [
|
||||
(_check_recursion, 'Error! You can not create recursive Directories.', ['parent_id'])
|
||||
(_check_recursion, 'Error! You cannot create recursive directories.', ['parent_id'])
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
@ -193,7 +193,7 @@ class document_directory(osv.osv):
|
|||
elif dbro.type == 'ressource':
|
||||
return nodes.node_res_dir
|
||||
else:
|
||||
raise ValueError("dir node for %s type", dbro.type)
|
||||
raise ValueError("dir node for %s type.", dbro.type)
|
||||
|
||||
def _prepare_context(self, cr, uid, nctx, context=None):
|
||||
""" Fill nctx with properties for this database
|
||||
|
|
|
@ -188,8 +188,8 @@ class nodefd_db(StringIO, nodes.node_descriptor):
|
|||
elif mode == 'a':
|
||||
StringIO.__init__(self, None)
|
||||
else:
|
||||
_logger.error("Incorrect mode %s specified", mode)
|
||||
raise IOError(errno.EINVAL, "Invalid file mode")
|
||||
_logger.error("Incorrect mode %s is specified.", mode)
|
||||
raise IOError(errno.EINVAL, "Invalid file mode.")
|
||||
self.mode = mode
|
||||
|
||||
def size(self):
|
||||
|
@ -238,7 +238,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
|
|||
(out, len(data), par.file_id))
|
||||
cr.commit()
|
||||
except Exception:
|
||||
_logger.exception('Cannot update db file #%d for close:', par.file_id)
|
||||
_logger.exception('Cannot update db file #%d for close.', par.file_id)
|
||||
raise
|
||||
finally:
|
||||
cr.close()
|
||||
|
@ -268,8 +268,8 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
|
|||
elif mode == 'a':
|
||||
StringIO.__init__(self, None)
|
||||
else:
|
||||
_logger.error("Incorrect mode %s specified", mode)
|
||||
raise IOError(errno.EINVAL, "Invalid file mode")
|
||||
_logger.error("Incorrect mode %s is specified.", mode)
|
||||
raise IOError(errno.EINVAL, "Invalid file mode.")
|
||||
self.mode = mode
|
||||
|
||||
def size(self):
|
||||
|
@ -317,7 +317,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
|
|||
(base64.encodestring(data), len(data), par.file_id))
|
||||
cr.commit()
|
||||
except Exception:
|
||||
_logger.exception('Cannot update db file #%d for close:', par.file_id)
|
||||
_logger.exception('Cannot update db file #%d for close.', par.file_id)
|
||||
raise
|
||||
finally:
|
||||
cr.close()
|
||||
|
@ -401,10 +401,10 @@ class document_storage(osv.osv):
|
|||
# self._logger.debug('Npath: %s', npath)
|
||||
for n in npath:
|
||||
if n == '..':
|
||||
raise ValueError("Invalid '..' element in path")
|
||||
raise ValueError("Invalid '..' element in path.")
|
||||
for ch in ('*', '|', "\\", '/', ':', '"', '<', '>', '?',):
|
||||
if ch in n:
|
||||
raise ValueError("Invalid char %s in path %s" %(ch, n))
|
||||
raise ValueError("Invalid char %s in path %s." %(ch, n))
|
||||
dpath = [store_path,]
|
||||
dpath += npath[:-1]
|
||||
path = os.path.join(*dpath)
|
||||
|
@ -420,7 +420,7 @@ class document_storage(osv.osv):
|
|||
"""
|
||||
boo = self.browse(cr, uid, id, context=context)
|
||||
if not boo.online:
|
||||
raise IOError(errno.EREMOTE, 'medium offline')
|
||||
raise IOError(errno.EREMOTE, 'Medium offline.')
|
||||
|
||||
if fil_obj:
|
||||
ira = fil_obj
|
||||
|
@ -435,10 +435,10 @@ class document_storage(osv.osv):
|
|||
context = {}
|
||||
boo = self.browse(cr, uid, id, context=context)
|
||||
if not boo.online:
|
||||
raise IOError(errno.EREMOTE, 'medium offline')
|
||||
raise IOError(errno.EREMOTE, 'Medium offline.')
|
||||
|
||||
if boo.readonly and mode not in ('r', 'rb'):
|
||||
raise IOError(errno.EPERM, "Readonly medium")
|
||||
raise IOError(errno.EPERM, "Readonly medium.")
|
||||
|
||||
ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
|
||||
if boo.type == 'filestore':
|
||||
|
@ -447,8 +447,8 @@ class document_storage(osv.osv):
|
|||
# try to fix their directory.
|
||||
if mode in ('r','r+'):
|
||||
if ira.file_size:
|
||||
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
|
||||
raise IOError(errno.ENOENT, 'No file can be located')
|
||||
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore. This should get fixed." % ira.id)
|
||||
raise IOError(errno.ENOENT, 'No file can be located.')
|
||||
else:
|
||||
store_fname = self.__get_random_fname(boo.path)
|
||||
cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s',
|
||||
|
@ -470,7 +470,7 @@ class document_storage(osv.osv):
|
|||
do_create = (mode[0] in ('w','a')) )
|
||||
fpath = os.path.join(path, npath[-1])
|
||||
if (not os.path.exists(fpath)) and mode[0] == 'r':
|
||||
raise IOError("File not found: %s" % fpath)
|
||||
raise IOError("File not found: %s." % fpath)
|
||||
elif mode[0] in ('w', 'a') and not ira.store_fname:
|
||||
store_fname = os.path.join(*npath)
|
||||
cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s',
|
||||
|
@ -478,10 +478,10 @@ class document_storage(osv.osv):
|
|||
return nodefd_file(file_node, path=fpath, mode=mode)
|
||||
|
||||
elif boo.type == 'virtual':
|
||||
raise ValueError('Virtual storage does not support static files')
|
||||
raise ValueError('Virtual storage does not support static file(s).')
|
||||
|
||||
else:
|
||||
raise TypeError("No %s storage" % boo.type)
|
||||
raise TypeError("No %s storage." % boo.type)
|
||||
|
||||
def __get_data_3(self, cr, uid, boo, ira, context):
|
||||
if boo.type == 'filestore':
|
||||
|
@ -489,7 +489,7 @@ class document_storage(osv.osv):
|
|||
# On a migrated db, some files may have the wrong storage type
|
||||
# try to fix their directory.
|
||||
if ira.file_size:
|
||||
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
|
||||
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore. This should get fixed." % ira.id)
|
||||
return None
|
||||
fpath = os.path.join(boo.path, ira.store_fname)
|
||||
return file(fpath, 'rb').read()
|
||||
|
@ -521,13 +521,13 @@ class document_storage(osv.osv):
|
|||
elif not ira.store_fname:
|
||||
return None
|
||||
else:
|
||||
raise IOError(errno.ENOENT, "File not found: %s" % fpath)
|
||||
raise IOError(errno.ENOENT, "File not found: %s." % fpath)
|
||||
|
||||
elif boo.type == 'virtual':
|
||||
raise ValueError('Virtual storage does not support static files')
|
||||
raise ValueError('Virtual storage does not support static file(s).')
|
||||
|
||||
else:
|
||||
raise TypeError("No %s storage" % boo.type)
|
||||
raise TypeError("No %s storage!" % boo.type)
|
||||
|
||||
def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None):
|
||||
""" store the data.
|
||||
|
@ -541,12 +541,12 @@ class document_storage(osv.osv):
|
|||
ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
|
||||
|
||||
if not boo.online:
|
||||
raise IOError(errno.EREMOTE, 'medium offline')
|
||||
raise IOError(errno.EREMOTE, 'Medium offline.')
|
||||
|
||||
if boo.readonly:
|
||||
raise IOError(errno.EPERM, "Readonly medium")
|
||||
raise IOError(errno.EPERM, "Readonly medium.")
|
||||
|
||||
_logger.debug( "Store data for ir.attachment #%d" % ira.id)
|
||||
_logger.debug( "Store data for ir.attachment #%d." % ira.id)
|
||||
store_fname = None
|
||||
fname = None
|
||||
if boo.type == 'filestore':
|
||||
|
@ -559,13 +559,13 @@ class document_storage(osv.osv):
|
|||
fp.write(data)
|
||||
finally:
|
||||
fp.close()
|
||||
_logger.debug( "Saved data to %s" % fname)
|
||||
_logger.debug( "Saved data to %s." % fname)
|
||||
filesize = len(data) # os.stat(fname).st_size
|
||||
|
||||
# TODO Here, an old file would be left hanging.
|
||||
|
||||
except Exception, e:
|
||||
_logger.warning( "Couldn't save data to %s", path, exc_info=True)
|
||||
_logger.warning( "Cannot save data to %s.", path, exc_info=True)
|
||||
raise except_orm(_('Error!'), str(e))
|
||||
elif boo.type == 'db':
|
||||
filesize = len(data)
|
||||
|
@ -588,19 +588,19 @@ class document_storage(osv.osv):
|
|||
fp.write(data)
|
||||
finally:
|
||||
fp.close()
|
||||
_logger.debug("Saved data to %s", fname)
|
||||
_logger.debug("Saved data to %s.", fname)
|
||||
filesize = len(data) # os.stat(fname).st_size
|
||||
store_fname = os.path.join(*npath)
|
||||
# TODO Here, an old file would be left hanging.
|
||||
except Exception,e :
|
||||
_logger.warning("Couldn't save data:", exc_info=True)
|
||||
_logger.warning("Cannot save data.", exc_info=True)
|
||||
raise except_orm(_('Error!'), str(e))
|
||||
|
||||
elif boo.type == 'virtual':
|
||||
raise ValueError('Virtual storage does not support static files')
|
||||
raise ValueError('Virtual storage does not support static file(s).')
|
||||
|
||||
else:
|
||||
raise TypeError("No %s storage" % boo.type)
|
||||
raise TypeError("No %s storage!" % boo.type)
|
||||
|
||||
# 2nd phase: store the metadata
|
||||
try:
|
||||
|
@ -612,7 +612,7 @@ class document_storage(osv.osv):
|
|||
mime, icont = cntIndex.doIndex(data, ira.datas_fname,
|
||||
ira.file_type or None, fname)
|
||||
except Exception:
|
||||
_logger.debug('Cannot index file:', exc_info=True)
|
||||
_logger.debug('Cannot index file.', exc_info=True)
|
||||
pass
|
||||
|
||||
try:
|
||||
|
@ -629,7 +629,7 @@ class document_storage(osv.osv):
|
|||
file_node.content_type = mime
|
||||
return True
|
||||
except Exception, e :
|
||||
self._logger.warning("Couldn't save data:", exc_info=True)
|
||||
self._logger.warning("Cannot save data.", exc_info=True)
|
||||
# should we really rollback once we have written the actual data?
|
||||
# at the db case (only), that rollback would be safe
|
||||
raise except_orm(_('Error at doc write!'), str(e))
|
||||
|
@ -639,10 +639,10 @@ class document_storage(osv.osv):
|
|||
files that have to be removed, too. """
|
||||
|
||||
if not storage_bo.online:
|
||||
raise IOError(errno.EREMOTE, 'medium offline')
|
||||
raise IOError(errno.EREMOTE, 'Medium offline.')
|
||||
|
||||
if storage_bo.readonly:
|
||||
raise IOError(errno.EPERM, "Readonly medium")
|
||||
raise IOError(errno.EPERM, "Readonly medium.")
|
||||
|
||||
if storage_bo.type == 'filestore':
|
||||
fname = fil_bo.store_fname
|
||||
|
@ -659,7 +659,7 @@ class document_storage(osv.osv):
|
|||
path = storage_bo.path
|
||||
return ( storage_bo.id, 'file', os.path.join(path, fname))
|
||||
else:
|
||||
raise TypeError("No %s storage" % storage_bo.type)
|
||||
raise TypeError("No %s storage!" % storage_bo.type)
|
||||
|
||||
def do_unlink(self, cr, uid, unres):
|
||||
for id, ktype, fname in unres:
|
||||
|
@ -667,9 +667,9 @@ class document_storage(osv.osv):
|
|||
try:
|
||||
os.unlink(fname)
|
||||
except Exception:
|
||||
_logger.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
|
||||
_logger.warning("Cannot remove file %s, please remove it manually.", fname, exc_info=True)
|
||||
else:
|
||||
_logger.warning("Unknown unlink key %s" % ktype)
|
||||
_logger.warning("Unlink unknown key %s." % ktype)
|
||||
|
||||
return True
|
||||
|
||||
|
@ -684,10 +684,10 @@ class document_storage(osv.osv):
|
|||
assert sbro, "The file #%d didn't provide storage" % file_node.file_id
|
||||
|
||||
if not sbro.online:
|
||||
raise IOError(errno.EREMOTE, 'medium offline')
|
||||
raise IOError(errno.EREMOTE, 'Medium offline.')
|
||||
|
||||
if sbro.readonly:
|
||||
raise IOError(errno.EPERM, "Readonly medium")
|
||||
raise IOError(errno.EPERM, "Readonly medium.")
|
||||
|
||||
if sbro.type in ('filestore', 'db', 'db64'):
|
||||
# nothing to do for a rename, allow to change the db field
|
||||
|
@ -699,9 +699,9 @@ class document_storage(osv.osv):
|
|||
fname = ira.store_fname
|
||||
|
||||
if not fname:
|
||||
_logger.warning("Trying to rename a non-stored file")
|
||||
_logger.warning("Trying to rename a non-stored file.")
|
||||
if fname != os.path.join(*npath):
|
||||
_logger.warning("inconsistency in realstore: %s != %s" , fname, repr(npath))
|
||||
_logger.warning("Inconsistency to realstore: %s != %s." , fname, repr(npath))
|
||||
|
||||
oldpath = os.path.join(path, npath[-1])
|
||||
newpath = os.path.join(path, new_name)
|
||||
|
@ -711,7 +711,7 @@ class document_storage(osv.osv):
|
|||
store_fname = os.path.join(*store_path)
|
||||
return { 'name': new_name, 'datas_fname': new_name, 'store_fname': store_fname }
|
||||
else:
|
||||
raise TypeError("No %s storage" % sbro.type)
|
||||
raise TypeError("No %s storage!" % sbro.type)
|
||||
|
||||
def simple_move(self, cr, uid, file_node, ndir_bro, context=None):
|
||||
""" A preparation for a file move.
|
||||
|
@ -726,10 +726,10 @@ class document_storage(osv.osv):
|
|||
assert sbro, "The file #%d didn't provide storage" % file_node.file_id
|
||||
|
||||
if not sbro.online:
|
||||
raise IOError(errno.EREMOTE, 'medium offline')
|
||||
raise IOError(errno.EREMOTE, 'Medium offline.')
|
||||
|
||||
if sbro.readonly:
|
||||
raise IOError(errno.EPERM, "Readonly medium")
|
||||
raise IOError(errno.EPERM, "Readonly medium.")
|
||||
|
||||
par = ndir_bro
|
||||
psto = None
|
||||
|
@ -739,8 +739,8 @@ class document_storage(osv.osv):
|
|||
break
|
||||
par = par.parent_id
|
||||
if file_node.storage_id != psto:
|
||||
_logger.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name)
|
||||
raise NotImplementedError('Cannot move files between storage media')
|
||||
_logger.debug('Cannot move file %r from %r to %r.', file_node, file_node.parent, ndir_bro.name)
|
||||
raise NotImplementedError('Cannot move file(s) between storage media.')
|
||||
|
||||
if sbro.type in ('filestore', 'db', 'db64'):
|
||||
# nothing to do for a rename, allow to change the db field
|
||||
|
@ -752,9 +752,9 @@ class document_storage(osv.osv):
|
|||
fname = ira.store_fname
|
||||
|
||||
if not fname:
|
||||
_logger.warning("Trying to rename a non-stored file")
|
||||
_logger.warning("Trying to rename a non-stored file.")
|
||||
if fname != os.path.join(*opath):
|
||||
_logger.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))
|
||||
_logger.warning("Inconsistency to realstore: %s != %s." , fname, repr(opath))
|
||||
|
||||
oldpath = os.path.join(path, opath[-1])
|
||||
|
||||
|
@ -762,12 +762,12 @@ class document_storage(osv.osv):
|
|||
npath = filter(lambda x: x is not None, npath)
|
||||
newdir = os.path.join(*npath)
|
||||
if not os.path.isdir(newdir):
|
||||
_logger.debug("Must create dir %s", newdir)
|
||||
_logger.debug("Must create dir %s.", newdir)
|
||||
os.makedirs(newdir)
|
||||
npath.append(opath[-1])
|
||||
newpath = os.path.join(*npath)
|
||||
|
||||
_logger.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
|
||||
_logger.debug("Going to move %s from %s to %s.", opath[-1], oldpath, newpath)
|
||||
shutil.move(oldpath, newpath)
|
||||
|
||||
store_path = npath[1:] + [opath[-1],]
|
||||
|
@ -775,7 +775,7 @@ class document_storage(osv.osv):
|
|||
|
||||
return { 'store_fname': store_fname }
|
||||
else:
|
||||
raise TypeError("No %s storage" % sbro.type)
|
||||
raise TypeError("No %s storage." % sbro.type)
|
||||
|
||||
|
||||
document_storage()
|
||||
|
|
|
@ -923,7 +923,7 @@ msgstr ""
|
|||
|
||||
#. module: document
|
||||
#: sql_constraint:document.directory:0
|
||||
msgid "Directory must have a parent or a storage"
|
||||
msgid "Directory must have a parent or a storage."
|
||||
msgstr ""
|
||||
|
||||
#. module: document
|
||||
|
|
|
@ -271,7 +271,7 @@ class node_class(object):
|
|||
return False
|
||||
|
||||
def get_data(self,cr):
|
||||
raise TypeError('no data for %s'% self.type)
|
||||
raise TypeError('No data for %s.'% self.type)
|
||||
|
||||
def open_data(self, cr, mode):
|
||||
""" Open a node_descriptor object for this node.
|
||||
|
@ -285,10 +285,10 @@ class node_class(object):
|
|||
For this class, there is no data, so no implementation. Each
|
||||
child class that has data should override this.
|
||||
"""
|
||||
raise TypeError('no data for %s' % self.type)
|
||||
raise TypeError('No data for %s.' % self.type)
|
||||
|
||||
def _get_storage(self,cr):
|
||||
raise RuntimeError("no storage for base class")
|
||||
raise RuntimeError("No storage for base class.")
|
||||
|
||||
def get_etag(self,cr):
|
||||
""" Get a tag, unique per object + modification.
|
||||
|
@ -327,7 +327,7 @@ class node_class(object):
|
|||
if self.DAV_M_NS.has_key(ns):
|
||||
prefix = self.DAV_M_NS[ns]
|
||||
else:
|
||||
_logger.debug('No namespace: %s ("%s")',ns, prop)
|
||||
_logger.debug('No namespace: %s ("%s").',ns, prop)
|
||||
return None
|
||||
|
||||
mname = prefix + "_" + prop.replace('-','_')
|
||||
|
@ -340,7 +340,7 @@ class node_class(object):
|
|||
r = m(cr)
|
||||
return r
|
||||
except AttributeError:
|
||||
_logger.debug('Property %s not supported' % prop, exc_info=True)
|
||||
_logger.debug('The property %s is not supported.' % prop, exc_info=True)
|
||||
return None
|
||||
|
||||
def get_dav_resourcetype(self, cr):
|
||||
|
@ -384,13 +384,13 @@ class node_class(object):
|
|||
""" Create a regular file under this node
|
||||
"""
|
||||
_logger.warning("Attempted to create a file under %r, not possible.", self)
|
||||
raise IOError(errno.EPERM, "Not allowed to create files here")
|
||||
raise IOError(errno.EPERM, "Not allowed to create file(s) here.")
|
||||
|
||||
def create_child_collection(self, cr, objname):
|
||||
""" Create a child collection (directory) under self
|
||||
"""
|
||||
_logger.warning("Attempted to create a collection under %r, not possible.", self)
|
||||
raise IOError(errno.EPERM, "Not allowed to create folders here")
|
||||
raise IOError(errno.EPERM, "Not allowed to create folder(s) here.")
|
||||
|
||||
def rm(self, cr):
|
||||
raise NotImplementedError(repr(self))
|
||||
|
@ -423,9 +423,9 @@ class node_class(object):
|
|||
perms = pe2
|
||||
elif isinstance(perms, int):
|
||||
if perms < 0 or perms > 15:
|
||||
raise ValueError("Invalid permission bits")
|
||||
raise ValueError("Invalid permission bits.")
|
||||
else:
|
||||
raise ValueError("Invalid permission attribute")
|
||||
raise ValueError("Invalid permission attribute.")
|
||||
|
||||
return ((self.uidperms & perms) == perms)
|
||||
|
||||
|
@ -465,7 +465,7 @@ class node_database(node_class):
|
|||
is_allowed = self.check_perms(5)
|
||||
|
||||
if not is_allowed:
|
||||
raise IOError(errno.EPERM, "Permission into directory denied")
|
||||
raise IOError(errno.EPERM, "Permission into directory denied.")
|
||||
|
||||
if domain:
|
||||
where = where + domain
|
||||
|
@ -543,7 +543,7 @@ class node_dir(node_database):
|
|||
try:
|
||||
self.dctx['dctx_' + dfld.field] = safe_eval(dfld.expr,dc2)
|
||||
except Exception,e:
|
||||
print "Cannot eval %s" % dfld.expr
|
||||
print "Cannot eval %s." % dfld.expr
|
||||
print e
|
||||
pass
|
||||
|
||||
|
@ -569,7 +569,7 @@ class node_dir(node_database):
|
|||
|
||||
is_allowed = self.check_perms(nodename and 1 or 5)
|
||||
if not is_allowed:
|
||||
raise IOError(errno.EPERM, "Permission into directory denied")
|
||||
raise IOError(errno.EPERM, "Permission into directory denied.")
|
||||
|
||||
cntobj = self.context._dirobj.pool.get('document.directory.content')
|
||||
uid = self.context.uid
|
||||
|
@ -597,7 +597,7 @@ class node_dir(node_database):
|
|||
is_allowed = self.check_perms(5)
|
||||
|
||||
if not is_allowed:
|
||||
raise IOError(errno.EPERM, "Permission into directory denied")
|
||||
raise IOError(errno.EPERM, "Permission into directory denied.")
|
||||
|
||||
if not domain:
|
||||
domain = []
|
||||
|
@ -633,20 +633,20 @@ class node_dir(node_database):
|
|||
if not directory:
|
||||
raise OSError(2, 'Not such file or directory.')
|
||||
if not self.check_perms('u'):
|
||||
raise IOError(errno.EPERM,"Permission denied")
|
||||
raise IOError(errno.EPERM,"Permission denied.")
|
||||
|
||||
if directory._table_name=='document.directory':
|
||||
if self.children(cr):
|
||||
raise OSError(39, 'Directory not empty.')
|
||||
res = self.context._dirobj.unlink(cr, uid, [directory.id])
|
||||
else:
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
return res
|
||||
|
||||
def create_child_collection(self, cr, objname):
|
||||
object2 = False
|
||||
if not self.check_perms(2):
|
||||
raise IOError(errno.EPERM,"Permission denied")
|
||||
raise IOError(errno.EPERM,"Permission denied.")
|
||||
|
||||
dirobj = self.context._dirobj
|
||||
uid = self.context.uid
|
||||
|
@ -654,7 +654,7 @@ class node_dir(node_database):
|
|||
ctx.update(self.dctx)
|
||||
obj = dirobj.browse(cr, uid, self.dir_id)
|
||||
if obj and (obj.type == 'ressource') and not object2:
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
|
||||
#objname = uri2[-1]
|
||||
val = {
|
||||
|
@ -672,7 +672,7 @@ class node_dir(node_database):
|
|||
Return the node_* created
|
||||
"""
|
||||
if not self.check_perms(2):
|
||||
raise IOError(errno.EPERM,"Permission denied")
|
||||
raise IOError(errno.EPERM,"Permission denied.")
|
||||
|
||||
dirobj = self.context._dirobj
|
||||
uid = self.context.uid
|
||||
|
@ -702,10 +702,10 @@ class node_dir(node_database):
|
|||
Note /may/ be called with ndir_node = None, to rename the document root.
|
||||
"""
|
||||
if ndir_node and (ndir_node.context != self.context):
|
||||
raise NotImplementedError("Cannot move directories between contexts")
|
||||
raise NotImplementedError("Cannot move directories between contexts.")
|
||||
|
||||
if (not self.check_perms('u')) or (not ndir_node.check_perms('w')):
|
||||
raise IOError(errno.EPERM,"Permission denied")
|
||||
raise IOError(errno.EPERM,"Permission denied.")
|
||||
|
||||
dir_obj = self.context._dirobj
|
||||
if not fil_obj:
|
||||
|
@ -724,13 +724,13 @@ class node_dir(node_database):
|
|||
assert self.parent
|
||||
|
||||
if self.parent != ndir_node:
|
||||
_logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
|
||||
raise NotImplementedError('Cannot move dir to another dir')
|
||||
_logger.debug('Cannot move dir %r from %r to %r.', self, self.parent, ndir_node)
|
||||
raise NotImplementedError('Cannot move dir to another dir.')
|
||||
|
||||
ret = {}
|
||||
if new_name and (new_name != dbro.name):
|
||||
if ndir_node.child(cr, new_name):
|
||||
raise IOError(errno.EEXIST, "Destination path already exists")
|
||||
raise IOError(errno.EEXIST, "Destination path already exists.")
|
||||
ret['name'] = new_name
|
||||
|
||||
del dbro
|
||||
|
@ -832,7 +832,7 @@ class node_res_dir(node_class):
|
|||
elif isinstance(app, tuple):
|
||||
where.append(app)
|
||||
else:
|
||||
raise RuntimeError("incorrect domain expr: %s" % self.domain)
|
||||
raise RuntimeError("Incorrect domain expr: %s." % self.domain)
|
||||
if self.resm_id:
|
||||
where.append(('id','=',self.resm_id))
|
||||
|
||||
|
@ -845,7 +845,7 @@ class node_res_dir(node_class):
|
|||
is_allowed = self.check_perms(5)
|
||||
|
||||
if not is_allowed:
|
||||
raise IOError(errno.EPERM,"Permission denied")
|
||||
raise IOError(errno.EPERM,"Permission denied.")
|
||||
|
||||
# print "Where clause for %s" % self.res_model, where
|
||||
if self.ressource_tree:
|
||||
|
@ -923,7 +923,7 @@ class node_res_obj(node_class):
|
|||
try:
|
||||
self.dctx[fld] = safe_eval(expr, dc2)
|
||||
except Exception,e:
|
||||
print "Cannot eval %s for %s" % (expr, fld)
|
||||
print "Cannot eval %s for %s." % (expr, fld)
|
||||
print e
|
||||
pass
|
||||
else:
|
||||
|
@ -962,7 +962,7 @@ class node_res_obj(node_class):
|
|||
res = []
|
||||
is_allowed = self.check_perms((nodename and 1) or 5)
|
||||
if not is_allowed:
|
||||
raise IOError(errno.EPERM,"Permission denied")
|
||||
raise IOError(errno.EPERM,"Permission denied.")
|
||||
|
||||
cntobj = self.context._dirobj.pool.get('document.directory.content')
|
||||
uid = self.context.uid
|
||||
|
@ -997,7 +997,7 @@ class node_res_obj(node_class):
|
|||
def get_dav_eprop_DEPR(self, cr, ns, prop):
|
||||
# Deprecated!
|
||||
if ns != 'http://groupdav.org/' or prop != 'resourcetype':
|
||||
_logger.warning("Who asked for %s:%s?" % (ns, prop))
|
||||
_logger.warning("Who asks for %s:%s?" % (ns, prop))
|
||||
return None
|
||||
cntobj = self.context._dirobj.pool.get('document.directory.content')
|
||||
uid = self.context.uid
|
||||
|
@ -1016,7 +1016,7 @@ class node_res_obj(node_class):
|
|||
|
||||
is_allowed = self.check_perms((name and 1) or 5)
|
||||
if not is_allowed:
|
||||
raise IOError(errno.EPERM,"Permission denied")
|
||||
raise IOError(errno.EPERM,"Permission denied.")
|
||||
|
||||
uid = self.context.uid
|
||||
ctx = self.context.context.copy()
|
||||
|
@ -1103,7 +1103,7 @@ class node_res_obj(node_class):
|
|||
dirobj = self.context._dirobj
|
||||
is_allowed = self.check_perms(2)
|
||||
if not is_allowed:
|
||||
raise IOError(errno.EPERM,"Permission denied")
|
||||
raise IOError(errno.EPERM,"Permission denied.")
|
||||
|
||||
uid = self.context.uid
|
||||
ctx = self.context.context.copy()
|
||||
|
@ -1114,7 +1114,7 @@ class node_res_obj(node_class):
|
|||
|
||||
obj = dirobj.browse(cr, uid, self.dir_id)
|
||||
if obj and (obj.type == 'ressource') and not object2:
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
|
||||
|
||||
val = {
|
||||
|
@ -1135,7 +1135,7 @@ class node_res_obj(node_class):
|
|||
"""
|
||||
is_allowed = self.check_perms(2)
|
||||
if not is_allowed:
|
||||
raise IOError(errno.EPERM,"Permission denied")
|
||||
raise IOError(errno.EPERM,"Permission denied.")
|
||||
|
||||
dirobj = self.context._dirobj
|
||||
uid = self.context.uid
|
||||
|
@ -1213,9 +1213,9 @@ class node_file(node_class):
|
|||
|
||||
def open_data(self, cr, mode):
|
||||
stor = self.storage_id
|
||||
assert stor, "No storage for file #%s" % self.file_id
|
||||
assert stor, "No storage for file #%s." % self.file_id
|
||||
if not self.check_perms(4):
|
||||
raise IOError(errno.EPERM, "Permission denied")
|
||||
raise IOError(errno.EPERM, "Permission denied.")
|
||||
|
||||
# If storage is not set properly, we are just screwed here, don't
|
||||
# try to get it from default.
|
||||
|
@ -1225,7 +1225,7 @@ class node_file(node_class):
|
|||
def rm(self, cr):
|
||||
uid = self.context.uid
|
||||
if not self.check_perms(8):
|
||||
raise IOError(errno.EPERM, "Permission denied")
|
||||
raise IOError(errno.EPERM, "Permission denied.")
|
||||
document_obj = self.context._dirobj.pool.get('ir.attachment')
|
||||
if self.type in ('collection','database'):
|
||||
return False
|
||||
|
@ -1271,7 +1271,7 @@ class node_file(node_class):
|
|||
stor = self.storage_id
|
||||
assert stor, "No storage for file #%s" % self.file_id
|
||||
if not self.check_perms(4):
|
||||
raise IOError(errno.EPERM, "Permission denied")
|
||||
raise IOError(errno.EPERM, "Permission denied.")
|
||||
|
||||
# If storage is not set properly, we are just screwed here, don't
|
||||
# try to get it from default.
|
||||
|
@ -1294,7 +1294,7 @@ class node_file(node_class):
|
|||
stor = self.storage_id
|
||||
assert stor, "No storage for file #%s" % self.file_id
|
||||
if not self.check_perms(2):
|
||||
raise IOError(errno.EPERM, "Permission denied")
|
||||
raise IOError(errno.EPERM, "Permission denied.")
|
||||
|
||||
stobj = self.context._dirobj.pool.get('document.storage')
|
||||
return stobj.set_data(cr, self.context.uid,stor, self, data, self.context.context, fil_obj)
|
||||
|
@ -1304,20 +1304,20 @@ class node_file(node_class):
|
|||
|
||||
def move_to(self, cr, ndir_node, new_name=False, fil_obj=None, ndir_obj=None, in_write=False):
|
||||
if ndir_node and ndir_node.context != self.context:
|
||||
raise NotImplementedError("Cannot move files between contexts")
|
||||
raise NotImplementedError("Cannot move files between contexts.")
|
||||
|
||||
if (not self.check_perms(8)) and ndir_node.check_perms(2):
|
||||
raise IOError(errno.EPERM, "Permission denied")
|
||||
raise IOError(errno.EPERM, "Permission denied.")
|
||||
|
||||
doc_obj = self.context._dirobj.pool.get('ir.attachment')
|
||||
if not fil_obj:
|
||||
dbro = doc_obj.browse(cr, self.context.uid, self.file_id, context=self.context.context)
|
||||
else:
|
||||
dbro = fil_obj
|
||||
assert dbro.id == self.file_id, "%s != %s for %r" % (dbro.id, self.file_id, self)
|
||||
assert dbro.id == self.file_id, "%s != %s for %r." % (dbro.id, self.file_id, self)
|
||||
|
||||
if not dbro:
|
||||
raise IndexError("Cannot locate doc %d", self.file_id)
|
||||
raise IndexError("Cannot locate doc %d.", self.file_id)
|
||||
|
||||
if (not self.parent):
|
||||
# there *must* be a parent node for this one
|
||||
|
@ -1327,8 +1327,8 @@ class node_file(node_class):
|
|||
ret = {}
|
||||
if ndir_node and self.parent != ndir_node:
|
||||
if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)):
|
||||
_logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node)
|
||||
raise NotImplementedError('Cannot move files between dynamic folders')
|
||||
_logger.debug('Cannot move file %r from %r to %r.', self, self.parent, ndir_node)
|
||||
raise NotImplementedError('Cannot move files between dynamic folders.')
|
||||
|
||||
if not ndir_obj:
|
||||
ndir_obj = self.context._dirobj.browse(cr, self.context.uid, \
|
||||
|
@ -1343,7 +1343,7 @@ class node_file(node_class):
|
|||
|
||||
if new_name and (new_name != dbro.name):
|
||||
if len(ret):
|
||||
raise NotImplementedError("Cannot rename and move") # TODO
|
||||
raise NotImplementedError("Cannot rename and move.") # TODO
|
||||
stobj = self.context._dirobj.pool.get('document.storage')
|
||||
r2 = stobj.simple_rename(cr, self.context.uid, self, new_name, self.context.context)
|
||||
ret.update(r2)
|
||||
|
@ -1399,7 +1399,7 @@ class node_content(node_class):
|
|||
def get_data(self, cr, fil_obj = None):
|
||||
cntobj = self.context._dirobj.pool.get('document.directory.content')
|
||||
if not self.check_perms(4):
|
||||
raise IOError(errno.EPERM, "Permission denied")
|
||||
raise IOError(errno.EPERM, "Permission denied.")
|
||||
|
||||
ctx = self.context.context.copy()
|
||||
ctx.update(self.dctx)
|
||||
|
@ -1416,10 +1416,10 @@ class node_content(node_class):
|
|||
elif mode in ('r+', 'w+'):
|
||||
cperms = 'rw'
|
||||
else:
|
||||
raise IOError(errno.EINVAL, "Cannot open at mode %s" % mode)
|
||||
raise IOError(errno.EINVAL, "Cannot open at mode %s." % mode)
|
||||
|
||||
if not self.check_perms(cperms):
|
||||
raise IOError(errno.EPERM, "Permission denied")
|
||||
raise IOError(errno.EPERM, "Permission denied.")
|
||||
|
||||
ctx = self.context.context.copy()
|
||||
ctx.update(self.dctx)
|
||||
|
@ -1438,7 +1438,7 @@ class node_content(node_class):
|
|||
def set_data(self, cr, data, fil_obj = None):
|
||||
cntobj = self.context._dirobj.pool.get('document.directory.content')
|
||||
if not self.check_perms(2):
|
||||
raise IOError(errno.EPERM, "Permission denied")
|
||||
raise IOError(errno.EPERM, "Permission denied.")
|
||||
|
||||
ctx = self.context.context.copy()
|
||||
ctx.update(self.dctx)
|
||||
|
@ -1473,8 +1473,8 @@ class nodefd_content(StringIO, node_descriptor):
|
|||
elif mode == 'a':
|
||||
StringIO.__init__(self, None)
|
||||
else:
|
||||
_logger.error("Incorrect mode %s specified", mode)
|
||||
raise IOError(errno.EINVAL, "Invalid file mode")
|
||||
_logger.error("Incorrect mode %s is specified.", mode)
|
||||
raise IOError(errno.EINVAL, "Invalid file mode.")
|
||||
self.mode = mode
|
||||
|
||||
def size(self):
|
||||
|
@ -1499,7 +1499,7 @@ class nodefd_content(StringIO, node_descriptor):
|
|||
raise NotImplementedError
|
||||
cr.commit()
|
||||
except Exception:
|
||||
_logger.exception('Cannot update db content #%d for close:', par.cnt_id)
|
||||
_logger.exception('Cannot update db content #%d for close.', par.cnt_id)
|
||||
raise
|
||||
finally:
|
||||
cr.close()
|
||||
|
@ -1527,8 +1527,8 @@ class nodefd_static(StringIO, node_descriptor):
|
|||
elif mode == 'a':
|
||||
StringIO.__init__(self, None)
|
||||
else:
|
||||
_logger.error("Incorrect mode %s specified", mode)
|
||||
raise IOError(errno.EINVAL, "Invalid file mode")
|
||||
_logger.error("Incorrect mode %s is specified.", mode)
|
||||
raise IOError(errno.EINVAL, "Invalid file mode.")
|
||||
self.mode = mode
|
||||
|
||||
def size(self):
|
||||
|
@ -1552,7 +1552,7 @@ class nodefd_static(StringIO, node_descriptor):
|
|||
raise NotImplementedError
|
||||
cr.commit()
|
||||
except Exception:
|
||||
_logger.exception('Cannot update db content #%d for close:', par.cnt_id)
|
||||
_logger.exception('Cannot update db content #%d for close.', par.cnt_id)
|
||||
raise
|
||||
finally:
|
||||
cr.close()
|
||||
|
|
|
@ -104,7 +104,7 @@ class DocIndex(indexer):
|
|||
except OSError:
|
||||
|
||||
_logger.warn("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
|
||||
_logger.debug("Trace of the failed file indexing attempt: ", exc_info=True)
|
||||
_logger.debug("Trace of the failed file indexing attempt.", exc_info=True)
|
||||
return False
|
||||
|
||||
cntIndex.register(DocIndex())
|
||||
|
|
|
@ -81,7 +81,7 @@ class abstracted_fs(object):
|
|||
self.db_name_list.append(db_name)
|
||||
cr.commit()
|
||||
except Exception:
|
||||
self._log.warning('Cannot use db "%s"', db_name)
|
||||
self._log.warning('Cannot use db "%s".', db_name)
|
||||
finally:
|
||||
if cr is not None:
|
||||
cr.close()
|
||||
|
@ -143,40 +143,40 @@ class abstracted_fs(object):
|
|||
child = node.child(cr, objname)
|
||||
if child:
|
||||
if child.type not in ('file','content'):
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
|
||||
ret = child.open_data(cr, mode)
|
||||
cr.commit()
|
||||
assert ret, "Cannot create descriptor for %r: %r" % (child, ret)
|
||||
assert ret, "Cannot create descriptor for %r: %r." % (child, ret)
|
||||
return ret
|
||||
except EnvironmentError:
|
||||
raise
|
||||
except Exception:
|
||||
self._log.exception('Cannot locate item %s at node %s', objname, repr(node))
|
||||
self._log.exception('Cannot locate item %s at node %s.', objname, repr(node))
|
||||
pass
|
||||
|
||||
try:
|
||||
child = node.create_child(cr, objname, data=None)
|
||||
ret = child.open_data(cr, mode)
|
||||
assert ret, "cannot create descriptor for %r" % child
|
||||
assert ret, "Cannot create descriptor for %r." % child
|
||||
cr.commit()
|
||||
return ret
|
||||
except EnvironmentError:
|
||||
raise
|
||||
except Exception:
|
||||
self._log.exception('Cannot create item %s at node %s', objname, repr(node))
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
self._log.exception('Cannot create item %s at node %s.', objname, repr(node))
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
|
||||
def open(self, datacr, mode):
|
||||
if not (datacr and datacr[1]):
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
# Reading operation
|
||||
cr, node, rem = datacr
|
||||
try:
|
||||
res = node.open_data(cr, mode)
|
||||
cr.commit()
|
||||
except TypeError:
|
||||
raise IOError(errno.EINVAL, "No data")
|
||||
raise IOError(errno.EINVAL, "No data.")
|
||||
return res
|
||||
|
||||
# ok, but need test more
|
||||
|
@ -211,9 +211,9 @@ class abstracted_fs(object):
|
|||
self.cwd_node = None
|
||||
return None
|
||||
if not datacr[1]:
|
||||
raise OSError(1, 'Operation not permitted')
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
if datacr[1].type not in ('collection','database'):
|
||||
raise OSError(2, 'Path is not a directory')
|
||||
raise OSError(2, 'Path is not a directory.')
|
||||
self.cwd = '/'+datacr[1].context.dbname + '/'
|
||||
self.cwd += '/'.join(datacr[1].full_path())
|
||||
self.cwd_node = datacr[1]
|
||||
|
@ -223,7 +223,7 @@ class abstracted_fs(object):
|
|||
"""Create the specified directory."""
|
||||
cr, node, rem = datacr or (None, None, None)
|
||||
if not node:
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
|
||||
try:
|
||||
basename =_to_unicode(basename)
|
||||
|
@ -231,8 +231,8 @@ class abstracted_fs(object):
|
|||
self._log.debug("Created child dir: %r", cdir)
|
||||
cr.commit()
|
||||
except Exception:
|
||||
self._log.exception('Cannot create dir "%s" at node %s', basename, repr(node))
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
self._log.exception('Cannot create dir "%s" at node %s.', basename, repr(node))
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
|
||||
def close_cr(self, data):
|
||||
if data and data[0]:
|
||||
|
@ -287,10 +287,10 @@ class abstracted_fs(object):
|
|||
p_parts = p_parts[1:]
|
||||
# self._log.debug("Path parts: %r ", p_parts)
|
||||
if not p_parts:
|
||||
raise IOError(errno.EPERM, 'Cannot perform operation at root dir')
|
||||
raise IOError(errno.EPERM, 'Cannot perform operation at root directory.')
|
||||
dbname = p_parts[0]
|
||||
if dbname not in self.db_list():
|
||||
raise IOError(errno.ENOENT,'Invalid database path: %s' % dbname)
|
||||
raise IOError(errno.ENOENT,'Invalid database path: %s.' % dbname)
|
||||
try:
|
||||
db = pooler.get_db(dbname)
|
||||
except Exception:
|
||||
|
@ -303,7 +303,7 @@ class abstracted_fs(object):
|
|||
raise
|
||||
if not uid:
|
||||
cr.close()
|
||||
raise OSError(2, 'Authentification Required.')
|
||||
raise OSError(2, 'Authentification required.')
|
||||
n = get_node_context(cr, uid, {})
|
||||
node = n.get_uri(cr, p_parts[1:])
|
||||
return (cr, node, rem_path)
|
||||
|
@ -318,7 +318,7 @@ class abstracted_fs(object):
|
|||
node = self.cwd_node
|
||||
if node is False and mode not in ('???'):
|
||||
cr.close()
|
||||
raise IOError(errno.ENOENT, 'Path does not exist')
|
||||
raise IOError(errno.ENOENT, 'Path does not exist.')
|
||||
return (cr, node, rem_path)
|
||||
|
||||
def get_node_cr_uid(self, node):
|
||||
|
@ -375,7 +375,7 @@ class abstracted_fs(object):
|
|||
return self.rmdir(datacr)
|
||||
elif datacr[1].type == 'file':
|
||||
return self.rmfile(datacr)
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
raise OSError(1, 'Operation is not permitted.')
|
||||
|
||||
def rmfile(self, datacr):
|
||||
"""Remove the specified file."""
|
||||
|
@ -399,8 +399,8 @@ class abstracted_fs(object):
|
|||
except EnvironmentError:
|
||||
raise
|
||||
except Exception:
|
||||
self._log.exception('Cannot rename "%s" to "%s" at "%s"', src, datacr[2], datacr[1])
|
||||
raise OSError(1,'Operation not permited.')
|
||||
self._log.exception('Cannot rename "%s" to "%s" at "%s".', src, datacr[2], datacr[1])
|
||||
raise OSError(1,'Operation is not permitted.')
|
||||
|
||||
def stat(self, node):
|
||||
raise NotImplementedError()
|
||||
|
@ -429,7 +429,7 @@ class abstracted_fs(object):
|
|||
def getsize(self, datacr):
|
||||
"""Return the size of the specified file in bytes."""
|
||||
if not (datacr and datacr[1]):
|
||||
raise IOError(errno.ENOENT, "No such file or directory")
|
||||
raise IOError(errno.ENOENT, "No such file or directory.")
|
||||
if datacr[1].type in ('file', 'content'):
|
||||
return datacr[1].get_data_len(datacr[0]) or 0L
|
||||
return 0L
|
||||
|
|
|
@ -302,16 +302,16 @@ class DummyAuthorizer:
|
|||
provide customized response strings when user log-in and quit.
|
||||
"""
|
||||
if self.has_user(username):
|
||||
raise AuthorizerError('User "%s" already exists' %username)
|
||||
raise AuthorizerError('User "%s" already exists.' %username)
|
||||
homedir = os.path.realpath(homedir)
|
||||
if not os.path.isdir(homedir):
|
||||
raise AuthorizerError('No such directory: "%s"' %homedir)
|
||||
raise AuthorizerError('No such directory: "%s".' %homedir)
|
||||
for p in perm:
|
||||
if p not in 'elradfmw':
|
||||
raise AuthorizerError('No such permission "%s"' %p)
|
||||
raise AuthorizerError('No such permission: "%s".' %p)
|
||||
for p in perm:
|
||||
if (p in self.write_perms) and (username == 'anonymous'):
|
||||
warnings.warn("write permissions assigned to anonymous user.",
|
||||
warnings.warn("Write permissions are assigned to anonymous user.",
|
||||
RuntimeWarning)
|
||||
break
|
||||
dic = {'pwd': str(password),
|
||||
|
@ -532,7 +532,7 @@ class ActiveDTP(asyncore.dispatcher):
|
|||
try:
|
||||
self.connect((ip, port))
|
||||
except socket.gaierror:
|
||||
self.cmd_channel.respond("425 Can't connect to specified address.")
|
||||
self.cmd_channel.respond("425 Cannot connect to specified address.")
|
||||
self.close()
|
||||
|
||||
# --- connection / overridden
|
||||
|
@ -542,14 +542,14 @@ class ActiveDTP(asyncore.dispatcher):
|
|||
|
||||
def handle_connect(self):
|
||||
"""Called when connection is established."""
|
||||
self.cmd_channel.respond('200 Active data connection established.')
|
||||
self.cmd_channel.respond('200 Active data connection has been established.')
|
||||
# delegate such connection to DTP handler
|
||||
handler = self.cmd_channel.dtp_handler(self.socket, self.cmd_channel)
|
||||
self.cmd_channel.data_channel = handler
|
||||
self.cmd_channel.on_dtp_connection()
|
||||
|
||||
def handle_expt(self):
|
||||
self.cmd_channel.respond("425 Can't connect to specified address.")
|
||||
self.cmd_channel.respond("425 Cannot connect to specified address.")
|
||||
self.close()
|
||||
|
||||
def handle_error(self):
|
||||
|
@ -562,7 +562,7 @@ class ActiveDTP(asyncore.dispatcher):
|
|||
pass
|
||||
except:
|
||||
logerror(traceback.format_exc())
|
||||
self.cmd_channel.respond("425 Can't connect to specified address.")
|
||||
self.cmd_channel.respond("425 Cannot connect to specified address.")
|
||||
self.close()
|
||||
|
||||
class DTPHandler(asyncore.dispatcher):
|
||||
|
@ -638,7 +638,7 @@ class DTPHandler(asyncore.dispatcher):
|
|||
elif type == 'i':
|
||||
self.data_wrapper = lambda x: x
|
||||
else:
|
||||
raise TypeError, "Unsupported type"
|
||||
raise TypeError, "Unsupported type."
|
||||
self.receive = True
|
||||
|
||||
def get_transmitted_bytes(self):
|
||||
|
@ -767,7 +767,7 @@ class DTPHandler(asyncore.dispatcher):
|
|||
# some other exception occurred; we don't want to provide
|
||||
# confidential error messages
|
||||
logerror(traceback.format_exc())
|
||||
error = "Internal error"
|
||||
error = "Internal error."
|
||||
self.cmd_channel.respond("426 %s; transfer aborted." %error)
|
||||
self.close()
|
||||
|
||||
|
@ -823,7 +823,7 @@ class FileProducer:
|
|||
elif type == 'i':
|
||||
self.data_wrapper = lambda x: x
|
||||
else:
|
||||
raise TypeError, "Unsupported type"
|
||||
raise TypeError, "Unsupported type."
|
||||
|
||||
def more(self):
|
||||
"""Attempt a chunk of data of size self.buffer_size."""
|
||||
|
@ -1485,7 +1485,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
buflimit = 2048
|
||||
if self.in_buffer_len > buflimit:
|
||||
self.respond('500 Command too long.')
|
||||
self.log('Command received exceeded buffer limit of %s.' %(buflimit))
|
||||
self.log('Command has been received exceeds buffer limit of %s.' %(buflimit))
|
||||
self.in_buffer = []
|
||||
self.in_buffer_len = 0
|
||||
|
||||
|
@ -1528,12 +1528,12 @@ class FTPHandler(asynchat.async_chat):
|
|||
# let's check if user provided an argument for those commands
|
||||
# needing one
|
||||
if not arg and cmd in self.arg_cmds:
|
||||
self.respond("501 Syntax error: command needs an argument.")
|
||||
self.respond("501 Syntax error! Command needs an argument.")
|
||||
return
|
||||
|
||||
# let's do the same for those commands requiring no argument.
|
||||
elif arg and cmd in self.unarg_cmds:
|
||||
self.respond("501 Syntax error: command does not accept arguments.")
|
||||
self.respond("501 Syntax error! Command does not accept arguments.")
|
||||
return
|
||||
|
||||
# provide a limited set of commands if user isn't
|
||||
|
@ -1617,7 +1617,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
else:
|
||||
self.in_buffer.append(data)
|
||||
return
|
||||
self.log("Can't handle OOB data.")
|
||||
self.log("Cannot handle OOB data.")
|
||||
self.close()
|
||||
|
||||
def handle_error(self):
|
||||
|
@ -1801,7 +1801,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
except NotImplementedError, err:
|
||||
cmdname = function.__name__
|
||||
why = err.args[0] or 'Not implemented'
|
||||
self.log('FAIL %s() not implemented: %s.' %(cmdname, why))
|
||||
self.log('FAIL %s() is not implemented: %s.' %(cmdname, why))
|
||||
self.respond('502 %s.' %why)
|
||||
raise FTPExceptionSent(why)
|
||||
except EnvironmentError, err:
|
||||
|
@ -1811,7 +1811,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
except Exception:
|
||||
pass
|
||||
ret_code = eresp.get(err.errno, '451')
|
||||
why = (err.strerror) or 'Error in command'
|
||||
why = (err.strerror) or 'Error in command.'
|
||||
self.log('FAIL %s() %s errno=%s: %s.' %(cmdname, uline, err.errno, why))
|
||||
self.respond('%s %s.' % (str(ret_code), why))
|
||||
|
||||
|
@ -1841,15 +1841,15 @@ class FTPHandler(asynchat.async_chat):
|
|||
if ip != self.remote_ip:
|
||||
self.log("Rejected data connection to foreign address %s:%s."
|
||||
%(ip, port))
|
||||
self.respond("501 Can't connect to a foreign address.")
|
||||
self.respond("501 Cannot connect to a foreign address.")
|
||||
return
|
||||
|
||||
# ...another RFC-2577 recommendation is rejecting connections
|
||||
# to privileged ports (< 1024) for security reasons.
|
||||
if not self.permit_privileged_ports:
|
||||
if port < 1024:
|
||||
self.log('PORT against the privileged port "%s" refused.' %port)
|
||||
self.respond("501 Can't connect over a privileged port.")
|
||||
self.log('PORT against the privileged port "%s" has been refused.' %port)
|
||||
self.respond("501 Cannot connect over a privileged port.")
|
||||
return
|
||||
|
||||
# close existent DTP-server instance, if any.
|
||||
|
@ -1889,7 +1889,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
# make sure we are not hitting the max connections limit
|
||||
if self.server.max_cons:
|
||||
if len(self._map) >= self.server.max_cons:
|
||||
msg = "Too many connections. Can't open data channel."
|
||||
msg = "Too many connections. Cannot open data channel."
|
||||
self.respond("425 %s" %msg)
|
||||
self.log(msg)
|
||||
return
|
||||
|
@ -2150,7 +2150,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
datacr = self.get_crdata2(line, mode='list')
|
||||
# RFC-3659 requires 501 response code if path is not a directory
|
||||
if not self.fs.isdir(datacr[1]):
|
||||
err = 'No such directory'
|
||||
err = 'No such directory.'
|
||||
self.log('FAIL MLSD "%s". %s.' %(line, err))
|
||||
self.respond("501 %s." %err)
|
||||
return
|
||||
|
@ -2191,7 +2191,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
fd.seek(self.restart_position)
|
||||
ok = 1
|
||||
except AssertionError:
|
||||
why = "Invalid REST parameter"
|
||||
why = "Invalid REST parameter."
|
||||
except IOError, err:
|
||||
why = _strerror(err)
|
||||
self.restart_position = 0
|
||||
|
@ -2240,7 +2240,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
fd.seek(self.restart_position)
|
||||
ok = 1
|
||||
except AssertionError:
|
||||
why = "Invalid REST parameter"
|
||||
why = "Invalid REST parameter."
|
||||
except IOError, err:
|
||||
why = _strerror(err)
|
||||
self.restart_position = 0
|
||||
|
@ -2275,7 +2275,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
|
||||
# watch for STOU preceded by REST, which makes no sense.
|
||||
if self.restart_position:
|
||||
self.respond("450 Can't STOU while REST request is pending.")
|
||||
self.respond("450 Cannot STOU while REST request is pending.")
|
||||
return
|
||||
|
||||
|
||||
|
@ -2296,7 +2296,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
# hitted the max number of tries to find out file with
|
||||
# unique name
|
||||
if err.errno == errno.EEXIST:
|
||||
why = 'No usable unique file name found'
|
||||
why = 'No usable unique file name found.'
|
||||
# something else happened
|
||||
else:
|
||||
why = _strerror(err)
|
||||
|
@ -2307,9 +2307,9 @@ class FTPHandler(asynchat.async_chat):
|
|||
|
||||
filename = line
|
||||
if not self.authorizer.has_perm(self.username, 'w', filename):
|
||||
self.log('FAIL STOU "%s". Not enough privileges'
|
||||
self.log('FAIL STOU "%s". Not enough privileges.'
|
||||
%self.fs.ftpnorm(line))
|
||||
self.respond("550 Can't STOU: not enough privileges.")
|
||||
self.respond("550 Cannot STOU: not enough privileges.")
|
||||
self.fs.close_cr(datacr)
|
||||
return
|
||||
|
||||
|
@ -2329,7 +2329,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
"""Append data to an existing file on the server."""
|
||||
# watch for APPE preceded by REST, which makes no sense.
|
||||
if self.restart_position:
|
||||
self.respond("550 Can't APPE while REST request is pending.")
|
||||
self.respond("550 Cannot APPE while REST request is pending.")
|
||||
else:
|
||||
self.ftp_STOR(line, mode='a')
|
||||
|
||||
|
@ -2405,7 +2405,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
# and account information already supplied and beginning the
|
||||
# login sequence again.
|
||||
self.flush_account()
|
||||
msg = 'Previous account information was flushed'
|
||||
msg = 'Previous account information is flushed.'
|
||||
self.log('OK USER "%s". %s.' %(line, msg))
|
||||
self.respond('331 %s, send password.' %msg)
|
||||
self.username = line
|
||||
|
@ -2554,7 +2554,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
else:
|
||||
datacr = self.get_crdata2(line)
|
||||
if not datacr:
|
||||
raise IOError(errno.ENOENT, "%s is not retrievable" %line)
|
||||
raise IOError(errno.ENOENT, "%s is not retrievable." %line)
|
||||
|
||||
lmt = self.try_as_current_user(self.fs.getmtime, (datacr,), line=line)
|
||||
lmt = time.strftime("%Y%m%d%H%M%S", time.localtime(lmt))
|
||||
|
@ -2584,7 +2584,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
try:
|
||||
datacr = self.get_crdata2(line, mode='delete')
|
||||
if not datacr[1]:
|
||||
msg = "Can't remove root directory."
|
||||
msg = "Cannot remove root directory."
|
||||
self.respond("553 %s" %msg)
|
||||
self.log('FAIL MKD "/". %s' %msg)
|
||||
self.fs.close_cr(datacr)
|
||||
|
@ -2617,7 +2617,7 @@ class FTPHandler(asynchat.async_chat):
|
|||
if not datacr[1]:
|
||||
self.respond("550 No such file or directory.")
|
||||
elif not datacr[1]:
|
||||
self.respond("553 Can't rename the home directory.")
|
||||
self.respond("553 Cannot rename the home directory.")
|
||||
else:
|
||||
self.fs.rnfr = datacr[1]
|
||||
self.respond("350 Ready for destination name.")
|
||||
|
@ -2760,14 +2760,14 @@ class FTPHandler(asynchat.async_chat):
|
|||
def ftp_OPTS(self, line):
|
||||
"""Specify options for FTP commands as specified in RFC-2389."""
|
||||
try:
|
||||
assert (not line.count(' ') > 1), 'Invalid number of arguments'
|
||||
assert (not line.count(' ') > 1), 'Invalid number of arguments.'
|
||||
if ' ' in line:
|
||||
cmd, arg = line.split(' ')
|
||||
assert (';' in arg), 'Invalid argument'
|
||||
assert (';' in arg), 'Invalid argument!'
|
||||
else:
|
||||
cmd, arg = line, ''
|
||||
# actually the only command able to accept options is MLST
|
||||
assert (cmd.upper() == 'MLST'), 'Unsupported command "%s"' %cmd
|
||||
assert (cmd.upper() == 'MLST'), 'Unsupported command "%s".' %cmd
|
||||
except AssertionError, err:
|
||||
self.respond('501 %s.' %err)
|
||||
else:
|
||||
|
|
|
@ -62,7 +62,7 @@ def get_ftp_fulldata(ftp, fname, limit=8192):
|
|||
data = []
|
||||
def ffp(data, ndata):
|
||||
if len(data)+ len(ndata) > limit:
|
||||
raise IndexError('Data over the limit')
|
||||
raise IndexError('Data over the limit.')
|
||||
data.append(ndata)
|
||||
ftp.retrbinary('RETR %s' % fname, partial(ffp,data))
|
||||
return ''.join(data)
|
||||
|
|
|
@ -98,7 +98,7 @@ class BoundStream2(object):
|
|||
|
||||
def read(self, size=-1):
|
||||
if not self._stream:
|
||||
raise IOError(errno.EBADF, "read() without stream")
|
||||
raise IOError(errno.EBADF, "read() without stream.")
|
||||
|
||||
if self._rem_length == 0:
|
||||
return ''
|
||||
|
@ -136,25 +136,25 @@ class BoundStream2(object):
|
|||
"""
|
||||
if whence == os.SEEK_SET:
|
||||
if pos < 0 or pos > self._length:
|
||||
raise IOError(errno.EINVAL,"Cannot seek")
|
||||
raise IOError(errno.EINVAL,"Cannot seek.")
|
||||
self._stream.seek(pos - self._offset)
|
||||
self._rem_length = self._length - pos
|
||||
elif whence == os.SEEK_CUR:
|
||||
if pos > 0:
|
||||
if pos > self._rem_length:
|
||||
raise IOError(errno.EINVAL,"Cannot seek past end")
|
||||
raise IOError(errno.EINVAL,"Cannot seek past end.")
|
||||
elif pos < 0:
|
||||
oldpos = self.tell()
|
||||
if oldpos + pos < 0:
|
||||
raise IOError(errno.EINVAL,"Cannot seek before start")
|
||||
raise IOError(errno.EINVAL,"Cannot seek before start.")
|
||||
self._stream.seek(pos, os.SEEK_CUR)
|
||||
self._rem_length -= pos
|
||||
elif whence == os.SEEK_END:
|
||||
if pos > 0:
|
||||
raise IOError(errno.EINVAL,"Cannot seek past end")
|
||||
raise IOError(errno.EINVAL,"Cannot seek past end.")
|
||||
else:
|
||||
if self._length + pos < 0:
|
||||
raise IOError(errno.EINVAL,"Cannot seek before start")
|
||||
raise IOError(errno.EINVAL,"Cannot seek before start.")
|
||||
newpos = self._offset + self._length + pos
|
||||
self._stream.seek(newpos, os.SEEK_SET)
|
||||
self._rem_length = 0 - pos
|
||||
|
@ -206,7 +206,7 @@ class openerp_dav_handler(dav_interface):
|
|||
self.parent.log_error("Cannot %s: %s", opname, str(e))
|
||||
self.parent.log_message("Exc: %s",traceback.format_exc())
|
||||
# see par 9.3.1 of rfc
|
||||
raise DAV_Error(403, str(e) or 'Not supported at this path')
|
||||
raise DAV_Error(403, str(e) or 'Not supported at this path.')
|
||||
except EnvironmentError, err:
|
||||
if cr: cr.close()
|
||||
import traceback
|
||||
|
@ -218,7 +218,7 @@ class openerp_dav_handler(dav_interface):
|
|||
if cr: cr.close()
|
||||
self.parent.log_error("Cannot %s: %s", opname, str(e))
|
||||
self.parent.log_message("Exc: %s",traceback.format_exc())
|
||||
raise default_exc("Operation failed")
|
||||
raise default_exc("Operation failed.")
|
||||
|
||||
def _get_dav_lockdiscovery(self, uri):
|
||||
""" We raise that so that the node API is used """
|
||||
|
@ -434,7 +434,7 @@ class openerp_dav_handler(dav_interface):
|
|||
except DAV_Error:
|
||||
raise
|
||||
except Exception, e:
|
||||
self.parent.log_error("cannot get_children: "+ str(e))
|
||||
self.parent.log_error("Cannot get_children: "+str(e)+".")
|
||||
raise
|
||||
finally:
|
||||
if cr: cr.close()
|
||||
|
@ -500,10 +500,10 @@ class openerp_dav_handler(dav_interface):
|
|||
assert start >= 0
|
||||
if end and end < start:
|
||||
self.parent.log_error("Invalid range for data: %s-%s" %(start, end))
|
||||
raise DAV_Error(416, "Invalid range for data")
|
||||
raise DAV_Error(416, "Invalid range for data.")
|
||||
if end:
|
||||
if end >= res.size():
|
||||
raise DAV_Error(416, "Requested data exceeds available size")
|
||||
raise DAV_Error(416, "Requested data exceeds available size.")
|
||||
length = (end + 1) - start
|
||||
else:
|
||||
length = res.size() - start
|
||||
|
@ -661,7 +661,7 @@ class openerp_dav_handler(dav_interface):
|
|||
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
|
||||
if not uri2[-1]:
|
||||
if cr: cr.close()
|
||||
raise DAV_Error(409, "Cannot create nameless collection")
|
||||
raise DAV_Error(409, "Cannot create nameless collection.")
|
||||
if not dbname:
|
||||
if cr: cr.close()
|
||||
raise DAV_Error, 409
|
||||
|
@ -672,7 +672,7 @@ class openerp_dav_handler(dav_interface):
|
|||
nc = node.child(cr, uri2[-1])
|
||||
if nc:
|
||||
cr.close()
|
||||
raise DAV_Error(405, "Path already exists")
|
||||
raise DAV_Error(405, "Path already exists.")
|
||||
self._try_function(node.create_child_collection, (cr, uri2[-1]),
|
||||
"create col %s" % uri2[-1], cr=cr)
|
||||
cr.commit()
|
||||
|
@ -698,14 +698,14 @@ class openerp_dav_handler(dav_interface):
|
|||
dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
|
||||
if not dir_node:
|
||||
cr.close()
|
||||
raise DAV_NotFound('Parent folder not found')
|
||||
raise DAV_NotFound('Parent folder not found.')
|
||||
|
||||
newchild = self._try_function(dir_node.create_child, (cr, objname, data),
|
||||
"create %s" % objname, cr=cr)
|
||||
if not newchild:
|
||||
cr.commit()
|
||||
cr.close()
|
||||
raise DAV_Error(400, "Failed to create resource")
|
||||
raise DAV_Error(400, "Failed to create resource.")
|
||||
|
||||
uparts=urlparse.urlparse(uri)
|
||||
fileloc = '/'.join(newchild.full_path())
|
||||
|
@ -755,7 +755,7 @@ class openerp_dav_handler(dav_interface):
|
|||
res = self._try_function(node.rm, (cr,), "rm %s" % uri, cr=cr)
|
||||
if not res:
|
||||
if cr: cr.close()
|
||||
raise OSError(1, 'Operation not permited.')
|
||||
raise OSError(1, 'Invalid Action!')
|
||||
cr.commit()
|
||||
cr.close()
|
||||
return 204
|
||||
|
@ -937,7 +937,7 @@ class openerp_dav_handler(dav_interface):
|
|||
except AttributeError:
|
||||
# perhaps the node doesn't support locks
|
||||
cr.close()
|
||||
raise DAV_Error(400, 'No locks for this resource')
|
||||
raise DAV_Error(400, 'No locks for this resource.')
|
||||
|
||||
res = self._try_function(node_fn, (cr, token), "unlock %s" % uri, cr=cr)
|
||||
cr.commit()
|
||||
|
@ -966,7 +966,7 @@ class openerp_dav_handler(dav_interface):
|
|||
dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
|
||||
if not dir_node:
|
||||
cr.close()
|
||||
raise DAV_NotFound('Parent folder not found')
|
||||
raise DAV_NotFound('Parent folder not found.')
|
||||
|
||||
# We create a new node (file) but with empty data=None,
|
||||
# as in RFC4918 p. 9.10.4
|
||||
|
@ -975,7 +975,7 @@ class openerp_dav_handler(dav_interface):
|
|||
if not node:
|
||||
cr.commit()
|
||||
cr.close()
|
||||
raise DAV_Error(400, "Failed to create resource")
|
||||
raise DAV_Error(400, "Failed to create resource.")
|
||||
|
||||
created = True
|
||||
|
||||
|
@ -984,7 +984,7 @@ class openerp_dav_handler(dav_interface):
|
|||
except AttributeError:
|
||||
# perhaps the node doesn't support locks
|
||||
cr.close()
|
||||
raise DAV_Error(400, 'No locks for this resource')
|
||||
raise DAV_Error(400, 'No locks for this resource.')
|
||||
|
||||
# Obtain the lock on the node
|
||||
lres, pid, token = self._try_function(node_fn, (cr, lock_data), "lock %s" % objname, cr=cr)
|
||||
|
@ -992,7 +992,7 @@ class openerp_dav_handler(dav_interface):
|
|||
if not lres:
|
||||
cr.commit()
|
||||
cr.close()
|
||||
raise DAV_Error(423, "Resource already locked")
|
||||
raise DAV_Error(423, "Resource already locked.")
|
||||
|
||||
assert isinstance(lres, list), 'lres: %s' % repr(lres)
|
||||
|
||||
|
|
|
@ -43,7 +43,7 @@ class document_davdir(osv.osv):
|
|||
elif dbro.type == 'ressource':
|
||||
return nodes.node_res_dir
|
||||
else:
|
||||
raise ValueError("dir node for %s type", dbro.type)
|
||||
raise ValueError("Directory node for %s type.", dbro.type)
|
||||
|
||||
def _prepare_context(self, cr, uid, nctx, context=None):
|
||||
nctx.node_file_class = nodes.node_file
|
||||
|
|
|
@ -278,7 +278,7 @@ class addAuthTransport:
|
|||
|
||||
return self._parse_response(h.getfile(), sock, resp)
|
||||
|
||||
raise ProtocolError(host+handler, 403, "No authentication",'')
|
||||
raise ProtocolError(host+handler, 403, "No authentication.",'')
|
||||
|
||||
class PersistentAuthTransport(addAuthTransport,PersistentTransport):
|
||||
pass
|
||||
|
@ -402,7 +402,7 @@ class DAVClient(object):
|
|||
r1 = conn.getresponse()
|
||||
except httplib.BadStatusLine, bsl:
|
||||
log.warning("Bad status line: %s", bsl.line)
|
||||
raise Exception('Bad status line')
|
||||
raise Exception('Bad status line.')
|
||||
if r1.status == 401: # and r1.headers:
|
||||
if 'www-authenticate' in r1.msg:
|
||||
(atype,realm) = r1.msg.getheader('www-authenticate').split(' ',1)
|
||||
|
@ -437,7 +437,7 @@ class DAVClient(object):
|
|||
doc = xml.dom.minidom.parseString(data1)
|
||||
_logger.debug("XML Body:\n %s", doc.toprettyxml(indent="\t"))
|
||||
except Exception:
|
||||
_logger.warning("could not print xml", exc_info=True)
|
||||
_logger.warning("Cannot print XML.", exc_info=True)
|
||||
pass
|
||||
conn.close()
|
||||
return r1.status, r1.msg, data1
|
||||
|
@ -651,7 +651,7 @@ class DAVClient(object):
|
|||
if isinstance(crange, tuple):
|
||||
crange = [crange,]
|
||||
if not isinstance(crange, list):
|
||||
raise TypeError("Range must be a tuple or list of tuples")
|
||||
raise TypeError("Range must be a tuple or list of tuples.")
|
||||
rs = []
|
||||
for r in crange:
|
||||
rs.append('%d-%d' % r)
|
||||
|
@ -689,7 +689,7 @@ class DAVClient(object):
|
|||
"""
|
||||
hdrs = { }
|
||||
if not (body or srcpath):
|
||||
raise ValueError("PUT must have something to send")
|
||||
raise ValueError("PUT must have something to send.")
|
||||
if (not body) and srcpath:
|
||||
fd = open(srcpath, 'rb')
|
||||
body = fd.read()
|
||||
|
|
|
@ -65,7 +65,7 @@ class Prop2xml(object):
|
|||
|
||||
def createText2Node(self, data):
|
||||
if not isinstance(data, StringTypes):
|
||||
raise TypeError, "node contents must be a string"
|
||||
raise TypeError, "Node contents must be a string."
|
||||
t = Text2()
|
||||
t.data = data
|
||||
t.ownerDocument = self.doc
|
||||
|
|
|
@ -119,7 +119,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
|
|||
if up.path.startswith(self.davpath):
|
||||
self.headers['Destination'] = up.path[len(self.davpath):]
|
||||
else:
|
||||
raise DAV_Forbidden("Not allowed to copy/move outside webdav path")
|
||||
raise DAV_Forbidden("Not allowed to copy/move outside webdav path.")
|
||||
# TODO: locks
|
||||
DAVRequestHandler.copymove(self, CLASS)
|
||||
|
||||
|
@ -338,7 +338,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
|
|||
if isinstance(ldif, list):
|
||||
if len(ldif) !=1 or (not isinstance(ldif[0], TagList)) \
|
||||
or len(ldif[0].list) != 1:
|
||||
raise DAV_Error(400, "Cannot accept multiple tokens")
|
||||
raise DAV_Error(400, "Cannot accept multiple tokens.")
|
||||
ldif = ldif[0].list[0]
|
||||
if ldif[0] == '<' and ldif[-1] == '>':
|
||||
ldif = ldif[1:-1]
|
||||
|
@ -352,7 +352,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
|
|||
lock_data.update(self._lock_unlock_parse(body))
|
||||
|
||||
if lock_data['refresh'] and not lock_data.get('token', False):
|
||||
raise DAV_Error(400, 'Lock refresh must specify token')
|
||||
raise DAV_Error(400, 'Lock refresh must specify token.')
|
||||
|
||||
lock_data['depth'] = depth
|
||||
|
||||
|
|
|
@ -40,7 +40,7 @@ class edi(netsvc.ExportService):
|
|||
res = getattr(edi_document, method_name)(cr, *method_args)
|
||||
cr.commit()
|
||||
except Exception:
|
||||
_logger.exception('Failed to execute EDI method %s with args %r', method_name, method_args)
|
||||
_logger.exception('Failed to execute EDI method %s with args %r.', method_name, method_args)
|
||||
raise
|
||||
finally:
|
||||
cr.close()
|
||||
|
@ -63,7 +63,7 @@ class edi(netsvc.ExportService):
|
|||
# No security check for these methods
|
||||
pass
|
||||
else:
|
||||
raise KeyError("Method not found: %s" % method)
|
||||
raise KeyError("Method not found: %s." % method)
|
||||
fn = getattr(self, 'exp_'+method)
|
||||
return fn(*params)
|
||||
|
||||
|
|
|
@ -118,7 +118,7 @@ msgstr ""
|
|||
#. module: edi
|
||||
#: code:addons/edi/models/edi.py:152
|
||||
#, python-format
|
||||
msgid "Missing Application"
|
||||
msgid "Missing Application !"
|
||||
msgstr ""
|
||||
|
||||
#. module: edi
|
||||
|
|
|
@ -129,7 +129,7 @@ class edi_document(osv.osv):
|
|||
_logger.debug("get_document(%s)", edi_token)
|
||||
edi_ids = self.search(cr, uid, [('name','=', edi_token)], context=context)
|
||||
if not edi_ids:
|
||||
raise ValueError('Invalid EDI token: %s' % edi_token)
|
||||
raise ValueError('Invalid EDI token: %s.' % edi_token)
|
||||
edi = self.browse(cr, uid, edi_ids[0], context=context)
|
||||
return edi.document
|
||||
|
||||
|
@ -147,16 +147,16 @@ class edi_document(osv.osv):
|
|||
res = []
|
||||
for edi_document in edi_documents:
|
||||
module = edi_document.get('__import_module') or edi_document.get('__module')
|
||||
assert module, 'a `__module` or `__import_module` attribute is required in each EDI document'
|
||||
assert module, 'a `__module` or `__import_module` attribute is required in each EDI document.'
|
||||
if module != 'base' and not ir_module.search(cr, uid, [('name','=',module),('state','=','installed')]):
|
||||
raise osv.except_osv(_('Missing Application'),
|
||||
raise osv.except_osv(_('Missing application.'),
|
||||
_("The document you are trying to import requires the OpenERP `%s` application. "
|
||||
"You can install it by connecting as the administrator and opening the configuration assistant.")%(module,))
|
||||
model = edi_document.get('__import_model') or edi_document.get('__model')
|
||||
assert model, 'a `__model` or `__import_model` attribute is required in each EDI document'
|
||||
assert model, 'a `__model` or `__import_model` attribute is required in each EDI document.'
|
||||
model_obj = self.pool.get(model)
|
||||
assert model_obj, 'model `%s` cannot be found, despite module `%s` being available - '\
|
||||
'this EDI document seems invalid or unsupported' % (model,module)
|
||||
'this EDI document seems invalid or unsupported.' % (model,module)
|
||||
record_id = model_obj.edi_import(cr, uid, edi_document, context=context)
|
||||
record_action = model_obj._edi_record_display_action(cr, uid, record_id, context=context)
|
||||
res.append((model, record_id, record_action))
|
||||
|
@ -202,7 +202,7 @@ class edi_document(osv.osv):
|
|||
may be retrieved, without authentication.
|
||||
"""
|
||||
if edi_url:
|
||||
assert not edi_document, 'edi_document must not be provided if edi_url is given'
|
||||
assert not edi_document, 'edi_document must not be provided if edi_url is given.'
|
||||
edi_document = urllib2.urlopen(edi_url).read()
|
||||
assert edi_document, 'EDI Document is empty!'
|
||||
edi_documents = self.deserialize(edi_document)
|
||||
|
@ -219,7 +219,7 @@ class EDIMixin(object):
|
|||
model_name = edi_document.get('__imported_model') or edi_document.get('__model') or self._name
|
||||
for attribute in attributes:
|
||||
assert edi_document.get(attribute),\
|
||||
'Attribute `%s` is required in %s EDI documents' % (attribute, model_name)
|
||||
'Attribute `%s` is required in %s EDI documents.' % (attribute, model_name)
|
||||
|
||||
# private method, not RPC-exposed as it creates ir.model.data entries as
|
||||
# SUPERUSER based on its parameters
|
||||
|
@ -261,7 +261,7 @@ class EDIMixin(object):
|
|||
ext_id = existing_id or safe_unique_id(db_uuid, record._name, record.id)
|
||||
# ID is unique cross-db thanks to db_uuid (already included in existing_module)
|
||||
module = existing_module or "%s:%s" % (record._original_module, db_uuid)
|
||||
_logger.debug("%s: Generating new external ID `%s.%s` for %r", self._name,
|
||||
_logger.debug("%s: Generating new external ID `%s.%s` for %r.", self._name,
|
||||
module, ext_id, record)
|
||||
ir_model_data.create(cr, openerp.SUPERUSER_ID,
|
||||
{'name': ext_id,
|
||||
|
@ -276,7 +276,7 @@ class EDIMixin(object):
|
|||
# this could happen for data records defined in a module that depends
|
||||
# on the module that owns the model, e.g. purchase defines
|
||||
# product.pricelist records.
|
||||
_logger.debug('Mismatching module: expected %s, got %s, for %s',
|
||||
_logger.debug('Mismatching module: expected %s, got %s, for %s.',
|
||||
module, record._original_module, record)
|
||||
# ID is unique cross-db thanks to db_uuid
|
||||
module = "%s:%s" % (module, db_uuid)
|
||||
|
@ -464,12 +464,12 @@ class EDIMixin(object):
|
|||
local_cr = db.cursor()
|
||||
web_root_url = self.pool.get('ir.config_parameter').get_param(local_cr, uid, 'web.base.url')
|
||||
if not web_root_url:
|
||||
_logger.warning('Ignoring EDI mail notification, web.base.url not defined in parameters')
|
||||
_logger.warning('Ignoring EDI mail notification, web.base.url is not defined in parameters.')
|
||||
return
|
||||
mail_tmpl = self._edi_get_object_by_external_id(local_cr, uid, template_ext_id, 'email.template', context=context)
|
||||
if not mail_tmpl:
|
||||
# skip EDI export if the template was not found
|
||||
_logger.warning('Ignoring EDI mail notification, template %s cannot be located', template_ext_id)
|
||||
_logger.warning('Ignoring EDI mail notification, template %s cannot be located.', template_ext_id)
|
||||
return
|
||||
for edi_record in self.browse(local_cr, uid, ids, context=context):
|
||||
edi_token = self.pool.get('edi.document').export_edi(local_cr, uid, [edi_record], context = context)[0]
|
||||
|
@ -533,10 +533,10 @@ class EDIMixin(object):
|
|||
file_data = base64.b64decode(attachment.get('content'))
|
||||
except TypeError:
|
||||
pass
|
||||
assert file_data, 'Incorrect/Missing attachment file content'
|
||||
assert attachment.get('name'), 'Incorrect/Missing attachment name'
|
||||
assert attachment.get('file_name'), 'Incorrect/Missing attachment file name'
|
||||
assert attachment.get('file_name'), 'Incorrect/Missing attachment file name'
|
||||
assert file_data, 'Incorrect/Missing attachment file content.'
|
||||
assert attachment.get('name'), 'Incorrect/Missing attachment name.'
|
||||
assert attachment.get('file_name'), 'Incorrect/Missing attachment file name.'
|
||||
assert attachment.get('file_name'), 'Incorrect/Missing attachment file name.'
|
||||
ir_attachment.create(cr, uid, {'name': attachment['name'],
|
||||
'datas_fname': attachment['file_name'],
|
||||
'res_model': self._name,
|
||||
|
@ -593,12 +593,12 @@ class EDIMixin(object):
|
|||
target = self._edi_get_object_by_external_id(cr, uid, external_id, model, context=context)
|
||||
need_new_ext_id = False
|
||||
if not target:
|
||||
_logger.debug("%s: Importing EDI relationship [%r,%r] - ID not found, trying name_get",
|
||||
_logger.debug("%s: Importing EDI relationship [%r,%r] - ID not found, trying name_get.",
|
||||
self._name, external_id, value)
|
||||
target = self._edi_get_object_by_name(cr, uid, value, model, context=context)
|
||||
need_new_ext_id = True
|
||||
if not target:
|
||||
_logger.debug("%s: Importing EDI relationship [%r,%r] - name not found, creating it!",
|
||||
_logger.debug("%s: Importing EDI relationship [%r,%r] - name not found, creating it.",
|
||||
self._name, external_id, value)
|
||||
# also need_new_ext_id here, but already been set above
|
||||
model = self.pool.get(model)
|
||||
|
@ -622,7 +622,7 @@ class EDIMixin(object):
|
|||
"""
|
||||
assert self._name == edi_document.get('__import_model') or \
|
||||
('__import_model' not in edi_document and self._name == edi_document.get('__model')), \
|
||||
"EDI Document Model and current model do not match: '%s' (EDI) vs '%s' (current)" % \
|
||||
"EDI Document Model and current model do not match: '%s' (EDI) vs '%s' (current)." % \
|
||||
(edi_document['__model'], self._name)
|
||||
|
||||
# First check the record is now already known in the database, in which case it is ignored
|
||||
|
@ -640,12 +640,12 @@ class EDIMixin(object):
|
|||
continue
|
||||
field_info = self._all_columns.get(field_name)
|
||||
if not field_info:
|
||||
_logger.warning('Ignoring unknown field `%s` when importing `%s` EDI document', field_name, self._name)
|
||||
_logger.warning('Ignoring unknown field `%s` when importing `%s` EDI document.', field_name, self._name)
|
||||
continue
|
||||
field = field_info.column
|
||||
# skip function/related fields
|
||||
if isinstance(field, fields.function):
|
||||
_logger.warning("Unexpected function field value found in '%s' EDI document: '%s'" % (self._name, field_name))
|
||||
_logger.warning("Unexpected function field value is found in '%s' EDI document: '%s'." % (self._name, field_name))
|
||||
continue
|
||||
relation_model = field._obj
|
||||
if field._type == 'many2one':
|
||||
|
|
|
@ -103,7 +103,7 @@ class event_event(osv.osv):
|
|||
for self.event in self.browse(cr, uid, ids, context=context):
|
||||
total_confirmed = self.event.register_current
|
||||
if total_confirmed < self.event.register_min or total_confirmed > self.event.register_max and self.event.register_max!=0:
|
||||
raise osv.except_osv(_('Error!'),_("The total of confirmed registration for the event '%s' does not meet the expected minimum/maximum. You should maybe reconsider those limits before going further") % (self.event.name))
|
||||
raise osv.except_osv(_('Error!'),_("The total of confirmed registration for the event '%s' does not meet the expected minimum/maximum. Please reconsider those limits before going further.") % (self.event.name))
|
||||
|
||||
def check_registration_limits_before(self, cr, uid, ids, no_of_registration, context=None):
|
||||
for event in self.browse(cr, uid, ids, context=context):
|
||||
|
@ -366,7 +366,7 @@ class event_registration(osv.osv):
|
|||
self.write(cr, uid, ids, values)
|
||||
self.message_append(cr, uid, ids, _('State set to Done'), body_text=_('Done'))
|
||||
else:
|
||||
raise osv.except_osv(_('Error!'),_("You must wait the event starting day to do this action.") )
|
||||
raise osv.except_osv(_('Error!'),_("You must wait for the starting day of the event to do this action.") )
|
||||
return True
|
||||
|
||||
def button_reg_cancel(self, cr, uid, ids, context=None, *args):
|
||||
|
|
|
@ -54,7 +54,7 @@ class event_moodle(osv.osv):
|
|||
"""
|
||||
moodle_config_wiz_ids = self.search(cr, uid, [], context=context)
|
||||
if not moodle_config_wiz_ids:
|
||||
raise osv.except_osv(('Error!'),("Configure your moodle connexion before"))
|
||||
raise osv.except_osv(('Error!'),("First configure your moodle connection."))
|
||||
return moodle_config_wiz_ids[0]
|
||||
|
||||
def make_url(self, cr, uid, ids, context=None):
|
||||
|
@ -132,7 +132,7 @@ class event_moodle(osv.osv):
|
|||
"""
|
||||
if email:
|
||||
if (email.count('@') != 1 and email.count('.') < 1):
|
||||
raise osv.except_osv(_('Error!'),_("Your email '%s' is wrong") % (email))
|
||||
raise osv.except_osv(_('Error!'),_("Your email '%s' is wrong.") % (email))
|
||||
|
||||
def make_username(self, username, response_courses):
|
||||
"""
|
||||
|
|
|
@ -50,5 +50,5 @@
|
|||
order = self.pool.get('sale.order').browse(cr, uid,order_id)
|
||||
registration_ids = self.search(cr,uid,[('origin','=',order.name)])
|
||||
if registration_ids == []:
|
||||
raise osv.except_osv(_('Error!'),_("The registration is not created"))
|
||||
raise osv.except_osv(_('Error!'),_("The registration is not created."))
|
||||
|
||||
|
|
|
@ -151,8 +151,8 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS
|
|||
connection = server.connect()
|
||||
server.write({'state':'done'})
|
||||
except Exception, e:
|
||||
_logger.exception("Failed to connect to %s server %s", server.type, server.name)
|
||||
raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e))
|
||||
_logger.exception("Failed to connect to %s server %s.", server.type, server.name)
|
||||
raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s.") % tools.ustr(e))
|
||||
finally:
|
||||
try:
|
||||
if connection:
|
||||
|
@ -204,7 +204,7 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS
|
|||
count += 1
|
||||
_logger.info("fetched/processed %s email(s) on %s server %s", count, server.type, server.name)
|
||||
except Exception, e:
|
||||
_logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
|
||||
_logger.exception("Failed to fetch mail from %s server %s.", server.type, server.name)
|
||||
finally:
|
||||
if imap_server:
|
||||
imap_server.close()
|
||||
|
@ -231,7 +231,7 @@ openerp_mailgate.py -u %(uid)d -p PASSWORD -o %(model)s -d %(dbname)s --host=HOS
|
|||
cr.commit()
|
||||
_logger.info("fetched/processed %s email(s) on %s server %s", numMsgs, server.type, server.name)
|
||||
except Exception, e:
|
||||
_logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
|
||||
_logger.exception("Failed to fetch mail from %s server %s.", server.type, server.name)
|
||||
finally:
|
||||
if pop_server:
|
||||
pop_server.quit()
|
||||
|
|
|
@ -140,7 +140,7 @@ msgstr ""
|
|||
#: code:addons/fetchmail/fetchmail.py:155
|
||||
#, python-format
|
||||
msgid "Here is what we got instead:\n"
|
||||
" %s"
|
||||
" %s."
|
||||
msgstr ""
|
||||
|
||||
#. module: fetchmail
|
||||
|
|
|
@ -106,7 +106,7 @@ msgstr ""
|
|||
#. module: google_base_account
|
||||
#: code:addons/google_base_account/wizard/google_login.py:75
|
||||
#, python-format
|
||||
msgid "Authentication fail check the user and password !"
|
||||
msgid "Authentication failed. Check the user and password !"
|
||||
msgstr ""
|
||||
|
||||
#. module: google_base_account
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue