[MERGE] exception and warning messages improvement

bzr revid: abo@openerp.com-20120808130608-tnk060lr543nqn2n
Antonin Bourguignon 2012-08-08 15:06:08 +02:00
commit 54bbe3d78b
235 changed files with 2070 additions and 2079 deletions
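The pattern applied across these hunks is consistent: exception titles lose the stray space before the exclamation mark ('Error !' becomes 'Error!'), 'can not' becomes 'cannot', 'desactivate' becomes 'deactivate', and message bodies end with a period rather than an exclamation mark. A schematic sketch of the before/after shape, assuming the usual 6.1-era addon imports (the hunks below do not show them); the helper name is illustrative only:

from osv import osv
from tools.translate import _

def _forbid_deactivation(account_has_journal_items):
    # Old style: osv.except_osv(_('Error !'), _('You can not desactivate an account that contains some journal items.'))
    # New style: title without the stray space, "cannot", "deactivate", and a closing period.
    if account_has_journal_items:
        raise osv.except_osv(
            _('Error!'),
            _('You cannot deactivate an account that contains journal items.'))
    return True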


@ -125,7 +125,7 @@ class account_payment_term_line(osv.osv):
return True
_constraints = [
(_check_percent, 'Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2% ', ['value_amount']),
(_check_percent, 'Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2%.', ['value_amount']),
]
account_payment_term_line()
@ -136,7 +136,7 @@ class account_account_type(osv.osv):
def _get_current_report_type(self, cr, uid, ids, name, arg, context=None):
obj_data = self.pool.get('ir.model.data')
obj_financial_report = self.pool.get('account.financial.report')
obj_financial_report = self.pool.get('account.financial.report')
res = {}
financial_report_ref = {
'asset': obj_financial_report.browse(cr, uid, obj_data.get_object_reference(cr, uid, 'account','account_financial_report_assets0')[1], context=context),
@ -154,7 +154,7 @@ class account_account_type(osv.osv):
def _save_report_type(self, cr, uid, account_type_id, field_name, field_value, arg, context=None):
obj_data = self.pool.get('ir.model.data')
obj_financial_report = self.pool.get('account.financial.report')
obj_financial_report = self.pool.get('account.financial.report')
#unlink if it exists somewhere in the financial reports related to BS or PL
financial_report_ref = {
'asset': obj_financial_report.browse(cr, uid, obj_data.get_object_reference(cr, uid, 'account','account_financial_report_assets0')[1], context=context),
@ -179,7 +179,7 @@ class account_account_type(osv.osv):
'Balance' will generally be used for cash accounts.
'Detail' will copy each existing journal item of the previous year, even the reconciled ones.
'Unreconciled' will copy only the journal items that were unreconciled on the first day of the new fiscal year."""),
'report_type': fields.function(_get_current_report_type, fnct_inv=_save_report_type, type='selection', string='P&L / BS Category',
'report_type': fields.function(_get_current_report_type, fnct_inv=_save_report_type, type='selection', string='P&L / BS Category',
selection= [('none','/'),
('income', _('Profit & Loss (Income account)')),
('expense', _('Profit & Loss (Expense account)')),
@ -404,12 +404,12 @@ class account_account(osv.osv):
journal_obj = self.pool.get('account.journal')
jids = journal_obj.search(cr, uid, [('type','=','situation'),('centralisation','=',1),('company_id','=',account.company_id.id)], context=context)
if not jids:
raise osv.except_osv(_('Error!'),_("You need an Opening journal with centralisation checked to set the initial balance!"))
raise osv.except_osv(_('Error!'),_("You need an Opening journal with centralisation checked to set the initial balance."))
period_obj = self.pool.get('account.period')
pids = period_obj.search(cr, uid, [('special','=',True),('company_id','=',account.company_id.id)], context=context)
if not pids:
raise osv.except_osv(_('Error!'),_("No opening/closing period defined, please create one to set the initial balance!"))
raise osv.except_osv(_('Error!'),_("There is no opening/closing period defined, please create one to set the initial balance."))
move_obj = self.pool.get('account.move.line')
move_id = move_obj.search(cr, uid, [
@ -426,7 +426,7 @@ class account_account(osv.osv):
}, context=context)
else:
if diff<0.0:
raise osv.except_osv(_('Error!'),_("Unable to adapt the initial balance (negative value)!"))
raise osv.except_osv(_('Error!'),_("Unable to adapt the initial balance (negative value)."))
nameinv = (name=='credit' and 'debit') or 'credit'
move_id = move_obj.create(cr, uid, {
'name': _('Opening Balance'),
@ -541,9 +541,9 @@ class account_account(osv.osv):
return True
_constraints = [
(_check_recursion, 'Error ! You can not create recursive accounts.', ['parent_id']),
(_check_type, 'Configuration Error! \nYou can not define children to an account with internal type different of "View"! ', ['type']),
(_check_account_type, 'Configuration Error! \nYou can not select an account type with a deferral method different of "Unreconciled" for accounts with internal type "Payable/Receivable"! ', ['user_type','type']),
(_check_recursion, 'Error!\nYou cannot create recursive accounts.', ['parent_id']),
(_check_type, 'Configuration Error!\nYou cannot define children to an account with internal type different of "View".', ['type']),
(_check_account_type, 'Configuration Error!\nYou cannot select an account type with a deferral method different of "Unreconciled" for accounts with internal type "Payable/Receivable".', ['user_type','type']),
]
_sql_constraints = [
('code_company_uniq', 'unique (code,company_id)', 'The code of the account must be unique per company !')
@ -619,14 +619,14 @@ class account_account(osv.osv):
if line_obj.search(cr, uid, [('account_id', 'in', account_ids)]):
if method == 'write':
raise osv.except_osv(_('Error !'), _('You can not desactivate an account that contains some journal items.'))
raise osv.except_osv(_('Error!'), _('You cannot deactivate an account that contains journal items.'))
elif method == 'unlink':
raise osv.except_osv(_('Error !'), _('You can not remove an account containing journal items.'))
raise osv.except_osv(_('Error!'), _('You cannot remove an account that contains journal items.'))
#Checking whether the account is set as a property to any Partner or not
value = 'account.account,' + str(ids[0])
partner_prop_acc = self.pool.get('ir.property').search(cr, uid, [('value_reference','=',value)], context=context)
if partner_prop_acc:
raise osv.except_osv(_('Warning !'), _('You can not remove/desactivate an account which is set on a customer or supplier.'))
raise osv.except_osv(_('Warning!'), _('You cannot remove/deactivate an account which is set on a customer or supplier.'))
return True
def _check_allow_type_change(self, cr, uid, ids, new_type, context=None):
@ -639,10 +639,10 @@ class account_account(osv.osv):
if line_obj.search(cr, uid, [('account_id', 'in', account_ids)]):
#Check for 'Closed' type
if old_type == 'closed' and new_type !='closed':
raise osv.except_osv(_('Warning !'), _("You cannot change the type of account from 'Closed' to any other type which contains journal items!"))
raise osv.except_osv(_('Warning!'), _("You cannot change the type of account from 'Closed' to any other type which contains journal items!"))
#Check for change From group1 to group2 and vice versa
if (old_type in group1 and new_type in group2) or (old_type in group2 and new_type in group1):
raise osv.except_osv(_('Warning !'), _("You cannot change the type of account from '%s' to '%s' type as it contains journal items!") % (old_type,new_type,))
raise osv.except_osv(_('Warning!'), _("You cannot change the type of account from '%s' to '%s' type as it contains journal items!") % (old_type,new_type,))
return True
def write(self, cr, uid, ids, vals, context=None):
@ -661,7 +661,7 @@ class account_account(osv.osv):
# Allow the write if the value is the same
for i in [i['company_id'][0] for i in self.read(cr,uid,ids,['company_id'])]:
if vals['company_id']!=i:
raise osv.except_osv(_('Warning !'), _('You cannot change the owner company of an account that already contains journal items.'))
raise osv.except_osv(_('Warning!'), _('You cannot change the owner company of an account that already contains journal items.'))
if 'active' in vals and not vals['active']:
self._check_moves(cr, uid, ids, "write", context=context)
if 'type' in vals.keys():
@ -768,7 +768,7 @@ class account_journal(osv.osv):
return True
_constraints = [
(_check_currency, 'Configuration error! The currency chosen should be shared by the default accounts too.', ['currency','default_debit_account_id','default_credit_account_id']),
(_check_currency, 'Configuration error!\nThe currency chosen should be shared by the default accounts too.', ['currency','default_debit_account_id','default_credit_account_id']),
]
def copy(self, cr, uid, id, default={}, context=None, done_list=[], local=False):
@ -790,7 +790,7 @@ class account_journal(osv.osv):
if 'company_id' in vals and journal.company_id.id != vals['company_id']:
move_lines = self.pool.get('account.move.line').search(cr, uid, [('journal_id', 'in', ids)])
if move_lines:
raise osv.except_osv(_('Warning !'), _('You can not modify the company of this journal as its related record exist in journal items'))
raise osv.except_osv(_('Warning!'), _('This journal already contains items, therefore you cannot modify its company field.'))
return super(account_journal, self).write(cr, uid, ids, vals, context=context)
def create_sequence(self, cr, uid, vals, context=None):
@ -915,7 +915,7 @@ class account_fiscalyear(osv.osv):
return True
_constraints = [
(_check_duration, 'Error! The start date of the fiscal year must be before his end date.', ['date_start','date_stop'])
(_check_duration, 'Error!\nThe start date of a fiscal year must precede its end date.', ['date_start','date_stop'])
]
def create_period3(self, cr, uid, ids, context=None):
@ -966,7 +966,7 @@ class account_fiscalyear(osv.osv):
ids = self.search(cr, uid, args, context=context)
if not ids:
if exception:
raise osv.except_osv(_('Error !'), _('No fiscal year defined for this date !\nPlease create one from the configuration of the accounting menu.'))
raise osv.except_osv(_('Error!'), _('There is no fiscal year defined for this date.\nPlease create one from the configuration of the accounting menu.'))
else:
return []
return ids
@ -1032,8 +1032,8 @@ class account_period(osv.osv):
return True
_constraints = [
(_check_duration, 'Error ! The duration of the Period(s) is/are invalid. ', ['date_stop']),
(_check_year_limit, 'Invalid period ! Some periods overlap or the date period is not in the scope of the fiscal year. ', ['date_stop'])
(_check_duration, 'Error!\nThe duration of the Period(s) is/are invalid.', ['date_stop']),
(_check_year_limit, 'Error!\nThe period is invalid. Either some periods are overlapping or the period\'s dates are not matching the scope of the fiscal year.', ['date_stop'])
]
def next(self, cr, uid, period, step, context=None):
@ -1055,7 +1055,7 @@ class account_period(osv.osv):
args.append(('company_id', '=', company_id))
ids = self.search(cr, uid, args, context=context)
if not ids:
raise osv.except_osv(_('Error !'), _('No period defined for this date: %s !\nPlease create one.')%dt)
raise osv.except_osv(_('Error!'), _('There is no period defined for this date: %s.\nPlease create one.')%dt)
return ids
def action_draft(self, cr, uid, ids, *args):
@ -1080,7 +1080,7 @@ class account_period(osv.osv):
if 'company_id' in vals:
move_lines = self.pool.get('account.move.line').search(cr, uid, [('period_id', 'in', ids)])
if move_lines:
raise osv.except_osv(_('Warning !'), _('You can not modify company of this period as some journal items exists.'))
raise osv.except_osv(_('Warning!'), _('This journal already contains items for this period, therefore you cannot modify its company field.'))
return super(account_period, self).write(cr, uid, ids, vals, context=context)
def build_ctx_periods(self, cr, uid, period_from_id, period_to_id):
@ -1093,9 +1093,9 @@ class account_period(osv.osv):
period_date_stop = period_to.date_stop
company2_id = period_to.company_id.id
if company1_id != company2_id:
raise osv.except_osv(_('Error'), _('You should have chosen periods that belongs to the same company'))
raise osv.except_osv(_('Error!'), _('You should choose the periods that belong to the same company.'))
if period_date_start > period_date_stop:
raise osv.except_osv(_('Error'), _('Start period should be smaller then End period'))
raise osv.except_osv(_('Error!'), _('The start period should precede the end period.'))
#for period from = january, we want to exclude the opening period (but it has same date_from, so we have to check if period_from is special or not to include that clause or not in the search).
if period_from.special:
return self.search(cr, uid, [('date_start', '>=', period_date_start), ('date_stop', '<=', period_date_stop), ('company_id', '=', company1_id)])
@ -1134,7 +1134,7 @@ class account_journal_period(osv.osv):
cr.execute('select * from account_move_line where journal_id=%s and period_id=%s limit 1', (obj.journal_id.id, obj.period_id.id))
res = cr.fetchall()
if res:
raise osv.except_osv(_('Error !'), _('You can not modify/delete a journal with entries for this period !'))
raise osv.except_osv(_('Error!'), _('You cannot modify/delete a journal with entries for this period.'))
return True
def write(self, cr, uid, ids, vals, context=None):
@ -1303,7 +1303,7 @@ class account_move(osv.osv):
_constraints = [
(_check_centralisation,
'You can not create more than one move per period on centralized journal',
'You cannot create more than one move per period on a centralized journal.',
['journal_id']),
]
@ -1314,7 +1314,7 @@ class account_move(osv.osv):
valid_moves = self.validate(cr, uid, ids, context)
if not valid_moves:
raise osv.except_osv(_('Integrity Error !'), _('You can not validate a non-balanced entry !\nMake sure you have configured payment terms properly !\nThe latest payment term line should be of the type "Balance" !'))
raise osv.except_osv(_('Error!'), _('You cannot validate a non-balanced entry.\nMake sure you have configured payment terms properly.\nThe latest payment term line should be of the "Balance" type.'))
obj_sequence = self.pool.get('ir.sequence')
for move in self.browse(cr, uid, valid_moves, context=context):
if move.name =='/':
@ -1328,7 +1328,7 @@ class account_move(osv.osv):
c = {'fiscalyear_id': move.period_id.fiscalyear_id.id}
new_name = obj_sequence.next_by_id(cr, uid, journal.sequence_id.id, c)
else:
raise osv.except_osv(_('Error'), _('No sequence defined on the journal !'))
raise osv.except_osv(_('Error!'), _('Please define a sequence on the journal.'))
if new_name:
self.write(cr, uid, [move.id], {'name':new_name})
@ -1351,14 +1351,14 @@ class account_move(osv.osv):
if not top_common:
top_common = top_account
elif top_account.id != top_common.id:
raise osv.except_osv(_('Error !'),
_('You cannot validate this journal entry because account "%s" does not belong to chart of accounts "%s"!') % (account.name, top_common.name))
raise osv.except_osv(_('Error!'),
_('You cannot validate this journal entry because account "%s" does not belong to chart of accounts "%s".') % (account.name, top_common.name))
return self.post(cursor, user, ids, context=context)
def button_cancel(self, cr, uid, ids, context=None):
for line in self.browse(cr, uid, ids, context=context):
if not line.journal_id.update_posted:
raise osv.except_osv(_('Error !'), _('You can not modify a posted entry of this journal !\nYou should set the journal to allow cancelling entries if you want to do that.'))
raise osv.except_osv(_('Error!'), _('You cannot modify a posted entry of this journal.\nFirst you should set the journal to allow cancelling entries.'))
if ids:
cr.execute('UPDATE account_move '\
'SET state=%s '\
@ -1445,8 +1445,8 @@ class account_move(osv.osv):
obj_move_line = self.pool.get('account.move.line')
for move in self.browse(cr, uid, ids, context=context):
if move['state'] != 'draft':
raise osv.except_osv(_('UserError'),
_('You can not delete a posted journal entry "%s"!') % \
raise osv.except_osv(_('User Error!'),
_('You cannot delete a posted journal entry "%s".') % \
move['name'])
line_ids = map(lambda x: x.id, move.line_id)
context['journal_id'] = move.journal_id.id
@ -1474,16 +1474,16 @@ class account_move(osv.osv):
account_id = move.journal_id.default_debit_account_id.id
mode2 = 'debit'
if not account_id:
raise osv.except_osv(_('UserError'),
_('There is no default default debit account defined \n' \
'on journal "%s"') % move.journal_id.name)
raise osv.except_osv(_('User Error!'),
_('There is no default debit account defined \n' \
'on journal "%s".') % move.journal_id.name)
else:
account_id = move.journal_id.default_credit_account_id.id
mode2 = 'credit'
if not account_id:
raise osv.except_osv(_('UserError'),
_('There is no default default credit account defined \n' \
'on journal "%s"') % move.journal_id.name)
raise osv.except_osv(_('User Error!'),
_('There is no default credit account defined \n' \
'on journal "%s".') % move.journal_id.name)
# find the first line of this move with the current mode
# or create it if it doesn't exist
@ -1577,11 +1577,11 @@ class account_move(osv.osv):
if not company_id:
company_id = line.account_id.company_id.id
if not company_id == line.account_id.company_id.id:
raise osv.except_osv(_('Error'), _("Couldn't create move between different companies"))
raise osv.except_osv(_('Error!'), _("Cannot create moves for different companies."))
if line.account_id.currency_id and line.currency_id:
if line.account_id.currency_id.id != line.currency_id.id and (line.account_id.currency_id.id != line.account_id.company_id.currency_id.id):
raise osv.except_osv(_('Error'), _("""Couldn't create move with currency different from the secondary currency of the account "%s - %s". Clear the secondary currency field of the account definition if you want to accept all currencies.""") % (line.account_id.code, line.account_id.name))
raise osv.except_osv(_('Error!'), _("""Cannot create move with currency different from ..""") % (line.account_id.code, line.account_id.name))
if abs(amount) < 10 ** -4:
# If the move is balanced
@ -1833,7 +1833,7 @@ class account_tax_code(osv.osv):
_check_recursion = check_cycle
_constraints = [
(_check_recursion, 'Error ! You can not create recursive accounts.', ['parent_id'])
(_check_recursion, 'Error!\nYou cannot create recursive accounts.', ['parent_id'])
]
_order = 'code'
@ -2118,7 +2118,7 @@ class account_tax(osv.osv):
}
def compute(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None):
_logger.warning("Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included")
_logger.warning("Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included.")
return self._compute(cr, uid, taxes, price_unit, quantity, product, partner)
def _compute(self, cr, uid, taxes, price_unit, quantity, product=None, partner=None, precision=None):
@ -2307,7 +2307,7 @@ class account_model(osv.osv):
date_maturity = context.get('date',time.strftime('%Y-%m-%d'))
if line.date_maturity == 'partner':
if not line.partner_id:
raise osv.except_osv(_('Error !'), _("Maturity date of entry line generated by model line '%s' of model '%s' is based on partner payment term!" \
raise osv.except_osv(_('Error!'), _("Maturity date of entry line generated by model line '%s' of model '%s' is based on partner payment term!" \
"\nPlease define partner on it!")%(line.name, model.name))
if line.partner_id.property_payment_term:
payment_term_id = line.partner_id.property_payment_term.id
@ -2521,8 +2521,8 @@ class account_account_template(osv.osv):
_check_recursion = check_cycle
_constraints = [
(_check_recursion, 'Error ! You can not create recursive account templates.', ['parent_id']),
(_check_type, 'Configuration Error!\nYou can not define children to an account with internal type different of "View"! ', ['type']),
(_check_recursion, 'Error!\nYou cannot create recursive account templates.', ['parent_id']),
(_check_type, 'Configuration Error!\nYou cannot define children to an account that has internal type other than "View".', ['type']),
]
@ -2623,7 +2623,7 @@ class account_add_tmpl_wizard(osv.osv_memory):
ptids = tmpl_obj.read(cr, uid, [tids[0]['parent_id'][0]], ['code'])
res = None
if not ptids or not ptids[0]['code']:
raise osv.except_osv(_('Error !'), _('I can not locate a parent code for the template account!'))
raise osv.except_osv(_('Error!'), _('There is no parent code for the template account.'))
res = acc_obj.search(cr, uid, [('code','=',ptids[0]['code'])])
return res and res[0] or False
@ -2729,7 +2729,7 @@ class account_tax_code_template(osv.osv):
_check_recursion = check_cycle
_constraints = [
(_check_recursion, 'Error ! You can not create recursive Tax Codes.', ['parent_id'])
(_check_recursion, 'Error!\nYou cannot create recursive Tax Codes.', ['parent_id'])
]
_order = 'code,name'
account_tax_code_template()
@ -2742,7 +2742,7 @@ class account_chart_template(osv.osv):
_columns={
'name': fields.char('Name', size=64, required=True),
'parent_id': fields.many2one('account.chart.template', 'Parent Chart Template'),
'code_digits': fields.integer('# of Digits', required=True, help="No. of Digits to use for account code"),
'code_digits': fields.integer('# of Digits', required=True, help="No. of Digits to use for account code"),
'visible': fields.boolean('Can be Visible?', help="Set this to False if you don't want this template to be used actively in the wizard that generate Chart of Accounts from templates, this is useful when you want to generate accounts of this template only when loading its child template."),
'complete_tax_set': fields.boolean('Complete Set of Taxes', help='This boolean helps you to choose if you want to propose to the user to encode the sale and purchase rates or choose from list of taxes. This last choice assumes that the set of tax defined on this template is complete'),
'account_root_id': fields.many2one('account.account.template', 'Root Account', domain=[('parent_id','=',False)]),
@ -3026,7 +3026,7 @@ class wizard_multi_charts_accounts(osv.osv_memory):
return res
def default_get(self, cr, uid, fields, context=None):
res = super(wizard_multi_charts_accounts, self).default_get(cr, uid, fields, context=context)
res = super(wizard_multi_charts_accounts, self).default_get(cr, uid, fields, context=context)
tax_templ_obj = self.pool.get('account.tax.template')
if 'bank_accounts_id' in fields:
@ -3102,7 +3102,7 @@ class wizard_multi_charts_accounts(osv.osv_memory):
# Get the analytic journal
data = False
if journal_type in ('sale', 'sale_refund'):
data = obj_data.get_object_reference(cr, uid, 'account', 'analytic_journal_sale')
data = obj_data.get_object_reference(cr, uid, 'account', 'analytic_journal_sale')
elif journal_type in ('purchase', 'purchase_refund'):
pass
elif journal_type == 'general':
@ -3128,7 +3128,7 @@ class wizard_multi_charts_accounts(osv.osv_memory):
if journal_type in ('general', 'situation'):
data = obj_data.get_object_reference(cr, uid, 'account', 'account_journal_view')
elif journal_type in ('sale_refund', 'purchase_refund'):
data = obj_data.get_object_reference(cr, uid, 'account', 'account_sp_refund_journal_view')
data = obj_data.get_object_reference(cr, uid, 'account', 'account_sp_refund_journal_view')
else:
data = obj_data.get_object_reference(cr, uid, 'account', 'account_sp_journal_view')
return data and data[1] or False
@ -3357,7 +3357,7 @@ class wizard_multi_charts_accounts(osv.osv_memory):
def _prepare_bank_journal(self, cr, uid, line, current_num, default_account_id, company_id, context=None):
'''
This function prepares the value to use for the creation of a bank journal created through the wizard of
This function prepares the value to use for the creation of a bank journal created through the wizard of
generating COA from templates.
:param line: dictionary containing the values encoded by the user related to his bank account
@ -3375,9 +3375,9 @@ class wizard_multi_charts_accounts(osv.osv_memory):
tmp = obj_data.get_object_reference(cr, uid, 'account', 'account_journal_bank_view')
view_id_cash = tmp and tmp[1] or False
# we need to loop again to find next number for journal code
# we need to loop again to find next number for journal code
# because we can't rely on the value current_num as,
# its possible that we already have bank journals created (e.g. by the creation of res.partner.bank)
# its possible that we already have bank journals created (e.g. by the creation of res.partner.bank)
# and the next number for account code might have been already used before for journal
for num in xrange(current_num, 100):
# journal_code has a maximal size of 5, hence we can enforce the boundary num < 100
@ -3386,7 +3386,7 @@ class wizard_multi_charts_accounts(osv.osv_memory):
if not ids:
break
else:
raise osv.except_osv(_('Error'), _('Cannot generate an unused journal code.'))
raise osv.except_osv(_('Error!'), _('Cannot generate an unused journal code.'))
vals = {
'name': line['acc_name'],
@ -3464,7 +3464,7 @@ class wizard_multi_charts_accounts(osv.osv_memory):
journal_data.append(vals)
ref_acc_bank = obj_wizard.chart_template_id.bank_account_view_id
if journal_data and not ref_acc_bank.code:
raise osv.except_osv(_('Configuration Error !'), _('The bank account defined on the selected chart of accounts hasn\'t a code.'))
raise osv.except_osv(_('Configuration Error!'), _('You have to set a code for the bank account defined on the selected chart of accounts.'))
current_num = 1
for line in journal_data:
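Many of the account.py hunks above touch messages declared in _constraints lists. For context, a minimal sketch of that declaration pattern under the OpenERP 6.1 osv API; the model below is illustrative and not part of the module, only the message text is taken from the hunk at the top of the file:

from osv import osv, fields

class example_payment_line(osv.osv):
    _name = 'example.payment.line'
    _columns = {
        'value_amount': fields.float('Amount To Pay'),
    }

    def _check_percent(self, cr, uid, ids, context=None):
        # Constraint checkers return False to trigger the associated message.
        for line in self.browse(cr, uid, ids, context=context):
            if not 0.0 <= line.value_amount <= 1.0:
                return False
        return True

    _constraints = [
        # (checker, message shown to the user, fields that re-run the check)
        (_check_percent,
         'Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2%.',
         ['value_amount']),
    ]

example_payment_line()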


@ -87,18 +87,18 @@ class account_analytic_line(osv.osv):
if not a:
a = prod.categ_id.property_account_expense_categ.id
if not a:
raise osv.except_osv(_('Error !'),
raise osv.except_osv(_('Error!'),
_('There is no expense account defined ' \
'for this product: "%s" (id:%d)') % \
'for this product: "%s" (id:%d).') % \
(prod.name, prod.id,))
else:
a = prod.product_tmpl_id.property_account_income.id
if not a:
a = prod.categ_id.property_account_income_categ.id
if not a:
raise osv.except_osv(_('Error !'),
raise osv.except_osv(_('Error!'),
_('There is no income account defined ' \
'for this product: "%s" (id:%d)') % \
'for this product: "%s" (id:%d).') % \
(prod.name, prod_id,))
flag = False


@ -192,11 +192,11 @@ class account_bank_statement(osv.osv):
'ref': st_line.ref,
}
def _prepare_bank_move_line(self, cr, uid, st_line, move_id, amount, company_currency_id,
def _prepare_bank_move_line(self, cr, uid, st_line, move_id, amount, company_currency_id,
context=None):
"""Compute the args to build the dict of values to create the bank move line from a
statement line by calling the _prepare_move_line_vals. This method may be
overridden to implement custom move generation (making sure to call super() to
statement line by calling the _prepare_move_line_vals. This method may be
overridden to implement custom move generation (making sure to call super() to
establish a clean extension chain).
:param browse_record st_line: account.bank.statement.line record to
@ -219,12 +219,12 @@ class account_bank_statement(osv.osv):
res_currency_obj = self.pool.get('res.currency')
amt_cur = -res_currency_obj.compute(cr, uid, company_currency_id, cur_id, amount, context=context)
res = self._prepare_move_line_vals(cr, uid, st_line, move_id, debit, credit,
res = self._prepare_move_line_vals(cr, uid, st_line, move_id, debit, credit,
amount_currency=amt_cur, currency_id=cur_id, analytic_id=anl_id, context=context)
return res
def _get_counter_part_account(sefl, cr, uid, st_line, context=None):
"""Retrieve the account to use in the counterpart move.
"""Retrieve the account to use in the counterpart move.
This method may be overridden to implement custom move generation (making sure to
call super() to establish a clean extension chain).
@ -237,7 +237,7 @@ class account_bank_statement(osv.osv):
return st_line.statement_id.journal_id.default_debit_account_id.id
def _get_counter_part_partner(sefl, cr, uid, st_line, context=None):
"""Retrieve the partner to use in the counterpart move.
"""Retrieve the partner to use in the counterpart move.
This method may be overridden to implement custom move generation (making sure to
call super() to establish a clean extension chain).
@ -247,11 +247,11 @@ class account_bank_statement(osv.osv):
"""
return st_line.partner_id and st_line.partner_id.id or False
def _prepare_counterpart_move_line(self, cr, uid, st_line, move_id, amount, company_currency_id,
def _prepare_counterpart_move_line(self, cr, uid, st_line, move_id, amount, company_currency_id,
context=None):
"""Compute the args to build the dict of values to create the counter part move line from a
statement line by calling the _prepare_move_line_vals. This method may be
overridden to implement custom move generation (making sure to call super() to
statement line by calling the _prepare_move_line_vals. This method may be
overridden to implement custom move generation (making sure to call super() to
establish a clean extension chain).
:param browse_record st_line: account.bank.statement.line record to
@ -271,12 +271,12 @@ class account_bank_statement(osv.osv):
if st_line.statement_id.currency.id <> company_currency_id:
amt_cur = st_line.amount
cur_id = st_line.statement_id.currency.id
return self._prepare_move_line_vals(cr, uid, st_line, move_id, debit, credit,
return self._prepare_move_line_vals(cr, uid, st_line, move_id, debit, credit,
amount_currency = amt_cur, currency_id = cur_id, account_id = account_id,
partner_id = partner_id, context=context)
def _prepare_move_line_vals(self, cr, uid, st_line, move_id, debit, credit, currency_id = False,
amount_currency= False, account_id = False, analytic_id = False,
amount_currency= False, account_id = False, analytic_id = False,
partner_id = False, context=None):
"""Prepare the dict of values to create the move line from a
statement line. All non-mandatory args will replace the default computed one.
@ -350,12 +350,12 @@ class account_bank_statement(osv.osv):
amount = res_currency_obj.compute(cr, uid, st.currency.id,
company_currency_id, st_line.amount, context=context)
bank_move_vals = self._prepare_bank_move_line(cr, uid, st_line, move_id, amount,
bank_move_vals = self._prepare_bank_move_line(cr, uid, st_line, move_id, amount,
company_currency_id, context=context)
move_line_id = account_move_line_obj.create(cr, uid, bank_move_vals, context=context)
torec.append(move_line_id)
counterpart_move_vals = self._prepare_counterpart_move_line(cr, uid, st_line, move_id,
counterpart_move_vals = self._prepare_counterpart_move_line(cr, uid, st_line, move_id,
amount, company_currency_id, context=context)
account_move_line_obj.create(cr, uid, counterpart_move_vals, context=context)
@ -364,7 +364,7 @@ class account_bank_statement(osv.osv):
context=context).line_id],
context=context):
if line.state <> 'valid':
raise osv.except_osv(_('Error !'),
raise osv.except_osv(_('Error!'),
_('Journal item "%s" is not valid.') % line.name)
# Bank statements will not consider boolean on journal entry_posted
@ -377,7 +377,7 @@ class account_bank_statement(osv.osv):
def balance_check(self, cr, uid, st_id, journal_type='bank', context=None):
st = self.browse(cr, uid, st_id, context=context)
if not ((abs((st.balance_end or 0.0) - st.balance_end_real) < 0.0001) or (abs((st.balance_end or 0.0) - st.balance_end_real) < 0.0001)):
raise osv.except_osv(_('Error !'),
raise osv.except_osv(_('Error!'),
_('The statement balance is incorrect !\nThe expected balance (%.2f) is different than the computed one. (%.2f)') % (st.balance_end_real, st.balance_end))
return True
@ -401,7 +401,7 @@ class account_bank_statement(osv.osv):
self.balance_check(cr, uid, st.id, journal_type=j_type, context=context)
if (not st.journal_id.default_credit_account_id) \
or (not st.journal_id.default_debit_account_id):
raise osv.except_osv(_('Configuration Error !'),
raise osv.except_osv(_('Configuration Error!'),
_('Please verify that an account is defined in the journal.'))
if not st.name == '/':
@ -415,7 +415,7 @@ class account_bank_statement(osv.osv):
for line in st.move_line_ids:
if line.state <> 'valid':
raise osv.except_osv(_('Error !'),
raise osv.except_osv(_('Error!'),
_('The account entries lines are not in valid state.'))
for st_line in st.line_ids:
if st_line.analytic_account_id:
@ -445,7 +445,7 @@ class account_bank_statement(osv.osv):
account_move_obj.unlink(cr, uid, ids, context)
done.append(st.id)
return self.write(cr, uid, done, {'state':'draft'}, context=context)
def _compute_balance_end_real(self, cr, uid, journal_id, context=None):
cr.execute('SELECT balance_end_real \
FROM account_bank_statement \
@ -469,7 +469,7 @@ class account_bank_statement(osv.osv):
if t['state'] in ('draft'):
unlink_ids.append(t['id'])
else:
raise osv.except_osv(_('Invalid action !'), _('In order to delete a bank statement, you must first cancel it to delete related journal items.'))
raise osv.except_osv(_('Invalid Action!'), _('In order to delete a bank statement, you must first cancel it to delete related journal items.'))
osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
return True
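The docstrings changed above present _prepare_bank_move_line, _prepare_counterpart_move_line and _prepare_move_line_vals as hooks that may be overridden for custom move generation, provided super() is called to keep a clean extension chain. A minimal sketch of such an override (the adjustment made to vals is purely illustrative):

from osv import osv

class account_bank_statement(osv.osv):
    _inherit = 'account.bank.statement'

    def _prepare_bank_move_line(self, cr, uid, st_line, move_id, amount,
                                company_currency_id, context=None):
        # Let the base implementation build the standard values first.
        vals = super(account_bank_statement, self)._prepare_bank_move_line(
            cr, uid, st_line, move_id, amount, company_currency_id, context=context)
        # Then adjust them, e.g. tag the journal item label with the statement line reference.
        if st_line.ref:
            vals['name'] = '%s [%s]' % (vals.get('name', '/'), st_line.ref)
        return vals

account_bank_statement()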


@ -238,7 +238,7 @@ class account_cash_statement(osv.osv):
for statement in statement_pool.browse(cr, uid, ids, context=context):
vals = {}
if not self._user_allow(cr, uid, statement.id, context=context):
raise osv.except_osv(_('Error !'), (_('You do not have rights to open this %s journal !') % (statement.journal_id.name, )))
raise osv.except_osv(_('Error!'), (_('You do not have rights to open this %s journal !') % (statement.journal_id.name, )))
if statement.name and statement.name == '/':
c = {'fiscalyear_id': statement.period_id.fiscalyear_id.id}
@ -282,8 +282,8 @@ class account_cash_statement(osv.osv):
for item_label, item_account in TALBES:
if getattr(obj.journal_id, item_account):
raise osv.except_osv(_('Error !'),
_('There is no %s Account on the Journal %s') % (item_label, obj.journal_id.name,))
raise osv.except_osv(_('Error!'),
_('There is no %s Account on the journal %s.') % (item_label, obj.journal_id.name,))
is_profit = obj.difference < 0.0


@ -319,7 +319,7 @@ class account_invoice(osv.osv):
res['fields'][field]['selection'] = journal_select
doc = etree.XML(res['arch'])
if context.get('type', False):
for node in doc.xpath("//field[@name='partner_bank_id']"):
if context['type'] == 'in_refund':
@ -327,7 +327,7 @@ class account_invoice(osv.osv):
elif context['type'] == 'out_refund':
node.set('domain', "[('partner_id', '=', partner_id)]")
res['arch'] = etree.tostring(doc)
if view_type == 'search':
if context.get('type', 'in_invoice') in ('out_invoice', 'out_refund'):
for node in doc.xpath("//group[@name='extended filter']"):
@ -364,15 +364,15 @@ class account_invoice(osv.osv):
except Exception, e:
if '"journal_id" viol' in e.args[0]:
raise orm.except_orm(_('Configuration Error!'),
_('There is no Accounting Journal of type Sale/Purchase defined!'))
_('There is no Sale/Purchase Journal(s) defined.'))
else:
raise orm.except_orm(_('Unknown Error'), str(e))
raise orm.except_orm(_('Unknown Error!'), str(e))
def invoice_print(self, cr, uid, ids, context=None):
'''
This function prints the invoice and mark it as sent, so that we can see more easily the next step of the workflow
'''
assert len(ids) == 1, 'This option should only be used for a single id at a time'
assert len(ids) == 1, 'This option should only be used for a single id at a time.'
self.write(cr, uid, ids, {'sent': True}, context=context)
datas = {
'ids': ids,
@ -425,7 +425,7 @@ class account_invoice(osv.osv):
if t['state'] in ('draft', 'cancel') and t['internal_number']== False:
unlink_ids.append(t['id'])
else:
raise osv.except_osv(_('Invalid action !'), _('You can not delete an invoice which is open or paid. We suggest you to refund it instead.'))
raise osv.except_osv(_('Invalid Action!'), _('You cannot delete an invoice which is open or paid. You should refund it instead.'))
osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
return True
@ -458,8 +458,8 @@ class account_invoice(osv.osv):
rec_res_id = rec_line_data and rec_line_data[0].get('value_reference',False) and int(rec_line_data[0]['value_reference'].split(',')[1]) or False
pay_res_id = pay_line_data and pay_line_data[0].get('value_reference',False) and int(pay_line_data[0]['value_reference'].split(',')[1]) or False
if not rec_res_id and not pay_res_id:
raise osv.except_osv(_('Configuration Error !'),
_('Can not find a chart of accounts for this company, you should create one.'))
raise osv.except_osv(_('Configuration Error!'),
_('Cannot find a chart of accounts for this company, you should create one.'))
account_obj = self.pool.get('account.account')
rec_obj_acc = account_obj.browse(cr, uid, [rec_res_id])
pay_obj_acc = account_obj.browse(cr, uid, [pay_res_id])
@ -521,7 +521,7 @@ class account_invoice(osv.osv):
pterm_list.sort()
res = {'value':{'date_due': pterm_list[-1]}}
else:
raise osv.except_osv(_('Data Insufficient !'), _('The payment term of supplier does not have a payment term line!'))
raise osv.except_osv(_('Insufficient Data!'), _('The payment term of supplier does not have a payment term line.'))
return res
def onchange_invoice_line(self, cr, uid, ids, lines):
@ -553,8 +553,8 @@ class account_invoice(osv.osv):
rec_res_id = rec_line_data and rec_line_data[0].get('value_reference',False) and int(rec_line_data[0]['value_reference'].split(',')[1]) or False
pay_res_id = pay_line_data and pay_line_data[0].get('value_reference',False) and int(pay_line_data[0]['value_reference'].split(',')[1]) or False
if not rec_res_id and not pay_res_id:
raise osv.except_osv(_('Configuration Error !'),
_('Can not find a chart of account, you should create one from the configuration of the accounting menu.'))
raise osv.except_osv(_('Configuration Error!'),
_('Cannot find a chart of account, you should create one from Settings\Configuration\Accounting menu.'))
if type in ('out_invoice', 'out_refund'):
acc_id = rec_res_id
else:
@ -568,16 +568,16 @@ class account_invoice(osv.osv):
if line.account_id.company_id.id != company_id:
result_id = account_obj.search(cr, uid, [('name','=',line.account_id.name),('company_id','=',company_id)])
if not result_id:
raise osv.except_osv(_('Configuration Error !'),
_('Can not find a chart of account, you should create one from the configuration of the accounting menu.'))
raise osv.except_osv(_('Configuration Error!'),
_('Cannot find a chart of account, you should create one from Settings\Configuration\Accounting menu.'))
inv_line_obj.write(cr, uid, [line.id], {'account_id': result_id[-1]})
else:
if invoice_line:
for inv_line in invoice_line:
obj_l = account_obj.browse(cr, uid, inv_line[2]['account_id'])
if obj_l.company_id.id != company_id:
raise osv.except_osv(_('Configuration Error !'),
_('Invoice line account company does not match with invoice company.'))
raise osv.except_osv(_('Configuration Error!'),
_('Invoice line account\'s company and invoice\'s company do not match.'))
else:
continue
if company_id and type:
@ -598,7 +598,7 @@ class account_invoice(osv.osv):
if r[1] == 'journal_id' and r[2] in journal_ids:
val['journal_id'] = r[2]
if not val.get('journal_id', False):
raise osv.except_osv(_('Configuration Error !'), (_('Can\'t find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration\Financial Accounting\Accounts\Journals.') % (journal_type)))
raise osv.except_osv(_('Configuration Error!'), (_('Cannot find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration\Journals\Journals.') % (journal_type)))
dom = {'journal_id': [('id', 'in', journal_ids)]}
else:
journal_ids = obj_journal.search(cr, uid, [])
@ -759,13 +759,13 @@ class account_invoice(osv.osv):
key = (tax.tax_code_id.id, tax.base_code_id.id, tax.account_id.id, tax.account_analytic_id.id)
tax_key.append(key)
if not key in compute_taxes:
raise osv.except_osv(_('Warning !'), _('Global taxes defined, but they are not in invoice lines !'))
raise osv.except_osv(_('Warning!'), _('Global taxes defined, but they are not in invoice lines !'))
base = compute_taxes[key]['base']
if abs(base - tax.base) > inv.company_id.currency_id.rounding:
raise osv.except_osv(_('Warning !'), _('Tax base different!\nClick on compute to update the tax base.'))
raise osv.except_osv(_('Warning!'), _('Tax base different!\nClick on compute to update the tax base.'))
for key in compute_taxes:
if not key in tax_key:
raise osv.except_osv(_('Warning !'), _('Taxes are missing!\nClick on compute button.'))
raise osv.except_osv(_('Warning!'), _('Taxes are missing!\nClick on compute button.'))
def compute_invoice_totals(self, cr, uid, inv, company_currency, ref, invoice_move_lines):
total = 0
@ -835,12 +835,12 @@ class account_invoice(osv.osv):
context = {}
for inv in self.browse(cr, uid, ids, context=context):
if not inv.journal_id.sequence_id:
raise osv.except_osv(_('Error !'), _('Please define sequence on the journal related to this invoice.'))
raise osv.except_osv(_('Error!'), _('Please define sequence on the journal related to this invoice.'))
if not inv.invoice_line:
raise osv.except_osv(_('No Invoice Lines !'), _('Please create some invoice lines.'))
if inv.move_id:
continue
ctx = context.copy()
ctx.update({'lang': inv.partner_id.lang})
if not inv.date_invoice:
@ -866,7 +866,7 @@ class account_invoice(osv.osv):
total_percent += line.value_amount
total_fixed = (total_fixed * 100) / (inv.amount_total or 1.0)
if (total_fixed + total_percent) > 100:
raise osv.except_osv(_('Error !'), _("Can not create the invoice !\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. The latest line of your payment term must be of type 'balance' to avoid rounding issues."))
raise osv.except_osv(_('Error!'), _("Cannot create the invoice.\nThe related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. In order to avoid rounding issues, the latest line of your payment term must be of type 'balance'."))
# one move line per tax line
iml += ait_obj.move_line_get(cr, uid, inv.id)
@ -947,8 +947,8 @@ class account_invoice(osv.osv):
journal_id = inv.journal_id.id
journal = journal_obj.browse(cr, uid, journal_id, context=ctx)
if journal.centralisation:
raise osv.except_osv(_('UserError'),
_('You cannot create an invoice on a centralised journal. Uncheck the centralised counterpart box in the related journal from the configuration menu.'))
raise osv.except_osv(_('User Error!'),
_('You cannot create an invoice on a centralized journal. Uncheck the centralized counterpart box in the related journal from the configuration menu.'))
line = self.finalize_invoice_move_lines(cr, uid, inv, line)
@ -979,7 +979,7 @@ class account_invoice(osv.osv):
move_obj.post(cr, uid, [move_id], context=ctx)
self._log_event(cr, uid, ids)
return True
def invoice_validate(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state':'open'}, context=context)
return True
@ -1061,7 +1061,7 @@ class account_invoice(osv.osv):
pay_ids = account_move_line_obj.browse(cr, uid, i['payment_ids'])
for move_line in pay_ids:
if move_line.reconcile_partial_id and move_line.reconcile_partial_id.line_partial_ids:
raise osv.except_osv(_('Error !'), _('You can not cancel an invoice which is partially paid! You need to unreconcile related payment entries first!'))
raise osv.except_osv(_('Error!'), _('You cannot cancel an invoice which is partially paid. You need to unreconcile related payment entries first.'))
# First, set the invoices as cancelled and detach the move ids
self.write(cr, uid, ids, {'state':'cancel', 'move_id':False})
@ -1187,7 +1187,7 @@ class account_invoice(osv.osv):
if context is None:
context = {}
#TODO check if we can use different period for payment and the writeoff line
assert len(ids)==1, "Can only pay one invoice at a time"
assert len(ids)==1, "Can only pay one invoice at a time."
invoice = self.browse(cr, uid, ids[0], context=context)
src_account_id = invoice.account_id.id
# Take the seq as name for move
@ -1273,7 +1273,7 @@ class account_invoice(osv.osv):
else:
code = invoice.currency_id.symbol
# TODO: use currency's formatting function
msg = _("Invoice '%s' is paid partially: %s%s of %s%s (%s%s remaining)") % \
msg = _("Invoice '%s' is paid partially: %s%s of %s%s (%s%s remaining).") % \
(name, pay_amount, code, invoice.amount_total, code, total, code)
self.message_append_note(cr, uid, [inv_id], body=msg, context=context)
self.pool.get('account.move.line').reconcile_partial(cr, uid, line_ids, 'manual', context)
@ -1281,11 +1281,11 @@ class account_invoice(osv.osv):
# Update the stored value (fields.function), so we write to trigger recompute
self.pool.get('account.invoice').write(cr, uid, ids, {}, context=context)
return True
# -----------------------------------------
# OpenChatter notifications and need_action
# -----------------------------------------
def _get_document_type(self, type):
type_dict = {
'out_invoice': 'Customer invoice',
@ -1294,19 +1294,19 @@ class account_invoice(osv.osv):
'in_refund': 'Supplier Refund',
}
return type_dict.get(type, 'Invoice')
def create_send_note(self, cr, uid, ids, context=None):
for obj in self.browse(cr, uid, ids, context=context):
self.message_append_note(cr, uid, [obj.id],body=_("%s <b>created</b>.") % (self._get_document_type(obj.type)), context=context)
def confirm_paid_send_note(self, cr, uid, ids, context=None):
for obj in self.browse(cr, uid, ids, context=context):
self.message_append_note(cr, uid, [obj.id], body=_("%s <b>paid</b>.") % (self._get_document_type(obj.type)), context=context)
def invoice_cancel_send_note(self, cr, uid, ids, context=None):
for obj in self.browse(cr, uid, ids, context=context):
self.message_append_note(cr, uid, [obj.id], body=_("%s <b>cancelled</b>.") % (self._get_document_type(obj.type)), context=context)
account_invoice()
class account_invoice_line(osv.osv):
@ -1475,7 +1475,7 @@ class account_invoice_line(osv.osv):
if prod.uom_id.category_id.id != prod_uom.category_id.id:
warning = {
'title': _('Warning!'),
'message': _('You selected an Unit of Measure which is not compatible with the product.')
'message': _('The selected unit of measure is not compatible with the unit of measure of the product.')
}
return {'value': res['value'], 'warning': warning}
return res
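The last hunk of this file shows the onchange return convention: besides updated field values, an onchange may return a 'warning' dict that the client displays as a popup with a title and a message. A condensed sketch of that shape, assumed to live on an invoice-line-like osv model with _ imported from tools.translate; method and field names are illustrative:

def onchange_uom_id(self, cr, uid, ids, product_id, uom_id, context=None):
    res = {'value': {}}
    if not product_id or not uom_id:
        return res
    prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
    uom = self.pool.get('product.uom').browse(cr, uid, uom_id, context=context)
    if prod.uom_id.category_id.id != uom.category_id.id:
        # Same warning dict shape as in the hunk above: title + message.
        res['warning'] = {
            'title': _('Warning!'),
            'message': _('The selected unit of measure is not compatible with the unit of measure of the product.'),
        }
    return res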


@ -95,7 +95,7 @@ class account_move_line(osv.osv):
if initial_bal and not context.get('periods', False) and not where_move_lines_by_date:
#we didn't pass any filter in the context, and the initial balance can't be computed using only the fiscalyear otherwise entries will be summed twice
#so we have to invalidate this query
raise osv.except_osv(_('Warning !'),_("You haven't supplied enough argument to compute the initial balance, please select a period and journal in the context."))
raise osv.except_osv(_('Warning!'),_("You have not supplied enough arguments to compute the initial balance, please select a period and a journal in the context."))
if context.get('journal_ids', False):
@ -577,14 +577,14 @@ class account_move_line(osv.osv):
lines = self.browse(cr, uid, ids, context=context)
for l in lines:
if l.account_id.type == 'view':
raise osv.except_osv(_('Error :'), _('You can not create journal items on a "view" account %s %s') % (l.account_id.code, l.account_id.name))
raise osv.except_osv(_('Error!'), _('You cannot create journal items on “View” type account %s %s.') % (l.account_id.code, l.account_id.name))
return True
def _check_no_closed(self, cr, uid, ids, context=None):
lines = self.browse(cr, uid, ids, context=context)
for l in lines:
if l.account_id.type == 'closed':
raise osv.except_osv(_('Error :'), _('You can not create journal items on a closed account %s %s') % (l.account_id.code, l.account_id.name))
raise osv.except_osv(_('Error!'), _('You cannot create journal items on a closed account %s %s.') % (l.account_id.code, l.account_id.name))
return True
def _check_company_id(self, cr, uid, ids, context=None):
@ -609,9 +609,9 @@ class account_move_line(osv.osv):
return True
_constraints = [
(_check_no_view, 'You can not create journal items on an account of type view.', ['account_id']),
(_check_no_closed, 'You can not create journal items on closed account.', ['account_id']),
(_check_company_id, 'Company must be the same for its related account and period.', ['company_id']),
(_check_no_view, 'You cannot create journal items on an account of type view.', ['account_id']),
(_check_no_closed, 'You cannot create journal items on closed account.', ['account_id']),
(_check_company_id, 'Account and Period must belong to the same company.', ['company_id']),
(_check_date, 'The date of your Journal Entry is not in the defined period! You should change the date or remove this constraint from the journal.', ['date']),
(_check_currency, 'The selected account of your Journal Entry forces to provide a secondary currency. You should remove the secondary currency on the account or select a multi-currency view on the journal.', ['currency_id']),
]
@ -743,7 +743,7 @@ class account_move_line(osv.osv):
context = {}
for line in self.browse(cr, uid, ids, context=context):
if company_list and not line.company_id.id in company_list:
raise osv.except_osv(_('Warning !'), _('To reconcile the entries company should be the same for all entries'))
raise osv.except_osv(_('Warning!'), _('To reconcile the entries company should be the same for all entries.'))
company_list.append(line.company_id.id)
for line in self.browse(cr, uid, ids, context=context):
@ -752,7 +752,7 @@ class account_move_line(osv.osv):
else:
currency_id = line.company_id.currency_id
if line.reconcile_id:
raise osv.except_osv(_('Warning'), _('Already Reconciled!'))
raise osv.except_osv(_('Warning!'), _('Already reconciled.'))
if line.reconcile_partial_id:
for line2 in line.reconcile_partial_id.line_partial_ids:
if not line2.reconcile_id:
@ -796,11 +796,11 @@ class account_move_line(osv.osv):
company_list = []
for line in self.browse(cr, uid, ids, context=context):
if company_list and not line.company_id.id in company_list:
raise osv.except_osv(_('Warning !'), _('To reconcile the entries company should be the same for all entries'))
raise osv.except_osv(_('Warning!'), _('To reconcile the entries company should be the same for all entries.'))
company_list.append(line.company_id.id)
for line in unrec_lines:
if line.state <> 'valid':
raise osv.except_osv(_('Error'),
raise osv.except_osv(_('Error!'),
_('Entry "%s" is not valid !') % line.name)
credit += line['credit']
debit += line['debit']
@ -823,15 +823,15 @@ class account_move_line(osv.osv):
r = cr.fetchall()
#TODO: move this check to a constraint in the account_move_reconcile object
if not unrec_lines:
raise osv.except_osv(_('Error'), _('Entry is already reconciled'))
raise osv.except_osv(_('Error!'), _('Entry is already reconciled.'))
account = account_obj.browse(cr, uid, account_id, context=context)
if r[0][1] != None:
raise osv.except_osv(_('Error'), _('Some entries are already reconciled !'))
raise osv.except_osv(_('Error!'), _('Some entries are already reconciled.'))
if (not currency_obj.is_zero(cr, uid, account.company_id.currency_id, writeoff)) or \
(account.currency_id and (not currency_obj.is_zero(cr, uid, account.currency_id, currency))):
if not writeoff_acc_id:
raise osv.except_osv(_('Warning'), _('You have to provide an account for the write off/exchange difference entry !'))
raise osv.except_osv(_('Warning!'), _('You have to provide an account for the write off/exchange difference entry.'))
if writeoff > 0:
debit = writeoff
credit = 0.0
@ -1090,9 +1090,9 @@ class account_move_line(osv.osv):
res = cr.fetchone()
if res:
if res[1] != 'draft':
raise osv.except_osv(_('UserError'),
raise osv.except_osv(_('User Error!'),
_('The account move (%s) for centralisation ' \
'has been confirmed!') % res[2])
'has been confirmed.') % res[2])
return res
def _remove_move_reconcile(self, cr, uid, move_ids=[], context=None):
@ -1139,9 +1139,9 @@ class account_move_line(osv.osv):
if isinstance(ids, (int, long)):
ids = [ids]
if vals.get('account_tax_id', False):
raise osv.except_osv(_('Unable to change tax !'), _('You can not change the tax, you should remove and recreate lines !'))
raise osv.except_osv(_('Unable to change tax!'), _('You cannot change the tax, you should remove and recreate lines.'))
if ('account_id' in vals) and not account_obj.read(cr, uid, vals['account_id'], ['active'])['active']:
raise osv.except_osv(_('Bad account!'), _('You can not use an inactive account!'))
raise osv.except_osv(_('Bad Account!'), _('You cannot use an inactive account.'))
if update_check:
if ('account_id' in vals) or ('journal_id' in vals) or ('period_id' in vals) or ('move_id' in vals) or ('debit' in vals) or ('credit' in vals) or ('date' in vals):
self._update_check(cr, uid, ids, context)
@ -1186,7 +1186,7 @@ class account_move_line(osv.osv):
result = cr.fetchall()
for (state,) in result:
if state == 'done':
raise osv.except_osv(_('Error !'), _('You can not add/modify entries in a closed journal.'))
raise osv.except_osv(_('Error!'), _('You cannot add/modify entries in a closed journal.'))
if not result:
journal = journal_obj.browse(cr, uid, journal_id, context=context)
period = period_obj.browse(cr, uid, period_id, context=context)
@ -1202,9 +1202,9 @@ class account_move_line(osv.osv):
for line in self.browse(cr, uid, ids, context=context):
err_msg = _('Move name (id): %s (%s)') % (line.move_id.name, str(line.move_id.id))
if line.move_id.state <> 'draft' and (not line.journal_id.entry_posted):
raise osv.except_osv(_('Error !'), _('You can not do this modification on a confirmed entry! You can just change some non legal fields or you must unconfirm the journal entry first! \n%s') % err_msg)
raise osv.except_osv(_('Error!'), _('You cannot do this modification on a confirmed entry. You can just change some non legal fields or you must unconfirm the journal entry first.\n%s.') % err_msg)
if line.reconcile_id:
raise osv.except_osv(_('Error !'), _('You can not do this modification on a reconciled entry! You can just change some non legal fields or you must unreconcile first!\n%s') % err_msg)
raise osv.except_osv(_('Error!'), _('You cannot do this modification on a reconciled entry. You can just change some non legal fields or you must unreconcile first.\n%s.') % err_msg)
t = (line.journal_id.id, line.period_id.id)
if t not in done:
self._update_journal_check(cr, uid, line.journal_id.id, line.period_id.id, context)
@ -1224,7 +1224,7 @@ class account_move_line(osv.osv):
if company_id:
vals['company_id'] = company_id[0]
if ('account_id' in vals) and not account_obj.read(cr, uid, vals['account_id'], ['active'])['active']:
raise osv.except_osv(_('Bad account!'), _('You can not use an inactive account!'))
raise osv.except_osv(_('Bad Account!'), _('You cannot use an inactive account.'))
if 'journal_id' in vals:
context['journal_id'] = vals['journal_id']
if 'period_id' in vals:
@ -1237,10 +1237,10 @@ class account_move_line(osv.osv):
if 'period_id' not in context or not isinstance(context.get('period_id', ''), (int, long)):
period_candidate_ids = self.pool.get('account.period').name_search(cr, uid, name=context.get('period_id',''))
if len(period_candidate_ids) != 1:
raise osv.except_osv(_('Encoding error'), _('No period found or more than one period found for the given date.'))
raise osv.except_osv(_('Error!'), _('No period found or more than one period found for the given date.'))
context['period_id'] = period_candidate_ids[0][0]
if not context.get('journal_id', False) and context.get('search_default_journal_id', False):
context['journal_id'] = context.get('search_default_journal_id')
context['journal_id'] = context.get('search_default_journal_id')
self._update_journal_check(cr, uid, context['journal_id'], context['period_id'], context)
move_id = vals.get('move_id', False)
journal = journal_obj.browse(cr, uid, context['journal_id'], context=context)
@ -1263,7 +1263,7 @@ class account_move_line(osv.osv):
move_id = move_obj.create(cr, uid, v, context)
vals['move_id'] = move_id
else:
raise osv.except_osv(_('No piece number !'), _('Can not create an automatic sequence for this piece!\nPut a sequence in the journal definition for automatic numbering or create a sequence manually for this piece.'))
raise osv.except_osv(_('No piece number !'), _('Cannot create an automatic sequence for this piece.\nPut a sequence in the journal definition for automatic numbering or create a sequence manually for this piece.'))
ok = not (journal.type_control_ids or journal.account_control_ids)
if ('account_id' in vals):
account = account_obj.browse(cr, uid, vals['account_id'], context=context)
@ -1288,7 +1288,7 @@ class account_move_line(osv.osv):
vals['amount_currency'] = cur_obj.compute(cr, uid, account.company_id.currency_id.id,
account.currency_id.id, vals.get('debit', 0.0)-vals.get('credit', 0.0), context=ctx)
if not ok:
raise osv.except_osv(_('Bad account !'), _('You can not use this general account in this journal, check the tab \'Entry Controls\' on the related journal !'))
raise osv.except_osv(_('Bad Account!'), _('You cannot use this general account in this journal, check the tab \'Entry Controls\' on the related journal.'))
if vals.get('analytic_account_id',False):
if journal.analytic_journal_id:
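The remaining hunks update the .po translation catalogs: their msgid entries must mirror the source strings changed above, because translation lookup is keyed on the exact msgid text, and a changed source string without a matching catalog update simply falls back to the English source. A rough stand-in illustration with the stdlib gettext module (an assumption for illustration only; it is not how the OpenERP server actually loads translated terms):

import gettext

# fallback=True returns the untranslated msgid when no catalog entry matches
# exactly, which is the behaviour a stale .po entry produces.
catalog = gettext.translation('account', localedir='i18n', languages=['fr'], fallback=True)
_ = catalog.gettext

print(_("You cannot add/modify entries in a closed journal."))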


@ -209,7 +209,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_invoice.py:1241
#, python-format
msgid "Invoice '%s' is paid partially: %s%s of %s%s (%s%s remaining)"
msgid "Invoice '%s' is paid partially: %s%s of %s%s (%s%s remaining)."
msgstr ""
#. module: account
@ -225,7 +225,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:1200
#, python-format
msgid "You can not add/modify entries in a closed journal."
msgid "You cannot add/modify entries in a closed journal."
msgstr ""
#. module: account
@ -327,7 +327,7 @@ msgstr ""
#. module: account
#: constraint:account.move.line:0
msgid "You can not create journal items on an account of type view."
msgid "You cannot create journal items on an account of type view."
msgstr ""
#. module: account
@ -579,7 +579,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:1251
#, python-format
msgid "No period found or more than one period found for the given date."
msgid "No period is found or more than one period found for the given date."
msgstr ""
#. module: account
@ -618,7 +618,7 @@ msgstr ""
#: code:addons/account/account_move_line.py:750
#: code:addons/account/account_move_line.py:803
#, python-format
msgid "To reconcile the entries company should be the same for all entries"
msgid "To reconcile the entries company should be the same for all entries."
msgstr ""
#. module: account
@ -787,7 +787,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_invoice_refund.py:110
#, python-format
msgid "Can not %s invoice which is already reconciled, invoice should be unreconciled first. You can only Refund this invoice"
msgid "Cannot %s invoice which is already reconciled, invoice should be unreconciled first. You can only Refund this invoice."
msgstr ""
#. module: account
@ -907,7 +907,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:2596
#, python-format
msgid "I can not locate a parent code for the template account!"
msgid "I cannot locate a parent code for the template account!"
msgstr ""
#. module: account
@ -1134,7 +1134,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:1302
#, python-format
msgid "You can not use this general account in this journal, check the tab 'Entry Controls' on the related journal !"
msgid "You cannot use this general account in this journal, check the tab 'Entry Controls' on the related journal !"
msgstr ""
#. module: account
@ -1153,7 +1153,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1129
#, python-format
msgid "You can not modify/delete a journal with entries for this period !"
msgid "You cannot modify/delete a journal with entries for this period !"
msgstr ""
#. module: account
@ -1239,7 +1239,7 @@ msgstr ""
#: code:addons/account/wizard/account_financial_report.py:69
#: code:addons/account/wizard/account_report_common.py:144
#, python-format
msgid "Select a starting and an ending period"
msgid "Select a starting and an ending period."
msgstr ""
#. module: account
@ -1564,7 +1564,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_invoice.py:1429
#, python-format
msgid "You selected an Unit of Measure which is not compatible with the product."
msgid "Selected Unit of Measure is not compatible with the Unit of Measure of the product."
msgstr ""
#. module: account
@ -1721,7 +1721,7 @@ msgstr ""
#. module: account
#: constraint:res.company:0
msgid "Error! You can not create recursive companies."
msgid "Error! You cannot create recursive companies."
msgstr ""
#. module: account
@ -1934,8 +1934,8 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1461
#, python-format
msgid "There is no default default debit account defined \n"
"on journal \"%s\""
msgid "No default debit account is defined \n"
"on journal \"%s\"."
msgstr ""
#. module: account
@ -1952,14 +1952,14 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:1277
#, python-format
msgid "Can not create an automatic sequence for this piece!\n"
msgid "Cannot create an automatic sequence for this piece!\n"
"Put a sequence in the journal definition for automatic numbering or create a sequence manually for this piece."
msgstr ""
#. module: account
#: code:addons/account/account.py:787
#, python-format
msgid "You can not modify the company of this journal as its related record exist in journal items"
msgid "You cannot modify the company of this journal as its related record exist in journal items."
msgstr ""
#. module: account
@ -2013,7 +2013,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_invoice.py:370
#, python-format
msgid "There is no Accounting Journal of type Sale/Purchase defined!"
msgid "No Sale/Purchase Journal(s) is defined!"
msgstr ""
#. module: account
@ -2112,8 +2112,8 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1468
#, python-format
msgid "There is no default default credit account defined \n"
"on journal \"%s\""
msgid "No default credit account is defined \n"
"on journal \"%s\"."
msgstr ""
#. module: account
@ -2163,7 +2163,7 @@ msgstr ""
#. module: account
#: constraint:account.account:0
#: constraint:account.tax.code:0
msgid "Error ! You can not create recursive accounts."
msgid "Error ! You cannot create recursive accounts."
msgstr ""
#. module: account
@ -2530,7 +2530,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1321
#, python-format
msgid "No sequence defined on the journal !"
msgid "Please define sequence on the journal !"
msgstr ""
#. module: account
@ -2538,7 +2538,7 @@ msgstr ""
#: code:addons/account/account_invoice.py:688
#: code:addons/account/account_move_line.py:173
#, python-format
msgid "You have to define an analytic journal on the '%s' journal!"
msgid "You have to define an analytic journal on the '%s' journal."
msgstr ""
#. module: account
@ -2635,7 +2635,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_invoice.py:392
#, python-format
msgid "You can not delete an invoice which is open or paid. We suggest you to refund it instead."
msgid "You cannot delete an invoice which is open or paid. We suggest you to refund it instead."
msgstr ""
#. module: account
@ -2915,7 +2915,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_open_closed_fiscalyear.py:39
#, python-format
msgid "No End of year journal defined for the fiscal year"
msgid "Please define End of year journal for the fiscal year."
msgstr ""
#. module: account
@ -3181,7 +3181,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_invoice.py:839
#, python-format
msgid "Can not create the invoice !\n"
msgid "Cannot create the invoice !\n"
"The related payment term is probably misconfigured as it gives a computed amount greater than the total invoiced amount. The latest line of your payment term must be of type 'balance' to avoid rounding issues."
msgstr ""
@ -3216,7 +3216,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:584
#, python-format
msgid "You can not create journal items on a \"view\" account %s %s"
msgid "You cannot create journal items on a \"view\" account %s %s."
msgstr ""
#. module: account
@ -3449,7 +3449,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_invoice.py:921
#, python-format
msgid "You cannot create an invoice on a centralised journal. Uncheck the centralised counterpart box in the related journal from the configuration menu."
msgid "You cannot create an invoice on a centralized journal. Uncheck the centralized counterpart box in the related journal from the configuration menu."
msgstr ""
#. module: account
@ -3473,7 +3473,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:591
#, python-format
msgid "You can not create journal items on a closed account %s %s"
msgid "You cannot create journal items on a closed account %s %s."
msgstr ""
#. module: account
@ -3737,7 +3737,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:97
#, python-format
msgid "You haven't supplied enough argument to compute the initial balance, please select a period and journal in the context."
msgid "You havenot supplied enough argument to compute the initial balance, please select a period and journal in the context."
msgstr ""
#. module: account
@ -3775,8 +3775,8 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:1216
#, python-format
msgid "You can not do this modification on a confirmed entry! You can just change some non legal fields or you must unconfirm the journal entry first! \n"
"%s"
msgid "You cannot do this modification on a confirmed entry! You can just change some non legal fields or you must unconfirm the journal entry first! \n"
"%s."
msgstr ""
#. module: account
@ -3830,7 +3830,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_analytic_line.py:93
#, python-format
msgid "There is no expense account defined for this product: \"%s\" (id:%d)"
msgid "No expense account is defined for this product: \"%s\" (id:%d)."
msgstr ""
#. module: account
@ -3905,7 +3905,7 @@ msgstr ""
#. module: account
#: constraint:account.analytic.line:0
msgid "You can not create analytic line on view account."
msgid "You cannot create analytic line on view account."
msgstr ""
#. module: account
@ -3980,7 +3980,7 @@ msgstr ""
#. module: account
#: code:addons/account/report/common_report_header.py:92
#, python-format
msgid "Not implemented"
msgid "Not implemented!"
msgstr ""
#. module: account
@ -4032,7 +4032,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1077
#, python-format
msgid "You can not modify company of this period as some journal items exists."
msgid "You cannot modify company of this period as some journal items exist."
msgstr ""
#. module: account
@ -4065,7 +4065,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:1251
#, python-format
msgid "Encoding error"
msgid "Encoding error!"
msgstr ""
#. module: account
@ -4178,7 +4178,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1567
#, python-format
msgid "Couldn't create move with currency different from the secondary currency of the account \"%s - %s\". Clear the secondary currency field of the account definition if you want to accept all currencies."
msgid "Cannot create move with currency different from the secondary currency of the account \"%s - %s\". Clear the secondary currency field of the account definition if you want to accept all currencies."
msgstr ""
#. module: account
@ -4332,7 +4332,7 @@ msgstr ""
#. module: account
#: constraint:account.account.template:0
msgid "Configuration Error!\n"
"You can not define children to an account with internal type different of \"View\"! "
"You cannot define children to an account with internal type different of \"View\"! "
msgstr ""
#. module: account
@ -4404,7 +4404,7 @@ msgstr ""
#: code:addons/account/wizard/account_report_common.py:144
#: code:addons/account/wizard/account_report_common.py:150
#, python-format
msgid "Error"
msgid "Error!"
msgstr ""
#. module: account
@ -4501,7 +4501,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1351
#, python-format
msgid "You can not modify a posted entry of this journal !\n"
msgid "You cannot modify a posted entry of this journal !\n"
"You should set the journal to allow cancelling entries if you want to do that."
msgstr ""
@ -4727,7 +4727,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_invoice_refund.py:146
#, python-format
msgid "No Period found on Invoice!"
msgid "No Period is found on Invoice!"
msgstr ""
#. module: account
@ -4910,7 +4910,7 @@ msgstr ""
#. module: account
#: constraint:account.move:0
msgid "You can not create more than one move per period on centralized journal"
msgid "You cannot create more than one move per period on centralized journal."
msgstr ""
#. module: account
@ -5024,7 +5024,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_validate_account_move.py:39
#, python-format
msgid "Specified Journal does not have any account move entries in draft state for this period"
msgid "Specified Journal does not have any account move entries in draft state for this period."
msgstr ""
#. module: account
@ -5118,7 +5118,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:963
#, python-format
msgid "No fiscal year defined for this date !\n"
msgid "No fiscal year is defined for this date !\n"
"Please create one from the configuration of the accounting menu."
msgstr ""
@ -5371,7 +5371,7 @@ msgstr ""
#: code:addons/account/account_move_line.py:1155
#: code:addons/account/account_move_line.py:1238
#, python-format
msgid "You can not use an inactive account!"
msgid "You cannot use an inactive account!"
msgstr ""
#. module: account
@ -5475,7 +5475,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_report_common.py:150
#, python-format
msgid "not implemented"
msgid "Not implemented!"
msgstr ""
#. module: account
@ -5514,7 +5514,7 @@ msgstr ""
#. module: account
#: constraint:account.account:0
msgid "Configuration Error! \n"
"You can not define children to an account with internal type different of \"View\"! "
"You cannot define children to an account with internal type different of \"View\"! "
msgstr ""
#. module: account
@ -5640,7 +5640,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:629
#, python-format
msgid "You can not remove/desactivate an account which is set on a customer or supplier."
msgid "You cannot remove/deactivate an account which is set on a customer or supplier."
msgstr ""
#. module: account
@ -5929,7 +5929,7 @@ msgstr ""
#. module: account
#: constraint:account.payment.term.line:0
msgid "Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2% "
msgid "Percentages for Payment Term Line must be between 0 and 1, Example: 0.02 for 2% ."
msgstr ""
#. module: account
@ -5956,7 +5956,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:622
#, python-format
msgid "You can not desactivate an account that contains some journal items."
msgid "You cannot deactivate an account that contains some journal items."
msgstr ""
#. module: account
@ -5988,7 +5988,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_state_open.py:37
#, python-format
msgid "Invoice is already reconciled"
msgid "Invoice is already reconciled."
msgstr ""
#. module: account
@ -6012,7 +6012,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:624
#, python-format
msgid "You can not remove an account containing journal items."
msgid "You cannot remove an account containing journal items."
msgstr ""
#. module: account
@ -6035,7 +6035,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1563
#, python-format
msgid "Couldn't create move between different companies"
msgid "Cannot create move between different companies"
msgstr ""
#. module: account
@ -6183,7 +6183,7 @@ msgstr ""
#: code:addons/account/account_move_line.py:584
#: code:addons/account/account_move_line.py:591
#, python-format
msgid "Error :"
msgid "Error !"
msgstr ""
#. module: account
@ -6230,8 +6230,8 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:1218
#, python-format
msgid "You can not do this modification on a reconciled entry! You can just change some non legal fields or you must unreconcile first!\n"
"%s"
msgid "You cannot do this modification on a reconciled entry! You can just change some non legal fields or you must unreconcile first!\n"
"%s."
msgstr ""
#. module: account
@ -6454,7 +6454,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1432
#, python-format
msgid "You can not delete a posted journal entry \"%s\"!"
msgid "You cannot delete a posted journal entry \"%s\"!"
msgstr ""
#. module: account
@ -6548,7 +6548,7 @@ msgstr ""
#: code:addons/account/account_invoice.py:528
#: code:addons/account/account_invoice.py:543
#, python-format
msgid "Can not find a chart of account, you should create one from the configuration of the accounting menu."
msgid "Cannot find a chart of account, you should create one from Settings\Configuration\Accounting menu."
msgstr ""
#. module: account
@ -6720,7 +6720,7 @@ msgstr ""
#: code:addons/account/wizard/account_validate_account_move.py:39
#: code:addons/account/wizard/account_validate_account_move.py:61
#, python-format
msgid "Warning"
msgid "Warning !"
msgstr ""
#. module: account
@ -6797,7 +6797,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_fiscalyear_close.py:73
#, python-format
msgid "The periods to generate opening entries were not found"
msgid "The periods to generate opening entries were not found."
msgstr ""
#. module: account
@ -6843,7 +6843,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_fiscalyear_close.py:84
#, python-format
msgid "The journal must have default credit and debit account"
msgid "The journal must have default credit and debit account."
msgstr ""
#. module: account
@ -7056,7 +7056,7 @@ msgstr ""
#: code:addons/account/wizard/account_report_aged_partner_balance.py:56
#: code:addons/account/wizard/account_report_aged_partner_balance.py:58
#, python-format
msgid "UserError"
msgid "UserError!"
msgstr ""
#. module: account
@ -7246,13 +7246,13 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:3446
#, python-format
msgid "The bank account defined on the selected chart of accounts hasn't a code."
msgid "The bank account defined on the selected chart of accounts hasnot a code."
msgstr ""
#. module: account
#: code:addons/account/wizard/account_invoice_refund.py:108
#, python-format
msgid "Can not %s draft/proforma/cancel invoice."
msgid "Cannot %s draft/proforma/cancel invoice."
msgstr ""
#. module: account
@ -7333,7 +7333,7 @@ msgstr ""
#. module: account
#: constraint:account.tax.code.template:0
msgid "Error ! You can not create recursive Tax Codes."
msgid "Error ! You cannot create recursive Tax Codes."
msgstr ""
#. module: account
@ -7418,7 +7418,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_invoice.py:428
#, python-format
msgid "Can not find a chart of accounts for this company, you should create one."
msgid "Cannot find a chart of accounts for this company, you should create one."
msgstr ""
#. module: account
@ -7520,7 +7520,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_invoice.py:1030
#, python-format
msgid "You can not cancel an invoice which is partially paid! You need to unreconcile related payment entries first!"
msgid "You cannot cancel an invoice which is partially paid! You need to unreconcile related payment entries first!"
msgstr ""
#. module: account
@ -7634,7 +7634,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1052
#, python-format
msgid "No period defined for this date: %s !\n"
msgid "No period is defined for this date: %s !\n"
"Please create one."
msgstr ""
@ -7956,7 +7956,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:1307
#, python-format
msgid "You can not validate a non-balanced entry !\n"
msgid "You cannot validate a non-balanced entry !\n"
"Make sure you have configured payment terms properly !\n"
"The latest payment term line should be of the type \"Balance\" !"
msgstr ""
@ -8427,7 +8427,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_automatic_reconcile.py:152
#, python-format
msgid "You must select accounts to reconcile"
msgid "You must select accounts to reconcile."
msgstr ""
#. module: account
@ -8465,7 +8465,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:1153
#, python-format
msgid "You can not change the tax, you should remove and recreate lines !"
msgid "You cannot change the tax, you should remove and recreate lines !"
msgstr ""
#. module: account
@ -8519,7 +8519,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_fiscalyear_close.py:87
#, python-format
msgid "The journal must have centralised counterpart without the Skipping draft state option checked!"
msgid "The journal must have centralized counterpart without the Skipping draft state option checked!"
msgstr ""
#. module: account
@ -8561,7 +8561,7 @@ msgstr ""
#. module: account
#: code:addons/account/account.py:412
#, python-format
msgid "No opening/closing period defined, please create one to set the initial balance!"
msgid "No opening/closing period is defined, please create one to set the initial balance!"
msgstr ""
#. module: account
@ -8726,7 +8726,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_invoice.py:372
#, python-format
msgid "Unknown Error"
msgid "Unknown Error!"
msgstr ""
#. module: account
@ -8790,7 +8790,7 @@ msgstr ""
#. module: account
#: constraint:account.account:0
msgid "Configuration Error! \n"
"You can not select an account type with a deferral method different of \"Unreconciled\" for accounts with internal type \"Payable/Receivable\"! "
"You cannot select an account type with a deferral method different of \"Unreconciled\" for accounts with internal type \"Payable/Receivable\"! "
msgstr ""
#. module: account
@ -8894,7 +8894,7 @@ msgstr ""
#. module: account
#: help:account.invoice.refund,filter_refund:0
msgid "Refund invoice base on this type. You can not Modify and Cancel if the invoice is already reconciled"
msgid "Refund invoice base on this type. You cannot Modify and Cancel if the invoice is already reconciled"
msgstr ""
#. module: account
@ -9055,12 +9055,12 @@ msgstr ""
#. module: account
#: code:addons/account/account_analytic_line.py:102
#, python-format
msgid "There is no income account defined for this product: \"%s\" (id:%d)"
msgid "No income account is defined for this product: \"%s\" (id:%d)."
msgstr ""
#. module: account
#: constraint:account.move.line:0
msgid "You can not create journal items on closed account."
msgid "You cannot create journal items on closed account."
msgstr ""
#. module: account
@ -9305,7 +9305,7 @@ msgstr ""
#. module: account
#: constraint:account.account.template:0
msgid "Error ! You can not create recursive account templates."
msgid "Error ! You cannot create recursive account templates."
msgstr ""
#. module: account
@ -9327,7 +9327,7 @@ msgstr ""
#. module: account
#: code:addons/account/account_move_line.py:832
#, python-format
msgid "Entry is already reconciled"
msgid "Entry is already reconciled!"
msgstr ""
#. module: account
@ -9352,7 +9352,7 @@ msgstr ""
#. module: account
#: help:account.account,type:0
msgid "The 'Internal Type' is used for features available on different types of accounts: view can not have journal items, consolidation are accounts that can have children accounts for multi-company consolidations, payable/receivable are for partners accounts (for debit/credit computations), closed for depreciated accounts."
msgid "The 'Internal Type' is used for features available on different types of accounts: view cannot have journal items, consolidation are accounts that can have children accounts for multi-company consolidations, payable/receivable are for partners accounts (for debit/credit computations), closed for depreciated accounts."
msgstr ""
#. module: account
@ -9774,7 +9774,7 @@ msgstr ""
#. module: account
#: code:addons/account/wizard/account_report_aged_partner_balance.py:56
#, python-format
msgid "You must enter a period length that cannot be 0 or below !"
msgid "You must enter a period length greater than 0 !"
msgstr ""
#. module: account

View File

@ -91,7 +91,7 @@ class account_installer(osv.osv_memory):
def check_unconfigured_cmp(self, cr, uid, context=None):
""" check if there are still unconfigured companies """
if not self.get_unconfigured_cmp(cr, uid, context=context):
raise osv.except_osv(_('No unconfigured company !'), _("There are currently no company without chart of account. The wizard will therefore not be executed."))
raise osv.except_osv(_('No unconfigured company !'), _("There is currently no company without chart of account. The wizard will therefore not be executed."))
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
if context is None:context = {}

View File

@ -89,7 +89,7 @@ class common_report_header(object):
return ''
def _get_sortby(self, data):
raise (_('Error'), _('Not implemented'))
raise (_('Error!'), _('Not implemented.'))
def _get_filter(self, data):
if data.get('form', False) and data['form'].get('filter', False):

View File

@ -70,4 +70,4 @@
try:
self.button_cancel(cr, uid, [ref("account_bank_statement_0")])
except Exception, e:
assert e[0]=='UserError', 'Another exception has been raised!'
assert e[0]=='User Error!', 'Another exception has been raised!'
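Note on the assertion above: osv.except_osv keeps its (title, message) pair in the exception's args, which is why e[0] compares against the title string and why this yml test must be updated whenever a title is reworded (here 'UserError' becoming 'User Error!'). A minimal, self-contained Python 2 sketch of that behaviour, using a stand-in class and a made-up message rather than the real button_cancel code path:

# Stand-in for osv.except_osv, only to illustrate why e[0] yields the title;
# the actual class and the message raised by button_cancel are not reproduced here.
class except_osv(Exception):
    def __init__(self, name, value):
        super(except_osv, self).__init__(name, value)   # args == (name, value)
        self.name, self.value = name, value

try:
    raise except_osv('User Error!', 'Some illustrative message.')
except Exception, e:
    # In Python 2, indexing an exception reads Exception.args,
    # so e[0] is the title checked by the test above.
    assert e[0] == 'User Error!'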

View File

@ -145,7 +145,7 @@ class account_automatic_reconcile(osv.osv_memory):
allow_write_off = form.allow_write_off
reconciled = unreconciled = 0
if not form.account_ids:
raise osv.except_osv(_('UserError'), _('You must select accounts to reconcile'))
raise osv.except_osv(_('User Error!'), _('You must select accounts to reconcile.'))
for account_id in form.account_ids:
params = (account_id.id,)
if not allow_write_off:

View File

@ -35,7 +35,7 @@ class account_change_currency(osv.osv_memory):
context = {}
if context.get('active_id',False):
if obj_inv.browse(cr, uid, context['active_id']).state != 'draft':
raise osv.except_osv(_('Error'), _('You can only change currency for Draft Invoice !'))
raise osv.except_osv(_('Error!'), _('You can only change currency for Draft Invoice.'))
pass
def change_currency(self, cr, uid, ids, context=None):
@ -56,18 +56,18 @@ class account_change_currency(osv.osv_memory):
if invoice.company_id.currency_id.id == invoice.currency_id.id:
new_price = line.price_unit * rate
if new_price <= 0:
raise osv.except_osv(_('Error'), _('New currency is not configured properly !'))
raise osv.except_osv(_('Error!'), _('New currency is not configured properly.'))
if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id == new_currency:
old_rate = invoice.currency_id.rate
if old_rate <= 0:
raise osv.except_osv(_('Error'), _('Current currency is not configured properly !'))
raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
new_price = line.price_unit / old_rate
if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id != new_currency:
old_rate = invoice.currency_id.rate
if old_rate <= 0:
raise osv.except_osv(_('Error'), _('Current currency is not configured properly !'))
raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
new_price = (line.price_unit / old_rate ) * rate
obj_inv_line.write(cr, uid, [line.id], {'price_unit': new_price})
obj_inv.write(cr, uid, [invoice.id], {'currency_id': new_currency}, context=context)

View File

@ -67,7 +67,7 @@ class accounting_report(osv.osv_memory):
result['date_to'] = data['form']['date_to_cmp']
elif data['form']['filter_cmp'] == 'filter_period':
if not data['form']['period_from_cmp'] or not data['form']['period_to_cmp']:
raise osv.except_osv(_('Error'),_('Select a starting and an ending period'))
raise osv.except_osv(_('Error!'),_('Select a starting and an ending period.'))
result['period_from'] = data['form']['period_from_cmp']
result['period_to'] = data['form']['period_to_cmp']
return result

View File

@ -51,15 +51,15 @@ class account_fiscalyear_close(osv.osv_memory):
"""
def _reconcile_fy_closing(cr, uid, ids, context=None):
"""
This private function manually do the reconciliation on the account_move_line given as `ids´, and directly
This private function manually does the reconciliation on the account_move_line given as `ids´, and directly
through psql. It's necessary to do it this way because the usual `reconcile()´ function on account.move.line
object is really resource greedy (not supposed to work on reconciliation between thousands of records) and
object is really resource greedy (not supposed to work on reconciliation between thousands of records) and
it does a lot of different computations that are useless in this particular case.
"""
#check that the reconcilation concern journal entries from only one company
cr.execute('select distinct(company_id) from account_move_line where id in %s',(tuple(ids),))
if len(cr.fetchall()) > 1:
raise osv.except_osv(_('Warning !'), _('The entries to reconcile should belong to the same company'))
raise osv.except_osv(_('Warning!'), _('The entries to reconcile should belong to the same company.'))
r_id = self.pool.get('account.move.reconcile').create(cr, uid, {'type': 'auto'})
cr.execute('update account_move_line set reconcile_id = %s where id in %s',(r_id, tuple(ids),))
return r_id
@ -85,7 +85,7 @@ class account_fiscalyear_close(osv.osv_memory):
fy2_period_set = ','.join(map(lambda id: str(id[0]), cr.fetchall()))
if not fy_period_set or not fy2_period_set:
raise osv.except_osv(_('UserError'), _('The periods to generate opening entries were not found'))
raise osv.except_osv(_('User Error!'), _('The periods to generate opening entries cannot be found.'))
period = obj_acc_period.browse(cr, uid, data[0].period_id.id, context=context)
new_fyear = obj_acc_fiscalyear.browse(cr, uid, data[0].fy2_id.id, context=context)
@ -96,11 +96,11 @@ class account_fiscalyear_close(osv.osv_memory):
company_id = new_journal.company_id.id
if not new_journal.default_credit_account_id or not new_journal.default_debit_account_id:
raise osv.except_osv(_('UserError'),
_('The journal must have default credit and debit account'))
raise osv.except_osv(_('User Error!'),
_('The journal must have default credit and debit account.'))
if (not new_journal.centralisation) or new_journal.entry_posted:
raise osv.except_osv(_('UserError'),
_('The journal must have centralised counterpart without the Skipping draft state option checked!'))
raise osv.except_osv(_('User Error!'),
_('The journal must have centralized counterpart without the Skipping draft state option checked.'))
#delete existing move and move lines if any
move_ids = obj_acc_move.search(cr, uid, [

View File

@ -106,9 +106,9 @@ class account_invoice_refund(osv.osv_memory):
journal_id = form.journal_id.id
for inv in inv_obj.browse(cr, uid, context.get('active_ids'), context=context):
if inv.state in ['draft', 'proforma2', 'cancel']:
raise osv.except_osv(_('Error !'), _('Can not %s draft/proforma/cancel invoice.') % (mode))
raise osv.except_osv(_('Error!'), _('Cannot %s draft/proforma/cancel invoice.') % (mode))
if inv.reconciled and mode in ('cancel', 'modify'):
raise osv.except_osv(_('Error !'), _('Can not %s invoice which is already reconciled, invoice should be unreconciled first. You can only Refund this invoice') % (mode))
raise osv.except_osv(_('Error!'), _('Cannot %s invoice which is already reconciled, invoice should be unreconciled first. You can only refund this invoice.') % (mode))
if form.period.id:
period = form.period.id
else:
@ -143,8 +143,8 @@ class account_invoice_refund(osv.osv_memory):
description = inv.name
if not period:
raise osv.except_osv(_('Data Insufficient !'), \
_('No Period found on Invoice!'))
raise osv.except_osv(_('Insufficient Data!'), \
_('No period found on the invoice.'))
refund_id = inv_obj.refund(cr, uid, [inv.id], date, period, description, journal_id)
refund = inv_obj.browse(cr, uid, refund_id[0], context=context)

View File

@ -41,7 +41,7 @@ class account_invoice_confirm(osv.osv_memory):
for record in data_inv:
if record['state'] not in ('draft','proforma','proforma2'):
raise osv.except_osv(_('Warning'), _("Selected Invoice(s) cannot be confirmed as they are not in 'Draft' or 'Pro-Forma' state!"))
raise osv.except_osv(_('Warning!'), _("Selected invoice(s) cannot be confirmed as they are not in 'Draft' or 'Pro-Forma' state."))
wf_service.trg_validate(uid, 'account.invoice', record['id'], 'invoice_open', cr)
return {'type': 'ir.actions.act_window_close'}
@ -65,7 +65,7 @@ class account_invoice_cancel(osv.osv_memory):
for record in data_inv:
if record['state'] in ('cancel','paid'):
raise osv.except_osv(_('Warning'), _("Selected Invoice(s) cannot be cancelled as they are already in 'Cancelled' or 'Done' state!"))
raise osv.except_osv(_('Warning!'), _("Selected invoice(s) cannot be cancelled as they are already in 'Cancelled' or 'Done' state."))
wf_service.trg_validate(uid, 'account.invoice', record['id'], 'invoice_cancel', cr)
return {'type': 'ir.actions.act_window_close'}

View File

@ -46,7 +46,7 @@ class account_move_bank_reconcile(osv.osv_memory):
from account_journal where id=%s', (data['journal_id'],))
account_id = cr.fetchone()[0]
if not account_id:
raise osv.except_osv(_('Error'), _('You have to define \
raise osv.except_osv(_('Error!'), _('You have to define \
the bank account\nin the journal definition for reconciliation.'))
return {
'domain': "[('journal_id','=',%d), ('account_id','=',%d), ('state','<>','draft')]" % (data['journal_id'], account_id),

View File

@ -60,7 +60,7 @@ class account_move_journal(osv.osv_memory):
if context.get('journal_type', False):
jids = journal_pool.search(cr, uid, [('type','=', context.get('journal_type'))])
if not jids:
raise osv.except_osv(_('Configuration Error !'), _('Can\'t find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration/Financial Accounting/Accounts/Journals.') % context.get('journal_type'))
raise osv.except_osv(_('Configuration Error!'), _('Cannot find any account journal of %s type for this company.\n\nYou can create one in the menu: \nConfiguration/Journals/Journals.') % context.get('journal_type'))
journal_id = jids[0]
return journal_id
@ -83,25 +83,25 @@ class account_move_journal(osv.osv_memory):
if context:
if not view_id:
return res
period_pool = self.pool.get('account.period')
journal_pool = self.pool.get('account.journal')
journal_id = self._get_journal(cr, uid, context)
period_id = self._get_period(cr, uid, context)
journal = False
if journal_id:
journal = journal_pool.read(cr, uid, [journal_id], ['name'])[0]['name']
journal_string = _("Journal: %s") % tools.ustr(journal)
else:
journal_string = _("Journal: All")
period = False
if period_id:
period = period_pool.browse(cr, uid, [period_id], ['name'])[0]['name']
period_string = _("Period: %s") % tools.ustr(period)
open_string = _("Open")
view = """<?xml version="1.0" encoding="utf-8"?>
<form string="Standard entries" version="7.0">
@ -116,7 +116,7 @@ class account_move_journal(osv.osv_memory):
<button string="Cancel" class="oe_link" special="cancel"/>
</footer>
</form>""" % (_('Journal'), journal_string, _('Period'), period_string, open_string)
view = etree.fromstring(view.encode('utf8'))
xarch, xfields = self._view_look_dom_arch(cr, uid, view, view_id, context=context)
view = xarch
@ -158,7 +158,7 @@ class account_move_journal(osv.osv_memory):
state = period.state
if state == 'done':
raise osv.except_osv(_('UserError'), _('This period is already closed !'))
raise osv.except_osv(_('User Error!'), _('This period is already closed.'))
company = period.company_id.id
res = {

View File

@ -36,7 +36,7 @@ class account_open_closed_fiscalyear(osv.osv_memory):
data = self.browse(cr, uid, ids, context=context)[0]
period_journal = data.fyear_id.end_journal_period_id or False
if not period_journal:
raise osv.except_osv(_('Error !'), _('No End of year journal defined for the fiscal year'))
raise osv.except_osv(_('Error!'), _('You have to set the End of Year journal for this fiscal year.'))
ids_move = move_obj.search(cr, uid, [('journal_id','=',period_journal.journal_id.id),('period_id','=',period_journal.period_id.id)])
if ids_move:

View File

@ -48,7 +48,7 @@ class account_period_close(osv.osv_memory):
for id in context['active_ids']:
account_move_ids = account_move_obj.search(cr, uid, [('period_id', '=', id), ('state', '=', "draft")], context=context)
if account_move_ids:
raise osv.except_osv(_('Invalid action !'), _('In order to close a period, you must first post related journal entries.'))
raise osv.except_osv(_('Invalid Action!'), _('In order to close a period, you must first post related journal entries.'))
cr.execute('update account_journal_period set state=%s where period_id=%s', (mode, id))
cr.execute('update account_period set state=%s where id=%s', (mode, id))

View File

@ -53,9 +53,9 @@ class account_aged_trial_balance(osv.osv_memory):
period_length = data['form']['period_length']
if period_length<=0:
raise osv.except_osv(_('UserError'), _('You must enter a period length that cannot be 0 or below !'))
raise osv.except_osv(_('User Error!'), _('You must set a period length greater than 0.'))
if not data['form']['date_from']:
raise osv.except_osv(_('UserError'), _('Enter a Start date !'))
raise osv.except_osv(_('User Error!'), _('You must set a start date.'))
start = datetime.strptime(data['form']['date_from'], "%Y-%m-%d")

View File

@ -144,13 +144,13 @@ class account_common_report(osv.osv_memory):
result['date_to'] = data['form']['date_to']
elif data['form']['filter'] == 'filter_period':
if not data['form']['period_from'] or not data['form']['period_to']:
raise osv.except_osv(_('Error'),_('Select a starting and an ending period'))
raise osv.except_osv(_('Error!'),_('Select a starting and an ending period.'))
result['period_from'] = data['form']['period_from']
result['period_to'] = data['form']['period_to']
return result
def _print_report(self, cr, uid, ids, data, context=None):
raise (_('Error'), _('not implemented'))
raise (_('Error!'), _('Not implemented.'))
def check_report(self, cr, uid, ids, context=None):
if context is None:

View File

@ -34,7 +34,7 @@ class account_state_open(osv.osv_memory):
if 'active_ids' in context:
data_inv = obj_invoice.browse(cr, uid, context['active_ids'][0], context=context)
if data_inv.reconciled:
raise osv.except_osv(_('Warning'), _('Invoice is already reconciled'))
raise osv.except_osv(_('Warning!'), _('Invoice is already reconciled.'))
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'account.invoice', context['active_ids'][0], 'open_test', cr)
return {'type': 'ir.actions.act_window_close'}

View File

@ -41,7 +41,7 @@ class account_use_model(osv.osv_memory):
for line in model.lines_id:
if line.date_maturity == 'partner':
if not line.partner_id:
raise osv.except_osv(_('Error !'), _("Maturity date of entry line generated by model line '%s' is based on partner payment term!"\
raise osv.except_osv(_('Error!'), _("Maturity date of entry line generated by model line '%s' is based on partner payment term!"\
"\nPlease define partner on it!")%line.name)
pass

View File

@ -36,7 +36,7 @@ class validate_account_move(osv.osv_memory):
data = self.browse(cr, uid, ids, context=context)[0]
ids_move = obj_move.search(cr, uid, [('state','=','draft'),('journal_id','=',data.journal_id.id),('period_id','=',data.period_id.id)])
if not ids_move:
raise osv.except_osv(_('Warning'), _('Specified Journal does not have any account move entries in draft state for this period'))
raise osv.except_osv(_('Warning!'), _('Specified journal does not have any account move entries in draft state for this period.'))
obj_move.button_validate(cr, uid, ids_move, context=context)
return {'type': 'ir.actions.act_window_close'}
@ -58,7 +58,7 @@ class validate_account_move_lines(osv.osv_memory):
move_ids.append(line.move_id.id)
move_ids = list(set(move_ids))
if not move_ids:
raise osv.except_osv(_('Warning'), _('Selected Entry Lines does not have any account move enties in draft state'))
raise osv.except_osv(_('Warning!'), _('Selected entry lines do not have any account move entries in draft state.'))
obj_move.button_validate(cr, uid, move_ids, context)
return {'type': 'ir.actions.act_window_close'}
validate_account_move_lines()

View File

@ -29,11 +29,11 @@ class CashBox(osv.osv_memory):
for box in self.browse(cr, uid, ids, context=context):
for record in records:
if not record.journal_id:
raise osv.except_osv(_('Error !'),
raise osv.except_osv(_('Error!'),
_("Please check that the field 'Journal' is set on the Bank Statement"))
if not record.journal_id.internal_account_id:
raise osv.except_osv(_('Error !'),
raise osv.except_osv(_('Error!'),
_("Please check that the field 'Internal Transfers Account' is set on the payment method '%s'.") % (record.journal_id.name,))
self._create_bank_statement_line(cr, uid, box, record, context=context)

View File

@ -218,7 +218,7 @@ class account_analytic_plan_instance(osv.osv):
pids = ana_plan_instance_obj.search(cr, uid, [('name','=',vals['name']), ('code','=',vals['code']), ('plan_id','<>',False)], context=context)
if pids:
raise osv.except_osv(_('Error'), _('A model having this name and code already exists !'))
raise osv.except_osv(_('Error!'), _('A model with this name and code already exists.'))
res = acct_anal_plan_line_obj.search(cr, uid, [('plan_id','=',journal.plan_id.id)], context=context)
for i in res:
@ -231,7 +231,7 @@ class account_analytic_plan_instance(osv.osv):
if acct_anal_acct.search(cr, uid, [('parent_id', 'child_of', [item.root_analytic_id.id]), ('id', '=', tempo[2]['analytic_account_id'])], context=context):
total_per_plan += tempo[2]['rate']
if total_per_plan < item.min_required or total_per_plan > item.max_required:
raise osv.except_osv(_('Value Error'),_('The Total Should be Between %s and %s') % (str(item.min_required), str(item.max_required)))
raise osv.except_osv(_('Error!'),_('The total should be between %s and %s.') % (str(item.min_required), str(item.max_required)))
return super(account_analytic_plan_instance, self).create(cr, uid, vals, context=context)
@ -338,7 +338,7 @@ class account_move_line(osv.osv):
for line in self.browse(cr, uid, ids, context=context):
if line.analytics_id:
if not line.journal_id.analytic_journal_id:
raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal!") % (line.journal_id.name,))
raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal.") % (line.journal_id.name,))
toremove = analytic_line_obj.search(cr, uid, [('move_id','=',line.id)], context=context)
if toremove:
@ -482,7 +482,7 @@ class account_bank_statement(osv.osv):
for st_line in st.line_ids:
if st_line.analytics_id:
if not st.journal_id.analytic_journal_id:
raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal!") % (st.journal_id.name,))
raise osv.except_osv(_('No Analytic Journal !'),_("You have to define an analytic journal on the '%s' journal.") % (st.journal_id.name,))
if not st_line.amount:
continue
return True

View File

@ -64,7 +64,7 @@ msgstr ""
#. module: account_analytic_plans
#: code:addons/account_analytic_plans/wizard/account_crossovered_analytic.py:61
#, python-format
msgid "User Error"
msgid "User Error!"
msgstr ""
#. module: account_analytic_plans
@ -189,7 +189,7 @@ msgstr ""
#. module: account_analytic_plans
#: code:addons/account_analytic_plans/wizard/analytic_plan_create_model.py:41
#, python-format
msgid "No analytic plan defined !"
msgid "Please define analytic plan."
msgstr ""
#. module: account_analytic_plans
@ -278,7 +278,7 @@ msgstr ""
#: code:addons/account_analytic_plans/account_analytic_plans.py:341
#: code:addons/account_analytic_plans/account_analytic_plans.py:485
#, python-format
msgid "You have to define an analytic journal on the '%s' journal!"
msgid "You have to define an analytic journal on the '%s' journal."
msgstr ""
#. module: account_analytic_plans
@ -341,7 +341,7 @@ msgstr ""
#: code:addons/account_analytic_plans/wizard/analytic_plan_create_model.py:38
#: code:addons/account_analytic_plans/wizard/analytic_plan_create_model.py:41
#, python-format
msgid "Error"
msgid "Error!"
msgstr ""
#. module: account_analytic_plans
@ -351,7 +351,7 @@ msgstr ""
#. module: account_analytic_plans
#: constraint:account.move.line:0
msgid "You can not create journal items on an account of type view."
msgid "You cannot create journal items on an account of type view."
msgstr ""
#. module: account_analytic_plans
@ -362,7 +362,7 @@ msgstr ""
#. module: account_analytic_plans
#: code:addons/account_analytic_plans/wizard/analytic_plan_create_model.py:38
#, python-format
msgid "Please put a name and a code before saving the model !"
msgid "Please put a name and a code before saving the model."
msgstr ""
#. module: account_analytic_plans
@ -394,7 +394,7 @@ msgstr ""
#. module: account_analytic_plans
#: constraint:account.analytic.line:0
msgid "You can not create analytic line on view account."
msgid "You cannot create analytic line on view account."
msgstr ""
#. module: account_analytic_plans
@ -426,7 +426,7 @@ msgstr ""
#. module: account_analytic_plans
#: code:addons/account_analytic_plans/account_analytic_plans.py:234
#, python-format
msgid "The Total Should be Between %s and %s"
msgid "The total should be between %s and %s."
msgstr ""
#. module: account_analytic_plans
@ -502,7 +502,7 @@ msgstr ""
#. module: account_analytic_plans
#: constraint:account.move.line:0
msgid "You can not create journal items on closed account."
msgid "You cannot create journal items on closed account."
msgstr ""
#. module: account_analytic_plans
@ -523,6 +523,6 @@ msgstr ""
#. module: account_analytic_plans
#: code:addons/account_analytic_plans/account_analytic_plans.py:234
#, python-format
msgid "Value Error"
msgid "Value Error!"
msgstr ""

View File

@ -58,7 +58,7 @@ class account_crossovered_analytic(osv.osv_memory):
flag = False
break
if flag:
raise osv.except_osv(_('User Error'),_('There are no Analytic lines related to Account %s' % name))
raise osv.except_osv(_('User Error!'),_('There are no analytic lines related to account %s.') % name)
datas = {
'ids': [],

View File

@ -35,10 +35,10 @@ class analytic_plan_create_model(osv.osv_memory):
if 'active_id' in context and context['active_id']:
plan = plan_obj.browse(cr, uid, context['active_id'], context=context)
if (not plan.name) or (not plan.code):
raise osv.except_osv(_('Error'), _('Please put a name and a code before saving the model !'))
raise osv.except_osv(_('Error!'), _('Please put a name and a code before saving the model.'))
pids = anlytic_plan_obj.search(cr, uid, [], context=context)
if not pids:
raise osv.except_osv(_('Error'), _('No analytic plan defined !'))
raise osv.except_osv(_('Error!'), _('There is no analytic plan defined.'))
plan_obj.write(cr, uid, [context['active_id']], {'plan_id':pids[0]}, context=context)
model_data_ids = mod_obj.search(cr, uid, [('model', '=', 'ir.ui.view'),('name', '=', 'view_analytic_plan_create_model')], context=context)
@ -57,4 +57,4 @@ class analytic_plan_create_model(osv.osv_memory):
analytic_plan_create_model()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -270,7 +270,7 @@ class account_asset_asset(osv.osv):
return True
_constraints = [
(_check_recursion, 'Error ! You can not create recursive assets.', ['parent_id']),
(_check_recursion, 'Error ! You cannot create recursive assets.', ['parent_id']),
(_check_prorata, 'Prorata temporis can be applied only for time method "number of depreciations".', ['prorata']),
]

View File

@ -323,7 +323,7 @@ msgstr ""
#. module: account_asset
#: constraint:account.asset.asset:0
msgid "Error ! You can not create recursive assets."
msgid "Error ! You cannot create recursive assets."
msgstr ""
#. module: account_asset

View File

@ -34,7 +34,7 @@ class account_bank_statement(osv.osv):
# bypass obsolete statement line resequencing
if vals.get('line_ids', False) or context.get('ebanking_import', False):
res = super(osv.osv, self).write(cr, uid, ids, vals, context=context)
else:
else:
res = super(account_bank_statement, self).write(cr, uid, ids, vals, context=context)
return res
@ -70,7 +70,7 @@ class account_bank_statement_line_global(osv.osv):
'type': fields.selection([
('iso20022', 'ISO 20022'),
('coda', 'CODA'),
('manual', 'Manual'),
('manual', 'Manual'),
], 'Type', required=True),
'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')),
'bank_statement_line_ids': fields.one2many('account.bank.statement.line', 'globalisation_id', 'Bank Statement Lines'),
@ -108,12 +108,12 @@ class account_bank_statement_line(osv.osv):
'date': fields.date('Entry Date', required=True, states={'confirm': [('readonly', True)]}),
'val_date': fields.date('Valuta Date', states={'confirm': [('readonly', True)]}),
'globalisation_id': fields.many2one('account.bank.statement.line.global', 'Globalisation ID',
states={'confirm': [('readonly', True)]},
states={'confirm': [('readonly', True)]},
help="Code to identify transactions belonging to the same globalisation level within a batch payment"),
'globalisation_amount': fields.related('globalisation_id', 'amount', type='float',
relation='account.bank.statement.line.global', string='Glob. Amount', readonly=True),
'state': fields.selection([('draft', 'Draft'), ('confirm', 'Confirmed')],
'Status', required=True, readonly=True),
'Status', required=True, readonly=True),
'counterparty_name': fields.char('Counterparty Name', size=35),
'counterparty_bic': fields.char('Counterparty BIC', size=11),
'counterparty_number': fields.char('Counterparty Number', size=34),
@ -127,8 +127,8 @@ class account_bank_statement_line(osv.osv):
if context is None:
context = {}
if context.get('block_statement_line_delete', False):
raise osv.except_osv(_('Warning'), _('Delete operation not allowed ! \
Please go to the associated bank statement in order to delete and/or modify this bank statement line'))
raise osv.except_osv(_('Warning!'), _('Delete operation not allowed. \
Please go to the associated bank statement in order to delete and/or modify this bank statement line.'))
return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
account_bank_statement_line()

View File

@ -112,7 +112,7 @@ msgstr ""
#. module: account_bank_statement_extensions
#: code:addons/account_bank_statement_extensions/account_bank_statement.py:130
#, python-format
msgid "Delete operation not allowed ! Please go to the associated bank statement in order to delete and/or modify this bank statement line"
msgid "Delete operation not allowed ! Please go to the associated bank statement in order to delete and/or modify bank statement line."
msgstr ""
#. module: account_bank_statement_extensions
@ -155,7 +155,7 @@ msgstr ""
#. module: account_bank_statement_extensions
#: code:addons/account_bank_statement_extensions/account_bank_statement.py:130
#, python-format
msgid "Warning"
msgid "Warning!"
msgstr ""
#. module: account_bank_statement_extensions

View File

@ -2,9 +2,9 @@
##############################################################################
#
# OpenERP, Open Source Management Solution
#
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
@ -36,7 +36,7 @@ class coda_bank_account(osv.osv):
_columns = {
'name': fields.char('Name', size=64, required=True),
'bank_id': fields.many2one('res.partner.bank', 'Bank Account', required=True,
'bank_id': fields.many2one('res.partner.bank', 'Bank Account', required=True,
help='Bank Account Number.\nThe CODA import function will find its CODA processing parameters on this number.'),
'description1': fields.char('Primary Account Description', size=35,
help='The Primary or Secondary Account Description should match the corresponding Account Description in the CODA file.'),
@ -44,18 +44,18 @@ class coda_bank_account(osv.osv):
help='The Primary or Secondary Account Description should match the corresponding Account Description in the CODA file.'),
'state': fields.selection([
('normal', 'Normal'),
('info', 'Info')],
('info', 'Info')],
'Type', required=True, select=1,
help='No Bank Statements will be generated for CODA Bank Statements from Bank Accounts of type \'Info\'.'),
'journal': fields.many2one('account.journal', 'Journal',
domain=[('type', '=', 'bank')],
'journal': fields.many2one('account.journal', 'Journal',
domain=[('type', '=', 'bank')],
states={'normal':[('required',True)],'info':[('required',False)]},
help='Bank Journal for the Bank Statement'),
'currency': fields.many2one('res.currency', 'Currency', required=True,
help='The currency of the CODA Bank Statement'),
help='The currency of the CODA Bank Statement'),
'coda_st_naming': fields.char('Bank Statement Naming Policy', size=64,
help="Define the rules to create the name of the Bank Statements generated by the CODA processing." \
"\nE.g. %(code)s%(y)s/%(paper)s"
"\nE.g. %(code)s%(y)s/%(paper)s"
"\n\nVariables:" \
"\nBank Journal Code: %(code)s" \
"\nCurrent Year with Century: %(year)s" \
@ -79,9 +79,9 @@ class coda_bank_account(osv.osv):
'currency': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.currency_id.id,
'state': 'normal',
'coda_st_naming': '%(code)s/%(y)s/%(coda)s',
'active': True,
'find_bbacom': True,
'find_partner': True,
'active': True,
'find_bbacom': True,
'find_partner': True,
'company_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
}
_sql_constraints = [
@ -107,12 +107,12 @@ class coda_bank_account(osv.osv):
if not default:
default = {}
default = default.copy()
default.update({'journal_id': None})
default.update({'journal_id': None})
default['description1'] = cba['description1'] or ''
default['description2'] = cba['description2'] or ''
default['name'] = (cba['name'] or '') + ' (copy)'
default['state'] = cba['state']
return super(coda_bank_account, self).copy(cr, uid, id, default, context)
default['state'] = cba['state']
return super(coda_bank_account, self).copy(cr, uid, id, default, context)
def onchange_state(self, cr, uid, ids, state):
return state =='info' and {'value': {'journal': None}} or {}
@ -137,10 +137,10 @@ class account_coda(osv.osv):
'date': fields.date.context_today,
'user_id': lambda self,cr,uid,context: uid,
'company_id': lambda s,cr,uid,c: s.pool.get('res.company')._company_default_get(cr, uid, 'account.coda', context=c),
}
}
_sql_constraints = [
('coda_uniq', 'unique (name, coda_creation_date)', 'This CODA has already been imported !')
]
]
def unlink(self, cr, uid, ids, context=None):
if context is None:
@ -149,22 +149,22 @@ class account_coda(osv.osv):
coda_st_obj = self.pool.get('coda.bank.statement')
bank_st_obj = self.pool.get('account.bank.statement')
for coda in self.browse(cr, uid, ids, context=context):
for coda_statement in coda.statement_ids:
for coda_statement in coda.statement_ids:
if not context.get('coda_statement_unlink', False):
if coda_st_obj.exists(cr, uid, coda_statement.id, context=context):
coda_st_obj.unlink(cr, uid, [coda_statement.id], context=context)
coda_st_obj.unlink(cr, uid, [coda_statement.id], context=context)
if not context.get('bank_statement_unlink', False):
if coda_st_obj.exists(cr, uid, coda_statement.id, context=context) and (coda_statement.type == 'normal') and bank_st_obj.exists(cr, uid, coda_statement.statement_id.id, context=context):
bank_st_obj.unlink(cr, uid, [coda_statement.statement_id.id], context=context)
bank_st_obj.unlink(cr, uid, [coda_statement.statement_id.id], context=context)
context.update({'coda_unlink': False})
return super(account_coda, self).unlink(cr, uid, ids, context=context)
account_coda()
class account_coda_trans_type(osv.osv):
class account_coda_trans_type(osv.osv):
_name = 'account.coda.trans.type'
_description = 'CODA transaction type'
_rec_name = 'type'
_rec_name = 'type'
_columns = {
'type': fields.char('Transaction Type', size=1, required=True),
'parent_id': fields.many2one('account.coda.trans.type', 'Parent'),
@ -172,36 +172,36 @@ class account_coda_trans_type(osv.osv):
}
account_coda_trans_type()
class account_coda_trans_code(osv.osv):
class account_coda_trans_code(osv.osv):
_name = 'account.coda.trans.code'
_description = 'CODA transaction code'
_rec_name = 'code'
_rec_name = 'code'
_columns = {
'code': fields.char('Code', size=2, required=True, select=1),
'type': fields.selection([
('code', 'Transaction Code'),
('family', 'Transaction Family')],
'Type', required=True, select=1),
('family', 'Transaction Family')],
'Type', required=True, select=1),
'parent_id': fields.many2one('account.coda.trans.code', 'Family', select=1),
'description': fields.char('Description', size=128, translate=True, select=2),
'comment': fields.text('Comment', translate=True),
}
account_coda_trans_code()
class account_coda_trans_category(osv.osv):
class account_coda_trans_category(osv.osv):
_name = 'account.coda.trans.category'
_description = 'CODA transaction category'
_rec_name = 'category'
_rec_name = 'category'
_columns = {
'category': fields.char('Transaction Category', size=3, required=True),
'description': fields.char('Description', size=256, translate=True),
}
account_coda_trans_category()
class account_coda_comm_type(osv.osv):
class account_coda_comm_type(osv.osv):
_name = 'account.coda.comm.type'
_description = 'CODA structured communication type'
_rec_name = 'code'
_columns = {
'code': fields.char('Structured Communication Type', size=3, required=True, select=1),
'description': fields.char('Description', size=128, translate=True),
@ -212,9 +212,9 @@ class account_coda_comm_type(osv.osv):
account_coda_comm_type()
class coda_bank_statement(osv.osv):
_name = 'coda.bank.statement'
_description = 'CODA Bank Statement'
def _default_journal_id(self, cr, uid, context={}):
if context.get('journal_id', False):
return context['journal_id']
@ -245,16 +245,16 @@ class coda_bank_statement(osv.osv):
'coda_id': fields.many2one('account.coda', 'CODA Data File', ondelete='cascade'),
'type': fields.selection([
('normal', 'Normal'),
('info', 'Info')],
'Type', required=True, readonly=True,
help='No Bank Statements are associated with CODA Bank Statements of type \'Info\'.'),
'statement_id': fields.many2one('account.bank.statement', 'Associated Bank Statement'),
'journal_id': fields.many2one('account.journal', 'Journal', readonly=True, domain=[('type', '=', 'bank')]),
'coda_bank_account_id': fields.many2one('coda.bank.account', 'Bank Account', readonly=True),
'period_id': fields.many2one('account.period', 'Period', required=True, readonly=True),
'balance_start': fields.float('Starting Balance', digits_compute=dp.get_precision('Account'), readonly=True),
'balance_end_real': fields.float('Ending Balance', digits_compute=dp.get_precision('Account'), readonly=True),
'balance_end': fields.function(_end_balance, method=True, store=True, string='Balance'),
'line_ids': fields.one2many('coda.bank.statement.line',
'statement_id', 'CODA Bank Statement lines', readonly=True),
'currency': fields.many2one('res.currency', 'Currency', required=True, readonly=True,
@ -262,14 +262,14 @@ class coda_bank_statement(osv.osv):
'company_id': fields.related('journal_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
}
_defaults = {
'type': 'normal',
'currency': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.currency_id.id,
'journal_id': _default_journal_id,
'period_id': _get_period,
}
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
if context is None:
context = {}
res = super(coda_bank_statement, self).search(cr, uid, args=args, offset=offset, limit=limit, order=order,
context=context, count=count)
@ -283,20 +283,20 @@ class coda_bank_statement(osv.osv):
context.update({'coda_statement_unlink': True})
coda_obj = self.pool.get('account.coda')
bank_st_obj = self.pool.get('account.bank.statement')
# find all CODA bank statements that are associated with the selected CODA bank statements via a common CODA file
new_ids = []
for coda_statement in self.browse(cr, uid, ids, context=context):
if coda_obj.exists(cr, uid, coda_statement.coda_id.id, context=context):
new_ids += [x.id for x in coda_obj.browse(cr, uid, coda_statement.coda_id.id, context=context).statement_ids]
# unlink CODA bank statements as well as associated bank statements and CODA files
for coda_statement in self.browse(cr, uid, new_ids, context=context):
if coda_statement.statement_id.state == 'confirm':
raise osv.except_osv(_('Invalid action !'),
_("Cannot delete CODA Bank Statement '%s' of Journal '%s'." \
"\nThe associated Bank Statement has already been confirmed !" \
"\nPlease undo this action first!") \
if coda_statement.statement_id.state == 'confirm':
raise osv.except_osv(_('Invalid Action!'),
_("Cannot delete CODA Bank Statement '%s' of journal '%s'." \
"\nThe associated Bank Statement has already been confirmed." \
"\nPlease undo this action first.") \
% (coda_statement.name, coda_statement.journal_id.name))
else:
if not context.get('coda_unlink', False):
@ -304,11 +304,11 @@ class coda_bank_statement(osv.osv):
coda_obj.unlink(cr, uid, [coda_statement.coda_id.id], context=context)
if not context.get('bank_statement_unlink', False):
if coda_statement.statement_id and bank_st_obj.exists(cr, uid, coda_statement.statement_id.id, context=context):
bank_st_obj.unlink(cr, uid, [coda_statement.statement_id.id], context=context)
context.update({'coda_statement_unlink': False})
return super(coda_bank_statement, self).unlink(cr, uid, new_ids, context=context)
coda_bank_statement()
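The unlink() overrides on account.coda, coda.bank.statement and account.bank.statement cooperate through context flags ('coda_unlink', 'coda_statement_unlink', 'bank_statement_unlink') so that deleting any one of the three cascades to its counterparts exactly once instead of recursing. A reduced sketch of that guard pattern with two hypothetical models (same osv API assumed; the real code additionally checks exists() before each cascade):

from osv import osv, fields

class demo_parent(osv.osv):
    _name = 'demo.parent'
    _columns = {'child_ids': fields.one2many('demo.child', 'parent_id', 'Children')}

    def unlink(self, cr, uid, ids, context=None):
        context = dict(context or {}, parent_unlink=True)
        child_obj = self.pool.get('demo.child')
        for parent in self.browse(cr, uid, ids, context=context):
            child_ids = [c.id for c in parent.child_ids]
            if child_ids and not context.get('child_unlink'):
                # cascade once; the flag stops the child from calling back here
                child_obj.unlink(cr, uid, child_ids, context=context)
        return super(demo_parent, self).unlink(cr, uid, ids, context=context)

class demo_child(osv.osv):
    _name = 'demo.child'
    _columns = {'parent_id': fields.many2one('demo.parent', 'Parent')}

    def unlink(self, cr, uid, ids, context=None):
        context = dict(context or {}, child_unlink=True)
        parent_obj = self.pool.get('demo.parent')
        for child in self.browse(cr, uid, ids, context=context):
            if child.parent_id and not context.get('parent_unlink'):
                parent_obj.unlink(cr, uid, [child.parent_id.id], context=context)
        return super(demo_child, self).unlink(cr, uid, ids, context=context)

demo_parent()
demo_child()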
class account_bank_statement(osv.osv):
@ -316,7 +316,7 @@ class account_bank_statement(osv.osv):
_columns = {
'coda_statement_id': fields.many2one('coda.bank.statement', 'Associated CODA Bank Statement'),
}
def unlink(self, cr, uid, ids, context=None):
if context is None:
context = {}
@ -325,15 +325,15 @@ class account_bank_statement(osv.osv):
coda_st_obj = self.pool.get('coda.bank.statement')
# find all statements that are associated with the selected bank statements via a common CODA file
ids_plus = []
for statement in self.browse(cr, uid, ids, context=context):
if statement.coda_statement_id:
for x in coda_obj.browse(cr, uid, statement.coda_statement_id.coda_id.id, context=context).statement_ids:
if x.type == 'normal':
ids_plus += [x.statement_id.id]
# unlink banks statements as well as associated CODA bank statements and CODA files
for statement in self.browse(cr, uid, ids_plus, context=context):
if not context.get('coda_statement_unlink', False):
if statement.coda_statement_id and coda_st_obj.exists(cr, uid, statement.coda_statement_id.id, context=context):
coda_st_obj.unlink(cr, uid, [statement.coda_statement_id.id], context=context)
@ -347,56 +347,56 @@ class account_bank_statement(osv.osv):
context.update({'bank_statement_unlink': False})
new_ids = list(set(ids + ids_plus))
return super(account_bank_statement, self).unlink(cr, uid, new_ids, context=context)
account_bank_statement()
class coda_bank_statement_line(osv.osv):
_name = 'coda.bank.statement.line'
_order = 'sequence'
_description = 'CODA Bank Statement Line'
_columns = {
'name': fields.char('Communication', size=268, required=True),
'sequence': fields.integer('Sequence'),
'date': fields.date('Entry Date', required=True),
'val_date': fields.date('Valuta Date'),
'account_id': fields.many2one('account.account','Account'), # remove required=True
'type': fields.selection([
('supplier','Supplier'),
('customer','Customer'),
('general','General'),
('globalisation','Globalisation'),
('information','Information'),
('communication','Free Communication'),
], 'Type', required=True),
'globalisation_level': fields.integer('Globalisation Level',
help="The value which is mentioned (1 to 9), specifies the hierarchy level"
" of the globalisation of which this record is the first."
"\nThe same code will be repeated at the end of the globalisation."),
'globalisation_amount': fields.float('Globalisation Amount', digits_compute=dp.get_precision('Account')),
'globalisation_id': fields.many2one('account.bank.statement.line.global', 'Globalisation ID', readonly=True,
help="Code to identify transactions belonging to the same globalisation level within a batch payment"),
'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')),
'partner_id': fields.many2one('res.partner', 'Partner'),
'counterparty_name': fields.char('Counterparty Name', size=35),
'counterparty_bic': fields.char('Counterparty BIC', size=11),
'counterparty_number': fields.char('Counterparty Number', size=34),
'counterparty_currency': fields.char('Counterparty Currency', size=3),
'statement_id': fields.many2one('coda.bank.statement', 'CODA Bank Statement',
select=True, required=True, ondelete='cascade'),
'coda_bank_account_id': fields.related('statement_id', 'coda_bank_account_id', type='many2one', relation='coda.bank.account', string='Bank Account', store=True, readonly=True),
'ref': fields.char('Reference', size=32),
'note': fields.text('Notes'),
'company_id': fields.related('statement_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
}
def unlink(self, cr, uid, ids, context=None):
if context is None:
context = {}
if context.get('block_statement_line_delete', False):
raise osv.except_osv('Warning', _('Delete operation not allowed !'))
raise osv.except_osv('Warning', _('Delete operation not allowed.'))
return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
coda_bank_statement_line()
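Most of the changes in this file, as in the rest of the patch, simply normalize user-facing exceptions: the title is a short translatable tag such as _('Warning!') or _('Error!') with no space before the exclamation mark, and the message is a complete sentence ending with a period, both passed through _() for translation. A condensed sketch of the convention, reusing the delete guard just above (note the original still passes a bare 'Warning' title; the sketch applies the full convention, OpenERP 6.1 imports assumed):

from osv import osv
from tools.translate import _

def guard_statement_line_delete(context):
    # hypothetical standalone version of the unlink() guard above
    if context.get('block_statement_line_delete'):
        # short translated title, full-sentence translated message
        raise osv.except_osv(_('Warning!'),
            _('Delete operation not allowed.'))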
class account_bank_statement_line_global(osv.osv):
_inherit = 'account.bank.statement.line.global'

View File

@ -2,9 +2,9 @@
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
@ -41,7 +41,7 @@ class account_coda_import(osv.osv_memory):
_defaults = {
'coda_fname': lambda *a: '',
}
def coda_parsing(self, cr, uid, ids, context=None, batch=False, codafile=None, codafilename=None):
if context is None:
context = {}
@ -52,12 +52,12 @@ class account_coda_import(osv.osv_memory):
data=self.browse(cr,uid,ids)[0]
try:
codafile = data.coda_data
codafilename = data.coda_fname
except:
raise osv.except_osv(_('Error!'), _('Wizard in incorrect state. Please hit the Cancel button!'))
raise osv.except_osv(_('Error!'), _('Wizard in incorrect state. Please hit the Cancel button.'))
return {}
currency_obj = self.pool.get('res.currency')
coda_bank_account_obj = self.pool.get('coda.bank.account')
trans_type_obj = self.pool.get('account.coda.trans.type')
trans_code_obj = self.pool.get('account.coda.trans.code')
@ -85,7 +85,7 @@ class account_coda_import(osv.osv_memory):
coda_bank.update({'journal_code': coda_bank['journal'] and journal_obj.browse(cr, uid, coda_bank['journal'][0], context=context).code or ''})
coda_bank.update({'iban': partner_bank_obj.browse(cr, uid, coda_bank['bank_id'][0], context=context).iban})
coda_bank.update({'acc_number': partner_bank_obj.browse(cr, uid, coda_bank['bank_id'][0], context=context).acc_number})
coda_bank.update({'currency_name': currency_obj.browse(cr, uid, coda_bank['currency'][0], context=context).name})
trans_type_table = trans_type_obj.read(cr, uid, trans_type_obj.search(cr, uid, []), context=context)
trans_code_table = trans_code_obj.read(cr, uid, trans_code_obj.search(cr, uid, []), context=context)
trans_category_table = trans_category_obj.read(cr, uid, trans_category_obj.search(cr, uid, []), context=context)
@ -96,9 +96,9 @@ class account_coda_import(osv.osv_memory):
err_log = ''
coda_statements = []
recordlist = unicode(base64.decodestring(codafile), 'windows-1252', 'strict').split('\n')
for line in recordlist:
if not line:
pass
elif line[0] == '0':
@ -109,11 +109,11 @@ class account_coda_import(osv.osv_memory):
st_line_seq = 0
glob_lvl_stack = [0]
# header data
coda_statement['currency'] = 'EUR' # default currency
coda_statement['version'] = line[127]
coda_version = line[127]
if coda_version not in ['1','2']:
err_string = _('\nCODA V%s statements are not supported, please contact your bank!') % coda_version
err_string = _('\nCODA V%s statements are not supported, please contact your bank.') % coda_version
err_code = 'R0001'
if batch:
return (err_code, err_string)
@ -122,7 +122,7 @@ class account_coda_import(osv.osv_memory):
coda_statement['date'] = str2date(line[5:11])
period_id = period_obj.search(cr , uid, [('date_start' ,'<=', coda_statement['date']), ('date_stop','>=',coda_statement['date'])])
if not period_id:
err_string = _("\nThe CODA creation date doesn't fall within a defined Accounting Period!" \
err_string = _("\nThe CODA creation date doesn't fall within a defined Accounting Period." \
"\nPlease create the Accounting Period for date %s.") % coda_statement['date']
err_code = 'R0002'
if batch:
@ -130,19 +130,19 @@ class account_coda_import(osv.osv_memory):
raise osv.except_osv(_('Data Error!'), err_string)
coda_statement['period_id'] = period_id[0]
coda_statement['state'] = 'draft'
coda_id = coda_obj.search(cr, uid,[
('name', '=', codafilename),
('coda_creation_date', '=', coda_statement['date']),
])
if coda_id:
err_string = _("\nCODA File with Filename '%s' and Creation Date '%s' has already been imported !") \
err_string = _("\nCODA File with Filename '%s' and Creation Date '%s' has already been imported.") \
% (codafilename, coda_statement['date'])
err_code = 'W0001'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Warning !'), err_string)
raise osv.except_osv(_('Warning!'), err_string)
elif line[0] == '1':
if coda_version == '1':
coda_statement['acc_number'] = line[5:17]
@ -150,24 +150,24 @@ class account_coda_import(osv.osv_memory):
coda_statement['currency'] = line[18:21]
elif line[1] == '0': # Belgian bank account BBAN structure
coda_statement['acc_number'] = line[5:17]
coda_statement['currency'] = line[18:21]
elif line[1] == '1': # foreign bank account BBAN structure
err_string = _('\nForeign bank accounts with BBAN structure are not supported !')
err_string = _('\nForeign bank accounts with BBAN structure are not supported.')
err_code = 'R1001'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Data Error!'), err_string)
elif line[1] == '2': # Belgian bank account IBAN structure
coda_statement['acc_number']=line[5:21]
coda_statement['currency'] = line[39:42]
elif line[1] == '3': # foreign bank account IBAN structure
err_string = _('\nForeign bank accounts with IBAN structure are not supported !')
err_string = _('\nForeign bank accounts with IBAN structure are not supported.')
err_code = 'R1002'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Data Error!'), err_string)
else:
err_string = _('\nUnsupported bank account structure !')
err_string = _('\nUnsupported bank account structure.')
err_code = 'R1003'
if batch:
return (err_code, err_string)
@ -177,11 +177,11 @@ class account_coda_import(osv.osv_memory):
and (coda_statement['currency'] == x['currency_name']) and (coda_statement['description'] == (x['description1'] or x['description2'] or ''))
coda_bank = filter(cba_filter, coda_bank_table)
if coda_bank:
coda_bank = coda_bank[0]
coda_statement['type'] = coda_bank['state']
coda_statement['journal_id'] = coda_bank['journal'] and coda_bank['journal'][0]
coda_statement['currency_id'] = coda_bank['currency'][0]
coda_statement['coda_bank_account_id'] = coda_bank['id']
def_pay_acc = coda_bank['def_payable'][0]
def_rec_acc = coda_bank['def_receivable'][0]
awaiting_acc = coda_bank['awaiting_account'][0]
@ -189,8 +189,8 @@ class account_coda_import(osv.osv_memory):
find_bbacom = coda_bank['find_bbacom']
find_partner = coda_bank['find_partner']
else:
err_string = _("\nNo matching CODA Bank Account Configuration record found !") + \
_("\nPlease check if the 'Bank Account Number', 'Currency' and 'Account Description' fields of your configuration record match with '%s', '%s' and '%s' !") \
err_string = _("\nNo matching CODA Bank Account Configuration record found.") + \
_("\nPlease check if the 'Bank Account Number', 'Currency' and 'Account Description' fields of your configuration record match with '%s', '%s' and '%s'.") \
% (coda_statement['acc_number'], coda_statement['currency'], coda_statement['description'])
err_code = 'R1004'
if batch:
@ -199,13 +199,13 @@ class account_coda_import(osv.osv_memory):
bal_start = list2float(line[43:58]) # old balance data
if line[42] == '1': # 1= Debit
bal_start = - bal_start
coda_statement['balance_start'] = bal_start
coda_statement['acc_holder'] = line[64:90]
coda_statement['paper_seq_number'] = line[2:5]
coda_statement['coda_seq_number'] = line[125:128]
if coda_bank['coda_st_naming']:
coda_statement['name'] = coda_bank['coda_st_naming'] % {
'code': coda_bank['journal_code'] or '',
'year': time.strftime('%Y'),
'y': time.strftime('%y'),
'coda': line[125:128],
@ -213,7 +213,7 @@ class account_coda_import(osv.osv_memory):
}
else:
coda_statement['name'] = '/'
elif line[0] == '2':
# movement data record 2
if line[1] == '1':
@ -222,7 +222,7 @@ class account_coda_import(osv.osv_memory):
st_line_seq = st_line_seq + 1
st_line['sequence'] = st_line_seq
st_line['type'] = 'general'
st_line['reconcile'] = False
st_line['struct_comm_type'] = ''
st_line['struct_comm_type_desc'] = ''
st_line['struct_comm_101'] = ''
@ -230,25 +230,25 @@ class account_coda_import(osv.osv_memory):
st_line['partner_id'] = 0
st_line['account_id'] = 0
st_line['counterparty_name'] = ''
st_line['counterparty_bic'] = ''
st_line['counterparty_number'] = ''
st_line['counterparty_currency'] = ''
st_line['glob_lvl_flag'] = False
st_line['globalisation_id'] = 0
st_line['globalisation_code'] = ''
st_line['globalisation_amount'] = False
st_line['amount'] = False
st_line['ref'] = line[2:10]
st_line['trans_ref'] = line[10:31]
st_line_amt = list2float(line[32:47])
if line[31] == '1': # 1=debit
st_line_amt = - st_line_amt
# processing of amount depending on globalisation code
glob_lvl_flag = int(line[124])
if glob_lvl_flag > 0:
if glob_lvl_stack[-1] == glob_lvl_flag:
st_line['glob_lvl_flag'] = glob_lvl_flag
st_line['amount'] = st_line_amt
glob_lvl_stack.pop()
else:
@ -265,23 +265,23 @@ class account_coda_import(osv.osv_memory):
st_line['trans_type'] = line[53]
trans_type = filter(lambda x: st_line['trans_type'] == x['type'], trans_type_table)
if not trans_type:
err_string = _('\nThe File contains an invalid CODA Transaction Type : %s!') % st_line['trans_type']
err_string = _('\nThe File contains an invalid CODA Transaction Type : %s.') % st_line['trans_type']
err_code = 'R2001'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Data Error!'), err_string)
st_line['trans_type_desc'] = trans_type[0]['description']
st_line['trans_family'] = line[54:56]
trans_family = filter(lambda x: (x['type'] == 'family') and (st_line['trans_family'] == x['code']), trans_code_table)
if not trans_family:
err_string = _('\nThe File contains an invalid CODA Transaction Family : %s!') % st_line['trans_family']
err_string = _('\nThe File contains an invalid CODA Transaction Family : %s.') % st_line['trans_family']
err_code = 'R2002'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Data Error!'), err_string)
st_line['trans_family_desc'] = trans_family[0]['description']
st_line['trans_code'] = line[56:58]
trans_code = filter(lambda x: (x['type'] == 'code') and (st_line['trans_code'] == x['code']) and (trans_family[0]['id'] == x['parent_id'][0]),
trans_code_table)
if trans_code:
st_line['trans_code_desc'] = trans_code[0]['description']
@ -292,49 +292,49 @@ class account_coda_import(osv.osv_memory):
if trans_category:
st_line['trans_category_desc'] = trans_category[0]['description']
else:
st_line['trans_category_desc'] = _('Transaction Category unknown, please consult your bank.')
# positions 61-115 : communication
if line[61] == '1':
st_line['struct_comm_type'] = line[62:65]
comm_type = filter(lambda x: st_line['struct_comm_type'] == x['code'], comm_type_table)
if not comm_type:
err_string = _('\nThe File contains an invalid Structured Communication Type : %s!') % st_line['struct_comm_type']
err_string = _('\nThe File contains an invalid Structured Communication Type : %s.') % st_line['struct_comm_type']
err_code = 'R2003'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Data Error!'), err_string)
st_line['struct_comm_type_desc'] = comm_type[0]['description']
st_line['communication'] = st_line['name'] = line[65:115]
if st_line['struct_comm_type'] == '101':
bbacomm = line[65:77]
st_line['struct_comm_101'] = st_line['name'] = '+++' + bbacomm[0:3] + '/' + bbacomm[3:7] + '/' + bbacomm[7:] + '+++'
else:
st_line['communication'] = st_line['name'] = line[62:115]
st_line['entry_date'] = str2date(line[115:121])
# positions 122-124 not processed
coda_statement_lines[st_line_seq] = st_line
coda_statement['coda_statement_lines'] = coda_statement_lines
elif line[1] == '2':
# movement data record 2.2
if coda_statement['coda_statement_lines'][st_line_seq]['ref'] != line[2:10]:
err_string = _('\nCODA parsing error on movement data record 2.2, seq nr %s!' \
err_string = _('\nCODA parsing error on movement data record 2.2, seq nr %s.' \
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
err_code = 'R2004'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Error!'), err_string)
coda_statement['coda_statement_lines'][st_line_seq]['name'] += line[10:63]
coda_statement['coda_statement_lines'][st_line_seq]['communication'] += line[10:63]
coda_statement['coda_statement_lines'][st_line_seq]['counterparty_bic'] = line[98:109].strip()
elif line[1] == '3':
# movement data record 2.3
if coda_statement['coda_statement_lines'][st_line_seq]['ref'] != line[2:10]:
err_string = _('\nCODA parsing error on movement data record 2.3, seq nr %s!' \
err_string = _('\nCODA parsing error on movement data record 2.3, seq nr %s.' \
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
err_code = 'R2005'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Error!'), err_string)
st_line = coda_statement_lines[st_line_seq]
if coda_version == '1':
counterparty_number = line[10:22]
@ -346,7 +346,7 @@ class account_coda_import(osv.osv_memory):
counterparty_currency = line[23:26].strip()
else:
counterparty_number = line[10:44].strip()
counterparty_currency = line[44:47].strip()
counterparty_name = line[47:82].strip()
st_line['name'] += line[82:125]
st_line['communication'] += line[82:125]
@ -354,15 +354,15 @@ class account_coda_import(osv.osv_memory):
st_line['counterparty_currency'] = counterparty_currency
st_line['counterparty_name'] = counterparty_name
if counterparty_currency not in [coda_bank['currency_name'], '']:
err_string = _('\nCODA parsing error on movement data record 2.3, seq nr %s!' \
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
err_string = _('\nCODA parsing error on movement data record 2.3, seq nr %s.' \
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
err_code = 'R2006'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Error!'), err_string)
# partner matching and reconciliation
if st_line['type'] == 'general':
match = False
bank_ids = False
# prepare reconciliation for bba scor
@ -379,14 +379,14 @@ class account_coda_import(osv.osv_memory):
else:
st_line['account_id'] = partner.property_account_receivable.id or def_rec_acc
st_line['type'] = 'customer'
if invoice.type in ['in_invoice', 'out_invoice']:
iml_ids = move_line_obj.search(cr, uid, [('move_id', '=', invoice.move_id.id), ('reconcile_id', '=', False), ('account_id.reconcile', '=', True)])
if iml_ids:
st_line['reconcile'] = iml_ids[0]
match = True
else:
coda_parsing_note += _("\n Bank Statement '%s' line '%s':" \
"\n There is no invoice matching the Structured Communication '%s'!" \
"\n There is no invoice matching the Structured Communication '%s'." \
"\n Please verify and adjust the invoice and perform the import again or otherwise change the corresponding entry manually in the generated Bank Statement.") \
% (coda_statement['name'], st_line['ref'], reference)
# lookup partner via counterparty_number
@ -402,10 +402,10 @@ class account_coda_import(osv.osv_memory):
if not match and find_partner and bank_ids:
if len(bank_ids) > 1:
coda_parsing_note += _("\n Bank Statement '%s' line '%s':" \
"\n No partner record assigned: There are multiple partners with the same Bank Account Number '%s'!" \
"\n No partner record assigned: There are multiple partners with the same Bank Account Number '%s'." \
"\n Please correct the configuration and perform the import again or otherwise change the corresponding entry manually in the generated Bank Statement.") \
% (coda_statement['name'], st_line['ref'], counterparty_number)
else:
bank = partner_bank_obj.browse(cr, uid, bank_ids[0], context)
st_line['partner_id'] = bank.partner_id.id
match = True
@ -418,26 +418,26 @@ class account_coda_import(osv.osv_memory):
elif not match and find_partner:
if counterparty_number:
coda_parsing_note += _("\n Bank Statement '%s' line '%s':" \
"\n The bank account '%s' is not defined for the partner '%s'!" \
"\n The bank account '%s' is not defined for the partner '%s'." \
"\n Please correct the configuration and perform the import again or otherwise change the corresponding entry manually in the generated Bank Statement.") \
% (coda_statement['name'], st_line['ref'],
counterparty_number, counterparty_name)
else:
coda_parsing_note += _("\n Bank Statement '%s' line '%s':" \
"\n No matching partner record found!" \
"\n No matching partner record found." \
"\n Please adjust the corresponding entry manually in the generated Bank Statement.") \
% (coda_statement['name'], st_line['ref'])
st_line['account_id'] = awaiting_acc
# end of partner record lookup
coda_statement_lines[st_line_seq] = st_line
coda_statement['coda_statement_lines'] = coda_statement_lines
else:
# movement data record 2.x (x <> 1,2,3)
err_string = _('\nMovement data records of type 2.%s are not supported !') % line[1]
err_string = _('\nMovement data records of type 2.%s are not supported.') % line[1]
err_code = 'R2007'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Data Error!'), err_string)
elif line[0] == '3':
# information data record 3
@ -461,19 +461,19 @@ class account_coda_import(osv.osv_memory):
err_code = 'R3001'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Data Error!'), err_string)
info_line['trans_type_desc'] = trans_type[0]['description']
info_line['trans_family'] = line[32:34]
trans_family = filter(lambda x: (x['type'] == 'family') and (info_line['trans_family'] == x['code']), trans_code_table)
if not trans_family:
err_string = _('\nThe File contains an invalid CODA Transaction Family : %s!') % st_line['trans_family']
err_string = _('\nThe File contains an invalid CODA Transaction Family : %s.') % st_line['trans_family']
err_code = 'R3002'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Data Error!'), err_string)
info_line['trans_family_desc'] = trans_family[0]['description']
info_line['trans_code'] = line[34:36]
trans_code = filter(lambda x: (x['type'] == 'code') and (info_line['trans_code'] == x['code']) and (trans_family[0]['id'] == x['parent_id']),
trans_code_table)
if trans_code:
info_line['trans_code_desc'] = trans_code[0]['description']
@ -484,13 +484,13 @@ class account_coda_import(osv.osv_memory):
if trans_category:
info_line['trans_category_desc'] = trans_category[0]['description']
else:
info_line['trans_category_desc'] = _('Transaction Category unknown, please consult your bank.')
# positions 40-113 : communication
if line[39] == '1':
info_line['struct_comm_type'] = line[40:43]
comm_type = filter(lambda x: info_line['struct_comm_type'] == x['code'], comm_type_table)
if not comm_type:
err_string = _('\nThe File contains an invalid Structured Communication Type : %s!') % info_line['struct_comm_type']
err_string = _('\nThe File contains an invalid Structured Communication Type : %s.') % info_line['struct_comm_type']
err_code = 'R3003'
if batch:
return (err_code, err_string)
@ -505,18 +505,18 @@ class account_coda_import(osv.osv_memory):
elif line[1] == '2':
# information data record 3.2
if coda_statement['coda_statement_lines'][st_line_seq]['ref'] != line[2:10]:
err_string = _('\nCODA parsing error on information data record 3.2, seq nr %s!' \
err_string = _('\nCODA parsing error on information data record 3.2, seq nr %s.' \
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
err_code = 'R3004'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('Error!'), err_string)
coda_statement['coda_statement_lines'][st_line_seq]['name'] += line[10:115]
coda_statement['coda_statement_lines'][st_line_seq]['communication'] += line[10:115]
elif line[1] == '3':
# information data record 3.3
if coda_statement['coda_statement_lines'][st_line_seq]['ref'] != line[2:10]:
err_string = _('\nCODA parsing error on information data record 3.3, seq nr %s!' \
err_string = _('\nCODA parsing error on information data record 3.3, seq nr %s.' \
'\nPlease report this issue via your OpenERP support channel.') % line[2:10]
err_code = 'R3005'
if batch:
@ -524,7 +524,7 @@ class account_coda_import(osv.osv_memory):
raise osv.except_osv(_('Error!'), err_string)
coda_statement['coda_statement_lines'][st_line_seq]['name'] += line[10:100]
coda_statement['coda_statement_lines'][st_line_seq]['communication'] += line[10:100]
elif line[0] == '4':
# free communication data record 4
comm_line = {}
@ -535,21 +535,21 @@ class account_coda_import(osv.osv_memory):
comm_line['communication'] = comm_line['name'] = line[32:112]
coda_statement_lines[st_line_seq] = comm_line
coda_statement['coda_statement_lines'] = coda_statement_lines
elif line[0] == '8':
# new balance record
bal_end = list2float(line[42:57])
if line[41] == '1': # 1=Debit
bal_end = - bal_end
coda_statement['balance_end_real'] = bal_end
elif line[0] == '9':
# footer record
coda_statement['balance_min'] = list2float(line[22:37])
coda_statement['balance_plus'] = list2float(line[37:52])
if not bal_end:
coda_statement['balance_end_real'] = coda_statement['balance_start'] + coda_statement['balance_plus'] - coda_statement['balance_min']
if coda_parsing_note:
coda_statement['coda_parsing_note'] = '\nStatement Line matching results:' + coda_parsing_note
else:
coda_statement['coda_parsing_note'] = ''
@ -557,11 +557,11 @@ class account_coda_import(osv.osv_memory):
#end for
err_string = ''
err_code = ''
coda_id = 0
coda_note = ''
line_note = ''
try:
coda_id = coda_obj.create(cr, uid,{
'name' : codafilename,
@ -571,7 +571,7 @@ class account_coda_import(osv.osv_memory):
'user_id': uid,
})
context.update({'coda_id': coda_id})
except osv.except_osv, e:
cr.rollback()
err_string = _('\nApplication Error : ') + str(e)
@ -585,24 +585,24 @@ class account_coda_import(osv.osv_memory):
err_code = 'G0001'
if batch:
return (err_code, err_string)
raise osv.except_osv(_('CODA Import failed !'), err_string)
raise osv.except_osv(_('CODA Import failed.'), err_string)
nb_err = 0
err_string = ''
coda_st_ids = []
bk_st_ids = []
for statement in coda_statements:
# The CODA Statement info is written to two objects: 'coda.bank.statement' and 'account.bank.statement'
try:
coda_st_id = coda_st_obj.create(cr, uid, {
'name': statement['name'],
'type': statement['type'],
'coda_bank_account_id': statement['coda_bank_account_id'],
'currency': statement['currency_id'],
'journal_id': statement['journal_id'],
'coda_id': coda_id,
'date': statement['date'],
@ -613,7 +613,7 @@ class account_coda_import(osv.osv_memory):
})
coda_st_ids.append(coda_st_id)
if statement['type'] == 'normal':
context.update({'ebanking_import': 1})
journal = journal_obj.browse(cr, uid, statement['journal_id'], context=context)
cr.execute('SELECT balance_end_real \
@ -626,16 +626,15 @@ class account_coda_import(osv.osv_memory):
if journal.default_debit_account_id and (journal.default_credit_account_id == journal.default_debit_account_id):
balance_start_check = journal.default_debit_account_id.balance
else:
nb_err += 1
err_string += _('\nConfiguration Error in journal %s!' \
'\nPlease verify the Default Debit and Credit Account settings.') % journal.name
nb_err += 1
err_string += _('\nConfiguration Error!\nPlease verify the Default Debit and Credit Account settings in journal %s.') % journal.name
break
if balance_start_check <> statement['balance_start']:
nb_err += 1
err_string += _('\nThe CODA Statement %s Starting Balance (%.2f) does not correspond with the previous Closing Balance (%.2f) in journal %s!') \
% (statement['name'], statement['balance_start'], balance_start_check, journal.name)
break
nb_err += 1
err_string += _('\nThe CODA Statement %s Starting Balance (%.2f) does not correspond with the previous Closing Balance (%.2f) in journal %s.') \
% (statement['name'], statement['balance_start'], balance_start_check, journal.name)
break
bk_st_id = bank_st_obj.create(cr, uid, {
'name': statement['name'],
'journal_id': statement['journal_id'],
@ -648,7 +647,7 @@ class account_coda_import(osv.osv_memory):
})
bk_st_ids.append(bk_st_id)
coda_st_obj.write(cr, uid, [coda_st_id], {'statement_id': bk_st_id}, context=context)
glob_id_stack = [(0, '', 0, '')] # stack with tuples (glob_lvl_flag, glob_code, glob_id, glob_name)
lines = statement['coda_statement_lines']
st_line_seq = 0
@ -657,7 +656,7 @@ class account_coda_import(osv.osv_memory):
line = lines[x]
# handling non-transactional records : line['type'] in ['information', 'communication']
if line['type'] == 'information':
line['globalisation_id'] = glob_id_stack[-1][2]
@ -673,25 +672,25 @@ class account_coda_import(osv.osv_memory):
line['trans_category'], line['trans_category_desc'],
line['struct_comm_type'], line['struct_comm_type_desc'],
line['communication'])
coda_st_line_id = coda_st_line_obj.create(cr, uid, {
'sequence': line['sequence'],
'ref': line['ref'],
'name': line['name'].strip() or '/',
'type' : 'information',
'date': line['entry_date'],
'statement_id': coda_st_id,
'note': line_note,
})
elif line['type'] == 'communication':
line_note = _('Free Communication:\n %s') \
%(line['communication'])
coda_st_line_id = coda_st_line_obj.create(cr, uid, {
'sequence': line['sequence'],
'ref': line['ref'],
'name': line['name'].strip() or '/',
'type' : 'communication',
'date': statement['date'],
@ -699,20 +698,20 @@ class account_coda_import(osv.osv_memory):
'note': line_note,
})
# handling transactional records, # line['type'] in ['globalisation', 'general', 'supplier', 'customer']
else:
glob_lvl_flag = line['glob_lvl_flag']
if glob_lvl_flag:
if glob_id_stack[-1][0] == glob_lvl_flag:
line['globalisation_id'] = glob_id_stack[-1][2]
glob_id_stack.pop()
else:
glob_name = line['name'].strip() or '/'
glob_code = seq_obj.get(cr, uid, 'statement.line.global')
glob_id = glob_obj.create(cr, uid, {
'code': glob_code,
'name': glob_name,
'type': 'coda',
'parent_id': glob_id_stack[-1][2],
@ -720,7 +719,7 @@ class account_coda_import(osv.osv_memory):
})
line['globalisation_id'] = glob_id
glob_id_stack.append((glob_lvl_flag, glob_code, glob_id, glob_name))
line_note = _('Partner name: %s \nPartner Account Number: %s' \
'\nTransaction Type: %s - %s' \
'\nTransaction Family: %s - %s' \
@ -735,54 +734,54 @@ class account_coda_import(osv.osv_memory):
line['trans_category'], line['trans_category_desc'],
line['struct_comm_type'], line['struct_comm_type_desc'],
line['communication'])
if line['type'] == 'globalisation':
coda_st_line_id = coda_st_line_obj.create(cr, uid, {
'sequence': line['sequence'],
'ref': line['ref'],
'name': line['name'].strip() or '/',
'type' : 'globalisation',
'val_date' : line['val_date'],
'date': line['entry_date'],
'globalisation_level': line['glob_lvl_flag'],
'globalisation_amount': line['globalisation_amount'],
'globalisation_id': line['globalisation_id'],
'partner_id': line['partner_id'] or 0,
'account_id': line['account_id'],
'statement_id': coda_st_id,
'note': line_note,
})
else: # line['type'] in ['general', 'supplier', 'customer']
if glob_lvl_flag == 0:
line['globalisation_id'] = glob_id_stack[-1][2]
if not line['account_id']:
line['account_id'] = awaiting_acc
coda_st_line_id = coda_st_line_obj.create(cr, uid, {
'sequence': line['sequence'],
'ref': line['ref'],
'name': line['name'] or '/',
'type' : line['type'],
'val_date' : line['val_date'],
'date': line['entry_date'],
'amount': line['amount'],
'partner_id': line['partner_id'] or 0,
'counterparty_name': line['counterparty_name'],
'counterparty_bic': line['counterparty_bic'],
'counterparty_number': line['counterparty_number'],
'counterparty_currency': line['counterparty_currency'],
'account_id': line['account_id'],
'globalisation_level': line['glob_lvl_flag'],
'globalisation_id': line['globalisation_id'],
'statement_id': coda_st_id,
'note': line_note,
})
if statement['type'] == 'normal':
st_line_seq += 1
voucher_id = False
line_name = line['name'].strip()
@ -793,7 +792,7 @@ class account_coda_import(osv.osv_memory):
line_name = '/'
if line['reconcile']:
voucher_vals = {
'type': line['type'] == 'supplier' and 'payment' or 'receipt',
'name': line_name,
'partner_id': line['partner_id'],
@ -808,15 +807,15 @@ class account_coda_import(osv.osv_memory):
voucher_id = voucher_obj.create(cr, uid, voucher_vals, context=context)
move_line = move_line_obj.browse(cr, uid, line['reconcile'], context=context)
voucher_dict = voucher_obj.onchange_partner_id(cr, uid, [],
partner_id = line['partner_id'],
journal_id = statement['journal_id'],
price = abs(line['amount']),
currency_id = journal.company_id.currency_id.id,
ttype = line['type'] == 'supplier' and 'payment' or 'receipt',
date = line['val_date'],
context = context)
#_logger.warning('voucher_dict = %s' % voucher_dict)
voucher_line_vals = False
if voucher_dict['value']['line_ids']:
for line_dict in voucher_dict['value']['line_ids']:
@ -831,38 +830,38 @@ class account_coda_import(osv.osv_memory):
bank_st_line_id = bank_st_line_obj.create(cr, uid, {
'sequence': st_line_seq,
'ref': line['ref'],
'name': line_name,
'type' : line['type'],
'val_date' : line['val_date'],
'date': line['entry_date'],
'amount': line['amount'],
'partner_id': line['partner_id'] or 0,
'counterparty_name': line['counterparty_name'],
'counterparty_bic': line['counterparty_bic'],
'counterparty_number': line['counterparty_number'],
'counterparty_currency': line['counterparty_currency'],
'account_id': line['account_id'],
'globalisation_id': line['globalisation_id'],
'statement_id': bk_st_id,
'voucher_id': voucher_id,
'note': line_note,
})
# end 'for x in lines'
coda_st_obj.write(cr, uid, [coda_st_id], {}, context=context) # calculate balance
st_balance = coda_st_obj.read(cr, uid, coda_st_id, ['balance_end', 'balance_end_real'], context=context)
if st_balance['balance_end'] <> st_balance['balance_end_real']:
err_string += _('\nIncorrect ending Balance in CODA Statement %s for Bank Account %s!') \
err_string += _('\nIncorrect ending Balance in CODA Statement %s for Bank Account %s.') \
% (statement['coda_seq_number'], (statement['acc_number'] + ' (' + statement['currency'] + ') - ' + statement['description']))
if statement['type'] == 'normal':
nb_err += 1
break
else:
statement['coda_parsing_note'] += '\n' + err_string
if statement['type'] == 'normal':
bank_st_obj.button_dummy(cr, uid, [bk_st_id], context=context) # calculate balance
journal_name = journal.name
else:
journal_name = _('None')
@ -904,7 +903,7 @@ class account_coda_import(osv.osv_memory):
_logger.error('Unknown Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
# end 'for statement in coda_statements'
coda_note_header = _('CODA File is Imported :')
coda_note_footer = _('\n\nNumber of statements : ') + str(len(coda_st_ids))
err_log = err_log + _('\nNumber of errors : ') + str(nb_err) + '\n'
@ -920,13 +919,13 @@ class account_coda_import(osv.osv_memory):
if batch:
err_code = 'G0002'
return (err_code, err_string)
raise osv.except_osv(_('CODA Import failed !'), err_string)
raise osv.except_osv(_('CODA Import failed.'), err_string)
context.update({ 'bk_st_ids': bk_st_ids})
model_data_ids = mod_obj.search(cr, uid, [('model', '=', 'ir.ui.view'), ('name', '=', 'account_coda_import_result_view')], context=context)
resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
self.write(cr, uid, ids, {'note': note}, context=context)
return {
'name': _('Import CODA File result'),
'res_id': ids[0],
@ -961,7 +960,7 @@ class account_coda_import(osv.osv_memory):
domain += [('id','in', context.get('bk_st_ids', False))]
action.update({'domain': domain})
return action
account_coda_import()
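Throughout coda_parsing() above, every validation failure follows the same dual-mode pattern: build err_string and an err_code, return the tuple when running in batch mode, otherwise raise osv.except_osv for the interactive wizard. A condensed sketch of that pattern, extracted into a hypothetical helper (same OpenERP 6.1 imports assumed):

from osv import osv
from tools.translate import _

def check_coda_version(line, batch=False):
    # hypothetical extraction of the recurring error-reporting pattern above
    coda_version = line[127]
    if coda_version not in ['1', '2']:
        err_string = _('\nCODA V%s statements are not supported, please contact your bank.') % coda_version
        err_code = 'R0001'
        if batch:
            # batch import: hand the error back to the caller for logging
            return (err_code, err_string)
        # interactive wizard: surface the error to the user immediately
        raise osv.except_osv(_('Data Error!'), err_string)
    return None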
def str2date(date_str):

View File

@ -107,7 +107,7 @@ class account_move_line(osv.osv):
if line.id not in line2bank and line.partner_id.bank_ids:
line2bank[line.id] = line.partner_id.bank_ids[0].id
else:
raise osv.except_osv(_('Error !'), _('No partner defined on entry line'))
raise osv.except_osv(_('Error!'), _('There is no partner defined on the entry line.'))
return line2bank
_columns = {

View File

@ -71,8 +71,8 @@ class payment_order(osv.osv):
#dead code
def get_wizard(self, type):
logger = netsvc.Logger()
logger.notifyChannel("warning", netsvc.LOG_WARNING,
"No wizard found for the payment type '%s'." % type)
logger.notifyChannel("Warning!", netsvc.LOG_WARNING,
"No wizard is found for the payment type '%s'." % type)
return None
def _total(self, cursor, user, ids, name, args, context=None):

View File

@ -402,7 +402,7 @@ msgstr ""
#. module: account_payment
#: code:addons/account_payment/account_move_line.py:110
#, python-format
msgid "No partner defined on entry line"
msgid "There is no partner defined on the entry line."
msgstr ""
#. module: account_payment

View File

@ -473,7 +473,7 @@ class account_voucher(osv.osv):
tr_type = 'purchase'
else:
if not journal.default_credit_account_id or not journal.default_debit_account_id:
raise osv.except_osv(_('Error !'), _('Please define default credit/debit accounts on the journal "%s" !') % (journal.name))
raise osv.except_osv(_('Error!'), _('Please define default credit/debit accounts on the journal "%s".') % (journal.name))
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
tr_type = 'receipt'
@ -822,7 +822,7 @@ class account_voucher(osv.osv):
def unlink(self, cr, uid, ids, context=None):
for t in self.read(cr, uid, ids, ['state'], context=context):
if t['state'] not in ('draft', 'cancel'):
raise osv.except_osv(_('Invalid action !'), _('Cannot delete Voucher(s) which are already opened or paid !'))
raise osv.except_osv(_('Invalid Action!'), _('Cannot delete voucher(s) which are already opened or paid.'))
return super(account_voucher, self).unlink(cr, uid, ids, context=context)
def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'):
@ -919,8 +919,8 @@ class account_voucher(osv.osv):
elif voucher_brw.journal_id.sequence_id:
name = seq_obj.next_by_id(cr, uid, voucher_brw.journal_id.sequence_id.id, context=context)
else:
raise osv.except_osv(_('Error !'),
_('Please define a sequence on the journal !'))
raise osv.except_osv(_('Error!'),
_('Please define a sequence on the journal.'))
if not voucher_brw.reference:
ref = name.replace('/','')
else:
@ -952,11 +952,11 @@ class account_voucher(osv.osv):
if amount_residual > 0:
account_id = line.voucher_id.company_id.expense_currency_exchange_account_id
if not account_id:
raise osv.except_osv(_('Warning'),_("Unable to create accounting entry for currency rate difference. You have to configure the field 'Income Currency Rate' on the company! "))
raise osv.except_osv(_('Warning!'),_("First you have to configure the 'Income Currency Rate' on the company, then create accounting entry for currency rate difference."))
else:
account_id = line.voucher_id.company_id.income_currency_exchange_account_id
if not account_id:
raise osv.except_osv(_('Warning'),_("Unable to create accounting entry for currency rate difference. You have to configure the field 'Expense Currency Rate' on the company! "))
raise osv.except_osv(_('Warning!'),_("First you have to configure the 'Expense Currency Rate' on the company, then create accounting entry for currency rate difference."))
# Even if the amount_currency is never filled, we need to pass the foreign currency because otherwise
# the receivable/payable account may have a secondary currency, which render this field mandatory
account_currency_id = company_currency <> current_currency and current_currency or False
@ -1516,7 +1516,7 @@ class account_bank_statement_line(osv.osv):
return True
_constraints = [
(_check_amount, 'The amount of the voucher must be the same amount as the one on the statement line', ['amount']),
(_check_amount, 'The amount of the voucher must be the same amount as the one on the statement line.', ['amount']),
]
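Unlike the SQL constraint seen earlier, `_check_amount` above is a Python-level constraint: each tuple in `_constraints` pairs a checker method with the message raised when the check returns False for any written record, plus the fields the message is attached to. A hedged sketch with a hypothetical model (osv API assumed):

from osv import osv, fields

class demo_statement_line(osv.osv):
    # hypothetical model illustrating the _constraints tuple above
    _name = 'demo.statement.line'
    _columns = {
        'amount': fields.float('Amount'),
        'voucher_amount': fields.float('Voucher Amount'),
    }
    def _check_amount(self, cr, uid, ids, context=None):
        for line in self.browse(cr, uid, ids, context=context):
            if line.voucher_amount and line.voucher_amount != line.amount:
                return False
        return True
    _constraints = [
        (_check_amount,
         'The amount of the voucher must be the same amount as the one on the statement line.',
         ['amount']),
    ]
demo_statement_line()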
_columns = {

View File

@ -81,7 +81,7 @@ msgstr ""
#. module: account_voucher
#: code:addons/account_voucher/account_voucher.py:797
#, python-format
msgid "Cannot delete Voucher(s) which are already opened or paid !"
msgid "Cannot delete Voucher(s) which are already opened or paid."
msgstr ""
#. module: account_voucher
@ -299,7 +299,7 @@ msgstr ""
#: code:addons/account_voucher/account_voucher.py:927
#: code:addons/account_voucher/account_voucher.py:931
#, python-format
msgid "Warning"
msgid "Warning!"
msgstr ""
#. module: account_voucher
@ -346,7 +346,7 @@ msgstr ""
#. module: account_voucher
#: constraint:res.company:0
msgid "Error! You can not create recursive companies."
msgid "Error! You cannot create recursive companies."
msgstr ""
#. module: account_voucher
@ -520,7 +520,7 @@ msgstr ""
#: constraint:account.bank.statement.line:0
msgid ""
"The amount of the voucher must be the same amount as the one on the "
"statement line"
"statement line."
msgstr ""
#. module: account_voucher
@ -620,8 +620,8 @@ msgstr ""
#: code:addons/account_voucher/account_voucher.py:927
#, python-format
msgid ""
"Unable to create accounting entry for currency rate difference. You have to "
"configure the field 'Income Currency Rate' on the company! "
"First configure the 'Income Currency Rate' on the company,after that create "
"accounting entry for currency rate difference."
msgstr ""
#. module: account_voucher
@ -751,7 +751,7 @@ msgstr ""
#. module: account_voucher
#: code:addons/account_voucher/account_voucher.py:462
#, python-format
msgid "Please define default credit/debit accounts on the journal \"%s\" !"
msgid "Please define default credit/debit accounts on the journal \"%s\"."
msgstr ""
#. module: account_voucher
@ -762,7 +762,7 @@ msgstr ""
#. module: account_voucher
#: code:addons/account_voucher/account_voucher.py:895
#, python-format
msgid "Please define a sequence on the journal !"
msgid "Please define a sequence on the journal."
msgstr ""
#. module: account_voucher
@ -1059,8 +1059,8 @@ msgstr ""
#: code:addons/account_voucher/account_voucher.py:931
#, python-format
msgid ""
"Unable to create accounting entry for currency rate difference. You have to "
"configure the field 'Expense Currency Rate' on the company! "
"First configure the 'Expense Currency Rate' on the company,after that create "
"accounting entry for currency rate difference."
msgstr ""
#. module: account_voucher

View File

@ -140,7 +140,7 @@ class account_analytic_account(osv.osv):
for account in self.browse(cr, uid, ids, context=context):
if account.company_id:
if account.company_id.currency_id.id != value:
raise osv.except_osv(_('Error !'), _("If you set a company, the currency selected has to be the same as it's currency. \nYou can remove the company belonging, and thus change the currency, only on analytic account of type 'view'. This can be really usefull for consolidation purposes of several companies charts with different currencies, for example."))
raise osv.except_osv(_('Error!'), _("If you set a company, the selected currency has to be the same as the company's currency. \nYou can remove the company, and thus change the currency, only on analytic accounts of type 'view'. This can be really useful for consolidating the charts of several companies with different currencies, for example."))
return cr.execute("""update account_analytic_account set currency_id=%s where id=%s""", (value, account.id, ))
def _currency(self, cr, uid, ids, field_name, arg, context=None):
@ -232,7 +232,7 @@ class account_analytic_account(osv.osv):
_order = 'name asc'
_constraints = [
(check_recursion, 'Error! You can not create recursive analytic accounts.', ['parent_id']),
(check_recursion, 'Error! You cannot create recursive analytic accounts.', ['parent_id']),
]
def copy(self, cr, uid, id, default=None, context=None):
@ -333,7 +333,7 @@ class account_analytic_line(osv.osv):
return True
_constraints = [
(_check_no_view, 'You can not create analytic line on view account.', ['account_id']),
(_check_no_view, 'You cannot create analytic line on view account.', ['account_id']),
]
account_analytic_line()

View File

@ -92,7 +92,7 @@ class hr_analytic_timesheet(osv.osv):
if not a:
a = r.product_id.categ_id.property_account_expense_categ.id
if not a:
raise osv.except_osv(_('Error !'),
raise osv.except_osv(_('Error!'),
_('There is no expense account defined ' \
'for this product: "%s" (id:%d)') % \
(r.product_id.name, r.product_id.id,))
@ -127,7 +127,7 @@ class hr_analytic_timesheet(osv.osv):
if not a:
a = r.product_id.categ_id.property_account_expense_categ.id
if not a:
raise osv.except_osv(_('Error !'),
raise osv.except_osv(_('Error!'),
_('There is no expense account defined ' \
'for this product: "%s" (id:%d)') % \
(r.product_id.name, r.product_id.id,))

View File

@ -425,7 +425,7 @@ class ir_model_fields_anonymize_wizard(osv.osv_memory):
elif field_type == 'integer':
anonymized_value = 0
elif field_type in ['binary', 'many2many', 'many2one', 'one2many', 'reference']: # cannot anonymize these kind of fields
msg = "Cannot anonymize fields of these types: binary, many2many, many2one, one2many, reference"
msg = "Cannot anonymize fields of these types: binary, many2many, many2one, one2many, reference."
self._raise_after_history_update(cr, uid, history_id, 'Error !', msg)
if anonymized_value is None:
@ -453,9 +453,9 @@ class ir_model_fields_anonymize_wizard(osv.osv_memory):
# add a result message in the wizard:
msgs = ["Anonymization successful.",
"",
"Don't forget to save the resulting file to a safe place because you will not be able to revert the anonymization without this file.",
"Donot forget to save the resulting file to a safe place because you will not be able to revert the anonymization without this file.",
"",
"This file is also stored in the %s directory. The absolute file path is: %s",
"This file is also stored in the %s directory. The absolute file path is: %s.",
]
msg = '\n'.join(msgs) % (dirpath, abs_filepath)
@ -515,7 +515,7 @@ class ir_model_fields_anonymize_wizard(osv.osv_memory):
wizards = self.browse(cr, uid, ids, context=context)
for wizard in wizards:
if not wizard.file_import:
msg = "The anonymization export file was not supplied. It is not possible to reverse the anonymization process without this file."
msg = "It is not possible to reverse the anonymization process without supplying the anonymization export file."
self._raise_after_history_update(cr, uid, history_id, 'Error !', msg)
# reverse the anonymization:
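For context on the first hunk of this file: the anonymization wizard substitutes a neutral value per field type and refuses relational and binary fields outright. A hedged sketch of that dispatch (the 'xxx' placeholder for text-like fields is an assumption for illustration; only the integer case and the rejected types are visible above):

def anonymized_value_for(field_type, placeholder='xxx'):
    # assumption: text-like fields get a fixed placeholder; integers become 0;
    # relational/binary fields cannot be anonymized and are reported as errors
    if field_type in ('char', 'text'):
        return placeholder
    if field_type == 'integer':
        return 0
    if field_type in ('binary', 'many2many', 'many2one', 'one2many', 'reference'):
        raise ValueError(
            'Cannot anonymize fields of these types: binary, many2many, '
            'many2one, one2many, reference.')
    return None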

View File

@ -37,7 +37,7 @@ def get_datetime(date_field):
date_split = date_field.split(' ')
if len(date_split) == 1:
date_field = date_split[0] + " 00:00:00"
return datetime.strptime(date_field[:19], '%Y-%m-%d %H:%M:%S')
@ -185,7 +185,7 @@ the rule to mark CC(mail to any other person defined in actions)."),
self.post_action(cr, uid, [new_id], model, context=context)
return new_id
return wrapper
def _write(self, old_write, model, context=None):
"""
Return a wrapper around `old_write` calling both `old_write` and
@ -327,7 +327,7 @@ the rule to mark CC(mail to any other person defined in actions)."),
reply_to = emailfrom
if not emailfrom:
raise osv.except_osv(_('Error!'),
_("No Email ID Found for your Company address!"))
_("No email ID found for your company address."))
return mail_message.schedule_with_attach(cr, uid, emailfrom, emails, name, body, model='base.action.rule', reply_to=reply_to, res_id=obj.id)
@ -490,7 +490,7 @@ the rule to mark CC(mail to any other person defined in actions)."),
return True
_constraints = [
(_check_mail, 'Error: The mail is not well formated', ['act_mail_body']),
(_check_mail, 'Error! The mail is not well formatted.', ['act_mail_body']),
]
base_action_rule()

View File

@ -286,7 +286,7 @@ msgstr ""
#. module: base_action_rule
#: code:addons/base_action_rule/base_action_rule.py:329
#, python-format
msgid "No Email ID Found for your Company address!"
msgid "No Email ID is found for your Company address!"
msgstr ""
#. module: base_action_rule
@ -332,7 +332,7 @@ msgstr ""
#. module: base_action_rule
#: constraint:base.action.rule:0
msgid "Error: The mail is not well formated"
msgid "Error ! The mail is not well formated."
msgstr ""
#. module: base_action_rule

View File

@ -48,18 +48,18 @@ def get_recurrent_dates(rrulestring, exdate, startdate=None, exrule=None):
def todate(date):
val = parser.parse(''.join((re.compile('\d')).findall(date)))
return val
if not startdate:
startdate = datetime.now()
if not exdate:
exdate = []
rset1 = rrule.rrulestr(str(rrulestring), dtstart=startdate, forceset=True)
for date in exdate:
datetime_obj = todate(date)
rset1._exdate.append(datetime_obj)
if exrule:
rset1.exrule(rrule.rrulestr(str(exrule), dtstart=startdate))
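For reference, the rruleset handling above can be reproduced standalone with python-dateutil; a minimal sketch using the public exdate() call rather than the private _exdate list appended to in the code:

from datetime import datetime
from dateutil import rrule

start = datetime(2012, 8, 1, 9, 0, 0)
rset = rrule.rrulestr('FREQ=DAILY;COUNT=5', dtstart=start, forceset=True)

# Drop one occurrence, the same way exdate entries are appended above.
rset.exdate(datetime(2012, 8, 3, 9, 0, 0))

print list(rset)   # four datetimes: Aug 1, 2, 4 and 5 at 09:00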
@ -413,7 +413,7 @@ property or property parameter."),
cal = vobject.iCalendar()
event = cal.add('vevent')
if not event_obj.date_deadline or not event_obj.date:
raise osv.except_osv(_('Warning !'),_("Couldn't Invite because date is not specified!"))
raise osv.except_osv(_('Warning!'),_("First you have to specify the date of the invitation."))
event.add('created').value = ics_datetime(time.strftime('%Y-%m-%d %H:%M:%S'))
event.add('dtstart').value = ics_datetime(event_obj.date)
event.add('dtend').value = ics_datetime(event_obj.date_deadline)
@ -995,17 +995,17 @@ class calendar_event(osv.osv):
@param context: A standard dictionary for contextual values
@return: dictionary of rrule value.
"""
result = {}
if not isinstance(ids, list):
ids = [ids]
for datas in self.read(cr, uid, ids, ['id','byday','recurrency', 'month_list','end_date', 'rrule_type', 'select1', 'interval', 'count', 'end_type', 'mo', 'tu', 'we', 'th', 'fr', 'sa', 'su', 'exrule', 'day', 'week_list' ], context=context):
event = datas['id']
if datas.get('interval', 0) < 0:
raise osv.except_osv(_('Warning!'), _('Interval cannot be negative'))
raise osv.except_osv(_('Warning!'), _('Interval cannot be negative.'))
if datas.get('count', 0) < 0:
raise osv.except_osv(_('Warning!'), _('Count cannot be negative'))
raise osv.except_osv(_('Warning!'), _('Count cannot be negative.'))
if datas['recurrency']:
result[event] = self.compute_rule_string(datas)
else:
@ -1092,7 +1092,7 @@ rule or repeating pattern of time to exclude from the recurring rule."),
true, it will allow you to hide the event alarm information without removing it."),
'recurrency': fields.boolean('Recurrent', help="Recurrent Meeting"),
}
def default_organizer(self, cr, uid, context=None):
user_pool = self.pool.get('res.users')
user = user_pool.browse(cr, uid, uid, context=context)
@ -1189,23 +1189,23 @@ rule or repeating pattern of time to exclude from the recurring rule."),
byday = map(lambda x: x.upper(), filter(lambda x: datas.get(x) and x in weekdays, datas))
if byday:
return ';BYDAY=' + ','.join(byday)
return ''
return ''
def get_month_string(freq, datas):
if freq == 'monthly':
if datas.get('select1')=='date' and (datas.get('day') < 1 or datas.get('day') > 31):
raise osv.except_osv(_('Error!'), ("Please select proper Day of month"))
raise osv.except_osv(_('Error!'), ("Please select a proper day of the month."))
if datas.get('select1')=='day':
return ';BYDAY=' + datas.get('byday') + datas.get('week_list')
elif datas.get('select1')=='date':
return ';BYMONTHDAY=' + str(datas.get('day'))
return ''
def get_end_date(datas):
if datas.get('end_date'):
datas['end_date_new'] = ''.join((re.compile('\d')).findall(datas.get('end_date'))) + 'T235959Z'
return (datas.get('end_type') == 'count' and (';COUNT=' + str(datas.get('count'))) or '') +\
((datas.get('end_date_new') and datas.get('end_type') == 'end_date' and (';UNTIL=' + datas.get('end_date_new'))) or '')
@ -1216,32 +1216,32 @@ rule or repeating pattern of time to exclude from the recurring rule."),
interval_srting = datas.get('interval') and (';INTERVAL=' + str(datas.get('interval'))) or ''
return 'FREQ=' + freq.upper() + get_week_string(freq, datas) + interval_srting + get_end_date(datas) + get_month_string(freq, datas)
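As a rough illustration of the string this method assembles, a simplified sketch covering only the weekly case (the real method also delegates to the month and end-date helpers above):

def simple_rule_string(datas):
    # Reduced, weekly-only re-implementation for illustration purposes.
    weekdays = ['mo', 'tu', 'we', 'th', 'fr', 'sa', 'su']
    byday = [day.upper() for day in weekdays if datas.get(day)]
    rule = 'FREQ=' + datas['rrule_type'].upper()
    if byday:
        rule += ';BYDAY=' + ','.join(byday)
    if datas.get('interval'):
        rule += ';INTERVAL=' + str(datas['interval'])
    if datas.get('end_type') == 'count':
        rule += ';COUNT=' + str(datas['count'])
    return rule

print simple_rule_string({'rrule_type': 'weekly', 'interval': 2,
                          'end_type': 'count', 'count': 10,
                          'mo': True, 'fr': True})
# FREQ=WEEKLY;BYDAY=MO,FR;INTERVAL=2;COUNT=10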
def _get_empty_rrule_data(self):
return {
'byday' : False,
'recurrency' : False,
'end_date' : False,
'rrule_type' : False,
'select1' : False,
'interval' : 0,
'count' : False,
'end_type' : False,
'mo' : False,
'tu' : False,
'we' : False,
'th' : False,
'fr' : False,
'sa' : False,
'su' : False,
'exrule' : False,
'day' : False,
'end_date' : False,
'rrule_type' : False,
'select1' : False,
'interval' : 0,
'count' : False,
'end_type' : False,
'mo' : False,
'tu' : False,
'we' : False,
'th' : False,
'fr' : False,
'sa' : False,
'su' : False,
'exrule' : False,
'day' : False,
'week_list' : False
}
#def _write_rrule(self, cr, uid, ids, field_value, rule_date=False, context=None):
# data = self._get_empty_rrule_data()
#
#
# if field_value:
# data['recurrency'] = True
# for event in self.browse(cr, uid, ids, context=context):
@ -1250,16 +1250,16 @@ rule or repeating pattern of time to exclude from the recurring rule."),
# data.update(update_data)
# #parse_rrule
# self.write(cr, uid, event.id, data, context=context)
def _parse_rrule(self, rule, data, date_start):
day_list = ['mo', 'tu', 'we', 'th', 'fr', 'sa', 'su']
rrule_type = ['yearly', 'monthly', 'weekly', 'daily']
r = rrule.rrulestr(rule, dtstart=datetime.strptime(date_start, "%Y-%m-%d %H:%M:%S"))
if r._freq > 0 and r._freq < 4:
data['rrule_type'] = rrule_type[r._freq]
data['count'] = r._count
data['interval'] = r._interval
data['end_date'] = r._until and r._until.strftime("%Y-%m-%d %H:%M:%S")
@ -1270,36 +1270,36 @@ rule or repeating pattern of time to exclude from the recurring rule."),
data[day_list[i]] = True
data['rrule_type'] = 'weekly'
#repeat monthly bynweekday ((weekday, weeknumber), )
if r._bynweekday:
if r._bynweekday:
data['week_list'] = day_list[r._bynweekday[0][0]].upper()
data['byday'] = r._bynweekday[0][1]
data['select1'] = 'day'
data['rrule_type'] = 'monthly'
data['rrule_type'] = 'monthly'
if r._bymonthday:
data['day'] = r._bymonthday[0]
data['select1'] = 'date'
data['rrule_type'] = 'monthly'
#yearly but for openerp it's monthly, take same information as monthly but interval is 12 times
if r._bymonth:
data['interval'] = data['interval'] * 12
#FIXEME handle forever case
#end of recurrence
#end of recurrence
#in case of repeat for ever that we do not support right now
if not (data.get('count') or data.get('end_date')):
data['count'] = 100
if data.get('count'):
data['end_type'] = 'count'
else:
data['end_type'] = 'end_date'
return data
data['end_type'] = 'end_date'
return data
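The reverse direction, _parse_rrule, reads dateutil's internal attributes back into the wizard fields; a minimal sketch of that reading, with the caveat that the underscored attributes are dateutil internals and may vary between versions:

from datetime import datetime
from dateutil import rrule

r = rrule.rrulestr('FREQ=MONTHLY;INTERVAL=2;COUNT=6;BYMONTHDAY=15',
                   dtstart=datetime(2012, 8, 15, 10, 0, 0))

print r._freq         # 1 -> 'monthly' in the rrule_type list used above
print r._interval     # 2
print r._count        # 6
print r._bymonthday   # (15,)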
def remove_virtual_id(self, ids):
if isinstance(ids, (str, int, long)):
return base_calendar_id2real_id(ids)
if isinstance(ids, (list, tuple)):
res = []
for id in ids:
@ -1345,7 +1345,7 @@ rule or repeating pattern of time to exclude from the recurring rule."),
def need_to_update(self, event_id, vals):
split_id = str(event_id).split("-")
if len(split_id) < 2:
return False
return False
else:
date_start = vals.get('date', '')
try:
@ -1409,9 +1409,9 @@ rule or repeating pattern of time to exclude from the recurring rule."),
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
if not context:
context = {}
if 'date' in groupby:
raise osv.except_osv(_('Warning !'), _('Group by date not supported, use the calendar view instead'))
raise osv.except_osv(_('Warning!'), _('Group by date is not supported, use the calendar view instead.'))
virtual_id = context.get('virtual_id', True)
context.update({'virtual_id': False})
res = super(calendar_event, self).read_group(cr, uid, domain, fields, groupby, offset=offset, limit=limit, context=context, orderby=orderby)
@ -1479,7 +1479,7 @@ rule or repeating pattern of time to exclude from the recurring rule."),
def copy(self, cr, uid, id, default=None, context=None):
if context is None:
context = {}
res = super(calendar_event, self).copy(cr, uid, base_calendar_id2real_id(id), default, context)
alarm_obj = self.pool.get('res.alarm')
alarm_obj.do_alarm_create(cr, uid, [res], self._name, 'date', context=context)

View File

@ -108,7 +108,7 @@ msgstr ""
#. module: base_calendar
#: code:addons/base_calendar/base_calendar.py:1006
#, python-format
msgid "Count cannot be negative"
msgid "Count cannot be negative."
msgstr ""
#. module: base_calendar
@ -261,7 +261,7 @@ msgstr ""
#. module: base_calendar
#: code:addons/base_calendar/base_calendar.py:1004
#, python-format
msgid "Interval cannot be negative"
msgid "Interval cannot be negative."
msgstr ""
#. module: base_calendar
@ -273,7 +273,7 @@ msgstr ""
#. module: base_calendar
#: code:addons/base_calendar/wizard/base_calendar_invite_attendee.py:143
#, python-format
msgid "%s must have an email address to send mail"
msgid "%s must have an email address to send mail."
msgstr ""
#. module: base_calendar
@ -407,7 +407,7 @@ msgstr ""
#. module: base_calendar
#: code:addons/base_calendar/base_calendar.py:1411
#, python-format
msgid "Group by date not supported, use the calendar view instead"
msgid "Group by date is not supported, use the calendar view instead."
msgstr ""
#. module: base_calendar
@ -1405,7 +1405,7 @@ msgstr ""
#. module: base_calendar
#: code:addons/base_calendar/base_calendar.py:418
#, python-format
msgid "Couldn't Invite because date is not specified!"
msgid "First specified the date for Invitation."
msgstr ""
#. module: base_calendar

View File

@ -92,9 +92,9 @@ send an Email to Invited Person')
else:
return {'type': 'ir.actions.act_window_close'}
if type == 'internal':
if not datas.get('user_ids'):
raise osv.except_osv(_('Error!'), ("Please select any User"))
raise osv.except_osv(_('Error!'), ("Please select at least one user."))
for user_id in datas.get('user_ids'):
user = user_obj.browse(cr, uid, user_id)
res = {
@ -141,7 +141,7 @@ send an Email to Invited Person')
if not mail_to:
name = map(lambda x: x[1], filter(lambda x: type==x[0], \
self._columns['type'].selection))
raise osv.except_osv(_('Error!'), _("%s must have an email address to send mail") %(name[0]))
raise osv.except_osv(_('Error!'), _("%s must have an email address to send mail.") %(name[0]))
att_obj._send_mail(cr, uid, attendees, mail_to, \
email_from = current_user.user_email or tools.config.get('email_from', False))

View File

@ -139,7 +139,7 @@ class users(osv.osv):
def set_pw(self, cr, uid, id, name, value, args, context):
if not value:
raise osv.except_osv(_('Error'), _("Please specify the password !"))
raise osv.except_osv(_('Error!'), _("You have to specify a password."))
obj = pooler.get_pool(cr.dbname).get('res.users')
if not hasattr(obj, "_salt_cache"):
@ -181,8 +181,8 @@ class users(osv.osv):
cr = pooler.get_db(db).cursor()
return self._login(cr, db, login, password)
except Exception:
_logger.exception('Could not authenticate')
return Exception('Access Denied')
_logger.exception('Cannot authenticate.')
return Exception('Access denied.')
finally:
if cr is not None:
cr.close()
@ -196,9 +196,9 @@ class users(osv.osv):
else:
# Return early if no one has a login name like that.
return False
stored_pw = self.maybe_encrypt(cr, stored_pw, id)
if not stored_pw:
# means couldn't encrypt or user is not active!
return False
@ -210,16 +210,16 @@ class users(osv.osv):
obj._salt_cache = {}
salt = obj._salt_cache[id] = stored_pw[len(magic_md5):11]
encrypted_pw = encrypt_md5(password, salt)
# Check if the encrypted password matches against the one in the db.
cr.execute("""UPDATE res_users
SET date=now() AT TIME ZONE 'UTC'
WHERE id=%s AND password=%s AND active
RETURNING id""",
RETURNING id""",
(int(id), encrypted_pw.encode('utf-8')))
res = cr.fetchone()
cr.commit()
if res:
return res[0]
else:
@ -249,13 +249,13 @@ class users(osv.osv):
stored_login = cr.fetchone()
if stored_login:
stored_login = stored_login[0]
res = self._login(cr, db, stored_login, passwd)
if not res:
raise security.ExceptionNoTb('AccessDenied')
else:
salt = self._salt_cache[db][uid]
cr.execute('SELECT COUNT(*) FROM res_users WHERE id=%s AND password=%s AND active',
cr.execute('SELECT COUNT(*) FROM res_users WHERE id=%s AND password=%s AND active',
(int(uid), encrypt_md5(passwd, salt)))
res = cr.fetchone()[0]
finally:
@ -271,10 +271,10 @@ class users(osv.osv):
else:
self._uid_cache[db] = {uid: passwd}
return bool(res)
def maybe_encrypt(self, cr, pw, id):
""" Return the password 'pw', making sure it is encrypted.
If the password 'pw' is not encrypted, then encrypt all active passwords
in the db. Returns the (possibly newly) encrypted password for 'id'.
"""
@ -296,4 +296,5 @@ class users(osv.osv):
return pw
users()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
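The login path above extracts the salt from the stored hash, re-encrypts the candidate password and compares the two values. A standalone sketch of that flow, using plain hashlib and an illustrative '$demo$' format instead of the module's encrypt_md5/md5crypt helper:

import hashlib

def make_hash(password, salt):
    # Illustrative format only; base_crypt stores md5crypt-style hashes instead.
    return '$demo$' + salt + '$' + hashlib.md5(salt + password).hexdigest()

def check_password(candidate, stored):
    # Extract the salt, re-hash the candidate and compare, as _login() does.
    _unused, _scheme, salt, _digest = stored.split('$')
    return make_hash(candidate, salt) == stored

stored = make_hash('secret', 'ab3k')
print check_password('secret', stored)   # True
print check_password('wrong', stored)    # False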

View File

@ -39,6 +39,6 @@ msgstr ""
#. module: base_crypt
#: code:addons/base_crypt/crypt.py:140
#, python-format
msgid "Error"
msgid "Error!"
msgstr ""

View File

@ -33,7 +33,7 @@ def _create_yaml(self, cr, uid, data, context=None):
try:
res_xml = mod.generate_yaml(cr, uid)
except Exception, e:
raise osv.except_osv(_('Error'),_(str(e)))
raise osv.except_osv(_('Error!'),_(str(e)))
return {
'yaml_file': base64.encodestring(res_xml),
}

View File

@ -138,7 +138,7 @@ class DomApi(DomApiGeneral):
if self.styles_dom.getElementsByTagName("style:page-master").__len__()<>0:
self.page_master = self.styles_dom.getElementsByTagName("style:page-master")[0]
if self.styles_dom.getElementsByTagName("style:page-layout").__len__()<>0 :
self.page_master = self.styles_dom.getElementsByTagName("style:page-layout")[0]
self.page_master = self.styles_dom.getElementsByTagName("style:page-layout")[0]
self.document = self.content_dom.getElementsByTagName("office:document-content")[0]
def buildStylePropertiesDict(self):
@ -307,16 +307,16 @@ def sxw2rml(sxw_file, xsl, output='.', save_pict=False):
tool = PyOpenOffice(output, save_pict = save_pict)
res = tool.unpackNormalize(sxw_file)
f = StringIO(xsl)
styledoc = etree.parse(f)
style = etree.XSLT(styledoc)
f = StringIO(res)
doc = etree.parse(f)
result = style(doc)
root = etree.XPathEvaluator(result)("/document/stylesheet")
if root:
root=root[0]
images = etree.Element("images")
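For reference, the XSLT step above is plain lxml; a minimal, self-contained sketch of parsing a stylesheet and applying it to a document the same way sxw2rml does:

from StringIO import StringIO
from lxml import etree

xsl_src = """<xsl:stylesheet version="1.0"
    xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
  <xsl:template match="/doc">
    <out><xsl:value-of select="title"/></out>
  </xsl:template>
</xsl:stylesheet>"""

style = etree.XSLT(etree.parse(StringIO(xsl_src)))
doc = etree.parse(StringIO("<doc><title>hello</title></doc>"))
print str(style(doc))   # serialized result: <out>hello</out>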
@ -341,7 +341,7 @@ if __name__ == "__main__":
parser.add_option("-o", "--output", dest="output", default='.', help="directory of image output")
(opt, args) = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
parser.error("Incorrect number of arguments.")
import sys

View File

@ -3,31 +3,31 @@
# Portions of this file are under the following copyright and license:
#
#
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#
#
# and other portions are under the following copyright and license:
#
#
# OpenERP, Open Source Management Solution>..
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -145,7 +145,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
self.aSearchResult =self.sock.execute( database, uid, self.password, self.dModel[modelSelectedItem], 'name_search', self.win.getEditText("txtSearchName"))
self.win.removeListBoxItems("lstResource", 0, self.win.getListBoxItemCount("lstResource"))
if self.aSearchResult == []:
ErrorDialog("No search result found !!!", "", "Search ERROR" )
ErrorDialog("No search result found.", "", "Search Error.")
return
for result in self.aSearchResult:
@ -172,7 +172,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
docinfo = oDoc2.getDocumentInfo()
if oDoc2.getURL() == "":
ErrorDialog("Please save your file", "", "Saving ERROR" )
ErrorDialog("You should save your file.", "", "Saving Error.")
return None
url = oDoc2.getURL()
@ -180,7 +180,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
url = self.doc2pdf(url[7:])
if url == None:
ErrorDialog( "Ploblem in creating PDF", "", "PDF Error" )
ErrorDialog( "Problem in creating PDF.", "", "PDF Error.")
return None
url = url[7:]
@ -193,7 +193,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
docinfo = oDoc2.getDocumentInfo()
if self.win.getListBoxSelectedItem("lstResourceType") == "":
ErrorDialog("Please select resource type", "", "Selection ERROR" )
ErrorDialog("You have to select a resource type.", "", "Selection Error." )
return
res = self.send_attachment( docinfo.getUserFieldValue(3), docinfo.getUserFieldValue(2) )
@ -201,11 +201,11 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
def btnOkWithInformation_clicked(self,oActionEvent):
if self.win.getListBoxSelectedItem("lstResourceType") == "":
ErrorDialog( "Please select resource type", "", "Selection ERROR" )
ErrorDialog( "You have to select a resource type.", "", "Selection Error." )
return
if self.win.getListBoxSelectedItem("lstResource") == "" or self.win.getListBoxSelectedItem("lstmodel") == "":
ErrorDialog("Please select Model and Resource","","Selection ERROR")
ErrorDialog("You have to select Model and Resource.", "", "Selection Error.")
return
resourceid = None
@ -215,7 +215,7 @@ class AddAttachment(unohelper.Base, XJobExecutor ):
break
if resourceid == None:
ErrorDialog("No resource selected !!!", "", "Resource ERROR" )
ErrorDialog("No resource is selected.", "", "Resource Error." )
return
res = self.send_attachment( self.dModel[self.win.getListBoxSelectedItem('lstmodel')], resourceid )

View File

@ -125,7 +125,7 @@ class Change( unohelper.Base, XJobExecutor ):
import traceback,sys
info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
self.logobj.log_write('ServerParameter', LOG_ERROR, info)
ErrorDialog("Connection to server fail. please check your Server Parameter", "", "Error")
ErrorDialog("Connection to server is fail. Please check your Server Parameter.", "", "Error!")
self.win.endExecute()
def btnCancel_clicked(self,oActionEvent):

View File

@ -3,31 +3,31 @@
# Portions of this file are under the following copyright and license:
#
#
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#
#
# and other portions are under the following copyright and license:
#
#
# OpenERP, Open Source Management Solution>..
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -106,7 +106,7 @@ class ExportToRML( unohelper.Base, XJobExecutor ):
import traceback,sys
info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
self.logobj.log_write('ExportToRML',LOG_ERROR, info)
ErrorDialog("Can't save the file to the hard drive.", "Exception: %s" % e, "Error" )
ErrorDialog("Cannot save the file to the hard drive.", "Exception: %s." % e, "Error" )
def GetAFileName(self):
sFilePickerArgs = Array(10)

View File

@ -103,7 +103,7 @@ class Expression(unohelper.Base, XJobExecutor ):
tableText.insertTextContent(cursor,oInputList,False)
self.win.endExecute()
else:
ErrorDialog("Please Fill appropriate data in Name field or \nExpression field")
ErrorDialog("Please fill appropriate data in Name field or in Expression field.")
def btnCancel_clicked( self, oActionEvent ):
self.win.endExecute()

View File

@ -279,7 +279,7 @@ class Fields(unohelper.Base, XJobExecutor ):
widget.insertTextContent(cursor,oInputList,False)
self.win.endExecute()
else:
ErrorDialog("Please Fill appropriate data in Name field \nor select perticular value from the list of fields")
ErrorDialog("Please fill appropriate data in Name field \nor select particular value from the list of fields.")
def btnCancel_clicked( self, oActionEvent ):
self.win.endExecute()

View File

@ -3,31 +3,31 @@
# Portions of this file are under the following copyright and license:
#
#
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#
#
# and other portions are under the following copyright and license:
#
#
# OpenERP, Open Source Management Solution>..
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -64,7 +64,6 @@ if __name__<>'package':
database="test"
uid = 3
#
class ModifyExistingReport(unohelper.Base, XJobExecutor):
def __init__(self,ctx):
self.ctx = ctx
@ -93,7 +92,7 @@ class ModifyExistingReport(unohelper.Base, XJobExecutor):
ids = self.sock.execute(database, uid, self.password, 'ir.module.module', 'search', [('name','=','base_report_designer'),('state', '=', 'installed')])
if not len(ids):
ErrorDialog("Please Install base_report_designer module", "", "Module Uninstalled Error")
ErrorDialog("Please install base_report_designer module.", "", "Module Uninstalled Error!")
exit(1)
ids = self.sock.execute(database, uid, self.password, 'ir.actions.report.xml', 'search', [('report_xsl', '=', False),('report_xml', '=', False)])
@ -167,11 +166,11 @@ class ModifyExistingReport(unohelper.Base, XJobExecutor):
if oDoc2.hasLocation() and not oDoc2.isReadonly():
oDoc2.store()
ErrorDialog("Download is Completed","Your file has been placed here :\n"+ fp_name,"Download Message")
ErrorDialog("Download is completed.","Your file has been placed here :\n ."+ fp_name,"Download Message !")
obj=Logger()
obj.log_write('Modify Existing Report',LOG_INFO, ':successful download report %s using database %s' % (self.report_with_id[selectedItemPos][2], database))
except Exception, e:
ErrorDialog("Report has not been downloaded", "Report: %s\nDetails: %s" % ( fp_name, str(e) ),"Download Message")
ErrorDialog("The report could not be downloaded.", "Report: %s\nDetails: %s" % ( fp_name, str(e) ),"Download Message !")
import traceback,sys
info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
self.logobj.log_write('ModifyExistingReport', LOG_ERROR, info)
@ -193,15 +192,15 @@ class ModifyExistingReport(unohelper.Base, XJobExecutor):
str_value='ir.actions.report.xml,'+str(id)
ids = self.sock.execute(database, uid, self.password, 'ir.values' , 'search',[('value','=',str_value)])
if ids:
rec = self.sock.execute(database, uid, self.password, 'ir.values', 'unlink', ids,)
rec = self.sock.execute(database, uid, self.password, 'ir.values', 'unlink', ids,)
else :
pass
if temp:
ErrorDialog("Report","Report has been Delete:\n "+name,"Message")
self.logobj.log_write('Delete Report',LOG_INFO, ':successful delete report %s using database %s' % (name, database))
ErrorDialog("Report", "The report could not be deleted:\n"+name+".", "Message !")
self.logobj.log_write('Delete Report', LOG_INFO, ': report %s successfully deleted using database %s.' % (name, database))
else:
ErrorDialog("Report","Report has not Delete:\n"+name," Message")
ErrorDialog("Report", "The report could not be deleted:\n"+name+".", "Message !")
self.win.endExecute()

View File

@ -288,7 +288,7 @@ class RepeatIn( unohelper.Base, XJobExecutor ):
widget.insertTextContent(cursor,oInputList,False)
self.win.endExecute()
else:
ErrorDialog("Please Fill appropriate data in Object Field or Name field \nor select perticular value from the list of fields")
ErrorDialog("Please fill appropriate data in Object Field or Name field \nor select particular value from the list of fields.")
def btnCancel_clicked( self, oActionEvent ):
self.win.endExecute()

View File

@ -3,31 +3,31 @@
# Portions of this file are under the following copyright and license:
#
#
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#
#
# and other portions are under the following copyright and license:
#
#
# OpenERP, Open Source Management Solution>..
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -65,8 +65,6 @@ if __name__<>'package':
database="report"
uid = 3
#
#
class SendtoServer(unohelper.Base, XJobExecutor):
Kind = {
'PDF' : 'pdf',
@ -93,7 +91,7 @@ class SendtoServer(unohelper.Base, XJobExecutor):
self.ids = self.sock.execute(database, uid, self.password, 'ir.module.module', 'search', [('name','=','base_report_designer'),('state', '=', 'installed')])
if not len(self.ids):
ErrorDialog("Please Install base_report_designer module", "", "Module Uninstalled Error")
ErrorDialog("Please install base_report_designer module.", "", "Module Uninstalled Error!")
exit(1)
report_name = ""
@ -172,8 +170,8 @@ class SendtoServer(unohelper.Base, XJobExecutor):
}
res = self.sock.execute(database, uid, self.password, 'ir.values' , 'create',rec )
else :
ErrorDialog(" Report Name is all ready given !!!\n\n\n Please specify other Name","","Report Name")
self.logobj.log_write('SendToServer',LOG_WARNING, ':Report name all ready given DB %s' % (database))
ErrorDialog("This name is already used for another report.\nPlease try with another name.", "", "Error!")
self.logobj.log_write('SendToServer',LOG_WARNING, ': report name already used DB %s' % (database))
self.win.endExecute()
except Exception,e:
import traceback,sys
@ -205,8 +203,8 @@ class SendtoServer(unohelper.Base, XJobExecutor):
self.logobj.log_write('SendToServer',LOG_INFO, ':Report %s successfully send using %s'%(params['name'],database))
self.win.endExecute()
else:
ErrorDialog("Either Report Name or Technical Name is blank !!!\nPlease specify appropriate Name","","Blank Field ERROR")
self.logobj.log_write('SendToServer',LOG_WARNING, ':Either Report Name or Technical Name is blank')
ErrorDialog("Either report name or technical name is empty.\nPlease specify an appropriate name.", "", "Error!")
self.logobj.log_write('SendToServer',LOG_WARNING, ': either report name or technical name is empty.')
self.win.endExecute()
def getID(self):

View File

@ -3,31 +3,31 @@
# Portions of this file are under the following copyright and license:
#
#
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#
#
# and other portions are under the following copyright and license:
#
#
# OpenERP, Open Source Management Solution>..
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -86,7 +86,7 @@ class ServerParameter( unohelper.Base, XJobExecutor ):
self.win.addButton('btnOK',-2 ,-5, 60,15,'Connect' ,actionListenerProc = self.btnOk_clicked )
self.win.addButton('btnPrevious',15 -80 ,-5,50,15,'Previous',actionListenerProc = self.btnPrevious_clicked)
self.win.addButton('btnCancel',-2 - 110 - 5 ,-5, 35,15,'Cancel' ,actionListenerProc = self.btnCancel_clicked )
sValue=""
if docinfo.getUserFieldValue(0)<>"":
global url
@ -100,8 +100,8 @@ class ServerParameter( unohelper.Base, XJobExecutor ):
# sValue="Could not connect to the server!"
# self.lstDatabase.addItem("Could not connect to the server!",0)
elif res == 0:
sValue="No Database found !!!"
self.lstDatabase.addItem("No Database found !!!",0)
sValue="No database found !"
self.lstDatabase.addItem("No database found !",0)
else:
self.win.addComboListBox("lstDatabase", -2,28,123,15, True)
self.lstDatabase = self.win.getControl( "lstDatabase" )
@ -129,12 +129,12 @@ class ServerParameter( unohelper.Base, XJobExecutor ):
self.sock=RPCSession(url)
UID = self.sock.login(sDatabase,sLogin,sPassword)
if not UID or UID==-1 :
ErrorDialog("Connection Refuse...","Please enter valid Login/Password")
ErrorDialog("Connection denied.", "Please enter valid login/password.")
# self.win.endExecute()
ids_module =self.sock.execute(sDatabase, UID, sPassword, 'ir.module.module', 'search', [('name','=','base_report_designer'),('state', '=', 'installed')])
if not len(ids_module):
ErrorDialog("Please Install base_report_designer module", "", "Module Uninstalled Error")
self.logobj.log_write('Module Not Found',LOG_WARNING, ':base_report_designer not install in database %s' % (sDatabase))
ErrorDialog("Please install base_report_designer module.", "", "Module Uninstalled Error!")
self.logobj.log_write('Module not found.',LOG_WARNING, ': base_report_designer not installed in database %s.' % (sDatabase))
#self.win.endExecute()
else:
desktop=getDesktop()
@ -153,11 +153,11 @@ class ServerParameter( unohelper.Base, XJobExecutor ):
#docinfo.setUserFieldValue(2,self.win.getListBoxSelectedItem("lstDatabase"))
#docinfo.setUserFieldValue(3,"")
ErrorDialog(" You can start creating your report in \n \t the current document.","After Creating sending to the server.","Message")
self.logobj.log_write('successful login',LOG_INFO, ':successful login from %s using database %s' % (sLogin, sDatabase))
ErrorDialog("You can start creating your report in the current document.", "After creating, sending to the server.", "Message !")
self.logobj.log_write('successful login',LOG_INFO, ': successful login from %s using database %s' % (sLogin, sDatabase))
self.win.endExecute()
def btnCancel_clicked( self, oActionEvent ):
self.win.endExecute()
@ -165,7 +165,7 @@ class ServerParameter( unohelper.Base, XJobExecutor ):
self.win.endExecute()
Change(None)
self.win.endExecute()
if __name__<>"package" and __name__=="__main__":
ServerParameter(None)

View File

@ -3,31 +3,31 @@
# Portions of this file are under the following copyright and license:
#
#
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#
#
# and other portions are under the following copyright and license:
#
#
# OpenERP, Open Source Management Solution>..
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -175,7 +175,7 @@ class AddLang(unohelper.Base, XJobExecutor ):
res = self.sock.execute(database, uid, self.password, sObject , 'read',[ids[0]])
self.win.setEditText("txtUName",res[0][sMain[sMain.rfind("/")+1:]])
else:
ErrorDialog("Please select the Language Field")
ErrorDialog("Please select a language.")
except:
import traceback;traceback.print_exc()
@ -261,7 +261,7 @@ class AddLang(unohelper.Base, XJobExecutor ):
self.win.endExecute()
else:
ErrorDialog("Please Fill appropriate data in Name field \nor select perticular value from the list of fields")
ErrorDialog("Please fill appropriate data in name field \nor select particular value from the list of fields.")
def btnCancel_clicked( self, oActionEvent ):
self.win.endExecute()

View File

@ -56,14 +56,14 @@ class mysocket:
while totalsent < size:
sent = self.sock.send(msg[totalsent:])
if sent == 0:
raise RuntimeError, "socket connection broken"
raise RuntimeError, "Socket connection broken."
totalsent = totalsent + sent
def myreceive(self):
buf=''
while len(buf) < 8:
chunk = self.sock.recv(8 - len(buf))
if chunk == '':
raise RuntimeError, "socket connection broken"
raise RuntimeError, "Socket connection broken."
buf += chunk
size = int(buf)
buf = self.sock.recv(1)
@ -75,7 +75,7 @@ class mysocket:
while len(msg) < size:
chunk = self.sock.recv(size-len(msg))
if chunk == '':
raise RuntimeError, "socket connection broken"
raise RuntimeError, "Socket connection broken."
msg = msg + chunk
msgio = cStringIO.StringIO(msg)
unpickler = cPickle.Unpickler(msgio)
@ -90,6 +90,4 @@ class mysocket:
return res[0]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
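The mysocket helper above implements a small length-prefixed protocol: an 8-byte ASCII size header, then the (pickled) payload, with every partial recv() retried. A minimal sketch of the receive side, leaving out the extra flag byte and the unpickling:

def recv_exactly(sock, size):
    # sock is any connected socket object; a zero-length read means the peer closed.
    data = ''
    while len(data) < size:
        chunk = sock.recv(size - len(data))
        if chunk == '':
            raise RuntimeError("Socket connection broken.")
        data += chunk
    return data

def recv_message(sock):
    # 8-byte ASCII length header followed by the payload, as in myreceive().
    size = int(recv_exactly(sock, 8))
    return recv_exactly(sock, size)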

View File

@ -3,31 +3,31 @@
# Portions of this file are under the following copyright and license:
#
#
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
# Copyright (c) 2003-2004 Danny Brewer
# d29583@groovegarden.com
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#
#
# and other portions are under the following copyright and license:
#
#
# OpenERP, Open Source Management Solution>..
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -100,8 +100,7 @@ class modify(unohelper.Base, XJobExecutor ):
RepeatIn( start_group1, group2, stop_group1, item, True )
else:
ErrorDialog(
"Please place your cursor at begaining of field \n"
"which you want to modify",""
"Please place your cursor at beginning of field that you want to modify.",""
)
else:

View File

@ -25,7 +25,7 @@ from tools.translate import _
class base_stage(object):
""" Base utility mixin class for objects willing to manage their stages.
Object that inherit from this class should inherit from mailgate.thread
to have access to the mail gateway, as well as Chatter. Objects
to have access to the mail gateway, as well as Chatter. Objects
subclassing this class should define the following colums:
- ``date_open`` (datetime field)
- ``date_closed`` (datetime field)
@ -104,13 +104,13 @@ class base_stage(object):
def stage_find(self, cr, uid, cases, section_id, domain=[], order='sequence', context=None):
""" Find stage, with a given (optional) domain on the search,
ordered by the order parameter. If several stages match the
ordered by the order parameter. If several stages match the
search criterions, the first one will be returned, according
to the requested search order.
This method is meant to be overriden by subclasses. That way
specific behaviors can be achieved for every class inheriting
from base_stage.
:param cases: browse_record of cases
:param section_id: section limitating the search, given for
a generic search (for example default search).
@ -202,7 +202,7 @@ class base_stage(object):
if case.section_id.parent_id.user_id:
data['user_id'] = case.section_id.parent_id.user_id.id
else:
raise osv.except_osv(_('Error !'), _('You can not escalate, you are already at the top level regarding your sales-team category.'))
raise osv.except_osv(_('Error!'), _("You are already at the top level of your sales-team category.\nTherefore you cannot escalate furthermore."))
self.write(cr, uid, [case.id], data, context=context)
case.case_escalate_send_note(case.section_id.parent_id, context=context)
cases = self.browse(cr, uid, ids, context=context)
@ -221,7 +221,7 @@ class base_stage(object):
self.case_set(cr, uid, [case.id], 'open', data, context=context)
self.case_open_send_note(cr, uid, [case.id], context=context)
return True
def case_close(self, cr, uid, ids, context=None):
""" Closes case """
self.case_set(cr, uid, ids, 'done', {'active': True, 'date_closed': fields.datetime.now()}, context=context)
@ -250,14 +250,14 @@ class base_stage(object):
""" Generic method for setting case. This methods wraps the update
of the record, as well as call to _action and browse_record
case setting to fill the cache.
:params new_state_name: the new state of the record; this method
will call ``stage_set_with_state_name``
that will find the stage matching the
new state, using the ``stage_find`` method.
:params new_stage_id: alternatively, you may directly give the
new stage of the record
:params state_name: the new value of the state, such as
:params state_name: the new value of the state, such as
'draft' or 'close'.
:params update_values: values that will be added with the state
update when writing values to the record.
@ -370,20 +370,20 @@ class base_stage(object):
l.append(case.user_id.user_email)
res[case.id] = l
return res
# ******************************
# Notifications
# ******************************
def case_get_note_msg_prefix(self, cr, uid, id, context=None):
""" Default prefix for notifications. For example: "%s has been
""" Default prefix for notifications. For example: "%s has been
<b>closed</b>.". As several models will inherit from base_stage,
this method returns a void string. Class using base_stage
will have to override this method to define the prefix they
want to display.
"""
return ''
def stage_set_send_note(self, cr, uid, ids, stage_id, context=None):
""" Send a notification when the stage changes. This method has
to be overriden, because each document will have its particular
@ -391,7 +391,7 @@ class base_stage(object):
crm.case.stage).
"""
return True
def case_open_send_note(self, cr, uid, ids, context=None):
for id in ids:
msg = _('%s has been <b>opened</b>.') % (self.case_get_note_msg_prefix(cr, uid, id, context=context))
@ -421,7 +421,7 @@ class base_stage(object):
msg = _('%s has been <b>renewed</b>.') % (self.case_get_note_msg_prefix(cr, uid, id, context=context))
self.message_append_note(cr, uid, [id], body=msg, context=context)
return True
def case_escalate_send_note(self, cr, uid, ids, new_section=None, context=None):
for id in ids:
if new_section:

View File

@ -108,7 +108,7 @@ class res_partner(osv.osv):
def button_check_vat(self, cr, uid, ids, context=None):
if not self.check_vat(cr, uid, ids, context=context):
msg = self._construct_constraint_msg(cr, uid, ids, context=context)
raise osv.except_osv(_('Error'), msg)
raise osv.except_osv(_('Error!'), msg)
def check_vat(self, cr, uid, ids, context=None):
user_company = self.pool.get('res.users').browse(cr, uid, uid).company_id

View File

@ -252,20 +252,20 @@ class node_calendar(nodes.node_class):
elif cfe.getAttribute('end'):
_log.warning("Ignore end.. ")
else:
_log.debug("Unknown comp-filter: %s", cfe.localName)
_log.debug("Unknown comp-filter: %s.", cfe.localName)
else:
_log.debug("Unknown comp-filter: %s", vevent_filter.localName)
_log.debug("Unknown comp-filter: %s.", vevent_filter.localName)
else:
_log.debug("Unknown filter element: %s", vcalendar_filter.localName)
_log.debug("Unknown filter element: %s.", vcalendar_filter.localName)
else:
_log.debug("Unknown calendar-query element: %s", filter_child.localName)
_log.debug("Unknown calendar-query element: %s.", filter_child.localName)
return res
elif filters.localName == 'calendar-multiget':
# this is not the place to process, as it wouldn't support multi-level
# hrefs. So, the code is moved to document_webdav/dav_fs.py
pass
else:
_log.debug("Unknown element in REPORT: %s", filters.localName)
_log.debug("Unknown element in REPORT: %s.", filters.localName)
return res
def children(self, cr, domain=None):
@ -341,7 +341,7 @@ class node_calendar(nodes.node_class):
if line.name == ourcal.type:
line_id = line.id
break
assert line_id, "Calendar #%d must have at least one %s line" % \
assert line_id, "Calendar #%d must have at least one %s line." % \
(ourcal.id, ourcal.type)
if path.endswith('.ics'):
path = path[:-4]

View File

@ -306,7 +306,7 @@ class CalDAV(object):
if cal_data.name.lower() == 'organizer':
dmail = { 'name': cal_data.params.get('CN', ['',])[0],
'email': cal_data.value.lower().replace('mailto:',''),
# TODO: company?
# TODO: company?
}
self.ical_set(cal_data.name.lower(), mailto2str(dmail), 'value')
continue
@ -382,7 +382,7 @@ class CalDAV(object):
ical.contents['vevent'].append(revents)
#END
if data.get('recurrent_uid', None):
# Change the UID value in case of modified event from any recurrent event
# Change the UID value in case of modified event from any recurrent event
uidval = openobjectid2uid(cr, data['recurrent_uid'], model)
vevent.add('uid').value = uidval
elif field == 'attendee' and data[map_field]:
@ -412,7 +412,7 @@ class CalDAV(object):
if exfield:
# Set exdates according to timezone value
# This is the case when timezone mapping comes after the exdate mapping
# and we have exdate value available
# and we have exdate value available
exfield.params['TZID'] = [tzval.title()]
exdates_updated = []
for exdate in exdates:
@ -432,7 +432,7 @@ class CalDAV(object):
if tzval:
# Set exdates according to timezone value
# This is the case when timezone mapping comes before the exdate mapping
# and we have timezone value available
# and we have timezone value available
exfield.params['TZID'] = [tzval.title()]
exdates_updated = []
for exdate in exdates:
@ -448,7 +448,7 @@ class CalDAV(object):
dtfield.value = self.format_date_tz(parser.parse(data[map_field]), tzval.title())
else:
dtfield.value = parser.parse(data[map_field])
elif map_type == 'utc'and data[map_field]:
if tzval:
local = pytz.timezone (tzval.title())
@ -554,7 +554,7 @@ class CalDAV(object):
@param data_id: Get Datas ID or False
@param context: A standard dictionary for contextual values
"""
ical_data = content
self.__attribute__ = get_attribute_mapping(cr, uid, self._calname, context)
parsedCal = vobject.readOne(ical_data)
@ -602,7 +602,7 @@ class Calendar(CalDAV, osv.osv):
'calendar_order': fields.integer('Order', help="For supporting clients, the order of this folder among the calendars"),
'has_webcal': fields.boolean('WebCal', required=True, help="Also export a <name>.ics entry next to the calendar folder, with WebCal content."),
}
_defaults = {
'has_webcal': False,
}
@ -635,7 +635,7 @@ class Calendar(CalDAV, osv.osv):
node = res_node_calendar('%s.ics' %data.id, parent, ctx, data, line.object_id.model, data.id)
res.append(node)
return res
def get_cal_max_modified(self, cr, uid, ids, parent=None, domain=None, context=None):
if context is None:
@ -729,10 +729,10 @@ class Calendar(CalDAV, osv.osv):
elif child.name.upper() in ('PRODID', 'VERSION'):
pass
elif child.name.upper().startswith('X-'):
_logger.debug("skipping custom node %s", child.name)
_logger.debug("Skipping custom node %s.", child.name)
else:
_logger.debug("skipping node %s", child.name)
_logger.debug("Skipping node %s.", child.name)
res = []
for obj_name in list(set(objs)):
obj = self.pool.get(obj_name)
@ -781,25 +781,25 @@ class basic_calendar_line(osv.osv):
"""
cr.execute("SELECT COUNT(id) FROM basic_calendar_lines \
WHERE name=%s AND calendar_id=%s",
WHERE name=%s AND calendar_id=%s",
(vals.get('name'), vals.get('calendar_id')))
res = cr.fetchone()
if res:
if res[0] > 0:
raise osv.except_osv(_('Warning !'), _('Can not create line "%s" more than once') % (vals.get('name')))
raise osv.except_osv(_('Warning!'), _('Cannot create line "%s" more than once.') % (vals.get('name')))
return super(basic_calendar_line, self).create(cr, uid, vals, context=context)
basic_calendar_line()
class basic_calendar_alias(osv.osv):
""" Mapping of client filenames to ORM ids of calendar records
Since some clients insist on putting arbitrary filenames on the .ics data
they send us, and they won't respect the redirection "Location:" header,
they send us, and they won't respect the redirection "Location:" header,
we have to store those filenames and allow clients to call our calendar
records with them.
Note that adding a column to all tables that would possibly hold calendar-
mapped data won't work. The user is always allowed to specify more
mapped data won't work. The user is always allowed to specify more
calendars, on any arbitrary ORM object, without need to alter those tables'
data or structure
"""
@ -810,7 +810,7 @@ class basic_calendar_alias(osv.osv):
select=1, help='The calendar/line this mapping applies to'),
'res_id': fields.integer('Res. ID', required=True, select=1),
}
_sql_constraints = [ ('name_cal_uniq', 'UNIQUE(cal_line_id, name)',
_('The same filename cannot apply to two records!')), ]
@ -856,7 +856,7 @@ class basic_calendar_fields(osv.osv):
}
_sql_constraints = [
( 'name_type_uniq', 'UNIQUE(name, type_id)', 'Can not map a field more than once'),
( 'name_type_uniq', 'UNIQUE(name, type_id)', 'Cannot map a field more than once.'),
]
def check_line(self, cr, uid, vals, name, context=None):
@ -876,7 +876,7 @@ class basic_calendar_fields(osv.osv):
line = line_obj.browse(cr, uid, l_id, context=context)[0]
line_rel = line.object_id.model
if (relation != 'NULL') and (not relation == line_rel):
raise osv.except_osv(_('Warning !'), _('Please provide proper configuration of "%s" in Calendar Lines') % (name))
raise osv.except_osv(_('Warning!'), _('Please provide proper configuration of "%s" in Calendar Lines.') % (name))
return True
def create(self, cr, uid, vals, context=None):
@ -1192,7 +1192,7 @@ class Alarm(CalDAV, osv.osv_memory):
elif isinstance(child.value, datetime):
# TODO
# remember, spec says this datetime is in UTC
raise NotImplementedError("we cannot parse absolute triggers")
raise NotImplementedError("Cannot parse absolute triggers.")
if not seconds:
duration = abs(days)
related = days > 0 and 'after' or 'before'
@ -1290,7 +1290,7 @@ class Attendee(CalDAV, osv.osv_memory):
attendee_add.params['CN'] = cn_val
if not attendee['email']:
attendee_add.value = 'MAILTO:'
#raise osv.except_osv(_('Error !'), _('Attendee must have an Email Id'))
#raise osv.except_osv(_('Error!'), _('Attendee must have an Email Id'))
elif attendee['email']:
attendee_add.value = 'MAILTO:' + attendee['email']
return vevent

View File

@ -87,7 +87,7 @@ msgstr ""
#. module: caldav
#: sql_constraint:basic.calendar.fields:0
msgid "Can not map a field more than once"
msgid "Cannot map a field more than once."
msgstr ""
#. module: caldav
@ -164,7 +164,7 @@ msgstr ""
#. module: caldav
#: code:addons/caldav/calendar.py:879
#, python-format
msgid "Please provide proper configuration of \"%s\" in Calendar Lines"
msgid "Please provide proper configuration of \"%s\" in Calendar Lines."
msgstr ""
#. module: caldav
@ -207,7 +207,7 @@ msgstr ""
#. module: caldav
#: code:addons/caldav/calendar.py:789
#, python-format
msgid "Can not create line \"%s\" more than once"
msgid "Cannot create line \"%s\" more than once."
msgstr ""
#. module: caldav
@ -365,7 +365,7 @@ msgstr ""
#. module: caldav
#: code:addons/caldav/wizard/calendar_event_import.py:63
#, python-format
msgid "Invalid format of the ics, file can not be imported"
msgid "Invalid format of the ics, file cannot be imported."
msgstr ""
#. module: caldav

View File

@ -178,7 +178,7 @@ configuration
res = {}
host = context.get('host')
if not config.get_misc('webdav','enable',True):
raise Exception("WebDAV is disabled, cannot continue")
raise Exception("WebDAV is disabled, cannot continue.")
user_pool = self.pool.get('res.users')
current_user = user_pool.browse(cr, uid, uid, context=context)
#TODO write documentation

View File

@ -60,7 +60,7 @@ class calendar_event_import(osv.osv_memory):
try:
vals = model_obj.import_cal(cr, uid, base64.decodestring(data['file_path']), context['active_id'], context)
except:
raise osv.except_osv(_('Warning !'),_('Invalid format of the ics, file can not be imported'))
raise osv.except_osv(_('Warning!'),_('Invalid format of the ics, file cannot be imported.'))
global cnt
if vals:
cnt = len(vals)

View File

@ -59,7 +59,7 @@ class base_action_rule(osv.osv):
else:
reply_to = emailfrom
if not emailfrom:
raise osv.except_osv(_('Error!'), _("No Email Found for your Company address!"))
raise osv.except_osv(_('Error!'), _("There is no email for your company address."))
return mail_message.schedule_with_attach(cr, uid, emailfrom, emails, name, body, model=obj._name, reply_to=reply_to, res_id=obj.id)
def do_check(self, cr, uid, action, obj, context=None):

View File

@ -280,7 +280,7 @@ class crm_lead(base_stage, osv.osv):
obj_id = super(crm_lead, self).create(cr, uid, vals, context)
self.create_send_note(cr, uid, [obj_id], context=context)
return obj_id
def onchange_stage_id(self, cr, uid, ids, stage_id, context=None):
if not stage_id:
return {'value':{}}
@ -537,7 +537,7 @@ class crm_lead(base_stage, osv.osv):
lead_ids = context and context.get('lead_ids', []) or []
if len(ids) <= 1:
raise osv.except_osv(_('Warning !'),_('Please select more than one opportunity from the list view.'))
raise osv.except_osv(_('Warning!'),_('Please select more than one opportunity from the list view.'))
ctx_opportunities = self.browse(cr, uid, lead_ids, context=context)
opportunities = self.browse(cr, uid, ids, context=context)
@ -795,9 +795,9 @@ class crm_lead(base_stage, osv.osv):
def unlink(self, cr, uid, ids, context=None):
for lead in self.browse(cr, uid, ids, context):
if (not lead.section_id.allow_unlink) and (lead.state != 'draft'):
raise osv.except_osv(_('Error'),
_("You cannot delete lead '%s'; it must be in state 'Draft' to be deleted. " \
"You should better cancel it, instead of deleting it.") % lead.name)
raise osv.except_osv(_('Error!'),
_("You cannot delete lead '%s' because it is not in 'Draft' state. " \
"You can still cancel it, instead of deleting it.") % lead.name)
return super(crm_lead, self).unlink(cr, uid, ids, context)
def write(self, cr, uid, ids, vals, context=None):
@ -871,12 +871,12 @@ class crm_lead(base_stage, osv.osv):
""" Override of the (void) default notification method. """
stage_name = self.pool.get('crm.case.stage').name_get(cr, uid, [stage_id], context=context)[0][1]
return self.message_append_note(cr, uid, ids, body= _("Stage changed to <b>%s</b>.") % (stage_name), context=context)
def case_get_note_msg_prefix(self, cr, uid, lead, context=None):
if isinstance(lead, (int, long)):
lead = self.browse(cr, uid, [lead], context=context)[0]
return ('Opportunity' if lead.type == 'opportunity' else 'Lead')
def create_send_note(self, cr, uid, ids, context=None):
for id in ids:
message = _("%s has been <b>created</b>.")% (self.case_get_note_msg_prefix(cr, uid, id, context=context))
@ -903,7 +903,7 @@ class crm_lead(base_stage, osv.osv):
message = _("%s <b>partner</b> is now set to <em>%s</em>." % (self.case_get_note_msg_prefix(cr, uid, lead, context=context), lead.partner_id.name))
lead.message_append_note(body=message)
return True
def convert_opportunity_send_note(self, cr, uid, lead, context=None):
message = _("Lead has been <b>converted to an opportunity</b>.")
lead.message_append_note(body=message)

View File

@ -113,7 +113,7 @@ class res_users(osv.osv):
'user_id': user_id}, context=context)
except:
# Tolerate a missing shortcut. See product/product.py for similar code.
_logger.debug('Skipped meetings shortcut for user "%s"', data.get('name','<new'))
_logger.debug('Skipped meetings shortcut for user "%s".', data.get('name','<new'))
return user_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -285,7 +285,7 @@ class crm_phonecall(base_state, osv.osv):
return True
def _call_set_partner_send_note(self, cr, uid, ids, context=None):
return self.message_append_note(cr, uid, ids, body=_("Partner has been <b>created</b>"), context=context)
return self.message_append_note(cr, uid, ids, body=_("Partner has been <b>created</b>."), context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

View File

@ -110,7 +110,7 @@ msgstr ""
#. module: crm
#: code:addons/crm/wizard/crm_add_note.py:28
#, python-format
msgid "Can not add note!"
msgid "Cannot add note."
msgstr ""
#. module: crm
@ -802,7 +802,7 @@ msgstr ""
#: code:addons/crm/crm_lead.py:832
#, python-format
msgid ""
"You cannot delete lead '%s'; it must be in state 'Draft' to be deleted. You "
"You cannot delete lead '%s'; because it is not in 'Draft' state. You "
"should better cancel it, instead of deleting it."
msgstr ""
@ -3184,7 +3184,7 @@ msgstr ""
#. module: crm
#: code:addons/crm/wizard/crm_lead_to_opportunity.py:104
#, python-format
msgid "Closed/Cancelled Leads can not be converted into Opportunity"
msgid "Closed/Cancelled Leads cannot be converted into Opportunity."
msgstr ""
#. module: crm
@ -3273,7 +3273,7 @@ msgstr ""
#. module: crm
#: constraint:crm.segmentation:0
msgid "Error ! You can not create recursive profiles."
msgid "Error ! You cannot create recursive profiles."
msgstr ""
#. module: crm
@ -3401,7 +3401,7 @@ msgstr ""
#. module: crm
#: code:addons/crm/crm_action_rule.py:61
#, python-format
msgid "No Email Found for your Company address!"
msgid "There is no Email for your Company address."
msgstr ""
#. module: crm

View File

@ -99,7 +99,7 @@ class crm_lead2opportunity_partner(osv.osv_memory):
lead_obj = self.pool.get('crm.lead')
for lead in lead_obj.browse(cr, uid, context.get('active_ids', []), context=context):
if lead.state in ['done', 'cancel']:
raise osv.except_osv(_("Warning !"), _("Closed/Cancelled Leads can not be converted into Opportunity"))
raise osv.except_osv(_("Warning !"), _("Closed/Cancelled leads cannot be converted into opportunities."))
return False
def _convert_opportunity(self, cr, uid, ids, vals, context=None):
@ -110,7 +110,7 @@ class crm_lead2opportunity_partner(osv.osv_memory):
lead_ids = vals.get('lead_ids', [])
user_ids = vals.get('user_ids', False)
team_id = vals.get('section_id', False)
return lead.convert_opportunity(cr, uid, lead_ids, partner_id, user_ids, team_id, context=context)
return lead.convert_opportunity(cr, uid, lead_ids, partner_id, user_ids, team_id, context=context)
def _merge_opportunity(self, cr, uid, ids, opportunity_ids, action='merge', context=None):
#TOFIX: is it useful?
@ -131,7 +131,7 @@ class crm_lead2opportunity_partner(osv.osv_memory):
"""
if not context:
context = {}
lead = self.pool.get('crm.lead')
lead_ids = context.get('active_ids', [])
data = self.browse(cr, uid, ids, context=context)[0]

View File

@ -44,7 +44,7 @@ class crm_lead2partner(osv.osv_memory):
rec_ids = context and context.get('active_ids', [])
for this in model.browse(cr, uid, rec_ids, context=context):
if this.partner_id:
raise osv.except_osv(_('Warning !'),
raise osv.except_osv(_('Warning!'),
_('A partner is already defined.'))
def _select_partner(self, cr, uid, context=None):

View File

@ -115,7 +115,7 @@ msgstr ""
#. module: crm_partner_assign
#: code:addons/crm_partner_assign/partner_geo_assign.py:37
#, python-format
msgid "Could not contact geolocation servers, please make sure you have a working internet connection (%s)"
msgid "Cannot contact geolocation servers, please make sure you have a working internet connection (%s)."
msgstr ""
#. module: crm_partner_assign

View File

@ -34,7 +34,7 @@ def geo_find(addr):
xml = urllib.urlopen(url).read()
except Exception, e:
raise osv.except_osv(_('Network error'),
_('Could not contact geolocation servers, please make sure you have a working internet connection (%s)') % e)
_('Cannot contact geolocation servers. Please make sure that your internet connection is up and running (%s).') % e)
if '<error>' in xml:
return None
@ -46,11 +46,11 @@ def geo_find(addr):
def geo_query_address(street=None, zip=None, city=None, state=None, country=None):
if country and ',' in country and (country.endswith(' of') or country.endswith(' of the')):
# put country qualifier in front, otherwise GMap gives wrong results,
# e.g. 'Congo, Democratic Republic of the' => 'Democratic Republic of the Congo'
country = '{1} {0}'.format(*country.split(',',1))
return tools.ustr(', '.join(filter(None, [street,
("%s %s" % (zip or '', city or '')).strip(),
state,
# e.g. 'Congo, Democratic Republic of the' => 'Democratic Republic of the Congo'
country = '{1} {0}'.format(*country.split(',',1))
return tools.ustr(', '.join(filter(None, [street,
("%s %s" % (zip or '', city or '')).strip(),
state,
country])))
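The country re-ordering above is easy to verify in isolation; a short illustration (the address values are made up):

    country = 'Congo, Democratic Republic of the'
    country = '{1} {0}'.format(*country.split(',', 1))
    # -> ' Democratic Republic of the Congo' (the leading space is harmless in the query)

    geo_query_address(street='Avenue de la Gare 1', zip='1003', city='Lausanne',
                      country='Switzerland')
    # -> u'Avenue de la Gare 1, 1003 Lausanne, Switzerland'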
class res_partner_grade(osv.osv):

View File

@ -234,7 +234,7 @@ class crm_segmentation(osv.osv):
}
_constraints = [
(osv.osv._check_recursion, 'Error ! You can not create recursive profiles.', ['parent_id'])
(osv.osv._check_recursion, 'Error! You cannot create recursive profiles.', ['parent_id'])
]
def process_continue(self, cr, uid, ids, start=False):

View File

@ -129,7 +129,7 @@ msgstr ""
#. module: crm_profiling
#: constraint:crm.segmentation:0
msgid "Error ! You can not create recursive profiles."
msgid "Error ! You cannot create recursive profiles."
msgstr ""
#. module: crm_profiling

View File

@ -217,7 +217,7 @@ class delivery_grid(osv.osv):
ok = True
break
if not ok:
raise osv.except_osv(_('No price available!'), _('No line matched this product or order in the choosed delivery grid.'))
raise osv.except_osv(_('No price available!'), _('No line matched this product or order in the chosen delivery grid.'))
return price

View File

@ -86,7 +86,7 @@ class stock_picking(osv.osv):
grid_id = carrier_obj.grid_get(cr, uid, [picking.carrier_id.id],
picking.partner_id.id, context=context)
if not grid_id:
raise osv.except_osv(_('Warning'),
raise osv.except_osv(_('Warning!'),
_('The carrier %s (id: %d) has no delivery grid!') \
% (picking.carrier_id.name,
picking.carrier_id.id))

View File

@ -93,13 +93,13 @@ class indexer(object):
except NhException:
pass
raise NhException('No appropriate method to index file')
raise NhException('No appropriate method to index file.')
def _doIndexContent(self,content):
raise NhException("Content not handled here")
raise NhException("Content cannot be handled here.")
def _doIndexFile(self,fpath):
raise NhException("Content not handled here")
raise NhException("Content cannot be handled here.")
def __repr__(self):
return "<indexer %s.%s>" %(self.__module__, self.__class__.__name__)
@ -116,7 +116,7 @@ def mime_match(mime, mdict):
return (None, None)
class contentIndex(object):
def __init__(self):
self.mimes = {}
self.exts = {}
@ -132,9 +132,9 @@ class contentIndex(object):
f = True
if f:
_logger.debug('Register content indexer: %r', obj)
_logger.debug('Register content indexer: %r.', obj)
if not f:
raise Exception("Your indexer should at least suport a mimetype or extension")
raise Exception("Your indexer should at least support a mimetype or extension.")
def doIndex(self, content, filename=None, content_type=None, realfname = None, debug=False):
fobj = None
@ -169,22 +169,22 @@ class contentIndex(object):
(result, _) = pop.communicate()
mime2 = result.split(';')[0]
_logger.debug('File gave us: %s', mime2)
_logger.debug('File gives us: %s', mime2)
# Note that the temporary file still exists now.
mime,fobj = mime_match(mime2, self.mimes)
if not mime:
mime = mime2
except Exception:
_logger.exception('Cannot determine mime type')
_logger.exception('Cannot determine mime type.')
try:
if fobj:
res = (mime, fobj.indexContent(content,filename,fname or realfname) )
else:
_logger.debug("Have no object, return (%s, None)", mime)
_logger.debug("Have no object, return (%s, None).", mime)
res = (mime, None )
except Exception:
_logger.exception("Could not index file %s (%s)",
_logger.exception("Cannot index file %s (%s).",
filename, fname or realfname)
res = None
@ -193,7 +193,7 @@ class contentIndex(object):
try:
os.unlink(fname)
except Exception:
_logger.exception("Could not unlink %s", fname)
_logger.exception("Cannot unlink %s.", fname)
return res
cntIndex = contentIndex()
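A new file type is plugged into this registry by subclassing indexer; a minimal sketch, assuming the _getMimeTypes / _getExtensions / _doIndexContent hooks this registry consults (the extension hook name is inferred, not shown in this hunk):

    class PlainTextIndexer(indexer):
        def _getMimeTypes(self):
            return ['text/plain']
        def _getExtensions(self):
            return ['.txt']
        def _doIndexContent(self, content):
            # Plain text is already indexable as-is.
            return content

    cntIndex.register(PlainTextIndexer())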

View File

@ -23,7 +23,7 @@ import base64
from osv import osv, fields
import os
# from psycopg2 import Binary
#from psycopg2 import Binary
#from tools import config
import tools
from tools.translate import _
@ -37,8 +37,8 @@ DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config['roo
class document_file(osv.osv):
_inherit = 'ir.attachment'
_rec_name = 'datas_fname'
def _attach_parent_id(self, cr, uid, ids=None, context=None):
"""Migrate ir.attachments to the document module.
@ -61,7 +61,7 @@ class document_file(osv.osv):
return False
if ids is not None:
raise NotImplementedError("Ids is just there by convention! Don't use it yet, please.")
raise NotImplementedError("Ids are just there by convention, please do not use it.")
cr.execute("UPDATE ir_attachment " \
"SET parent_id = %s, db_datas = decode(encode(db_datas,'escape'), 'base64') " \
@ -294,7 +294,7 @@ class document_file(osv.osv):
('datas_fname', '=', vals['datas_fname']),
]
attach_ids = self.search(cr, uid, domain, context=context)
super(document_file, self).write(cr, uid, attach_ids,
super(document_file, self).write(cr, uid, attach_ids,
{'datas' : vals['datas']},
context=context)
result = attach_ids[0]
@ -339,7 +339,7 @@ class document_file(osv.osv):
if r:
unres.append(r)
else:
self.loggerdoc.warning("Unlinking attachment #%s %s that has no storage",
self.loggerdoc.warning("Unlinking attachment #%s %s that has no storage.",
f.id, f.name)
res = super(document_file, self).unlink(cr, uid, ids, context)
stor.do_unlink(cr, uid, unres)

View File

@ -78,7 +78,7 @@ class document_directory(osv.osv):
root_id = objid.read(cr, uid, mid, ['res_id'])['res_id']
return root_id
except Exception, e:
_logger.warning('Cannot set directory root:'+ str(e))
return False
return objid.browse(cr, uid, mid, context=context).res_id
@ -108,7 +108,7 @@ class document_directory(osv.osv):
_sql_constraints = [
('dirname_uniq', 'unique (name,parent_id,ressource_id,ressource_parent_type_id)', 'The directory name must be unique !'),
('no_selfparent', 'check(parent_id <> id)', 'Directory cannot be parent of itself!'),
('dir_parented', 'check(parent_id IS NOT NULL OR storage_id IS NOT NULL)', 'Directory must have a parent or a storage')
('dir_parented', 'check(parent_id IS NOT NULL OR storage_id IS NOT NULL)', 'Directory must have a parent or a storage.')
]
def name_get(self, cr, uid, ids, context=None):
res = []
@ -153,7 +153,7 @@ class document_directory(osv.osv):
return True
_constraints = [
(_check_recursion, 'Error! You can not create recursive Directories.', ['parent_id'])
(_check_recursion, 'Error! You cannot create recursive directories.', ['parent_id'])
]
def __init__(self, *args, **kwargs):
@ -179,7 +179,7 @@ class document_directory(osv.osv):
def get_node_class(self, cr, uid, ids, dbro=None, dynamic=False, context=None):
"""Retrieve the class of nodes for this directory
This function can be overriden by inherited classes ;)
@param dbro The browse object, if caller already has it
"""
@ -193,17 +193,17 @@ class document_directory(osv.osv):
elif dbro.type == 'ressource':
return nodes.node_res_dir
else:
raise ValueError("dir node for %s type", dbro.type)
raise ValueError("dir node for %s type.", dbro.type)
def _prepare_context(self, cr, uid, nctx, context=None):
""" Fill nctx with properties for this database
@param nctx instance of nodes.node_context, to be filled
@param context ORM context (dict) for us
Note that this function is called *without* a list of ids,
Note that this function is called *without* a list of ids,
it should behave the same for the whole database (based on the
ORM instance of document.directory).
Some databases may override this and attach properties to the
node_context. See WebDAV, CalDAV.
"""

View File

@ -52,7 +52,7 @@ For data /storage/ we have the cases:
Have (ir.attachment, context), we modify the file (save, update, rename etc).
Have (directory, context), we create a file.
Have (path, context), we create or modify a file.
Note that in all above cases, we don't explicitly choose the storage media,
but always require a context to be present.
@ -61,7 +61,7 @@ nodes, for once, won't. Their metadata will be computed by the parent storage
media + directory.
The algorithm says that in any of the above cases, our first goal is to locate
the node for any combination of search criteria. It would be wise NOT to
the node for any combination of search criteria. It would be wise NOT to
represent each node in the path (like node[/] + node[/dir1] + node[/dir1/dir2])
but directly jump to the end node (like node[/dir1/dir2]) whenever possible.
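In code, that "jump to the end node" is what the node_context helper further down (document/ftpserver) does; roughly:

    nctx = get_node_context(cr, uid, {})
    # Resolve /dir1/dir2 in one call instead of materialising node[/] and node[/dir1].
    node = nctx.get_uri(cr, ['dir1', 'dir2'])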
@ -99,13 +99,13 @@ class nodefd_file(nodes.node_descriptor):
mode = mode[:-1]
self.mode = mode
self._size = os.stat(path).st_size
for attr in ('closed', 'read', 'write', 'seek', 'tell', 'next'):
setattr(self,attr, getattr(self.__file, attr))
def size(self):
return self._size
def __iter__(self):
return self
@ -122,7 +122,7 @@ class nodefd_file(nodes.node_descriptor):
filename = par.path
if isinstance(filename, (tuple, list)):
filename = '/'.join(filename)
try:
mime, icont = cntIndex.doIndex(None, filename=filename,
content_type=None, realfname=fname)
@ -173,7 +173,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
self._size = 0L
if mode.endswith('b'):
mode = mode[:-1]
if mode in ('r', 'r+'):
cr = ira_browse._cr # reuse the cursor of the browse object, just now
cr.execute('SELECT db_datas FROM ir_attachment WHERE id = %s',(ira_browse.id,))
@ -188,8 +188,8 @@ class nodefd_db(StringIO, nodes.node_descriptor):
elif mode == 'a':
StringIO.__init__(self, None)
else:
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
_logger.error("Incorrect mode %s is specified.", mode)
raise IOError(errno.EINVAL, "Invalid file mode.")
self.mode = mode
def size(self):
@ -209,7 +209,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
filename = par.path
if isinstance(filename, (tuple, list)):
filename = '/'.join(filename)
try:
mime, icont = cntIndex.doIndex(data, filename=filename,
content_type=None, realfname=None)
@ -238,7 +238,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
(out, len(data), par.file_id))
cr.commit()
except Exception:
_logger.exception('Cannot update db file #%d for close:', par.file_id)
_logger.exception('Cannot update db file #%d for close.', par.file_id)
raise
finally:
cr.close()
@ -246,7 +246,7 @@ class nodefd_db(StringIO, nodes.node_descriptor):
class nodefd_db64(StringIO, nodes.node_descriptor):
""" A descriptor to db data, base64 (the old way)
It stores the data in base64 encoding at the db. Not optimal, but
the transparent compression of Postgres will save the day.
"""
@ -255,7 +255,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
self._size = 0L
if mode.endswith('b'):
mode = mode[:-1]
if mode in ('r', 'r+'):
data = base64.decodestring(ira_browse.db_datas)
if data:
@ -268,8 +268,8 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
elif mode == 'a':
StringIO.__init__(self, None)
else:
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
_logger.error("Incorrect mode %s is specified.", mode)
raise IOError(errno.EINVAL, "Invalid file mode.")
self.mode = mode
def size(self):
@ -289,7 +289,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
filename = par.path
if isinstance(filename, (tuple, list)):
filename = '/'.join(filename)
try:
mime, icont = cntIndex.doIndex(data, filename=filename,
content_type=None, realfname=None)
@ -317,7 +317,7 @@ class nodefd_db64(StringIO, nodes.node_descriptor):
(base64.encodestring(data), len(data), par.file_id))
cr.commit()
except Exception:
_logger.exception('Cannot update db file #%d for close:', par.file_id)
_logger.exception('Cannot update db file #%d for close.', par.file_id)
raise
finally:
cr.close()
@ -330,7 +330,7 @@ class document_storage(osv.osv):
media.
The referring document.directory-ies will control the placement of data
into the storage.
It is a bad idea to have multiple document.storage objects pointing to
the same tree of filesystem storage.
"""
@ -384,12 +384,12 @@ class document_storage(osv.osv):
def __prepare_realpath(self, cr, file_node, ira, store_path, do_create=True):
""" Cleanup path for realstore, create dirs if needed
@param file_node the node
@param ira ir.attachment browse of the file_node
@param store_path the path of the parent storage object, list
@param do_create create the directories, if needed
@return tuple(path "/var/filestore/real/dir/", npath ['dir','fname.ext'] )
"""
file_node.fix_ppath(cr, ira)
@ -401,10 +401,10 @@ class document_storage(osv.osv):
# self._logger.debug('Npath: %s', npath)
for n in npath:
if n == '..':
raise ValueError("Invalid '..' element in path")
raise ValueError("Invalid '..' element in path.")
for ch in ('*', '|', "\\", '/', ':', '"', '<', '>', '?',):
if ch in n:
raise ValueError("Invalid char %s in path %s" %(ch, n))
raise ValueError("Invalid char %s in path %s." %(ch, n))
dpath = [store_path,]
dpath += npath[:-1]
path = os.path.join(*dpath)
@ -420,8 +420,8 @@ class document_storage(osv.osv):
"""
boo = self.browse(cr, uid, id, context=context)
if not boo.online:
raise IOError(errno.EREMOTE, 'medium offline')
raise IOError(errno.EREMOTE, 'Medium offline.')
if fil_obj:
ira = fil_obj
else:
@ -435,11 +435,11 @@ class document_storage(osv.osv):
context = {}
boo = self.browse(cr, uid, id, context=context)
if not boo.online:
raise IOError(errno.EREMOTE, 'medium offline')
raise IOError(errno.EREMOTE, 'Medium offline.')
if boo.readonly and mode not in ('r', 'rb'):
raise IOError(errno.EPERM, "Readonly medium")
raise IOError(errno.EPERM, "Readonly medium.")
ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
if boo.type == 'filestore':
if not ira.store_fname:
@ -447,8 +447,8 @@ class document_storage(osv.osv):
# try to fix their directory.
if mode in ('r','r+'):
if ira.file_size:
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
raise IOError(errno.ENOENT, 'No file can be located')
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore. This should get fixed." % ira.id)
raise IOError(errno.ENOENT, 'No file can be located.')
else:
store_fname = self.__get_random_fname(boo.path)
cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s',
@ -470,7 +470,7 @@ class document_storage(osv.osv):
do_create = (mode[0] in ('w','a')) )
fpath = os.path.join(path, npath[-1])
if (not os.path.exists(fpath)) and mode[0] == 'r':
raise IOError("File not found: %s" % fpath)
raise IOError("File not found: %s." % fpath)
elif mode[0] in ('w', 'a') and not ira.store_fname:
store_fname = os.path.join(*npath)
cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s',
@ -478,10 +478,10 @@ class document_storage(osv.osv):
return nodefd_file(file_node, path=fpath, mode=mode)
elif boo.type == 'virtual':
raise ValueError('Virtual storage does not support static files')
raise ValueError('Virtual storage does not support static files.')
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage." % boo.type)
def __get_data_3(self, cr, uid, boo, ira, context):
if boo.type == 'filestore':
@ -489,7 +489,7 @@ class document_storage(osv.osv):
# On a migrated db, some files may have the wrong storage type
# try to fix their directory.
if ira.file_size:
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
_logger.warning( "ir.attachment #%d does not have a filename, but is at filestore. This should get fixed." % ira.id)
return None
fpath = os.path.join(boo.path, ira.store_fname)
return file(fpath, 'rb').read()
@ -521,13 +521,13 @@ class document_storage(osv.osv):
elif not ira.store_fname:
return None
else:
raise IOError(errno.ENOENT, "File not found: %s" % fpath)
raise IOError(errno.ENOENT, "File not found: %s." % fpath)
elif boo.type == 'virtual':
raise ValueError('Virtual storage does not support static files')
raise ValueError('Virtual storage does not support static files.')
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage!" % boo.type)
def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None):
""" store the data.
@ -541,12 +541,12 @@ class document_storage(osv.osv):
ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
if not boo.online:
raise IOError(errno.EREMOTE, 'medium offline')
if boo.readonly:
raise IOError(errno.EPERM, "Readonly medium")
raise IOError(errno.EREMOTE, 'Medium offline.')
_logger.debug( "Store data for ir.attachment #%d" % ira.id)
if boo.readonly:
raise IOError(errno.EPERM, "Readonly medium.")
_logger.debug( "Store data for ir.attachment #%d." % ira.id)
store_fname = None
fname = None
if boo.type == 'filestore':
@ -557,15 +557,15 @@ class document_storage(osv.osv):
fp = open(fname, 'wb')
try:
fp.write(data)
finally:
finally:
fp.close()
_logger.debug( "Saved data to %s" % fname)
_logger.debug( "Saved data to %s." % fname)
filesize = len(data) # os.stat(fname).st_size
# TODO Here, an old file would be left hanging.
except Exception, e:
_logger.warning( "Couldn't save data to %s", path, exc_info=True)
_logger.warning( "Cannot save data to %s.", path, exc_info=True)
raise except_orm(_('Error!'), str(e))
elif boo.type == 'db':
filesize = len(data)
@ -586,21 +586,21 @@ class document_storage(osv.osv):
fp = open(fname,'wb')
try:
fp.write(data)
finally:
finally:
fp.close()
_logger.debug("Saved data to %s", fname)
_logger.debug("Saved data to %s.", fname)
filesize = len(data) # os.stat(fname).st_size
store_fname = os.path.join(*npath)
# TODO Here, an old file would be left hanging.
except Exception,e :
_logger.warning("Couldn't save data:", exc_info=True)
_logger.warning("Cannot save data.", exc_info=True)
raise except_orm(_('Error!'), str(e))
elif boo.type == 'virtual':
raise ValueError('Virtual storage does not support static files')
raise ValueError('Virtual storage does not support static files.')
else:
raise TypeError("No %s storage" % boo.type)
raise TypeError("No %s storage!" % boo.type)
# 2nd phase: store the metadata
try:
@ -612,7 +612,7 @@ class document_storage(osv.osv):
mime, icont = cntIndex.doIndex(data, ira.datas_fname,
ira.file_type or None, fname)
except Exception:
_logger.debug('Cannot index file:', exc_info=True)
_logger.debug('Cannot index file.', exc_info=True)
pass
try:
@ -629,7 +629,7 @@ class document_storage(osv.osv):
file_node.content_type = mime
return True
except Exception, e :
self._logger.warning("Couldn't save data:", exc_info=True)
self._logger.warning("Cannot save data.", exc_info=True)
# should we really rollback once we have written the actual data?
# at the db case (only), that rollback would be safe
raise except_orm(_('Error at doc write!'), str(e))
@ -639,10 +639,10 @@ class document_storage(osv.osv):
files that have to be removed, too. """
if not storage_bo.online:
raise IOError(errno.EREMOTE, 'medium offline')
raise IOError(errno.EREMOTE, 'Medium offline.')
if storage_bo.readonly:
raise IOError(errno.EPERM, "Readonly medium")
raise IOError(errno.EPERM, "Readonly medium.")
if storage_bo.type == 'filestore':
fname = fil_bo.store_fname
@ -659,7 +659,7 @@ class document_storage(osv.osv):
path = storage_bo.path
return ( storage_bo.id, 'file', os.path.join(path, fname))
else:
raise TypeError("No %s storage" % storage_bo.type)
raise TypeError("No %s storage!" % storage_bo.type)
def do_unlink(self, cr, uid, unres):
for id, ktype, fname in unres:
@ -667,9 +667,9 @@ class document_storage(osv.osv):
try:
os.unlink(fname)
except Exception:
_logger.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
_logger.warning("Cannot remove file %s, please remove it manually.", fname, exc_info=True)
else:
_logger.warning("Unknown unlink key %s" % ktype)
_logger.warning("Unlink unknown key %s." % ktype)
return True
@ -677,17 +677,17 @@ class document_storage(osv.osv):
""" A preparation for a file rename.
It will not affect the database, but merely check and perhaps
rename the realstore file.
@return the dict of values that can safely be stored in the db.
"""
sbro = self.browse(cr, uid, file_node.storage_id, context=context)
assert sbro, "The file #%d didn't provide storage" % file_node.file_id
if not sbro.online:
raise IOError(errno.EREMOTE, 'medium offline')
raise IOError(errno.EREMOTE, 'Medium offline.')
if sbro.readonly:
raise IOError(errno.EPERM, "Readonly medium")
raise IOError(errno.EPERM, "Readonly medium.")
if sbro.type in ('filestore', 'db', 'db64'):
# nothing to do for a rename, allow to change the db field
@ -699,9 +699,9 @@ class document_storage(osv.osv):
fname = ira.store_fname
if not fname:
_logger.warning("Trying to rename a non-stored file")
_logger.warning("Trying to rename a non-stored file.")
if fname != os.path.join(*npath):
_logger.warning("inconsistency in realstore: %s != %s" , fname, repr(npath))
_logger.warning("Inconsistency to realstore: %s != %s." , fname, repr(npath))
oldpath = os.path.join(path, npath[-1])
newpath = os.path.join(path, new_name)
@ -711,13 +711,13 @@ class document_storage(osv.osv):
store_fname = os.path.join(*store_path)
return { 'name': new_name, 'datas_fname': new_name, 'store_fname': store_fname }
else:
raise TypeError("No %s storage" % sbro.type)
raise TypeError("No %s storage!" % sbro.type)
def simple_move(self, cr, uid, file_node, ndir_bro, context=None):
""" A preparation for a file move.
It will not affect the database, but merely check and perhaps
move the realstore file.
@param ndir_bro a browse object of document.directory, where this
file should move to.
@return the dict of values that can safely be stored in the db.
@ -726,10 +726,10 @@ class document_storage(osv.osv):
assert sbro, "The file #%d didn't provide storage" % file_node.file_id
if not sbro.online:
raise IOError(errno.EREMOTE, 'medium offline')
raise IOError(errno.EREMOTE, 'Medium offline.')
if sbro.readonly:
raise IOError(errno.EPERM, "Readonly medium")
raise IOError(errno.EPERM, "Readonly medium.")
par = ndir_bro
psto = None
@ -739,8 +739,8 @@ class document_storage(osv.osv):
break
par = par.parent_id
if file_node.storage_id != psto:
_logger.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name)
raise NotImplementedError('Cannot move files between storage media')
_logger.debug('Cannot move file %r from %r to %r.', file_node, file_node.parent, ndir_bro.name)
raise NotImplementedError('Cannot move files between storage media.')
if sbro.type in ('filestore', 'db', 'db64'):
# nothing to do for a rename, allow to change the db field
@ -752,30 +752,30 @@ class document_storage(osv.osv):
fname = ira.store_fname
if not fname:
_logger.warning("Trying to rename a non-stored file")
_logger.warning("Trying to rename a non-stored file.")
if fname != os.path.join(*opath):
_logger.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))
_logger.warning("Inconsistency to realstore: %s != %s." , fname, repr(opath))
oldpath = os.path.join(path, opath[-1])
npath = [sbro.path,] + (ndir_bro.get_full_path() or [])
npath = filter(lambda x: x is not None, npath)
newdir = os.path.join(*npath)
if not os.path.isdir(newdir):
_logger.debug("Must create dir %s", newdir)
_logger.debug("Must create dir %s.", newdir)
os.makedirs(newdir)
npath.append(opath[-1])
newpath = os.path.join(*npath)
_logger.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
_logger.debug("Going to move %s from %s to %s.", opath[-1], oldpath, newpath)
shutil.move(oldpath, newpath)
store_path = npath[1:] + [opath[-1],]
store_fname = os.path.join(*store_path)
return { 'store_fname': store_fname }
else:
raise TypeError("No %s storage" % sbro.type)
raise TypeError("No %s storage." % sbro.type)
document_storage()

View File

@ -923,7 +923,7 @@ msgstr ""
#. module: document
#: sql_constraint:document.directory:0
msgid "Directory must have a parent or a storage"
msgid "Directory must have a parent or a storage."
msgstr ""
#. module: document

View File

@ -45,7 +45,7 @@ _logger = logging.getLogger(__name__)
def _str2time(cre):
""" Convert a string with time representation (from db) into time (float)
Note: a place to fix if datetime is used in db.
"""
if not cre:
@ -62,7 +62,7 @@ def get_node_context(cr, uid, context):
class node_context(object):
""" This is the root node, representing access to some particular context
A context is a set of persistent data, which may influence the structure
of the nodes. All other transient information during a data query should
be passed down with function arguments.
@ -99,7 +99,7 @@ class node_context(object):
def __ne__(self, other):
return not self.__eq__(other)
def get(self, name, default=None):
return self.context.get(name, default)
@ -119,7 +119,7 @@ class node_context(object):
"""Create (or locate) a node for a directory
@param dbro a browse object of document.directory
"""
fullpath = dbro.get_full_path(context=self.context)
klass = dbro.get_node_class(dbro, context=self.context)
return klass(fullpath, None ,self, dbro)
@ -183,7 +183,7 @@ class node_descriptor(object):
def __nonzero__(self):
""" Ensure that a node_descriptor will never equal False
Since we do define __len__ and __iter__ for us, we must avoid
being regarded as non-true objects.
"""
@ -244,7 +244,7 @@ class node_class(object):
else:
s.append(self.path)
return s #map(lambda x: '/' +x, s)
def __repr__(self):
return "%s@/%s" % (self.our_type, '/'.join(self.full_path()))
@ -271,7 +271,7 @@ class node_class(object):
return False
def get_data(self,cr):
raise TypeError('no data for %s'% self.type)
raise TypeError('No data for %s.' % self.type)
def open_data(self, cr, mode):
""" Open a node_descriptor object for this node.
@ -285,10 +285,10 @@ class node_class(object):
For this class, there is no data, so no implementation. Each
child class that has data should override this.
"""
raise TypeError('no data for %s' % self.type)
raise TypeError('No data for %s.' % self.type)
def _get_storage(self,cr):
raise RuntimeError("no storage for base class")
raise RuntimeError("No storage for base class.")
def get_etag(self,cr):
""" Get a tag, unique per object + modification.
@ -323,11 +323,11 @@ class node_class(object):
def get_dav_eprop(self, cr, ns, prop):
if not self.DAV_M_NS:
return None
if self.DAV_M_NS.has_key(ns):
prefix = self.DAV_M_NS[ns]
else:
_logger.debug('No namespace: %s ("%s")',ns, prop)
_logger.debug('No namespace: %s ("%s").',ns, prop)
return None
mname = prefix + "_" + prop.replace('-','_')
@ -340,12 +340,12 @@ class node_class(object):
r = m(cr)
return r
except AttributeError:
_logger.debug('Property %s not supported' % prop, exc_info=True)
_logger.debug('The property %s is not supported.' % prop, exc_info=True)
return None
def get_dav_resourcetype(self, cr):
""" Get the DAV resource type.
Is here because some nodes may exhibit special behaviour, like
CalDAV/GroupDAV collections
"""
@ -384,13 +384,13 @@ class node_class(object):
""" Create a regular file under this node
"""
_logger.warning("Attempted to create a file under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create files here")
raise IOError(errno.EPERM, "Not allowed to create file(s) here.")
def create_child_collection(self, cr, objname):
""" Create a child collection (directory) under self
"""
_logger.warning("Attempted to create a collection under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create folders here")
raise IOError(errno.EPERM, "Not allowed to create folder(s) here.")
def rm(self, cr):
raise NotImplementedError(repr(self))
@ -404,7 +404,7 @@ class node_class(object):
def check_perms(self, perms):
""" Check the permissions of the current node.
@param perms either an integer of the bits to check, or
a string with the permission letters
@ -414,7 +414,7 @@ class node_class(object):
4, r : allow read of file, or listing of dir contents
8, u : allow remove (unlink)
"""
if isinstance(perms, str):
pe2 = 0
chars = { 'x': 1, 'w': 2, 'r': 4, 'u': 8 }
@ -423,10 +423,10 @@ class node_class(object):
perms = pe2
elif isinstance(perms, int):
if perms < 0 or perms > 15:
raise ValueError("Invalid permission bits")
raise ValueError("Invalid permission bits.")
else:
raise ValueError("Invalid permission attribute")
raise ValueError("Invalid permission attribute.")
return ((self.uidperms & perms) == perms)
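Both calling conventions end up as the same bit test; a usage sketch (node stands for any node_class instance):

    node.check_perms('rw')    # 'r' + 'w' -> 4 | 2 == 6
    node.check_perms(6)       # same test against self.uidperms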
class node_database(node_class):
@ -463,9 +463,9 @@ class node_database(node_class):
is_allowed = self.check_perms(1)
else:
is_allowed = self.check_perms(5)
if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied")
raise IOError(errno.EPERM, "Permission into directory denied.")
if domain:
where = where + domain
@ -493,7 +493,7 @@ def mkdosname(company_name, default='noname'):
for c in company_name[:8]:
n += (c in badchars and '_') or c
return n
def _uid2unixperms(perms, has_owner):
""" Convert the uidperms and the owner flag to full unix bits
@ -543,7 +543,7 @@ class node_dir(node_database):
try:
self.dctx['dctx_' + dfld.field] = safe_eval(dfld.expr,dc2)
except Exception,e:
print "Cannot eval %s" % dfld.expr
print "Cannot eval %s." % dfld.expr
print e
pass
@ -566,10 +566,10 @@ class node_dir(node_database):
def _file_get(self, cr, nodename=False):
res = super(node_dir,self)._file_get(cr, nodename)
is_allowed = self.check_perms(nodename and 1 or 5)
if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied")
raise IOError(errno.EPERM, "Permission into directory denied.")
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
@ -583,7 +583,7 @@ class node_dir(node_database):
res.extend(res3)
return res
def _child_get(self, cr, name=None, domain=None):
dirobj = self.context._dirobj
uid = self.context.uid
@ -595,9 +595,9 @@ class node_dir(node_database):
is_allowed = self.check_perms(1)
else:
is_allowed = self.check_perms(5)
if not is_allowed:
raise IOError(errno.EPERM, "Permission into directory denied")
raise IOError(errno.EPERM, "Permission into directory denied.")
if not domain:
domain = []
@ -633,20 +633,20 @@ class node_dir(node_database):
if not directory:
raise OSError(2, 'No such file or directory.')
if not self.check_perms('u'):
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
if directory._table_name=='document.directory':
if self.children(cr):
raise OSError(39, 'Directory not empty.')
res = self.context._dirobj.unlink(cr, uid, [directory.id])
else:
raise OSError(1, 'Operation not permited.')
raise OSError(1, 'Operation is not permitted.')
return res
def create_child_collection(self, cr, objname):
object2 = False
if not self.check_perms(2):
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
dirobj = self.context._dirobj
uid = self.context.uid
@ -654,7 +654,7 @@ class node_dir(node_database):
ctx.update(self.dctx)
obj = dirobj.browse(cr, uid, self.dir_id)
if obj and (obj.type == 'ressource') and not object2:
raise OSError(1, 'Operation not permited.')
raise OSError(1, 'Operation is not permitted.')
#objname = uri2[-1]
val = {
@ -672,7 +672,7 @@ class node_dir(node_database):
Return the node_* created
"""
if not self.check_perms(2):
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
dirobj = self.context._dirobj
uid = self.context.uid
@ -702,10 +702,10 @@ class node_dir(node_database):
Note /may/ be called with ndir_node = None, to rename the document root.
"""
if ndir_node and (ndir_node.context != self.context):
raise NotImplementedError("Cannot move directories between contexts")
raise NotImplementedError("Cannot move directories between contexts.")
if (not self.check_perms('u')) or (not ndir_node.check_perms('w')):
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
dir_obj = self.context._dirobj
if not fil_obj:
@ -724,13 +724,13 @@ class node_dir(node_database):
assert self.parent
if self.parent != ndir_node:
_logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move dir to another dir')
_logger.debug('Cannot move dir %r from %r to %r.', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move dir to another dir.')
ret = {}
if new_name and (new_name != dbro.name):
if ndir_node.child(cr, new_name):
raise IOError(errno.EEXIST, "Destination path already exists")
raise IOError(errno.EEXIST, "Destination path already exists.")
ret['name'] = new_name
del dbro
@ -832,7 +832,7 @@ class node_res_dir(node_class):
elif isinstance(app, tuple):
where.append(app)
else:
raise RuntimeError("incorrect domain expr: %s" % self.domain)
raise RuntimeError("Incorrect domain expr: %s." % self.domain)
if self.resm_id:
where.append(('id','=',self.resm_id))
@ -845,7 +845,7 @@ class node_res_dir(node_class):
is_allowed = self.check_perms(5)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
# print "Where clause for %s" % self.res_model, where
if self.ressource_tree:
@ -864,10 +864,10 @@ class node_res_dir(node_class):
if not res_name:
continue
# Yes! we can't do better but skip nameless records.
# Escape the name for characters not supported in filenames
res_name = res_name.replace('/','_') # any other weird char?
if name and (res_name != ustr(name)):
# we have matched _ to any character, but we only meant to match
# the special ones.
@ -923,7 +923,7 @@ class node_res_obj(node_class):
try:
self.dctx[fld] = safe_eval(expr, dc2)
except Exception,e:
print "Cannot eval %s for %s" % (expr, fld)
print "Cannot eval %s for %s." % (expr, fld)
print e
pass
else:
@ -962,7 +962,7 @@ class node_res_obj(node_class):
res = []
is_allowed = self.check_perms((nodename and 1) or 5)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
@ -997,7 +997,7 @@ class node_res_obj(node_class):
def get_dav_eprop_DEPR(self, cr, ns, prop):
# Deprecated!
if ns != 'http://groupdav.org/' or prop != 'resourcetype':
_logger.warning("Who asked for %s:%s?" % (ns, prop))
_logger.warning("Who asks for %s:%s?" % (ns, prop))
return None
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
@ -1016,7 +1016,7 @@ class node_res_obj(node_class):
is_allowed = self.check_perms((name and 1) or 5)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
uid = self.context.uid
ctx = self.context.context.copy()
@ -1057,7 +1057,7 @@ class node_res_obj(node_class):
where2 = where + [('parent_id','=',self.dir_id) ]
ids = dirobj.search(cr, uid, where2, context=ctx)
bo = obj.browse(cr, uid, self.res_id, context=ctx)
for dirr in dirobj.browse(cr, uid, ids, context=ctx):
if name and (name != dirr.name):
continue
@ -1103,7 +1103,7 @@ class node_res_obj(node_class):
dirobj = self.context._dirobj
is_allowed = self.check_perms(2)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
uid = self.context.uid
ctx = self.context.context.copy()
@ -1114,7 +1114,7 @@ class node_res_obj(node_class):
obj = dirobj.browse(cr, uid, self.dir_id)
if obj and (obj.type == 'ressource') and not object2:
raise OSError(1, 'Operation not permited.')
raise OSError(1, 'Operation is not permitted.')
val = {
@ -1135,7 +1135,7 @@ class node_res_obj(node_class):
"""
is_allowed = self.check_perms(2)
if not is_allowed:
raise IOError(errno.EPERM,"Permission denied")
raise IOError(errno.EPERM,"Permission denied.")
dirobj = self.context._dirobj
uid = self.context.uid
@ -1177,14 +1177,14 @@ class node_file(node_class):
self.write_date = fil.write_date or fil.create_date
self.content_length = fil.file_size
self.displayname = fil.name
self.uidperms = 14
if parent:
if not parent.check_perms('x'):
self.uidperms = 0
elif not parent.check_perms('w'):
self.uidperms = 4
try:
self.uuser = (fil.user_id and fil.user_id.login) or 'nobody'
except Exception:
@ -1213,9 +1213,9 @@ class node_file(node_class):
def open_data(self, cr, mode):
stor = self.storage_id
assert stor, "No storage for file #%s" % self.file_id
assert stor, "No storage for file #%s." % self.file_id
if not self.check_perms(4):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
# If storage is not set properly, we are just screwed here, don't
# try to get it from default.
@ -1225,7 +1225,7 @@ class node_file(node_class):
def rm(self, cr):
uid = self.context.uid
if not self.check_perms(8):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
document_obj = self.context._dirobj.pool.get('ir.attachment')
if self.type in ('collection','database'):
return False
@ -1271,7 +1271,7 @@ class node_file(node_class):
stor = self.storage_id
assert stor, "No storage for file #%s" % self.file_id
if not self.check_perms(4):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
# If storage is not set properly, we are just screwed here, don't
# try to get it from default.
@ -1294,7 +1294,7 @@ class node_file(node_class):
stor = self.storage_id
assert stor, "No storage for file #%s" % self.file_id
if not self.check_perms(2):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
stobj = self.context._dirobj.pool.get('document.storage')
return stobj.set_data(cr, self.context.uid,stor, self, data, self.context.context, fil_obj)
@ -1304,31 +1304,31 @@ class node_file(node_class):
def move_to(self, cr, ndir_node, new_name=False, fil_obj=None, ndir_obj=None, in_write=False):
if ndir_node and ndir_node.context != self.context:
raise NotImplementedError("Cannot move files between contexts")
raise NotImplementedError("Cannot move files between contexts.")
if (not self.check_perms(8)) and ndir_node.check_perms(2):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
doc_obj = self.context._dirobj.pool.get('ir.attachment')
if not fil_obj:
dbro = doc_obj.browse(cr, self.context.uid, self.file_id, context=self.context.context)
else:
dbro = fil_obj
assert dbro.id == self.file_id, "%s != %s for %r" % (dbro.id, self.file_id, self)
assert dbro.id == self.file_id, "%s != %s for %r." % (dbro.id, self.file_id, self)
if not dbro:
raise IndexError("Cannot locate doc %d", self.file_id)
raise IndexError("Cannot locate doc %d.", self.file_id)
if (not self.parent):
# there *must* be a parent node for this one
self.parent = self.context.get_dir_node(cr, dbro.parent_id)
assert self.parent
ret = {}
if ndir_node and self.parent != ndir_node:
if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)):
_logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move files between dynamic folders')
_logger.debug('Cannot move file %r from %r to %r.', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move files between dynamic folders.')
if not ndir_obj:
ndir_obj = self.context._dirobj.browse(cr, self.context.uid, \
@ -1343,7 +1343,7 @@ class node_file(node_class):
if new_name and (new_name != dbro.name):
if len(ret):
raise NotImplementedError("Cannot rename and move") # TODO
raise NotImplementedError("Cannot rename and move.") # TODO
stobj = self.context._dirobj.pool.get('document.storage')
r2 = stobj.simple_rename(cr, self.context.uid, self, new_name, self.context.context)
ret.update(r2)
@ -1373,7 +1373,7 @@ class node_content(node_class):
self.uidperms = parent.uidperms & 14
self.uuser = parent.uuser
self.ugroup = parent.ugroup
self.extension = cnt.extension
self.report_id = cnt.report_id and cnt.report_id.id
#self.mimetype = cnt.extension.
@ -1399,7 +1399,7 @@ class node_content(node_class):
def get_data(self, cr, fil_obj = None):
cntobj = self.context._dirobj.pool.get('document.directory.content')
if not self.check_perms(4):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
ctx = self.context.context.copy()
ctx.update(self.dctx)
@ -1416,14 +1416,14 @@ class node_content(node_class):
elif mode in ('r+', 'w+'):
cperms = 'rw'
else:
raise IOError(errno.EINVAL, "Cannot open at mode %s" % mode)
raise IOError(errno.EINVAL, "Cannot open at mode %s." % mode)
if not self.check_perms(cperms):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
ctx = self.context.context.copy()
ctx.update(self.dctx)
return nodefd_content(self, cr, mode, ctx)
def get_data_len(self, cr, fil_obj = None):
@ -1438,7 +1438,7 @@ class node_content(node_class):
def set_data(self, cr, data, fil_obj = None):
cntobj = self.context._dirobj.pool.get('document.directory.content')
if not self.check_perms(2):
raise IOError(errno.EPERM, "Permission denied")
raise IOError(errno.EPERM, "Permission denied.")
ctx = self.context.context.copy()
ctx.update(self.dctx)
@ -1451,7 +1451,7 @@ class node_content(node_class):
return ''
class nodefd_content(StringIO, node_descriptor):
""" A descriptor to content nodes
"""
def __init__(self, parent, cr, mode, ctx):
@ -1473,8 +1473,8 @@ class nodefd_content(StringIO, node_descriptor):
elif mode == 'a':
StringIO.__init__(self, None)
else:
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
_logger.error("Incorrect mode %s is specified.", mode)
raise IOError(errno.EINVAL, "Invalid file mode.")
self.mode = mode
def size(self):
@ -1499,14 +1499,14 @@ class nodefd_content(StringIO, node_descriptor):
raise NotImplementedError
cr.commit()
except Exception:
_logger.exception('Cannot update db content #%d for close:', par.cnt_id)
_logger.exception('Cannot update db content #%d for close.', par.cnt_id)
raise
finally:
cr.close()
StringIO.close(self)
class nodefd_static(StringIO, node_descriptor):
""" A descriptor to nodes with static data.
"""
def __init__(self, parent, cr, mode, ctx=None):
@ -1527,8 +1527,8 @@ class nodefd_static(StringIO, node_descriptor):
elif mode == 'a':
StringIO.__init__(self, None)
else:
_logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
_logger.error("Incorrect mode %s is specified.", mode)
raise IOError(errno.EINVAL, "Invalid file mode.")
self.mode = mode
def size(self):
@ -1552,7 +1552,7 @@ class nodefd_static(StringIO, node_descriptor):
raise NotImplementedError
cr.commit()
except Exception:
_logger.exception('Cannot update db content #%d for close:', par.cnt_id)
_logger.exception('Cannot update db content #%d for close.', par.cnt_id)
raise
finally:
cr.close()

View File

@ -104,7 +104,7 @@ class DocIndex(indexer):
except OSError:
_logger.warn("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
_logger.debug("Trace of the failed file indexing attempt: ", exc_info=True)
_logger.debug("Trace of the failed file indexing attempt.", exc_info=True)
return False
cntIndex.register(DocIndex())

View File

@ -81,7 +81,7 @@ class abstracted_fs(object):
self.db_name_list.append(db_name)
cr.commit()
except Exception:
self._log.warning('Cannot use db "%s"', db_name)
self._log.warning('Cannot use db "%s".', db_name)
finally:
if cr is not None:
cr.close()
@ -143,40 +143,40 @@ class abstracted_fs(object):
child = node.child(cr, objname)
if child:
if child.type not in ('file','content'):
raise OSError(1, 'Operation not permited.')
raise OSError(1, 'Operation is not permitted.')
ret = child.open_data(cr, mode)
cr.commit()
assert ret, "Cannot create descriptor for %r: %r" % (child, ret)
assert ret, "Cannot create descriptor for %r: %r." % (child, ret)
return ret
except EnvironmentError:
raise
except Exception:
self._log.exception('Cannot locate item %s at node %s', objname, repr(node))
self._log.exception('Cannot locate item %s at node %s.', objname, repr(node))
pass
try:
child = node.create_child(cr, objname, data=None)
ret = child.open_data(cr, mode)
assert ret, "cannot create descriptor for %r" % child
assert ret, "Cannot create descriptor for %r." % child
cr.commit()
return ret
except EnvironmentError:
raise
except Exception:
self._log.exception('Cannot create item %s at node %s', objname, repr(node))
raise OSError(1, 'Operation not permited.')
self._log.exception('Cannot create item %s at node %s.', objname, repr(node))
raise OSError(1, 'Operation is not permitted.')
def open(self, datacr, mode):
if not (datacr and datacr[1]):
raise OSError(1, 'Operation not permited.')
raise OSError(1, 'Operation is not permitted.')
# Reading operation
cr, node, rem = datacr
try:
res = node.open_data(cr, mode)
cr.commit()
except TypeError:
raise IOError(errno.EINVAL, "No data")
raise IOError(errno.EINVAL, "No data.")
return res
# ok, but need test more
@ -211,9 +211,9 @@ class abstracted_fs(object):
self.cwd_node = None
return None
if not datacr[1]:
raise OSError(1, 'Operation not permitted')
raise OSError(1, 'Operation is not permitted.')
if datacr[1].type not in ('collection','database'):
raise OSError(2, 'Path is not a directory')
raise OSError(2, 'Path is not a directory.')
self.cwd = '/'+datacr[1].context.dbname + '/'
self.cwd += '/'.join(datacr[1].full_path())
self.cwd_node = datacr[1]
@ -223,7 +223,7 @@ class abstracted_fs(object):
"""Create the specified directory."""
cr, node, rem = datacr or (None, None, None)
if not node:
raise OSError(1, 'Operation not permited.')
raise OSError(1, 'Operation is not permitted.')
try:
basename =_to_unicode(basename)
@ -231,8 +231,8 @@ class abstracted_fs(object):
self._log.debug("Created child dir: %r", cdir)
cr.commit()
except Exception:
self._log.exception('Cannot create dir "%s" at node %s', basename, repr(node))
raise OSError(1, 'Operation not permited.')
self._log.exception('Cannot create dir "%s" at node %s.', basename, repr(node))
raise OSError(1, 'Operation is not permitted.')
def close_cr(self, data):
if data and data[0]:
@ -287,10 +287,10 @@ class abstracted_fs(object):
p_parts = p_parts[1:]
# self._log.debug("Path parts: %r ", p_parts)
if not p_parts:
raise IOError(errno.EPERM, 'Cannot perform operation at root dir')
raise IOError(errno.EPERM, 'Cannot perform operation at root directory.')
dbname = p_parts[0]
if dbname not in self.db_list():
raise IOError(errno.ENOENT,'Invalid database path: %s' % dbname)
raise IOError(errno.ENOENT,'Invalid database path: %s.' % dbname)
try:
db = pooler.get_db(dbname)
except Exception:
@ -303,7 +303,7 @@ class abstracted_fs(object):
raise
if not uid:
cr.close()
raise OSError(2, 'Authentification Required.')
raise OSError(2, 'Authentication required.')
n = get_node_context(cr, uid, {})
node = n.get_uri(cr, p_parts[1:])
return (cr, node, rem_path)
@ -318,7 +318,7 @@ class abstracted_fs(object):
node = self.cwd_node
if node is False and mode not in ('???'):
cr.close()
raise IOError(errno.ENOENT, 'Path does not exist')
raise IOError(errno.ENOENT, 'Path does not exist.')
return (cr, node, rem_path)
def get_node_cr_uid(self, node):
@ -375,7 +375,7 @@ class abstracted_fs(object):
return self.rmdir(datacr)
elif datacr[1].type == 'file':
return self.rmfile(datacr)
raise OSError(1, 'Operation not permited.')
raise OSError(1, 'Operation is not permitted.')
def rmfile(self, datacr):
"""Remove the specified file."""
@ -399,8 +399,8 @@ class abstracted_fs(object):
except EnvironmentError:
raise
except Exception:
self._log.exception('Cannot rename "%s" to "%s" at "%s"', src, datacr[2], datacr[1])
raise OSError(1,'Operation not permited.')
self._log.exception('Cannot rename "%s" to "%s" at "%s".', src, datacr[2], datacr[1])
raise OSError(1,'Operation is not permitted.')
def stat(self, node):
raise NotImplementedError()
@ -429,7 +429,7 @@ class abstracted_fs(object):
def getsize(self, datacr):
"""Return the size of the specified file in bytes."""
if not (datacr and datacr[1]):
raise IOError(errno.ENOENT, "No such file or directory")
raise IOError(errno.ENOENT, "No such file or directory.")
if datacr[1].type in ('file', 'content'):
return datacr[1].get_data_len(datacr[0]) or 0L
return 0L

View File

@ -302,16 +302,16 @@ class DummyAuthorizer:
provide customized response strings when user log-in and quit.
"""
if self.has_user(username):
raise AuthorizerError('User "%s" already exists' %username)
raise AuthorizerError('User "%s" already exists.' %username)
homedir = os.path.realpath(homedir)
if not os.path.isdir(homedir):
raise AuthorizerError('No such directory: "%s"' %homedir)
raise AuthorizerError('No such directory: "%s".' %homedir)
for p in perm:
if p not in 'elradfmw':
raise AuthorizerError('No such permission "%s"' %p)
raise AuthorizerError('No such permission: "%s".' %p)
for p in perm:
if (p in self.write_perms) and (username == 'anonymous'):
warnings.warn("write permissions assigned to anonymous user.",
warnings.warn("Write permissions are assigned to anonymous user.",
RuntimeWarning)
break
dic = {'pwd': str(password),
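As an aside, a self-contained sketch of the permission check that add_user() performs above. AuthorizerError is replaced by ValueError here because only the validation rule is being illustrated; the 'elr'/'adfmw' split follows the read/write flags used by this authorizer.

import warnings

READ_PERMS = 'elr'
WRITE_PERMS = 'adfmw'

def check_perm(username, perm):
    # Every flag must be a known permission letter.
    for p in perm:
        if p not in READ_PERMS + WRITE_PERMS:
            raise ValueError('No such permission: "%s".' % p)
    # Granting write flags to the anonymous account is allowed but flagged.
    if username == 'anonymous' and any(p in WRITE_PERMS for p in perm):
        warnings.warn("Write permissions are assigned to the anonymous user.",
                      RuntimeWarning)

check_perm('demo', 'elradfmw')   # passes silently
check_perm('anonymous', 'elrw')  # emits a RuntimeWarning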
@ -532,7 +532,7 @@ class ActiveDTP(asyncore.dispatcher):
try:
self.connect((ip, port))
except socket.gaierror:
self.cmd_channel.respond("425 Can't connect to specified address.")
self.cmd_channel.respond("425 Cannot connect to specified address.")
self.close()
# --- connection / overridden
@ -542,14 +542,14 @@ class ActiveDTP(asyncore.dispatcher):
def handle_connect(self):
"""Called when connection is established."""
self.cmd_channel.respond('200 Active data connection established.')
self.cmd_channel.respond('200 Active data connection has been established.')
# delegate such connection to DTP handler
handler = self.cmd_channel.dtp_handler(self.socket, self.cmd_channel)
self.cmd_channel.data_channel = handler
self.cmd_channel.on_dtp_connection()
def handle_expt(self):
self.cmd_channel.respond("425 Can't connect to specified address.")
self.cmd_channel.respond("425 Cannot connect to specified address.")
self.close()
def handle_error(self):
@ -562,7 +562,7 @@ class ActiveDTP(asyncore.dispatcher):
pass
except:
logerror(traceback.format_exc())
self.cmd_channel.respond("425 Can't connect to specified address.")
self.cmd_channel.respond("425 Cannot connect to specified address.")
self.close()
class DTPHandler(asyncore.dispatcher):
@ -638,7 +638,7 @@ class DTPHandler(asyncore.dispatcher):
elif type == 'i':
self.data_wrapper = lambda x: x
else:
raise TypeError, "Unsupported type"
raise TypeError, "Unsupported type."
self.receive = True
def get_transmitted_bytes(self):
@ -767,7 +767,7 @@ class DTPHandler(asyncore.dispatcher):
# some other exception occurred; we don't want to provide
# confidential error messages
logerror(traceback.format_exc())
error = "Internal error"
error = "Internal error."
self.cmd_channel.respond("426 %s; transfer aborted." %error)
self.close()
@ -823,7 +823,7 @@ class FileProducer:
elif type == 'i':
self.data_wrapper = lambda x: x
else:
raise TypeError, "Unsupported type"
raise TypeError, "Unsupported type."
def more(self):
"""Attempt a chunk of data of size self.buffer_size."""
@ -1485,7 +1485,7 @@ class FTPHandler(asynchat.async_chat):
buflimit = 2048
if self.in_buffer_len > buflimit:
self.respond('500 Command too long.')
self.log('Command received exceeded buffer limit of %s.' %(buflimit))
self.log('Command received exceeded the buffer limit of %s.' %(buflimit))
self.in_buffer = []
self.in_buffer_len = 0
@ -1528,12 +1528,12 @@ class FTPHandler(asynchat.async_chat):
# let's check if user provided an argument for those commands
# needing one
if not arg and cmd in self.arg_cmds:
self.respond("501 Syntax error: command needs an argument.")
self.respond("501 Syntax error! Command needs an argument.")
return
# let's do the same for those commands requiring no argument.
elif arg and cmd in self.unarg_cmds:
self.respond("501 Syntax error: command does not accept arguments.")
self.respond("501 Syntax error! Command does not accept arguments.")
return
# provide a limited set of commands if user isn't
@ -1617,7 +1617,7 @@ class FTPHandler(asynchat.async_chat):
else:
self.in_buffer.append(data)
return
self.log("Can't handle OOB data.")
self.log("Cannot handle OOB data.")
self.close()
def handle_error(self):
@ -1801,7 +1801,7 @@ class FTPHandler(asynchat.async_chat):
except NotImplementedError, err:
cmdname = function.__name__
why = err.args[0] or 'Not implemented'
self.log('FAIL %s() not implemented: %s.' %(cmdname, why))
self.log('FAIL %s() is not implemented: %s.' %(cmdname, why))
self.respond('502 %s.' %why)
raise FTPExceptionSent(why)
except EnvironmentError, err:
@ -1811,7 +1811,7 @@ class FTPHandler(asynchat.async_chat):
except Exception:
pass
ret_code = eresp.get(err.errno, '451')
why = (err.strerror) or 'Error in command'
why = (err.strerror) or 'Error in command.'
self.log('FAIL %s() %s errno=%s: %s.' %(cmdname, uline, err.errno, why))
self.respond('%s %s.' % (str(ret_code), why))
@ -1841,15 +1841,15 @@ class FTPHandler(asynchat.async_chat):
if ip != self.remote_ip:
self.log("Rejected data connection to foreign address %s:%s."
%(ip, port))
self.respond("501 Can't connect to a foreign address.")
self.respond("501 Cannot connect to a foreign address.")
return
# ...another RFC-2577 recommendation is rejecting connections
# to privileged ports (< 1024) for security reasons.
if not self.permit_privileged_ports:
if port < 1024:
self.log('PORT against the privileged port "%s" refused.' %port)
self.respond("501 Can't connect over a privileged port.")
self.log('PORT against the privileged port "%s" has been refused.' %port)
self.respond("501 Cannot connect over a privileged port.")
return
# close existent DTP-server instance, if any.
@ -1889,7 +1889,7 @@ class FTPHandler(asynchat.async_chat):
# make sure we are not hitting the max connections limit
if self.server.max_cons:
if len(self._map) >= self.server.max_cons:
msg = "Too many connections. Can't open data channel."
msg = "Too many connections. Cannot open data channel."
self.respond("425 %s" %msg)
self.log(msg)
return
@ -2150,7 +2150,7 @@ class FTPHandler(asynchat.async_chat):
datacr = self.get_crdata2(line, mode='list')
# RFC-3659 requires 501 response code if path is not a directory
if not self.fs.isdir(datacr[1]):
err = 'No such directory'
err = 'No such directory.'
self.log('FAIL MLSD "%s". %s.' %(line, err))
self.respond("501 %s." %err)
return
@ -2191,7 +2191,7 @@ class FTPHandler(asynchat.async_chat):
fd.seek(self.restart_position)
ok = 1
except AssertionError:
why = "Invalid REST parameter"
why = "Invalid REST parameter."
except IOError, err:
why = _strerror(err)
self.restart_position = 0
@ -2240,7 +2240,7 @@ class FTPHandler(asynchat.async_chat):
fd.seek(self.restart_position)
ok = 1
except AssertionError:
why = "Invalid REST parameter"
why = "Invalid REST parameter."
except IOError, err:
why = _strerror(err)
self.restart_position = 0
@ -2275,7 +2275,7 @@ class FTPHandler(asynchat.async_chat):
# watch for STOU preceded by REST, which makes no sense.
if self.restart_position:
self.respond("450 Can't STOU while REST request is pending.")
self.respond("450 Cannot STOU while REST request is pending.")
return
@ -2296,7 +2296,7 @@ class FTPHandler(asynchat.async_chat):
# hit the max number of tries to find a file with a
# unique name
if err.errno == errno.EEXIST:
why = 'No usable unique file name found'
why = 'No usable unique file name found.'
# something else happened
else:
why = _strerror(err)
@ -2307,9 +2307,9 @@ class FTPHandler(asynchat.async_chat):
filename = line
if not self.authorizer.has_perm(self.username, 'w', filename):
self.log('FAIL STOU "%s". Not enough privileges'
self.log('FAIL STOU "%s". Not enough privileges.'
%self.fs.ftpnorm(line))
self.respond("550 Can't STOU: not enough privileges.")
self.respond("550 Cannot STOU: not enough privileges.")
self.fs.close_cr(datacr)
return
@ -2329,7 +2329,7 @@ class FTPHandler(asynchat.async_chat):
"""Append data to an existing file on the server."""
# watch for APPE preceded by REST, which makes no sense.
if self.restart_position:
self.respond("550 Can't APPE while REST request is pending.")
self.respond("550 Cannot APPE while REST request is pending.")
else:
self.ftp_STOR(line, mode='a')
@ -2405,7 +2405,7 @@ class FTPHandler(asynchat.async_chat):
# and account information already supplied and beginning the
# login sequence again.
self.flush_account()
msg = 'Previous account information was flushed'
msg = 'Previous account information was flushed.'
self.log('OK USER "%s". %s.' %(line, msg))
self.respond('331 %s, send password.' %msg)
self.username = line
@ -2554,7 +2554,7 @@ class FTPHandler(asynchat.async_chat):
else:
datacr = self.get_crdata2(line)
if not datacr:
raise IOError(errno.ENOENT, "%s is not retrievable" %line)
raise IOError(errno.ENOENT, "%s is not retrievable." %line)
lmt = self.try_as_current_user(self.fs.getmtime, (datacr,), line=line)
lmt = time.strftime("%Y%m%d%H%M%S", time.localtime(lmt))
@ -2584,7 +2584,7 @@ class FTPHandler(asynchat.async_chat):
try:
datacr = self.get_crdata2(line, mode='delete')
if not datacr[1]:
msg = "Can't remove root directory."
msg = "Cannot remove root directory."
self.respond("553 %s" %msg)
self.log('FAIL MKD "/". %s' %msg)
self.fs.close_cr(datacr)
@ -2617,7 +2617,7 @@ class FTPHandler(asynchat.async_chat):
if not datacr[1]:
self.respond("550 No such file or directory.")
elif not datacr[1]:
self.respond("553 Can't rename the home directory.")
self.respond("553 Cannot rename the home directory.")
else:
self.fs.rnfr = datacr[1]
self.respond("350 Ready for destination name.")
@ -2760,14 +2760,14 @@ class FTPHandler(asynchat.async_chat):
def ftp_OPTS(self, line):
"""Specify options for FTP commands as specified in RFC-2389."""
try:
assert (not line.count(' ') > 1), 'Invalid number of arguments'
assert (not line.count(' ') > 1), 'Invalid number of arguments.'
if ' ' in line:
cmd, arg = line.split(' ')
assert (';' in arg), 'Invalid argument'
assert (';' in arg), 'Invalid argument.'
else:
cmd, arg = line, ''
# actually the only command able to accept options is MLST
assert (cmd.upper() == 'MLST'), 'Unsupported command "%s"' %cmd
assert (cmd.upper() == 'MLST'), 'Unsupported command "%s".' %cmd
except AssertionError, err:
self.respond('501 %s.' %err)
else:

View File

@ -62,7 +62,7 @@ def get_ftp_fulldata(ftp, fname, limit=8192):
data = []
def ffp(data, ndata):
if len(data)+ len(ndata) > limit:
raise IndexError('Data over the limit')
raise IndexError('Data over the limit.')
data.append(ndata)
ftp.retrbinary('RETR %s' % fname, partial(ffp,data))
return ''.join(data)
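A stand-alone version of the same size-capped download with the standard ftplib client follows; the host, credentials and file name are placeholders, not values from this patch.

from ftplib import FTP
from functools import partial

def _collect(limit, chunks, chunk):
    # Refuse to buffer more than `limit` bytes, as get_ftp_fulldata() does above.
    if sum(len(c) for c in chunks) + len(chunk) > limit:
        raise IndexError('Data over the limit.')
    chunks.append(chunk)

def fetch_small_file(host, user, password, fname, limit=8192):
    ftp = FTP(host)
    ftp.login(user, password)
    chunks = []
    try:
        ftp.retrbinary('RETR %s' % fname, partial(_collect, limit, chunks))
    finally:
        ftp.close()
    return b''.join(chunks)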

View File

@ -78,13 +78,13 @@ def _str2time(cre):
class BoundStream2(object):
"""Wraps around a seekable buffer, reads a determined range of data
Note that the supplied stream object MUST support a size() which
should return its data length (in bytes).
A variation of the class in websrv_lib.py
"""
def __init__(self, stream, offset=None, length=None, chunk_size=None):
self._stream = stream
self._offset = offset or 0
@ -98,8 +98,8 @@ class BoundStream2(object):
def read(self, size=-1):
if not self._stream:
raise IOError(errno.EBADF, "read() without stream")
raise IOError(errno.EBADF, "read() without stream.")
if self._rem_length == 0:
return ''
elif self._rem_length < 0:
@ -110,7 +110,7 @@ class BoundStream2(object):
rsize = size
if self._chunk_size and self._chunk_size < rsize:
rsize = self._chunk_size
data = self._stream.read(rsize)
self._rem_length -= len(data)
@ -136,25 +136,25 @@ class BoundStream2(object):
"""
if whence == os.SEEK_SET:
if pos < 0 or pos > self._length:
raise IOError(errno.EINVAL,"Cannot seek")
raise IOError(errno.EINVAL,"Cannot seek.")
self._stream.seek(pos - self._offset)
self._rem_length = self._length - pos
elif whence == os.SEEK_CUR:
if pos > 0:
if pos > self._rem_length:
raise IOError(errno.EINVAL,"Cannot seek past end")
raise IOError(errno.EINVAL,"Cannot seek past end.")
elif pos < 0:
oldpos = self.tell()
if oldpos + pos < 0:
raise IOError(errno.EINVAL,"Cannot seek before start")
raise IOError(errno.EINVAL,"Cannot seek before start.")
self._stream.seek(pos, os.SEEK_CUR)
self._rem_length -= pos
elif whence == os.SEEK_END:
if pos > 0:
raise IOError(errno.EINVAL,"Cannot seek past end")
raise IOError(errno.EINVAL,"Cannot seek past end.")
else:
if self._length + pos < 0:
raise IOError(errno.EINVAL,"Cannot seek before start")
raise IOError(errno.EINVAL,"Cannot seek before start.")
newpos = self._offset + self._length + pos
self._stream.seek(newpos, os.SEEK_SET)
self._rem_length = 0 - pos
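To make the window arithmetic above concrete, here is a stand-alone illustration of reading through a bounded [offset, offset + length) window on an in-memory buffer. BoundStream2 itself is not used; only its rule that positions outside the window are rejected with EINVAL is reproduced.

import errno
import io

backing = io.BytesIO(b'0123456789abcdef')
offset, length = 4, 8                      # expose bytes 4..11, i.e. b'456789ab'

def bounded_read(pos, size):
    # Seek relative to the window, refusing positions outside of it.
    if pos < 0 or pos > length:
        raise IOError(errno.EINVAL, "Cannot seek.")
    backing.seek(offset + pos)
    return backing.read(min(size, length - pos))

print(bounded_read(0, 4))    # b'4567'
print(bounded_read(6, 10))   # b'ab' -- the read is clipped at the window end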
@ -206,7 +206,7 @@ class openerp_dav_handler(dav_interface):
self.parent.log_error("Cannot %s: %s", opname, str(e))
self.parent.log_message("Exc: %s",traceback.format_exc())
# see par 9.3.1 of rfc
raise DAV_Error(403, str(e) or 'Not supported at this path')
raise DAV_Error(403, str(e) or 'Not supported at this path.')
except EnvironmentError, err:
if cr: cr.close()
import traceback
@ -218,7 +218,7 @@ class openerp_dav_handler(dav_interface):
if cr: cr.close()
self.parent.log_error("Cannot %s: %s", opname, str(e))
self.parent.log_message("Exc: %s",traceback.format_exc())
raise default_exc("Operation failed")
raise default_exc("Operation failed.")
def _get_dav_lockdiscovery(self, uri):
""" We raise that so that the node API is used """
@ -400,7 +400,7 @@ class openerp_dav_handler(dav_interface):
domain = None
if filters:
domain = node.get_domain(cr, filters)
if hasattr(filters, 'getElementsByTagNameNS'):
hrefs = filters.getElementsByTagNameNS('DAV:', 'href')
if hrefs:
@ -434,7 +434,7 @@ class openerp_dav_handler(dav_interface):
except DAV_Error:
raise
except Exception, e:
self.parent.log_error("cannot get_children: "+ str(e))
self.parent.log_error("Cannot get_children: "+str(e)+".")
raise
finally:
if cr: cr.close()
@ -488,7 +488,7 @@ class openerp_dav_handler(dav_interface):
if not node:
raise DAV_NotFound2(uri2)
# TODO: if node is a collection, for some specific set of
# clients ( web browsers; available in node context),
# clients ( web browsers; available in node context),
# we may return a pseydo-html page with the directory listing.
try:
res = node.open_data(cr,'r')
@ -500,15 +500,15 @@ class openerp_dav_handler(dav_interface):
assert start >= 0
if end and end < start:
self.parent.log_error("Invalid range for data: %s-%s" %(start, end))
raise DAV_Error(416, "Invalid range for data")
raise DAV_Error(416, "Invalid range for data.")
if end:
if end >= res.size():
raise DAV_Error(416, "Requested data exceeds available size")
raise DAV_Error(416, "Requested data exceeds available size.")
length = (end + 1) - start
else:
length = res.size() - start
res = BoundStream2(res, offset=start, length=length)
except TypeError,e:
# for the collections that return this error, the DAV standard
# says we'd better just return 200 OK with empty data
@ -564,10 +564,10 @@ class openerp_dav_handler(dav_interface):
@memoize(CACHE_SIZE)
def _get_dav_getcontentlength(self, uri):
""" return the content length of an object """
""" return the content length of an object """
self.parent.log_message('get length: %s' % uri)
result = 0
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not dbname:
if cr: cr.close()
return str(result)
@ -602,7 +602,7 @@ class openerp_dav_handler(dav_interface):
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not dbname:
return time.time()
try:
try:
node = self.uri2object(cr, uid, pool, uri2)
if not node:
raise DAV_NotFound2(uri2)
@ -623,11 +623,11 @@ class openerp_dav_handler(dav_interface):
@memoize(CACHE_SIZE)
def get_creationdate(self, uri):
""" return the last modified date of the object """
""" return the last modified date of the object """
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not dbname:
raise DAV_Error, 409
try:
try:
node = self.uri2object(cr, uid, pool, uri2)
if not node:
raise DAV_NotFound2(uri2)
@ -643,7 +643,7 @@ class openerp_dav_handler(dav_interface):
if not dbname:
if cr: cr.close()
return 'httpd/unix-directory'
try:
try:
node = self.uri2object(cr, uid, pool, uri2)
if not node:
raise DAV_NotFound2(uri2)
@ -651,8 +651,8 @@ class openerp_dav_handler(dav_interface):
return result
#raise DAV_NotFound, 'Could not find %s' % path
finally:
if cr: cr.close()
if cr: cr.close()
def mkcol(self,uri):
""" create a new collection
see par. 9.3 of rfc4918
@ -661,7 +661,7 @@ class openerp_dav_handler(dav_interface):
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not uri2[-1]:
if cr: cr.close()
raise DAV_Error(409, "Cannot create nameless collection")
raise DAV_Error(409, "Cannot create nameless collection.")
if not dbname:
if cr: cr.close()
raise DAV_Error, 409
@ -672,7 +672,7 @@ class openerp_dav_handler(dav_interface):
nc = node.child(cr, uri2[-1])
if nc:
cr.close()
raise DAV_Error(405, "Path already exists")
raise DAV_Error(405, "Path already exists.")
self._try_function(node.create_child_collection, (cr, uri2[-1]),
"create col %s" % uri2[-1], cr=cr)
cr.commit()
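For context, the same operation seen from the client side: MKCOL creates a collection, and the status codes map onto the errors raised above (409 for a nameless or invalid parent, 405 when the path already exists, per RFC 4918 par. 9.3). Host and paths below are placeholders.

import httplib  # http.client on Python 3

def mkcol(host, davpath, name):
    # 201 = created, 405 = collection already exists, 409 = missing/invalid parent.
    conn = httplib.HTTPConnection(host)
    conn.request('MKCOL', '%s/%s' % (davpath.rstrip('/'), name))
    return conn.getresponse().status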
@ -690,30 +690,30 @@ class openerp_dav_handler(dav_interface):
node = self.uri2object(cr, uid, pool, uri2[:])
except Exception:
node = False
objname = misc.ustr(uri2[-1])
ret = None
if not node:
dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
if not dir_node:
cr.close()
raise DAV_NotFound('Parent folder not found')
raise DAV_NotFound('Parent folder not found.')
newchild = self._try_function(dir_node.create_child, (cr, objname, data),
"create %s" % objname, cr=cr)
if not newchild:
cr.commit()
cr.close()
raise DAV_Error(400, "Failed to create resource")
raise DAV_Error(400, "Failed to create resource.")
uparts=urlparse.urlparse(uri)
fileloc = '/'.join(newchild.full_path())
if isinstance(fileloc, unicode):
fileloc = fileloc.encode('utf-8')
# the uri we get is a mangled one, where the davpath has been removed
davpath = self.parent.get_davpath()
surl = '%s://%s' % (uparts[0], uparts[1])
uloc = urllib.quote(fileloc)
hurl = False
@ -727,19 +727,19 @@ class openerp_dav_handler(dav_interface):
ret = (str(hurl), etag)
else:
self._try_function(node.set_data, (cr, data), "save %s" % objname, cr=cr)
cr.commit()
cr.close()
return ret
def rmcol(self,uri):
""" delete a collection """
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
if not dbname:
if cr: cr.close()
raise DAV_Error, 409
node = self.uri2object(cr, uid, pool, uri2)
node = self.uri2object(cr, uid, pool, uri2)
self._try_function(node.rmcol, (cr,), "rmcol %s" % uri, cr=cr)
cr.commit()
@ -748,14 +748,14 @@ class openerp_dav_handler(dav_interface):
def rm(self,uri):
cr, uid, pool,dbname, uri2 = self.get_cr(uri)
if not dbname:
if not dbname:
if cr: cr.close()
raise DAV_Error, 409
node = self.uri2object(cr, uid, pool, uri2)
res = self._try_function(node.rm, (cr,), "rm %s" % uri, cr=cr)
if not res:
if cr: cr.close()
raise OSError(1, 'Operation not permited.')
raise OSError(1, 'Operation is not permitted.')
cr.commit()
cr.close()
return 204
@ -922,8 +922,8 @@ class openerp_dav_handler(dav_interface):
return result
def unlock(self, uri, token):
""" Unlock a resource from that token
""" Unlock a resource from that token
@return True if unlocked, False if no lock existed, Exceptions
"""
cr, uid, pool, dbname, uri2 = self.get_cr(uri)
@ -937,7 +937,7 @@ class openerp_dav_handler(dav_interface):
except AttributeError:
# perhaps the node doesn't support locks
cr.close()
raise DAV_Error(400, 'No locks for this resource')
raise DAV_Error(400, 'No locks for this resource.')
res = self._try_function(node_fn, (cr, token), "unlock %s" % uri, cr=cr)
cr.commit()
@ -959,14 +959,14 @@ class openerp_dav_handler(dav_interface):
node = self.uri2object(cr, uid, pool, uri2[:])
except Exception:
node = False
objname = misc.ustr(uri2[-1])
if not node:
dir_node = self.uri2object(cr, uid, pool, uri2[:-1])
if not dir_node:
cr.close()
raise DAV_NotFound('Parent folder not found')
raise DAV_NotFound('Parent folder not found.')
# We create a new node (file) but with empty data=None,
# as in RFC4918 p. 9.10.4
@ -975,8 +975,8 @@ class openerp_dav_handler(dav_interface):
if not node:
cr.commit()
cr.close()
raise DAV_Error(400, "Failed to create resource")
raise DAV_Error(400, "Failed to create resource.")
created = True
try:
@ -984,7 +984,7 @@ class openerp_dav_handler(dav_interface):
except AttributeError:
# perhaps the node doesn't support locks
cr.close()
raise DAV_Error(400, 'No locks for this resource')
raise DAV_Error(400, 'No locks for this resource.')
# Obtain the lock on the node
lres, pid, token = self._try_function(node_fn, (cr, lock_data), "lock %s" % objname, cr=cr)
@ -992,10 +992,10 @@ class openerp_dav_handler(dav_interface):
if not lres:
cr.commit()
cr.close()
raise DAV_Error(423, "Resource already locked")
raise DAV_Error(423, "Resource already locked.")
assert isinstance(lres, list), 'lres: %s' % repr(lres)
try:
data = mk_lock_response(self, uri, lres)
cr.commit()

View File

@ -43,7 +43,7 @@ class document_davdir(osv.osv):
elif dbro.type == 'ressource':
return nodes.node_res_dir
else:
raise ValueError("dir node for %s type", dbro.type)
raise ValueError("Directory node for %s type.", dbro.type)
def _prepare_context(self, cr, uid, nctx, context=None):
nctx.node_file_class = nodes.node_file
@ -67,18 +67,18 @@ document_davdir()
class dav_dir_property(osv.osv):
""" Arbitrary WebDAV properties, attached to document.directories.
Some DAV properties have to be settable at directories, depending
on the database directory structure.
Example would be the principal-URL.
There _can_ be properties without a directory, which means that they
globally apply to all the directories (aka. collections) of the
present database.
"""
_name = 'document.webdav.dir.property'
_columns = {
'create_date': fields.datetime('Date Created', readonly=True),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
@ -90,25 +90,25 @@ class dav_dir_property(osv.osv):
'value': fields.text('Value'),
'do_subst': fields.boolean('Substitute', required=True),
}
_defaults = {
'do_subst': False,
}
dav_dir_property()
class dav_file_property(osv.osv):
""" Arbitrary WebDAV properties, attached to ir.attachments.
A special case is the locks that can be applied on file nodes.
There _can_ be properties without a file (RFC?), which means that they
globally apply to all the attachments of the present database.
TODO access permissions, per property.
"""
_name = 'document.webdav.file.property'
_columns = {
'create_date': fields.datetime('Date Created', readonly=True),
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
@ -120,11 +120,11 @@ class dav_file_property(osv.osv):
'value': fields.text('Value'),
'do_subst': fields.boolean('Substitute', required=True),
}
_defaults = {
'do_subst': False,
}
dav_file_property()
#eof

View File

@ -69,9 +69,9 @@ class PersistentTransport(Transport):
host, extra_headers, x509 = Transport.get_host_info(self,host)
if extra_headers == None:
extra_headers = []
extra_headers.append( ( 'Connection', 'keep-alive' ))
return host, extra_headers, x509
def _parse_response(self, file, sock, response):
@ -122,9 +122,9 @@ class PersistentTransport(Transport):
resp = h._conn.getresponse()
# TODO: except BadStatusLine, e:
errcode, errmsg, headers = resp.status, resp.reason, resp.msg
if errcode != 200:
raise ProtocolError(
@ -145,7 +145,7 @@ class PersistentTransport(Transport):
class CompressedTransport(PersistentTransport):
def send_content(self, connection, request_body):
connection.putheader("Content-Type", "text/xml")
if len(request_body) > 512 or True:
buffer = StringIO.StringIO()
output = gzip.GzipFile(mode='wb', fileobj=buffer)
@ -176,7 +176,7 @@ class SafePersistentTransport(PersistentTransport):
class AuthClient(object):
def getAuth(self, atype, realm):
raise NotImplementedError("Cannot authenticate for %s" % atype)
def resolveFailedRealm(self, realm):
""" Called when, using a known auth type, the realm is not in cache
"""
@ -195,7 +195,7 @@ class BasicAuthClient(AuthClient):
_logger.debug("missing key: \"%s\"" % realm)
self.resolveFailedRealm(realm)
return 'Basic '+ self._realm_dict[realm]
def addLogin(self, realm, username, passwd):
""" Add some known username/password for a specific login.
This function should be called once, for each realm
@ -210,7 +210,7 @@ class BasicAuthClient(AuthClient):
class addAuthTransport:
""" Intermediate class that authentication algorithm to http transport
"""
def setAuthClient(self, authobj):
""" Set the authentication client object.
This method must be called before any request is issued, that
@ -218,7 +218,7 @@ class addAuthTransport:
"""
assert isinstance(authobj, AuthClient)
self._auth_client = authobj
def request(self, host, handler, request_body, verbose=0):
# issue XML-RPC request
@ -226,7 +226,7 @@ class addAuthTransport:
h = self.make_connection(host)
if verbose:
h.set_debuglevel(1)
tries = 0
atype = None
realm = None
@ -246,7 +246,7 @@ class addAuthTransport:
resp = h._conn.getresponse()
# except BadStatusLine, e:
tries += 1
if resp.status == 401:
if 'www-authenticate' in resp.msg:
(atype,realm) = resp.msg.getheader('www-authenticate').split(' ',1)
@ -258,7 +258,7 @@ class addAuthTransport:
_logger.debug("Resp: %r %r", resp.version,resp.isclosed(), resp.will_close)
_logger.debug("Want to do auth %s for realm %s", atype, realm)
if atype != 'Basic':
raise ProtocolError(host+handler, 403,
raise ProtocolError(host+handler, 403,
"Unknown authentication method: %s" % atype, resp.msg)
continue # with the outer while loop
else:
@ -268,17 +268,17 @@ class addAuthTransport:
if resp.status != 200:
raise ProtocolError( host + handler,
resp.status, resp.reason, resp.msg )
self.verbose = verbose
try:
sock = h._conn.sock
except AttributeError:
sock = None
return self._parse_response(h.getfile(), sock, resp)
raise ProtocolError(host+handler, 403, "No authentication",'')
raise ProtocolError(host+handler, 403, "No authentication.",'')
class PersistentAuthTransport(addAuthTransport,PersistentTransport):
pass
@ -302,7 +302,7 @@ class HTTPSConnection(httplib.HTTPSConnection):
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
ca_certs=ca_certs,
cert_reqs=cert_reqs)
def getpeercert(self):
import ssl
@ -316,14 +316,14 @@ class HTTPSConnection(httplib.HTTPSConnection):
if cert[0-lf] != '\n':
cert = cert[:0-lf]+'\n'+cert[0-lf:]
_logger.debug("len-footer: %s cert: %r", lf, cert[0-lf])
return cert
class DAVClient(object):
"""An instance of a WebDAV client, connected to the OpenERP server
"""
def __init__(self, user=None, passwd=None, dbg=0, use_ssl=False, useragent=False, timeout=None):
if use_ssl:
self.host = config.get_misc('httpsd', 'interface', False)
@ -353,10 +353,10 @@ class DAVClient(object):
def get_creds(self, obj, cr, uid):
"""Read back the user credentials from cr, uid
@param obj is any orm object, in order to use its pool
@param uid is the numeric id, which we will try to reverse resolve
note: this is a hackish way to get the credentials. It is expected
to break if "base_crypt" is used.
"""
@ -366,7 +366,7 @@ class DAVClient(object):
self.user = res[0]['login']
self.passwd = res[0]['password']
if self.passwd.startswith('$1$'):
# md5 by base crypt. We cannot decode, wild guess
# md5 by base crypt. We cannot decode, wild guess
# that passwd = login
self.passwd = self.user
return True
@ -402,7 +402,7 @@ class DAVClient(object):
r1 = conn.getresponse()
except httplib.BadStatusLine, bsl:
log.warning("Bad status line: %s", bsl.line)
raise Exception('Bad status line')
raise Exception('Bad status line.')
if r1.status == 401: # and r1.headers:
if 'www-authenticate' in r1.msg:
(atype,realm) = r1.msg.getheader('www-authenticate').split(' ',1)
@ -415,7 +415,7 @@ class DAVClient(object):
auths = base64.encodestring(self.user + ':' + self.passwd)
if auths[-1] == "\n":
auths = auths[:-1]
hdrs['Authorization']= 'Basic '+ auths
hdrs['Authorization']= 'Basic '+ auths
#sleep(1)
conn.request(method, path, body, hdrs )
r1 = conn.getresponse()
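A condensed, stand-alone sketch of the 401/Basic retry performed above; httplib is the Python 2 module this file already uses (http.client on Python 3), and host, path and credentials are placeholders.

import base64
import httplib  # http.client on Python 3

def get_with_basic_auth(host, path, user, passwd):
    conn = httplib.HTTPConnection(host)
    conn.request('GET', path)
    resp = conn.getresponse()
    if resp.status == 401 and 'www-authenticate' in resp.msg:
        resp.read()      # drain the challenge response
        conn.close()
        auth = base64.b64encode('%s:%s' % (user, passwd))
        conn = httplib.HTTPConnection(host)
        conn.request('GET', path, headers={'Authorization': 'Basic ' + auth})
        resp = conn.getresponse()
    return resp.status, resp.read()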
@ -437,7 +437,7 @@ class DAVClient(object):
doc = xml.dom.minidom.parseString(data1)
_logger.debug("XML Body:\n %s", doc.toprettyxml(indent="\t"))
except Exception:
_logger.warning("could not print xml", exc_info=True)
_logger.warning("Cannot print XML.", exc_info=True)
pass
conn.close()
return r1.status, r1.msg, data1
@ -475,10 +475,10 @@ class DAVClient(object):
assert s == 200, "Status: %r" % s
assert 'OPTIONS' in m.getheader('Allow')
_logger.debug('Options: %r', m.getheader('Allow'))
if expect:
self._assert_headers(expect, m)
def _parse_prop_response(self, data):
""" Parse a propfind/propname response
"""
@ -488,7 +488,7 @@ class DAVClient(object):
if node.nodeType == node.TEXT_NODE:
rc.append(node.data)
return ''.join(rc)
def getElements(node, namespaces=None, strict=False):
for cnod in node.childNodes:
if cnod.nodeType != node.ELEMENT_NODE:
@ -534,10 +534,10 @@ class DAVClient(object):
rstatus = int(sta)
else:
_logger.debug("What is <%s> inside a <propstat>?", pno.tagName)
else:
_logger.debug("Unknown node: %s", cno.tagName)
res.setdefault(href,[]).append((status, res_nss))
return res
@ -558,7 +558,7 @@ class DAVClient(object):
propstr += '<ns%d:%s xmlns:ns%d="%s" />' %(nscount, p, nscount, ns)
nscount += 1
propstr += '</prop>'
body="""<?xml version="1.0" encoding="utf-8"?>
<propfind xmlns="DAV:">%s</propfind>""" % propstr
hdrs = { 'Content-Type': 'text/xml; charset=utf-8',
@ -566,7 +566,7 @@ class DAVClient(object):
'Depth': depth,
}
s, m, d = self._http_request(self.davpath + path, method='PROPFIND',
s, m, d = self._http_request(self.davpath + path, method='PROPFIND',
hdrs=hdrs, body=body)
assert s == 207, "Bad status: %s" % s
ctype = m.getheader('Content-Type').split(';',1)[0]
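For orientation, this is the kind of request body the loop above produces for two DAV: properties (the ns0/ns1 prefixes are arbitrary; any prefix bound to the DAV: namespace is equivalent), together with the expected reply.

# What gd_propfind(path, props=['getcontentlength', 'getlastmodified'], depth=0)
# would send as the PROPFIND body:
body = ('<?xml version="1.0" encoding="utf-8"?>\n'
        '<propfind xmlns="DAV:"><prop>'
        '<ns0:getcontentlength xmlns:ns0="DAV:" />'
        '<ns1:getlastmodified xmlns:ns1="DAV:" />'
        '</prop></propfind>')
# Sent with Content-Type: text/xml; charset=utf-8, Accept: text/xml and a Depth header;
# the server is expected to answer "207 Multi-Status" with one <response> per resource.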
@ -578,7 +578,7 @@ class DAVClient(object):
else:
assert len(res) >= 1
return res
def gd_propname(self, path, depth=0):
body="""<?xml version="1.0" encoding="utf-8"?>
@ -587,7 +587,7 @@ class DAVClient(object):
'Accept': 'text/xml',
'Depth': depth
}
s, m, d = self._http_request(self.davpath + path, method='PROPFIND',
s, m, d = self._http_request(self.davpath + path, method='PROPFIND',
hdrs=hdrs, body=body)
assert s == 207, "Bad status: %s" % s
ctype = m.getheader('Content-Type').split(';',1)[0]
@ -605,7 +605,7 @@ class DAVClient(object):
def gd_lsl(self, path):
""" Return a list of 'ls -l' kind of data for a folder
This is based on propfind.
"""
@ -616,7 +616,7 @@ class DAVClient(object):
propnames = [ l[1] for l in lspairs]
propres = self.gd_propfind(path, props=propnames, depth=1)
res = []
for href, pr in propres.items():
lsline = {}
@ -638,9 +638,9 @@ class DAVClient(object):
lsline[lsp[0]] = lsp[2]
else:
_logger.debug("Strange status: %s", st)
res.append(lsline)
return res
def gd_get(self, path, crange=None, mime=None, compare=None):
@ -651,7 +651,7 @@ class DAVClient(object):
if isinstance(crange, tuple):
crange = [crange,]
if not isinstance(crange, list):
raise TypeError("Range must be a tuple or list of tuples")
raise TypeError("Range must be a tuple or list of tuples.")
rs = []
for r in crange:
rs.append('%d-%d' % r)
@ -683,13 +683,13 @@ class DAVClient(object):
return ctype, rrange, d
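The range handling in gd_get() above boils down to the following stand-alone helper (illustrative only): byte ranges arrive as (start, end) tuples and are serialised into a single HTTP Range header.

def build_range_header(crange):
    if isinstance(crange, tuple):
        crange = [crange]
    if not isinstance(crange, list):
        raise TypeError("Range must be a tuple or list of tuples.")
    return 'bytes=' + ','.join('%d-%d' % r for r in crange)

# build_range_header((0, 499))              -> 'bytes=0-499'
# build_range_header([(0, 99), (200, 299)]) -> 'bytes=0-99,200-299'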
def gd_put(self, path, body=None, srcpath=None, mime=None, noclobber=False, ):
""" HTTP PUT
""" HTTP PUT
@param noclobber will prevent overwriting a resource (If-None-Match)
@param mime will set the content-type
"""
hdrs = { }
if not (body or srcpath):
raise ValueError("PUT must have something to send")
raise ValueError("PUT must have something to send.")
if (not body) and srcpath:
fd = open(srcpath, 'rb')
body = fd.read()
@ -698,7 +698,7 @@ class DAVClient(object):
hdrs['Content-Type'] = mime
if noclobber:
hdrs['If-None-Match'] = '*'
s, m, d = self._http_request(self.davpath + path, method='PUT',
s, m, d = self._http_request(self.davpath + path, method='PUT',
hdrs=hdrs, body=body)
assert s == (201), "Bad status: %s" % s
etag = m.getheader('ETag')
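As a reference point, a minimal stand-alone PUT with the noclobber semantics described above: "If-None-Match: *" asks the server to refuse overwriting an existing resource (typically answered with 412 Precondition Failed), while a successful creation returns 201 and an ETag. Host and path are placeholders.

import httplib  # http.client on Python 3

def put_no_clobber(host, path, body, mime='application/octet-stream'):
    conn = httplib.HTTPConnection(host)
    headers = {'Content-Type': mime, 'If-None-Match': '*'}
    conn.request('PUT', path, body, headers)
    resp = conn.getresponse()
    return resp.status, resp.getheader('ETag')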

View File

@ -65,7 +65,7 @@ class Prop2xml(object):
def createText2Node(self, data):
if not isinstance(data, StringTypes):
raise TypeError, "node contents must be a string"
raise TypeError, "Node contents must be a string."
t = Text2()
t.data = data
t.ownerDocument = self.doc

View File

@ -73,7 +73,7 @@ def OpenDAVConfig(**kw):
class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
verbose = False
protocol_version = 'HTTP/1.1'
_HTTP_OPTIONS= { 'DAV' : ['1', '2'],
'Allow' : [ 'GET', 'HEAD', 'COPY', 'MOVE', 'POST', 'PUT',
@ -119,7 +119,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
if up.path.startswith(self.davpath):
self.headers['Destination'] = up.path[len(self.davpath):]
else:
raise DAV_Forbidden("Not allowed to copy/move outside webdav path")
raise DAV_Forbidden("Not allowed to copy/move outside webdav path.")
# TODO: locks
DAVRequestHandler.copymove(self, CLASS)
@ -304,7 +304,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
res = dc.unlock(uri, token)
except DAV_Error, (ec, dd):
return self.send_status(ec, dd)
if res == True:
self.send_body(None, '204', 'OK', 'Resource unlocked.')
else:
@ -338,7 +338,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
if isinstance(ldif, list):
if len(ldif) !=1 or (not isinstance(ldif[0], TagList)) \
or len(ldif[0].list) != 1:
raise DAV_Error(400, "Cannot accept multiple tokens")
raise DAV_Error(400, "Cannot accept multiple tokens.")
ldif = ldif[0].list[0]
if ldif[0] == '<' and ldif[-1] == '>':
ldif = ldif[1:-1]
@ -352,7 +352,7 @@ class DAVHandler(HttpOptions, FixSendError, DAVRequestHandler):
lock_data.update(self._lock_unlock_parse(body))
if lock_data['refresh'] and not lock_data.get('token', False):
raise DAV_Error(400, 'Lock refresh must specify token')
raise DAV_Error(400, 'Lock refresh must specify token.')
lock_data['depth'] = depth
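For context, the body of a typical RFC 4918 LOCK request that the parsing above consumes, and the refresh variant that must instead carry a token, which is why a refresh without one is rejected. The owner URL is a placeholder.

# A fresh exclusive write lock is requested with a body like this:
LOCK_BODY = """<?xml version="1.0" encoding="utf-8"?>
<D:lockinfo xmlns:D="DAV:">
  <D:lockscope><D:exclusive/></D:lockscope>
  <D:locktype><D:write/></D:locktype>
  <D:owner><D:href>mailto:user@example.com</D:href></D:owner>
</D:lockinfo>"""
# A lock refresh sends no lockinfo body; the token travels in the If header,
# e.g. If: (<opaquelocktoken:...>), so a refresh without a token is a 400.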
@ -487,7 +487,7 @@ class dummy_dav_interface(object):
class DAVStaticHandler(http_server.StaticHTTPHandler):
""" A variant of the Static handler, which will serve dummy DAV requests
"""
verbose = False
protocol_version = 'HTTP/1.1'
_HTTP_OPTIONS= { 'DAV' : ['1', '2'],
@ -503,13 +503,13 @@ class DAVStaticHandler(http_server.StaticHTTPHandler):
self.end_headers()
if hasattr(self, '_flush'):
self._flush()
if self.command != 'HEAD':
self.wfile.write(content)
def do_PROPFIND(self):
"""Answer to PROPFIND with generic data.
A rough copy of python-webdav's do_PROPFIND, but hacked to work
statically.
"""
@ -575,7 +575,7 @@ try:
handler._config = conf
reg_http_service(directory, DAVHandler, DAVAuthProvider)
_logger.info("WebDAV service registered at path: %s/ "% directory)
if not (config.get_misc('webdav', 'no_root_hack', False)):
# Now, replace the static http handler with the dav-enabled one.
# If a static-http service has been specified for our server, then
@ -592,7 +592,7 @@ try:
# an _ugly_ hack: we put that dir back in tools.config.misc, so that
# the StaticHttpHandler can find its dir_path.
config.misc.setdefault('static-http',{})['dir_path'] = dir_path
reg_http_service('/', DAVStaticHandler)
except Exception, e:
@ -617,10 +617,10 @@ def init_well_known():
init_well_known()
class PrincipalsRedirect(RedirectHTTPHandler):
redirect_paths = {}
def _find_redirect(self):
for b, r in self.redirect_paths.items():
if self.path.startswith(b):
@ -628,7 +628,7 @@ class PrincipalsRedirect(RedirectHTTPHandler):
return False
def init_principals_redirect():
""" Some devices like the iPhone will look under /principals/users/xxx for
""" Some devices like the iPhone will look under /principals/users/xxx for
the user's properties. In OpenERP we _cannot_ have a stray /principals/...
working path, since we have a database path and the /webdav/ component. So,
the best solution is to redirect the url with 301. Luckily, it does work in

Some files were not shown because too many files have changed in this diff.