commit c3e569fb08

Merge commit 'origin/master' into xrg

Conflicts:
	bin/netsvc.py
	bin/report/report_sxw.py
	setup.py

bzr revid: p_christ@hol.gr-20090803072357-zz2cm1sycsagftnn
@@ -675,6 +675,17 @@ def load_modules(db, force_demo=False, status=None, update_module=False):
         status = {}
 
     cr = db.cursor()
+    if cr:
+        cr.execute("SELECT relname FROM pg_class WHERE relkind='r' AND relname='ir_module_module'")
+        if len(cr.fetchall())==0:
+            logger.notifyChannel("init", netsvc.LOG_INFO, "init db")
+            tools.init_db(cr)
+            # cr.execute("update res_users set password=%s where id=%s",('admin',1))
+            # in that case, force --init=all
+            tools.config["init"]["all"] = 1
+            tools.config['update']['all'] = 1
+            if not tools.config['without_demo']:
+                tools.config["demo"]['all'] = 1
     force = []
     if force_demo:
         force.append('demo')
@@ -7,13 +7,13 @@ msgstr ""
 "Project-Id-Version: OpenERP Server 5.0.0\n"
 "Report-Msgid-Bugs-To: support@openerp.com\n"
 "POT-Creation-Date: 2009-05-19 14:36+0000\n"
-"PO-Revision-Date: 2009-06-08 09:44+0000\n"
-"Last-Translator: Olivier (OpenERP) <Unknown>\n"
+"PO-Revision-Date: 2009-07-07 13:42+0000\n"
+"Last-Translator: Olivier (Open ERP) <Unknown>\n"
 "Language-Team: \n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
-"X-Launchpad-Export-Date: 2009-06-08 09:48+0000\n"
+"X-Launchpad-Export-Date: 2009-07-07 14:08+0000\n"
 "X-Generator: Launchpad (build Unknown)\n"
 
 #. module: base
@@ -3424,7 +3424,6 @@ msgstr "Configuration de l'action client"
 #. module: base
 #: model:ir.actions.act_window,name:base.action_partner_address_form
 #: model:ir.model,name:base.model_res_partner_address
-#: model:ir.ui.menu,name:base.menu_partner_address_form
 #: view:res.partner.address:0
 msgid "Partner Addresses"
 msgstr "Adresses des Partenaires"
@@ -4024,7 +4023,7 @@ msgstr "Etat d'Esprit Partenaire"
 #. module: base
 #: selection:ir.ui.view,type:0
 msgid "mdx"
-msgstr ""
+msgstr "mdx"
 
 #. module: base
 #: model:res.country,name:base.bi
@@ -6348,7 +6347,7 @@ msgstr "Voir la Réf."
 #. module: base
 #: selection:module.lang.install,init,lang:0
 msgid "Dutch (Belgium) / Nederlands (Belgïe)"
-msgstr ""
+msgstr "Néerlandais (Belgique) / Nederlands (Belgïe)"
 
 #. module: base
 #: model:ir.actions.act_window,name:base.open_repository_tree
@@ -73,7 +73,7 @@ class ir_model(osv.osv):
 
     def write(self, cr, user, ids, vals, context=None):
         if context:
-            del context['__last_update']
+            context.pop('__last_update', None)
         return super(ir_model,self).write(cr, user, ids, vals, context)
 
     def create(self, cr, user, vals, context=None):
@@ -229,6 +229,9 @@ class ir_model_fields(osv.osv):
         'field_description': lambda *a: '',
     }
     _order = "id"
+    _sql_constraints = [
+        ('size_gt_zero', 'CHECK (size>0)', 'Size of the field can never be less than 1 !'),
+    ]
     def unlink(self, cr, user, ids, context=None):
         for field in self.browse(cr, user, ids, context):
             if field.state <> 'manual':
@@ -244,17 +247,18 @@ class ir_model_fields(osv.osv):
             vals['model'] = model_data.model
         if context and context.get('manual',False):
             vals['state'] = 'manual'
         res = super(ir_model_fields,self).create(cr, user, vals, context)
         if vals.get('state','base') == 'manual':
             if not vals['name'].startswith('x_'):
                 raise except_orm(_('Error'), _("Custom fields must have a name that starts with 'x_' !"))
 
             if 'relation' in vals and not self.pool.get('ir.model').search(cr, user, [('model','=',vals['relation'])]):
                 raise except_orm(_('Error'), _("Model %s Does not Exist !" % vals['relation']))
 
             if self.pool.get(vals['model']):
                 self.pool.get(vals['model']).__init__(self.pool, cr)
                 self.pool.get(vals['model'])._auto_init(cr, {})
 
         return res
 ir_model_fields()
+
@@ -35,7 +35,7 @@ def _check_xml(self, cr, uid, ids, context={}):
         if not relaxng.validate(eview):
             logger = netsvc.Logger()
             logger.notifyChannel('init', netsvc.LOG_ERROR, 'The view does not fit the required schema !')
-            logger.notifyChannel('init', netsvc.LOG_ERROR, relaxng.error_log.last_error)
+            logger.notifyChannel('init', netsvc.LOG_ERROR, tools.ustr(relaxng.error_log.last_error))
             return False
     return True
 
@@ -480,7 +480,7 @@ class module(osv.osv):
             if not mod.description:
                 logger.notifyChannel("init", netsvc.LOG_WARNING, 'module %s: description is empty !' % (mod.name,))
 
-            if not mod.certificate:
+            if not mod.certificate or not mod.certificate.isdigit():
                 logger.notifyChannel('init', netsvc.LOG_WARNING, 'module %s: no quality certificate' % (mod.name,))
             else:
                 val = long(mod.certificate[2:]) % 97 == 29
@@ -7,8 +7,10 @@
 	<xsl:variable name="rightMargin">1cm</xsl:variable>
 	<xsl:variable name="topMargin">1cm</xsl:variable>
 	<xsl:variable name="bottomMargin">1cm</xsl:variable>
-	<xsl:variable name="pageSize">29.7cm,21cm</xsl:variable>
+	<!--:variable name="pageSize">29.7cm,21cm</xsl:variable> Or use default width and height for frame -->
+	<xsl:variable name="pageSize">
+		<xsl:value-of select="/report/config/PageSize"/>
+	</xsl:variable>
 	<xsl:variable name="page_format">a4_letter</xsl:variable>
 
 	<xsl:template name="first_page_frames">
@@ -24,7 +24,6 @@
 #
 ##############################################################################
 
-
 import SimpleXMLRPCServer
 import SocketServer
 import logging
@@ -121,7 +120,7 @@ def init_logger():
             handler = logging.handlers.FileHandler(logf)
         except Exception, ex:
             sys.stderr.write("ERROR: couldn't create the logfile directory. Logging to the standard output.\n")
             handler = logging.StreamHandler(sys.stdout)
     else:
         # Normal Handler on standard output
         handler = logging.StreamHandler(sys.stdout)
@@ -158,7 +157,10 @@ def init_logger():
 
+
 class Logger(object):
 
     def notifyChannel(self, name, level, msg):
+        from service.web_services import common
+
         log = logging.getLogger(tools.ustr(name))
 
         if level == LOG_DEBUG_RPC and not hasattr(log, level):
@@ -171,6 +173,9 @@ class Logger(object):
             msg = tools.exception_to_unicode(msg)
 
         try:
+            if level in (LOG_ERROR,LOG_CRITICAL):
+                msg = common().get_server_environment() + msg
+
             result = tools.ustr(msg).strip().split('\n')
         except UnicodeDecodeError:
             result = msg.strip().split('\n')
@@ -210,7 +215,7 @@ class Agent(object):
         for timer in self._timers[db]:
             if not timer.isAlive():
                 self._timers[db].remove(timer)
 
     @classmethod
     def cancel(cls, db_name):
         """Cancel all timers for a given database. If None passed, all timers are cancelled"""
@@ -218,7 +223,7 @@ class Agent(object):
             if db_name is None or db == db_name:
                 for timer in cls._timers[db]:
                     timer.cancel()
 
     @classmethod
     def quit(cls):
         cls.cancel(None)
@@ -198,10 +198,22 @@ class expression(object):
                     self.__exp[i] = (left, 'in', right)
                 else:
                     # other field type
+                    # add the time part to datetime field when it's not there:
+                    if field._type == 'datetime' and self.__exp[i][2] and len(self.__exp[i][2]) == 10:
+
+                        self.__exp[i] = list(self.__exp[i])
+
+                        if operator in ('>', '>='):
+                            self.__exp[i][2] += ' 00:00:00'
+                        elif operator in ('<', '<='):
+                            self.__exp[i][2] += ' 23:59:59'
+
+                        self.__exp[i] = tuple(self.__exp[i])
+
                 if field.translate:
                     if operator in ('like', 'ilike', 'not like', 'not ilike'):
                         right = '%%%s%%' % right
 
                     operator = operator == '=like' and 'like' or operator
 
                     query1 = '( SELECT res_id' \
@@ -224,7 +236,7 @@ class expression(object):
                             ' SELECT id' \
                             ' FROM "' + working_table._table + '"' \
                             ' WHERE "' + left + '" ' + operator + instr + ")"
 
                     query2 = [working_table._name + ',' + left,
                               context.get('lang', False) or 'en_US',
                               'model',

bin/osv/orm.py (210 changed lines)

@@ -390,8 +390,12 @@ class orm_template(object):
                     vals['select_level']
                 ))
                 if 'module' in context:
+                    name1 = 'field_' + self._table + '_' + k
+                    cr.execute("select name from ir_model_data where name='%s'"%(name1))
+                    if cr.fetchone():
+                        name1 = name1 + "_" + str(id)
                     cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, now(), now(), %s, %s, %s)", \
-                        (('field_'+self._table+'_'+k)[:64], context['module'], 'ir.model.fields', id)
+                        (name1[:64], context['module'], 'ir.model.fields', id)
                     )
             else:
                 for key, val in vals.items():
@@ -443,32 +447,45 @@ class orm_template(object):
 
     def __export_row(self, cr, uid, row, fields, context=None):
 
-        def check_type(type,r):
-            if type == 'float':
+        def check_type(field_type):
+            if field_type == 'float':
                 return 0.0
-            elif type == 'integer':
+            elif field_type == 'integer':
                 return 0
-            elif type == 'char':
-                return ''
-            return r
+            elif field_type == 'boolean':
+                return False
+            return ''
 
         lines = []
         data = map(lambda x: '', range(len(fields)))
         done = []
         for fpos in range(len(fields)):
             f = fields[fpos]
             if f:
                 r = row
                 i = 0
                 while i < len(f):
-                    r = r[f[i]]
+                    if f[i] == 'db_id':
+                        r = r['id']
+                    elif f[i] == 'id':
+                        model_data = self.pool.get('ir.model.data')
+                        data_ids = model_data.search(cr, uid, [('model','=',r._table_name),('res_id','=',r['id'])])
+                        if len(data_ids):
+                            d = model_data.read(cr, uid, data_ids, ['name','module'])[0]
+                            if d['module']:
+                                r = '%s.%s'%(d['module'],d['name'])
+                            else:
+                                r = d['name']
+                        else:
+                            break
+                    else:
+                        r = r[f[i]]
                     if not r:
                         if f[i] in self._columns:
-                            r = check_type(self._columns[f[i]]._type,r)
+                            r = check_type(self._columns[f[i]]._type)
                         elif f[i] in self._inherit_fields:
-                            r = check_type(self._inherit_fields[f[i]][2]._type,r)
-                        data[fpos] = tools.ustr(r)
+                            r = check_type(self._inherit_fields[f[i]][2]._type)
+                        data[fpos] = r
                         break
                 if isinstance(r, (browse_record_list, list)):
                     first = True
@@ -476,56 +493,89 @@ class orm_template(object):
                             or [], fields)
                     if fields2 in done:
                         break
                     done.append(fields2)
                     for row2 in r:
                         lines2 = self.__export_row(cr, uid, row2, fields2,
                                 context)
                         if first:
                             for fpos2 in range(len(fields)):
                                 if lines2 and lines2[0][fpos2]:
                                     data[fpos2] = lines2[0][fpos2]
+                            if not data[fpos]:
+                                dt = ''
+                                for rr in r :
+                                    if isinstance(rr.name, browse_record):
+                                        rr = rr.name
+                                    dt+=rr.name+','
+                                data[fpos] = dt[:-1]
+                                break
                             lines += lines2[1:]
                             first = False
                         else:
                             lines += lines2
                     break
                 i += 1
             if i == len(f):
+                if isinstance(r, browse_record):
+                    r = r.name
                 data[fpos] = tools.ustr(r or '')
         return [data] + lines
 
-    def export_data(self, cr, uid, ids, fields, context=None):
+    def export_data(self, cr, uid, ids, fields_to_export, context=None):
         if not context:
             context = {}
-        fields = map(lambda x: x.split('/'), fields)
+        imp_comp = context.get('import_comp',False)
+        cols = self._columns.copy()
+        for f in self._inherit_fields:
+            cols.update({f: self._inherit_fields[f][2]})
+        fields_to_export = map(lambda x: x.split('/'), fields_to_export)
+        fields_export = fields_to_export+[]
+        warning = ''
+        warning_fields = []
+        for field in fields_export:
+            if imp_comp and len(field)>1:
+                warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field)))
+            elif len (field) <=1:
+                if imp_comp and cols.get(field and field[0],False):
+                    if ((isinstance(cols[field[0]], fields.function) and not cols[field[0]].store) \
+                            or isinstance(cols[field[0]], fields.related)\
+                            or isinstance(cols[field[0]], fields.one2many)):
+                        warning_fields.append('/'.join(map(lambda x:x in cols and cols[x].string or x,field)))
         datas = []
+        if imp_comp and len(warning_fields):
+            warning = 'Following columns cannot be exported since you select to be import compatible.\n%s' %('\n'.join(warning_fields))
+            cr.rollback()
+            return {'warning' : warning}
         for row in self.browse(cr, uid, ids, context):
-            datas += self.__export_row(cr, uid, row, fields, context)
-        return datas
+            datas += self.__export_row(cr, uid, row, fields_to_export, context)
+        return {'datas':datas}
 
     def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
         if not context:
             context = {}
         fields = map(lambda x: x.split('/'), fields)
         logger = netsvc.Logger()
-        def process_liness(self, datas, prefix, fields_def, position=0):
+        ir_model_data_obj = self.pool.get('ir.model.data')
+        def process_liness(self, datas, prefix, current_module, model_name, fields_def, position=0):
             line = datas[position]
             row = {}
             translate = {}
             todo = []
             warning = ''
             data_id = False
+            data_res_id = False
+            is_xml_id = False
+            is_db_id = False
+            ir_model_data_obj = self.pool.get('ir.model.data')
             #
             # Import normal fields
             #
             for i in range(len(fields)):
                 if i >= len(line):
                     raise Exception(_('Please check that all your lines have %d columns.') % (len(fields),))
-                field = fields[i]
-                if field == ["id"]:
-                    data_id = line[i]
+                if not line[i]:
                     continue
+                field = fields[i]
                 if (len(field)==len(prefix)+1) and field[len(prefix)].endswith(':id'):
                     res_id = False
                     if line[i]:
@@ -535,8 +585,7 @@ class orm_template(object):
                         if '.' in word:
                             module, xml_id = word.rsplit('.', 1)
                         else:
                             module, xml_id = current_module, word
-                        ir_model_data_obj = self.pool.get('ir.model.data')
                         id = ir_model_data_obj._get_id(cr, uid, module,
                                 xml_id)
                         res_id2 = ir_model_data_obj.read(cr, uid, [id],
@@ -549,8 +598,7 @@ class orm_template(object):
                     if '.' in line[i]:
                         module, xml_id = line[i].rsplit('.', 1)
                     else:
                         module, xml_id = current_module, line[i]
-                    ir_model_data_obj = self.pool.get('ir.model.data')
                     id = ir_model_data_obj._get_id(cr, uid, module, xml_id)
                     res_res_id = ir_model_data_obj.read(cr, uid, [id],
                             ['res_id'])
@@ -565,6 +613,63 @@ class orm_template(object):
                     continue
                 if (len(field) == len(prefix)+1) and \
                         (prefix == field[0:len(prefix)]):
+                    if field[len(prefix)] == "id":
+                        # XML ID
+                        db_id = False
+                        is_xml_id = data_id = line[i]
+                        d = data_id.split('.')
+                        module = len(d)>1 and d[0] or ''
+                        name = len(d)>1 and d[1] or d[0]
+                        data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('name','=',name)])
+                        if len(data_ids):
+                            d = ir_model_data_obj.read(cr, uid, data_ids, ['res_id'])[0]
+                            db_id = d['res_id']
+                        if is_db_id and not db_id:
+                            data_ids = ir_model_data_obj.search(cr, uid, [('module','=',module),('model','=',model_name),('res_id','=',is_db_id)])
+                            if not len(data_ids):
+                                ir_model_data_obj.create(cr, uid, {'module':module, 'model':model_name, 'name':name, 'res_id':is_db_id})
+                                db_id = is_db_id
+                        if is_db_id and int(db_id) != int(is_db_id):
+                            warning += ("Id is not the same than existing one: " + str(is_db_id) + " !\n")
+                            logger.notifyChannel("import", netsvc.LOG_ERROR,
+                                    "Id is not the same than existing one: " + str(is_db_id) + ' !\n')
+                        continue
+
+                    if field[len(prefix)] == "db_id":
+                        # Database ID
+                        try:
+                            line[i]= int(line[i])
+                        except Exception, e:
+                            warning += (str(e) + "!\n")
+                            logger.notifyChannel("import", netsvc.LOG_ERROR,
+                                    str(e) + '!\n')
+                            continue
+                        is_db_id = line[i]
+                        obj_model = self.pool.get(model_name)
+                        ids = obj_model.search(cr, uid, [('id','=',line[i])])
+                        if not len(ids):
+                            warning += ("Database ID doesn't exist: " + model_name + ": " + str(line[i]) + " !\n")
+                            logger.notifyChannel("import", netsvc.LOG_ERROR,
+                                    "Database ID doesn't exist: " + model_name + ": " + str(line[i]) + ' !\n')
+                            continue
+                        else:
+                            data_res_id = ids[0]
+                        data_ids = ir_model_data_obj.search(cr, uid, [('model','=',model_name),('res_id','=',line[i])])
+                        if len(data_ids):
+                            d = ir_model_data_obj.read(cr, uid, data_ids, ['name','module'])[0]
+                            data_id = d['name']
+                            if d['module']:
+                                data_id = '%s.%s'%(d['module'],d['name'])
+                            else:
+                                data_id = d['name']
+                        if is_xml_id and not data_id:
+                            data_id = is_xml_id
+                        if is_xml_id and is_xml_id!=data_id:
+                            warning += ("Id is not the same than existing one: " + str(line[i]) + " !\n")
+                            logger.notifyChannel("import", netsvc.LOG_ERROR,
+                                    "Id is not the same than existing one: " + str(line[i]) + ' !\n')
+
+                        continue
                     if fields_def[field[len(prefix)]]['type'] == 'integer':
                         res = line[i] and int(line[i])
                     elif fields_def[field[len(prefix)]]['type'] == 'boolean':
@@ -580,18 +685,22 @@ class orm_template(object):
                         sel = fields_def[field[len(prefix)]]['selection'](self,
                                 cr, uid, context)
                     for key, val in sel:
-                        if str(key) == line[i]:
+                        if line[i] in [tools.ustr(key),tools.ustr(val)]: #Acepting key or value for selection field
                             res = key
+                            break
                     if line[i] and not res:
                         logger.notifyChannel("import", netsvc.LOG_WARNING,
                                 "key '%s' not found in selection field '%s'" % \
                                 (line[i], field[len(prefix)]))
+
+                        warning += "Key/value '"+ str(line[i]) +"' not found in selection field '"+str(field[len(prefix)])+"'"
+
                 elif fields_def[field[len(prefix)]]['type']=='many2one':
                     res = False
                     if line[i]:
                         relation = fields_def[field[len(prefix)]]['relation']
                         res2 = self.pool.get(relation).name_search(cr, uid,
-                                line[i], [], operator='=')
+                                line[i], [], operator='=', context=context)
                         res = (res2 and res2[0][0]) or False
                     if not res:
                         warning += ('Relation not found: ' + line[i] + \
@@ -605,7 +714,7 @@ class orm_template(object):
                     relation = fields_def[field[len(prefix)]]['relation']
                     for word in line[i].split(config.get('csv_internal_sep')):
                         res2 = self.pool.get(relation).name_search(cr,
-                                uid, word, [], operator='=')
+                                uid, word, [], operator='=', context=context)
                         res3 = (res2 and res2[0][0]) or False
                         if not res3:
                             warning += ('Relation not found: ' + \
@@ -625,19 +734,20 @@ class orm_template(object):
                     if field[0] not in todo:
                         todo.append(field[len(prefix)])
             #
-            # Import one2many fields
+            # Import one2many, many2many fields
             #
             nbrmax = 1
             for field in todo:
-                newfd = self.pool.get(fields_def[field]['relation']).fields_get(
+                relation_obj = self.pool.get(fields_def[field]['relation'])
+                newfd = relation_obj.fields_get(
                         cr, uid, context=context)
-                res = process_liness(self, datas, prefix + [field], newfd, position)
-                (newrow, max2, w2, translate2, data_id2) = res
+                res = process_liness(self, datas, prefix + [field], current_module, relation_obj._name, newfd, position)
+                (newrow, max2, w2, translate2, data_id2, data_res_id2) = res
                 nbrmax = max(nbrmax, max2)
                 warning = warning + w2
                 reduce(lambda x, y: x and y, newrow)
                 row[field] = (reduce(lambda x, y: x or y, newrow.values()) and \
                         [(0, 0, newrow)]) or []
                 i = max2
                 while (position+i)<len(datas):
                     ok = True
@@ -648,11 +758,11 @@ class orm_template(object):
                     if not ok:
                         break
 
-                    (newrow, max2, w2, translate2, data_id2) = process_liness(
-                            self, datas, prefix+[field], newfd, position+i)
+                    (newrow, max2, w2, translate2, data_id2, data_res_id2) = process_liness(
+                            self, datas, prefix+[field], current_module, relation_obj._name, newfd, position+i)
                     warning = warning+w2
                     if reduce(lambda x, y: x or y, newrow.values()):
                         row[field].append((0, 0, newrow))
                     i += max2
                     nbrmax = max(nbrmax, i)
 
@@ -660,7 +770,7 @@ class orm_template(object):
             for i in range(max(nbrmax, 1)):
                 #if datas:
                 datas.pop(0)
-            result = (row, nbrmax, warning, translate, data_id)
+            result = (row, nbrmax, warning, translate, data_id, data_res_id)
             return result
 
         fields_def = self.fields_get(cr, uid, context=context)
@@ -675,16 +785,16 @@ class orm_template(object):
            counter += 1
            res = {}
            #try:
-           (res, other, warning, translate, data_id) = \
-                   process_liness(self, datas, [], fields_def)
+           (res, other, warning, translate, data_id, res_id) = \
+                   process_liness(self, datas, [], current_module, self._name, fields_def)
            if warning:
                cr.rollback()
-               return (-1, res, warning, '')
+               return (-1, res, 'Line ' + str(counter) +' : ' + warning, '')
 
            try:
-               id = self.pool.get('ir.model.data')._update(cr, uid, self._name,
+               id = ir_model_data_obj._update(cr, uid, self._name,
                        current_module, res, xml_id=data_id, mode=mode,
-                       noupdate=noupdate)
+                       noupdate=noupdate, res_id=res_id)
            except Exception, e:
                import psycopg2
                if isinstance(e,psycopg2.IntegrityError):
@@ -693,7 +803,7 @@ class orm_template(object):
                    if key in e[0]:
                        msg = self.pool._sql_error[key]
                        break
-               return (-1, res,msg,'' )
+               return (-1, res,'Line ' + str(counter) +' : ' + msg,'' )
 
        for lang in translate:
            context2 = context.copy()
@@ -27,8 +27,10 @@ from lxml import etree
 from report import render
 import libxml2
 import libxslt
+import locale
 
 import time, os
+import mx.DateTime
 
 class report_printscreen_list(report_int):
     def __init__(self, name):
@@ -87,7 +89,8 @@ class report_printscreen_list(report_int):
             n.text = text
             config.append(n)
 
-        _append_node('date', time.strftime('%d/%m/%Y'))
+        #_append_node('date', time.strftime('%d/%m/%Y'))
+        _append_node('date', time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
         _append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
         _append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
        _append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
@@ -106,7 +109,7 @@ class report_printscreen_list(report_int):
         for f in fields_order:
             s = 0
             ince += 1
-            if fields[f]['type'] in ('date','time','float','integer'):
+            if fields[f]['type'] in ('date','time','datetime','float','integer'):
                 s = 60
             strmax -= s
             if fields[f]['type'] in ('float','integer'):
@@ -132,13 +135,11 @@ class report_printscreen_list(report_int):
 
         new_doc.append(header)
         lines = etree.Element("lines")
-
         tsum = []
         count = len(fields_order)
         for i in range(0,count):
             tsum.append(0)
 
-
         for line in results:
             node_line = etree.Element("row")
             count = -1
@@ -158,10 +159,28 @@ class report_printscreen_list(report_int):
                 if fields[f]['type'] in ('one2many','many2many') and line[f]:
                     line[f] = '( '+tools.ustr(len(line[f])) + ' )'
 
-                if fields[f]['type'] == 'float':
+                if fields[f]['type'] == 'float' and line[f]:
                     precision=(('digits' in fields[f]) and fields[f]['digits'][1]) or 2
                     line[f]='%.2f'%(line[f])
 
+                if fields[f]['type'] == 'date' and line[f]:
+                    format = str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))
+                    d1= mx.DateTime.strptime(line[f],'%Y-%m-%d')
+                    new_d1 = d1.strftime(format)
+                    line[f] = new_d1
+
+                if fields[f]['type'] == 'time' and line[f]:
+                    format = str(locale.nl_langinfo(locale.T_FMT))
+                    d1= mx.DateTime.strptime(line[f],'%H:%M:%S')
+                    new_d1 = d1.strftime(format)
+                    line[f] = new_d1
+
+                if fields[f]['type'] == 'datetime' and line[f]:
+                    format = str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))+' '+str(locale.nl_langinfo(locale.T_FMT))
+                    d1= mx.DateTime.strptime(line[f],'%Y-%m-%d %H:%M:%S')
+                    new_d1 = d1.strftime(format)
+                    line[f] = new_d1
+
                 col = etree.Element("col")
                 col.set('para','yes')
                 col.set('tree','no')
@@ -169,7 +188,6 @@ class report_printscreen_list(report_int):
                     col.text = tools.ustr(line[f] or '')
                     if temp[count] == 1:
                         tsum[count] = float(tsum[count]) + float(line[f]);
-
                 else:
                     col.text = '/'
                 node_line.append(col)
@@ -197,7 +215,6 @@ class report_printscreen_list(report_int):
 
             lines.append(node_line)
 
-
         new_doc.append(lines)
         styledoc = libxml2.parseFile(os.path.join(tools.config['root_path'],'addons/base/report/custom_new.xsl'))
         style = libxslt.parseStylesheetDoc(styledoc)
@@ -223,7 +223,7 @@ class _rml_doc(object):
         self.canvas.save()
 
 class _rml_canvas(object):
-    def __init__(self, canvas,localcontext, doc_tmpl=None, doc=None, images={}, path='.', title=None):
+    def __init__(self, canvas, localcontext, doc_tmpl=None, doc=None, images={}, path='.', title=None):
         self.localcontext = localcontext
         self.canvas = canvas
         self.styles = doc.styles
@@ -236,7 +236,7 @@ class _rml_canvas(object):
             self.canvas.setTitle(self.title)
 
     def _textual(self, node, x=0, y=0):
-        rc = utils._process_text(self, node.text or '')
+        rc = utils._process_text(self, node.text.encode('utf-8') or '')
         for n in node:
             if n.tag == 'seq':
                 from reportlab.lib.sequencer import getSequencer
@@ -315,7 +315,7 @@ class _rml_canvas(object):
         self.canvas.circle(x_cen=utils.unit_get(node.get('x')), y_cen=utils.unit_get(node.get('y')), r=utils.unit_get(node.get('radius')), **utils.attr_get(node, [], {'fill':'bool','stroke':'bool'}))
 
     def _place(self, node):
-        flows = _rml_flowable(self.doc, images=self.images, path=self.path, title=self.title).render(node)
+        flows = _rml_flowable(self.doc, self.localcontext, images=self.images, path=self.path, title=self.title).render(node)
         infos = utils.attr_get(node, ['x','y','width','height'])
 
         infos['y']+=infos['height']
@@ -453,12 +453,12 @@ class _rml_draw(object):
 
     def render(self, canvas, doc):
         canvas.saveState()
-        cnv = _rml_canvas(canvas,self.localcontext, doc, self.styles, images=self.images, path=self.path, title=self.canvas_title)
+        cnv = _rml_canvas(canvas, self.localcontext, doc, self.styles, images=self.images, path=self.path, title=self.canvas_title)
         cnv.render(self.node)
         canvas.restoreState()
 
 class _rml_flowable(object):
-    def __init__(self,doc,localcontext, images={}, path='.', title=None):
+    def __init__(self, doc, localcontext, images={}, path='.', title=None):
         self.localcontext = localcontext
         self.doc = doc
         self.styles = doc.styles
@@ -474,7 +474,7 @@ class _rml_flowable(object):
                 if key in ('rml_except', 'rml_loop', 'rml_tag'):
                     del txt_n.attrib[key]
                 if True or not self._textual(n).isspace():
-                    txt_n.text = self._textual(n)
+                    txt_n.text = utils.xml2str(self._textual(n))
                 txt_n.tail = ''
                 rc1 += etree.tostring(txt_n)
         #rc1 += utils._process_text(self, node.tail or '')
@@ -41,6 +41,7 @@ import re
 import reportlab
 from lxml import etree
 import copy
+import tools
 
 _regex = re.compile('\[\[(.+?)\]\]')
 
@@ -111,7 +112,7 @@ def _process_text(self, txt):
             pass
     if type(txt)==type('') or type(txt)==type(u''):
         txt2 = str2xml(txt)
-        result += txt2
+        result += tools.ustr(txt2)
     elif (txt is not None) and (txt is not False):
         result += str(txt)
     return result
@@ -119,7 +120,7 @@ def _process_text(self, txt):
 def text_get(node):
     rc = ''
     for node in node.getchildren():
-        rc = rc + node.text
+        rc = rc + tools.ustr(node.text)
     return rc
 
 units = [
@@ -305,7 +305,7 @@ class rml_parse(object):
         head_dom = etree.XML(rml_head)
         for tag in head_dom.getchildren():
             found = rml_dom.find('.//'+tag.tag)
-            if found is not None:
+            if found is not None and len(found):
                 if tag.get('position'):
                     found.append(tag)
                 else :
@@ -29,7 +29,7 @@ import thread
 import threading
 import time
 import sys
+import platform
 from tools.translate import _
 import addons
 import ir
@@ -38,7 +38,7 @@ import pooler
 import release
 import sql_db
 import tools
+import locale
 logging.basicConfig()
 
 class db(netsvc.Service):
@@ -60,6 +60,8 @@ class db(netsvc.Service):
         self.id = 0
         self.id_protect = threading.Semaphore()
 
+        self._pg_psw_env_var_is_set = False # on win32, pg_dump need the PGPASSWORD env var
+
     def create(self, password, db_name, demo, lang, user_password='admin'):
         security.check_super(password)
         self.id_protect.acquire()
@@ -169,17 +171,28 @@ class db(netsvc.Service):
         sql_db.close_db('template1')
         return True
 
+    def _set_pg_psw_env_var(self):
+        if os.name == 'nt' and not os.environ.get('PGPASSWORD', ''):
+            os.environ['PGPASSWORD'] = tools.config['db_password']
+            self._pg_psw_env_var_is_set = True
+
+    def _unset_pg_psw_env_var(self):
+        if os.name == 'nt' and self._pg_psw_env_var_is_set:
+            os.environ['PGPASSWORD'] = ''
+
     def dump(self, password, db_name):
         security.check_super(password)
         logger = netsvc.Logger()
 
+        self._set_pg_psw_env_var()
+
         cmd = ['pg_dump', '--format=c', '--no-owner']
         if tools.config['db_user']:
             cmd.append('--username=' + tools.config['db_user'])
         if tools.config['db_host']:
             cmd.append('--host=' + tools.config['db_host'])
         if tools.config['db_port']:
-            cmd.append('--port=' + tools.config['db_port'])
+            cmd.append('--port=' + str(tools.config['db_port']))
         cmd.append(db_name)
 
         stdin, stdout = tools.exec_pg_command_pipe(*tuple(cmd))
@@ -192,12 +205,17 @@ class db(netsvc.Service):
             raise Exception, "Couldn't dump database"
         logger.notifyChannel("web-services", netsvc.LOG_INFO,
                 'DUMP DB: %s' % (db_name))
 
+        self._unset_pg_psw_env_var()
+
         return base64.encodestring(data)
 
     def restore(self, password, db_name, data):
         security.check_super(password)
         logger = netsvc.Logger()
+
+        self._set_pg_psw_env_var()
+
         if self.db_exist(db_name):
             logger.notifyChannel("web-services", netsvc.LOG_WARNING,
                     'RESTORE DB: %s already exists' % (db_name,))
@@ -218,7 +236,7 @@ class db(netsvc.Service):
         if tools.config['db_host']:
             cmd.append('--host=' + tools.config['db_host'])
         if tools.config['db_port']:
-            cmd.append('--port=' + tools.config['db_port'])
+            cmd.append('--port=' + str(tools.config['db_port']))
         cmd.append('--dbname=' + db_name)
         args2 = tuple(cmd)
 
@@ -238,6 +256,9 @@ class db(netsvc.Service):
             raise Exception, "Couldn't restore database"
         logger.notifyChannel("web-services", netsvc.LOG_INFO,
                 'RESTORE DB: %s' % (db_name))
+
+        self._unset_pg_psw_env_var()
+
         return True
 
     def rename(self, password, old_name, new_name):
@@ -505,27 +526,34 @@ GNU Public Licence.
             l.notifyChannel('migration', netsvc.LOG_ERROR, tb_s)
             raise
 
-    def get_server_environment(self,lang=False):
+    def get_server_environment(self):
         try:
-            if '.bzr' in os.listdir((os.getcwd()[0:-3])):
-                fp = open(os.path.join(os.getcwd()[0:-3],'.bzr/branch/last-revision'))
-                rev_no = fp.read()
-                fp.close()
-            else:
-                rev_no = 'Bazaar Not Installed !'
-        except:
-            rev_no = 'Bazaar Not Installed !'
-        if not lang:
-            lang = os.environ.get('LANG', '').split('.')[0]
-        environment = 'Environment_Information : \n' \
-                     'Operating System : %s\n' \
-                     'PlatForm : %s\n' \
-                     'Operating System Version : %s\n' \
-                     'Python Version : %s\n'\
-                     'Locale : %s\n' \
-                     'OpenERP-Server Version : %s\n'\
-                     'OpenERP-Server Last Revision ID : %s' \
-                     %(os.name,sys.platform,str(sys.version.split('\n')[1]),str(sys.version[0:5]), lang, release.version,rev_no)
+            rev_id = os.popen('bzr revision-info').read()
+        except Exception,e:
+            rev_id = 'Exception: %s\n' % (tools.ustr(e))
+
+        os_lang = '.'.join( [x for x in locale.getdefaultlocale() if x] )
+        if not os_lang:
+            os_lang = 'NOT SET'
+        environment = '\nEnvironment Information : \n' \
+                     'System : %s\n' \
+                     'OS Name : %s\n' \
+                     %(platform.platform(), platform.os.name)
+        if os.name == 'posix':
+            if platform.system() == 'Linux':
+                lsbinfo = os.popen('lsb_release -a').read()
+                environment += '%s'%(lsbinfo)
+            else:
+                environment += 'Your System is not lsb compliant\n'
+        environment += 'Operating System Release : %s\n' \
+                    'Operating System Version : %s\n' \
+                    'Operating System Architecture : %s\n' \
+                    'Operating System Locale : %s\n'\
+                    'Python Version : %s\n'\
+                    'OpenERP-Server Version : %s\n'\
+                    'Last revision No. & ID : %s'\
+                    %(platform.release(), platform.version(), platform.architecture()[0],
+                     os_lang, platform.python_version(),release.version,rev_id)
         return environment
 common()
 
@@ -660,7 +688,6 @@ class report_spool(netsvc.Service):
             tb = sys.exc_info()
             tb_s = "".join(traceback.format_exception(*tb))
             logger = netsvc.Logger()
-            logger.notifyChannel('web-services', netsvc.LOG_ERROR,common().get_server_environment(context.get('lang',False)))
             logger.notifyChannel('web-services', netsvc.LOG_ERROR,
                     'Exception: %s\n%s' % (str(exception), tb_s))
             self._reports[id]['exception'] = ExceptionWithTraceback(tools.exception_to_unicode(exception), tb)
@@ -98,7 +98,9 @@ def amount_to_text(number, currency):
     cents_number = int(number * 100) % 100
     cents_name = (cents_number > 1) and 'cents' or 'cent'
     cents = _100_to_text(cents_number)
-    cents = cents_number and '%s %s' % (cents, cents_name) or ''
+    cents = cents_number and '%s %s' % (cents.lower(), cents_name) or ''
+    if cents:
+        lacs += ' and %s' % (cents, )
     return lacs
 
 
@@ -883,7 +883,7 @@ def convert_xml_import(cr, module, xmlfile, idref=None, mode='init', noupdate=Fa
     except Exception, e:
         logger = netsvc.Logger()
         logger.notifyChannel('init', netsvc.LOG_ERROR, 'The XML file does not fit the required schema !')
-        logger.notifyChannel('init', netsvc.LOG_ERROR, relaxng.error_log.last_error)
+        logger.notifyChannel('init', netsvc.LOG_ERROR, tools.ustr(relaxng.error_log.last_error))
         raise
 
     if idref is None:
@@ -672,9 +672,9 @@ class cache(object):
         if time.time()-self.timeout > self.lasttime:
             self.lasttime = time.time()
             t = time.time()-self.timeout
-            for key in self.cache.keys():
-                if self.cache[key][1]<t:
+            old_keys = [key for key in self.cache if self.cache[key][1] < t]
+            for key in old_keys:
                 del self.cache[key]
 
         kwargs2 = self._unify_args(*args, **kwargs)
 
@@ -598,20 +598,28 @@ def trans_generate(lang, modules, dbname=None):
     modobj = pool.get('ir.module.module')
     installed_modids = modobj.search(cr, uid, [('state', '=', 'installed')])
     installed_modules = map(lambda m: m['name'], modobj.read(cr, uid, installed_modids, ['name']))
 
-    for root, dirs, files in tools.osutil.walksymlinks(tools.config['root_path']):
-        for fname in fnmatch.filter(files, '*.py'):
-            fabsolutepath = join(root, fname)
-            frelativepath = fabsolutepath[len(tools.config['root_path'])+1:]
-            module = get_module_from_path(frelativepath)
-            is_mod_installed = module in installed_modules
-            if (('all' in modules) or (module in modules)) and is_mod_installed:
-                code_string = tools.file_open(fabsolutepath, subdir='').read()
-                iter = re.finditer(
-                    '[^a-zA-Z0-9_]_\([\s]*["\'](.+?)["\'][\s]*\)',
-                    code_string, re.M)
-                for i in iter:
-                    push_translation(module, 'code', frelativepath, 0, encode(i.group(1)))
+    if tools.config['root_path'] in tools.config['addons_path'] :
+        path_list = [tools.config['root_path']]
+    else :
+        path_list = [tools.config['root_path'],tools.config['addons_path']]
+
+    for path in path_list:
+        for root, dirs, files in tools.osutil.walksymlinks(path):
+            for fname in fnmatch.filter(files, '*.py'):
+                fabsolutepath = join(root, fname)
+                frelativepath = fabsolutepath[len(path):]
+                module = get_module_from_path(frelativepath)
+                is_mod_installed = module in installed_modules
+                if (('all' in modules) or (module in modules)) and is_mod_installed:
+                    code_string = tools.file_open(fabsolutepath, subdir='').read()
+                    iter = re.finditer('[^a-zA-Z0-9_]_\([\s]*["\'](.+?)["\'][\s]*\)',
+                        code_string, re.S)
+
+                    if module in installed_modules :
+                        frelativepath =str("addons"+frelativepath)
+                    for i in iter:
+                        push_translation(module, 'code', frelativepath, 0, encode(i.group(1)))
 
 
     out = [["module","type","name","res_id","src","value"]] # header
@@ -52,7 +52,7 @@ class workflow_service(netsvc.Service):
         cr.execute('select instance_id from wkf_triggers where res_id=%s and model=%s', (res_id,res_type))
         res = cr.fetchall()
         for (instance_id,) in res:
-            cr.execute('select uid,res_type,res_id from wkf_instance where id=%s', (instance_id,))
+            cr.execute('select %s,res_type,res_id from wkf_instance where id=%s', (uid, instance_id,))
             ident = cr.fetchone()
             instance.update(cr, instance_id, ident)
 

setup.py (32 changed lines)

@@ -35,8 +35,10 @@ import glob
 from distutils.core import setup, Command
 from distutils.command.install import install
 
+has_py2exe = False
 if os.name == 'nt':
     import py2exe
+    has_py2exe = True
 
 sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), "bin"))
 
@@ -174,6 +176,7 @@ options = {
     "py2exe": {
         "compressed": 1,
         "optimize": 2,
+        "dist_dir": 'dist',
         "packages": ["lxml", "lxml.builder", "lxml._elementpath", "lxml.etree",
             "lxml.objectify", "decimal", "xml", "xml.dom", "xml.xpath",
             "encodings","mx.DateTime","wizard","pychart","PIL", "pyparsing",
@@ -220,7 +223,30 @@ setup(name = name,
       options = options,
 )
 
-#
-# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
+if has_py2exe:
+    # Sometime between pytz-2008a and pytz-2008i common_timezones started to
+    # include only names of zones with a corresponding data file in zoneinfo.
+    # pytz installs the zoneinfo directory tree in the same directory
+    # as the pytz/__init__.py file. These data files are loaded using
+    # pkg_resources.resource_stream. py2exe does not copy this to library.zip so
+    # resource_stream can't find the files and common_timezones is empty when
+    # read in the py2exe executable.
+    # This manually copies zoneinfo into the zip. See also
+    # http://code.google.com/p/googletransitdatafeed/issues/detail?id=121
+    import pytz
+    import zipfile
+    # Make sure the layout of pytz hasn't changed
+    assert (pytz.__file__.endswith('__init__.pyc') or
+            pytz.__file__.endswith('__init__.py')), pytz.__file__
+    zoneinfo_dir = os.path.join(os.path.dirname(pytz.__file__), 'zoneinfo')
+    # '..\\Lib\\pytz\\__init__.py' -> '..\\Lib'
+    disk_basedir = os.path.dirname(os.path.dirname(pytz.__file__))
+    zipfile_path = os.path.join(options['py2exe']['dist_dir'], 'library.zip')
+    z = zipfile.ZipFile(zipfile_path, 'a')
+    for absdir, directories, filenames in os.walk(zoneinfo_dir):
+        assert absdir.startswith(disk_basedir), (absdir, disk_basedir)
+        zip_dir = absdir[len(disk_basedir):]
+        for f in filenames:
+            z.write(os.path.join(absdir, f), os.path.join(zip_dir, f))
+    z.close()
 