*modified the -C option to enable partial import

bzr revid: qdp@tinyerp.com-20080819121345-up2us3o4o1g4v8us
This commit is contained in:
qdp 2008-08-19 14:13:45 +02:00
parent 8612ed6da6
commit 9c91996f67
2 changed files with 14 additions and 4 deletions

View File

@ -58,7 +58,7 @@ class configmanager(object):
'addons_path': None,
'root_path': None,
'debug_mode': False,
'commit_mode': False,
'import_partial': "",
'pidfile': None,
'logfile': None,
'secure': False,
@ -114,7 +114,7 @@ class configmanager(object):
group.add_option("--db_host", dest="db_host", help="specify the database host")
group.add_option("--db_port", dest="db_port", help="specify the database port")
group.add_option("--db_maxconn", dest="db_maxconn", default='64', help="specify the the maximum number of physical connections to posgresql")
group.add_option("-C", "--commit-mode", dest="commit_mode", action="store_true", help="Several commit during one file importation. Use this for big data importation.", default=False)
group.add_option("-C", "--commit-mode", dest="import_partial", help="Several commit during one file importation. Use this for big data importation. Provide a filename to store intermediate state.", default=False)
parser.add_option_group(group)
group = optparse.OptionGroup(parser, "Internationalisation options",
@ -153,7 +153,7 @@ class configmanager(object):
self.options['pidfile'] = False
for arg in ('interface', 'port', 'db_name', 'db_user', 'db_password', 'db_host',
'db_port', 'logfile', 'pidfile', 'secure', 'smtp_ssl', 'email_from', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy', 'netinterface', 'netport', 'db_maxconn', 'commit_mode', 'addons_path'):
'db_port', 'logfile', 'pidfile', 'secure', 'smtp_ssl', 'email_from', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy', 'netinterface', 'netport', 'db_maxconn', 'import_partial', 'addons_path'):
if getattr(opt, arg):
self.options[arg] = getattr(opt, arg)

View File

@ -657,7 +657,7 @@ form: module.record_id""" % (xml_id,)
id = self.pool.get('ir.model.data')._update(cr, self.uid, rec_model, self.module, res, rec_id or False, not self.isnoupdate(data_node), noupdate=self.isnoupdate(data_node), mode=self.mode )
if rec_id:
self.idref[rec_id] = int(id)
if config.get('commit_mode', False):
if config.get('import_partial', False):
cr.commit()
return rec_model, id
@ -724,6 +724,12 @@ def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init',
pool = pooler.get_pool(cr.dbname)
import pickle
if config.get('import_partial'):
data = pickle.load(config.get('import_partial'))
if fname in data:
if not data[fname]:
return
input = StringIO.StringIO(csvcontent)
reader = csv.reader(input, quotechar='"', delimiter=',')
fields = reader.next()
@ -738,6 +744,10 @@ def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init',
continue
datas.append( map(lambda x:x.decode('utf8').encode('utf8'), line))
pool.get(model).import_data(cr, uid, fields, datas,mode, module,noupdate)
if config.get('import_partial'):
data = pickle.load(config.get('import_partial'))
data[fname] = 0
pickle.save(config.get('import_partial'), data)
#
# xml import/export