# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import base64
import os
import urlparse

from osv import osv, fields
from psycopg2 import Binary
import pooler
import netsvc
#import StringIO
#from tools import config
import tools
from tools.translate import _
import nodes

# Root of the on-disk document filestore; the 'document_path' option in the
# server configuration overrides the default '<root_path>/filestore'.
DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config['root_path'], 'filestore'))
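
# A quick sketch of how the path resolves (values are illustrative only,
# assuming a root_path of '/opt/openerp' and a database named 'prod'):
#
#   DMS_ROOT_PATH                  -> '/opt/openerp/filestore'
#   self._get_filestore(cr) below  -> '/opt/openerp/filestore/prod'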


class document_file(osv.osv):
    _inherit = 'ir.attachment'
    _rec_name = 'datas_fname'

    def _get_filestore(self, cr):
        return os.path.join(DMS_ROOT_PATH, cr.dbname)

    def _data_get(self, cr, uid, ids, name, arg, context):
        if context is None:
            context = {}
        fbrl = self.browse(cr, uid, ids, context=context)
        nctx = nodes.get_node_context(cr, uid, context)
        result = {}
        bin_size = context.get('bin_size', False)
        for fbro in fbrl:
            fnode = nodes.node_file(None, None, nctx, fbro)
            if not bin_size:
                data = fnode.get_data(cr, fbro)
                result[fbro.id] = base64.encodestring(data or '')
            else:
                result[fbro.id] = fnode.get_data_len(cr, fbro)
        return result

    #
    # This code can be improved
    #
    def _data_set(self, cr, uid, id, name, value, arg, context):
        if not value:
            return True
        fbro = self.browse(cr, uid, id, context=context)
        nctx = nodes.get_node_context(cr, uid, context)
        fnode = nodes.node_file(None, None, nctx, fbro)
        res = fnode.set_data(cr, base64.decodestring(value), fbro)
        return res
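
    # Illustrative round trip for the 'datas' function field defined below
    # (hypothetical values, not part of the original code):
    #
    #   raw = open('/tmp/report.pdf', 'rb').read()
    #   att_id = self.create(cr, uid, {'name': 'report.pdf',
    #                                  'datas': base64.encodestring(raw)})
    #   # _data_set() decodes the value and hands it to the storage node;
    #   # _data_get() reads it back and re-encodes it:
    #   self.read(cr, uid, [att_id], ['datas'])[0]['datas']
    #
    # With context={'bin_size': True}, reading 'datas' returns the content
    # length (via get_data_len) instead of the content itself.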

    _columns = {
        # Columns from ir.attachment:
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'res_model': fields.char('Attached Model', size=64), # readonly?
        'res_id': fields.integer('Attached ID'),

        # If ir.attachment contained any data before document is installed,
        # preserve the data, don't drop the column!
        'db_datas': fields.binary('Data', oldname='datas'),
        'datas': fields.function(_data_get, method=True, fnct_inv=_data_set, string='File Content', type="binary", nodrop=True),

        # Fields of document:
        'user_id': fields.many2one('res.users', 'Owner', select=1),
        'group_ids': fields.many2many('res.groups', 'document_group_rel', 'item_id', 'group_id', 'Groups'),
        # The directory id is now mandatory; it can still be computed automatically.
        'parent_id': fields.many2one('document.directory', 'Directory', select=1),
        'index_content': fields.text('Indexed Content'),
        'partner_id': fields.many2one('res.partner', 'Partner', select=1),
        'company_id': fields.many2one('res.company', 'Company'),
        'file_size': fields.integer('File Size', required=True),
        'file_type': fields.char('Content Type', size=128),

        # Fields used for file storage:
        'store_fname': fields.char('Stored Filename', size=200),
    }

    def __get_def_directory(self, cr, uid, context=None):
        dirobj = self.pool.get('document.directory')
        return dirobj._get_root_directory(cr, uid, context)

    _defaults = {
        'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'ir.attachment', context=c),
        'user_id': lambda self, cr, uid, ctx: uid,
        'file_size': lambda self, cr, uid, ctx: 0,
        'parent_id': __get_def_directory,
    }
    _sql_constraints = [
        ('filename_uniq', 'unique (name,parent_id,res_id,res_model)', 'The file name must be unique !')
    ]

    def _check_duplication(self, cr, uid, vals, ids=None, op='create'):
        if ids is None:
            ids = []
        name = vals.get('name', False)
        parent_id = vals.get('parent_id', False)
        res_model = vals.get('res_model', False)
        res_id = vals.get('res_id', 0)
        if op == 'write':
            for file in self.browse(cr, uid, ids):
                if not name:
                    name = file.name
                if not parent_id:
                    parent_id = file.parent_id and file.parent_id.id or False
                if not res_model:
                    res_model = file.res_model or False
                if not res_id:
                    res_id = file.res_id or 0
                res = self.search(cr, uid, [('id', '<>', file.id), ('name', '=', name), ('parent_id', '=', parent_id), ('res_model', '=', res_model), ('res_id', '=', res_id)])
                if len(res):
                    return False
        if op == 'create':
            res = self.search(cr, uid, [('name', '=', name), ('parent_id', '=', parent_id), ('res_id', '=', res_id), ('res_model', '=', res_model)])
            if len(res):
                return False
        return True
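
    # Example of the duplication rule (hypothetical ids): if directory 42
    # already holds a file named 'a.txt' that is not attached to any record,
    #
    #   self._check_duplication(cr, uid, {'name': 'a.txt', 'parent_id': 42,
    #                                     'res_model': False, 'res_id': 0})
    #
    # returns False, and create()/write() below raise the
    # 'File name must be unique!' error.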

    def copy(self, cr, uid, id, default=None, context=None):
        if not default:
            default = {}
        if 'name' not in default:
            name = self.read(cr, uid, [id])[0]['name']
            default.update({'name': name + " (copy)"})
        return super(document_file, self).copy(cr, uid, id, default, context)

    def write(self, cr, uid, ids, vals, context=None):
        if not isinstance(ids, list):
            ids = [ids]
        res = self.search(cr, uid, [('id', 'in', ids)])
        if not len(res):
            return False
        if not self._check_duplication(cr, uid, vals, ids, 'write'):
            raise osv.except_osv(_('ValidateError'), _('File name must be unique!'))
        result = super(document_file, self).write(cr, uid, ids, vals, context=context)
        cr.commit()
        return result

    def create(self, cr, uid, vals, context=None):
        if not context:
            context = {}
        vals['parent_id'] = context.get('parent_id', False) or vals.get('parent_id', False)
        if not vals['parent_id']:
            vals['parent_id'] = self.pool.get('document.directory')._get_root_directory(cr, uid, context)
        if not vals.get('res_id', False) and context.get('default_res_id', False):
            vals['res_id'] = context.get('default_res_id', False)
        if not vals.get('res_model', False) and context.get('default_res_model', False):
            vals['res_model'] = context.get('default_res_model', False)
        if vals.get('res_id', False) and vals.get('res_model', False):
            # Derive partner_id from the attached record: the record itself
            # if it is a partner, otherwise its address or partner fields.
            obj_model = self.pool.get(vals['res_model'])
            result = obj_model.read(cr, uid, [vals['res_id']], ['name', 'partner_id', 'address_id'], context=context)
            if len(result):
                obj = result[0]
                if obj_model._name == 'res.partner':
                    vals['partner_id'] = obj['id']
                elif obj.get('address_id', False):
                    if isinstance(obj['address_id'], (tuple, list)):
                        address_id = obj['address_id'][0]
                    else:
                        address_id = obj['address_id']
                    address = self.pool.get('res.partner.address').read(cr, uid, [address_id], context=context)
                    if len(address) and address[0]['partner_id']:
                        vals['partner_id'] = address[0]['partner_id'][0]
                elif obj.get('partner_id', False):
                    if isinstance(obj['partner_id'], (tuple, list)):
                        vals['partner_id'] = obj['partner_id'][0]
                    else:
                        vals['partner_id'] = obj['partner_id']

        datas = None
        if vals.get('link', False):
            import urllib
            datas = base64.encodestring(urllib.urlopen(vals['link']).read())
        else:
            datas = vals.get('datas', False)
        # Note: this is the length of the base64-encoded payload, not the
        # size of the decoded file.
        vals['file_size'] = datas and len(datas) or 0
        if not self._check_duplication(cr, uid, vals):
            raise osv.except_osv(_('ValidateError'), _('File name must be unique!'))
        result = super(document_file, self).create(cr, uid, vals, context)
        cr.commit()
        return result

    def unlink(self, cr, uid, ids, context=None):
        stor = self.pool.get('document.storage')
        unres = []
        # We have to do the unlink in 2 stages: prepare a list of actual
        # files to be unlinked, update the db (safer to do first, can be
        # rolled back) and then unlink the files. The list wouldn't exist
        # after we discard the objects.
        for f in self.browse(cr, uid, ids, context):
            # TODO: update the node cache
            r = stor.prepare_unlink(cr, uid, f.parent_id.storage_id, f)
            if r:
                unres.append(r)
        res = super(document_file, self).unlink(cr, uid, ids, context)
        stor.do_unlink(cr, uid, unres)
        return res

document_file()
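
# Usage sketch (assumes the 'document' module is installed; the model name,
# ids and variables below are hypothetical):
#
#   att_obj = pooler.get_pool(cr.dbname).get('ir.attachment')
#   att_id = att_obj.create(cr, uid, {
#       'name': 'invoice.pdf',
#       'datas': base64.encodestring(pdf_bytes),
#       'res_model': 'account.invoice',
#       'res_id': invoice_id,
#   }, context={})
#
# parent_id falls back to the root document directory and partner_id is
# derived from the attached record, as implemented in create() above.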