document partial refactoring and cleanups
bzr revid: al@openerp.com-20121217011958-70esu8d6q54fu0zd
This commit is contained in:
parent
0adc138bdc
commit
76ce1a2972
|
@ -21,10 +21,6 @@
|
|||
|
||||
import content_index
|
||||
import std_index
|
||||
import document_storage
|
||||
import document_directory
|
||||
import directory_content
|
||||
import directory_report
|
||||
import document
|
||||
import report
|
||||
import wizard
|
||||
|
|
|
@ -37,7 +37,7 @@ This is a complete document management system.
|
|||
""",
|
||||
'author': 'OpenERP SA',
|
||||
'website': 'http://www.openerp.com',
|
||||
'depends': ['process','board', 'knowledge'],
|
||||
'depends': ['knowledge'],
|
||||
'data': [
|
||||
'security/document_security.xml',
|
||||
'document_view.xml',
|
||||
|
@ -45,7 +45,6 @@ This is a complete document management system.
|
|||
'wizard/document_configuration_view.xml',
|
||||
'security/ir.model.access.csv',
|
||||
'report/document_report_view.xml',
|
||||
'board_document_view.xml',
|
||||
],
|
||||
'demo': [ 'document_demo.xml' ],
|
||||
'test': ['test/document_test2.yml'],
|
||||
|
|
|
@ -1,124 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# OpenERP, Open Source Management Solution
|
||||
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
from osv import osv, fields
|
||||
|
||||
import netsvc
|
||||
# import os
|
||||
import nodes
|
||||
# import StringIO
|
||||
|
||||
class document_directory_content_type(osv.osv):
|
||||
_name = 'document.directory.content.type'
|
||||
_description = 'Directory Content Type'
|
||||
_columns = {
|
||||
'name': fields.char('Content Type', size=64, required=True),
|
||||
'code': fields.char('Extension', size=4),
|
||||
'active': fields.boolean('Active'),
|
||||
'mimetype': fields.char('Mime Type',size=32)
|
||||
}
|
||||
_defaults = {
|
||||
'active': lambda *args: 1
|
||||
}
|
||||
document_directory_content_type()
|
||||
|
||||
class document_directory_content(osv.osv):
|
||||
_name = 'document.directory.content'
|
||||
_description = 'Directory Content'
|
||||
_order = "sequence"
|
||||
def _extension_get(self, cr, uid, context=None):
|
||||
cr.execute('select code,name from document_directory_content_type where active')
|
||||
res = cr.fetchall()
|
||||
return res
|
||||
_columns = {
|
||||
'name': fields.char('Content Name', size=64, required=True),
|
||||
'sequence': fields.integer('Sequence', size=16),
|
||||
'prefix': fields.char('Prefix', size=16),
|
||||
'suffix': fields.char('Suffix', size=16),
|
||||
'report_id': fields.many2one('ir.actions.report.xml', 'Report'),
|
||||
'extension': fields.selection(_extension_get, 'Document Type', required=True, size=4),
|
||||
'include_name': fields.boolean('Include Record Name',
|
||||
help="Check this field if you want that the name of the file to contain the record name." \
|
||||
"\nIf set, the directory will have to be a resource one."),
|
||||
'directory_id': fields.many2one('document.directory', 'Directory'),
|
||||
}
|
||||
_defaults = {
|
||||
'extension': lambda *args: '.pdf',
|
||||
'sequence': lambda *args: 1,
|
||||
'include_name': lambda *args: 1,
|
||||
}
|
||||
|
||||
def _file_get(self, cr, node, nodename, content, context=None):
|
||||
""" return the nodes of a <node> parent having a <content> content
|
||||
The return value MUST be false or a list of node_class objects.
|
||||
"""
|
||||
|
||||
# TODO: respect the context!
|
||||
model = node.res_model
|
||||
if content.include_name and not model:
|
||||
return False
|
||||
|
||||
res2 = []
|
||||
tname = ''
|
||||
if content.include_name:
|
||||
content_name = node.displayname or ''
|
||||
# obj = node.context._dirobj.pool.get(model)
|
||||
if content_name:
|
||||
tname = (content.prefix or '') + content_name + (content.suffix or '') + (content.extension or '')
|
||||
else:
|
||||
tname = (content.prefix or '') + (content.suffix or '') + (content.extension or '')
|
||||
if tname.find('/'):
|
||||
tname=tname.replace('/', '_')
|
||||
act_id = False
|
||||
if 'dctx_res_id' in node.dctx:
|
||||
act_id = node.dctx['dctx_res_id']
|
||||
elif hasattr(node, 'res_id'):
|
||||
act_id = node.res_id
|
||||
else:
|
||||
act_id = node.context.context.get('res_id',False)
|
||||
if not nodename:
|
||||
n = nodes.node_content(tname, node, node.context,content, act_id=act_id)
|
||||
res2.append( n)
|
||||
else:
|
||||
if nodename == tname:
|
||||
n = nodes.node_content(tname, node, node.context,content, act_id=act_id)
|
||||
n.fill_fields(cr)
|
||||
res2.append(n)
|
||||
return res2
|
||||
|
||||
def process_write(self, cr, uid, node, data, context=None):
|
||||
if node.extension != '.pdf':
|
||||
raise Exception("Invalid content: %s" % node.extension)
|
||||
return True
|
||||
|
||||
def process_read(self, cr, uid, node, context=None):
|
||||
if node.extension != '.pdf':
|
||||
raise Exception("Invalid content: %s" % node.extension)
|
||||
report = self.pool.get('ir.actions.report.xml').browse(cr, uid, node.report_id, context=context)
|
||||
srv = netsvc.Service._services['report.'+report.report_name]
|
||||
ctx = node.context.context.copy()
|
||||
ctx.update(node.dctx)
|
||||
pdf,pdftype = srv.create(cr, uid, [node.act_id,], {}, context=ctx)
|
||||
return pdf
|
||||
document_directory_content()
|
||||
|
||||
#eof
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
|
@ -1,64 +0,0 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# OpenERP, Open Source Management Solution
|
||||
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
|
||||
# $Id$
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
from osv import osv, fields
|
||||
|
||||
class ir_action_report_xml(osv.osv):
|
||||
_name="ir.actions.report.xml"
|
||||
_inherit ="ir.actions.report.xml"
|
||||
|
||||
def _model_get(self, cr, uid, ids, name, arg, context=None):
|
||||
res = {}
|
||||
model_pool = self.pool.get('ir.model')
|
||||
for data in self.read(cr, uid, ids, ['model']):
|
||||
model = data.get('model',False)
|
||||
if model:
|
||||
model_id =model_pool.search(cr, uid, [('model','=',model)])
|
||||
if model_id:
|
||||
res[data.get('id')] = model_id[0]
|
||||
else:
|
||||
res[data.get('id')] = False
|
||||
return res
|
||||
|
||||
def _model_search(self, cr, uid, obj, name, args, context=None):
|
||||
if not len(args):
|
||||
return []
|
||||
assert len(args) == 1 and args[0][1] == '=', 'expression is not what we expect: %r' % args
|
||||
model_id= args[0][2]
|
||||
if not model_id:
|
||||
# a deviation from standard behavior: when searching model_id = False
|
||||
# we return *all* reports, not just ones with empty model.
|
||||
# One reason is that 'model' is a required field so far
|
||||
return []
|
||||
model = self.pool.get('ir.model').read(cr, uid, [model_id])[0]['model']
|
||||
report_id = self.search(cr, uid, [('model','=',model)])
|
||||
if not report_id:
|
||||
return [('id','=','0')]
|
||||
return [('id','in',report_id)]
|
||||
|
||||
_columns={
|
||||
'model_id' : fields.function(_model_get, fnct_search=_model_search, string='Model Id'),
|
||||
}
|
||||
|
||||
ir_action_report_xml()
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
File diff suppressed because it is too large
Load Diff
|
@ -1,17 +1,9 @@
|
|||
<?xml version="1.0"?>
|
||||
<openerp>
|
||||
<data noupdate="1">
|
||||
|
||||
<record model="document.storage" id="storage_default">
|
||||
<field name="name">Database storage</field>
|
||||
<field name="type">db</field>
|
||||
<field name="user_id" ref="base.user_root"/>
|
||||
</record>
|
||||
|
||||
<record model="document.directory" id="dir_root">
|
||||
<field name="name">Documents</field>
|
||||
<field name="user_id" eval="False"/>
|
||||
<field name="storage_id" ref="storage_default"/>
|
||||
<field name="ressource_id">0</field>
|
||||
</record>
|
||||
|
||||
|
@ -20,7 +12,6 @@
|
|||
<field name="parent_id" ref="dir_root"/>
|
||||
<field name="user_id" ref="base.user_root"/>
|
||||
<field name="ressource_id">0</field>
|
||||
|
||||
</record>
|
||||
</data>
|
||||
|
||||
|
@ -101,8 +92,5 @@
|
|||
<field name="ressource_id">0</field>
|
||||
</record>
|
||||
|
||||
<!-- After we have setup the root directory, migrate the attachments
|
||||
to point to that. -->
|
||||
<function model="ir.attachment" name="_attach_parent_id"/>
|
||||
</data>
|
||||
</openerp>
|
||||
|
|
|
@ -1,308 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# OpenERP, Open Source Management Solution
|
||||
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
|
||||
from osv import osv, fields
|
||||
from osv.orm import except_orm
|
||||
import logging
|
||||
import nodes
|
||||
from tools.translate import _
|
||||
_logger = logging.getLogger(__name__)
|
||||
class document_directory(osv.osv):
|
||||
_name = 'document.directory'
|
||||
_description = 'Directory'
|
||||
_order = 'name'
|
||||
_columns = {
|
||||
'name': fields.char('Name', size=64, required=True, select=1),
|
||||
'write_date': fields.datetime('Date Modified', readonly=True),
|
||||
'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
|
||||
'create_date': fields.datetime('Date Created', readonly=True),
|
||||
'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
|
||||
'domain': fields.char('Domain', size=128, help="Use a domain if you want to apply an automatic filter on visible resources."),
|
||||
'user_id': fields.many2one('res.users', 'Owner'),
|
||||
'storage_id': fields.many2one('document.storage', 'Storage', change_default=True),
|
||||
'group_ids': fields.many2many('res.groups', 'document_directory_group_rel', 'item_id', 'group_id', 'Groups'),
|
||||
'parent_id': fields.many2one('document.directory', 'Parent Directory', select=1, change_default=True),
|
||||
'child_ids': fields.one2many('document.directory', 'parent_id', 'Children'),
|
||||
'file_ids': fields.one2many('ir.attachment', 'parent_id', 'Files'),
|
||||
'content_ids': fields.one2many('document.directory.content', 'directory_id', 'Virtual Files'),
|
||||
'type': fields.selection([
|
||||
('directory','Static Directory'),
|
||||
('ressource','Folders per resource'),
|
||||
],
|
||||
'Type', required=True, select=1, change_default=True,
|
||||
help="Each directory can either have the type Static or be linked to another resource. A static directory, as with Operating Systems, is the classic directory that can contain a set of files. The directories linked to systems resources automatically possess sub-directories for each of resource types defined in the parent directory."),
|
||||
'ressource_type_id': fields.many2one('ir.model', 'Resource model', change_default=True,
|
||||
help="Select an object here and there will be one folder per record of that resource."),
|
||||
'resource_field': fields.many2one('ir.model.fields', 'Name field', help='Field to be used as name on resource directories. If empty, the "name" will be used.'),
|
||||
'resource_find_all': fields.boolean('Find all resources',
|
||||
help="If true, all attachments that match this resource will " \
|
||||
" be located. If false, only ones that have this as parent." ),
|
||||
'ressource_parent_type_id': fields.many2one('ir.model', 'Parent Model', change_default=True,
|
||||
help="If you put an object here, this directory template will appear bellow all of these objects. " \
|
||||
"Such directories are \"attached\" to the specific model or record, just like attachments. " \
|
||||
"Don't put a parent directory if you select a parent model."),
|
||||
'ressource_id': fields.integer('Resource ID',
|
||||
help="Along with Parent Model, this ID attaches this folder to a specific record of Parent Model."),
|
||||
'ressource_tree': fields.boolean('Tree Structure',
|
||||
help="Check this if you want to use the same tree structure as the object selected in the system."),
|
||||
'dctx_ids': fields.one2many('document.directory.dctx', 'dir_id', 'Context fields'),
|
||||
'company_id': fields.many2one('res.company', 'Company', change_default=True),
|
||||
}
|
||||
|
||||
|
||||
def _get_root_directory(self, cr, uid, context=None):
|
||||
objid=self.pool.get('ir.model.data')
|
||||
try:
|
||||
mid = objid._get_id(cr, uid, 'document', 'dir_root')
|
||||
if not mid:
|
||||
return False
|
||||
root_id = objid.read(cr, uid, mid, ['res_id'])['res_id']
|
||||
return root_id
|
||||
except Exception:
|
||||
return False
|
||||
return objid.browse(cr, uid, mid, context=context).res_id
|
||||
|
||||
def _get_def_storage(self, cr, uid, context=None):
|
||||
if context and context.has_key('default_parent_id'):
|
||||
# Use the same storage as the parent..
|
||||
diro = self.browse(cr, uid, context['default_parent_id'])
|
||||
if diro.storage_id:
|
||||
return diro.storage_id.id
|
||||
objid=self.pool.get('ir.model.data')
|
||||
try:
|
||||
mid = objid._get_id(cr, uid, 'document', 'storage_default')
|
||||
return objid.browse(cr, uid, mid, context=context).res_id
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
_defaults = {
|
||||
'company_id': lambda s,cr,uid,c: s.pool.get('res.company')._company_default_get(cr, uid, 'document.directory', context=c),
|
||||
'user_id': lambda self,cr,uid,ctx: uid,
|
||||
'domain': '[]',
|
||||
'type': 'directory',
|
||||
'ressource_id': 0,
|
||||
'storage_id': _get_def_storage, # Still, it is bad practice to set it everywhere.
|
||||
'resource_find_all': True,
|
||||
}
|
||||
_sql_constraints = [
|
||||
('dirname_uniq', 'unique (name,parent_id,ressource_id,ressource_parent_type_id)', 'The directory name must be unique !'),
|
||||
('no_selfparent', 'check(parent_id <> id)', 'Directory cannot be parent of itself!'),
|
||||
('dir_parented', 'check(parent_id IS NOT NULL OR storage_id IS NOT NULL)', 'Directory must have a parent or a storage.')
|
||||
]
|
||||
def name_get(self, cr, uid, ids, context=None):
|
||||
res = []
|
||||
if not self.search(cr,uid,[('id','in',ids)]):
|
||||
ids = []
|
||||
for d in self.browse(cr, uid, ids, context=context):
|
||||
s = ''
|
||||
d2 = d
|
||||
while d2 and d2.parent_id:
|
||||
s = d2.name + (s and ('/' + s) or '')
|
||||
d2 = d2.parent_id
|
||||
res.append((d.id, s or d.name))
|
||||
return res
|
||||
|
||||
def get_full_path(self, cr, uid, dir_id, context=None):
|
||||
""" Return the full path to this directory, in a list, root first
|
||||
"""
|
||||
if isinstance(dir_id, (tuple, list)):
|
||||
assert len(dir_id) == 1
|
||||
dir_id = dir_id[0]
|
||||
|
||||
def _parent(dir_id, path):
|
||||
parent=self.browse(cr, uid, dir_id)
|
||||
if parent.parent_id and not parent.ressource_parent_type_id:
|
||||
_parent(parent.parent_id.id,path)
|
||||
path.append(parent.name)
|
||||
else:
|
||||
path.append(parent.name)
|
||||
return path
|
||||
path = []
|
||||
_parent(dir_id, path)
|
||||
return path
|
||||
|
||||
def _check_recursion(self, cr, uid, ids, context=None):
|
||||
level = 100
|
||||
while len(ids):
|
||||
cr.execute('select distinct parent_id from document_directory where id in ('+','.join(map(str,ids))+')')
|
||||
ids = filter(None, map(lambda x:x[0], cr.fetchall()))
|
||||
if not level:
|
||||
return False
|
||||
level -= 1
|
||||
return True
|
||||
|
||||
_constraints = [
|
||||
(_check_recursion, 'Error! You cannot create recursive directories.', ['parent_id'])
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(document_directory, self).__init__(*args, **kwargs)
|
||||
|
||||
def onchange_content_id(self, cr, uid, ids, ressource_type_id):
|
||||
return {}
|
||||
|
||||
"""
|
||||
PRE:
|
||||
uri: of the form "Sales Order/SO001"
|
||||
PORT:
|
||||
uri
|
||||
object: the object.directory or object.directory.content
|
||||
object2: the other object linked (if object.directory.content)
|
||||
"""
|
||||
def get_object(self, cr, uid, uri, context=None):
|
||||
""" Return a node object for the given uri.
|
||||
This fn merely passes the call to node_context
|
||||
"""
|
||||
|
||||
return nodes.get_node_context(cr, uid, context).get_uri(cr, uri)
|
||||
|
||||
def get_node_class(self, cr, uid, ids, dbro=None, dynamic=False, context=None):
|
||||
"""Retrieve the class of nodes for this directory
|
||||
|
||||
This function can be overriden by inherited classes ;)
|
||||
@param dbro The browse object, if caller already has it
|
||||
"""
|
||||
if dbro is None:
|
||||
dbro = self.browse(cr, uid, ids, context=context)
|
||||
|
||||
if dynamic:
|
||||
return nodes.node_res_obj
|
||||
elif dbro.type == 'directory':
|
||||
return nodes.node_dir
|
||||
elif dbro.type == 'ressource':
|
||||
return nodes.node_res_dir
|
||||
else:
|
||||
raise ValueError("dir node for %s type.", dbro.type)
|
||||
|
||||
def _prepare_context(self, cr, uid, nctx, context=None):
|
||||
""" Fill nctx with properties for this database
|
||||
@param nctx instance of nodes.node_context, to be filled
|
||||
@param context ORM context (dict) for us
|
||||
|
||||
Note that this function is called *without* a list of ids,
|
||||
it should behave the same for the whole database (based on the
|
||||
ORM instance of document.directory).
|
||||
|
||||
Some databases may override this and attach properties to the
|
||||
node_context. See WebDAV, CalDAV.
|
||||
"""
|
||||
return
|
||||
|
||||
def get_dir_permissions(self, cr, uid, ids, context=None):
|
||||
"""Check what permission user 'uid' has on directory 'id'
|
||||
"""
|
||||
assert len(ids) == 1
|
||||
|
||||
res = 0
|
||||
for pperms in [('read', 5), ('write', 2), ('unlink', 8)]:
|
||||
try:
|
||||
self.check_access_rule(cr, uid, ids, pperms[0], context=context)
|
||||
res |= pperms[1]
|
||||
except except_orm:
|
||||
pass
|
||||
return res
|
||||
|
||||
def _locate_child(self, cr, uid, root_id, uri, nparent, ncontext):
|
||||
""" try to locate the node in uri,
|
||||
Return a tuple (node_dir, remaining_path)
|
||||
"""
|
||||
return (nodes.node_database(context=ncontext), uri)
|
||||
|
||||
def copy(self, cr, uid, id, default=None, context=None):
|
||||
if not default:
|
||||
default ={}
|
||||
name = self.read(cr, uid, [id])[0]['name']
|
||||
default.update(name=_("%s (copy)") % (name))
|
||||
return super(document_directory,self).copy(cr, uid, id, default, context=context)
|
||||
|
||||
def _check_duplication(self, cr, uid, vals, ids=None, op='create'):
|
||||
name=vals.get('name',False)
|
||||
parent_id=vals.get('parent_id',False)
|
||||
ressource_parent_type_id=vals.get('ressource_parent_type_id',False)
|
||||
ressource_id=vals.get('ressource_id',0)
|
||||
if op=='write':
|
||||
for directory in self.browse(cr, uid, ids):
|
||||
if not name:
|
||||
name=directory.name
|
||||
if not parent_id:
|
||||
parent_id=directory.parent_id and directory.parent_id.id or False
|
||||
# TODO fix algo
|
||||
if not ressource_parent_type_id:
|
||||
ressource_parent_type_id=directory.ressource_parent_type_id and directory.ressource_parent_type_id.id or False
|
||||
if not ressource_id:
|
||||
ressource_id=directory.ressource_id and directory.ressource_id or 0
|
||||
res=self.search(cr,uid,[('id','<>',directory.id),('name','=',name),('parent_id','=',parent_id),('ressource_parent_type_id','=',ressource_parent_type_id),('ressource_id','=',ressource_id)])
|
||||
if len(res):
|
||||
return False
|
||||
if op=='create':
|
||||
res=self.search(cr,uid,[('name','=',name),('parent_id','=',parent_id),('ressource_parent_type_id','=',ressource_parent_type_id),('ressource_id','=',ressource_id)])
|
||||
if len(res):
|
||||
return False
|
||||
return True
|
||||
def write(self, cr, uid, ids, vals, context=None):
|
||||
if not self._check_duplication(cr, uid, vals, ids, op='write'):
|
||||
raise osv.except_osv(_('ValidateError'), _('Directory name must be unique!'))
|
||||
return super(document_directory,self).write(cr, uid, ids, vals, context=context)
|
||||
|
||||
def create(self, cr, uid, vals, context=None):
|
||||
if not self._check_duplication(cr, uid, vals):
|
||||
raise osv.except_osv(_('ValidateError'), _('Directory name must be unique!'))
|
||||
newname = vals.get('name',False)
|
||||
if newname:
|
||||
for illeg in ('/', '@', '$', '#'):
|
||||
if illeg in newname:
|
||||
raise osv.except_osv(_('ValidateError'), _('Directory name contains special characters!'))
|
||||
return super(document_directory,self).create(cr, uid, vals, context)
|
||||
|
||||
# TODO def unlink(...
|
||||
|
||||
document_directory()
|
||||
|
||||
class document_directory_dctx(osv.osv):
|
||||
""" In order to evaluate dynamic folders, child items could have a limiting
|
||||
domain expression. For that, their parents will export a context where useful
|
||||
information will be passed on.
|
||||
If you define sth like "s_id" = "this.id" at a folder iterating over sales, its
|
||||
children could have a domain like [('sale_id', = ,dctx_s_id )]
|
||||
This system should be used recursively, that is, parent dynamic context will be
|
||||
appended to all children down the tree.
|
||||
"""
|
||||
_name = 'document.directory.dctx'
|
||||
_description = 'Directory Dynamic Context'
|
||||
_columns = {
|
||||
'dir_id': fields.many2one('document.directory', 'Directory', required=True, ondelete="cascade"),
|
||||
'field': fields.char('Field', size=20, required=True, select=1, help="The name of the field. Note that the prefix \"dctx_\" will be prepended to what is typed here."),
|
||||
'expr': fields.char('Expression', size=64, required=True, help="A python expression used to evaluate the field.\n" + \
|
||||
"You can use 'dir_id' for current dir, 'res_id', 'res_model' as a reference to the current record, in dynamic folders"),
|
||||
}
|
||||
|
||||
document_directory_dctx()
|
||||
|
||||
|
||||
class document_directory_node(osv.osv):
|
||||
_inherit = 'process.node'
|
||||
_columns = {
|
||||
'directory_id': fields.many2one('document.directory', 'Document directory', ondelete="set null"),
|
||||
}
|
||||
document_directory_node()
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
|
@ -1,278 +0,0 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
##############################################################################
|
||||
#
|
||||
# OpenERP, Open Source Management Solution
|
||||
#
|
||||
# Copyright (C) P. Christeas, 2009, all rights reserved
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
##############################################################################
|
||||
|
||||
from osv import osv, fields
|
||||
import os
|
||||
import tools
|
||||
import base64
|
||||
import errno
|
||||
import logging
|
||||
import shutil
|
||||
from StringIO import StringIO
|
||||
import psycopg2
|
||||
from tools.misc import ustr
|
||||
from tools.translate import _
|
||||
from osv.orm import except_orm
|
||||
import random
|
||||
import string
|
||||
import pooler
|
||||
import nodes
|
||||
from content_index import cntIndex
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
""" The algorithm of data storage
|
||||
|
||||
We have to consider 3 cases of data /retrieval/:
|
||||
Given (context,path) we need to access the file (aka. node).
|
||||
given (directory, context), we need one of its children (for listings, views)
|
||||
given (ir.attachment, context), we need its data and metadata (node).
|
||||
|
||||
For data /storage/ we have the cases:
|
||||
Have (ir.attachment, context), we modify the file (save, update, rename etc).
|
||||
Have (directory, context), we create a file.
|
||||
Have (path, context), we create or modify a file.
|
||||
|
||||
Note that in all above cases, we don't explicitly choose the storage media,
|
||||
but always require a context to be present.
|
||||
|
||||
Note that a node will not always have a corresponding ir.attachment. Dynamic
|
||||
nodes, for once, won't. Their metadata will be computed by the parent storage
|
||||
media + directory.
|
||||
|
||||
The algorithm says that in any of the above cases, our first goal is to locate
|
||||
the node for any combination of search criteria. It would be wise NOT to
|
||||
represent each node in the path (like node[/] + node[/dir1] + node[/dir1/dir2])
|
||||
but directly jump to the end node (like node[/dir1/dir2]) whenever possible.
|
||||
|
||||
We also contain all the parenting loop code in one function. This is intentional,
|
||||
because one day this will be optimized in the db (Pg 8.4).
|
||||
|
||||
"""
|
||||
|
||||
class nodefd_db(StringIO, nodes.node_descriptor):
|
||||
""" A descriptor to db data
|
||||
"""
|
||||
def __init__(self, parent, ira_browse, mode):
|
||||
nodes.node_descriptor.__init__(self, parent)
|
||||
self._size = 0L
|
||||
if mode.endswith('b'):
|
||||
mode = mode[:-1]
|
||||
|
||||
if mode in ('r', 'r+'):
|
||||
data = ira_browse.datas
|
||||
if data:
|
||||
self._size = len(data)
|
||||
StringIO.__init__(self, data)
|
||||
elif mode in ('w', 'w+'):
|
||||
StringIO.__init__(self, None)
|
||||
# at write, we start at 0 (= overwrite), but have the original
|
||||
# data available, in case of a seek()
|
||||
elif mode == 'a':
|
||||
StringIO.__init__(self, None)
|
||||
else:
|
||||
_logger.error("Incorrect mode %s is specified.", mode)
|
||||
raise IOError(errno.EINVAL, "Invalid file mode.")
|
||||
self.mode = mode
|
||||
|
||||
def size(self):
|
||||
return self._size
|
||||
|
||||
def close(self):
|
||||
# we now open a *separate* cursor, to update the data.
|
||||
# FIXME: this may be improved, for concurrency handling
|
||||
par = self._get_parent()
|
||||
# uid = par.context.uid
|
||||
cr = pooler.get_db(par.context.dbname).cursor()
|
||||
try:
|
||||
if self.mode in ('w', 'w+', 'r+'):
|
||||
data = self.getvalue()
|
||||
icont = ''
|
||||
mime = ''
|
||||
filename = par.path
|
||||
if isinstance(filename, (tuple, list)):
|
||||
filename = '/'.join(filename)
|
||||
|
||||
try:
|
||||
mime, icont = cntIndex.doIndex(data, filename=filename,
|
||||
content_type=None, realfname=None)
|
||||
except Exception:
|
||||
_logger.debug('Cannot index file:', exc_info=True)
|
||||
pass
|
||||
|
||||
try:
|
||||
icont_u = ustr(icont)
|
||||
except UnicodeError:
|
||||
icont_u = ''
|
||||
|
||||
out = psycopg2.Binary(data)
|
||||
cr.execute("UPDATE ir_attachment " \
|
||||
"SET db_datas = %s, file_size=%s, " \
|
||||
" index_content= %s, file_type=%s " \
|
||||
" WHERE id = %s",
|
||||
(out, len(data), icont_u, mime, par.file_id))
|
||||
elif self.mode == 'a':
|
||||
data = self.getvalue()
|
||||
out = psycopg2.Binary(data)
|
||||
cr.execute("UPDATE ir_attachment " \
|
||||
"SET db_datas = COALESCE(db_datas,'') || %s, " \
|
||||
" file_size = COALESCE(file_size, 0) + %s " \
|
||||
" WHERE id = %s",
|
||||
(out, len(data), par.file_id))
|
||||
cr.commit()
|
||||
except Exception:
|
||||
_logger.exception('Cannot update db file #%d for close.', par.file_id)
|
||||
raise
|
||||
finally:
|
||||
cr.close()
|
||||
StringIO.close(self)
|
||||
|
||||
class document_storage(osv.osv):
    """The primary object for data storage.

    Each instance of this object is a storage media, in which our application
    can store contents. The object here controls the behaviour of the storage
    media.

    The referring document.directory-ies will control the placement of data
    into the storage.

    It is a bad idea to have multiple document.storage objects pointing to
    the same tree of filesystem storage.
    """
    _name = 'document.storage'
    _description = 'Storage Media'

    _columns = {
        'name': fields.char('Name', size=64, required=True, select=1),
        'write_date': fields.datetime('Date Modified', readonly=True),
        'write_uid': fields.many2one('res.users', 'Last Modification User', readonly=True),
        'create_date': fields.datetime('Date Created', readonly=True),
        'create_uid': fields.many2one('res.users', 'Creator', readonly=True),
        'user_id': fields.many2one('res.users', 'Owner'),
        'group_ids': fields.many2many('res.groups', 'document_storage_group_rel', 'item_id', 'group_id', 'Groups'),
        'dir_ids': fields.one2many('document.directory', 'parent_id', 'Directories'),
        'readonly': fields.boolean('Read Only', help="If set, media is for reading only"),
    }

    _defaults = {
        # default owner is the user creating the record
        'user_id': lambda self, cr, uid, ctx: uid,
    }

    def get_data(self, cr, uid, id, file_node, context=None, fil_obj=None):
        """Retrieve the contents of some file_node having storage_id = id.

        :param file_node: node object carrying the ir.attachment id in file_id
        :param fil_obj: optionally, the browse object of the file
                        (ir.attachment), to avoid a second browse
        :return: the raw file content (base64-decoded), or '' when empty
        """
        if fil_obj:
            ira = fil_obj
        else:
            ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
        data = ira.datas
        # attachments store their payload base64-encoded in 'datas'
        if data:
            return data.decode('base64')
        return ''

    def get_file(self, cr, uid, id, file_node, mode, context=None):
        """Return a file-like object for the contents of some node.

        :param mode: file open mode ('r', 'w', 'a', ...), passed to nodefd_db
        """
        if context is None:
            context = {}
        ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)
        return nodefd_db(file_node, ira_browse=ira, mode=mode)

    def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None):
        """Store the data.

        This function MUST be used from an ir.attachment. It wouldn't make sense
        to store things persistently for other types (dynamic).

        :param data: raw bytes to store (will be base64-encoded in the db)
        :param fil_obj: optionally, the ir.attachment browse object
        :raises except_orm: when the metadata update phase fails
        """
        if fil_obj:
            ira = fil_obj
        else:
            ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context)

        # lazy %-formatting: let the logging module do the interpolation
        _logger.debug("Store data for ir.attachment #%d.", ira.id)
        filesize = len(data)
        self.pool.get('ir.attachment').write(cr, uid, [file_node.file_id],
                                             {'datas': data.encode('base64')},
                                             context=context)
        # 2nd phase: store the metadata
        try:
            icont = ''
            mime = ira.file_type or ""
            try:
                # best-effort content indexing; None: no on-disk filename here
                mime, icont = cntIndex.doIndex(data, ira.datas_fname, ira.file_type or None, None)
            except Exception:
                _logger.debug('Cannot index file.', exc_info=True)
            try:
                icont_u = ustr(icont)
            except UnicodeError:
                icont_u = ''
            # a hack: /assume/ that the calling write operation will not try
            # to write the fname and size, and update them in the db concurrently.
            # We cannot use a write() here, because we are already in one.
            cr.execute('UPDATE ir_attachment SET file_size = %s, index_content = %s, file_type = %s WHERE id = %s',
                       (filesize, icont_u, mime, file_node.file_id))
            file_node.content_length = filesize
            file_node.content_type = mime
            return True
        except Exception as e:
            # BUGFIX: was self._logger, but osv models carry no _logger
            # attribute -- use the module-level logger like the rest of this file.
            _logger.warning("Cannot save data.", exc_info=True)
            # should we really rollback once we have written the actual data?
            # at the db case (only), that rollback would be safe
            raise except_orm(_('Error at doc write!'), str(e))

    def prepare_unlink(self, cr, uid, storage_bo, fil_bo):
        """Before we unlink a file (fil_bo), prepare the list of real
        files that have to be removed, too.

        No-op for db storage: there is no real file behind the attachment.
        """
        pass

    def do_unlink(self, cr, uid, unres):
        """Remove the real files listed in unres. No-op for db storage."""
        return True

    def simple_rename(self, cr, uid, file_node, new_name, context=None):
        """A preparation for a file rename.

        It will not affect the database, but merely check and perhaps
        rename the realstore file.

        :return: the dict of values that can safely be stored in the db.
        """
        # nothing to do for a rename, allow to change the db field
        return {'name': new_name, 'datas_fname': new_name}

    def simple_move(self, cr, uid, file_node, ndir_bro, context=None):
        """A preparation for a file move.

        It will not affect the database, but merely check and perhaps
        move the realstore file.

        :param ndir_bro: a browse object of document.directory, where this
                         file should move to.
        :return: the dict of values that can safely be stored in the db.
        """
        return {'parent_id': ndir_bro.id}
|
||||
|
||||
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
|
@ -3,64 +3,6 @@
|
|||
<data>
|
||||
<menuitem name="Document Management" id="menu_document_management_configuration" parent="knowledge.menu_document_configuration" sequence="1"/>
|
||||
|
||||
<!-- Form view for document.storage ("Storage Media").
     NOTE(review): it references fields type/online/path that are not among
     the _columns visible in this commit's document_storage model; presumably
     they are defined elsewhere (or removed by this refactor) - confirm. -->
<record model="ir.ui.view" id="view_document_storage_form">
|
||||
<field name="name">document.storage</field>
|
||||
<field name="model">document.storage</field>
|
||||
<field name="arch" type="xml">
|
||||
<form string="Storage Media">
|
||||
<group col="4">
|
||||
<field name="name" colspan="4"/>
|
||||
<field name="type"/>
|
||||
<field name="user_id"/>
|
||||
<field name="online"/>
|
||||
<field name="readonly"/>
|
||||
</group>
|
||||
<!-- path only makes sense for non-database storage types -->
<group attrs="{'invisible':[('type','in',['db', 'db64'])]}">
|
||||
<field name="path"/>
|
||||
</group>
|
||||
</form>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<!-- List (tree) view for document.storage: name and type columns only. -->
<record model="ir.ui.view" id="view_document_storage_tree">
|
||||
<field name="name">document.storage</field>
|
||||
<field name="model">document.storage</field>
|
||||
<field name="arch" type="xml">
|
||||
<tree string="Storage Media" toolbar="1">
|
||||
<field name="name"/>
|
||||
<field name="type"/>
|
||||
</tree>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<!-- Search view for document.storage: free-text search on name,
     plus a single "Group By" option on the type field. -->
<record id="view_document_storage_filter" model="ir.ui.view">
|
||||
<field name="name">Search View: Document Storage</field>
|
||||
<field name="model">document.storage</field>
|
||||
<field name="arch" type="xml">
|
||||
<search string="Search Document Storage">
|
||||
<field name="name" string="Document Storage"/>
|
||||
<group expand="0" string="Group By...">
|
||||
<filter string="Type" icon="terp-stock_symbol-selection" domain="[]" context="{'group_by':'type'}"/>
|
||||
</group>
|
||||
</search>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<record model="ir.actions.act_window" id="action_document_storage_form">
|
||||
<field name="name">Storage Media</field>
|
||||
<field name="type">ir.actions.act_window</field>
|
||||
<field name="res_model">document.storage</field>
|
||||
<field name="view_type">form</field>
|
||||
<field name="view_mode">tree,form</field>
|
||||
<field name="search_view_id" ref="view_document_storage_filter"/>
|
||||
</record>
|
||||
<menuitem
|
||||
name="Storage Media"
|
||||
action="action_document_storage_form"
|
||||
id="menu_document_storage_media"
|
||||
groups="base.group_no_one"
|
||||
parent="menu_document_management_configuration"/>
|
||||
|
||||
<record model="ir.ui.view" id="view_document_directory_form">
|
||||
<field name="name">document.directory</field>
|
||||
<field name="model">document.directory</field>
|
||||
|
@ -69,7 +11,6 @@
|
|||
<group col="4">
|
||||
<field name="name"/>
|
||||
<field name="parent_id"/>
|
||||
<field name="storage_id"/>
|
||||
<field name="user_id"/>
|
||||
<field name="company_id" groups="base.group_multi_company" widget="selection"/>
|
||||
</group>
|
||||
|
@ -153,7 +94,6 @@
|
|||
<field name="type"/>
|
||||
<field name="user_id"/>
|
||||
<field name="company_id" groups="base.group_multi_company"/>
|
||||
<field name="storage_id"/>
|
||||
<field name="create_date"/>
|
||||
<field name="write_date"/>
|
||||
</tree>
|
||||
|
@ -161,25 +101,23 @@
|
|||
</record>
|
||||
|
||||
<record id="view_document_directory_filter" model="ir.ui.view">
|
||||
<field name="name">Search View: Document Directory</field>
|
||||
<field name="model">document.directory</field>
|
||||
<field name="arch" type="xml">
|
||||
<search string="Search Document Directory">
|
||||
<field name="name" string="Document Directory"/>
|
||||
<filter string="Static" domain="[('type','=','directory')]"/>
|
||||
<filter string="Resources" icon="terp-personal" domain="[('type','=','ressource')]"/>
|
||||
<field name="company_id" groups="base.group_multi_company"/>
|
||||
<field name="storage_id" />
|
||||
<field name="user_id" />
|
||||
<group expand="0" string="Group By...">
|
||||
<filter string="Owner" icon="terp-personal" domain="[]" context="{'group_by':'user_id'}"/>
|
||||
<filter string="Type" icon="terp-stock_symbol-selection" domain="[]" context="{'group_by':'type'}"/>
|
||||
<filter string="Storage" domain="[]" icon="terp-accessories-archiver" context="{'group_by':'storage_id'}"/>
|
||||
<filter string="Company" domain="[]" icon="terp-go-home" context="{'group_by':'company_id'}" groups="base.group_multi_company"/>
|
||||
</group>
|
||||
</search>
|
||||
</field>
|
||||
</record>
|
||||
<field name="name">Search View: Document Directory</field>
|
||||
<field name="model">document.directory</field>
|
||||
<field name="arch" type="xml">
|
||||
<search string="Search Document Directory">
|
||||
<field name="name" string="Document Directory"/>
|
||||
<filter string="Static" domain="[('type','=','directory')]"/>
|
||||
<filter string="Resources" icon="terp-personal" domain="[('type','=','ressource')]"/>
|
||||
<field name="company_id" groups="base.group_multi_company"/>
|
||||
<field name="user_id" />
|
||||
<group expand="0" string="Group By...">
|
||||
<filter string="Owner" icon="terp-personal" domain="[]" context="{'group_by':'user_id'}"/>
|
||||
<filter string="Type" icon="terp-stock_symbol-selection" domain="[]" context="{'group_by':'type'}"/>
|
||||
<filter string="Company" domain="[]" icon="terp-go-home" context="{'group_by':'company_id'}" groups="base.group_multi_company"/>
|
||||
</group>
|
||||
</search>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<record model="ir.actions.act_window" id="action_document_directory_form">
|
||||
<field name="name">Directories</field>
|
||||
|
@ -268,10 +206,7 @@
|
|||
</xpath>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
|
||||
<record id="view_attach_filter_inherit2" model="ir.ui.view">
|
||||
|
||||
<field name="name">IR Attachment2</field>
|
||||
<field name="model">ir.attachment</field>
|
||||
<field name="inherit_id" ref="base.view_attachment_search"/>
|
||||
|
@ -342,32 +277,6 @@
|
|||
</record>
|
||||
|
||||
|
||||
<record model="ir.ui.view" id="view_process_node_form_inherit1">
|
||||
<field name="name">process.node.form</field>
|
||||
<field name="model">process.node</field>
|
||||
<field name="inherit_id" ref="process.view_process_node_form"/>
|
||||
<field name="priority" eval="1"/>
|
||||
<field name="arch" type="xml">
|
||||
<field name="subflow_id" position="after">
|
||||
<field name="directory_id" string="Document Directory" domain="[('ressource_type_id','=',model_id),('ressource_parent_type_id','=',False)]"/>
|
||||
<newline/>
|
||||
</field>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<record model="ir.ui.view" id="view_process_form_inherit1">
|
||||
<field name="name">process.process.form</field>
|
||||
<field name="model">process.process</field>
|
||||
<field name="inherit_id" ref="process.view_process_form"/>
|
||||
<field name="priority" eval="1"/>
|
||||
<field name="arch" type="xml">
|
||||
<field name="subflow_id" position="after">
|
||||
<field name="directory_id" domain="[('ressource_type_id','=',model_id),('ressource_parent_type_id','=',False)]"/>
|
||||
<newline/>
|
||||
</field>
|
||||
</field>
|
||||
</record>
|
||||
|
||||
<act_window domain="[('partner_id', '=', active_id)]"
|
||||
context="{'default_partner_id': active_id}"
|
||||
id="act_res_partner_document" name="Related Documents"
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -15,7 +15,7 @@ import sql_db
|
|||
|
||||
from service import security
|
||||
from osv import osv
|
||||
from document.nodes import get_node_context
|
||||
from document.document import get_node_context
|
||||
|
||||
def _get_month_name(month):
|
||||
month=int(month)
|
||||
|
|
Loading…
Reference in New Issue