2009-10-20 10:52:23 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
2006-12-07 13:41:40 +00:00
|
|
|
##############################################################################
|
2009-11-20 11:44:41 +00:00
|
|
|
#
|
2008-11-04 06:33:23 +00:00
|
|
|
# OpenERP, Open Source Management Solution
|
2009-10-14 12:32:15 +00:00
|
|
|
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
|
2010-05-31 09:39:13 +00:00
|
|
|
# Copyright (C) 2010 OpenERP s.a. (<http://openerp.com>).
|
2006-12-07 13:41:40 +00:00
|
|
|
#
|
2008-11-03 18:27:16 +00:00
|
|
|
# This program is free software: you can redistribute it and/or modify
|
2009-10-14 12:32:15 +00:00
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
2006-12-07 13:41:40 +00:00
|
|
|
#
|
2008-11-03 18:27:16 +00:00
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
2009-10-14 12:32:15 +00:00
|
|
|
# GNU Affero General Public License for more details.
|
2006-12-07 13:41:40 +00:00
|
|
|
#
|
2009-10-14 12:32:15 +00:00
|
|
|
# You should have received a copy of the GNU Affero General Public License
|
2009-11-20 11:44:41 +00:00
|
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
2006-12-07 13:41:40 +00:00
|
|
|
#
|
2008-11-03 18:27:16 +00:00
|
|
|
##############################################################################
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
"""
|
2008-09-10 08:46:40 +00:00
|
|
|
Miscelleanous tools used by OpenERP.
|
2006-12-07 13:41:40 +00:00
|
|
|
"""
|
|
|
|
|
|
|
|
import inspect
|
2010-11-23 15:22:24 +00:00
|
|
|
import subprocess
|
2010-06-28 09:38:29 +00:00
|
|
|
import logging
|
2010-10-27 12:28:30 +00:00
|
|
|
import os
|
2010-01-05 08:13:29 +00:00
|
|
|
import re
|
2010-06-28 09:38:29 +00:00
|
|
|
import smtplib
|
2010-10-27 12:28:30 +00:00
|
|
|
import socket
|
|
|
|
import sys
|
|
|
|
import threading
|
|
|
|
import time
|
2010-11-23 18:08:24 +00:00
|
|
|
import warnings
|
2010-10-27 12:28:30 +00:00
|
|
|
import zipfile
|
|
|
|
from datetime import datetime
|
2010-06-28 09:38:29 +00:00
|
|
|
from email.MIMEText import MIMEText
|
|
|
|
from email.MIMEBase import MIMEBase
|
|
|
|
from email.MIMEMultipart import MIMEMultipart
|
|
|
|
from email.Header import Header
|
|
|
|
from email.Utils import formatdate, COMMASPACE
|
|
|
|
from email import Encoders
|
2010-10-27 12:28:30 +00:00
|
|
|
from itertools import islice, izip
|
2010-12-29 10:50:04 +00:00
|
|
|
from lxml import etree
|
2010-10-27 12:28:30 +00:00
|
|
|
from which import which
|
2006-12-07 13:41:40 +00:00
|
|
|
if sys.version_info[:2] < (2, 4):
|
2008-07-22 14:24:36 +00:00
|
|
|
from threadinglocal import local
|
2006-12-07 13:41:40 +00:00
|
|
|
else:
|
2008-07-22 14:24:36 +00:00
|
|
|
from threading import local
|
2010-12-22 12:35:09 +00:00
|
|
|
try:
|
|
|
|
from html2text import html2text
|
|
|
|
except ImportError:
|
|
|
|
html2text = None
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2011-02-07 12:57:23 +00:00
|
|
|
import openerp.loglevels as loglevels
|
2010-10-27 12:28:30 +00:00
|
|
|
from config import config
|
2010-11-09 16:57:39 +00:00
|
|
|
from lru import LRU
|
2008-08-14 09:51:20 +00:00
|
|
|
|
2011-02-08 09:24:58 +00:00
|
|
|
# get_encodings, ustr and exception_to_unicode were originally from tools.misc.
|
|
|
|
# There are moved to loglevels until we refactor tools.
|
|
|
|
from openerp.loglevels import get_encodings, ustr, exception_to_unicode
|
|
|
|
|
2010-06-28 09:38:29 +00:00
|
|
|
_logger = logging.getLogger('tools')
|
|
|
|
|
2010-12-29 10:50:04 +00:00
|
|
|
# List of etree._Element subclasses that we choose to ignore when parsing XML.
|
|
|
|
# We include the *Base ones just in case, currently they seem to be subclasses of the _* ones.
|
|
|
|
SKIPPED_ELEMENT_TYPES = (etree._Comment, etree._ProcessingInstruction, etree.CommentBase, etree.PIBase)
|
|
|
|
|
2008-09-16 13:32:15 +00:00
|
|
|
# initialize a database with base/base.sql
|
2006-12-07 13:41:40 +00:00
|
|
|
def init_db(cr):
    """Initialize a fresh database.

    Loads base/base.sql (core schema), then registers every module found on
    the addons path into ir_module_module, creating the ir_module_category
    hierarchy and ir_module_module_dependency rows along the way.

    @param cr: database cursor; committed after base.sql and once per module
    """
    import openerp.addons as addons
    f = addons.get_module_resource('base', 'base.sql')
    base_sql_file = file_open(f)
    try:
        # base.sql holds the bootstrap schema; run it as a single script.
        cr.execute(base_sql_file.read())
        cr.commit()
    finally:
        base_sql_file.close()

    for i in addons.get_modules():
        mod_path = addons.get_module_path(i)
        if not mod_path:
            # module name known but nothing on disk: skip it
            continue

        # module descriptor (metadata dict); empty/falsy means unreadable
        info = addons.load_information_from_description_file(i)

        if not info:
            continue
        # Walk the slash-separated category path, creating any missing
        # ir_module_category rows; p_id ends up as the id of the leaf
        # category, which the module record will point to.
        categs = info.get('category', 'Uncategorized').split('/')
        p_id = None
        while categs:
            if p_id is not None:
                cr.execute('SELECT id \
                        FROM ir_module_category \
                        WHERE name=%s AND parent_id=%s', (categs[0], p_id))
            else:
                cr.execute('SELECT id \
                        FROM ir_module_category \
                        WHERE name=%s AND parent_id IS NULL', (categs[0],))
            c_id = cr.fetchone()
            if not c_id:
                cr.execute('INSERT INTO ir_module_category \
                        (name, parent_id) \
                        VALUES (%s, %s) RETURNING id', (categs[0], p_id))
                c_id = cr.fetchone()[0]
            else:
                c_id = c_id[0]
            p_id = c_id
            categs = categs[1:]

        # Initial module state: 'active' modules in the descriptor are
        # scheduled for installation; the rest stay uninstalled.
        active = info.get('active', False)
        installable = info.get('installable', True)
        if installable:
            if active:
                state = 'to install'
            else:
                state = 'uninstalled'
        else:
            state = 'uninstallable'
        cr.execute('INSERT INTO ir_module_module \
                (author, website, name, shortdesc, description, \
                    category_id, state, certificate, web, license) \
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING id', (
            info.get('author', ''),
            info.get('website', ''), i, info.get('name', False),
            info.get('description', ''), p_id, state, info.get('certificate') or None,
            info.get('web') or False,
            info.get('license') or 'AGPL-3'))
        id = cr.fetchone()[0]
        # Companion ir_model_data record so the module row has an xml-id.
        cr.execute('INSERT INTO ir_model_data \
            (name,model,module, res_id, noupdate) VALUES (%s,%s,%s,%s,%s)', (
                'module_meta_information', 'ir.module.module', i, id, True))
        dependencies = info.get('depends', [])
        for d in dependencies:
            cr.execute('INSERT INTO ir_module_module_dependency \
                    (module_id,name) VALUES (%s, %s)', (id, d))
        cr.commit()
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def find_in_path(name):
    """Locate executable *name* on the system PATH.

    @return: full path to the executable, or None when it cannot be found
    """
    try:
        found = which(name)
    except IOError:
        return None
    return found
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def find_pg_tool(name):
    """Locate a PostgreSQL client tool, honouring the configured pg_path.

    @return: full path to the tool, or None when it cannot be found
    """
    pg_path = config['pg_path']
    # A literal string 'None' in the config counts as "not configured".
    search_path = pg_path if (pg_path and pg_path != 'None') else None
    try:
        return which(name, path=search_path)
    except IOError:
        return None
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def exec_pg_command(name, *args):
    """Run a PostgreSQL client tool synchronously.

    @param name: tool name (e.g. pg_dump); located via find_pg_tool()
    @param args: extra command-line arguments passed through verbatim
    @return: the subprocess exit code
    @raise Exception: when the tool cannot be located
    """
    prog = find_pg_tool(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    return subprocess.call((prog,) + args)
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def exec_pg_command_pipe(name, *args):
    """Spawn a PostgreSQL client tool and return its (stdin, stdout) pipes.

    @raise Exception: when the tool cannot be located
    """
    prog = find_pg_tool(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # on win32, passing close_fds=True is not compatible
    # with redirecting std[in/err/out]
    child = subprocess.Popen(
        (prog,) + args,
        bufsize=-1,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        close_fds=(os.name == "posix"),
    )
    return (child.stdin, child.stdout)
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2007-03-15 12:26:40 +00:00
|
|
|
def exec_command_pipe(name, *args):
    """Spawn an executable found on PATH and return its (stdin, stdout) pipes.

    @raise Exception: when the executable cannot be located
    """
    prog = find_in_path(name)
    if not prog:
        raise Exception('Couldn\'t find %s' % name)
    # on win32, passing close_fds=True is not compatible
    # with redirecting std[in/err/out]
    child = subprocess.Popen(
        (prog,) + args,
        bufsize=-1,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        close_fds=(os.name == "posix"),
    )
    return (child.stdin, child.stdout)
|
2007-03-15 12:26:40 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# File paths
|
|
|
|
#----------------------------------------------------------
|
|
|
|
#file_path_root = os.getcwd()
|
|
|
|
#file_path_addons = os.path.join(file_path_root, 'addons')
|
|
|
|
|
2008-07-17 09:55:16 +00:00
|
|
|
def file_open(name, mode="r", subdir='addons', pathinfo=False):
    """Open a file from the OpenERP root, using a subdir folder.

    Search order: each configured addons path first, then the root_path,
    finally inside a zipped module (module.zip) on the same path.

    >>> file_open('hr/report/timesheer.xsl')
    >>> file_open('addons/hr/report/timesheet.xsl')
    >>> file_open('../../base/report/rml_template.xsl', subdir='addons/hr/report', pathinfo=True)

    @param name: name of the file
    @param mode: file open mode
    @param subdir: subdirectory
    @param pathinfo: if True returns tupple (fileobject, filepath)

    @return: fileobject if pathinfo is False else (fileobject, filepath)
    @raise IOError: when the file cannot be located anywhere
    """
    import openerp.addons as addons
    adps = addons.ad_paths
    rtp = os.path.normcase(os.path.abspath(config['root_path']))

    # Normalize an explicit 'addons/...' prefix away so the lookup below
    # works with the bare module-relative name.
    if name.replace(os.path.sep, '/').startswith('addons/'):
        subdir = 'addons'
        name = name[7:]

    # First try to locate in addons_path
    if subdir:
        subdir2 = subdir
        if subdir2.replace(os.path.sep, '/').startswith('addons/'):
            subdir2 = subdir2[7:]

        # Collapse a plain 'addons' subdir to None: addons paths already
        # point inside the addons directories.
        subdir2 = (subdir2 != 'addons' or None) and subdir2

        for adp in adps:
            try:
                if subdir2:
                    fn = os.path.join(adp, subdir2, name)
                else:
                    fn = os.path.join(adp, name)
                fn = os.path.normpath(fn)
                # Recursive call with subdir=None tries this candidate
                # directly (including the zip fallback below).
                fo = file_open(fn, mode=mode, subdir=None, pathinfo=pathinfo)
                if pathinfo:
                    return fo, fn
                return fo
            except IOError:
                # not under this addons path; try the next one
                pass

    # Fall back to a path relative to the configured root_path.
    if subdir:
        name = os.path.join(rtp, subdir, name)
    else:
        name = os.path.join(rtp, name)

    name = os.path.normpath(name)

    # Check for a zipfile in the path: walk the path upwards, testing at
    # each level whether '<head>.zip' exists; zipname accumulates the
    # remainder of the path inside the archive.
    head = name
    zipname = False
    name2 = False
    while True:
        head, tail = os.path.split(head)
        if not tail:
            break
        if zipname:
            zipname = os.path.join(tail, zipname)
        else:
            zipname = tail
        if zipfile.is_zipfile(head+'.zip'):
            from cStringIO import StringIO
            zfile = zipfile.ZipFile(head+'.zip')
            try:
                # Archive members are stored under the module directory
                # name and always use forward slashes.
                fo = StringIO()
                fo.write(zfile.read(os.path.join(
                    os.path.basename(head), zipname).replace(
                        os.sep, '/')))
                fo.seek(0)
                if pathinfo:
                    return fo, name
                return fo
            except Exception:
                # Member missing from the archive: remember the would-be
                # zip path for the existence check below and keep walking.
                name2 = os.path.normpath(os.path.join(head + '.zip', zipname))
                pass
    for i in (name2, name):
        if i and os.path.isfile(i):
            fo = file(i, mode)
            if pathinfo:
                return fo, i
            return fo
    if os.path.splitext(name)[1] == '.rml':
        raise IOError, 'Report %s doesn\'t exist or deleted : ' %str(name)
    raise IOError, 'File not found : %s' % name
|
2007-05-03 13:34:39 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2008-06-03 11:14:02 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# iterables
|
|
|
|
#----------------------------------------------------------
|
|
|
|
def flatten(list):
    """Flatten a list of elements into a unique list

    Author: Christophe Simonis (christophe@tinyerp.com)

    Examples:
    >>> flatten(['a'])
    ['a']
    >>> flatten('b')
    ['b']
    >>> flatten( [] )
    []
    >>> flatten( [[], [[]]] )
    []
    >>> flatten( [[['a','b'], 'c'], 'd', ['e', [], 'f']] )
    ['a', 'b', 'c', 'd', 'e', 'f']
    >>> t = (1,2,(3,), [4, 5, [6, [7], (8, 9), ([10, 11, (12, 13)]), [14, [], (15,)], []]])
    >>> flatten(t)
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
    """

    def isiterable(x):
        # Strings must be treated as atoms: under Python 3 (and for any
        # string-like defining __iter__) the bare hasattr() test would
        # recurse forever on single characters. Under Python 2 plain str
        # has no __iter__, so the extra isinstance check is a no-op there.
        return hasattr(x, "__iter__") and not isinstance(x, str)

    r = []
    for e in list:
        if isiterable(e):
            # recursively flatten nested containers
            r.extend(flatten(e))
        else:
            r.append(e)
    return r
|
|
|
|
|
2008-08-18 07:56:02 +00:00
|
|
|
def reverse_enumerate(l):
    """Like enumerate, but iterating from the last element to the first.

    Yields (index, element) pairs lazily, starting at index len(l)-1.

    >>> list(reverse_enumerate(['a', 'b', 'c']))
    [(2, 'c'), (1, 'b'), (0, 'a')]
    >>> list(reverse_enumerate([]))
    []
    """
    # A generator replaces the original izip(xrange(...), reversed(l)):
    # izip/xrange are Python-2-only names, while this form is equally lazy
    # and works on both Python 2 and 3.
    for index in range(len(l) - 1, -1, -1):
        yield (index, l[index])
|
2008-06-03 11:14:02 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# Emails
|
|
|
|
#----------------------------------------------------------
|
2010-01-05 08:13:29 +00:00
|
|
|
# Matches (and captures, group 1) a bare e-mail address.
email_re = re.compile(r"""
    ([a-zA-Z][\w\.-]*[a-zA-Z0-9]    # username part
    @                               # mandatory @ sign
    [a-zA-Z0-9][\w\.-]*             # domain must start with a letter ... Ged> why do we include a 0-9 then?
    \.
    [a-z]{2,3}                      # TLD
    )
    """, re.VERBOSE)
# Matches a "[123]" marker and captures the decimal number inside.
res_re = re.compile(r"\[([0-9]+)\]", re.UNICODE)
# Matches "Set-<command>: <value>" lines (case-insensitive); captures the
# command name and its value.
command_re = re.compile("^Set-([a-z]+) *: *(.+)$", re.I + re.UNICODE)
# Matches the Message-Ids produced by generate_tracking_message_id();
# captures the openobject id and the hostname part.
reference_re = re.compile("<.*-openobject-(\\d+)@(.*)>", re.UNICODE)

# Mapping from the '1'..'5' priority codes used by email_send() to the
# corresponding X-Priority header values.
priorities = {
    '1': '1 (Highest)',
    '2': '2 (High)',
    '3': '3 (Normal)',
    '4': '4 (Low)',
    '5': '5 (Lowest)',
    }
|
|
|
|
|
|
|
|
def html2plaintext(html, body_id=None, encoding='utf-8'):
    ## (c) Fry-IT, www.fry-it.com, 2007
    ## <peter@fry-it.com>
    ## download here: http://www.peterbe.com/plog/html2plaintext

    """ from an HTML text, convert the HTML to plain text.
    If @body_id is provided then this is the tag where the
    body (not necessarily <body>) starts.

    Links are rewritten as "text [n]" with a numbered URL list appended at
    the end; a few common tags are mapped to lightweight text markup.
    """

    html = ustr(html)

    from lxml.etree import tostring
    try:
        # Prefer the BeautifulSoup-backed parser (more tolerant of broken
        # markup); fall back to lxml's own HTMLParser when unavailable.
        from lxml.html.soupparser import fromstring
        kwargs = {}
    except ImportError:
        _logger.debug('tools.misc.html2plaintext: cannot use BeautifulSoup, fallback to lxml.etree.HTMLParser')
        from lxml.etree import fromstring, HTMLParser
        kwargs = dict(parser=HTMLParser())

    tree = fromstring(html, **kwargs)

    if body_id is not None:
        # NOTE(review): body_id is interpolated without quotes into the
        # XPath expression -- callers presumably pass an already-quoted
        # value; confirm before changing.
        source = tree.xpath('//*[@id=%s]'%(body_id,))
    else:
        source = tree.xpath('//body')
    if len(source):
        tree = source[0]

    # Replace each <a href=...> by "<span>text [n]</span>" and remember the
    # URL so it can be listed at the bottom of the output.
    url_index = []
    i = 0
    for link in tree.findall('.//a'):
        url = link.get('href')
        if url:
            i += 1
            link.tag = 'span'
            link.text = '%s [%s]' % (link.text, i)
            url_index.append(url)

    html = ustr(tostring(tree, encoding=encoding))

    # Map emphasis/heading tags to text markup, then strip everything else.
    html = html.replace('<strong>','*').replace('</strong>','*')
    html = html.replace('<b>','*').replace('</b>','*')
    html = html.replace('<h3>','*').replace('</h3>','*')
    html = html.replace('<h2>','**').replace('</h2>','**')
    html = html.replace('<h1>','**').replace('</h1>','**')
    html = html.replace('<em>','/').replace('</em>','/')
    html = html.replace('<tr>', '\n')
    html = html.replace('</p>', '\n')
    html = re.sub('<br\s*/?>', '\n', html)
    html = re.sub('<.*?>', ' ', html)
    html = html.replace(' ' * 2, ' ')

    # strip all lines
    html = '\n'.join([x.strip() for x in html.splitlines()])
    html = html.replace('\n' * 2, '\n')

    # Append the collected URLs as a numbered reference list.
    for i, url in enumerate(url_index):
        if i == 0:
            html += '\n\n'
        html += ustr('[%s] %s\n') % (i+1, url)

    return html
|
|
|
|
|
2010-08-13 10:54:04 +00:00
|
|
|
def generate_tracking_message_id(openobject_id):
    """Build a value for the Message-ID RFC822 header field.

    Embedding the object id lets us match later replies (via their
    "In-Reply-To"/"References" headers) back to the originating record.
    """
    timestamp = time.time()
    hostname = socket.gethostname()
    return "<%s-openobject-%s@%s>" % (timestamp, openobject_id, hostname)
|
|
|
|
|
2011-02-14 09:10:53 +00:00
|
|
|
def connect_smtp_server(server_host, server_port, user_name=None, user_password=None, ssl=False, tls=False, debug=False):
    """
    Connect to an SMTP server and return the connected smtplib SMTP object.

    @param ssl: use smtplib.SMTP_SSL (implicit TLS) instead of plain SMTP
    @param tls: issue STARTTLS after connecting
    @param debug: enable smtplib protocol tracing
    @raise Exception: re-raises whatever smtplib raised, after logging it
    """
    smtp_server = None
    try:
        if ssl:
            # In Python 2.6
            smtp_server = smtplib.SMTP_SSL(server_host, server_port)
        else:
            smtp_server = smtplib.SMTP(server_host, server_port)

        smtp_server.set_debuglevel(int(bool(debug))) # 0 or 1

        if tls:
            smtp_server.ehlo()
            smtp_server.starttls()
            # second EHLO refreshes the advertised extensions over the
            # now-encrypted channel
            smtp_server.ehlo()

        #smtp_server.connect(server_host, server_port)

        # NOTE(review): attempts login when the server advertises AUTH *or*
        # any single credential is provided; in the non-tls path has_extn()
        # runs before any explicit EHLO -- confirm this works against the
        # servers in use.
        if smtp_server.has_extn('AUTH') or user_name or user_password:
            smtp_server.login(user_name, user_password)

    except Exception, error:
        _logger.error('Could not connect to smtp server : %s' %(error), exc_info=True)
        raise error
    return smtp_server
|
|
|
|
|
|
|
|
|
2011-02-03 07:19:44 +00:00
|
|
|
def _email_send(smtp_from, smtp_to_list, message, openobject_id=None, ssl=False, debug=False,
                smtp_server=None, smtp_port=None, smtp_user=None, smtp_password=None):
    """Low-level method to send directly a Message through the configured smtp server.
    :param smtp_from: RFC-822 envelope FROM (not displayed to recipient)
    :param smtp_to_list: RFC-822 envelope RCPT_TOs (not displayed to recipient)
    :param message: an email.message.Message to send
    :param debug: True if messages should be output to stderr before being sent,
                  and smtplib.SMTP put into debug mode.
    :return: True if the mail was delivered successfully to the smtp,
             else False (+ exception logged)
    """
    # File-like adapter so smtplib's debug output lands in the OpenERP log
    # instead of the real stderr.
    class WriteToLogger(object):
        def __init__(self):
            self.logger = loglevels.Logger()

        def write(self, s):
            self.logger.notifyChannel('email_send', loglevels.LOG_DEBUG, s)

    if openobject_id:
        # Stamp a trackable Message-Id so replies can be matched back.
        message['Message-Id'] = generate_tracking_message_id(openobject_id)

    try:
        smtp_server = smtp_server or config['smtp_server']

        # Special "maildir:/..." pseudo-server: drop the message into a
        # local Maildir instead of talking SMTP (useful for testing).
        if smtp_server.startswith('maildir:/'):
            from mailbox import Maildir
            # [8:] skips 'maildir:' and keeps the leading '/', so the
            # result is an absolute path.
            maildir_path = smtp_server[8:]
            mdir = Maildir(maildir_path,factory=None, create = True)
            mdir.add(message.as_string(True))
            return True

        if debug:
            # Temporarily redirect smtplib's module-level stderr into the
            # logger; restored after a successful send below.
            oldstderr = smtplib.stderr
            smtplib.stderr = WriteToLogger()

        if not ssl: ssl = config.get('smtp_ssl', False)
        smtp = connect_smtp_server(smtp_server, smtp_port, smtp_user, smtp_password, ssl=ssl, tls=True, debug=debug)
        try:
            smtp.sendmail(smtp_from, smtp_to_list, message.as_string())
        except Exception:
            _logger.error('could not deliver Email(s)', exc_info=True)
            return False
        finally:
            try:
                smtp.quit()
            except Exception:
                # ignored, just a consequence of the previous exception
                pass

        if debug:
            smtplib.stderr = oldstderr
    except Exception:
        # connection/setup failures (sendmail failures returned above)
        _logger.error('Error on Send Emails Services', exc_info=True)
        return False

    return True
|
|
|
|
|
|
|
|
|
2009-03-02 14:01:35 +00:00
|
|
|
def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=None, reply_to=False,
        attach=None, openobject_id=False, debug=False, subtype='plain', x_headers=None, priority='3',
        smtp_server=None, smtp_port=None, ssl=False, smtp_user=None, smtp_password=None):

    """Send an email.

    Builds a MIME multipart message (with optional html/plain alternative
    parts and attachments) and hands it to _email_send() for delivery.

    Arguments:

    `email_from`: A string used to fill the `From` header, if falsy,
                  config['email_from'] is used instead. Also used for
                  the `Reply-To` header if `reply_to` is not provided

    `email_to`: a sequence of addresses to send the mail to.

    `attach`: sequence of (filename, content) pairs, attached as
              base64-encoded application/octet-stream parts.

    :return: the boolean result of _email_send()
    :raise ValueError: when no sender address is given nor configured
    """
    # avoid a shared mutable default
    if x_headers is None:
        x_headers = {}

    if not (email_from or config['email_from']):
        raise ValueError("Sending an email requires either providing a sender "
                         "address or having configured one")

    if not email_from: email_from = config.get('email_from', False)
    email_from = ustr(email_from).encode('utf-8')

    if not email_cc: email_cc = []
    if not email_bcc: email_bcc = []
    if not body: body = u''

    email_body = ustr(body).encode('utf-8')
    email_text = MIMEText(email_body or '',_subtype=subtype,_charset='utf-8')
    msg = MIMEMultipart()

    msg['Subject'] = Header(ustr(subject), 'utf-8')
    msg['From'] = email_from
    # deleting an absent header is a no-op for email.Message
    del msg['Reply-To']
    if reply_to:
        msg['Reply-To'] = reply_to
    else:
        msg['Reply-To'] = msg['From']
    msg['To'] = COMMASPACE.join(email_to)
    if email_cc:
        msg['Cc'] = COMMASPACE.join(email_cc)
    if email_bcc:
        msg['Bcc'] = COMMASPACE.join(email_bcc)
    msg['Date'] = formatdate(localtime=True)

    msg['X-Priority'] = priorities.get(priority, '3 (Normal)')

    # Add dynamic X Header
    for key, value in x_headers.iteritems():
        msg['%s' % key] = str(value)

    if html2text and subtype == 'html':
        # When html2text is available, send a multipart/alternative with a
        # derived plain-text part so text-only clients can read the mail.
        text = html2text(email_body.decode('utf-8')).encode('utf-8')
        alternative_part = MIMEMultipart(_subtype="alternative")
        alternative_part.attach(MIMEText(text, _charset='utf-8', _subtype='plain'))
        alternative_part.attach(email_text)
        msg.attach(alternative_part)
    else:
        msg.attach(email_text)

    if attach:
        for (fname,fcontent) in attach:
            part = MIMEBase('application', "octet-stream")
            part.set_payload( fcontent )
            Encoders.encode_base64(part)
            part.add_header('Content-Disposition', 'attachment; filename="%s"' % (fname,))
            msg.attach(part)

    # Envelope recipients = To + Cc + Bcc flattened into one list.
    return _email_send(email_from, flatten([email_to, email_cc, email_bcc]), msg, openobject_id=openobject_id, ssl=ssl, debug=debug,
                       smtp_server=smtp_server, smtp_port=smtp_port, smtp_user=smtp_user, smtp_password=smtp_password)
|
2007-04-13 05:03:14 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
#----------------------------------------------------------
|
|
|
|
# SMS
|
|
|
|
#----------------------------------------------------------
|
|
|
|
# text must be latin-1 encoded
|
|
|
|
def sms_send(user, password, api_id, text, to):
    """Send an SMS through the urlsms.com HTTP gateway.

    ``text`` must be latin-1 encoded. Always returns True, even when the
    gateway call fails.
    """
    import urllib
    gateway = "http://api.urlsms.com/SendSMS.aspx"
    #url = "http://196.7.150.220/http/sendmsg"
    query = urllib.urlencode({
        'UserID': user,
        'Password': password,
        'SenderID': api_id,
        'MsgText': text,
        'RecipientMobileNo': to,
    })
    urllib.urlopen("%s?%s" % (gateway, query))
    # FIXME: Use the logger if there is an error
    return True
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
#---------------------------------------------------------
|
|
|
|
# Class that stores an updateable string (used in wizards)
|
|
|
|
#---------------------------------------------------------
|
|
|
|
class UpdateableStr(local):
    '''Thread-local, in-place updateable string holder (used in wizards).'''

    def __init__(self, string=''):
        # The wrapped value; rebind it to "update" the string in place.
        self.string = string

    def __str__(self):
        return str(self.string)

    # repr displays exactly like str for wizard strings.
    __repr__ = __str__

    def __nonzero__(self):
        # Truthiness follows the wrapped value (empty string -> False).
        return bool(self.string)
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2007-10-11 13:56:36 +00:00
|
|
|
|
|
|
|
class UpdateableDict(local):
|
2008-07-22 14:24:36 +00:00
|
|
|
'''Stores an updateable dict to use in wizards'''
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def __init__(self, dict=None):
|
|
|
|
if dict is None:
|
|
|
|
dict = {}
|
|
|
|
self.dict = dict
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def __str__(self):
|
|
|
|
return str(self.dict)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def __repr__(self):
|
|
|
|
return str(self.dict)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def clear(self):
|
|
|
|
return self.dict.clear()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def keys(self):
|
|
|
|
return self.dict.keys()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def __setitem__(self, i, y):
|
|
|
|
self.dict.__setitem__(i, y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def __getitem__(self, i):
|
|
|
|
return self.dict.__getitem__(i)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def copy(self):
|
|
|
|
return self.dict.copy()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def iteritems(self):
|
|
|
|
return self.dict.iteritems()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def iterkeys(self):
|
|
|
|
return self.dict.iterkeys()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def itervalues(self):
|
|
|
|
return self.dict.itervalues()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def pop(self, k, d=None):
|
|
|
|
return self.dict.pop(k, d)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def popitem(self):
|
|
|
|
return self.dict.popitem()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def setdefault(self, k, d=None):
|
|
|
|
return self.dict.setdefault(k, d)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def update(self, E, **F):
|
|
|
|
return self.dict.update(E, F)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def values(self):
|
|
|
|
return self.dict.values()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def get(self, k, d=None):
|
|
|
|
return self.dict.get(k, d)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def has_key(self, k):
|
|
|
|
return self.dict.has_key(k)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def items(self):
|
|
|
|
return self.dict.items()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def __cmp__(self, y):
|
|
|
|
return self.dict.__cmp__(y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def __contains__(self, k):
|
|
|
|
return self.dict.__contains__(k)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
def __delitem__(self, y):
|
|
|
|
return self.dict.__delitem__(y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
    def __eq__(self, y):
        # Equality is that of the wrapped dict.
        return self.dict.__eq__(y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
    def __ge__(self, y):
        # >= comparison delegated to the wrapped dict.
        return self.dict.__ge__(y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
    def __gt__(self, y):
        # > comparison delegated to the wrapped dict.
        return self.dict.__gt__(y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
    def __hash__(self):
        # Delegated to the wrapped dict; hashing will fail the same way
        # hashing a plain (mutable) dict does.
        return self.dict.__hash__()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
    def __iter__(self):
        # Iterating the wrapper iterates the wrapped dict (its keys).
        return self.dict.__iter__()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
    def __le__(self, y):
        # <= comparison delegated to the wrapped dict.
        return self.dict.__le__(y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
    def __len__(self):
        # len(wrapper) is the number of entries in the wrapped dict.
        return self.dict.__len__()
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
    def __lt__(self, y):
        # < comparison delegated to the wrapped dict.
        return self.dict.__lt__(y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
2008-07-22 14:24:36 +00:00
|
|
|
    def __ne__(self, y):
        # Inequality delegated to the wrapped dict.
        return self.dict.__ne__(y)
|
2007-10-11 13:56:36 +00:00
|
|
|
|
|
|
|
|
2006-12-28 09:44:56 +00:00
|
|
|
# Don't use ! Use res.currency.round()
class currency(float):
    """Deprecated float subclass whose value is rounded to a fixed number
    of decimals at creation time.

    Kept only for backward compatibility; use res.currency.round() instead.
    """

    def __init__(self, value, accuracy=2, rounding=None):
        # The numeric value itself is rounded in __new__ (floats are
        # immutable); here we only record the rounding parameters.
        self.rounding = rounding if rounding is not None else 10 ** -accuracy
        self.accuracy = accuracy

    def __new__(cls, value, accuracy=2, rounding=None):
        # Round once, at construction, to `accuracy` decimal places.
        return float.__new__(cls, round(value, accuracy))
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
|
2008-10-16 18:28:16 +00:00
|
|
|
def is_hashable(h):
    """Return True when hash(h) succeeds, False when h is unhashable."""
    try:
        hash(h)
    except TypeError:
        return False
    return True
|
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
class cache(object):
    """
    Use it as a decorator of the function you plan to cache
    Timeout: 0 = no timeout, otherwise in seconds

    The decorated function must take a database cursor as its second
    argument (after self); results are cached per database.
    """

    # Registry of every cache instance ever created; used by
    # clean_caches_for_db() to flush all caches of one database.
    __caches = []

    def __init__(self, timeout=None, skiparg=2, multi=None, size=8192):
        assert skiparg >= 2 # at least self and cr
        if timeout is None:
            # fall back to the server-wide default timeout
            self.timeout = config['cache_timeout']
        else:
            self.timeout = timeout
        self.skiparg = skiparg
        self.multi = multi
        # timestamp of the last expiry sweep (see cached_result below)
        self.lasttime = time.time()
        self.cache = LRU(size)      # TODO take size from config
        self.fun = None
        cache.__caches.append(self)


    def _generate_keys(self, dbname, kwargs2):
        """
        Generate the cache keys for the given arguments, depending on the
        self.multi value. Yields (key, id) pairs; id is None when not multi.
        """

        def to_tuple(d):
            # Turn a dict into a deterministically ordered, hashable tuple.
            pairs = d.items()
            pairs.sort(key=lambda (k,v): k)
            for i, (k, v) in enumerate(pairs):
                if isinstance(v, dict):
                    pairs[i] = (k, to_tuple(v))
                if isinstance(v, (list, set)):
                    pairs[i] = (k, tuple(v))
                elif not is_hashable(v):
                    # last resort: use repr() as a hashable stand-in
                    pairs[i] = (k, repr(v))
            return tuple(pairs)

        if not self.multi:
            key = (('dbname', dbname),) + to_tuple(kwargs2)
            yield key, None
        else:
            # one key per id in the 'multi' argument (copied first, because
            # the loop below overwrites kwargs2[self.multi])
            multis = kwargs2[self.multi][:]
            for id in multis:
                kwargs2[self.multi] = (id,)
                key = (('dbname', dbname),) + to_tuple(kwargs2)
                yield key, id

    def _unify_args(self, *args, **kwargs):
        # Update named arguments with positional argument values (without self and cr)
        kwargs2 = self.fun_default_values.copy()
        kwargs2.update(kwargs)
        kwargs2.update(dict(zip(self.fun_arg_names, args[self.skiparg-2:])))
        return kwargs2

    def clear(self, dbname, *args, **kwargs):
        """clear the cache for database dbname
        if *args and **kwargs are both empty, clear all the keys related to this database
        """
        if not args and not kwargs:
            # key[0] is the ('dbname', dbname) pair placed first by _generate_keys
            keys_to_del = [key for key in self.cache.keys() if key[0][1] == dbname]
        else:
            kwargs2 = self._unify_args(*args, **kwargs)
            keys_to_del = [key for key, _ in self._generate_keys(dbname, kwargs2) if key in self.cache.keys()]

        for key in keys_to_del:
            self.cache.pop(key)

    @classmethod
    def clean_caches_for_db(cls, dbname):
        # Flush the entries of the given database in every registered cache.
        for c in cls.__caches:
            c.clear(dbname)

    def __call__(self, fn):
        # Decorator entry point; one cache instance wraps exactly one function.
        if self.fun is not None:
            raise Exception("Can not use a cache instance on more than one function")
        self.fun = fn

        argspec = inspect.getargspec(fn)
        self.fun_arg_names = argspec[0][self.skiparg:]
        self.fun_default_values = {}
        if argspec[3]:
            # map the trailing argument names to their declared defaults
            self.fun_default_values = dict(zip(self.fun_arg_names[-len(argspec[3]):], argspec[3]))

        def cached_result(self2, cr, *args, **kwargs):
            # Periodic expiry sweep: drop entries inserted before the
            # timeout window, at most once per timeout interval.
            if time.time()-int(self.timeout) > self.lasttime:
                self.lasttime = time.time()
                t = time.time()-int(self.timeout)
                old_keys = [key for key in self.cache.keys() if self.cache[key][1] < t]
                for key in old_keys:
                    self.cache.pop(key)

            kwargs2 = self._unify_args(*args, **kwargs)

            # Split the requested keys into cached and uncached ones.
            result = {}
            notincache = {}
            for key, id in self._generate_keys(cr.dbname, kwargs2):
                if key in self.cache:
                    result[id] = self.cache[key][0]
                else:
                    notincache[id] = key

            if notincache:
                if self.multi:
                    # only ask the wrapped function for the missing ids
                    kwargs2[self.multi] = notincache.keys()

                result2 = fn(self2, cr, *args[:self.skiparg-2], **kwargs2)
                if not self.multi:
                    key = notincache[None]
                    # cache entries are (value, insertion timestamp) pairs
                    self.cache[key] = (result2, time.time())
                    result[None] = result2
                else:
                    for id in result2:
                        key = notincache[id]
                        self.cache[key] = (result2[id], time.time())
                    result.update(result2)

            if not self.multi:
                return result[None]
            return result

        # expose invalidation to callers of the decorated function
        cached_result.clear_cache = self.clear
        return cached_result
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2008-03-03 11:54:45 +00:00
|
|
|
def to_xml(s):
    """Escape the XML markup characters '&', '<' and '>' in *s*.

    '&' must be replaced first, otherwise the ampersands of the
    entities introduced for '<' and '>' would be escaped again.
    """
    # Bug fix: the replacement chain had lost its entity references
    # (each character was "replaced" by itself, a no-op); restore the
    # standard XML predefined entities.
    return s.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')
|
2008-03-03 11:54:45 +00:00
|
|
|
|
2008-12-26 10:18:58 +00:00
|
|
|
# to be compatible with python 2.4
import __builtin__
if not hasattr(__builtin__, 'all'):
    def all(iterable):
        # Backport of the all() builtin (new in Python 2.5): True only if
        # every element is truthy; vacuously True for an empty iterable.
        for element in iterable:
            if not element:
                return False
        return True

    # install the backport process-wide, then drop the local name
    __builtin__.all = all
    del all
|
2009-11-17 07:20:14 +00:00
|
|
|
|
2008-12-26 10:18:58 +00:00
|
|
|
if not hasattr(__builtin__, 'any'):
    def any(iterable):
        # Backport of the any() builtin (new in Python 2.5): True if at
        # least one element is truthy; False for an empty iterable.
        for element in iterable:
            if element:
                return True
        return False

    # install the backport process-wide, then drop the local name
    __builtin__.any = any
    del any
|
|
|
|
|
2009-11-20 11:44:41 +00:00
|
|
|
def get_iso_codes(lang):
    """Collapse redundant locale codes to their bare language part.

    A code such as 'fr_FR', whose territory is just the upper-cased
    language, becomes 'fr'; anything else ('fr_BE', 'sr@latin', 'de')
    is returned unchanged.
    """
    parts = lang.split('_')
    if len(parts) > 1 and parts[0] == parts[1].lower():
        lang = parts[0]
    return lang
|
2008-12-26 10:18:58 +00:00
|
|
|
|
2006-12-07 13:41:40 +00:00
|
|
|
def get_languages():
    """Return the dict of supported locale codes, mapping each code to a
    human-readable "English name / native name" label.
    """
    # The codes below are those from Launchpad's Rosetta, with the exception
    # of some trivial codes where the Launchpad code is xx and we have xx_XX.
    languages={
        'ab_RU': u'Abkhazian / аҧсуа',
        'ar_AR': u'Arabic / الْعَرَبيّة',
        'bg_BG': u'Bulgarian / български език',
        'bs_BS': u'Bosnian / bosanski jezik',
        'ca_ES': u'Catalan / Català',
        'cs_CZ': u'Czech / Čeština',
        'da_DK': u'Danish / Dansk',
        'de_DE': u'German / Deutsch',
        'el_GR': u'Greek / Ελληνικά',
        'en_CA': u'English (CA)',
        'en_GB': u'English (UK)',
        'en_US': u'English (US)',
        'es_AR': u'Spanish (AR) / Español (AR)',
        'es_BO': u'Spanish (BO) / Español (BO)',
        'es_CL': u'Spanish (CL) / Español (CL)',
        'es_CO': u'Spanish (CO) / Español (CO)',
        'es_CR': u'Spanish (CR) / Español (CR)',
        'es_DO': u'Spanish (DO) / Español (DO)',
        'es_EC': u'Spanish (EC) / Español (EC)',
        'es_ES': u'Spanish / Español',
        'es_GT': u'Spanish (GT) / Español (GT)',
        'es_HN': u'Spanish (HN) / Español (HN)',
        'es_MX': u'Spanish (MX) / Español (MX)',
        'es_NI': u'Spanish (NI) / Español (NI)',
        'es_PA': u'Spanish (PA) / Español (PA)',
        'es_PE': u'Spanish (PE) / Español (PE)',
        'es_PR': u'Spanish (PR) / Español (PR)',
        'es_PY': u'Spanish (PY) / Español (PY)',
        'es_SV': u'Spanish (SV) / Español (SV)',
        'es_UY': u'Spanish (UY) / Español (UY)',
        'es_VE': u'Spanish (VE) / Español (VE)',
        'et_EE': u'Estonian / Eesti keel',
        'fa_IR': u'Persian / فارس',
        'fi_FI': u'Finnish / Suomi',
        'fr_BE': u'French (BE) / Français (BE)',
        'fr_CH': u'French (CH) / Français (CH)',
        'fr_FR': u'French / Français',
        'gl_ES': u'Galician / Galego',
        'gu_IN': u'Gujarati / ગુજરાતી',
        'he_IL': u'Hebrew / עִבְרִי',
        'hi_IN': u'Hindi / हिंदी',
        'hr_HR': u'Croatian / hrvatski jezik',
        'hu_HU': u'Hungarian / Magyar',
        'id_ID': u'Indonesian / Bahasa Indonesia',
        'it_IT': u'Italian / Italiano',
        'iu_CA': u'Inuktitut / ᐃᓄᒃᑎᑐᑦ',
        'ja_JP': u'Japanese / 日本語',
        'ko_KP': u'Korean (KP) / 한국어 (KP)',
        'ko_KR': u'Korean (KR) / 한국어 (KR)',
        'lt_LT': u'Lithuanian / Lietuvių kalba',
        'lv_LV': u'Latvian / latviešu valoda',
        'ml_IN': u'Malayalam / മലയാളം',
        'mn_MN': u'Mongolian / монгол',
        'nb_NO': u'Norwegian Bokmål / Norsk bokmål',
        'nl_NL': u'Dutch / Nederlands',
        'nl_BE': u'Flemish (BE) / Vlaams (BE)',
        'oc_FR': u'Occitan (FR, post 1500) / Occitan',
        'pl_PL': u'Polish / Język polski',
        'pt_BR': u'Portugese (BR) / Português (BR)',
        'pt_PT': u'Portugese / Português',
        'ro_RO': u'Romanian / română',
        'ru_RU': u'Russian / русский язык',
        'si_LK': u'Sinhalese / සිංහල',
        'sl_SI': u'Slovenian / slovenščina',
        'sk_SK': u'Slovak / Slovenský jazyk',
        'sq_AL': u'Albanian / Shqip',
        'sr_RS': u'Serbian (Cyrillic) / српски',
        'sr@latin': u'Serbian (Latin) / srpski',
        'sv_SE': u'Swedish / svenska',
        'te_IN': u'Telugu / తెలుగు',
        'tr_TR': u'Turkish / Türkçe',
        'vi_VN': u'Vietnamese / Tiếng Việt',
        'uk_UA': u'Ukrainian / українська',
        'ur_PK': u'Urdu / اردو',
        'zh_CN': u'Chinese (CN) / 简体中文',
        'zh_HK': u'Chinese (HK)',
        'zh_TW': u'Chinese (TW) / 正體字',
        'th_TH': u'Thai / ภาษาไทย',
        'tlh_TLH': u'Klingon',
        }
    return languages
|
2006-12-07 13:41:40 +00:00
|
|
|
|
|
|
|
def scan_languages():
    """Return all known languages as (code, label) pairs, sorted by label.

    All codes from get_languages() are returned, without filtering them
    against the languages shipped by the base module.
    """
    known = get_languages()
    pairs = [(code, known.get(code, code)) for code in list(known)]
    return sorted(pairs, key=lambda pair: pair[1])
|
2006-12-07 13:41:40 +00:00
|
|
|
|
2007-04-23 13:13:47 +00:00
|
|
|
|
|
|
|
def get_user_companies(cr, user):
    """Return the id of the user's company followed by the ids of all of
    its descendant companies, or [] when the user has no company set.

    @param cr: database cursor
    @param user: id of the res_users row to look up
    """
    def _get_company_children(cr, ids):
        # Recursively collect the ids of every company below `ids`.
        if not ids:
            return []
        cr.execute('SELECT id FROM res_company WHERE parent_id IN %s', (tuple(ids),))
        res = [x[0] for x in cr.fetchall()]
        res.extend(_get_company_children(cr, res))
        return res
    cr.execute('SELECT company_id FROM res_users WHERE id=%s', (user,))
    user_comp = cr.fetchone()[0]
    if not user_comp:
        return []
    return [user_comp] + _get_company_children(cr, [user_comp])
|
2007-04-23 13:13:47 +00:00
|
|
|
|
2007-10-04 05:59:39 +00:00
|
|
|
def mod10r(number):
    """
    Input number : account or invoice number
    Output return: the same number completed with the recursive mod10
    key

    Non-digit characters are copied through and do not affect the key.
    """
    checksum_table = [0, 9, 4, 6, 8, 2, 7, 1, 3, 5]
    carry = 0
    output = ""
    for char in number:
        output += char
        if char.isdigit():
            carry = checksum_table[(int(char) + carry) % 10]
    return output + str((10 - carry) % 10)
|
2007-10-04 05:59:39 +00:00
|
|
|
|
2008-06-03 11:14:02 +00:00
|
|
|
|
2008-09-02 10:09:28 +00:00
|
|
|
def human_size(sz):
    """
    Return the size in a human readable format

    Accepts a byte count or a string (whose length is used); returns
    False for falsy input.
    """
    if not sz:
        return False
    units = ('bytes', 'Kb', 'Mb', 'Gb')
    if isinstance(sz, basestring):
        sz = len(sz)
    size = float(sz)
    idx = 0
    while size >= 1024 and idx < len(units) - 1:
        size /= 1024
        idx += 1
    return "%0.2f %s" % (size, units[idx])
|
2008-06-03 11:14:02 +00:00
|
|
|
|
2008-12-12 10:51:23 +00:00
|
|
|
def logged(f):
    """Decorator: log, at DEBUG level on the 'logged' channel, every call
    to *f* with its arguments, its result and the elapsed time.
    """
    # NOTE: 'func' is a sibling module of this package providing wraps()
    # (Python 2 implicit relative import), not the stdlib functools.
    from func import wraps

    @wraps(f)
    def wrapper(*args, **kwargs):
        from pprint import pformat

        vector = ['Call -> function: %r' % f]
        for i, arg in enumerate(args):
            vector.append(' arg %02d: %s' % (i, pformat(arg)))
        for key, value in kwargs.items():
            vector.append(' kwarg %10s: %s' % (key, pformat(value)))

        # time the wrapped call itself
        timeb4 = time.time()
        res = f(*args, **kwargs)

        vector.append(' result: %s' % pformat(res))
        vector.append(' time delta: %s' % (time.time() - timeb4))
        loglevels.Logger().notifyChannel('logged', loglevels.LOG_DEBUG, '\n'.join(vector))
        return res

    return wrapper
|
2008-09-09 16:15:17 +00:00
|
|
|
|
2009-01-26 18:52:11 +00:00
|
|
|
class profile(object):
    """Decorator: profile every call to the wrapped function with cProfile,
    dumping the stats to *fname* (or '<function name>.cprof' by default).
    """
    def __init__(self, fname=None):
        # target file for the profile dump; None means derive it from the
        # wrapped function's name at call time
        self.fname = fname

    def __call__(self, f):
        # NOTE: 'func' is a sibling module providing wraps(), not functools.
        from func import wraps

        @wraps(f)
        def wrapper(*args, **kwargs):
            # cProfile.runctx() takes a statement string, so wrap the call
            # in a callable object that captures the return value.
            class profile_wrapper(object):
                def __init__(self):
                    self.result = None
                def __call__(self):
                    self.result = f(*args, **kwargs)
            pw = profile_wrapper()
            import cProfile
            fname = self.fname or ("%s.cprof" % (f.func_name,))
            cProfile.runctx('pw()', globals(), locals(), filename=fname)
            return pw.result

        return wrapper
|
|
|
|
|
2008-12-18 10:20:03 +00:00
|
|
|
def debug(what):
    """
    This method allow you to debug your code without print
    Example:
    >>> def func_foo(bar)
    ...     baz = bar
    ...     debug(baz)
    ...     qnx = (baz, bar)
    ...     debug(qnx)
    ...
    >>> func_foo(42)

    This will output on the logger:

        [Wed Dec 25 00:00:00 2008] DEBUG:func_foo:baz = 42
        [Wed Dec 25 00:00:00 2008] DEBUG:func_foo:qnx = (42, 42)

    To view the DEBUG lines in the logger you must start the server with the option
        --log-level=debug

    """
    warnings.warn("The tools.debug() method is deprecated, please use logging.",
                  DeprecationWarning, stacklevel=2)
    from inspect import stack
    from pprint import pformat
    # st is the caller's frame record: st[3] is the caller's function name,
    # st[4][0] the source line that contains the debug(...) call.
    st = stack()[1]
    # Recover the literal argument text from the caller's source line so the
    # log can show "name = value" instead of just the value.
    param = re.split("debug *\((.+)\)", st[4][0].strip())[1].strip()
    # trim any text greedily captured after the real closing parenthesis
    while param.count(')') > param.count('('): param = param[:param.rfind(')')]
    what = pformat(what)
    if param != what:
        what = "%s = %s" % (param, what)
    # log under the caller's function name as logger name
    logging.getLogger(st[3]).debug(what)
|
2008-12-18 10:20:03 +00:00
|
|
|
|
2008-06-03 11:14:02 +00:00
|
|
|
|
2010-11-16 15:05:37 +00:00
|
|
|
# Icon names offered to the clients: the GTK stock icon identifiers plus the
# OpenERP-specific 'terp-*' icons (see icons() below for the selection list).
__icons_list = ['STOCK_ABOUT', 'STOCK_ADD', 'STOCK_APPLY', 'STOCK_BOLD',
'STOCK_CANCEL', 'STOCK_CDROM', 'STOCK_CLEAR', 'STOCK_CLOSE', 'STOCK_COLOR_PICKER',
'STOCK_CONNECT', 'STOCK_CONVERT', 'STOCK_COPY', 'STOCK_CUT', 'STOCK_DELETE',
'STOCK_DIALOG_AUTHENTICATION', 'STOCK_DIALOG_ERROR', 'STOCK_DIALOG_INFO',
'STOCK_DIALOG_QUESTION', 'STOCK_DIALOG_WARNING', 'STOCK_DIRECTORY', 'STOCK_DISCONNECT',
'STOCK_DND', 'STOCK_DND_MULTIPLE', 'STOCK_EDIT', 'STOCK_EXECUTE', 'STOCK_FILE',
'STOCK_FIND', 'STOCK_FIND_AND_REPLACE', 'STOCK_FLOPPY', 'STOCK_GOTO_BOTTOM',
'STOCK_GOTO_FIRST', 'STOCK_GOTO_LAST', 'STOCK_GOTO_TOP', 'STOCK_GO_BACK',
'STOCK_GO_DOWN', 'STOCK_GO_FORWARD', 'STOCK_GO_UP', 'STOCK_HARDDISK',
'STOCK_HELP', 'STOCK_HOME', 'STOCK_INDENT', 'STOCK_INDEX', 'STOCK_ITALIC',
'STOCK_JUMP_TO', 'STOCK_JUSTIFY_CENTER', 'STOCK_JUSTIFY_FILL',
'STOCK_JUSTIFY_LEFT', 'STOCK_JUSTIFY_RIGHT', 'STOCK_MEDIA_FORWARD',
'STOCK_MEDIA_NEXT', 'STOCK_MEDIA_PAUSE', 'STOCK_MEDIA_PLAY',
'STOCK_MEDIA_PREVIOUS', 'STOCK_MEDIA_RECORD', 'STOCK_MEDIA_REWIND',
'STOCK_MEDIA_STOP', 'STOCK_MISSING_IMAGE', 'STOCK_NETWORK', 'STOCK_NEW',
'STOCK_NO', 'STOCK_OK', 'STOCK_OPEN', 'STOCK_PASTE', 'STOCK_PREFERENCES',
'STOCK_PRINT', 'STOCK_PRINT_PREVIEW', 'STOCK_PROPERTIES', 'STOCK_QUIT',
'STOCK_REDO', 'STOCK_REFRESH', 'STOCK_REMOVE', 'STOCK_REVERT_TO_SAVED',
'STOCK_SAVE', 'STOCK_SAVE_AS', 'STOCK_SELECT_COLOR', 'STOCK_SELECT_FONT',
'STOCK_SORT_ASCENDING', 'STOCK_SORT_DESCENDING', 'STOCK_SPELL_CHECK',
'STOCK_STOP', 'STOCK_STRIKETHROUGH', 'STOCK_UNDELETE', 'STOCK_UNDERLINE',
'STOCK_UNDO', 'STOCK_UNINDENT', 'STOCK_YES', 'STOCK_ZOOM_100',
'STOCK_ZOOM_FIT', 'STOCK_ZOOM_IN', 'STOCK_ZOOM_OUT',
'terp-account', 'terp-crm', 'terp-mrp', 'terp-product', 'terp-purchase',
'terp-sale', 'terp-tools', 'terp-administration', 'terp-hr', 'terp-partner',
'terp-project', 'terp-report', 'terp-stock', 'terp-calendar', 'terp-graph',
'terp-check','terp-go-month','terp-go-year','terp-go-today','terp-document-new','terp-camera_test',
'terp-emblem-important','terp-gtk-media-pause','terp-gtk-stop','terp-gnome-cpu-frequency-applet+',
'terp-dialog-close','terp-gtk-jump-to-rtl','terp-gtk-jump-to-ltr','terp-accessories-archiver',
'terp-stock_align_left_24','terp-stock_effects-object-colorize','terp-go-home','terp-gtk-go-back-rtl',
'terp-gtk-go-back-ltr','terp-personal','terp-personal-','terp-personal+','terp-accessories-archiver-minus',
'terp-accessories-archiver+','terp-stock_symbol-selection','terp-call-start','terp-dolar',
'terp-face-plain','terp-folder-blue','terp-folder-green','terp-folder-orange','terp-folder-yellow',
'terp-gdu-smart-failing','terp-go-week','terp-gtk-select-all','terp-locked','terp-mail-forward',
'terp-mail-message-new','terp-mail-replied','terp-rating-rated','terp-stage','terp-stock_format-scientific',
'terp-dolar_ok!','terp-idea','terp-stock_format-default','terp-mail-','terp-mail_delete'
]
|
|
|
|
|
|
|
|
def icons(*a, **kw):
    """Return the selectable icons as (value, label) pairs.

    Arguments are accepted but ignored, for API compatibility.
    """
    return [(name, name) for name in __icons_list]
|
2008-09-16 12:26:07 +00:00
|
|
|
|
2009-01-22 13:22:38 +00:00
|
|
|
def extract_zip_file(zip_file, outdirectory):
    """Extract the archive *zip_file* (path or file-like object) below
    *outdirectory*, creating intermediate directories as needed.

    Entries whose path ends with the directory separator are treated as
    directories and not written as files.

    SECURITY NOTE: member names are joined to outdirectory as-is; an
    archive containing '..' components can escape outdirectory (zip-slip).
    Only use on trusted archives.
    """
    zf = zipfile.ZipFile(zip_file, 'r')
    try:
        out = outdirectory
        for path in zf.namelist():
            tgt = os.path.join(out, path)
            tgtdir = os.path.dirname(tgt)
            if not os.path.exists(tgtdir):
                os.makedirs(tgtdir)

            if not tgt.endswith(os.sep):
                # close the target file even if the write fails
                fp = open(tgt, 'wb')
                try:
                    fp.write(zf.read(path))
                finally:
                    fp.close()
    finally:
        # previously the archive (and a failed member file) stayed open on
        # error; always release the handles
        zf.close()
|
|
|
|
|
2009-11-17 07:20:14 +00:00
|
|
|
def detect_ip_addr():
    """Try a very crude method to figure out a valid external
       IP or hostname for the current machine. Don't rely on this
       for binding to an interface, but it could be used as basis
       for constructing a remote URL to the server.
    """
    def _detect_ip_addr():
        from array import array
        from struct import pack, unpack

        # fcntl is only available on UNIX; its absence selects the
        # hostname-based fallback below
        try:
            import fcntl
        except ImportError:
            fcntl = None

        ip_addr = None

        if not fcntl: # not UNIX:
            host = socket.gethostname()
            ip_addr = socket.gethostbyname(host)
        else: # UNIX:
            # get all interfaces:
            nbytes = 128 * 32
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            names = array('B', '\0' * nbytes)
            #print 'names: ', names
            # ioctl 0x8912 (SIOCGIFCONF on Linux) fills `names` with the
            # interface configuration records and returns the byte count
            outbytes = unpack('iL', fcntl.ioctl( s.fileno(), 0x8912, pack('iL', nbytes, names.buffer_info()[0])))[0]
            namestr = names.tostring()

            # try 64 bit kernel:
            # records are assumed 40 bytes wide; name in the first 16 bytes,
            # IPv4 address at offset 20 -- TODO confirm on target kernels
            for i in range(0, outbytes, 40):
                name = namestr[i:i+16].split('\0', 1)[0]
                if name != 'lo':
                    ip_addr = socket.inet_ntoa(namestr[i+20:i+24])
                    break

            # try 32 bit kernel:
            # records assumed 32 bytes wide; query each non-loopback
            # interface with ioctl 0x8915 (SIOCGIFADDR on Linux)
            if ip_addr is None:
                ifaces = filter(None, [namestr[i:i+32].split('\0', 1)[0] for i in range(0, outbytes, 32)])

                for ifname in [iface for iface in ifaces if iface != 'lo']:
                    ip_addr = socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, pack('256s', ifname[:15]))[20:24])
                    break

        return ip_addr or 'localhost'

    # any failure (no network, odd platform) degrades to 'localhost'
    try:
        ip_addr = _detect_ip_addr()
    except Exception:
        ip_addr = 'localhost'
    return ip_addr
|
2008-09-16 12:26:07 +00:00
|
|
|
|
2010-01-05 15:23:12 +00:00
|
|
|
# RATIONALE BEHIND TIMESTAMP CALCULATIONS AND TIMEZONE MANAGEMENT:
#  The server side never does any timestamp calculation, always
#  sends them in a naive (timezone agnostic) format supposed to be
#  expressed within the server timezone, and expects the clients to
#  provide timestamps in the server timezone as well.
#  It stores all timestamps in the database in naive format as well,
#  which also expresses the time in the server timezone.
#  For this reason the server makes its timezone name available via the
#  common/timezone_get() rpc method, which clients need to read
#  to know the appropriate time offset to use when reading/writing
#  times.
def get_win32_timezone():
    """Attempt to return the "standard name" of the current timezone on a win32 system.
       @return: the standard name of the current win32 timezone, or False if it cannot be found.
    """
    res = False
    if (sys.platform == "win32"):
        try:
            import _winreg
            hklm = _winreg.ConnectRegistry(None,_winreg.HKEY_LOCAL_MACHINE)
            current_tz_key = _winreg.OpenKey(hklm, r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation", 0,_winreg.KEY_ALL_ACCESS)
            res = str(_winreg.QueryValueEx(current_tz_key,"StandardName")[0]) # [0] is value, [1] is type code
            _winreg.CloseKey(current_tz_key)
            _winreg.CloseKey(hklm)
        except Exception:
            # no registry access or unexpected layout: report failure (False)
            pass
    return res
|
|
|
|
|
|
|
|
def detect_server_timezone():
    """Attempt to detect the timezone to use on the server side.
       Defaults to UTC if no working timezone can be found.
       @return: the timezone identifier as expected by pytz.timezone.
    """
    try:
        import pytz
    except Exception:
        loglevels.Logger().notifyChannel("detect_server_timezone", loglevels.LOG_WARNING,
            "Python pytz module is not available. Timezone will be set to UTC by default.")
        return 'UTC'

    # Option 1: the configuration option (did not exist before, so no backwards compatibility issue)
    # Option 2: to be backwards compatible with 5.0 or earlier, the value from time.tzname[0], but only if it is known to pytz
    # Option 3: the environment variable TZ
    sources = [ (config['timezone'], 'OpenERP configuration'),
                (time.tzname[0], 'time.tzname'),
                (os.environ.get('TZ',False),'TZ environment variable'), ]
    # Option 4: OS-specific: /etc/timezone on Unix
    if (os.path.exists("/etc/timezone")):
        tz_value = False
        try:
            # Bug fix: open() used to be inside the same try whose `finally`
            # called f.close(); when open() itself failed (race with the
            # existence check, permissions), `f` was unbound and the finally
            # clause raised an uncaught NameError. The file handle is now
            # closed in a nested try/finally that only runs after a
            # successful open().
            f = open("/etc/timezone")
            try:
                tz_value = f.read(128).strip()
            finally:
                f.close()
        except Exception:
            pass
        sources.append((tz_value,"/etc/timezone file"))
    # Option 5: timezone info from registry on Win32
    if (sys.platform == "win32"):
        # Timezone info is stored in windows registry.
        # However this is not likely to work very well as the standard name
        # of timezones in windows is rarely something that is known to pytz.
        # But that's ok, it is always possible to use a config option to set
        # it explicitly.
        sources.append((get_win32_timezone(),"Windows Registry"))

    # Use the first candidate that pytz recognizes as a valid timezone.
    for (value,source) in sources:
        if value:
            try:
                tz = pytz.timezone(value)
                loglevels.Logger().notifyChannel("detect_server_timezone", loglevels.LOG_INFO,
                    "Using timezone %s obtained from %s." % (tz.zone,source))
                return value
            except pytz.UnknownTimeZoneError:
                loglevels.Logger().notifyChannel("detect_server_timezone", loglevels.LOG_WARNING,
                    "The timezone specified in %s (%s) is invalid, ignoring it." % (source,value))

    loglevels.Logger().notifyChannel("detect_server_timezone", loglevels.LOG_WARNING,
        "No valid timezone could be detected, using default UTC timezone. You can specify it explicitly with option 'timezone' in the server configuration.")
    return 'UTC'
|
|
|
|
|
2010-07-27 15:22:11 +00:00
|
|
|
def get_server_timezone():
    """Return the server timezone name, detecting and caching it in the
    configuration on first use.
    """
    # timezone detection is safe in multithread, so lazy init is ok here
    tz = config['timezone']
    if not tz:
        tz = detect_server_timezone()
        config['timezone'] = tz
    return tz
|
|
|
|
|
|
|
|
|
|
|
|
# Canonical format strings for the naive date/time values exchanged with
# clients and stored in the database (see the timezone rationale above).
DEFAULT_SERVER_DATE_FORMAT = "%Y-%m-%d"
DEFAULT_SERVER_TIME_FORMAT = "%H:%M:%S"
# "date time", e.g. "2011-01-31 23:59:59"
DEFAULT_SERVER_DATETIME_FORMAT = "%s %s" % (
    DEFAULT_SERVER_DATE_FORMAT,
    DEFAULT_SERVER_TIME_FORMAT)
|
2010-07-27 15:22:11 +00:00
|
|
|
|
2011-01-12 16:06:08 +00:00
|
|
|
# Python's strftime supports only the format directives
# that are available on the platform's libc, so in order to
# be cross-platform we map to the directives required by
# the C standard (1989 version), always available on platforms
# with a C standard implementation.
# Keys are non-portable directives; values are their portable
# replacements (an empty string means "strip the directive").
DATETIME_FORMATS_MAP = {
        '%C': '', # century
        '%D': '%m/%d/%Y', # modified %y->%Y
        '%e': '%d',
        '%E': '', # special modifier
        '%F': '%Y-%m-%d',
        '%g': '%Y', # modified %y->%Y
        '%G': '%Y',
        '%h': '%b',
        '%k': '%H',
        '%l': '%I',
        '%n': '\n',
        '%O': '', # special modifier
        '%P': '%p',
        '%R': '%H:%M',
        '%r': '%I:%M:%S %p',
        '%s': '', #num of seconds since epoch
        '%T': '%H:%M:%S',
        '%t': ' ', # tab
        '%u': ' %w',
        '%V': '%W',
        '%y': '%Y', # Even if %y works, it's ambiguous, so we should use %Y
        '%+': '%Y-%m-%d %H:%M:%S',

        # %Z is a special case that causes 2 problems at least:
        #  - the timezone names we use (in res_user.context_tz) come
        #    from pytz, but not all these names are recognized by
        #    strptime(), so we cannot convert in both directions
        #    when such a timezone is selected and %Z is in the format
        #  - %Z is replaced by an empty string in strftime() when
        #    there is not tzinfo in a datetime value (e.g when the user
        #    did not pick a context_tz). The resulting string does not
        #    parse back if the format requires %Z.
        # As a consequence, we strip it completely from format strings.
        # The user can always have a look at the context_tz in
        # preferences to check the timezone.
        '%z': '',
        '%Z': '',
}
|
|
|
|
|
2010-07-27 15:22:11 +00:00
|
|
|
def server_to_local_timestamp(src_tstamp_str, src_format, dst_format, dst_tz_name,
                              tz_offset=True, ignore_unparsable_time=True):
    """
    Convert a source timestamp string into a destination timestamp string,
    attempting to apply the correct offset if both the server and local
    timezone are recognized, or no offset at all if they aren't or if
    tz_offset is false (i.e. assuming they are both in the same TZ).

    WARNING: This method is here to allow formatting dates correctly for
    inclusion in strings where the client would not be able to format/offset
    it correctly. DO NOT use it for returning date fields directly, these
    are supposed to be handled by the client!!

    @param src_tstamp_str: the str value containing the timestamp in the server timezone.
    @param src_format: the format to use when parsing the server timestamp.
    @param dst_format: the format to use when formatting the resulting timestamp for the local/client timezone.
    @param dst_tz_name: name of the destination timezone (such as the 'tz' value of the client context)
    @param tz_offset: if True (and dst_tz_name is set), attempt the timezone offset;
                      otherwise only reformat from src_format to dst_format.
    @param ignore_unparsable_time: if True, return src_tstamp_str unchanged when it
                                   cannot be parsed using src_format or formatted using
                                   dst_format; if False, return False in that case.

    @return: local/client formatted timestamp, expressed in the local/client timezone if possible
             and if tz_offset is true, or src_tstamp_str if timezone offset could not be determined.
    """
    if not src_tstamp_str:
        return False

    res = src_tstamp_str
    if src_format and dst_format:
        # find out server timezone
        server_tz = get_server_timezone()
        try:
            # dt_value needs to be a datetime.datetime object (so no time.struct_time or mx.DateTime.DateTime here!)
            dt_value = datetime.strptime(src_tstamp_str, src_format)
            if tz_offset and dst_tz_name:
                try:
                    # pytz is an optional dependency here: if it is missing, or
                    # if either timezone name is not recognized, silently skip
                    # the offset and keep the naive reformatting below.
                    import pytz
                    src_tz = pytz.timezone(server_tz)
                    dst_tz = pytz.timezone(dst_tz_name)
                    src_dt = src_tz.localize(dt_value, is_dst=True)
                    dt_value = src_dt.astimezone(dst_tz)
                except Exception:
                    pass
            res = dt_value.strftime(dst_format)
        except Exception:
            # Normal ways to end up here are if strptime or strftime failed
            if not ignore_unparsable_time:
                return False
    return res
|
|
|
|
|
2008-09-16 12:26:07 +00:00
|
|
|
|
2010-03-30 17:26:03 +00:00
|
|
|
def split_every(n, iterable, piece_maker=tuple):
    """Split *iterable* into pieces of length ``n``.

    The last piece will be shorter if ``n`` does not evenly divide the
    iterable's length.

    @param piece_maker: callable building each piece from a slice of the
                        iterator (tuple, list, ...)
    """
    source = iter(iterable)
    while True:
        chunk = piece_maker(islice(source, n))
        if not chunk:
            # Exhausted: an empty piece means there is nothing left to yield.
            break
        yield chunk
|
|
|
|
|
2008-06-03 11:14:02 +00:00
|
|
|
if __name__ == '__main__':
    # When this module is executed directly, run its doctests.
    import doctest
    doctest.testmod()
|
|
|
|
|
2010-05-05 13:31:47 +00:00
|
|
|
class upload_data_thread(threading.Thread):
    """Background worker posting survey data to openerp.com.

    The upload is strictly best-effort: any failure (no network, server
    unreachable, ...) is swallowed so the caller is never disturbed.
    """

    def __init__(self, email, data, type):
        # Pre-build the POST fields; the actual request happens in run().
        self.args = [('email', email), ('type', type), ('data', data)]
        super(upload_data_thread, self).__init__()

    def run(self):
        try:
            import urllib
            payload = urllib.urlencode(self.args)
            response = urllib.urlopen('http://www.openerp.com/scripts/survey.php', payload)
            response.read()
            response.close()
        except Exception:
            # Fire-and-forget: deliberately ignore every error.
            pass
|
2008-06-03 11:14:02 +00:00
|
|
|
|
2010-05-05 13:31:47 +00:00
|
|
|
def upload_data(email, data, type='SURVEY'):
    """Upload *data* for *email* in a background thread.

    Returns True immediately without waiting for the upload to finish.
    """
    worker = upload_data_thread(email, data, type)
    worker.start()
    return True
|
2010-09-24 10:15:45 +00:00
|
|
|
|
|
|
|
|
2010-11-09 16:50:14 +00:00
|
|
|
# port of python 2.6's attrgetter with support for dotted notation
|
2010-09-24 10:15:45 +00:00
|
|
|
def resolve_attr(obj, attr):
    """Follow the dotted attribute path *attr* starting from *obj*.

    Each dot-separated component is resolved with getattr(); the final
    value is returned.  AttributeError propagates if any step is missing.
    """
    current = obj
    for component in attr.split("."):
        current = getattr(current, component)
    return current
|
|
|
|
|
|
|
|
def attrgetter(*items):
    """Return a callable fetching the given (possibly dotted) attribute(s).

    With a single name the callable returns that attribute's value; with
    several names it returns a tuple of values — mirroring Python 2.6's
    operator.attrgetter, including support for dotted paths.
    """
    if len(items) == 1:
        single = items[0]
        def g(obj):
            return resolve_attr(obj, single)
    else:
        def g(obj):
            return tuple(resolve_attr(obj, name) for name in items)
    return g
|
|
|
|
|
|
|
|
|
2008-07-23 15:01:27 +00:00
|
|
|
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
|
|
|
|