bitbake: Sync a load of whitespace and other non-functionality changes with bitbake upstream

Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
This commit is contained in:
Richard Purdie 2011-01-01 23:55:54 +00:00
parent e8c48e668c
commit 0090a798eb
12 changed files with 87 additions and 85 deletions

View File

@ -43,7 +43,7 @@ except ImportError:
logger.info("Importing cPickle failed. " logger.info("Importing cPickle failed. "
"Falling back to a very slow implementation.") "Falling back to a very slow implementation.")
__cache_version__ = "133" __cache_version__ = "134"
recipe_fields = ( recipe_fields = (
'pn', 'pn',
@ -100,19 +100,20 @@ class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
def taskvar(cls, var, tasks, metadata): def taskvar(cls, var, tasks, metadata):
return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata)) return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
for task in tasks) for task in tasks)
@classmethod @classmethod
def getvar(cls, var, metadata): def getvar(cls, var, metadata):
return metadata.getVar(var, True) or '' return metadata.getVar(var, True) or ''
@classmethod @classmethod
def from_metadata(cls, filename, metadata): def from_metadata(cls, filename, metadata):
tasks = metadata.getVar('__BBTASKS', False)
pn = cls.getvar('PN', metadata) pn = cls.getvar('PN', metadata)
packages = cls.listvar('PACKAGES', metadata) packages = cls.listvar('PACKAGES', metadata)
if not pn in packages: if not pn in packages:
packages.append(pn) packages.append(pn)
tasks = metadata.getVar('__BBTASKS', False)
return RecipeInfo( return RecipeInfo(
tasks = tasks, tasks = tasks,
basetaskhashes = cls.taskvar('BB_BASEHASH', tasks, metadata), basetaskhashes = cls.taskvar('BB_BASEHASH', tasks, metadata),
@ -463,6 +464,7 @@ class Cache(object):
""" """
Save data we need into the cache Save data we need into the cache
""" """
realfn = self.virtualfn2realfn(file_name)[0] realfn = self.virtualfn2realfn(file_name)[0]
info = RecipeInfo.from_metadata(realfn, data) info = RecipeInfo.from_metadata(realfn, data)
self.add_info(file_name, info, cacheData, parsed) self.add_info(file_name, info, cacheData, parsed)
@ -612,7 +614,6 @@ class CacheData(object):
self.possible_world.append(fn) self.possible_world.append(fn)
self.hashfn[fn] = info.hashfilename self.hashfn[fn] = info.hashfilename
for task, taskhash in info.basetaskhashes.iteritems(): for task, taskhash in info.basetaskhashes.iteritems():
identifier = '%s.%s' % (fn, task) identifier = '%s.%s' % (fn, task)
self.basetaskhash[identifier] = taskhash self.basetaskhash[identifier] = taskhash

View File

@ -1,16 +1,20 @@
from bb.pysh import pyshyacc, pyshlex
from itertools import chain
from bb import msg, utils
import ast import ast
import codegen import codegen
import logging
import os.path
import bb.utils, bb.data
from itertools import chain
from bb.pysh import pyshyacc, pyshlex
logger = logging.getLogger('BitBake.CodeParser')
PARSERCACHE_VERSION = 2 PARSERCACHE_VERSION = 2
try: try:
import cPickle as pickle import cPickle as pickle
except ImportError: except ImportError:
import pickle import pickle
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.") logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
def check_indent(codestr): def check_indent(codestr):
"""If the code is indented, add a top level piece of code to 'remove' the indentation""" """If the code is indented, add a top level piece of code to 'remove' the indentation"""
@ -23,7 +27,7 @@ def check_indent(codestr):
return codestr return codestr
if codestr[i-1] is " " or codestr[i-1] is " ": if codestr[i-1] is " " or codestr[i-1] is " ":
return "if 1:\n" + codestr return "if 1:\n" + codestr
return codestr return codestr
@ -31,15 +35,18 @@ pythonparsecache = {}
shellparsecache = {} shellparsecache = {}
def parser_cachefile(d): def parser_cachefile(d):
cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True) cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
bb.data.getVar("CACHE", d, True))
if cachedir in [None, '']: if cachedir in [None, '']:
return None return None
bb.utils.mkdirhier(cachedir) bb.utils.mkdirhier(cachedir)
cachefile = os.path.join(cachedir, "bb_codeparser.dat") cachefile = os.path.join(cachedir, "bb_codeparser.dat")
bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s' for codeparser cache" % cachefile) logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
return cachefile return cachefile
def parser_cache_init(d): def parser_cache_init(d):
global pythonparsecache
global shellparsecache
cachefile = parser_cachefile(d) cachefile = parser_cachefile(d)
if not cachefile: if not cachefile:
@ -54,17 +61,16 @@ def parser_cache_init(d):
if version != PARSERCACHE_VERSION: if version != PARSERCACHE_VERSION:
return return
bb.codeparser.pythonparsecache = data[0] pythonparsecache = data[0]
bb.codeparser.shellparsecache = data[1] shellparsecache = data[1]
def parser_cache_save(d): def parser_cache_save(d):
cachefile = parser_cachefile(d) cachefile = parser_cachefile(d)
if not cachefile: if not cachefile:
return return
p = pickle.Pickler(file(cachefile, "wb"), -1) p = pickle.Pickler(file(cachefile, "wb"), -1)
p.dump([[bb.codeparser.pythonparsecache, bb.codeparser.shellparsecache], PARSERCACHE_VERSION]) p.dump([[pythonparsecache, shellparsecache], PARSERCACHE_VERSION])
class PythonParser(): class PythonParser():
class ValueVisitor(): class ValueVisitor():
@ -129,10 +135,10 @@ class PythonParser():
funcstr = codegen.to_source(func) funcstr = codegen.to_source(func)
argstr = codegen.to_source(arg) argstr = codegen.to_source(arg)
except TypeError: except TypeError:
msg.debug(2, None, "Failed to convert function and argument to source form") logger.debug(2, 'Failed to convert function and argument to source form')
else: else:
msg.debug(1, None, "Warning: in call to '%s', argument '%s' is not a literal" % logger.debug(1, "Warning: in call to '%s', argumen t'%s' is"
(funcstr, argstr)) "not a literal", funcstr, argstr)
def visit_Call(self, node): def visit_Call(self, node):
if self.compare_name(self.getvars, node.func): if self.compare_name(self.getvars, node.func):
@ -184,7 +190,7 @@ class PythonParser():
self.execs = pythonparsecache[h]["execs"] self.execs = pythonparsecache[h]["execs"]
return return
code = compile(check_indent(str(node)), "<string>", "exec", code = compile(check_indent(str(node)), "<string>", "exec",
ast.PyCF_ONLY_AST) ast.PyCF_ONLY_AST)
visitor = self.ValueVisitor(code) visitor = self.ValueVisitor(code)
@ -319,11 +325,11 @@ class ShellParser():
cmd = word[1] cmd = word[1]
if cmd.startswith("$"): if cmd.startswith("$"):
msg.debug(1, None, "Warning: execution of non-literal command '%s'" % cmd) logger.debug(1, "Warning: execution of non-literal"
"command '%s'", cmd)
elif cmd == "eval": elif cmd == "eval":
command = " ".join(word for _, word in words[1:]) command = " ".join(word for _, word in words[1:])
self.parse_shell(command) self.parse_shell(command)
else: else:
self.allexecs.add(cmd) self.allexecs.add(cmd)
break break

View File

@ -1,3 +1,4 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et # ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
# #
@ -483,10 +484,9 @@ class BBCooker:
except (IOError, bb.parse.ParseError) as exc: except (IOError, bb.parse.ParseError) as exc:
parselog.critical("Unable to parse %s: %s" % (f, exc)) parselog.critical("Unable to parse %s: %s" % (f, exc))
sys.exit(1) sys.exit(1)
data = self.configuration.data
bb.parse.init_parser(data, self.configuration.dump_signatures) data = self.configuration.data
bb.parse.init_parser(data)
for f in files: for f in files:
data = _parse(f, data) data = _parse(f, data)
@ -526,9 +526,7 @@ class BBCooker:
if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None: if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None:
bb.fetch.fetcher_init(self.configuration.data) bb.fetch.fetcher_init(self.configuration.data)
bb.codeparser.parser_cache_init(self.configuration.data) bb.codeparser.parser_cache_init(self.configuration.data)
bb.parse.init_parser(data)
bb.parse.init_parser(data, self.configuration.dump_signatures)
bb.event.fire(bb.event.ConfigParsed(), self.configuration.data) bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
def handleCollections( self, collections ): def handleCollections( self, collections ):
@ -1043,7 +1041,6 @@ class CookerParser(object):
self.shutdown(clean=False) self.shutdown(clean=False)
bb.fatal('Error parsing %s: %s' % (exc.recipe, exc)) bb.fatal('Error parsing %s: %s' % (exc.recipe, exc))
self.current += 1 self.current += 1
self.virtuals += len(result) self.virtuals += len(result)
if parsed: if parsed:

View File

@ -259,7 +259,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
for key in keys: for key in keys:
emit_var(key, o, d, False) and o.write('\n') emit_var(key, o, d, False) and o.write('\n')
emit_var(func, o, d, False) and o.write('\n') emit_var(func, o, d, False) and o.write('\n')
newdeps = bb.codeparser.ShellParser().parse_shell(d.getVar(func, True)) newdeps = bb.codeparser.ShellParser().parse_shell(d.getVar(func, True))
seen = set() seen = set()
while newdeps: while newdeps:
@ -299,7 +299,7 @@ def build_dependencies(key, keys, shelldeps, d):
deps |= set((d.getVarFlag(key, "vardeps", True) or "").split()) deps |= set((d.getVarFlag(key, "vardeps", True) or "").split())
deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split()) deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
except: except:
bb.note("Error expanding variable %s" % key) bb.note("Error expanding variable %s" % key)
raise raise
return deps return deps
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs))) #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))

View File

@ -28,10 +28,10 @@ BitBake build tools.
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig # Based on functions from the base bb module, Copyright 2003 Holger Schurig
import copy, re, sys import copy, re
from collections import MutableMapping from collections import MutableMapping
import logging import logging
import bb import bb, bb.codeparser
from bb import utils from bb import utils
from bb.COW import COWDictBase from bb.COW import COWDictBase
@ -42,6 +42,7 @@ __setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<
__expand_var_regexp__ = re.compile(r"\${[^{}]+}") __expand_var_regexp__ = re.compile(r"\${[^{}]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}") __expand_python_regexp__ = re.compile(r"\${@.+?}")
class VariableParse: class VariableParse:
def __init__(self, varname, d, val = None): def __init__(self, varname, d, val = None):
self.varname = varname self.varname = varname
@ -72,11 +73,11 @@ class VariableParse:
self.references |= parser.references self.references |= parser.references
self.execs |= parser.execs self.execs |= parser.execs
value = utils.better_eval(codeobj, DataDict(self.d)) value = utils.better_eval(codeobj, DataContext(self.d))
return str(value) return str(value)
class DataDict(dict): class DataContext(dict):
def __init__(self, metadata, **kwargs): def __init__(self, metadata, **kwargs):
self.metadata = metadata self.metadata = metadata
dict.__init__(self, **kwargs) dict.__init__(self, **kwargs)
@ -129,7 +130,7 @@ class DataSmart(MutableMapping):
def expand(self, s, varname): def expand(self, s, varname):
return self.expandWithRefs(s, varname).value return self.expandWithRefs(s, varname).value
def finalize(self): def finalize(self):
"""Performs final steps upon the datastore, including application of overrides""" """Performs final steps upon the datastore, including application of overrides"""
@ -291,7 +292,7 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var) self._makeShadowCopy(var)
self.dict[var][flag] = flagvalue self.dict[var][flag] = flagvalue
def getVarFlag(self, var, flag, expand = False): def getVarFlag(self, var, flag, expand=False):
local_var = self._findVar(var) local_var = self._findVar(var)
value = None value = None
if local_var: if local_var:
@ -374,7 +375,7 @@ class DataSmart(MutableMapping):
value = self.getVar(variable, False) value = self.getVar(variable, False)
for key in keys: for key in keys:
referrervalue = self.getVar(key, False) referrervalue = self.getVar(key, False)
if ref in referrervalue: if referrervalue and ref in referrervalue:
self.setVar(key, referrervalue.replace(ref, value)) self.setVar(key, referrervalue.replace(ref, value))
def localkeys(self): def localkeys(self):

View File

@ -343,6 +343,7 @@ class CacheLoadCompleted(Event):
self.total = total self.total = total
self.num_entries = num_entries self.num_entries = num_entries
class DepTreeGenerated(Event): class DepTreeGenerated(Event):
""" """
Event when a dependency tree has been generated Event when a dependency tree has been generated
@ -384,4 +385,3 @@ class LogHandler(logging.Handler):
fire(record, None) fire(record, None)
if bb.event.useStdout: if bb.event.useStdout:
print(self.format(record)) print(self.format(record))

View File

@ -93,12 +93,10 @@ domain = _NamedTuple("Domain", (
"RunQueue", "RunQueue",
"TaskData", "TaskData",
"Util")) "Util"))
logger = logging.getLogger("BitBake") logger = logging.getLogger("BitBake")
loggers = Loggers() loggers = Loggers()
debug_level = DebugLevel() debug_level = DebugLevel()
#
# Message control functions # Message control functions
# #
@ -191,4 +189,3 @@ def fatal(msgdomain, msg):
else: else:
loggers[msgdomain].critical(msg) loggers[msgdomain].critical(msg)
sys.exit(1) sys.exit(1)

View File

@ -31,8 +31,7 @@ import logging
import bb import bb
import bb.utils import bb.utils
import bb.siggen import bb.siggen
import bb.utils
logger = logging.getLogger("BitBake.Parsing") logger = logging.getLogger("BitBake.Parsing")
class ParseError(Exception): class ParseError(Exception):
@ -85,8 +84,8 @@ def init(fn, data):
if h['supports'](fn): if h['supports'](fn):
return h['init'](data) return h['init'](data)
def init_parser(d, dumpsigs): def init_parser(d):
bb.parse.siggen = bb.siggen.init(d, dumpsigs) bb.parse.siggen = bb.siggen.init(d)
def resolve_file(fn, d): def resolve_file(fn, d):
if not os.path.isabs(fn): if not os.path.isabs(fn):

View File

@ -649,7 +649,6 @@ def p_error(p):
try: try:
import pyshtables import pyshtables
except ImportError: except ImportError:
import os
outputdir = os.path.dirname(__file__) outputdir = os.path.dirname(__file__)
if not os.access(outputdir, os.W_OK): if not os.access(outputdir, os.W_OK):
outputdir = '' outputdir = ''

View File

@ -992,7 +992,7 @@ class RunQueue:
if self.state is runQueueComplete: if self.state is runQueueComplete:
# All done # All done
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)) logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
return False return False
if self.state is runQueueChildProcess: if self.state is runQueueChildProcess:
@ -1114,7 +1114,6 @@ class RunQueueExecute:
sys.stdout.flush() sys.stdout.flush()
sys.stderr.flush() sys.stderr.flush()
try: try:
pipeinfd, pipeoutfd = os.pipe() pipeinfd, pipeoutfd = os.pipe()
pipein = os.fdopen(pipeinfd, 'rb', 4096) pipein = os.fdopen(pipeinfd, 'rb', 4096)
@ -1125,6 +1124,7 @@ class RunQueueExecute:
bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror)) bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror))
if pid == 0: if pid == 0:
pipein.close() pipein.close()
# Save out the PID so that the event can include it the # Save out the PID so that the event can include it the
# events # events
bb.event.worker_pid = os.getpid() bb.event.worker_pid = os.getpid()
@ -1180,9 +1180,10 @@ class RunQueueExecuteDummy(RunQueueExecute):
def __init__(self, rq): def __init__(self, rq):
self.rq = rq self.rq = rq
self.stats = RunQueueStats(0) self.stats = RunQueueStats(0)
def finish(self): def finish(self):
self.rq.state = runQueueComplete self.rq.state = runQueueComplete
return return
class RunQueueExecuteTasks(RunQueueExecute): class RunQueueExecuteTasks(RunQueueExecute):
def __init__(self, rq): def __init__(self, rq):
@ -1211,7 +1212,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.rq.scenequeue_covered.add(task) self.rq.scenequeue_covered.add(task)
found = True found = True
bb.debug(1, "Full skip list %s" % self.rq.scenequeue_covered) logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
for task in self.rq.scenequeue_covered: for task in self.rq.scenequeue_covered:
self.task_skip(task) self.task_skip(task)
@ -1221,7 +1222,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
for scheduler in self.rqdata.schedulers: for scheduler in self.rqdata.schedulers:
if self.scheduler == scheduler.name: if self.scheduler == scheduler.name:
self.sched = scheduler(self, self.rqdata) self.sched = scheduler(self, self.rqdata)
logger.debug(1, "Using runqueue scheduler '%s'" % scheduler.name) logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
break break
else: else:
bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
@ -1247,7 +1248,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.runq_buildable[revdep] = 1 self.runq_buildable[revdep] = 1
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]] fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
taskname = self.rqdata.runq_task[revdep] taskname = self.rqdata.runq_task[revdep]
logger.debug(1, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname)) logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
def task_complete(self, task): def task_complete(self, task):
self.stats.taskCompleted() self.stats.taskCompleted()
@ -1295,7 +1296,8 @@ class RunQueueExecuteTasks(RunQueueExecute):
taskdep = self.rqdata.dataCache.task_deps[fn] taskdep = self.rqdata.dataCache.task_deps[fn]
if 'noexec' in taskdep and taskname in taskdep['noexec']: if 'noexec' in taskdep and taskname in taskdep['noexec']:
startevent = runQueueTaskStarted(task, self.stats, self.rq, noexec=True) startevent = runQueueTaskStarted(task, self.stats, self.rq,
noexec=True)
bb.event.fire(startevent, self.cfgData) bb.event.fire(startevent, self.cfgData)
self.runq_running[task] = 1 self.runq_running[task] = 1
self.stats.taskActive() self.stats.taskActive()
@ -1328,11 +1330,11 @@ class RunQueueExecuteTasks(RunQueueExecute):
# Sanity Checks # Sanity Checks
for task in xrange(self.stats.total): for task in xrange(self.stats.total):
if self.runq_buildable[task] == 0: if self.runq_buildable[task] == 0:
logger.error("Task %s never buildable!" % task) logger.error("Task %s never buildable!", task)
if self.runq_running[task] == 0: if self.runq_running[task] == 0:
logger.error("Task %s never ran!" % task) logger.error("Task %s never ran!", task)
if self.runq_complete[task] == 0: if self.runq_complete[task] == 0:
logger.error("Task %s never completed!" % task) logger.error("Task %s never completed!", task)
self.rq.state = runQueueComplete self.rq.state = runQueueComplete
return True return True
@ -1478,7 +1480,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
""" """
index = self.rqdata.runq_setscene[task] index = self.rqdata.runq_setscene[task]
logger.debug(1, "Found task %s could be accelerated" % self.rqdata.get_user_idstring(index)) logger.debug(1, 'Found task %s which could be accelerated',
self.rqdata.get_user_idstring(index))
self.scenequeue_covered.add(task) self.scenequeue_covered.add(task)
self.scenequeue_updatecounters(task) self.scenequeue_updatecounters(task)

View File

@ -1,37 +1,47 @@
import hashlib import hashlib
import logging
import re import re
logger = logging.getLogger('BitBake.SigGen')
try: try:
import cPickle as pickle import cPickle as pickle
except ImportError: except ImportError:
import pickle import pickle
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.") logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
def init(d, dumpsigs): def init(d):
siggens = [obj for obj in globals().itervalues() siggens = [obj for obj in globals().itervalues()
if type(obj) is type and issubclass(obj, SignatureGenerator)] if type(obj) is type and issubclass(obj, SignatureGenerator)]
desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop" desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
for sg in siggens: for sg in siggens:
if desired == sg.name: if desired == sg.name:
return sg(d, dumpsigs) return sg(d)
break break
else: else:
bb.error("Invalid signature generator '%s', using default 'noop' generator" % desired) logger.error("Invalid signature generator '%s', using default 'noop'\n"
bb.error("Available generators: %s" % ", ".join(obj.name for obj in siggens)) "Available generators: %s",
return SignatureGenerator(d, dumpsigs) ', '.join(obj.name for obj in siggens))
return SignatureGenerator(d)
class SignatureGenerator(object): class SignatureGenerator(object):
""" """
""" """
name = "noop" name = "noop"
def __init__(self, data, dumpsigs): def __init__(self, data):
return return
def finalise(self, fn, d, varient): def finalise(self, fn, d, varient):
return return
def get_taskhash(self, fn, task, deps, dataCache):
return 0
def set_taskdata(self, hashes, deps):
return
def stampfile(self, stampbase, taskname, taskhash): def stampfile(self, stampbase, taskname, taskhash):
return "%s.%s" % (stampbase, taskname) return "%s.%s" % (stampbase, taskname)
@ -40,7 +50,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
""" """
name = "basic" name = "basic"
def __init__(self, data, dumpsigs): def __init__(self, data):
self.basehash = {} self.basehash = {}
self.taskhash = {} self.taskhash = {}
self.taskdeps = {} self.taskdeps = {}
@ -78,7 +88,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
if data is None: if data is None:
bb.error("Task %s from %s seems to be empty?!" % (task, fn)) bb.error("Task %s from %s seems to be empty?!" % (task, fn))
self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest() self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
#bb.note("Hash for %s is %s" % (task, tashhash[task]))
self.taskdeps[fn] = taskdeps self.taskdeps[fn] = taskdeps
self.gendeps[fn] = gendeps self.gendeps[fn] = gendeps
@ -110,7 +119,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
# then process the actual dependencies # then process the actual dependencies
dep_fn = re.search("(?P<fn>.*)\..*", dep).group('fn') dep_fn = re.search("(?P<fn>.*)\..*", dep).group('fn')
if self.twl.search(dataCache.pkg_fn[dep_fn]): if self.twl.search(dataCache.pkg_fn[dep_fn]):
#bb.note("Skipping %s" % dep)
continue continue
if dep not in self.taskhash: if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep) bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
@ -181,10 +189,6 @@ def compare_sigfiles(a, b):
p2 = pickle.Unpickler(file(b, "rb")) p2 = pickle.Unpickler(file(b, "rb"))
b_data = p2.load() b_data = p2.load()
#print "Checking"
#print str(a_data)
#print str(b_data)
def dict_diff(a, b): def dict_diff(a, b):
sa = set(a.keys()) sa = set(a.keys())
sb = set(b.keys()) sb = set(b.keys())
@ -195,7 +199,7 @@ def compare_sigfiles(a, b):
changed.add(i) changed.add(i)
added = sa - sb added = sa - sb
removed = sb - sa removed = sb - sa
return changed, added, removed return changed, added, removed
if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']: if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist']) print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
@ -225,11 +229,6 @@ def compare_sigfiles(a, b):
if changed: if changed:
for dep in changed: for dep in changed:
print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]) print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])
#if added:
# print "Dependency on variable %s was added (value %s)" % (dep, b_data['gendeps'][dep])
#if removed:
# print "Dependency on Variable %s was removed (value %s)" % (dep, a_data['gendeps'][dep])
if 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']): if 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps'])) print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))

View File

@ -311,10 +311,9 @@ def _print_trace(body, line):
max_line = min(line + 4, len(body)) max_line = min(line + 4, len(body))
for i in xrange(min_line, max_line + 1): for i in xrange(min_line, max_line + 1):
if line == i: if line == i:
logger.error(" *** %.4d:%s" % (i, body[i-1]) ) logger.error(' *** %.4d:%s', i, body[i-1])
else: else:
logger.error(" %.4d:%s" % (i, body[i-1]) ) logger.error(' %.4d:%s', i, body[i-1])
def better_compile(text, file, realfile, mode = "exec"): def better_compile(text, file, realfile, mode = "exec"):
""" """
@ -326,16 +325,17 @@ def better_compile(text, file, realfile, mode = "exec"):
except Exception as e: except Exception as e:
# split the text into lines again # split the text into lines again
body = text.split('\n') body = text.split('\n')
logger.error("Error in compiling python function in: %s" % (realfile)) logger.error("Error in compiling python function in %s", realfile)
logger.error(str(e)) logger.error(str(e))
if e.lineno: if e.lineno:
logger.error("The lines leading to this error were:") logger.error("The lines leading to this error were:")
logger.error("\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1])) logger.error("\t%d:%s:'%s'", e.lineno, e.__class__.__name__, body[e.lineno-1])
_print_trace(body, e.lineno) _print_trace(body, e.lineno)
else: else:
logger.error("The function causing this error was:") logger.error("The function causing this error was:")
for line in body: for line in body:
logger.error(line) logger.error(line)
raise raise
def better_exec(code, context, text, realfile = "<code>"): def better_exec(code, context, text, realfile = "<code>"):
@ -376,16 +376,16 @@ def better_exec(code, context, text, realfile = "<code>"):
logger.error("The code that was being executed was:") logger.error("The code that was being executed was:")
_print_trace(textarray, linefailed) _print_trace(textarray, linefailed)
logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[0][0], tbextract[0][1], tbextract[0][2])) logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[0][0], tbextract[0][1], tbextract[0][2])
# See if this is a function we constructed and has calls back into other functions in # See if this is a function we constructed and has calls back into other functions in
# "text". If so, try and improve the context of the error by diving down the trace # "text". If so, try and improve the context of the error by diving down the trace
level = 0 level = 0
nexttb = tb.tb_next nexttb = tb.tb_next
while nexttb is not None: while nexttb is not None:
if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]: if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
_print_trace(textarray, tbextract[level+1][1]) _print_trace(textarray, tbextract[level+1][1])
logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])) logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])
else: else:
break break
nexttb = tb.tb_next nexttb = tb.tb_next