bitbake: Sync a load of whitespace and other non-functional changes with bitbake upstream

Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
Richard Purdie 2011-01-01 23:55:54 +00:00
parent e8c48e668c
commit 0090a798eb
12 changed files with 87 additions and 85 deletions

lib/bb/cache.py

@ -43,7 +43,7 @@ except ImportError:
logger.info("Importing cPickle failed. "
"Falling back to a very slow implementation.")
__cache_version__ = "133"
__cache_version__ = "134"
recipe_fields = (
'pn',
@ -100,19 +100,20 @@ class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
def taskvar(cls, var, tasks, metadata):
return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
for task in tasks)
@classmethod
def getvar(cls, var, metadata):
return metadata.getVar(var, True) or ''
@classmethod
def from_metadata(cls, filename, metadata):
tasks = metadata.getVar('__BBTASKS', False)
pn = cls.getvar('PN', metadata)
packages = cls.listvar('PACKAGES', metadata)
if not pn in packages:
packages.append(pn)
tasks = metadata.getVar('__BBTASKS', False)
return RecipeInfo(
tasks = tasks,
basetaskhashes = cls.taskvar('BB_BASEHASH', tasks, metadata),
@ -463,6 +464,7 @@ class Cache(object):
"""
Save data we need into the cache
"""
realfn = self.virtualfn2realfn(file_name)[0]
info = RecipeInfo.from_metadata(realfn, data)
self.add_info(file_name, info, cacheData, parsed)
@ -612,7 +614,6 @@ class CacheData(object):
self.possible_world.append(fn)
self.hashfn[fn] = info.hashfilename
for task, taskhash in info.basetaskhashes.iteritems():
identifier = '%s.%s' % (fn, task)
self.basetaskhash[identifier] = taskhash
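
Note on the first hunk above: bumping __cache_version__ from "133" to "134" forces any existing on-disk cache to be discarded and rebuilt rather than loaded under the new layout. A minimal sketch of that version-guard pattern, using hypothetical standalone names rather than bitbake's actual Cache class:

import pickle

CACHE_VERSION = "134"

def load_cache(path):
    # Refuse anything written under a different layout version; a
    # stale or unreadable cache is discarded, not deserialized.
    try:
        with open(path, "rb") as f:
            version, data = pickle.load(f)
    except (IOError, EOFError, pickle.UnpicklingError):
        return None
    if version != CACHE_VERSION:
        return None
    return data

def save_cache(path, data):
    with open(path, "wb") as f:
        pickle.dump((CACHE_VERSION, data), f, -1)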

lib/bb/codeparser.py

@ -1,16 +1,20 @@
from bb.pysh import pyshyacc, pyshlex
from itertools import chain
from bb import msg, utils
import ast
import codegen
import logging
import os.path
import bb.utils, bb.data
from itertools import chain
from bb.pysh import pyshyacc, pyshlex
logger = logging.getLogger('BitBake.CodeParser')
PARSERCACHE_VERSION = 2
try:
import cPickle as pickle
except ImportError:
import pickle
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
def check_indent(codestr):
"""If the code is indented, add a top level piece of code to 'remove' the indentation"""
@ -23,7 +27,7 @@ def check_indent(codestr):
return codestr
if codestr[i-1] is "\t" or codestr[i-1] is " ":
return "if 1:\n" + codestr
return "if 1:\n" + codestr
return codestr
@ -31,15 +35,18 @@ pythonparsecache = {}
shellparsecache = {}
def parser_cachefile(d):
cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
bb.data.getVar("CACHE", d, True))
if cachedir in [None, '']:
return None
bb.utils.mkdirhier(cachedir)
cachefile = os.path.join(cachedir, "bb_codeparser.dat")
bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s' for codeparser cache" % cachefile)
logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
return cachefile
def parser_cache_init(d):
global pythonparsecache
global shellparsecache
cachefile = parser_cachefile(d)
if not cachefile:
@ -54,17 +61,16 @@ def parser_cache_init(d):
if version != PARSERCACHE_VERSION:
return
bb.codeparser.pythonparsecache = data[0]
bb.codeparser.shellparsecache = data[1]
pythonparsecache = data[0]
shellparsecache = data[1]
def parser_cache_save(d):
cachefile = parser_cachefile(d)
if not cachefile:
return
p = pickle.Pickler(file(cachefile, "wb"), -1)
p.dump([[bb.codeparser.pythonparsecache, bb.codeparser.shellparsecache], PARSERCACHE_VERSION])
p.dump([[pythonparsecache, shellparsecache], PARSERCACHE_VERSION])
class PythonParser():
class ValueVisitor():
@ -129,10 +135,10 @@ class PythonParser():
funcstr = codegen.to_source(func)
argstr = codegen.to_source(arg)
except TypeError:
msg.debug(2, None, "Failed to convert function and argument to source form")
logger.debug(2, 'Failed to convert function and argument to source form')
else:
msg.debug(1, None, "Warning: in call to '%s', argument '%s' is not a literal" %
(funcstr, argstr))
logger.debug(1, "Warning: in call to '%s', argumen t'%s' is"
"not a literal", funcstr, argstr)
def visit_Call(self, node):
if self.compare_name(self.getvars, node.func):
@ -184,7 +190,7 @@ class PythonParser():
self.execs = pythonparsecache[h]["execs"]
return
code = compile(check_indent(str(node)), "<string>", "exec",
ast.PyCF_ONLY_AST)
visitor = self.ValueVisitor(code)
@ -319,11 +325,11 @@ class ShellParser():
cmd = word[1]
if cmd.startswith("$"):
msg.debug(1, None, "Warning: execution of non-literal command '%s'" % cmd)
logger.debug(1, "Warning: execution of non-literal"
"command '%s'", cmd)
elif cmd == "eval":
command = " ".join(word for _, word in words[1:])
self.parse_shell(command)
else:
self.allexecs.add(cmd)
break
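
Note on parser_cache_init above: the rewrite declares global pythonparsecache / global shellparsecache and assigns the plain names, instead of reaching through bb.codeparser.pythonparsecache from inside the same module. Both spellings rebind the same module attribute; the global form is the idiomatic one. A minimal sketch of the pitfall the declaration avoids (hypothetical module and names):

_parsecache = {}

def cache_init(data):
    # Without the global statement, "_parsecache = data" would bind a
    # function-local name and leave the module-level dict untouched.
    global _parsecache
    _parsecache = data

def lookup(key):
    return _parsecache.get(key)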

lib/bb/cooker.py

@ -1,3 +1,4 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
@ -483,10 +484,9 @@ class BBCooker:
except (IOError, bb.parse.ParseError) as exc:
parselog.critical("Unable to parse %s: %s" % (f, exc))
sys.exit(1)
data = self.configuration.data
bb.parse.init_parser(data, self.configuration.dump_signatures)
data = self.configuration.data
bb.parse.init_parser(data)
for f in files:
data = _parse(f, data)
@ -526,9 +526,7 @@ class BBCooker:
if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None:
bb.fetch.fetcher_init(self.configuration.data)
bb.codeparser.parser_cache_init(self.configuration.data)
bb.parse.init_parser(data, self.configuration.dump_signatures)
bb.parse.init_parser(data)
bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
def handleCollections( self, collections ):
@ -1043,7 +1041,6 @@ class CookerParser(object):
self.shutdown(clean=False)
bb.fatal('Error parsing %s: %s' % (exc.recipe, exc))
self.current += 1
self.virtuals += len(result)
if parsed:

lib/bb/data.py

@ -259,7 +259,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
for key in keys:
emit_var(key, o, d, False) and o.write('\n')
emit_var(func, o, d, False) and o.write('\n')
newdeps = bb.codeparser.ShellParser().parse_shell(d.getVar(func, True))
seen = set()
while newdeps:
@ -299,7 +299,7 @@ def build_dependencies(key, keys, shelldeps, d):
deps |= set((d.getVarFlag(key, "vardeps", True) or "").split())
deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
except:
bb.note("Error expanding variable %s" % key)
bb.note("Error expanding variable %s" % key)
raise
return deps
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))

lib/bb/data_smart.py

@ -28,10 +28,10 @@ BitBake build tools.
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import copy, re, sys
import copy, re
from collections import MutableMapping
import logging
import bb
import bb, bb.codeparser
from bb import utils
from bb.COW import COWDictBase
@ -42,6 +42,7 @@ __setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<
__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
class VariableParse:
def __init__(self, varname, d, val = None):
self.varname = varname
@ -72,11 +73,11 @@ class VariableParse:
self.references |= parser.references
self.execs |= parser.execs
value = utils.better_eval(codeobj, DataDict(self.d))
value = utils.better_eval(codeobj, DataContext(self.d))
return str(value)
class DataDict(dict):
class DataContext(dict):
def __init__(self, metadata, **kwargs):
self.metadata = metadata
dict.__init__(self, **kwargs)
@ -129,7 +130,7 @@ class DataSmart(MutableMapping):
def expand(self, s, varname):
return self.expandWithRefs(s, varname).value
def finalize(self):
"""Performs final steps upon the datastore, including application of overrides"""
@ -291,7 +292,7 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var)
self.dict[var][flag] = flagvalue
def getVarFlag(self, var, flag, expand = False):
def getVarFlag(self, var, flag, expand=False):
local_var = self._findVar(var)
value = None
if local_var:
@ -374,7 +375,7 @@ class DataSmart(MutableMapping):
value = self.getVar(variable, False)
for key in keys:
referrervalue = self.getVar(key, False)
if ref in referrervalue:
if referrervalue and ref in referrervalue:
self.setVar(key, referrervalue.replace(ref, value))
def localkeys(self):
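
Note on the DataDict -> DataContext rename above: the class is a dict subclass handed to better_eval() as the namespace for inline ${@...} python fragments, so names the fragment uses can be resolved on demand. A simplified, self-contained sketch of the idea (a plain dict stands in for bitbake's metadata store; the real class calls getVar):

class DataContext(dict):
    def __init__(self, metadata, **kwargs):
        self.metadata = metadata
        dict.__init__(self, **kwargs)

    def __missing__(self, key):
        # Fall back to the metadata store; re-raising KeyError lets
        # eval() report genuinely unknown names as NameError.
        value = self.metadata.get(key)
        if value is None:
            raise KeyError(key)
        return value

metadata = {'PN': 'busybox', 'PV': '1.18.1'}
print(eval("PN + '-' + PV", {}, DataContext(metadata)))  # busybox-1.18.1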

lib/bb/event.py

@ -343,6 +343,7 @@ class CacheLoadCompleted(Event):
self.total = total
self.num_entries = num_entries
class DepTreeGenerated(Event):
"""
Event when a dependency tree has been generated
@ -384,4 +385,3 @@ class LogHandler(logging.Handler):
fire(record, None)
if bb.event.useStdout:
print(self.format(record))
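
Note on the LogHandler class above: it bridges the standard logging module onto bitbake's event queue by firing each LogRecord as an event. The shape of that bridge as a self-contained sketch, with fire_event standing in for bb.event.fire:

import logging

class EventLogHandler(logging.Handler):
    def __init__(self, fire_event):
        logging.Handler.__init__(self)
        self.fire_event = fire_event

    def emit(self, record):
        # Forward the LogRecord itself; subscribers decide how,
        # and whether, to format it.
        self.fire_event(record)

received = []
logger = logging.getLogger("demo")
logger.addHandler(EventLogHandler(received.append))
logger.warning("something happened")
assert received[0].getMessage() == "something happened"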

lib/bb/msg.py

@ -93,12 +93,10 @@ domain = _NamedTuple("Domain", (
"RunQueue",
"TaskData",
"Util"))
logger = logging.getLogger("BitBake")
loggers = Loggers()
debug_level = DebugLevel()
#
# Message control functions
#
@ -191,4 +189,3 @@ def fatal(msgdomain, msg):
else:
loggers[msgdomain].critical(msg)
sys.exit(1)

lib/bb/parse/__init__.py

@ -31,8 +31,7 @@ import logging
import bb
import bb.utils
import bb.siggen
import bb.utils
logger = logging.getLogger("BitBake.Parsing")
class ParseError(Exception):
@ -85,8 +84,8 @@ def init(fn, data):
if h['supports'](fn):
return h['init'](data)
def init_parser(d, dumpsigs):
bb.parse.siggen = bb.siggen.init(d, dumpsigs)
def init_parser(d):
bb.parse.siggen = bb.siggen.init(d)
def resolve_file(fn, d):
if not os.path.isabs(fn):

lib/bb/pysh/pyshyacc.py

@ -649,7 +649,6 @@ def p_error(p):
try:
import pyshtables
except ImportError:
import os
outputdir = os.path.dirname(__file__)
if not os.access(outputdir, os.W_OK):
outputdir = ''

lib/bb/runqueue.py

@ -992,7 +992,7 @@ class RunQueue:
if self.state is runQueueComplete:
# All done
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed))
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
return False
if self.state is runQueueChildProcess:
@ -1114,7 +1114,6 @@ class RunQueueExecute:
sys.stdout.flush()
sys.stderr.flush()
try:
pipeinfd, pipeoutfd = os.pipe()
pipein = os.fdopen(pipeinfd, 'rb', 4096)
@ -1125,6 +1124,7 @@ class RunQueueExecute:
bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror))
if pid == 0:
pipein.close()
# Save out the PID so that the event can include it in
# the events
bb.event.worker_pid = os.getpid()
@ -1180,9 +1180,10 @@ class RunQueueExecuteDummy(RunQueueExecute):
def __init__(self, rq):
self.rq = rq
self.stats = RunQueueStats(0)
def finish(self):
self.rq.state = runQueueComplete
return
class RunQueueExecuteTasks(RunQueueExecute):
def __init__(self, rq):
@ -1211,7 +1212,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.rq.scenequeue_covered.add(task)
found = True
bb.debug(1, "Full skip list %s" % self.rq.scenequeue_covered)
logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
for task in self.rq.scenequeue_covered:
self.task_skip(task)
@ -1221,7 +1222,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
for scheduler in self.rqdata.schedulers:
if self.scheduler == scheduler.name:
self.sched = scheduler(self, self.rqdata)
logger.debug(1, "Using runqueue scheduler '%s'" % scheduler.name)
logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
break
else:
bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
@ -1247,7 +1248,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.runq_buildable[revdep] = 1
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
taskname = self.rqdata.runq_task[revdep]
logger.debug(1, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname))
logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
def task_complete(self, task):
self.stats.taskCompleted()
@ -1295,7 +1296,8 @@ class RunQueueExecuteTasks(RunQueueExecute):
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'noexec' in taskdep and taskname in taskdep['noexec']:
startevent = runQueueTaskStarted(task, self.stats, self.rq, noexec=True)
startevent = runQueueTaskStarted(task, self.stats, self.rq,
noexec=True)
bb.event.fire(startevent, self.cfgData)
self.runq_running[task] = 1
self.stats.taskActive()
@ -1328,11 +1330,11 @@ class RunQueueExecuteTasks(RunQueueExecute):
# Sanity Checks
for task in xrange(self.stats.total):
if self.runq_buildable[task] == 0:
logger.error("Task %s never buildable!" % task)
logger.error("Task %s never buildable!", task)
if self.runq_running[task] == 0:
logger.error("Task %s never ran!" % task)
logger.error("Task %s never ran!", task)
if self.runq_complete[task] == 0:
logger.error("Task %s never completed!" % task)
logger.error("Task %s never completed!", task)
self.rq.state = runQueueComplete
return True
@ -1478,7 +1480,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
"""
index = self.rqdata.runq_setscene[task]
logger.debug(1, "Found task %s could be accelerated" % self.rqdata.get_user_idstring(index))
logger.debug(1, 'Found task %s which could be accelerated',
self.rqdata.get_user_idstring(index))
self.scenequeue_covered.add(task)
self.scenequeue_updatecounters(task)
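
The bulk of the runqueue changes follow a single pattern: logger.debug(1, "... %s" % arg) becomes logger.debug(1, "... %s", arg). (The leading 1 is bitbake's own debug-level argument; the stock logging.Logger.debug used below does not take it.) Passing the arguments through lets logging defer %-formatting until a handler actually emits the record, which is free when the message is filtered out. A small demonstration with the standard library:

import logging

logging.basicConfig(level=logging.INFO)   # DEBUG records are filtered
logger = logging.getLogger("BitBake.RunQueue")

class Costly(object):
    def __str__(self):
        print("expensive formatting ran")
        return "task 42"

logger.debug("Marking %s as buildable" % Costly())  # str() runs anyway
logger.debug("Marking %s as buildable", Costly())   # str() never runs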

lib/bb/siggen.py

@ -1,37 +1,47 @@
import hashlib
import logging
import re
logger = logging.getLogger('BitBake.SigGen')
try:
import cPickle as pickle
except ImportError:
import pickle
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
def init(d, dumpsigs):
def init(d):
siggens = [obj for obj in globals().itervalues()
if type(obj) is type and issubclass(obj, SignatureGenerator)]
desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
for sg in siggens:
if desired == sg.name:
return sg(d, dumpsigs)
return sg(d)
break
else:
bb.error("Invalid signature generator '%s', using default 'noop' generator" % desired)
bb.error("Available generators: %s" % ", ".join(obj.name for obj in siggens))
return SignatureGenerator(d, dumpsigs)
logger.error("Invalid signature generator '%s', using default 'noop'\n"
"Available generators: %s",
', '.join(obj.name for obj in siggens))
return SignatureGenerator(d)
class SignatureGenerator(object):
"""
"""
name = "noop"
def __init__(self, data, dumpsigs):
def __init__(self, data):
return
def finalise(self, fn, d, varient):
return
def get_taskhash(self, fn, task, deps, dataCache):
return 0
def set_taskdata(self, hashes, deps):
return
def stampfile(self, stampbase, taskname, taskhash):
return "%s.%s" % (stampbase, taskname)
@ -40,7 +50,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
"""
name = "basic"
def __init__(self, data, dumpsigs):
def __init__(self, data):
self.basehash = {}
self.taskhash = {}
self.taskdeps = {}
@ -78,7 +88,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
if data is None:
bb.error("Task %s from %s seems to be empty?!" % (task, fn))
self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
#bb.note("Hash for %s is %s" % (task, tashhash[task]))
self.taskdeps[fn] = taskdeps
self.gendeps[fn] = gendeps
@ -110,7 +119,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
# then process the actual dependencies
dep_fn = re.search("(?P<fn>.*)\..*", dep).group('fn')
if self.twl.search(dataCache.pkg_fn[dep_fn]):
#bb.note("Skipping %s" % dep)
continue
if dep not in self.taskhash:
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
@ -181,10 +189,6 @@ def compare_sigfiles(a, b):
p2 = pickle.Unpickler(file(b, "rb"))
b_data = p2.load()
#print "Checking"
#print str(a_data)
#print str(b_data)
def dict_diff(a, b):
sa = set(a.keys())
sb = set(b.keys())
@ -195,7 +199,7 @@ def compare_sigfiles(a, b):
changed.add(i)
added = sa - sb
removed = sb - sa
return changed, added, removed
if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
@ -225,11 +229,6 @@ def compare_sigfiles(a, b):
if changed:
for dep in changed:
print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])
#if added:
# print "Dependency on variable %s was added (value %s)" % (dep, b_data['gendeps'][dep])
#if removed:
# print "Dependency on Variable %s was removed (value %s)" % (dep, a_data['gendeps'][dep])
if 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))

lib/bb/utils.py

@ -311,10 +311,9 @@ def _print_trace(body, line):
max_line = min(line + 4, len(body))
for i in xrange(min_line, max_line + 1):
if line == i:
logger.error(" *** %.4d:%s" % (i, body[i-1]) )
logger.error(' *** %.4d:%s', i, body[i-1])
else:
logger.error(" %.4d:%s" % (i, body[i-1]) )
logger.error(' %.4d:%s', i, body[i-1])
def better_compile(text, file, realfile, mode = "exec"):
"""
@ -326,16 +325,17 @@ def better_compile(text, file, realfile, mode = "exec"):
except Exception as e:
# split the text into lines again
body = text.split('\n')
logger.error("Error in compiling python function in: %s" % (realfile))
logger.error("Error in compiling python function in %s", realfile)
logger.error(str(e))
if e.lineno:
logger.error("The lines leading to this error were:")
logger.error("\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))
logger.error("\t%d:%s:'%s'", e.lineno, e.__class__.__name__, body[e.lineno-1])
_print_trace(body, e.lineno)
else:
logger.error("The function causing this error was:")
for line in body:
logger.error(line)
raise
def better_exec(code, context, text, realfile = "<code>"):
@ -376,16 +376,16 @@ def better_exec(code, context, text, realfile = "<code>"):
logger.error("The code that was being executed was:")
_print_trace(textarray, linefailed)
logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[0][0], tbextract[0][1], tbextract[0][2])
# See if this is a function we constructed and has calls back into other functions in
# "text". If so, try and improve the context of the error by diving down the trace
level = 0
nexttb = tb.tb_next
while nexttb is not None:
if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
_print_trace(textarray, tbextract[level+1][1])
logger.error("(file: '%s', lineno: %s, function: %s)" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])
else:
break
nexttb = tb.tb_next
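
Note on the utils.py hunks: beyond converting the error logging to deferred formatting, _print_trace is the helper that prints a window of source lines around a failure, flagging the offending line. A simplified standalone version of the same idea:

def print_trace(body, line):
    # Show up to four lines of context either side of the failure.
    min_line = max(line - 4, 1)
    max_line = min(line + 4, len(body))
    for i in range(min_line, max_line + 1):
        prefix = " *** " if i == line else "     "
        print("%s%.4d:%s" % (prefix, i, body[i - 1]))

print_trace(["a = 1", "b = undefined", "c = 3"], 2)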