2010-09-03 15:11:29 +00:00
|
|
|
import hashlib
|
2011-01-01 23:55:54 +00:00
|
|
|
import logging
|
2011-03-08 23:29:22 +00:00
|
|
|
import os
|
2010-09-03 15:11:29 +00:00
|
|
|
import re
|
2012-07-17 00:48:57 +00:00
|
|
|
import tempfile
|
2011-02-16 22:41:37 +00:00
|
|
|
import bb.data
|
2010-09-03 15:11:29 +00:00
|
|
|
|
2011-01-01 23:55:54 +00:00
|
|
|
logger = logging.getLogger('BitBake.SigGen')
|
|
|
|
|
2010-09-03 15:11:29 +00:00
|
|
|
# Prefer the C implementation of pickle (Python 2) for speed; fall back to
# the pure-Python module when it is unavailable.
try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
|
2010-09-03 15:11:29 +00:00
|
|
|
|
2011-01-01 23:55:54 +00:00
|
|
|
def init(d):
    """Instantiate the signature generator selected by BB_SIGNATURE_HANDLER.

    Scans this module's globals for SignatureGenerator subclasses and
    returns an instance of the one whose 'name' attribute matches the
    configured handler.  Falls back to the no-op generator (logging an
    error) when the configured name is unknown.
    """
    siggens = [obj for obj in globals().itervalues()
               if type(obj) is type and issubclass(obj, SignatureGenerator)]

    desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
    for sg in siggens:
        if desired == sg.name:
            return sg(d)
    # Previous version had an unreachable 'break' after the return and a
    # redundant for/else; an ordinary fall-through is equivalent.
    logger.error("Invalid signature generator '%s', using default 'noop'\n"
                 "Available generators: %s", desired,
                 ', '.join(obj.name for obj in siggens))
    return SignatureGenerator(d)
|
2010-09-03 15:11:29 +00:00
|
|
|
|
|
|
|
class SignatureGenerator(object):
    """
    Do-nothing signature generator and interface definition.

    Every task is given the constant hash "0" and no signature data is
    ever written; real generators subclass this and override the methods.
    """
    name = "noop"

    def __init__(self, data):
        # Caches populated by real implementations; kept here so callers
        # can always read them regardless of the active generator.
        self.file_checksum_values = {}
        self.runtaskdeps = {}
        self.taskhash = {}

    def finalise(self, fn, d, varient):
        # Nothing to compute for the noop generator.
        return

    def get_taskhash(self, fn, task, deps, dataCache):
        # Constant hash: tasks never appear changed.
        return "0"

    def set_taskdata(self, hashes, deps, checksum):
        return

    def stampfile(self, stampbase, file_name, taskname, extrainfo):
        # No hash component in the stamp name for the noop generator.
        stamp = "%s.%s.%s" % (stampbase, taskname, extrainfo)
        return stamp.rstrip('.')

    def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
        mask = "%s.%s.%s" % (stampbase, taskname, extrainfo)
        return mask.rstrip('.')

    def dump_sigtask(self, fn, task, stampbase, runtime):
        return

    def invalidate_task(self, task, d, fn):
        # Force a rerun by removing the task's stamp file.
        bb.build.del_stamp(task, d, fn)

    def dump_sigs(self, dataCache, options):
        return
|
2012-06-18 15:45:35 +00:00
|
|
|
|
2010-09-03 15:11:29 +00:00
|
|
|
class SignatureGeneratorBasic(SignatureGenerator):
    """
    Signature generator which computes real checksums.

    Builds an md5 "basehash" per task from the task body plus the values of
    every variable the task transitively depends on, then (in
    get_taskhash) mixes in the hashes of dependent tasks, file checksums
    and any taint to form the final task hash.
    """
    name = "basic"

    def __init__(self, data):
        # Hashes keyed by "<recipe filename>.<taskname>"
        self.basehash = {}
        self.taskhash = {}
        # Per-recipe dependency data, keyed by recipe filename
        self.taskdeps = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.gendeps = {}
        self.lookupcache = {}
        # Extracts the recipe filename from a "<fn>.<task>" dependency string
        self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
        # Variables excluded from all checksums (BB_HASHBASE_WHITELIST)
        self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
        self.taskwhitelist = None
        self.init_rundepcheck(data)

    def init_rundepcheck(self, data):
        # Compile BB_HASHTASK_WHITELIST; rundep_check() uses it to drop
        # selected inter-task dependencies from the task hash.
        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
        if self.taskwhitelist:
            self.twl = re.compile(self.taskwhitelist)
        else:
            self.twl = None

    def _build_data(self, fn, d):
        """Generate variable dependency data for recipe fn and compute the
        base hash for each of its tasks.  Returns the per-task dependency
        lists."""
        tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)

        taskdeps = {}
        basehash = {}

        for task in tasklist:
            data = lookupcache[task]

            if data is None:
                bb.error("Task %s from %s seems to be empty?!" % (task, fn))
                data = ''

            # Strip whitelisted variables from the per-variable dependency
            # sets too, not just from the final accumulated set, so that
            # e.g. an unset TERM does not perturb checksums.
            gendeps[task] -= self.basewhitelist
            newdeps = gendeps[task]
            seen = set()
            # Transitively expand the variable dependency closure
            while newdeps:
                nextdeps = newdeps
                seen |= nextdeps
                newdeps = set()
                for dep in nextdeps:
                    if dep in self.basewhitelist:
                        continue
                    gendeps[dep] -= self.basewhitelist
                    newdeps |= gendeps[dep]
                newdeps -= seen

            # Hash the task body plus each dependent variable's name and
            # value, in sorted order for determinism
            alldeps = sorted(seen)
            for dep in alldeps:
                data = data + dep
                var = lookupcache[dep]
                if var is not None:
                    data = data + str(var)
            self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
            taskdeps[task] = alldeps

        self.taskdeps[fn] = taskdeps
        self.gendeps[fn] = gendeps
        self.lookupcache[fn] = lookupcache

        return taskdeps

    def finalise(self, fn, d, variant):
        """Compute and publish the base hashes for all tasks of a parsed
        recipe (including class-extended variants)."""

        if variant:
            fn = "virtual:" + variant + ":" + fn

        try:
            taskdeps = self._build_data(fn, d)
        except:
            bb.note("Error during finalise of %s" % fn)
            raise

        #Slow but can be useful for debugging mismatched basehashes
        #for task in self.taskdeps[fn]:
        #    self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)

        # Expose each base hash to the metadata
        for task in taskdeps:
            d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
        # Return True if we should keep the dependency, False to drop it
        # We only manipulate the dependencies for packages not in the whitelist
        if self.twl and not self.twl.search(recipename):
            # then process the actual dependencies
            if self.twl.search(depname):
                return False
        return True

    def read_taint(self, fn, task, stampbase):
        """Return the contents of the task's .taint file, or None if the
        task has not been tainted (file absent/unreadable)."""
        taint = None
        try:
            with open(stampbase + '.' + task + '.taint', 'r') as taintf:
                taint = taintf.read()
        except IOError:
            pass
        return taint

    def get_taskhash(self, fn, task, deps, dataCache):
        """Compute the full task hash: base hash plus the hashes of all
        (non-whitelisted) dependent tasks, file checksums and any taint.
        Dependencies must already have their hashes computed."""
        k = fn + "." + task
        data = dataCache.basetaskhash[k]
        self.runtaskdeps[k] = []
        self.file_checksum_values[k] = {}
        recipename = dataCache.pkg_fn[fn]
        # Sorted for a deterministic hash regardless of dependency order
        for dep in sorted(deps, key=clean_basepath):
            depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
            if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
                continue
            if dep not in self.taskhash:
                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
            data = data + self.taskhash[dep]
            self.runtaskdeps[k].append(dep)

        # Mix in checksums of any files the task declares a dependency on
        if task in dataCache.file_checksums[fn]:
            checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
            for (f,cs) in checksums:
                self.file_checksum_values[k][f] = cs
                data = data + cs

        # A taint (from a forced rerun) deliberately perturbs the hash
        taint = self.read_taint(fn, task, dataCache.stamp[fn])
        if taint:
            data = data + taint
            logger.warn("%s is tainted from a forced run" % k)

        h = hashlib.md5(data).hexdigest()
        self.taskhash[k] = h
        #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
        return h

    def set_taskdata(self, hashes, deps, checksums):
        # Install externally computed hash data (e.g. received from the
        # server side) wholesale.
        self.runtaskdeps = deps
        self.taskhash = hashes
        self.file_checksum_values = checksums

    def dump_sigtask(self, fn, task, stampbase, runtime):
        """Write the signature data for one task to disk.

        runtime selects the destination: "customfile" writes directly to
        stampbase; a true value writes a .sigdata file named after the full
        task hash; otherwise a .sigbasedata file named after the base hash.
        """
        k = fn + "." + task
        if runtime == "customfile":
            sigfile = stampbase
        elif runtime and k in self.taskhash:
            sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
        else:
            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]

        bb.utils.mkdirhier(os.path.dirname(sigfile))

        data = {}
        data['basewhitelist'] = self.basewhitelist
        data['taskwhitelist'] = self.taskwhitelist
        data['taskdeps'] = self.taskdeps[fn][task]
        data['basehash'] = self.basehash[k]
        data['gendeps'] = {}
        data['varvals'] = {}
        data['varvals'][task] = self.lookupcache[fn][task]
        for dep in self.taskdeps[fn][task]:
            if dep in self.basewhitelist:
                continue
            data['gendeps'][dep] = self.gendeps[fn][dep]
            data['varvals'][dep] = self.lookupcache[fn][dep]

        # Runtime data only exists once get_taskhash() has been called
        if runtime and k in self.taskhash:
            data['runtaskdeps'] = self.runtaskdeps[k]
            data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k].items()]
            data['runtaskhashes'] = {}
            for dep in data['runtaskdeps']:
                data['runtaskhashes'][dep] = self.taskhash[dep]

        taint = self.read_taint(fn, task, stampbase)
        if taint:
            data['taint'] = taint

        # Write atomically: pickle into a temporary file in the target
        # directory, then rename over the final name; clean up on failure.
        fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
        try:
            with os.fdopen(fd, "wb") as stream:
                p = pickle.dump(data, stream, -1)
                stream.flush()
            os.chmod(tmpfile, 0664)
            os.rename(tmpfile, sigfile)
        except (OSError, IOError) as err:
            try:
                os.unlink(tmpfile)
            except OSError:
                pass
            raise err

    def dump_sigs(self, dataCache, options):
        """Dump signature data for every task with a computed hash,
        sanity-checking our base hashes against the cache on the way."""
        for fn in self.taskdeps:
            for task in self.taskdeps[fn]:
                k = fn + "." + task
                if k not in self.taskhash:
                    continue
                if dataCache.basetaskhash[k] != self.basehash[k]:
                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
                    bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
                self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
|
|
|
|
|
2011-01-07 11:04:38 +00:00
|
|
|
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
    """Variant of the basic generator which embeds the task hash in stamp
    file names, so a hash change automatically invalidates the stamp."""
    name = "basichash"

    def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
        """Build a stamp name of the form stampbase.taskname.hash.extrainfo
        (with clean=True, '*' replaces the hash to form a glob mask)."""
        # A setscene task shares its hash with the task it provides, so
        # drop the "_setscene" suffix when looking the hash up.
        if taskname.endswith("_setscene") and taskname != "do_setscene":
            key = fn + "." + taskname[:-9]
        else:
            key = fn + "." + taskname
        if clean:
            hashval = "*"
        else:
            # If key is in neither taskhash nor basehash, then error
            hashval = self.taskhash[key] if key in self.taskhash else self.basehash[key]
        return ("%s.%s.%s.%s" % (stampbase, taskname, hashval, extrainfo)).rstrip('.')

    def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
        """Return a wildcard mask matching this task's stamps for any hash."""
        return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)

    def invalidate_task(self, task, d, fn):
        """Force a rerun by tainting the task hash rather than deleting stamps."""
        bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
        bb.build.write_taint(task, d, fn)
|
|
|
|
|
2010-09-13 14:40:31 +00:00
|
|
|
def dump_this_task(outfile, d):
    """Write the signature data of the currently executing task to *outfile*."""
    import bb.parse
    recipefile = d.getVar("BB_FILENAME", True)
    taskname = "do_" + d.getVar("BB_CURRENTTASK", True)
    bb.parse.siggen.dump_sigtask(recipefile, taskname, outfile, "customfile")
|
|
|
|
|
2011-11-23 08:04:00 +00:00
|
|
|
def clean_basepath(a):
    """Strip the directory part from a '<fn>.<task>' dependency string,
    preserving any leading 'virtual:...' prefix."""
    basename = a.rsplit("/", 1)[1]
    if a.startswith("virtual:"):
        return a.rsplit(":", 1)[0] + ":" + basename
    return basename
|
|
|
|
|
|
|
|
def clean_basepaths(a):
    """Return a copy of mapping *a* with every key passed through
    clean_basepath()."""
    return dict((clean_basepath(key), value) for key, value in a.items())
|
|
|
|
|
2012-08-27 20:44:35 +00:00
|
|
|
def compare_sigfiles(a, b, recursecb = None):
    """Compare two pickled signature files and return a list of
    human-readable strings describing the differences.

    a, b: paths to the signature files.
    recursecb: optional callback(taskkey, hash_a, hash_b) used to recurse
    into the signatures of dependent tasks whose hashes changed.
    """
    output = []

    # Load both signature dumps; close the files when done (the previous
    # implementation leaked both file handles).
    with open(a, "rb") as f:
        a_data = pickle.Unpickler(f).load()
    with open(b, "rb") as f:
        b_data = pickle.Unpickler(f).load()

    def dict_diff(a, b, whitelist=frozenset()):
        # Return (changed, added, removed) key sets between dicts a and b,
        # ignoring value changes for whitelisted keys.  (frozenset default
        # avoids the mutable-default-argument pitfall.)
        sa = set(a.keys())
        sb = set(b.keys())
        common = sa & sb
        changed = set()
        for i in common:
            if a[i] != b[i] and i not in whitelist:
                changed.add(i)
        added = sb - sa
        removed = sa - sb
        return changed, added, removed

    def file_checksums_diff(a, b):
        # Diff two file-checksum lists, returning (changed, added, removed)
        # where changed entries are (filename, old_cs, new_cs) tuples.
        from collections import Counter
        # Handle old siginfo format
        if isinstance(a, dict):
            a = [(os.path.basename(f), cs) for f, cs in a.items()]
        if isinstance(b, dict):
            b = [(os.path.basename(f), cs) for f, cs in b.items()]
        # Compare lists, ensuring we can handle duplicate filenames if they exist
        removedcount = Counter(a)
        removedcount.subtract(b)
        addedcount = Counter(b)
        addedcount.subtract(a)
        added = []
        for x in b:
            if addedcount[x] > 0:
                addedcount[x] -= 1
                added.append(x)
        removed = []
        changed = []
        for x in a:
            if removedcount[x] > 0:
                removedcount[x] -= 1
                # An added entry with the same filename means the checksum
                # changed rather than the dependency being added/removed
                for y in added:
                    if y[0] == x[0]:
                        changed.append((x[0], x[1], y[1]))
                        added.remove(y)
                        break
                else:
                    removed.append(x)
        added = [x[0] for x in added]
        removed = [x[0] for x in removed]
        return changed, added, removed

    if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
        output.append("basewhitelist changed from '%s' to '%s'" % (a_data['basewhitelist'], b_data['basewhitelist']))
        if a_data['basewhitelist'] and b_data['basewhitelist']:
            output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))

    if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
        output.append("taskwhitelist changed from '%s' to '%s'" % (a_data['taskwhitelist'], b_data['taskwhitelist']))
        if a_data['taskwhitelist'] and b_data['taskwhitelist']:
            output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))

    if a_data['taskdeps'] != b_data['taskdeps']:
        output.append("Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))

    if a_data['basehash'] != b_data['basehash']:
        output.append("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash']))

    # Variable dependency lists (ignore whitelisted variables)
    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
    if changed:
        for dep in changed:
            output.append("List of dependencies for variable %s changed from '%s' to '%s'" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
            if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
                output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
    if added:
        for dep in added:
            output.append("Dependency on variable %s was added" % (dep))
    if removed:
        for dep in removed:
            output.append("Dependency on Variable %s was removed" % (dep))

    # Variable values
    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
    if changed:
        for dep in changed:
            output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))

    # File checksum dependencies
    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
    if changed:
        for f, old, new in changed:
            output.append("Checksum for file %s changed from %s to %s" % (f, old, new))
    if added:
        for f in added:
            output.append("Dependency on checksum of file %s was added" % (f))
    if removed:
        for f in removed:
            output.append("Dependency on checksum of file %s was removed" % (f))

    # Hashes of dependent tasks
    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        a = a_data['runtaskhashes']
        b = b_data['runtaskhashes']
        changed, added, removed = dict_diff(a, b)
        if added:
            for dep in added:
                # A removed dependency carrying the same hash is a rename,
                # not a real addition; suppress it.
                bdep_found = False
                if removed:
                    for bdep in removed:
                        if b[dep] == a[bdep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
                            bdep_found = True
                if not bdep_found:
                    output.append("Dependency on task %s was added with hash %s" % (clean_basepath(dep), b[dep]))
        if removed:
            for dep in removed:
                adep_found = False
                if added:
                    for adep in added:
                        if b[adep] == a[dep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
                            adep_found = True
                if not adep_found:
                    output.append("Dependency on task %s was removed with hash %s" % (clean_basepath(dep), a[dep]))
        if changed:
            for dep in changed:
                output.append("Hash for dependent task %s changed from %s to %s" % (clean_basepath(dep), a[dep], b[dep]))
                if callable(recursecb):
                    # If a dependent hash changed, might as well print the line above and then defer to the changes in
                    # that hash since in all likelyhood, they're the same changes this task also saw.
                    recout = recursecb(dep, a[dep], b[dep])
                    if recout:
                        output = [output[-1]] + recout

    a_taint = a_data.get('taint', None)
    b_taint = b_data.get('taint', None)
    if a_taint != b_taint:
        output.append("Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint))

    return output
|
2012-06-18 15:45:35 +00:00
|
|
|
|
|
|
|
|
2010-09-30 08:35:23 +00:00
|
|
|
def dump_sigfile(a):
    """Return a list of human-readable strings describing the signature
    data stored in the pickled signature file *a*."""
    output = []

    # Close the file once loaded (the previous implementation leaked
    # the file handle).
    with open(a, "rb") as f:
        a_data = pickle.Unpickler(f).load()

    output.append("basewhitelist: %s" % (a_data['basewhitelist']))

    output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))

    output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))

    output.append("basehash: %s" % (a_data['basehash']))

    for dep in a_data['gendeps']:
        output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))

    for dep in a_data['varvals']:
        output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))

    # The runtime fields only exist in full .sigdata dumps
    if 'runtaskdeps' in a_data:
        output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))

    if 'file_checksum_values' in a_data:
        output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))

    if 'runtaskhashes' in a_data:
        for dep in a_data['runtaskhashes']:
            output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))

    if 'taint' in a_data:
        output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

    return output
|