2010-09-03 15:11:29 +00:00
import hashlib
2011-01-01 23:55:54 +00:00
import logging
2011-03-08 23:29:22 +00:00
import os
2010-09-03 15:11:29 +00:00
import re
2012-07-17 00:48:57 +00:00
import tempfile
2016-05-12 07:30:35 +00:00
import pickle
2011-02-16 22:41:37 +00:00
import bb . data
2016-01-26 13:34:32 +00:00
from bb . checksum import FileChecksumCache
2010-09-03 15:11:29 +00:00
2011-01-01 23:55:54 +00:00
logger = logging . getLogger ( ' BitBake.SigGen ' )
def init(d):
    """Instantiate the signature generator selected by BB_SIGNATURE_HANDLER.

    Scans this module's globals for SignatureGenerator subclasses and
    returns an instance of the one whose ``name`` matches the configured
    handler. Falls back to the no-op generator (logging an error) when the
    configured name does not match any known generator.
    """
    siggens = [obj for obj in globals().values()
                      if type(obj) is type and issubclass(obj, SignatureGenerator)]

    desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"
    for sg in siggens:
        if desired == sg.name:
            return sg(d)
    # No generator matched: the original code used a for/else with an
    # unreachable `break` after `return`; a plain fall-through is equivalent.
    logger.error("Invalid signature generator '%s', using default 'noop'\n"
                 "Available generators: %s", desired,
                 ', '.join(obj.name for obj in siggens))
    return SignatureGenerator(d)
2010-09-03 15:11:29 +00:00
class SignatureGenerator(object):
    """Null ("noop") signature generator.

    Defines the interface all signature generators implement; every task
    hash it produces is the constant "0", so it effectively disables
    signature tracking.
    """
    name = "noop"

    def __init__(self, data):
        # All bookkeeping maps are keyed by "<fn>.<task>".
        self.basehash = {}
        self.taskhash = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}

    def finalise(self, fn, d, varient):
        # Nothing to finalise for the noop generator.
        return

    def get_taskhash(self, fn, task, deps, dataCache):
        # Constant hash: tasks never appear changed.
        return "0"

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        return

    def stampfile(self, stampbase, file_name, taskname, extrainfo):
        stamp = "%s.%s.%s" % (stampbase, taskname, extrainfo)
        return stamp.rstrip('.')

    def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
        mask = "%s.%s.%s" % (stampbase, taskname, extrainfo)
        return mask.rstrip('.')

    def dump_sigtask(self, fn, task, stampbase, runtime):
        # No signature data to dump.
        return

    def invalidate_task(self, task, d, fn):
        bb.build.del_stamp(task, d, fn)

    def dump_sigs(self, dataCache, options):
        # No signature data to dump.
        return

    def get_taskdata(self):
        # Tuple order must match set_taskdata below.
        return (self.runtaskdeps, self.taskhash, self.file_checksum_values,
                self.taints, self.basehash)

    def set_taskdata(self, data):
        (self.runtaskdeps, self.taskhash, self.file_checksum_values,
         self.taints, self.basehash) = data
2015-01-23 14:38:12 +00:00
2010-09-03 15:11:29 +00:00
class SignatureGeneratorBasic(SignatureGenerator):
    """Real signature generator: hashes task metadata (variable values,
    dependencies, file checksums and taints) with md5 so changed metadata
    causes tasks to rerun.
    """
    name = "basic"

    def __init__(self, data):
        self.basehash = {}
        self.taskhash = {}
        self.taskdeps = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.gendeps = {}
        self.lookupcache = {}
        # Extracts the recipe filename from a "<fn>.<task>" identifier.
        # (raw string: clearer regex, identical pattern)
        self.pkgnameextract = re.compile(r"(?P<fn>.*)\..*")
        self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
        self.taskwhitelist = None
        self.init_rundepcheck(data)
        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
        if checksum_cache_file:
            self.checksum_cache = FileChecksumCache()
            self.checksum_cache.init_cache(data, checksum_cache_file)
        else:
            self.checksum_cache = None

    def init_rundepcheck(self, data):
        """Compile the BB_HASHTASK_WHITELIST regex used by rundep_check()."""
        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None
        if self.taskwhitelist:
            self.twl = re.compile(self.taskwhitelist)
        else:
            self.twl = None

    def _build_data(self, fn, d):
        """Compute base hashes for every task of recipe ``fn``.

        Walks each task's variable dependency graph (transitively, minus the
        whitelist), concatenates the dependent variable values and hashes the
        result. Stores per-recipe dependency data and returns the taskdeps
        mapping.
        """
        ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
        tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d)

        taskdeps = {}

        for task in tasklist:
            data = lookupcache[task]

            if data is None:
                bb.error("Task %s from %s seems to be empty?!" % (task, fn))
                data = ''

            # Strip whitelisted variables both from the task's own deps and
            # from each transitive dependency's deps, so e.g. TERM never
            # leaks into any checksum.
            gendeps[task] -= self.basewhitelist
            newdeps = gendeps[task]
            seen = set()
            while newdeps:
                nextdeps = newdeps
                seen |= nextdeps
                newdeps = set()
                for dep in nextdeps:
                    if dep in self.basewhitelist:
                        continue
                    gendeps[dep] -= self.basewhitelist
                    newdeps |= gendeps[dep]
                newdeps -= seen

            # Sorted for a deterministic hash input.
            alldeps = sorted(seen)
            for dep in alldeps:
                data = data + dep
                var = lookupcache[dep]
                if var is not None:
                    data = data + str(var)
            datahash = hashlib.md5(data.encode("utf-8")).hexdigest()
            k = fn + "." + task
            if not ignore_mismatch and k in self.basehash and self.basehash[k] != datahash:
                bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (k, self.basehash[k], datahash))
            self.basehash[k] = datahash
            taskdeps[task] = alldeps

        self.taskdeps[fn] = taskdeps
        self.gendeps[fn] = gendeps
        self.lookupcache[fn] = lookupcache

        return taskdeps

    def finalise(self, fn, d, variant):
        mc = d.getVar("__BBMULTICONFIG", False) or ""
        if variant or mc:
            fn = bb.cache.realfn2virtual(fn, variant, mc)

        try:
            taskdeps = self._build_data(fn, d)
        except:
            # Deliberately broad: annotate which recipe failed, then re-raise.
            bb.warn("Error during finalise of %s" % fn)
            raise

        #Slow but can be useful for debugging mismatched basehashes
        #for task in self.taskdeps[fn]:
        #    self.dump_sigtask(fn, task, d.getVar("STAMP"), False)

        for task in taskdeps:
            d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache):
        # Return True if we should keep the dependency, False to drop it
        # We only manipulate the dependencies for packages not in the whitelist
        if self.twl and not self.twl.search(recipename):
            # then process the actual dependencies
            if self.twl.search(depname):
                return False
        return True

    def read_taint(self, fn, task, stampbase):
        """Return the task's taint file contents, or None if there is none."""
        taint = None
        try:
            with open(stampbase + '.' + task + '.taint', 'r') as taintf:
                taint = taintf.read()
        except IOError:
            pass
        return taint

    def get_taskhash(self, fn, task, deps, dataCache):
        """Compute the full task hash: basehash + dependency hashes +
        file checksums + any taint. Dependencies must already be hashed."""
        k = fn + "." + task
        data = dataCache.basetaskhash[k]
        self.basehash[k] = data
        self.runtaskdeps[k] = []
        self.file_checksum_values[k] = []
        recipename = dataCache.pkg_fn[fn]

        for dep in sorted(deps, key=clean_basepath):
            depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
            if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
                continue
            if dep not in self.taskhash:
                # Fixed: the message was previously passed dep as a separate
                # argument, so the %s placeholder was never interpolated.
                bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
            data = data + self.taskhash[dep]
            self.runtaskdeps[k].append(dep)

        if task in dataCache.file_checksums[fn]:
            if self.checksum_cache:
                checksums = self.checksum_cache.get_checksums(dataCache.file_checksums[fn][task], recipename)
            else:
                checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
            for (f, cs) in checksums:
                self.file_checksum_values[k].append((f, cs))
                if cs:
                    data = data + cs

        taskdep = dataCache.task_deps[fn]
        if 'nostamp' in taskdep and task in taskdep['nostamp']:
            # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
            import uuid
            taint = str(uuid.uuid4())
            data = data + taint
            self.taints[k] = "nostamp:" + taint

        taint = self.read_taint(fn, task, dataCache.stamp[fn])
        if taint:
            data = data + taint
            self.taints[k] = taint
            logger.warning("%s is tainted from a forced run" % k)

        h = hashlib.md5(data.encode("utf-8")).hexdigest()
        self.taskhash[k] = h
        #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
        return h

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        if self.checksum_cache:
            self.checksum_cache.save_extras()
            self.checksum_cache.save_merge()
        else:
            bb.fetch2.fetcher_parse_save()
            bb.fetch2.fetcher_parse_done()

    def dump_sigtask(self, fn, task, stampbase, runtime):
        """Pickle the signature input data for one task to a sigdata/siginfo
        file next to its stamp (or to ``stampbase`` itself for
        "customfile:" dumps). Verifies the stored hashes recompute."""
        k = fn + "." + task
        referencestamp = stampbase
        if isinstance(runtime, str) and runtime.startswith("customfile"):
            sigfile = stampbase
            # Strip the "customfile:" prefix to recover the real stamp base.
            referencestamp = runtime[11:]
        elif runtime and k in self.taskhash:
            sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
        else:
            sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]

        bb.utils.mkdirhier(os.path.dirname(sigfile))

        data = {}
        data['task'] = task
        data['basewhitelist'] = self.basewhitelist
        data['taskwhitelist'] = self.taskwhitelist
        data['taskdeps'] = self.taskdeps[fn][task]
        data['basehash'] = self.basehash[k]
        data['gendeps'] = {}
        data['varvals'] = {}
        data['varvals'][task] = self.lookupcache[fn][task]
        for dep in self.taskdeps[fn][task]:
            if dep in self.basewhitelist:
                continue
            data['gendeps'][dep] = self.gendeps[fn][dep]
            data['varvals'][dep] = self.lookupcache[fn][dep]

        if runtime and k in self.taskhash:
            data['runtaskdeps'] = self.runtaskdeps[k]
            data['file_checksum_values'] = [(os.path.basename(f), cs) for f, cs in self.file_checksum_values[k]]
            data['runtaskhashes'] = {}
            for dep in data['runtaskdeps']:
                data['runtaskhashes'][dep] = self.taskhash[dep]
            data['taskhash'] = self.taskhash[k]

        taint = self.read_taint(fn, task, referencestamp)
        if taint:
            data['taint'] = taint

        if runtime and k in self.taints:
            if 'nostamp:' in self.taints[k]:
                data['taint'] = self.taints[k]

        # Sanity-check that the dumped data reproduces the stored hashes.
        computed_basehash = calc_basehash(data)
        if computed_basehash != self.basehash[k]:
            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[k], k))
        if runtime and k in self.taskhash:
            computed_taskhash = calc_taskhash(data)
            if computed_taskhash != self.taskhash[k]:
                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[k], k))
                sigfile = sigfile.replace(self.taskhash[k], computed_taskhash)

        # Write atomically: dump to a temp file in the same directory, then
        # rename over the target; clean up the temp file on failure.
        fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
        try:
            with os.fdopen(fd, "wb") as stream:
                pickle.dump(data, stream, -1)
                stream.flush()
            os.chmod(tmpfile, 0o664)
            os.rename(tmpfile, sigfile)
        except (OSError, IOError) as err:
            try:
                os.unlink(tmpfile)
            except OSError:
                pass
            raise err

    def dump_sigs(self, dataCaches, options):
        """Dump signature data for every hashed task we know about."""
        for fn in self.taskdeps:
            for task in self.taskdeps[fn]:
                tid = fn + ":" + task
                (mc, _, _) = bb.runqueue.split_tid(tid)
                k = fn + "." + task
                if k not in self.taskhash:
                    continue
                if dataCaches[mc].basetaskhash[k] != self.basehash[k]:
                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
                    bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[k], self.basehash[k]))
                self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
2010-09-03 15:11:29 +00:00
2011-01-07 11:04:38 +00:00
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
    """Basic generator variant that embeds the task hash in stamp file names,
    so a changed hash automatically invalidates the stamp."""
    name = "basichash"

    def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
        # A *_setscene task shares the hash of the task it accelerates.
        if taskname != "do_setscene" and taskname.endswith("_setscene"):
            k = fn + "." + taskname[:-9]
        else:
            k = fn + "." + taskname
        if clean:
            h = "*"
        else:
            # If k is not in basehash, then error
            h = self.taskhash[k] if k in self.taskhash else self.basehash[k]
        return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')

    def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
        # Same layout as stampfile() but with a wildcard where the hash goes.
        return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)

    def invalidate_task(self, task, d, fn):
        bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
        bb.build.write_taint(task, d, fn)
2010-09-13 14:40:31 +00:00
def dump_this_task(outfile, d):
    """Dump signature data for the currently executing task to ``outfile``."""
    import bb.parse
    fn = d.getVar("BB_FILENAME")
    task = "do_" + d.getVar("BB_CURRENTTASK")
    referencestamp = bb.build.stamp_internal(task, d, None, True)
    bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
2010-09-13 14:40:31 +00:00
2011-11-23 08:04:00 +00:00
def clean_basepath(a):
    """Reduce a task identifier to its last two path components, re-appending
    any virtual/multiconfig qualifiers, for stable cross-build comparison."""
    mc = None
    if a.startswith("multiconfig:"):
        _, mc, a = a.split(":", 2)
    parts = a.rsplit("/", 2)
    b = parts[1] + parts[2]
    if a.startswith("virtual:"):
        b += ":" + a.rsplit(":", 1)[0]
    if mc:
        b += ":multiconfig:" + mc
    return b

def clean_basepaths(a):
    """Return mapping ``a`` re-keyed through clean_basepath()."""
    return {clean_basepath(x): a[x] for x in a}

def clean_basepaths_list(a):
    """Return the elements of ``a`` mapped through clean_basepath()."""
    return [clean_basepath(x) for x in a]
2012-08-27 20:44:35 +00:00
def compare_sigfiles(a, b, recursecb=None):
    """Compare two pickled signature files and describe the differences.

    a, b: paths to pickled sigdata/siginfo files.
    recursecb: optional callback (dep, hash_a, hash_b) -> list of lines,
               used to recurse into a changed dependency's own signature.
    Returns a list of human-readable difference descriptions (empty when
    the signatures match).

    NOTE: unpickles its inputs; only call on trusted signature files.
    """
    output = []

    with open(a, 'rb') as f:
        p1 = pickle.Unpickler(f)
        a_data = p1.load()
    with open(b, 'rb') as f:
        p2 = pickle.Unpickler(f)
        b_data = p2.load()

    def dict_diff(a, b, whitelist=frozenset()):
        # Immutable default (was a mutable set() default argument).
        sa = set(a.keys())
        sb = set(b.keys())
        common = sa & sb
        changed = set()
        for i in common:
            if a[i] != b[i] and i not in whitelist:
                changed.add(i)
        added = sb - sa
        removed = sa - sb
        return changed, added, removed

    def file_checksums_diff(a, b):
        from collections import Counter
        # Handle old siginfo format
        if isinstance(a, dict):
            a = [(os.path.basename(f), cs) for f, cs in a.items()]
        if isinstance(b, dict):
            b = [(os.path.basename(f), cs) for f, cs in b.items()]
        # Compare lists, ensuring we can handle duplicate filenames if they exist
        removedcount = Counter(a)
        removedcount.subtract(b)
        addedcount = Counter(b)
        addedcount.subtract(a)
        added = []
        for x in b:
            if addedcount[x] > 0:
                addedcount[x] -= 1
                added.append(x)
        removed = []
        changed = []
        for x in a:
            if removedcount[x] > 0:
                removedcount[x] -= 1
                for y in added:
                    if y[0] == x[0]:
                        changed.append((x[0], x[1], y[1]))
                        added.remove(y)
                        break
                else:
                    removed.append(x)
        added = [x[0] for x in added]
        removed = [x[0] for x in removed]
        return changed, added, removed

    if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
        output.append("basewhitelist changed from '%s' to '%s'" % (a_data['basewhitelist'], b_data['basewhitelist']))
        if a_data['basewhitelist'] and b_data['basewhitelist']:
            output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))

    if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
        output.append("taskwhitelist changed from '%s' to '%s'" % (a_data['taskwhitelist'], b_data['taskwhitelist']))
        if a_data['taskwhitelist'] and b_data['taskwhitelist']:
            output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))

    if a_data['taskdeps'] != b_data['taskdeps']:
        output.append("Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))

    if a_data['basehash'] != b_data['basehash']:
        output.append("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash']))

    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
    if changed:
        for dep in changed:
            output.append("List of dependencies for variable %s changed from '%s' to '%s'" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
            if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
                output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
    if added:
        for dep in added:
            output.append("Dependency on variable %s was added" % (dep))
    if removed:
        for dep in removed:
            output.append("Dependency on Variable %s was removed" % (dep))

    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
    if changed:
        for dep in changed:
            output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))

    # Older signature files may predate these fields; default them.
    if 'file_checksum_values' not in a_data:
        a_data['file_checksum_values'] = {}
    if 'file_checksum_values' not in b_data:
        b_data['file_checksum_values'] = {}

    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
    if changed:
        for f, old, new in changed:
            output.append("Checksum for file %s changed from %s to %s" % (f, old, new))
    if added:
        for f in added:
            output.append("Dependency on checksum of file %s was added" % (f))
    if removed:
        for f in removed:
            output.append("Dependency on checksum of file %s was removed" % (f))

    if 'runtaskdeps' not in a_data:
        a_data['runtaskdeps'] = {}
    if 'runtaskdeps' not in b_data:
        b_data['runtaskdeps'] = {}

    if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
        changed = ["Number of task dependencies changed"]
    else:
        changed = []
        for idx, task in enumerate(a_data['runtaskdeps']):
            a = a_data['runtaskdeps'][idx]
            b = b_data['runtaskdeps'][idx]
            if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
                changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

    if changed:
        output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
        output.append("\n".join(changed))

    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        a = a_data['runtaskhashes']
        b = b_data['runtaskhashes']
        changed, added, removed = dict_diff(a, b)
        if added:
            for dep in added:
                # An added dep whose hash matches a removed dep is a rename,
                # not a real addition; suppress it.
                bdep_found = False
                if removed:
                    for bdep in removed:
                        if b[dep] == a[bdep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
                            bdep_found = True
                if not bdep_found:
                    output.append("Dependency on task %s was added with hash %s" % (clean_basepath(dep), b[dep]))
        if removed:
            for dep in removed:
                adep_found = False
                if added:
                    for adep in added:
                        if b[adep] == a[dep]:
                            #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
                            adep_found = True
                if not adep_found:
                    output.append("Dependency on task %s was removed with hash %s" % (clean_basepath(dep), a[dep]))
        if changed:
            for dep in changed:
                output.append("Hash for dependent task %s changed from %s to %s" % (clean_basepath(dep), a[dep], b[dep]))
                if callable(recursecb):
                    # If a dependent hash changed, might as well print the line above and then defer to the changes in
                    # that hash since in all likelyhood, they're the same changes this task also saw.
                    recout = recursecb(dep, a[dep], b[dep])
                    if recout:
                        output = [output[-1]] + recout

    a_taint = a_data.get('taint', None)
    b_taint = b_data.get('taint', None)
    if a_taint != b_taint:
        output.append("Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint))

    return output
2012-06-18 15:45:35 +00:00
2016-04-02 16:11:26 +00:00
def calc_basehash(sigdata):
    """Recompute a task's base hash from data stored in a signature file.

    Mirrors the hash construction used when the sigfile was written:
    concatenate the task's own variable value with each dependent
    variable's name and (non-None) value, then md5 the result.

    sigdata: dict loaded from a sigfile; must contain 'task', 'varvals'
             and 'taskdeps'.
    Returns the md5 hex digest string.
    """
    task = sigdata['task']
    basedata = sigdata['varvals'][task]
    if basedata is None:
        # Task has no value of its own; hash only the dependencies.
        basedata = ''

    # Iterate in sorted order so the result is deterministic and matches
    # the generator, which sorts dependencies before hashing ('taskdeps'
    # may be an unordered collection such as a set).
    for dep in sorted(sigdata['taskdeps']):
        basedata = basedata + dep
        val = sigdata['varvals'][dep]
        if val is not None:
            basedata = basedata + str(val)

    return hashlib.md5(basedata.encode("utf-8")).hexdigest()
2016-04-02 16:11:26 +00:00
def calc_taskhash(sigdata):
    """Recompute a task's full hash from data stored in a signature file.

    Combines the base hash with the hashes of all runtime task
    dependencies, the tracked file checksums and any taint value.

    sigdata: dict loaded from a sigfile; must contain 'basehash',
             'runtaskdeps', 'runtaskhashes' and 'file_checksum_values';
             'taint' is optional.
    Returns the md5 hex digest string.
    """
    data = sigdata['basehash']

    for dep in sigdata['runtaskdeps']:
        data = data + sigdata['runtaskhashes'][dep]

    for c in sigdata['file_checksum_values']:
        # c is a (filename, checksum) pair; the checksum may be None
        # when the file did not exist at hashing time, in which case the
        # generator contributed nothing to the hash either.
        if c[1]:
            data = data + c[1]

    if 'taint' in sigdata:
        if 'nostamp:' in sigdata['taint']:
            # Strip the 8-character "nostamp:" prefix; only the random
            # value after it was folded into the hash.
            data = data + sigdata['taint'][8:]
        else:
            data = data + sigdata['taint']

    return hashlib.md5(data.encode("utf-8")).hexdigest()
2016-04-02 16:11:26 +00:00
2010-09-30 08:35:23 +00:00
def dump_sigfile(a):
    """Load the signature file 'a' and return its contents as a list of
    human-readable strings, including recomputed base/task hashes for
    cross-checking against the stored values."""
    output = []

    # NOTE(review): sigfiles are pickled data — only load files produced
    # by bitbake itself, never untrusted input.
    with open(a, 'rb') as f:
        a_data = pickle.Unpickler(f).load()

    output.append("basewhitelist: %s" % (a_data['basewhitelist']))
    output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))
    output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
    output.append("basehash: %s" % (a_data['basehash']))

    for var, deps in a_data['gendeps'].items():
        output.append("List of dependencies for variable %s is %s" % (var, deps))

    for var, val in a_data['varvals'].items():
        output.append("Variable %s value is %s" % (var, val))

    # The remaining entries are optional depending on sigfile vintage.
    if 'runtaskdeps' in a_data:
        output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))

    if 'file_checksum_values' in a_data:
        output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))

    if 'runtaskhashes' in a_data:
        for dep, taskhash in a_data['runtaskhashes'].items():
            output.append("Hash for dependent task %s is %s" % (dep, taskhash))

    if 'taint' in a_data:
        output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

    # Recompute the hashes so a reader can spot a stored/derived mismatch.
    if 'task' in a_data:
        output.append("Computed base hash is %s and from file %s" % (calc_basehash(a_data), a_data['basehash']))
    else:
        output.append("Unable to compute base hash")

    output.append("Computed task hash is %s" % calc_taskhash(a_data))

    return output