bitbake: Sync with upstream

git-svn-id: https://svn.o-hand.com/repos/poky/trunk@2480 311d38ba-8fff-0310-9ca6-ca027cbcb966
This commit is contained in:
Richard Purdie 2007-08-11 22:42:15 +00:00
parent 0197eb2d87
commit 18026165c3
5 changed files with 88 additions and 80 deletions

View File

@ -7,6 +7,12 @@ Changes in Bitbake 1.8.x:
- Sync fetcher code with that in trunk, adding SRCREV support for svn - Sync fetcher code with that in trunk, adding SRCREV support for svn
- Add ConfigParsed Event after configuration parsing is complete - Add ConfigParsed Event after configuration parsing is complete
- data.emit_var() - only call getVar if we need the variable - data.emit_var() - only call getVar if we need the variable
- Stop generating the A variable (seems to be legacy code)
- Make sure intertask depends get processed correctly in recursive depends
- Add pn-PN to overrides when evaluating PREFERRED_VERSION
- Improve the progress indicator by skipping tasks that have
already run before starting the build rather than during it
- Add profiling option (-P)
Changes in Bitbake 1.8.6: Changes in Bitbake 1.8.6:
- Correctly redirect stdin when forking - Correctly redirect stdin when forking

View File

@ -102,6 +102,8 @@ Default BBFILES are the .bb files in the current directory.""" )
parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""", parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
action = "append", dest = "debug_domains", default = [] ) action = "append", dest = "debug_domains", default = [] )
parser.add_option( "-P", "--profile", help = "profile the command and print a report",
action = "store_true", dest = "profile", default = False )
options, args = parser.parse_args(sys.argv) options, args = parser.parse_args(sys.argv)
@ -110,8 +112,23 @@ Default BBFILES are the .bb files in the current directory.""" )
configuration.pkgs_to_build.extend(args[1:]) configuration.pkgs_to_build.extend(args[1:])
cooker = bb.cooker.BBCooker(configuration) cooker = bb.cooker.BBCooker(configuration)
cooker.cook()
if configuration.profile:
try:
import cProfile as profile
except:
import profile
profile.runctx("cooker.cook()", globals(), locals(), "profile.log")
import pstats
p = pstats.Stats('profile.log')
p.sort_stats('time')
p.print_stats()
p.print_callers()
p.sort_stats('cumulative')
p.print_stats()
else:
cooker.cook()
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@ -80,6 +80,7 @@ def uri_replace(uri, uri_find, uri_replace, d):
return bb.encodeurl(result_decoded) return bb.encodeurl(result_decoded)
methods = [] methods = []
urldata_cache = {}
def fetcher_init(d): def fetcher_init(d):
""" """
@ -87,12 +88,16 @@ def fetcher_init(d):
Calls before this must not hit the cache. Calls before this must not hit the cache.
""" """
pd = persist_data.PersistData(d) pd = persist_data.PersistData(d)
# Clear any cached url data # When to drop SCM head revisions controled by user policy
pd.delDomain("BB_URLDATA") srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
# When to drop SCM head revisions should be controled by user policy if srcrev_policy == "cache":
pd.delDomain("BB_URI_HEADREVS") bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
elif srcrev_policy == "clear":
bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
pd.delDomain("BB_URI_HEADREVS")
else:
bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)
# Make sure our domains exist # Make sure our domains exist
pd.addDomain("BB_URLDATA")
pd.addDomain("BB_URI_HEADREVS") pd.addDomain("BB_URI_HEADREVS")
pd.addDomain("BB_URI_LOCALCOUNT") pd.addDomain("BB_URI_LOCALCOUNT")
@ -102,45 +107,30 @@ def fetcher_init(d):
# 3. localpaths # 3. localpaths
# localpath can be called at any time # localpath can be called at any time
def init(urls, d, cache = True): def init(urls, d, setup = True):
urldata = {} urldata = {}
fn = bb.data.getVar('FILE', d, 1)
if cache: if fn in urldata_cache:
urldata = getdata(d) urldata = urldata_cache[fn]
for url in urls: for url in urls:
if url not in urldata: if url not in urldata:
ud = FetchData(url, d) urldata[url] = FetchData(url, d)
for m in methods:
if m.supports(url, ud, d):
ud.init(m, d)
ud.setup_localpath(d)
break
urldata[url] = ud
if cache: if setup:
fn = bb.data.getVar('FILE', d, 1) for url in urldata:
pd = persist_data.PersistData(d) if not urldata[url].setup:
pd.setValue("BB_URLDATA", fn, pickle.dumps(urldata, 0)) urldata[url].setup_localpath(d)
urldata_cache[fn] = urldata
return urldata return urldata
def getdata(d): def go(d):
urldata = {}
fn = bb.data.getVar('FILE', d, 1)
pd = persist_data.PersistData(d)
encdata = pd.getValue("BB_URLDATA", fn)
if encdata:
urldata = pickle.loads(str(encdata))
return urldata
def go(d, urldata = None):
""" """
Fetch all urls Fetch all urls
init must have previously been called
""" """
if not urldata: urldata = init([], d, True)
urldata = getdata(d)
for u in urldata: for u in urldata:
ud = urldata[u] ud = urldata[u]
@ -154,13 +144,12 @@ def go(d, urldata = None):
if ud.localfile and not m.forcefetch(u, ud, d): if ud.localfile and not m.forcefetch(u, ud, d):
Fetch.write_md5sum(u, ud, d) Fetch.write_md5sum(u, ud, d)
def localpaths(d, urldata = None): def localpaths(d):
""" """
Return a list of the local filenames, assuming successful fetch Return a list of the local filenames, assuming successful fetch
""" """
local = [] local = []
if not urldata: urldata = init([], d, True)
urldata = getdata(d)
for u in urldata: for u in urldata:
ud = urldata[u] ud = urldata[u]
@ -177,25 +166,14 @@ def get_srcrev(d):
have been set. have been set.
""" """
scms = [] scms = []
urldata = getdata(d) # Only call setup_localpath on URIs which suppports_srcrev()
if len(urldata) == 0: urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
src_uri = bb.data.getVar('SRC_URI', d, 1).split() for u in urldata:
for url in src_uri: ud = urldata[u]
if url not in urldata: if ud.method.suppports_srcrev():
ud = FetchData(url, d) if not ud.setup:
for m in methods: ud.setup_localpath(d)
if m.supports(url, ud, d): scms.append(u)
ud.init(m, d)
break
urldata[url] = ud
if ud.method.suppports_srcrev():
scms.append(url)
ud.setup_localpath(d)
else:
for u in urldata:
ud = urldata[u]
if ud.method.suppports_srcrev():
scms.append(u)
if len(scms) == 0: if len(scms) == 0:
bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI") bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
@ -212,7 +190,7 @@ def localpath(url, d, cache = True):
Called from the parser with cache=False since the cache isn't ready Called from the parser with cache=False since the cache isn't ready
at this point. Also called from classed in OE e.g. patch.bbclass at this point. Also called from classed in OE e.g. patch.bbclass
""" """
ud = init([url], d, cache) ud = init([url], d)
if ud[url].method: if ud[url].method:
return ud[url].localpath return ud[url].localpath
return url return url
@ -252,17 +230,22 @@ def runfetchcmd(cmd, d, quiet = False):
return output return output
class FetchData(object): class FetchData(object):
"""Class for fetcher variable store""" """
A class which represents the fetcher state for a given URI.
"""
def __init__(self, url, d): def __init__(self, url, d):
self.localfile = "" self.localfile = ""
(self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d)) (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
self.date = Fetch.getSRCDate(self, d) self.date = Fetch.getSRCDate(self, d)
self.url = url self.url = url
self.setup = False
def init(self, method, d): for m in methods:
self.method = method if m.supports(url, self, d):
self.method = m
break
def setup_localpath(self, d): def setup_localpath(self, d):
self.setup = True
if "localpath" in self.parm: if "localpath" in self.parm:
self.localpath = self.parm["localpath"] self.localpath = self.parm["localpath"]
else: else:

View File

@ -391,26 +391,26 @@ def set_additional_vars(file, d, include):
"""Deduce rest of variables, e.g. ${A} out of ${SRC_URI}""" """Deduce rest of variables, e.g. ${A} out of ${SRC_URI}"""
return return
# Nothing seems to use this variable
#bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s: set_additional_vars" % file)
# bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s: set_additional_vars" % file) #src_uri = data.getVar('SRC_URI', d, 1)
#if not src_uri:
# return
# src_uri = data.getVar('SRC_URI', d, 1) #a = (data.getVar('A', d, 1) or '').split()
# if not src_uri:
# return
# a = (data.getVar('A', d, 1) or '').split() #from bb import fetch
#try:
# ud = fetch.init(src_uri.split(), d)
# a += fetch.localpaths(d, ud)
#except fetch.NoMethodError:
# pass
#except bb.MalformedUrl,e:
# raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e)
#del fetch
# from bb import fetch #data.setVar('A', " ".join(a), d)
# try:
# ud = fetch.init(src_uri.split(), d)
# a += fetch.localpaths(d, ud)
# except fetch.NoMethodError:
# pass
# except bb.MalformedUrl,e:
# raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e)
# del fetch
# data.setVar('A', " ".join(a), d)
# Add us to the handlers list # Add us to the handlers list

View File

@ -70,14 +70,14 @@ def obtain(fn, data):
return localfn return localfn
bb.mkdirhier(dldir) bb.mkdirhier(dldir)
try: try:
ud = bb.fetch.init([fn], data, False) bb.fetch.init([fn], data)
except bb.fetch.NoMethodError: except bb.fetch.NoMethodError:
(type, value, traceback) = sys.exc_info() (type, value, traceback) = sys.exc_info()
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value) bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value)
return localfn return localfn
try: try:
bb.fetch.go(data, ud) bb.fetch.go(data)
except bb.fetch.MissingParameterError: except bb.fetch.MissingParameterError:
(type, value, traceback) = sys.exc_info() (type, value, traceback) = sys.exc_info()
bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value) bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value)
@ -181,7 +181,9 @@ def feeder(lineno, s, fn, data):
if val == None: if val == None:
val = groupd["value"] val = groupd["value"]
elif "colon" in groupd and groupd["colon"] != None: elif "colon" in groupd and groupd["colon"] != None:
val = bb.data.expand(groupd["value"], data) e = data.createCopy()
bb.data.update_data(e)
val = bb.data.expand(groupd["value"], e)
elif "append" in groupd and groupd["append"] != None: elif "append" in groupd and groupd["append"] != None:
val = "%s %s" % ((getFunc(groupd, key, data) or ""), groupd["value"]) val = "%s %s" % ((getFunc(groupd, key, data) or ""), groupd["value"])
elif "prepend" in groupd and groupd["prepend"] != None: elif "prepend" in groupd and groupd["prepend"] != None: