bitbake: bitbake: remove True option to getVar calls

getVar() now expands by default, so remove the True option from
getVar() calls with a regex search and replace.

Search made with the following regex: getVar ?\(( ?[^,()]*), True\)
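
A minimal Python sketch of how such a mechanical rewrite could be driven (hypothetical tooling; the commit records only the regex, not the exact commands used):

    import re
    from pathlib import Path

    # The regex from the commit message: capture the first argument,
    # then drop the trailing ", True".
    pattern = re.compile(r"getVar ?\(( ?[^,()]*), True\)")

    for path in Path(".").rglob("*.py"):
        text = path.read_text()
        new = pattern.sub(r"getVar(\1)", text)
        if new != text:
            path.write_text(new)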

(Bitbake rev: 3b45c479de8640f92dd1d9f147b02e1eecfaadc8)

Signed-off-by: Joshua Lock <joshua.g.lock@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Authored by Joshua Lock on 2016-11-25 15:28:08 +00:00, committed by Richard Purdie
parent 1d0c124cdf
commit 1fce7ecbbb
42 changed files with 279 additions and 279 deletions
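
For context, the underlying API change is that the datastore's getVar() now defaults its expand argument to True, so the explicit flag became redundant. A minimal sketch of the equivalence, assuming a BitBake datastore d (illustrative only, not code from this commit):

    # Before this commit, expansion had to be requested explicitly:
    pn_old = d.getVar("PN", True)
    # After it, expand defaults to True, so the flag is redundant:
    pn_new = d.getVar("PN")
    assert pn_old == pn_new
    # Unexpanded reads still pass False explicitly, as the diffs below keep doing:
    raw = d.getVar("PN", False)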


@@ -89,7 +89,7 @@ def main():
     tinfoil = tinfoil_init(False)
     try:
         for path in ([topdir] +
-                tinfoil.config_data.getVar('BBPATH', True).split(':')):
+                tinfoil.config_data.getVar('BBPATH').split(':')):
             pluginpath = os.path.join(path, 'lib', 'bblayers')
             bb.utils.load_plugins(logger, plugins, pluginpath)


@@ -228,7 +228,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append
                 the_data = bb_cache.loadDataFull(fn, appends)
                 the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
-                bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN", True), taskname.replace("do_", "")))
+                bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
                 # exported_vars() returns a generator which *cannot* be passed to os.environ.update()
                 # successfully. We also need to unset anything from the environment which shouldn't be there
@@ -247,7 +247,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append
                 if task_exports:
                     for e in task_exports.split():
                         the_data.setVarFlag(e, 'export', '1')
-                        v = the_data.getVar(e, True)
+                        v = the_data.getVar(e)
                         if v is not None:
                             os.environ[e] = v


@@ -38,7 +38,7 @@
         The code to execute the first part of this process, a fetch,
         looks something like the following:
         <literallayout class='monospaced'>
-     src_uri = (d.getVar('SRC_URI', True) or "").split()
+     src_uri = (d.getVar('SRC_URI') or "").split()
      fetcher = bb.fetch2.Fetch(src_uri, d)
      fetcher.download()
         </literallayout>
@@ -52,7 +52,7 @@
     <para>
         The instantiation of the fetch class is usually followed by:
         <literallayout class='monospaced'>
-     rootdir = l.getVar('WORKDIR', True)
+     rootdir = l.getVar('WORKDIR')
      fetcher.unpack(rootdir)
         </literallayout>
         This code unpacks the downloaded files to the


@@ -1165,7 +1165,7 @@
         <literallayout class='monospaced'>
      python some_python_function () {
          d.setVar("TEXT", "Hello World")
-         print d.getVar("TEXT", True)
+         print d.getVar("TEXT")
      }
         </literallayout>
         Because the Python "bb" and "os" modules are already
@@ -1180,7 +1180,7 @@
         to freely set variable values to expandable expressions
         without having them expanded prematurely.
         If you do wish to expand a variable within a Python
-        function, use <filename>d.getVar("X", True)</filename>.
+        function, use <filename>d.getVar("X")</filename>.
         Or, for more complicated expressions, use
         <filename>d.expand()</filename>.
     </note>
@@ -1232,7 +1232,7 @@
         Here is an example:
         <literallayout class='monospaced'>
      def get_depends(d):
-         if d.getVar('SOMECONDITION', True):
+         if d.getVar('SOMECONDITION'):
              return "dependencywithcond"
          else:
              return "dependency"
@@ -1367,7 +1367,7 @@
         based on the value of another variable:
         <literallayout class='monospaced'>
      python () {
-         if d.getVar('SOMEVAR', True) == 'value':
+         if d.getVar('SOMEVAR') == 'value':
              d.setVar('ANOTHERVAR', 'value2')
      }
         </literallayout>


@@ -91,13 +91,13 @@ class TaskBase(event.Event):
     def __init__(self, t, logfile, d):
         self._task = t
-        self._package = d.getVar("PF", True)
-        self.taskfile = d.getVar("FILE", True)
+        self._package = d.getVar("PF")
+        self.taskfile = d.getVar("FILE")
         self.taskname = self._task
         self.logfile = logfile
         self.time = time.time()
         event.Event.__init__(self)
-        self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())
+        self._message = "recipe %s: task %s: %s" % (d.getVar("PF"), t, self.getDisplayName())

     def getTask(self):
         return self._task
@@ -226,17 +226,17 @@ def exec_func(func, d, dirs = None, pythonexception=False):
     else:
         lockfiles = None

-    tempdir = d.getVar('T', True)
+    tempdir = d.getVar('T')

     # or func allows items to be executed outside of the normal
     # task set, such as buildhistory
-    task = d.getVar('BB_RUNTASK', True) or func
+    task = d.getVar('BB_RUNTASK') or func
     if task == func:
         taskfunc = task
     else:
         taskfunc = "%s.%s" % (task, func)

-    runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}"
+    runfmt = d.getVar('BB_RUNFMT') or "run.{func}.{pid}"
     runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
     runfile = os.path.join(tempdir, runfn)
     bb.utils.mkdirhier(os.path.dirname(runfile))
@@ -368,7 +368,7 @@ exit $ret
     cmd = runfile
     if d.getVarFlag(func, 'fakeroot', False):
-        fakerootcmd = d.getVar('FAKEROOT', True)
+        fakerootcmd = d.getVar('FAKEROOT')
         if fakerootcmd:
             cmd = [fakerootcmd, runfile]
@@ -429,7 +429,7 @@ exit $ret
         else:
             break

-    tempdir = d.getVar('T', True)
+    tempdir = d.getVar('T')
     fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
     if os.path.exists(fifopath):
         os.unlink(fifopath)
@@ -442,7 +442,7 @@ exit $ret
         with open(os.devnull, 'r+') as stdin:
             bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
     except bb.process.CmdError:
-        logfn = d.getVar('BB_LOGFILE', True)
+        logfn = d.getVar('BB_LOGFILE')
         raise FuncFailed(func, logfn)
     finally:
         os.unlink(fifopath)
@@ -473,18 +473,18 @@ def _exec_task(fn, task, d, quieterr):
     logger.debug(1, "Executing task %s", task)

     localdata = _task_data(fn, task, d)
-    tempdir = localdata.getVar('T', True)
+    tempdir = localdata.getVar('T')
     if not tempdir:
         bb.fatal("T variable not set, unable to build")

     # Change nice level if we're asked to
-    nice = localdata.getVar("BB_TASK_NICE_LEVEL", True)
+    nice = localdata.getVar("BB_TASK_NICE_LEVEL")
     if nice:
         curnice = os.nice(0)
         nice = int(nice) - curnice
         newnice = os.nice(nice)
         logger.debug(1, "Renice to %s " % newnice)
-    ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True)
+    ionice = localdata.getVar("BB_TASK_IONICE_LEVEL")
     if ionice:
         try:
             cls, prio = ionice.split(".", 1)
@@ -495,7 +495,7 @@ def _exec_task(fn, task, d, quieterr):
     bb.utils.mkdirhier(tempdir)

     # Determine the logfile to generate
-    logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
+    logfmt = localdata.getVar('BB_LOGFMT') or 'log.{task}.{pid}'
     logbase = logfmt.format(task=task, pid=os.getpid())

     # Document the order of the tasks...
@@ -627,7 +627,7 @@ def exec_task(fn, task, d, profile = False):
         quieterr = True

     if profile:
-        profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task)
+        profname = "profile-%s.log" % (d.getVar("PN") + "-" + task)
         try:
             import cProfile as profile
         except:
@@ -667,8 +667,8 @@ def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
         stamp = d.stamp[file_name]
         extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
     else:
-        stamp = d.getVar('STAMP', True)
-        file_name = d.getVar('BB_FILENAME', True)
+        stamp = d.getVar('STAMP')
+        file_name = d.getVar('BB_FILENAME')
         extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

     if baseonly:
@@ -703,8 +703,8 @@ def stamp_cleanmask_internal(taskname, d, file_name):
         stamp = d.stampclean[file_name]
         extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
     else:
-        stamp = d.getVar('STAMPCLEAN', True)
-        file_name = d.getVar('BB_FILENAME', True)
+        stamp = d.getVar('STAMPCLEAN')
+        file_name = d.getVar('BB_FILENAME')
         extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

     if not stamp:
@@ -741,7 +741,7 @@ def make_stamp(task, d, file_name = None):
     # as it completes
     if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
         stampbase = stamp_internal(task, d, None, True)
-        file_name = d.getVar('BB_FILENAME', True)
+        file_name = d.getVar('BB_FILENAME')
         bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)

 def del_stamp(task, d, file_name = None):
@@ -763,7 +763,7 @@ def write_taint(task, d, file_name = None):
     if file_name:
         taintfn = d.stamp[file_name] + '.' + task + '.taint'
     else:
-        taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
+        taintfn = d.getVar('STAMP') + '.' + task + '.taint'
     bb.utils.mkdirhier(os.path.dirname(taintfn))
     # The specific content of the taint file is not really important,
     # we just need it to be random, so a random UUID is used


@@ -296,7 +296,7 @@ def parse_recipe(bb_data, bbfile, appends, mc=''):
         bb_data.setVar("__BBMULTICONFIG", mc)

     # expand tmpdir to include this topdir
-    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR', True) or "")
+    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
     bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
     oldpath = os.path.abspath(os.getcwd())
     bb.parse.cached_mtime_noerror(bbfile_loc)
@@ -378,7 +378,7 @@ class Cache(NoCache):
         # It will be used later for deciding whether we
         # need extra cache file dump/load support
         self.caches_array = caches_array
-        self.cachedir = data.getVar("CACHE", True)
+        self.cachedir = data.getVar("CACHE")
         self.clean = set()
         self.checked = set()
         self.depends_cache = {}
@@ -792,8 +792,8 @@ class MultiProcessCache(object):
         self.cachedata_extras = self.create_cachedata()

     def init_cache(self, d, cache_file_name=None):
-        cachedir = (d.getVar("PERSISTENT_DIR", True) or
-                    d.getVar("CACHE", True))
+        cachedir = (d.getVar("PERSISTENT_DIR") or
+                    d.getVar("CACHE"))
         if cachedir in [None, '']:
             return
         bb.utils.mkdirhier(cachedir)


@@ -323,7 +323,7 @@ class BBCooker:
         # Need to preserve BB_CONSOLELOG over resets
         consolelog = None
         if hasattr(self, "data"):
-            consolelog = self.data.getVar("BB_CONSOLELOG", True)
+            consolelog = self.data.getVar("BB_CONSOLELOG")
         if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
             self.enableDataTracking()
@@ -518,7 +518,7 @@ class BBCooker:
             bb.msg.loggerVerboseLogs = True
         # Change nice level if we're asked to
-        nice = self.data.getVar("BB_NICE_LEVEL", True)
+        nice = self.data.getVar("BB_NICE_LEVEL")
         if nice:
             curnice = os.nice(0)
             nice = int(nice) - curnice
@@ -531,7 +531,7 @@ class BBCooker:
         for mc in self.multiconfigs:
             self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
-        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS", True))
+        self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))

     def updateConfigOpts(self, options, environment):
         clean = True
@@ -611,7 +611,7 @@ class BBCooker:
             fn = self.matchFile(fn)
             fn = bb.cache.realfn2virtual(fn, cls, mc)
         elif len(pkgs_to_build) == 1:
-            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
+            ignore = self.expanded_data.getVar("ASSUME_PROVIDED") or ""
             if pkgs_to_build[0] in set(ignore.split()):
                 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
@@ -995,7 +995,7 @@ class BBCooker:
             bb.data.expandKeys(localdata)
             # Handle PREFERRED_PROVIDERS
-            for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split():
+            for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
                 try:
                     (providee, provider) = p.split(':')
                 except:
@@ -1006,7 +1006,7 @@ class BBCooker:
                 self.recipecaches[mc].preferred[providee] = provider

     def findCoreBaseFiles(self, subdir, configfile):
-        corebase = self.data.getVar('COREBASE', True) or ""
+        corebase = self.data.getVar('COREBASE') or ""
         paths = []
         for root, dirs, files in os.walk(corebase + '/' + subdir):
             for d in dirs:
@@ -1056,7 +1056,7 @@ class BBCooker:
         """
         matches = []
-        bbpaths = self.data.getVar('BBPATH', True).split(':')
+        bbpaths = self.data.getVar('BBPATH').split(':')
         for path in bbpaths:
             dirpath = os.path.join(path, directory)
             if os.path.exists(dirpath):
@@ -1078,7 +1078,7 @@ class BBCooker:
         data = self.data
         # iterate configs
-        bbpaths = data.getVar('BBPATH', True).split(':')
+        bbpaths = data.getVar('BBPATH').split(':')
         for path in bbpaths:
             confpath = os.path.join(path, "conf", var)
             if os.path.exists(confpath):
@@ -1147,7 +1147,7 @@ class BBCooker:
             bb.debug(1,'Processing %s in collection list' % (c))
             # Get collection priority if defined explicitly
-            priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True)
+            priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
             if priority:
                 try:
                     prio = int(priority)
@@ -1161,7 +1161,7 @@ class BBCooker:
                 collection_priorities[c] = None
             # Check dependencies and store information for priority calculation
-            deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
+            deps = self.data.getVar("LAYERDEPENDS_%s" % c)
             if deps:
                 try:
                     depDict = bb.utils.explode_dep_versions2(deps)
@@ -1170,7 +1170,7 @@ class BBCooker:
                 for dep, oplist in list(depDict.items()):
                     if dep in collection_list:
                         for opstr in oplist:
-                            layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
+                            layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                             (op, depver) = opstr.split()
                             if layerver:
                                 try:
@@ -1191,7 +1191,7 @@ class BBCooker:
                 collection_depends[c] = []
             # Check recommends and store information for priority calculation
-            recs = self.data.getVar("LAYERRECOMMENDS_%s" % c, True)
+            recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
             if recs:
                 try:
                     recDict = bb.utils.explode_dep_versions2(recs)
@@ -1201,7 +1201,7 @@ class BBCooker:
                     if rec in collection_list:
                         if oplist:
                             opstr = oplist[0]
-                            layerver = self.data.getVar("LAYERVERSION_%s" % rec, True)
+                            layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                             if layerver:
                                 (op, recver) = opstr.split()
                                 try:
@@ -1235,7 +1235,7 @@ class BBCooker:
         # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
         for c in collection_list:
             calc_layer_priority(c)
-            regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True)
+            regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
             if regex == None:
                 parselog.error("BBFILE_PATTERN_%s not defined" % c)
                 errors = True
@@ -1367,7 +1367,7 @@ class BBCooker:
             taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
             taskdata[mc].add_provider(self.data, self.recipecaches[mc], item)
-        buildname = self.data.getVar("BUILDNAME", True)
+        buildname = self.data.getVar("BUILDNAME")
         bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
         # Execute the runqueue
@@ -1586,7 +1586,7 @@ class BBCooker:
             bb.event.fire(bb.event.SanityCheck(False), self.data)
         for mc in self.multiconfigs:
-            ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED", True) or ""
+            ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
             self.recipecaches[mc].ignored_dependencies = set(ignore.split())
             for dep in self.configuration.extra_assume_provided:
@@ -1627,7 +1627,7 @@ class BBCooker:
         if len(pkgs_to_build) == 0:
             raise NothingToBuild
-        ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
+        ignore = (self.expanded_data.getVar("ASSUME_PROVIDED") or "").split()
         for pkg in pkgs_to_build:
             if pkg in ignore:
                 parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
@@ -1797,7 +1797,7 @@ class CookerCollectFiles(object):
         collectlog.debug(1, "collecting .bb files")
-        files = (config.getVar( "BBFILES", True) or "").split()
+        files = (config.getVar( "BBFILES") or "").split()
         config.setVar("BBFILES", " ".join(files))
         # Sort files by priority
@@ -1827,7 +1827,7 @@ class CookerCollectFiles(object):
                     if g not in newfiles:
                         newfiles.append(g)
-        bbmask = config.getVar('BBMASK', True)
+        bbmask = config.getVar('BBMASK')
         if bbmask:
             # First validate the individual regular expressions and ignore any
@@ -1923,7 +1923,7 @@ class CookerCollectFiles(object):
         for collection, pattern, regex, _ in self.bbfile_config_priorities:
             if regex in unmatched:
-                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
+                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                     collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
         return priorities
@@ -2080,7 +2080,7 @@ class CookerParser(object):
         self.toparse = self.total - len(self.fromcache)
         self.progress_chunk = int(max(self.toparse / 100, 1))
-        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
+        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                  multiprocessing.cpu_count()), len(self.willparse))
         self.start()


@@ -212,7 +212,7 @@ def _inherit(bbclass, data):

 def findConfigFile(configfile, data):
     search = []
-    bbpath = data.getVar("BBPATH", True)
+    bbpath = data.getVar("BBPATH")
     if bbpath:
         for i in bbpath.split(":"):
             search.append(os.path.join(i, "conf", configfile))
@@ -286,7 +286,7 @@ class CookerDataBuilder(object):
         self.data_hash = self.data.get_hash()
         self.mcdata[''] = self.data
-        multiconfig = (self.data.getVar("BBMULTICONFIG", True) or "").split()
+        multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
         for config in multiconfig:
             mcdata = self.parseConfigurationFiles(['conf/multiconfig/%s.conf' % config] + self.prefiles, self.postfiles)
             bb.event.fire(bb.event.ConfigParsed(), mcdata)
@@ -319,7 +319,7 @@ class CookerDataBuilder(object):
             data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
             data = parse_config_file(layerconf, data)
-            layers = (data.getVar('BBLAYERS', True) or "").split()
+            layers = (data.getVar('BBLAYERS') or "").split()
             data = bb.data.createCopy(data)
             approved = bb.utils.approved_variables()
@@ -342,7 +342,7 @@ class CookerDataBuilder(object):
             data.delVar('LAYERDIR_RE')
             data.delVar('LAYERDIR')
-        if not data.getVar("BBPATH", True):
+        if not data.getVar("BBPATH"):
             msg = "The BBPATH variable is not set"
             if not layerconf:
                 msg += (" and bitbake did not find a conf/bblayers.conf file in"
@@ -357,7 +357,7 @@ class CookerDataBuilder(object):
             data = parse_config_file(p, data)
         # Handle any INHERITs and inherit the base class
-        bbclasses = ["base"] + (data.getVar('INHERIT', True) or "").split()
+        bbclasses = ["base"] + (data.getVar('INHERIT') or "").split()
         for bbclass in bbclasses:
             data = _inherit(bbclass, data)


@@ -121,7 +121,7 @@ def inheritFromOS(d, savedenv, permitted):
     for s in savedenv.keys():
         if s in permitted:
             try:
-                d.setVar(s, savedenv.getVar(s, True), op = 'from env')
+                d.setVar(s, savedenv.getVar(s), op = 'from env')
                 if s in exportlist:
                     d.setVarFlag(s, "export", True, op = 'auto env export')
             except TypeError:
@@ -141,7 +141,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
     try:
         if all:
             oval = d.getVar(var, False)
-        val = d.getVar(var, True)
+        val = d.getVar(var)
     except (KeyboardInterrupt, bb.build.FuncFailed):
         raise
     except Exception as exc:
@@ -208,9 +208,9 @@ def exported_vars(d):
     k = list(exported_keys(d))
     for key in k:
         try:
-            value = d.getVar(key, True)
+            value = d.getVar(key)
         except Exception as err:
-            bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE", True), key, err))
+            bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE"), key, err))
             continue
         if value is not None:
@@ -225,7 +225,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
         o.write('\n')
     emit_var(func, o, d, False) and o.write('\n')
-    newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
+    newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
     newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
     seen = set()
     while newdeps:
@@ -235,7 +235,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
         for dep in deps:
             if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
                 emit_var(dep, o, d, False) and o.write('\n')
-                newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
+                newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep))
                 newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
         newdeps -= seen
@@ -295,7 +295,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
     def handle_contains(value, contains, d):
         newvalue = ""
         for k in sorted(contains):
-            l = (d.getVar(k, True) or "").split()
+            l = (d.getVar(k) or "").split()
             for word in sorted(contains[k]):
                 if word in l:
                     newvalue += "\n%s{%s} = Set" % (k, word)
@@ -313,7 +313,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
         if varflags.get("python"):
             parser = bb.codeparser.PythonParser(key, logger)
             if value and "\t" in value:
-                logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
+                logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE")))
             parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
             deps = deps | parser.references
             deps = deps | (keys & parser.execs)
@@ -368,7 +368,7 @@ def generate_dependencies(d):
     keys = set(key for key in d if not key.startswith("__"))
     shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
-    varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
+    varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')

     deps = {}
     values = {}


@@ -146,7 +146,7 @@ class DataContext(dict):
         self['d'] = metadata

     def __missing__(self, key):
-        value = self.metadata.getVar(key, True)
+        value = self.metadata.getVar(key)
         if value is None or self.metadata.getVarFlag(key, 'func', False):
             raise KeyError(key)
         else:
@@ -318,7 +318,7 @@ class VariableHistory(object):
        the files in which they were added.
        """
        history = self.variable(var)
-       finalitems = (d.getVar(var, True) or '').split()
+       finalitems = (d.getVar(var) or '').split()
        filemap = {}
        isset = False
        for event in history:
@@ -426,11 +426,11 @@ class DataSmart(MutableMapping):
                 # Can end up here recursively so setup dummy values
                 self.overrides = []
                 self.overridesset = set()
-                self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
+                self.overrides = (self.getVar("OVERRIDES") or "").split(":") or []
                 self.overridesset = set(self.overrides)
                 self.inoverride = False
                 self.expand_cache = {}
-                newoverrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
+                newoverrides = (self.getVar("OVERRIDES") or "").split(":") or []
                 if newoverrides == self.overrides:
                     break
                 self.overrides = newoverrides
@@ -541,7 +541,7 @@ class DataSmart(MutableMapping):
             nextnew = set()
             self.overridevars.update(new)
             for i in new:
-                vardata = self.expandWithRefs(self.getVar(i, True), i)
+                vardata = self.expandWithRefs(self.getVar(i), i)
                 nextnew.update(vardata.references)
                 nextnew.update(vardata.contains.keys())
             new = nextnew
@@ -937,7 +937,7 @@ class DataSmart(MutableMapping):
         bb.data.expandKeys(d)
         bb.data.update_data(d)
-        config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
+        config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST") or "").split())
         keys = set(key for key in iter(d) if not key.startswith("__"))
         for key in keys:
             if key in config_whitelist:


@@ -491,7 +491,7 @@ def fetcher_init(d):
     Calls before this must not hit the cache.
     """
     # When to drop SCM head revisions controlled by user policy
-    srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear"
+    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
     if srcrev_policy == "cache":
         logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
     elif srcrev_policy == "clear":
@@ -572,7 +572,7 @@ def verify_checksum(ud, d, precomputed={}):
     if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
         # If strict checking enabled and neither sum defined, raise error
-        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
+        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
         if strict == "1":
             logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                          'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
@@ -718,7 +718,7 @@ def subprocess_setup():

 def get_autorev(d):
     # only not cache src rev in autorev case
-    if d.getVar('BB_SRCREV_POLICY', True) != "cache":
+    if d.getVar('BB_SRCREV_POLICY') != "cache":
         d.setVar('BB_DONT_CACHE', '1')
     return "AUTOINC"
@@ -737,7 +737,7 @@ def get_srcrev(d, method_name='sortable_revision'):
     """
     scms = []
-    fetcher = Fetch(d.getVar('SRC_URI', True).split(), d)
+    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
     urldata = fetcher.ud
     for u in urldata:
         if urldata[u].method.supports_srcrev():
@@ -757,7 +757,7 @@ def get_srcrev(d, method_name='sortable_revision'):
     #
     # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
     #
-    format = d.getVar('SRCREV_FORMAT', True)
+    format = d.getVar('SRCREV_FORMAT')
     if not format:
         raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
@@ -821,7 +821,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
     origenv = d.getVar("BB_ORIGENV", False)
     for var in exportvars:
-        val = d.getVar(var, True) or (origenv and origenv.getVar(var, True))
+        val = d.getVar(var) or (origenv and origenv.getVar(var))
         if val:
             cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
@@ -860,7 +860,7 @@ def check_network_access(d, info = "", url = None):
     """
     log remote network access, and error if BB_NO_NETWORK is set
     """
-    if d.getVar("BB_NO_NETWORK", True) == "1":
+    if d.getVar("BB_NO_NETWORK") == "1":
         raise NetworkAccess(url, info)
     else:
         logger.debug(1, "Fetcher accessed the network with the command %s" % info)
@@ -958,7 +958,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         # We may be obtaining a mirror tarball which needs further processing by the real fetcher
         # If that tarball is a local file:// we need to provide a symlink to it
-        dldir = ld.getVar("DL_DIR", True)
+        dldir = ld.getVar("DL_DIR")
         if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                 and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
             # Create donestamp in old format to avoid triggering a re-download
@@ -1032,14 +1032,14 @@ def trusted_network(d, url):
     BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
     Note: modifies SRC_URI & mirrors.
     """
-    if d.getVar('BB_NO_NETWORK', True) == "1":
+    if d.getVar('BB_NO_NETWORK') == "1":
         return True
     pkgname = d.expand(d.getVar('PN', False))
     trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
     if not trusted_hosts:
-        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
+        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
     # Not enabled.
     if not trusted_hosts:
@@ -1071,7 +1071,7 @@ def srcrev_internal_helper(ud, d, name):
     """
     srcrev = None
-    pn = d.getVar("PN", True)
+    pn = d.getVar("PN")
     attempts = []
     if name != '' and pn:
         attempts.append("SRCREV_%s_pn-%s" % (name, pn))
@@ -1082,7 +1082,7 @@ def srcrev_internal_helper(ud, d, name):
     attempts.append("SRCREV")
     for a in attempts:
-        srcrev = d.getVar(a, True)
+        srcrev = d.getVar(a)
         if srcrev and srcrev != "INVALID":
             break
@@ -1115,7 +1115,7 @@ def get_checksum_file_list(d):
     """
     fetch = Fetch([], d, cache = False, localonly = True)
-    dl_dir = d.getVar('DL_DIR', True)
+    dl_dir = d.getVar('DL_DIR')
     filelist = []
     for u in fetch.urls:
         ud = fetch.ud[u]
@@ -1129,9 +1129,9 @@ def get_checksum_file_list(d):
             if f.startswith(dl_dir):
                 # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                 if os.path.exists(f):
-                    bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
+                    bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                 else:
-                    bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
+                    bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
             filelist.append(f + ":" + str(os.path.exists(f)))

     return " ".join(filelist)
@@ -1204,7 +1204,7 @@ class FetchData(object):
                 raise NonLocalMethod()
         if self.parm.get("proto", None) and "protocol" not in self.parm:
-            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
+            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
             self.parm["protocol"] = self.parm.get("proto", None)
         if hasattr(self.method, "urldata_init"):
@@ -1217,7 +1217,7 @@ class FetchData(object):
         elif self.localfile:
             self.localpath = self.method.localpath(self, d)
-        dldir = d.getVar("DL_DIR", True)
+        dldir = d.getVar("DL_DIR")
         if not self.needdonestamp:
             return
@@ -1257,12 +1257,12 @@ class FetchData(object):
         if "srcdate" in self.parm:
             return self.parm['srcdate']
-        pn = d.getVar("PN", True)
+        pn = d.getVar("PN")
         if pn:
-            return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
-        return d.getVar("SRCDATE", True) or d.getVar("DATE", True)
+        return d.getVar("SRCDATE") or d.getVar("DATE")

 class FetchMethod(object):
     """Base class for 'fetch'ing data"""
@@ -1282,7 +1282,7 @@ class FetchMethod(object):
         Can also setup variables in urldata for use in go (saving code duplication
         and duplicate code execution)
         """
-        return os.path.join(d.getVar("DL_DIR", True), urldata.localfile)
+        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)

     def supports_checksum(self, urldata):
         """
@@ -1450,7 +1450,7 @@ class FetchMethod(object):
         if not cmd:
             return
-        path = data.getVar('PATH', True)
+        path = data.getVar('PATH')
         if path:
             cmd = "PATH=\"%s\" %s" % (path, cmd)
         bb.note("Unpacking %s to %s/" % (file, unpackdir))
@@ -1507,7 +1507,7 @@ class FetchMethod(object):
     def generate_revision_key(self, ud, d, name):
         key = self._revision_key(ud, d, name)
-        return "%s-%s" % (key, d.getVar("PN", True) or "")
+        return "%s-%s" % (key, d.getVar("PN") or "")

 class Fetch(object):
     def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
@@ -1515,14 +1515,14 @@ class Fetch(object):
             raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
         if len(urls) == 0:
-            urls = d.getVar("SRC_URI", True).split()
+            urls = d.getVar("SRC_URI").split()
         self.urls = urls
         self.d = d
         self.ud = {}
         self.connection_cache = connection_cache
-        fn = d.getVar('FILE', True)
-        mc = d.getVar('__BBMULTICONFIG', True) or ""
+        fn = d.getVar('FILE')
+        mc = d.getVar('__BBMULTICONFIG') or ""
         if cache and fn and mc + fn in urldata_cache:
             self.ud = urldata_cache[mc + fn]
@@ -1565,8 +1565,8 @@ class Fetch(object):
         if not urls:
             urls = self.urls
-        network = self.d.getVar("BB_NO_NETWORK", True)
-        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1")
+        network = self.d.getVar("BB_NO_NETWORK")
+        premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
         for u in urls:
             ud = self.ud[u]
@@ -1584,7 +1584,7 @@ class Fetch(object):
                     localpath = ud.localpath
                 elif m.try_premirror(ud, self.d):
                     logger.debug(1, "Trying PREMIRRORS")
-                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                     localpath = try_mirrors(self, self.d, ud, mirrors, False)
                 if premirroronly:
@@ -1624,7 +1624,7 @@ class Fetch(object):
                         if not verified_stamp:
                             m.clean(ud, self.d)
                         logger.debug(1, "Trying MIRRORS")
-                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                         localpath = try_mirrors(self, self.d, ud, mirrors)
                 if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
@@ -1657,7 +1657,7 @@ class Fetch(object):
             m = ud.method
             logger.debug(1, "Testing URL %s", u)
             # First try checking uri, u, from PREMIRRORS
-            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
+            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
             ret = try_mirrors(self, self.d, ud, mirrors, True)
             if not ret:
                 # Next try checking from the original uri, u
@@ -1665,7 +1665,7 @@ class Fetch(object):
                     ret = m.checkstatus(self, ud, self.d)
                 except:
                     # Finally, try checking uri, u, from MIRRORS
-                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
+                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                     ret = try_mirrors(self, self.d, ud, mirrors, True)
             if not ret:


@@ -108,13 +108,13 @@ class ClearCase(FetchMethod):
         else:
             ud.module = ""
-        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
+        ud.basecmd = d.getVar("FETCHCMD_ccrc") or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
-        if d.getVar("SRCREV", True) == "INVALID":
+        if d.getVar("SRCREV") == "INVALID":
             raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
         ud.label = d.getVar("SRCREV", False)
-        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
+        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC")
         ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)
@@ -124,7 +124,7 @@ class ClearCase(FetchMethod):
         ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
         ud.csname = "%s-config-spec" % (ud.identifier)
-        ud.ccasedir = os.path.join(d.getVar("DL_DIR", True), ud.type)
+        ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
         ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
         ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
         ud.localfile = "%s.tar.gz" % (ud.identifier)
@@ -144,7 +144,7 @@ class ClearCase(FetchMethod):
         self.debug("configspecfile = %s" % ud.configspecfile)
         self.debug("localfile = %s" % ud.localfile)
-        ud.localfile = os.path.join(d.getVar("DL_DIR", True), ud.localfile)
+        ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)

     def _build_ccase_command(self, ud, command):
         """


@@ -87,10 +87,10 @@ class Cvs(FetchMethod):
             cvsroot = ud.path
         else:
             cvsroot = ":" + method
-            cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
+            cvsproxyhost = d.getVar('CVS_PROXY_HOST')
             if cvsproxyhost:
                 cvsroot += ";proxy=" + cvsproxyhost
-            cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
+            cvsproxyport = d.getVar('CVS_PROXY_PORT')
             if cvsproxyport:
                 cvsroot += ";proxyport=" + cvsproxyport
             cvsroot += ":" + ud.user
@@ -110,7 +110,7 @@ class Cvs(FetchMethod):
         if ud.tag:
             options.append("-r %s" % ud.tag)
-        cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
+        cvsbasecmd = d.getVar("FETCHCMD_cvs")
         cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
         cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
@@ -120,8 +120,8 @@ class Cvs(FetchMethod):
         # create module directory
         logger.debug(2, "Fetch: checking for module directory")
-        pkg = d.getVar('PN', True)
-        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
+        pkg = d.getVar('PN')
+        pkgdir = os.path.join(d.getVar('CVSDIR'), pkg)
         moddir = os.path.join(pkgdir, localdir)
         workdir = None
         if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
@@ -164,8 +164,8 @@ class Cvs(FetchMethod):
     def clean(self, ud, d):
         """ Clean CVS Files and tarballs """
-        pkg = d.getVar('PN', True)
-        pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)
+        pkg = d.getVar('PN')
+        pkgdir = os.path.join(d.getVar("CVSDIR"), pkg)
         bb.utils.remove(pkgdir, True)
         bb.utils.remove(ud.localpath)


@@ -182,9 +182,9 @@ class Git(FetchMethod):
         if ud.usehead:
             ud.unresolvedrev['default'] = 'HEAD'
-        ud.basecmd = d.getVar("FETCHCMD_git", True) or "git -c core.fsyncobjectfiles=0"
+        ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
-        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) or "0") != "0") or ud.rebaseable
+        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") or ud.rebaseable
         ud.setup_revisons(d)
@@ -207,8 +207,8 @@ class Git(FetchMethod):
         for name in ud.names:
             gitsrcname = gitsrcname + '_' + ud.revisions[name]
         ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
-        gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)
+        gitdir = d.getVar("GITDIR") or (d.getVar("DL_DIR") + "/git2/")
         ud.clonedir = os.path.join(gitdir, gitsrcname)
         ud.localfile = ud.clonedir
@@ -229,7 +229,7 @@ class Git(FetchMethod):
     def try_premirror(self, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
-        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
            return True
        if os.path.exists(ud.clonedir):
            return False
@@ -418,7 +418,7 @@ class Git(FetchMethod):
        """
        pupver = ('', '')
-        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
+        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*")
        except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:


@@ -78,15 +78,15 @@ class Hg(FetchMethod):
         hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
                             ud.host, ud.path.replace('/', '.'))
         ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
-        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
+        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)
-        hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
+        hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg/")
         ud.pkgdir = os.path.join(hgdir, hgsrcname)
         ud.moddir = os.path.join(ud.pkgdir, ud.module)
         ud.localfile = ud.moddir
-        ud.basecmd = d.getVar("FETCHCMD_hg", True) or "/usr/bin/env hg"
+        ud.basecmd = d.getVar("FETCHCMD_hg") or "/usr/bin/env hg"
-        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
+        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS")

     def need_update(self, ud, d):
         revTag = ud.parm.get('rev', 'tip')
@@ -99,7 +99,7 @@ class Hg(FetchMethod):
     def try_premirror(self, ud, d):
         # If we don't do this, updating an existing checkout with only premirrors
         # is not possible
-        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
+        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
             return True
         if os.path.exists(ud.moddir):
             return False


@ -63,13 +63,13 @@ class Local(FetchMethod):
newpath = path newpath = path
if path[0] == "/": if path[0] == "/":
return [path] return [path]
filespath = d.getVar('FILESPATH', True) filespath = d.getVar('FILESPATH')
if filespath: if filespath:
logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":")))) logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
newpath, hist = bb.utils.which(filespath, path, history=True) newpath, hist = bb.utils.which(filespath, path, history=True)
searched.extend(hist) searched.extend(hist)
if not newpath: if not newpath:
filesdir = d.getVar('FILESDIR', True) filesdir = d.getVar('FILESDIR')
if filesdir: if filesdir:
logger.debug(2, "Searching for %s in path: %s" % (path, filesdir)) logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
newpath = os.path.join(filesdir, path) newpath = os.path.join(filesdir, path)
@ -81,7 +81,7 @@ class Local(FetchMethod):
logger.debug(2, "Searching for %s in path: %s" % (path, newpath)) logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
return searched return searched
if not os.path.exists(newpath): if not os.path.exists(newpath):
dldirfile = os.path.join(d.getVar("DL_DIR", True), path) dldirfile = os.path.join(d.getVar("DL_DIR"), path)
logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path)) logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
bb.utils.mkdirhier(os.path.dirname(dldirfile)) bb.utils.mkdirhier(os.path.dirname(dldirfile))
searched.append(dldirfile) searched.append(dldirfile)
@ -100,13 +100,13 @@ class Local(FetchMethod):
# no need to fetch local files, we'll deal with them in place. # no need to fetch local files, we'll deal with them in place.
if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath): if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
locations = [] locations = []
filespath = d.getVar('FILESPATH', True) filespath = d.getVar('FILESPATH')
if filespath: if filespath:
locations = filespath.split(":") locations = filespath.split(":")
filesdir = d.getVar('FILESDIR', True) filesdir = d.getVar('FILESDIR')
if filesdir: if filesdir:
locations.append(filesdir) locations.append(filesdir)
locations.append(d.getVar("DL_DIR", True)) locations.append(d.getVar("DL_DIR"))
msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations) msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
raise FetchError(msg) raise FetchError(msg)
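
The local fetcher resolves a file against the colon-separated FILESPATH through bb.utils.which() with history=True, which also returns every location tried so the FetchError above can list the searched paths. A sketch with made-up directories:

    import bb.utils

    filespath = "/layers/meta-a/files:/layers/meta-b/files"   # hypothetical search path
    newpath, hist = bb.utils.which(filespath, "defconfig", history=True)

    # newpath is "" when nothing matched; hist records each candidate tried
    print(newpath or "not found")
    print("searched:\n " + "\n ".join(hist))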


@ -87,12 +87,12 @@ class Npm(FetchMethod):
bb.utils.mkdirhier(ud.pkgdatadir) bb.utils.mkdirhier(ud.pkgdatadir)
ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest) ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate " self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
ud.prefixdir = prefixdir ud.prefixdir = prefixdir
ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) or "0") != "0") ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0")
ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version) ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball) ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)
def need_update(self, ud, d): def need_update(self, ud, d):
if os.path.exists(ud.localpath): if os.path.exists(ud.localpath):
@ -102,7 +102,7 @@ class Npm(FetchMethod):
def _runwget(self, ud, d, command, quiet): def _runwget(self, ud, d, command, quiet):
logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command)) logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
bb.fetch2.check_network_access(d, command) bb.fetch2.check_network_access(d, command)
dldir = d.getVar("DL_DIR", True) dldir = d.getVar("DL_DIR")
runfetchcmd(command, d, quiet, workdir=dldir) runfetchcmd(command, d, quiet, workdir=dldir)
def _unpackdep(self, ud, pkg, data, destdir, dldir, d): def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
@ -116,7 +116,7 @@ class Npm(FetchMethod):
# Change to subdir before executing command # Change to subdir before executing command
if not os.path.exists(destdir): if not os.path.exists(destdir):
os.makedirs(destdir) os.makedirs(destdir)
path = d.getVar('PATH', True) path = d.getVar('PATH')
if path: if path:
cmd = "PATH=\"%s\" %s" % (path, cmd) cmd = "PATH=\"%s\" %s" % (path, cmd)
bb.note("Unpacking %s to %s/" % (file, destdir)) bb.note("Unpacking %s to %s/" % (file, destdir))
@ -132,7 +132,7 @@ class Npm(FetchMethod):
def unpack(self, ud, destdir, d): def unpack(self, ud, destdir, d):
dldir = d.getVar("DL_DIR", True) dldir = d.getVar("DL_DIR")
depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version) depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version)
with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile: with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile:
workobj = json.load(datafile) workobj = json.load(datafile)
@ -251,12 +251,12 @@ class Npm(FetchMethod):
lockdown = {} lockdown = {}
if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror): if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
dest = d.getVar("DL_DIR", True) dest = d.getVar("DL_DIR")
bb.utils.mkdirhier(dest) bb.utils.mkdirhier(dest)
runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest) runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
return return
shwrf = d.getVar('NPM_SHRINKWRAP', True) shwrf = d.getVar('NPM_SHRINKWRAP')
logger.debug(2, "NPM shrinkwrap file is %s" % shwrf) logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
if shwrf: if shwrf:
try: try:
@ -266,7 +266,7 @@ class Npm(FetchMethod):
raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e))) raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
elif not ud.ignore_checksums: elif not ud.ignore_checksums:
logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname) logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
lckdf = d.getVar('NPM_LOCKDOWN', True) lckdf = d.getVar('NPM_LOCKDOWN')
logger.debug(2, "NPM lockdown file is %s" % lckdf) logger.debug(2, "NPM lockdown file is %s" % lckdf)
if lckdf: if lckdf:
try: try:
@ -292,7 +292,7 @@ class Npm(FetchMethod):
if os.path.islink(ud.fullmirror): if os.path.islink(ud.fullmirror):
os.unlink(ud.fullmirror) os.unlink(ud.fullmirror)
dldir = d.getVar("DL_DIR", True) dldir = d.getVar("DL_DIR")
logger.info("Creating tarball of npm data") logger.info("Creating tarball of npm data")
runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d, runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
workdir=dldir) workdir=dldir)
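
One detail in the Npm hunk worth calling out: BB_GENERATE_MIRROR_TARBALLS is a string variable, so write_tarballs treats any value other than "0" as enabled, with "0" standing in when the variable is unset. In isolation:

    import bb.data_smart

    d = bb.data_smart.DataSmart()
    # unset: the "or" default of "0" keeps tarball generation off
    assert ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") is False

    d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
    assert ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") is True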


@ -34,7 +34,7 @@ class Osc(FetchMethod):
# Create paths to osc checkouts # Create paths to osc checkouts
relpath = self._strip_leading_slashes(ud.path) relpath = self._strip_leading_slashes(ud.path)
ud.pkgdir = os.path.join(d.getVar('OSCDIR', True), ud.host) ud.pkgdir = os.path.join(d.getVar('OSCDIR'), ud.host)
ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
if 'rev' in ud.parm: if 'rev' in ud.parm:
@ -84,7 +84,7 @@ class Osc(FetchMethod):
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(d.getVar('OSCDIR', True), ud.path, ud.module), os.R_OK): if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
oscupdatecmd = self._buildosccommand(ud, d, "update") oscupdatecmd = self._buildosccommand(ud, d, "update")
logger.info("Update "+ ud.url) logger.info("Update "+ ud.url)
# update sources there # update sources there
@ -112,7 +112,7 @@ class Osc(FetchMethod):
Generate a .oscrc to be used for this run. Generate a .oscrc to be used for this run.
""" """
config_path = os.path.join(d.getVar('OSCDIR', True), "oscrc") config_path = os.path.join(d.getVar('OSCDIR'), "oscrc")
if (os.path.exists(config_path)): if (os.path.exists(config_path)):
os.remove(config_path) os.remove(config_path)
@ -121,8 +121,8 @@ class Osc(FetchMethod):
f.write("apisrv = %s\n" % ud.host) f.write("apisrv = %s\n" % ud.host)
f.write("scheme = http\n") f.write("scheme = http\n")
f.write("su-wrapper = su -c\n") f.write("su-wrapper = su -c\n")
f.write("build-root = %s\n" % d.getVar('WORKDIR', True)) f.write("build-root = %s\n" % d.getVar('WORKDIR'))
f.write("urllist = %s\n" % d.getVar("OSCURLLIST", True)) f.write("urllist = %s\n" % d.getVar("OSCURLLIST"))
f.write("extra-pkgs = gzip\n") f.write("extra-pkgs = gzip\n")
f.write("\n") f.write("\n")
f.write("[%s]\n" % ud.host) f.write("[%s]\n" % ud.host)


@ -44,13 +44,13 @@ class Perforce(FetchMethod):
provided by the env, use it. If P4PORT is specified by the recipe, use provided by the env, use it. If P4PORT is specified by the recipe, use
its values, which may override the settings in P4CONFIG. its values, which may override the settings in P4CONFIG.
""" """
ud.basecmd = d.getVar('FETCHCMD_p4', True) ud.basecmd = d.getVar('FETCHCMD_p4')
if not ud.basecmd: if not ud.basecmd:
ud.basecmd = "/usr/bin/env p4" ud.basecmd = "/usr/bin/env p4"
ud.dldir = d.getVar('P4DIR', True) ud.dldir = d.getVar('P4DIR')
if not ud.dldir: if not ud.dldir:
ud.dldir = '%s/%s' % (d.getVar('DL_DIR', True), 'p4') ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4')
path = ud.url.split('://')[1] path = ud.url.split('://')[1]
path = path.split(';')[0] path = path.split(';')[0]
@ -62,7 +62,7 @@ class Perforce(FetchMethod):
ud.path = path ud.path = path
ud.usingp4config = False ud.usingp4config = False
p4port = d.getVar('P4PORT', True) p4port = d.getVar('P4PORT')
if p4port: if p4port:
logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port) logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)


@ -56,12 +56,12 @@ class Repo(FetchMethod):
def download(self, ud, d): def download(self, ud, d):
"""Fetch url""" """Fetch url"""
if os.access(os.path.join(d.getVar("DL_DIR", True), ud.localfile), os.R_OK): if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath) logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
return return
gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", ".")) gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
repodir = d.getVar("REPODIR", True) or os.path.join(d.getVar("DL_DIR", True), "repo") repodir = d.getVar("REPODIR") or os.path.join(d.getVar("DL_DIR"), "repo")
codir = os.path.join(repodir, gitsrcname, ud.manifest) codir = os.path.join(repodir, gitsrcname, ud.manifest)
if ud.user: if ud.user:


@ -104,7 +104,7 @@ class SFTP(FetchMethod):
port = '-P %d' % urlo.port port = '-P %d' % urlo.port
urlo.port = None urlo.port = None
dldir = d.getVar('DL_DIR', True) dldir = d.getVar('DL_DIR')
lpath = os.path.join(dldir, ud.localfile) lpath = os.path.join(dldir, ud.localfile)
user = '' user = ''


@ -87,11 +87,11 @@ class SSH(FetchMethod):
m = __pattern__.match(urldata.url) m = __pattern__.match(urldata.url)
path = m.group('path') path = m.group('path')
host = m.group('host') host = m.group('host')
urldata.localpath = os.path.join(d.getVar('DL_DIR', True), urldata.localpath = os.path.join(d.getVar('DL_DIR'),
os.path.basename(os.path.normpath(path))) os.path.basename(os.path.normpath(path)))
def download(self, urldata, d): def download(self, urldata, d):
dldir = d.getVar('DL_DIR', True) dldir = d.getVar('DL_DIR')
m = __pattern__.match(urldata.url) m = __pattern__.match(urldata.url)
path = m.group('path') path = m.group('path')


@ -50,7 +50,7 @@ class Svn(FetchMethod):
if not "module" in ud.parm: if not "module" in ud.parm:
raise MissingParameterError('module', ud.url) raise MissingParameterError('module', ud.url)
ud.basecmd = d.getVar('FETCHCMD_svn', True) ud.basecmd = d.getVar('FETCHCMD_svn')
ud.module = ud.parm["module"] ud.module = ud.parm["module"]


@ -88,7 +88,7 @@ class Wget(FetchMethod):
if not ud.localfile: if not ud.localfile:
ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d) ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d)
self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate" self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
def _runwget(self, ud, d, command, quiet): def _runwget(self, ud, d, command, quiet):
@ -104,7 +104,7 @@ class Wget(FetchMethod):
fetchcmd = self.basecmd fetchcmd = self.basecmd
if 'downloadfilename' in ud.parm: if 'downloadfilename' in ud.parm:
dldir = d.getVar("DL_DIR", True) dldir = d.getVar("DL_DIR")
bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile)) bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
fetchcmd += " -O " + dldir + os.sep + ud.localfile fetchcmd += " -O " + dldir + os.sep + ud.localfile
@ -543,7 +543,7 @@ class Wget(FetchMethod):
self.suffix_regex_comp = re.compile(psuffix_regex) self.suffix_regex_comp = re.compile(psuffix_regex)
# compile regex, can be specific by package or generic regex # compile regex, can be specific by package or generic regex
pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True) pn_regex = d.getVar('UPSTREAM_CHECK_REGEX')
if pn_regex: if pn_regex:
package_custom_regex_comp = re.compile(pn_regex) package_custom_regex_comp = re.compile(pn_regex)
else: else:
@ -564,7 +564,7 @@ class Wget(FetchMethod):
sanity check to ensure same name and type. sanity check to ensure same name and type.
""" """
package = ud.path.split("/")[-1] package = ud.path.split("/")[-1]
current_version = ['', d.getVar('PV', True), ''] current_version = ['', d.getVar('PV'), '']
"""possible to have no version in pkg name, such as spectrum-fw""" """possible to have no version in pkg name, such as spectrum-fw"""
if not re.search("\d+", package): if not re.search("\d+", package):
@ -579,7 +579,7 @@ class Wget(FetchMethod):
bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern)) bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))
uri = "" uri = ""
regex_uri = d.getVar("UPSTREAM_CHECK_URI", True) regex_uri = d.getVar("UPSTREAM_CHECK_URI")
if not regex_uri: if not regex_uri:
path = ud.path.split(package)[0] path = ud.path.split(package)[0]
@ -588,7 +588,7 @@ class Wget(FetchMethod):
dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/") dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
m = dirver_regex.search(path) m = dirver_regex.search(path)
if m: if m:
pn = d.getVar('PN', True) pn = d.getVar('PN')
dirver = m.group('dirver') dirver = m.group('dirver')
dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn))) dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
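
When UPSTREAM_CHECK_URI is unset, latest_versionstring above falls back to deriving the version directory from the URL path with dirver_regex. The pattern on its own, applied to an illustrative path:

    import re

    dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
    m = dirver_regex.search("/pub/releases/1.4/foo-1.4.2.tar.gz")
    print(m.group("dirver"))   # -> 1.4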


@ -141,7 +141,7 @@ def getInterval(configuration):
spaceDefault = 50 * 1024 * 1024 spaceDefault = 50 * 1024 * 1024
inodeDefault = 5 * 1024 inodeDefault = 5 * 1024
interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", True) interval = configuration.getVar("BB_DISKMON_WARNINTERVAL")
if not interval: if not interval:
return spaceDefault, inodeDefault return spaceDefault, inodeDefault
else: else:
@ -179,7 +179,7 @@ class diskMonitor:
self.enableMonitor = False self.enableMonitor = False
self.configuration = configuration self.configuration = configuration
BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None BBDirs = configuration.getVar("BB_DISKMON_DIRS") or None
if BBDirs: if BBDirs:
self.devDict = getDiskData(BBDirs, configuration) self.devDict = getDiskData(BBDirs, configuration)
if self.devDict: if self.devDict:


@ -123,7 +123,7 @@ def init_parser(d):
def resolve_file(fn, d): def resolve_file(fn, d):
if not os.path.isabs(fn): if not os.path.isabs(fn):
bbpath = d.getVar("BBPATH", True) bbpath = d.getVar("BBPATH")
newfn, attempts = bb.utils.which(bbpath, fn, history=True) newfn, attempts = bb.utils.which(bbpath, fn, history=True)
for af in attempts: for af in attempts:
mark_dependency(d, af) mark_dependency(d, af)


@ -407,7 +407,7 @@ def _expand_versions(versions):
versions = itertools.chain(newversions, versions) versions = itertools.chain(newversions, versions)
def multi_finalize(fn, d): def multi_finalize(fn, d):
appends = (d.getVar("__BBAPPEND", True) or "").split() appends = (d.getVar("__BBAPPEND") or "").split()
for append in appends: for append in appends:
logger.debug(1, "Appending .bbappend file %s to %s", append, fn) logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
bb.parse.BBHandler.handle(append, d, True) bb.parse.BBHandler.handle(append, d, True)
@ -422,16 +422,16 @@ def multi_finalize(fn, d):
d.setVar("__SKIPPED", e.args[0]) d.setVar("__SKIPPED", e.args[0])
datastores = {"": safe_d} datastores = {"": safe_d}
versions = (d.getVar("BBVERSIONS", True) or "").split() versions = (d.getVar("BBVERSIONS") or "").split()
if versions: if versions:
pv = orig_pv = d.getVar("PV", True) pv = orig_pv = d.getVar("PV")
baseversions = {} baseversions = {}
def verfunc(ver, d, pv_d = None): def verfunc(ver, d, pv_d = None):
if pv_d is None: if pv_d is None:
pv_d = d pv_d = d
overrides = d.getVar("OVERRIDES", True).split(":") overrides = d.getVar("OVERRIDES").split(":")
pv_d.setVar("PV", ver) pv_d.setVar("PV", ver)
overrides.append(ver) overrides.append(ver)
bpv = baseversions.get(ver) or orig_pv bpv = baseversions.get(ver) or orig_pv
@ -466,7 +466,7 @@ def multi_finalize(fn, d):
_create_variants(datastores, versions, verfunc, onlyfinalise) _create_variants(datastores, versions, verfunc, onlyfinalise)
extended = d.getVar("BBCLASSEXTEND", True) or "" extended = d.getVar("BBCLASSEXTEND") or ""
if extended: if extended:
# the following is to support bbextends with arguments, for e.g. multilib # the following is to support bbextends with arguments, for e.g. multilib
# an example is as follows: # an example is as follows:
@ -484,7 +484,7 @@ def multi_finalize(fn, d):
else: else:
extendedmap[ext] = ext extendedmap[ext] = ext
pn = d.getVar("PN", True) pn = d.getVar("PN")
def extendfunc(name, d): def extendfunc(name, d):
if name != extendedmap[name]: if name != extendedmap[name]:
d.setVar("BBEXTENDCURR", extendedmap[name]) d.setVar("BBEXTENDCURR", extendedmap[name])


@ -66,7 +66,7 @@ def inherit(files, fn, lineno, d):
file = os.path.join('classes', '%s.bbclass' % file) file = os.path.join('classes', '%s.bbclass' % file)
if not os.path.isabs(file): if not os.path.isabs(file):
bbpath = d.getVar("BBPATH", True) bbpath = d.getVar("BBPATH")
abs_fn, attempts = bb.utils.which(bbpath, file, history=True) abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
for af in attempts: for af in attempts:
if af != abs_fn: if af != abs_fn:


@ -83,16 +83,16 @@ def include(parentfn, fn, lineno, data, error_out):
if not os.path.isabs(fn): if not os.path.isabs(fn):
dname = os.path.dirname(parentfn) dname = os.path.dirname(parentfn)
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True)) bbpath = "%s:%s" % (dname, data.getVar("BBPATH"))
abs_fn, attempts = bb.utils.which(bbpath, fn, history=True) abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
if abs_fn and bb.parse.check_dependency(data, abs_fn): if abs_fn and bb.parse.check_dependency(data, abs_fn):
logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True))) logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE')))
for af in attempts: for af in attempts:
bb.parse.mark_dependency(data, af) bb.parse.mark_dependency(data, af)
if abs_fn: if abs_fn:
fn = abs_fn fn = abs_fn
elif bb.parse.check_dependency(data, fn): elif bb.parse.check_dependency(data, fn):
logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True))) logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE')))
try: try:
bb.parse.handle(fn, data, True) bb.parse.handle(fn, data, True)


@ -207,8 +207,8 @@ def connect(database):
def persist(domain, d): def persist(domain, d):
"""Convenience factory for SQLTable objects based upon metadata""" """Convenience factory for SQLTable objects based upon metadata"""
import bb.utils import bb.utils
cachedir = (d.getVar("PERSISTENT_DIR", True) or cachedir = (d.getVar("PERSISTENT_DIR") or
d.getVar("CACHE", True)) d.getVar("CACHE"))
if not cachedir: if not cachedir:
logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
sys.exit(1) sys.exit(1)


@ -123,11 +123,11 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
# pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
# hence we do this manually rather than use OVERRIDES # hence we do this manually rather than use OVERRIDES
preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn, True) preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn)
if not preferred_v: if not preferred_v:
preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn, True) preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn)
if not preferred_v: if not preferred_v:
preferred_v = cfgData.getVar("PREFERRED_VERSION", True) preferred_v = cfgData.getVar("PREFERRED_VERSION")
if preferred_v: if preferred_v:
m = re.match('(\d+:)*(.*)(_.*)*', preferred_v) m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
@ -289,7 +289,7 @@ def filterProviders(providers, item, cfgData, dataCache):
eligible = _filterProviders(providers, item, cfgData, dataCache) eligible = _filterProviders(providers, item, cfgData, dataCache)
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, True) prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item)
if prefervar: if prefervar:
dataCache.preferred[item] = prefervar dataCache.preferred[item] = prefervar
@ -318,7 +318,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
eligible = _filterProviders(providers, item, cfgData, dataCache) eligible = _filterProviders(providers, item, cfgData, dataCache)
# First try and match any PREFERRED_RPROVIDER entry # First try and match any PREFERRED_RPROVIDER entry
prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item, True) prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item)
foundUnique = False foundUnique = False
if prefervar: if prefervar:
for p in eligible: for p in eligible:
@ -345,7 +345,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache):
pn = dataCache.pkg_fn[p] pn = dataCache.pkg_fn[p]
provides = dataCache.pn_provides[pn] provides = dataCache.pn_provides[pn]
for provide in provides: for provide in provides:
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True) prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide)
#logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys()) #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
if prefervar in pns and pns[prefervar] not in preferred: if prefervar in pns and pns[prefervar] not in preferred:
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)


@ -262,8 +262,8 @@ class RunQueueData:
self.rq = rq self.rq = rq
self.warn_multi_bb = False self.warn_multi_bb = False
self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", True) or "" self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or ""
self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split() self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData) self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData)
self.setscenewhitelist_checked = False self.setscenewhitelist_checked = False
self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter() self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter()
@ -976,10 +976,10 @@ class RunQueue:
self.cfgData = cfgData self.cfgData = cfgData
self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets) self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets)
self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile" self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile"
self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None
self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION2", True) or None self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION2") or None
self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID", True) or None self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None
self.state = runQueuePrepare self.state = runQueuePrepare
@ -997,8 +997,8 @@ class RunQueue:
magic = "decafbadbad" magic = "decafbadbad"
if fakeroot: if fakeroot:
magic = magic + "beef" magic = magic + "beef"
fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True) fakerootcmd = self.cfgData.getVar("FAKEROOTCMD")
fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split() fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV") or "").split()
env = os.environ.copy() env = os.environ.copy()
for key, value in (var.split('=') for var in fakerootenv): for key, value in (var.split('=') for var in fakerootenv):
env[key] = value env[key] = value
@ -1024,9 +1024,9 @@ class RunQueue:
"logdefaultverboselogs" : bb.msg.loggerVerboseLogs, "logdefaultverboselogs" : bb.msg.loggerVerboseLogs,
"logdefaultdomain" : bb.msg.loggerDefaultDomains, "logdefaultdomain" : bb.msg.loggerDefaultDomains,
"prhost" : self.cooker.prhost, "prhost" : self.cooker.prhost,
"buildname" : self.cfgData.getVar("BUILDNAME", True), "buildname" : self.cfgData.getVar("BUILDNAME"),
"date" : self.cfgData.getVar("DATE", True), "date" : self.cfgData.getVar("DATE"),
"time" : self.cfgData.getVar("TIME", True), "time" : self.cfgData.getVar("TIME"),
} }
worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>") worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>")
@ -1427,8 +1427,8 @@ class RunQueueExecute:
self.cfgData = rq.cfgData self.cfgData = rq.cfgData
self.rqdata = rq.rqdata self.rqdata = rq.rqdata
self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", True) or 1) self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
self.scheduler = self.cfgData.getVar("BB_SCHEDULER", True) or "speed" self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
self.runq_buildable = set() self.runq_buildable = set()
self.runq_running = set() self.runq_running = set()
@ -1630,7 +1630,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
if type(obj) is type and if type(obj) is type and
issubclass(obj, RunQueueScheduler)) issubclass(obj, RunQueueScheduler))
user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True) user_schedulers = self.cfgData.getVar("BB_SCHEDULERS")
if user_schedulers: if user_schedulers:
for sched in user_schedulers.split(): for sched in user_schedulers.split():
if not "." in sched: if not "." in sched:
@ -2402,9 +2402,9 @@ class runQueuePipe():
self.input.close() self.input.close()
def get_setscene_enforce_whitelist(d): def get_setscene_enforce_whitelist(d):
if d.getVar('BB_SETSCENE_ENFORCE', True) != '1': if d.getVar('BB_SETSCENE_ENFORCE') != '1':
return None return None
whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST", True) or "").split() whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split()
outlist = [] outlist = []
for item in whitelist[:]: for item in whitelist[:]:
if item.startswith('%:'): if item.startswith('%:'):
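
In the RunQueue hunks, FAKEROOTBASEENV is a whitespace-separated list of KEY=VALUE pairs folded into the worker environment by the generator expression shown above. The same fold in isolation; the variable's value here is invented:

    import os

    fakerootenv = "PSEUDO_DISABLED=0 PSEUDO_UNLOAD=1"   # hypothetical value
    env = os.environ.copy()
    for key, value in (var.split('=') for var in fakerootenv.split()):
        env[key] = value
    assert env["PSEUDO_UNLOAD"] == "1"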


@ -13,7 +13,7 @@ def init(d):
siggens = [obj for obj in globals().values() siggens = [obj for obj in globals().values()
if type(obj) is type and issubclass(obj, SignatureGenerator)] if type(obj) is type and issubclass(obj, SignatureGenerator)]
desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop" desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop"
for sg in siggens: for sg in siggens:
if desired == sg.name: if desired == sg.name:
return sg(d) return sg(d)
@ -82,10 +82,10 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.gendeps = {} self.gendeps = {}
self.lookupcache = {} self.lookupcache = {}
self.pkgnameextract = re.compile("(?P<fn>.*)\..*") self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split()) self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
self.taskwhitelist = None self.taskwhitelist = None
self.init_rundepcheck(data) self.init_rundepcheck(data)
checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE", True) checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
if checksum_cache_file: if checksum_cache_file:
self.checksum_cache = FileChecksumCache() self.checksum_cache = FileChecksumCache()
self.checksum_cache.init_cache(data, checksum_cache_file) self.checksum_cache.init_cache(data, checksum_cache_file)
@ -93,7 +93,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.checksum_cache = None self.checksum_cache = None
def init_rundepcheck(self, data): def init_rundepcheck(self, data):
self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None
if self.taskwhitelist: if self.taskwhitelist:
self.twl = re.compile(self.taskwhitelist) self.twl = re.compile(self.taskwhitelist)
else: else:
@ -160,7 +160,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
#Slow but can be useful for debugging mismatched basehashes #Slow but can be useful for debugging mismatched basehashes
#for task in self.taskdeps[fn]: #for task in self.taskdeps[fn]:
# self.dump_sigtask(fn, task, d.getVar("STAMP", True), False) # self.dump_sigtask(fn, task, d.getVar("STAMP"), False)
for task in taskdeps: for task in taskdeps:
d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task]) d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])
@ -345,8 +345,8 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
def dump_this_task(outfile, d): def dump_this_task(outfile, d):
import bb.parse import bb.parse
fn = d.getVar("BB_FILENAME", True) fn = d.getVar("BB_FILENAME")
task = "do_" + d.getVar("BB_CURRENTTASK", True) task = "do_" + d.getVar("BB_CURRENTTASK")
referencestamp = bb.build.stamp_internal(task, d, None, True) referencestamp = bb.build.stamp_internal(task, d, None, True)
bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp) bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
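
init() above scans the module for SignatureGenerator subclasses and instantiates the one whose name attribute matches BB_SIGNATURE_HANDLER, defaulting to "noop" when the variable is unset. The select-by-name shape reduced to a self-contained sketch (class bodies are simplified stand-ins, not the real generators):

    class SignatureGenerator:
        name = "noop"

    class SignatureGeneratorBasicHash(SignatureGenerator):
        name = "basichash"

    def init_sketch(desired):
        siggens = [SignatureGenerator, SignatureGeneratorBasicHash]
        for sg in siggens:
            if desired == sg.name:
                return sg()
        # simplification: fall back to the noop base when nothing matches
        return SignatureGenerator()

    assert type(init_sketch("basichash")) is SignatureGeneratorBasicHash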


@ -68,7 +68,7 @@ class VariableReferenceTest(ReferenceTest):
def test_python_reference(self): def test_python_reference(self):
self.setEmptyVars(["BAR"]) self.setEmptyVars(["BAR"])
self.parseExpression("${@d.getVar('BAR', True) + 'foo'}") self.parseExpression("${@d.getVar('BAR') + 'foo'}")
self.assertReferences(set(["BAR"])) self.assertReferences(set(["BAR"]))
class ShellReferenceTest(ReferenceTest): class ShellReferenceTest(ReferenceTest):
@ -209,17 +209,17 @@ be. These unit tests are testing snippets."""
return " " + value return " " + value
def test_getvar_reference(self): def test_getvar_reference(self):
self.parseExpression("d.getVar('foo', True)") self.parseExpression("d.getVar('foo')")
self.assertReferences(set(["foo"])) self.assertReferences(set(["foo"]))
self.assertExecs(set()) self.assertExecs(set())
def test_getvar_computed_reference(self): def test_getvar_computed_reference(self):
self.parseExpression("d.getVar('f' + 'o' + 'o', True)") self.parseExpression("d.getVar('f' + 'o' + 'o')")
self.assertReferences(set()) self.assertReferences(set())
self.assertExecs(set()) self.assertExecs(set())
def test_getvar_exec_reference(self): def test_getvar_exec_reference(self):
self.parseExpression("eval('d.getVar(\"foo\", True)')") self.parseExpression("eval('d.getVar(\"foo\")')")
self.assertReferences(set()) self.assertReferences(set())
self.assertExecs(set(["eval"])) self.assertExecs(set(["eval"]))
@ -269,11 +269,11 @@ be. These unit tests are testing snippets."""
class DependencyReferenceTest(ReferenceTest): class DependencyReferenceTest(ReferenceTest):
pydata = """ pydata = """
d.getVar('somevar', True) d.getVar('somevar')
def test(d): def test(d):
foo = 'bar %s' % 'foo' foo = 'bar %s' % 'foo'
def test2(d): def test2(d):
d.getVar(foo, True) d.getVar(foo)
d.getVar('bar', False) d.getVar('bar', False)
test2(d) test2(d)


@ -77,13 +77,13 @@ class DataExpansions(unittest.TestCase):
self.assertEqual(str(val), "boo value_of_foo") self.assertEqual(str(val), "boo value_of_foo")
def test_python_snippet_getvar(self): def test_python_snippet_getvar(self):
val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}") val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
self.assertEqual(str(val), "value_of_foo value_of_bar") self.assertEqual(str(val), "value_of_foo value_of_bar")
def test_python_unexpanded(self): def test_python_unexpanded(self):
self.d.setVar("bar", "${unsetvar}") self.d.setVar("bar", "${unsetvar}")
val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}") val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
self.assertEqual(str(val), "${@d.getVar('foo', True) + ' ${unsetvar}'}") self.assertEqual(str(val), "${@d.getVar('foo') + ' ${unsetvar}'}")
def test_python_snippet_syntax_error(self): def test_python_snippet_syntax_error(self):
self.d.setVar("FOO", "${@foo = 5}") self.d.setVar("FOO", "${@foo = 5}")
@ -99,7 +99,7 @@ class DataExpansions(unittest.TestCase):
self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True) self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
def test_value_containing_value(self): def test_value_containing_value(self):
val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}") val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
self.assertEqual(str(val), "value_of_foo value_of_bar") self.assertEqual(str(val), "value_of_foo value_of_bar")
def test_reference_undefined_var(self): def test_reference_undefined_var(self):
@ -109,7 +109,7 @@ class DataExpansions(unittest.TestCase):
def test_double_reference(self): def test_double_reference(self):
self.d.setVar("BAR", "bar value") self.d.setVar("BAR", "bar value")
self.d.setVar("FOO", "${BAR} foo ${BAR}") self.d.setVar("FOO", "${BAR} foo ${BAR}")
val = self.d.getVar("FOO", True) val = self.d.getVar("FOO")
self.assertEqual(str(val), "bar value foo bar value") self.assertEqual(str(val), "bar value foo bar value")
def test_direct_recursion(self): def test_direct_recursion(self):
@ -129,12 +129,12 @@ class DataExpansions(unittest.TestCase):
def test_incomplete_varexp_single_quotes(self): def test_incomplete_varexp_single_quotes(self):
self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc") self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc")
val = self.d.getVar("FOO", True) val = self.d.getVar("FOO")
self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc") self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc")
def test_nonstring(self): def test_nonstring(self):
self.d.setVar("TEST", 5) self.d.setVar("TEST", 5)
val = self.d.getVar("TEST", True) val = self.d.getVar("TEST")
self.assertEqual(str(val), "5") self.assertEqual(str(val), "5")
def test_rename(self): def test_rename(self):
@ -234,19 +234,19 @@ class TestConcat(unittest.TestCase):
def test_prepend(self): def test_prepend(self):
self.d.setVar("TEST", "${VAL}") self.d.setVar("TEST", "${VAL}")
self.d.prependVar("TEST", "${FOO}:") self.d.prependVar("TEST", "${FOO}:")
self.assertEqual(self.d.getVar("TEST", True), "foo:val") self.assertEqual(self.d.getVar("TEST"), "foo:val")
def test_append(self): def test_append(self):
self.d.setVar("TEST", "${VAL}") self.d.setVar("TEST", "${VAL}")
self.d.appendVar("TEST", ":${BAR}") self.d.appendVar("TEST", ":${BAR}")
self.assertEqual(self.d.getVar("TEST", True), "val:bar") self.assertEqual(self.d.getVar("TEST"), "val:bar")
def test_multiple_append(self): def test_multiple_append(self):
self.d.setVar("TEST", "${VAL}") self.d.setVar("TEST", "${VAL}")
self.d.prependVar("TEST", "${FOO}:") self.d.prependVar("TEST", "${FOO}:")
self.d.appendVar("TEST", ":val2") self.d.appendVar("TEST", ":val2")
self.d.appendVar("TEST", ":${BAR}") self.d.appendVar("TEST", ":${BAR}")
self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar") self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar")
class TestConcatOverride(unittest.TestCase): class TestConcatOverride(unittest.TestCase):
def setUp(self): def setUp(self):
@ -259,13 +259,13 @@ class TestConcatOverride(unittest.TestCase):
self.d.setVar("TEST", "${VAL}") self.d.setVar("TEST", "${VAL}")
self.d.setVar("TEST_prepend", "${FOO}:") self.d.setVar("TEST_prepend", "${FOO}:")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "foo:val") self.assertEqual(self.d.getVar("TEST"), "foo:val")
def test_append(self): def test_append(self):
self.d.setVar("TEST", "${VAL}") self.d.setVar("TEST", "${VAL}")
self.d.setVar("TEST_append", ":${BAR}") self.d.setVar("TEST_append", ":${BAR}")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "val:bar") self.assertEqual(self.d.getVar("TEST"), "val:bar")
def test_multiple_append(self): def test_multiple_append(self):
self.d.setVar("TEST", "${VAL}") self.d.setVar("TEST", "${VAL}")
@ -273,47 +273,47 @@ class TestConcatOverride(unittest.TestCase):
self.d.setVar("TEST_append", ":val2") self.d.setVar("TEST_append", ":val2")
self.d.setVar("TEST_append", ":${BAR}") self.d.setVar("TEST_append", ":${BAR}")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar") self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar")
def test_append_unset(self): def test_append_unset(self):
self.d.setVar("TEST_prepend", "${FOO}:") self.d.setVar("TEST_prepend", "${FOO}:")
self.d.setVar("TEST_append", ":val2") self.d.setVar("TEST_append", ":val2")
self.d.setVar("TEST_append", ":${BAR}") self.d.setVar("TEST_append", ":${BAR}")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "foo::val2:bar") self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar")
def test_remove(self): def test_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}") self.d.setVar("TEST", "${VAL} ${BAR}")
self.d.setVar("TEST_remove", "val") self.d.setVar("TEST_remove", "val")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "bar") self.assertEqual(self.d.getVar("TEST"), "bar")
def test_doubleref_remove(self): def test_doubleref_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}") self.d.setVar("TEST", "${VAL} ${BAR}")
self.d.setVar("TEST_remove", "val") self.d.setVar("TEST_remove", "val")
self.d.setVar("TEST_TEST", "${TEST} ${TEST}") self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST_TEST", True), "bar bar") self.assertEqual(self.d.getVar("TEST_TEST"), "bar bar")
def test_empty_remove(self): def test_empty_remove(self):
self.d.setVar("TEST", "") self.d.setVar("TEST", "")
self.d.setVar("TEST_remove", "val") self.d.setVar("TEST_remove", "val")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "") self.assertEqual(self.d.getVar("TEST"), "")
def test_remove_expansion(self): def test_remove_expansion(self):
self.d.setVar("BAR", "Z") self.d.setVar("BAR", "Z")
self.d.setVar("TEST", "${BAR}/X Y") self.d.setVar("TEST", "${BAR}/X Y")
self.d.setVar("TEST_remove", "${BAR}/X") self.d.setVar("TEST_remove", "${BAR}/X")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "Y") self.assertEqual(self.d.getVar("TEST"), "Y")
def test_remove_expansion_items(self): def test_remove_expansion_items(self):
self.d.setVar("TEST", "A B C D") self.d.setVar("TEST", "A B C D")
self.d.setVar("BAR", "B D") self.d.setVar("BAR", "B D")
self.d.setVar("TEST_remove", "${BAR}") self.d.setVar("TEST_remove", "${BAR}")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "A C") self.assertEqual(self.d.getVar("TEST"), "A C")
class TestOverrides(unittest.TestCase): class TestOverrides(unittest.TestCase):
def setUp(self): def setUp(self):
@ -323,17 +323,17 @@ class TestOverrides(unittest.TestCase):
def test_no_override(self): def test_no_override(self):
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue") self.assertEqual(self.d.getVar("TEST"), "testvalue")
def test_one_override(self): def test_one_override(self):
self.d.setVar("TEST_bar", "testvalue2") self.d.setVar("TEST_bar", "testvalue2")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue2") self.assertEqual(self.d.getVar("TEST"), "testvalue2")
def test_one_override_unset(self): def test_one_override_unset(self):
self.d.setVar("TEST2_bar", "testvalue2") self.d.setVar("TEST2_bar", "testvalue2")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue2") self.assertEqual(self.d.getVar("TEST2"), "testvalue2")
self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
def test_multiple_override(self): def test_multiple_override(self):
@ -341,18 +341,18 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("TEST_local", "testvalue3") self.d.setVar("TEST_local", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4") self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3") self.assertEqual(self.d.getVar("TEST"), "testvalue3")
self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
def test_multiple_combined_overrides(self): def test_multiple_combined_overrides(self):
self.d.setVar("TEST_local_foo_bar", "testvalue3") self.d.setVar("TEST_local_foo_bar", "testvalue3")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3") self.assertEqual(self.d.getVar("TEST"), "testvalue3")
def test_multiple_overrides_unset(self): def test_multiple_overrides_unset(self):
self.d.setVar("TEST2_local_foo_bar", "testvalue3") self.d.setVar("TEST2_local_foo_bar", "testvalue3")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue3") self.assertEqual(self.d.getVar("TEST2"), "testvalue3")
def test_keyexpansion_override(self): def test_keyexpansion_override(self):
self.d.setVar("LOCAL", "local") self.d.setVar("LOCAL", "local")
@ -361,21 +361,21 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("TEST_foo", "testvalue4") self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d) bb.data.update_data(self.d)
bb.data.expandKeys(self.d) bb.data.expandKeys(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3") self.assertEqual(self.d.getVar("TEST"), "testvalue3")
def test_rename_override(self): def test_rename_override(self):
self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a") self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a")
self.d.setVar("OVERRIDES", "class-target") self.d.setVar("OVERRIDES", "class-target")
bb.data.update_data(self.d) bb.data.update_data(self.d)
self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools") self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools")
self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools", True), "a") self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a")
def test_underscore_override(self): def test_underscore_override(self):
self.d.setVar("TEST_bar", "testvalue2") self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_some_val", "testvalue3") self.d.setVar("TEST_some_val", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4") self.d.setVar("TEST_foo", "testvalue4")
self.d.setVar("OVERRIDES", "foo:bar:some_val") self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST", True), "testvalue3") self.assertEqual(self.d.getVar("TEST"), "testvalue3")
class TestKeyExpansion(unittest.TestCase): class TestKeyExpansion(unittest.TestCase):
def setUp(self): def setUp(self):
@ -389,7 +389,7 @@ class TestKeyExpansion(unittest.TestCase):
with LogRecord() as logs: with LogRecord() as logs:
bb.data.expandKeys(self.d) bb.data.expandKeys(self.d)
self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs)) self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs))
self.assertEqual(self.d.getVar("VAL_foo", True), "A") self.assertEqual(self.d.getVar("VAL_foo"), "A")
class TestFlags(unittest.TestCase): class TestFlags(unittest.TestCase):
def setUp(self): def setUp(self):
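
The data-store tests above pin the new contract down: a bare getVar() call returns the expanded value, while passing False as the second argument still returns the raw string. The essence, assuming bitbake's lib/ directory is on sys.path:

    import bb.data_smart

    d = bb.data_smart.DataSmart()
    d.setVar("BAR", "bar value")
    d.setVar("FOO", "${BAR} foo ${BAR}")

    assert d.getVar("FOO") == "bar value foo bar value"    # expanded by default
    assert d.getVar("FOO", False) == "${BAR} foo ${BAR}"   # raw value on request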


@ -58,9 +58,9 @@ C = "3"
def test_parse_simple(self): def test_parse_simple(self):
f = self.parsehelper(self.testfile) f = self.parsehelper(self.testfile)
d = bb.parse.handle(f.name, self.d)[''] d = bb.parse.handle(f.name, self.d)['']
self.assertEqual(d.getVar("A", True), "1") self.assertEqual(d.getVar("A"), "1")
self.assertEqual(d.getVar("B", True), "2") self.assertEqual(d.getVar("B"), "2")
self.assertEqual(d.getVar("C", True), "3") self.assertEqual(d.getVar("C"), "3")
def test_parse_incomplete_function(self): def test_parse_incomplete_function(self):
testfileB = self.testfile.replace("}", "") testfileB = self.testfile.replace("}", "")
@ -80,9 +80,9 @@ unset B[flag]
def test_parse_unset(self): def test_parse_unset(self):
f = self.parsehelper(self.unsettest) f = self.parsehelper(self.unsettest)
d = bb.parse.handle(f.name, self.d)[''] d = bb.parse.handle(f.name, self.d)['']
self.assertEqual(d.getVar("A", True), None) self.assertEqual(d.getVar("A"), None)
self.assertEqual(d.getVarFlag("A","flag", True), None) self.assertEqual(d.getVarFlag("A","flag", True), None)
self.assertEqual(d.getVar("B", True), "2") self.assertEqual(d.getVar("B"), "2")
overridetest = """ overridetest = """
@ -95,11 +95,11 @@ PN = "gtk+"
def test_parse_overrides(self): def test_parse_overrides(self):
f = self.parsehelper(self.overridetest) f = self.parsehelper(self.overridetest)
d = bb.parse.handle(f.name, self.d)[''] d = bb.parse.handle(f.name, self.d)['']
self.assertEqual(d.getVar("RRECOMMENDS", True), "b") self.assertEqual(d.getVar("RRECOMMENDS"), "b")
bb.data.expandKeys(d) bb.data.expandKeys(d)
self.assertEqual(d.getVar("RRECOMMENDS", True), "b") self.assertEqual(d.getVar("RRECOMMENDS"), "b")
d.setVar("RRECOMMENDS_gtk+", "c") d.setVar("RRECOMMENDS_gtk+", "c")
self.assertEqual(d.getVar("RRECOMMENDS", True), "c") self.assertEqual(d.getVar("RRECOMMENDS"), "c")
overridetest2 = """ overridetest2 = """
EXTRA_OECONF = "" EXTRA_OECONF = ""
@ -112,7 +112,7 @@ EXTRA_OECONF_append = " c"
d = bb.parse.handle(f.name, self.d)[''] d = bb.parse.handle(f.name, self.d)['']
d.appendVar("EXTRA_OECONF", " d") d.appendVar("EXTRA_OECONF", " d")
d.setVar("OVERRIDES", "class-target") d.setVar("OVERRIDES", "class-target")
self.assertEqual(d.getVar("EXTRA_OECONF", True), "b c d") self.assertEqual(d.getVar("EXTRA_OECONF"), "b c d")
overridetest3 = """ overridetest3 = """
DESCRIPTION = "A" DESCRIPTION = "A"
@ -124,11 +124,11 @@ PN = "bc"
f = self.parsehelper(self.overridetest3) f = self.parsehelper(self.overridetest3)
d = bb.parse.handle(f.name, self.d)[''] d = bb.parse.handle(f.name, self.d)['']
bb.data.expandKeys(d) bb.data.expandKeys(d)
self.assertEqual(d.getVar("DESCRIPTION_bc-dev", True), "A B") self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B")
d.setVar("DESCRIPTION", "E") d.setVar("DESCRIPTION", "E")
d.setVar("DESCRIPTION_bc-dev", "C D") d.setVar("DESCRIPTION_bc-dev", "C D")
d.setVar("OVERRIDES", "bc-dev") d.setVar("OVERRIDES", "bc-dev")
self.assertEqual(d.getVar("DESCRIPTION", True), "C D") self.assertEqual(d.getVar("DESCRIPTION"), "C D")
classextend = """ classextend = """
@ -159,6 +159,6 @@ python () {
alldata = bb.parse.handle(f.name, self.d) alldata = bb.parse.handle(f.name, self.d)
d1 = alldata[''] d1 = alldata['']
d2 = alldata[cls.name] d2 = alldata[cls.name]
self.assertEqual(d1.getVar("VAR_var", True), "B") self.assertEqual(d1.getVar("VAR_var"), "B")
self.assertEqual(d2.getVar("VAR_var", True), None) self.assertEqual(d2.getVar("VAR_var"), None)


@ -665,7 +665,7 @@ def build_environment(d):
for var in bb.data.keys(d): for var in bb.data.keys(d):
export = d.getVarFlag(var, "export", False) export = d.getVarFlag(var, "export", False)
if export: if export:
os.environ[var] = d.getVar(var, True) or "" os.environ[var] = d.getVar(var) or ""
def _check_unsafe_delete_path(path): def _check_unsafe_delete_path(path):
""" """
@ -953,7 +953,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
Arguments: Arguments:
variable -- the variable name. This will be fetched and expanded (using variable -- the variable name. This will be fetched and expanded (using
d.getVar(variable, True)) and then split into a set(). d.getVar(variable)) and then split into a set().
checkvalues -- if this is a string it is split on whitespace into a set(), checkvalues -- if this is a string it is split on whitespace into a set(),
otherwise coerced directly into a set(). otherwise coerced directly into a set().
@ -966,7 +966,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
d -- the data store. d -- the data store.
""" """
val = d.getVar(variable, True) val = d.getVar(variable)
if not val: if not val:
return falsevalue return falsevalue
val = set(val.split()) val = set(val.split())
@ -979,7 +979,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
return falsevalue return falsevalue
def contains_any(variable, checkvalues, truevalue, falsevalue, d): def contains_any(variable, checkvalues, truevalue, falsevalue, d):
val = d.getVar(variable, True) val = d.getVar(variable)
if not val: if not val:
return falsevalue return falsevalue
val = set(val.split()) val = set(val.split())
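
The docstring above spells out contains(): the variable is fetched and expanded, split into a set, and every checkvalue must be present to get the truevalue; contains_any() needs only a non-empty intersection. Hypothetical usage against a hand-built datastore:

    import bb.data_smart
    import bb.utils

    d = bb.data_smart.DataSmart()
    d.setVar("DISTRO_FEATURES", "systemd x11 wayland")

    # contains: all checkvalues must be present
    assert bb.utils.contains("DISTRO_FEATURES", "x11", "yes", "no", d) == "yes"
    assert bb.utils.contains("DISTRO_FEATURES", "x11 opengl", "yes", "no", d) == "no"
    # contains_any: one match is enough
    assert bb.utils.contains_any("DISTRO_FEATURES", "x11 opengl", "yes", "no", d) == "yes"
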
@ -1378,10 +1378,10 @@ def edit_bblayers_conf(bblayers_conf, add, remove):
def get_file_layer(filename, d): def get_file_layer(filename, d):
"""Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split() collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
collection_res = {} collection_res = {}
for collection in collections: for collection in collections:
collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or '' collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or ''
def path_to_layer(path): def path_to_layer(path):
# Use longest path so we handle nested layers # Use longest path so we handle nested layers
@ -1394,7 +1394,7 @@ def get_file_layer(filename, d):
return match return match
result = None result = None
bbfiles = (d.getVar('BBFILES', True) or '').split() bbfiles = (d.getVar('BBFILES') or '').split()
bbfilesmatch = False bbfilesmatch = False
for bbfilesentry in bbfiles: for bbfilesentry in bbfiles:
if fnmatch.fnmatch(filename, bbfilesentry): if fnmatch.fnmatch(filename, bbfilesentry):
@ -1471,7 +1471,7 @@ def export_proxies(d):
if v in os.environ.keys(): if v in os.environ.keys():
exported = True exported = True
else: else:
v_proxy = d.getVar(v, True) v_proxy = d.getVar(v)
if v_proxy is not None: if v_proxy is not None:
os.environ[v] = v_proxy os.environ[v] = v_proxy
exported = True exported = True


@ -180,7 +180,7 @@ build results (as the layer priority order has effectively changed).
if first_regex: if first_regex:
# Find the BBFILES entries that match (which will have come from this conf/layer.conf file) # Find the BBFILES entries that match (which will have come from this conf/layer.conf file)
bbfiles = str(self.tinfoil.config_data.getVar('BBFILES', True)).split() bbfiles = str(self.tinfoil.config_data.getVar('BBFILES')).split()
bbfiles_layer = [] bbfiles_layer = []
for item in bbfiles: for item in bbfiles:
if first_regex.match(item): if first_regex.match(item):


@ -12,7 +12,7 @@ class LayerPlugin():
def tinfoil_init(self, tinfoil): def tinfoil_init(self, tinfoil):
self.tinfoil = tinfoil self.tinfoil = tinfoil
self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS', True) or "").split() self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS') or "").split()
layerconfs = self.tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.tinfoil.config_data) layerconfs = self.tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.tinfoil.config_data)
self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.items()} self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.items()}


@ -151,7 +151,7 @@ class LayerIndexPlugin(ActionPlugin):
def do_layerindex_fetch(self, args): def do_layerindex_fetch(self, args):
"""Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf. """Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf.
""" """
apiurl = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_URL', True) apiurl = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_URL')
if not apiurl: if not apiurl:
logger.error("Cannot get BBLAYERS_LAYERINDEX_URL") logger.error("Cannot get BBLAYERS_LAYERINDEX_URL")
return 1 return 1
@ -173,8 +173,8 @@ class LayerIndexPlugin(ActionPlugin):
return 1 return 1
ignore_layers = [] ignore_layers = []
for collection in self.tinfoil.config_data.getVar('BBFILE_COLLECTIONS', True).split(): for collection in self.tinfoil.config_data.getVar('BBFILE_COLLECTIONS').split():
lname = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection, True) lname = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection)
if lname: if lname:
ignore_layers.append(lname) ignore_layers.append(lname)
@ -225,7 +225,7 @@ class LayerIndexPlugin(ActionPlugin):
printedlayers.append(dependency) printedlayers.append(dependency)
if repourls: if repourls:
fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR', True) fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR')
if not fetchdir: if not fetchdir:
logger.error("Cannot get BBLAYERS_FETCH_DIR") logger.error("Cannot get BBLAYERS_FETCH_DIR")
return 1 return 1


@ -62,7 +62,7 @@ are overlayed will also be listed, with a " (skipped)" suffix.
# factor - however, each layer.conf is free to either prepend or append to # factor - however, each layer.conf is free to either prepend or append to
# BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might # BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might
# not be exactly the order present in bblayers.conf either. # not be exactly the order present in bblayers.conf either.
bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True)) bbpath = str(self.tinfoil.config_data.getVar('BBPATH'))
overlayed_class_found = False overlayed_class_found = False
for (classfile, classdirs) in classes.items(): for (classfile, classdirs) in classes.items():
if len(classdirs) > 1: if len(classdirs) > 1:
@ -114,7 +114,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only, inherits): def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only, inherits):
if inherits: if inherits:
bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True)) bbpath = str(self.tinfoil.config_data.getVar('BBPATH'))
for classname in inherits: for classname in inherits:
classfile = 'classes/%s.bbclass' % classname classfile = 'classes/%s.bbclass' % classname
if not bb.utils.which(bbpath, classfile, history=False): if not bb.utils.which(bbpath, classfile, history=False):
@ -158,7 +158,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
logger.plain("%s:", pn) logger.plain("%s:", pn)
logger.plain(" %s %s%s", layer.ljust(20), ver, skipped) logger.plain(" %s %s%s", layer.ljust(20), ver, skipped)
global_inherit = (self.tinfoil.config_data.getVar('INHERIT', True) or "").split() global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split()
cls_re = re.compile('classes/') cls_re = re.compile('classes/')
preffiles = [] preffiles = []
@ -319,12 +319,12 @@ NOTE: .bbappend files can impact the dependencies.
ignore_layers = (args.ignore or '').split(',') ignore_layers = (args.ignore or '').split(',')
pkg_fn = self.tinfoil.cooker_data.pkg_fn pkg_fn = self.tinfoil.cooker_data.pkg_fn
bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True)) bbpath = str(self.tinfoil.config_data.getVar('BBPATH'))
self.require_re = re.compile(r"require\s+(.+)") self.require_re = re.compile(r"require\s+(.+)")
self.include_re = re.compile(r"include\s+(.+)") self.include_re = re.compile(r"include\s+(.+)")
self.inherit_re = re.compile(r"inherit\s+(.+)") self.inherit_re = re.compile(r"inherit\s+(.+)")
global_inherit = (self.tinfoil.config_data.getVar('INHERIT', True) or "").split() global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split()
# The bb's DEPENDS and RDEPENDS # The bb's DEPENDS and RDEPENDS
for f in pkg_fn: for f in pkg_fn:


@ -420,7 +420,7 @@ class PRServiceConfigError(Exception):
def auto_start(d): def auto_start(d):
global singleton global singleton
host_params = list(filter(None, (d.getVar('PRSERV_HOST', True) or '').split(':'))) host_params = list(filter(None, (d.getVar('PRSERV_HOST') or '').split(':')))
if not host_params: if not host_params:
return None return None
@ -431,7 +431,7 @@ def auto_start(d):
if is_local_special(host_params[0], int(host_params[1])) and not singleton: if is_local_special(host_params[0], int(host_params[1])) and not singleton:
import bb.utils import bb.utils
cachedir = (d.getVar("PERSISTENT_DIR", True) or d.getVar("CACHE", True)) cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE"))
if not cachedir: if not cachedir:
logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
raise PRServiceConfigError raise PRServiceConfigError
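
auto_start() splits PRSERV_HOST on ':' and filters out empty fields, so an unset variable yields an empty list rather than ['']. The parsing step in isolation, with an illustrative value:

    def host_params(prserv_host):
        # mirrors the filter(None, ...) split from the hunk above
        return list(filter(None, (prserv_host or '').split(':')))

    assert host_params("localhost:0") == ["localhost", "0"]
    assert host_params(None) == []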