#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from __future__ import print_function
import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import signal
import sre_constants
import threading
from cStringIO import StringIO
from contextlib import closing
import bb
from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
buildlog = logging.getLogger("BitBake.Build")
parselog = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class MultipleMatches(Exception):
    """
    Exception raised when multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class state:
    initial, parsing, running, shutdown, stop = range(5)
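    # State transitions (driven by BBCooker.updateCache(), shutdown() and
    # stop() below): initial -> parsing while recipes are parsed, then
    # parsing -> running once the cache is complete; shutdown and stop are
    # set asynchronously to request a graceful or immediate halt.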

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, server):
        self.status = None
        self.appendlist = {}

        if server:
            self.server = server.BitBakeServer(self)

        self.configuration = configuration

        self.configuration.data = bb.data.init()

        if not server:
            bb.data.setVar("BB_WORKERCONTEXT", "1", self.configuration.data)

        bb.data.inheritFromOS(self.configuration.data)

        self.parseConfigurationFiles(self.configuration.file)

        if not self.configuration.cmd:
            self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"

        bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
        if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
            self.configuration.pkgs_to_build.extend(bbpkgs.split())

        #
        # Special updated configuration we use for firing events
        #
        self.configuration.event_data = bb.data.createCopy(self.configuration.data)
        bb.data.update_data(self.configuration.event_data)

        # TOSTOP must not be set or our children will hang when they output
        fd = sys.stdout.fileno()
        if os.isatty(fd):
            import termios
            tcattr = termios.tcgetattr(fd)
            if tcattr[3] & termios.TOSTOP:
                buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                tcattr[3] = tcattr[3] & ~termios.TOSTOP
                termios.tcsetattr(fd, termios.TCSANOW, tcattr)

        self.command = bb.command.Command(self)
        self.state = state.initial

    def parseConfiguration(self):

        # Change nice level if we're asked to
        nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

    def parseCommandLine(self):
        # Parse any commandline into actions
        if self.configuration.show_environment:
            self.commandlineAction = None

            if 'world' in self.configuration.pkgs_to_build:
                buildlog.error("'world' is not a valid target for --environment.")
            elif len(self.configuration.pkgs_to_build) > 1:
                buildlog.error("Only one target can be used with the --environment option.")
            elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
                buildlog.error("No target should be used with the --environment and --buildfile options.")
            elif len(self.configuration.pkgs_to_build) > 0:
                self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
            else:
                self.commandlineAction = ["showEnvironment", self.configuration.buildfile]
        elif self.configuration.buildfile is not None:
            self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd]
        elif self.configuration.revisions_changed:
            self.commandlineAction = ["compareRevisions"]
        elif self.configuration.show_versions:
            self.commandlineAction = ["showVersions"]
        elif self.configuration.parse_only:
            self.commandlineAction = ["parseFiles"]
        elif self.configuration.dot_graph:
            if self.configuration.pkgs_to_build:
                self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
            else:
                self.commandlineAction = None
                buildlog.error("Please specify a package name for dependency graph generation.")
        else:
            if self.configuration.pkgs_to_build:
                self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
            else:
                self.commandlineAction = None
                buildlog.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):

        # Need files parsed
        self.updateCache()

        pkg_pn = self.status.pkg_pn
        preferred_versions = {}
        latest_versions = {}

        # Sort by priority
        for pn in pkg_pn:
            (last_ver, last_file, pref_ver, pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status)
            preferred_versions[pn] = (pref_ver, pref_file)
            latest_versions[pn] = (last_ver, last_file)

        logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")

        for p in sorted(pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile = None, pkgs_to_build = []):
        """
        Show the outer or per-package environment
        """
        fn = None
        envdata = None

        if buildfile:
            fn = self.matchFile(buildfile)
        elif len(pkgs_to_build) == 1:
            self.updateCache()

            localdata = data.createCopy(self.configuration.data)
            bb.data.update_data(localdata)
            bb.data.expandKeys(localdata)

            taskdata = bb.taskdata.TaskData(self.configuration.abort)
            taskdata.add_provider(localdata, self.status, pkgs_to_build[0])
            taskdata.add_unresolved(localdata, self.status)

            targetid = taskdata.getbuild_id(pkgs_to_build[0])
            fnid = taskdata.build_targets[targetid][0]
            fn = taskdata.fn_index[fnid]
        else:
            envdata = self.configuration.data

        if fn:
            try:
                envdata = bb.cache.Cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
            except Exception as e:
                parselog.exception("Unable to read %s", fn)
                raise

        # emit variables and shell functions
        data.update_data(envdata)
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # emit the metadata which isn't valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag( e, 'python', envdata ):
                logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))

    def generateDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """

        # Need files parsed
        self.updateCache()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        pkgs_to_build = self.checkPackages(pkgs_to_build)

        localdata = data.createCopy(self.configuration.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(self.configuration.abort)

        runlist = []
        for k in pkgs_to_build:
            taskdata.add_provider(localdata, self.status, k)
            runlist.append([k, "do_%s" % task])
        taskdata.add_unresolved(localdata, self.status)

        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
        rq.rqdata.prepare()

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.status.pkg_fn[fn]
            version = "%s:%s-%s" % self.status.pkg_pepvpr[fn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.status.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.status.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.status.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.configuration.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateDepTreeData(pkgs_to_build, task)

        # Print a flattened form of the dependency graph, with the
        # subpackages of a package merged into the main pn
        depends_file = file('pn-depends.dot', 'w' )
        print("digraph depends {", file=depends_file)
        for pn in depgraph["pn"]:
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
        for pn in depgraph["depends"]:
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (pn, depend), file=depends_file)
        for pn in depgraph["rdepends-pn"]:
            for rdepend in depgraph["rdepends-pn"][pn]:
                print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
        print("}", file=depends_file)
        logger.info("PN dependencies saved to 'pn-depends.dot'")

        depends_file = file('package-depends.dot', 'w' )
        print("digraph depends {", file=depends_file)
        for package in depgraph["packages"]:
            pn = depgraph["packages"][package]["pn"]
            fn = depgraph["packages"][package]["filename"]
            version = depgraph["packages"][package]["version"]
            if package == pn:
                print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
            else:
                print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (package, depend), file=depends_file)
        for package in depgraph["rdepends-pkg"]:
            for rdepend in depgraph["rdepends-pkg"][package]:
                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
        for package in depgraph["rrecs-pkg"]:
            for rdepend in depgraph["rrecs-pkg"][package]:
                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
        print("}", file=depends_file)
        logger.info("Package dependencies saved to 'package-depends.dot'")

        tdepends_file = file('task-depends.dot', 'w' )
        print("digraph depends {", file=tdepends_file)
        for task in depgraph["tdepends"]:
            (pn, taskname) = task.rsplit(".", 1)
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
            for dep in depgraph["tdepends"][task]:
                print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
        print("}", file=tdepends_file)
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def buildDepgraph( self ):
        all_depends = self.status.all_depends
        pn_provides = self.status.pn_provides

        localdata = data.createCopy(self.configuration.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)

        matched = set()
        def calc_bbfile_priority(filename):
            for _, _, regex, pri in self.status.bbfile_config_priorities:
                if regex.match(filename):
                    if not regex in matched:
                        matched.add(regex)
                    return pri
            return 0

        # Handle PREFERRED_PROVIDERS
        for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split():
            try:
                (providee, provider) = p.split(':')
            except:
                providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                continue
            if providee in self.status.preferred and self.status.preferred[providee] != provider:
                providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.status.preferred[providee])
            self.status.preferred[providee] = provider

        # Calculate priorities for each file
        for p in self.status.pkg_fn:
            self.status.bbfile_priority[p] = calc_bbfile_priority(p)

        for collection, pattern, regex, _ in self.status.bbfile_config_priorities:
            if not regex in matched:
                collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

    def buildWorldTargetList(self):
        """
        Build package list for "bitbake world"
        """
        all_depends = self.status.all_depends
        pn_provides = self.status.pn_provides
        parselog.debug(1, "collating packages for \"world\"")
        for f in self.status.possible_world:
            terminal = True
            pn = self.status.pkg_fn[f]

            for p in pn_provides[pn]:
                if p.startswith('virtual/'):
                    parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
                    terminal = False
                    break
                for pf in self.status.providers[p]:
                    if self.status.pkg_fn[pf] != pn:
                        parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
                        terminal = False
                        break
            if terminal:
                self.status.world_target.add(pn)

        # drop reference count now
        self.status.possible_world = None
        self.status.all_depends = None

    def interactiveMode( self ):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start( self )

    def _findLayerConf(self):
        path = os.getcwd()
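        # Walk up from the current directory towards the filesystem root,
        # returning the first conf/bblayers.conf found (None if there isn't
        # one).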
        while path != "/":
            bblayers = os.path.join(path, "conf", "bblayers.conf")
            if os.path.exists(bblayers):
                return bblayers

            path, _ = os.path.split(path)

    def parseConfigurationFiles(self, files):
        def _parse(f, data, include=False):
            try:
                return bb.parse.handle(f, data, include)
            except (IOError, bb.parse.ParseError) as exc:
                parselog.critical("Unable to parse %s: %s" % (f, exc))
                sys.exit(1)

        data = self.configuration.data
        bb.parse.init_parser(data)
        for f in files:
            data = _parse(f, data)

        layerconf = self._findLayerConf()
        if layerconf:
            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
            data = _parse(layerconf, data)

            layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()

            data = bb.data.createCopy(data)
            for layer in layers:
                parselog.debug(2, "Adding layer %s", layer)
                bb.data.setVar('LAYERDIR', layer, data)
                data = _parse(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')

            bb.data.delVar('LAYERDIR', data)

        if not data.getVar("BBPATH", True):
            raise SystemExit("The BBPATH variable is not set")

        data = _parse(os.path.join("conf", "bitbake.conf"), data)

        self.configuration.data = data

        # Handle any INHERITs and inherit the base class
        inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split()
        for inherit in inherits:
            self.configuration.data = _parse(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True )

        # Normally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in bb.data.getVar('__BBHANDLERS', self.configuration.data) or []:
            bb.event.register(var, bb.data.getVar(var, self.configuration.data))

        if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None:
            bb.fetch.fetcher_init(self.configuration.data)
        bb.codeparser.parser_cache_init(self.configuration.data)
        bb.parse.init_parser(data)
        bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)

    def handleCollections( self, collections ):
        """Handle collections"""
        if collections:
            collection_list = collections.split()
            for c in collection_list:
                regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    continue
                priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
                if priority == None:
                    parselog.error("BBFILE_PRIORITY_%s not defined" % c)
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    continue
                try:
                    pri = int(priority)
                    self.status.bbfile_config_priorities.append((c, regex, cre, pri))
                except ValueError:
                    parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)

    def buildSetVars(self):
        """
        Setup any variables needed before starting a build
        """
        if not bb.data.getVar("BUILDNAME", self.configuration.data):
            bb.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'), self.configuration.data)
        bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()), self.configuration.data)

    def matchFiles(self, buildfile):
        """
        Find the .bb files which match the expression in 'buildfile'.
        """

        bf = os.path.abspath(buildfile)
        filelist, masked = self.collect_bbfiles()
        try:
            os.stat(bf)
            return [bf]
        except OSError:
            regexp = re.compile(buildfile)
            matches = []
            for f in filelist:
                if regexp.search(f) and os.path.isfile(f):
                    bf = f
                    matches.append(f)
            return matches

    def matchFile(self, buildfile):
        """
        Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if multiple files match.
        """
        matches = self.matchFiles(buildfile)
        if len(matches) != 1:
            parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches)))
            for f in matches:
                parselog.error("    %s" % f)
            raise MultipleMatches
        return matches[0]

    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile
        """

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        (fn, cls) = bb.cache.Cache.virtualfn2realfn(buildfile)
        buildfile = self.matchFile(fn)
        fn = bb.cache.Cache.realfn2virtual(buildfile, cls)

        self.buildSetVars()

        self.status = bb.cache.CacheData()
        infos = bb.cache.Cache.parse(fn, self.get_file_appends(fn), \
                                     self.configuration.data)
        maininfo = None
        for vfn, info in infos:
            self.status.add_from_recipeinfo(vfn, info)
            if vfn == fn:
                maininfo = info

        # Tweak some variables
        item = maininfo.pn
        self.status.ignored_dependencies = set()
        self.status.bbfile_priority[fn] = 1

        # Remove external dependencies
        self.status.task_deps[fn]['depends'] = {}
        self.status.deps[fn] = []
        self.status.rundeps[fn] = []
        self.status.runrecs[fn] = []

        # Remove stamp for target if force mode active
        if self.configuration.force:
            logger.verbose("Remove stamp %s, %s", task, fn)
            bb.build.del_stamp('do_%s' % task, self.status, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.configuration.data, self.status, item)

        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data)

        # Clear locks
        bb.fetch.persistent_database_connection = {}

        # Execute the runqueue
        runlist = [[item, "do_%s" % task]]

        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
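        # Idle-callback protocol: return True while the runqueue still has
        # work to do, a float (here 0.5, apparently treated by the server as
        # a retry delay) when idle, and False once the build has finished.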

        def buildFileIdle(server, rq, abort):

            if abort or self.state == state.stop:
                rq.finish_runqueue(True)
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                for fnid in exc.args:
                    buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
                failures += len(exc.args)
                retval = False
            if not retval:
                bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data)
                self.command.finishAsyncCommand()
                return False
            if retval is True:
                return True
            return 0.5

        self.server.register_idle_function(buildFileIdle, rq)

    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified
        """

        # Need files parsed
        self.updateCache()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        targets = self.checkPackages(targets)

        def buildTargetsIdle(server, rq, abort):
            if abort or self.state == state.stop:
                rq.finish_runqueue(True)
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                for fnid in exc.args:
                    buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
                failures += len(exc.args)
                retval = False
            if not retval:
                bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data)
                self.command.finishAsyncCommand()
                return False
            if retval is True:
                return True
            return 0.5

        self.buildSetVars()

        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
        bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data)

        localdata = data.createCopy(self.configuration.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)

        taskdata = bb.taskdata.TaskData(self.configuration.abort)

        runlist = []
        for k in targets:
            taskdata.add_provider(localdata, self.status, k)
            runlist.append([k, "do_%s" % task])
        taskdata.add_unresolved(localdata, self.status)

        # Clear locks
        bb.fetch.persistent_database_connection = {}

        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)

        self.server.register_idle_function(buildTargetsIdle, rq)

    def updateCache(self):
        if self.state == state.running:
            return

        if self.state != state.parsing:
            self.parseConfiguration ()

            # Import Psyco if available and not disabled
            import platform
            if platform.machine() in ['i386', 'i486', 'i586', 'i686']:
                if not self.configuration.disable_psyco:
                    try:
                        import psyco
                    except ImportError:
                        collectlog.info("Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
                    else:
                        psyco.bind( CookerParser.parse_next )
                else:
                    collectlog.info("You have disabled Psyco. This decreases performance.")

            self.status = bb.cache.CacheData()

            ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
            self.status.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.status.ignored_dependencies.add(dep)

            self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )

            (filelist, masked) = self.collect_bbfiles()
            bb.data.renameVar("__depends", "__base_depends", self.configuration.data)

            self.parser = CookerParser(self, filelist, masked)
            self.state = state.parsing

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            self.buildDepgraph()
            self.state = state.running
            return None

        return True

    def checkPackages(self, pkgs_to_build):

        if len(pkgs_to_build) == 0:
            raise NothingToBuild

        if 'world' in pkgs_to_build:
            self.buildWorldTargetList()
            pkgs_to_build.remove('world')
            for t in self.status.world_target:
                pkgs_to_build.append(t)

        return pkgs_to_build

    def get_bbfiles( self, path = os.getcwd() ):
        """Get list of default .bb files by reading out the current directory"""
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            (root, ext) = os.path.splitext(f)
            if ext == ".bb":
                bbfiles.append(os.path.abspath(os.path.join(os.getcwd(), f)))
        return bbfiles

    def find_bbfiles( self, path ):
        """Find all the .bb and .bbappend files in a directory"""
        from os.path import join

        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            found += [join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]

        return found

    def collect_bbfiles( self ):
        """Collect all available .bb build files"""
        parsed, cached, skipped, masked = 0, 0, 0, 0

        collectlog.debug(1, "collecting .bb files")

        files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
        data.setVar("BBFILES", " ".join(files), self.configuration.data)

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), self.configuration.event_data)

        newfiles = set()
        for f in files:
            if os.path.isdir(f):
                dirfiles = self.find_bbfiles(f)
                newfiles.update(dirfiles)
            else:
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                newfiles.update(globbed)

        bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)

        if bbmask:
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
                return list(newfiles), 0

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            if not base in self.appendlist:
                self.appendlist[base] = []
            self.appendlist[base].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        NB: collect_bbfiles() must have been called prior to this
        """
        f = os.path.basename(fn)
        if f in self.appendlist:
            return self.appendlist[f]
        return []

    def pre_serve(self):
        # Empty the environment. The environment will be populated as
        # necessary from the data store.
        #bb.utils.empty_environment()
        return

    def post_serve(self):
        bb.event.fire(CookerExit(), self.configuration.event_data)
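
    # Run the server main loop 'func', optionally under the profiler
    # (writing raw data to profile.log and a processed summary to
    # profile.log.processed), bracketed by pre_serve()/post_serve().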
    def server_main(cooker, func, *args):
        cooker.pre_serve()

        if cooker.configuration.profile:
            try:
                import cProfile as profile
            except:
                import profile
            prof = profile.Profile()

            ret = profile.Profile.runcall(prof, func, *args)

            prof.dump_stats("profile.log")

            # Redirect stdout to capture profile information
            pout = open('profile.log.processed', 'w')
            so = sys.stdout.fileno()
            orig_so = os.dup(sys.stdout.fileno())
            os.dup2(pout.fileno(), so)

            import pstats
            p = pstats.Stats('profile.log')
            p.sort_stats('time')
            p.print_stats()
            p.print_callers()
            p.sort_stats('cumulative')
            p.print_stats()

            os.dup2(orig_so, so)
            pout.flush()
            pout.close()

            print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")

        else:
            ret = func(*args)

        cooker.post_serve()

        return ret

    def shutdown(self):
        self.state = state.shutdown

    def stop(self):
        self.state = state.stop

class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        bb.event.Event.__init__(self)
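
# parse_file() executes inside the multiprocessing pool workers; the parser
# configuration is attached to it as parse_file.cfg by the pool initializer
# in CookerParser.start(). Tagging exceptions with the recipe filename lets
# the parent report which recipe failed to parse.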
def parse_file(task):
    filename, appends = task
    try:
        return True, bb.cache.Cache.parse(filename, appends, parse_file.cfg)
    except Exception as exc:
        exc.recipe = filename
        raise exc

class CookerParser(object):
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.configuration.data

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())

        self.bb_cache = bb.cache.Cache(self.cfgdata)
        self.fromcache = []
        self.willparse = []
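        # Split the recipe list into files whose cache entry is still valid
        # (served from load_cached()) and files that must be re-parsed by
        # the worker pool.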
        for filename in self.filelist:
            appends = self.cooker.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename):
                self.willparse.append((filename, appends))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        self.progress_chunk = max(self.toparse / 100, 1)

        self.start()

    def start(self):
        def init(cfg):
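            # Worker processes ignore SIGINT so that Ctrl-C is delivered to
            # and handled by the parent, which can then terminate the pool
            # cleanly.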
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            parse_file.cfg = cfg

        bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)

        self.pool = multiprocessing.Pool(self.num_processes, init, [self.cfgdata])
        parsed = self.pool.imap(parse_file, self.willparse)
        self.pool.close()

        self.results = itertools.chain(self.load_cached(), parsed)

    def shutdown(self, clean=True):
        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)
            bb.event.fire(event, self.cfgdata)
        else:
            self.pool.terminate()
        self.pool.join()
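
        # Write the recipe and code-parser caches back to disk on background
        # threads; atexit joins them so the process doesn't exit with a
        # partially written cache.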
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        atexit.register(lambda: sync.join())

        codesync = threading.Thread(target=bb.codeparser.parser_cache_save,
                                    args=(self.cooker.configuration.data,))
        codesync.start()
        atexit.register(lambda: codesync.join())

    def load_cached(self):
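        # Yield (parsed, infos) tuples with the same shape as parse_file()'s
        # results, so cached and freshly parsed recipes can be chained into
        # a single stream in start().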
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
            yield not cached, infos

    def parse_next(self):
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except KeyboardInterrupt:
            self.shutdown(clean=False)
            raise
        except Exception as exc:
            self.shutdown(clean=False)
            bb.fatal('Error parsing %s: %s' % (exc.recipe, exc))

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info in result:
            if info.skipped:
                self.skipped += 1
            else:
                self.bb_cache.add_info(virtualfn, info, self.cooker.status,
                                       parsed=parsed)
        return True

    def reparse(self, filename):
        infos = self.bb_cache.parse(filename,
                                    self.cooker.get_file_appends(filename),
                                    self.cfgdata)
        for vfn, info in infos:
            self.cooker.status.add_from_recipeinfo(vfn, info)