2006-05-09 15:44:08 +00:00
|
|
|
# ex:ts=4:sw=4:sts=4:et
|
|
|
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
2007-01-08 23:53:01 +00:00
|
|
|
#
|
|
|
|
# BitBake 'Event' implementation
|
|
|
|
#
|
|
|
|
# Caching of bitbake variables before task execution
|
2006-05-09 15:44:08 +00:00
|
|
|
|
|
|
|
# Copyright (C) 2006 Richard Purdie
|
|
|
|
|
|
|
|
# but small sections based on code from bin/bitbake:
|
|
|
|
# Copyright (C) 2003, 2004 Chris Larson
|
|
|
|
# Copyright (C) 2003, 2004 Phil Blundell
|
|
|
|
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
|
|
|
|
# Copyright (C) 2005 Holger Hans Peter Freyther
|
|
|
|
# Copyright (C) 2005 ROAD GmbH
|
2007-01-08 23:53:01 +00:00
|
|
|
#
|
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License version 2 as
|
|
|
|
# published by the Free Software Foundation.
|
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License along
|
|
|
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
|
|
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
2006-05-09 15:44:08 +00:00
|
|
|
|
|
|
|
|
2010-04-08 17:22:29 +00:00
|
|
|
import os
|
2010-06-10 17:35:31 +00:00
|
|
|
import logging
|
2010-11-16 19:58:52 +00:00
|
|
|
from collections import defaultdict, namedtuple
|
2006-05-09 15:44:08 +00:00
|
|
|
import bb.data
|
|
|
|
import bb.utils
|
|
|
|
|
2010-06-10 17:35:31 +00:00
|
|
|
# Module-level logger, a child of the main "BitBake" logger hierarchy.
logger = logging.getLogger("BitBake.Cache")

try:
    # Prefer the C pickle implementation (Python 2) for speed.
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

# Bumped whenever the on-disk cache format changes so stale cache files
# are detected and rebuilt instead of being misread.
__cache_version__ = "133"
|
|
|
|
|
|
|
|
# Per-recipe values captured from parsed metadata; these name the fields
# of the RecipeInfo namedtuple below, so order here defines field order.
recipe_fields = (
    'pn',
    'pv',
    'pr',
    'pe',
    'defaultpref',
    'depends',
    'provides',
    'task_deps',
    'stamp',
    'broken',
    'not_world',
    'skipped',
    'timestamp',
    'packages',
    'packages_dynamic',
    'rdepends',
    'rdepends_pkg',
    'rprovides',
    'rprovides_pkg',
    'rrecommends',
    'rrecommends_pkg',
    'nocache',
    'variants',
    'file_depends',
    'tasks',
    'basetaskhashes',
    'hashfilename',
    )


class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
    """Immutable snapshot of the metadata the cache needs for one recipe.

    Built from a parsed datastore by from_metadata(); the small
    classmethod helpers each extract one flavour of variable from the
    metadata.
    """
    __slots__ = ()

    @classmethod
    def listvar(cls, var, metadata):
        """Return var's value split on whitespace ([] when unset)."""
        return cls.getvar(var, metadata).split()

    @classmethod
    def intvar(cls, var, metadata):
        """Return var's value as an int, 0 when unset or empty."""
        return int(cls.getvar(var, metadata) or 0)

    @classmethod
    def depvar(cls, var, metadata):
        """Return var's value exploded into a dependency list."""
        return bb.utils.explode_deps(cls.getvar(var, metadata))

    @classmethod
    def pkgvar(cls, var, packages, metadata):
        """Return a dict of package -> exploded deps of var_<package>."""
        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
                    for pkg in packages)

    @classmethod
    def taskvar(cls, var, tasks, metadata):
        """Return a dict of task -> value of var_task-<task>."""
        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
                    for task in tasks)

    @classmethod
    def getvar(cls, var, metadata):
        """Return var's expanded value, '' when unset."""
        return metadata.getVar(var, True) or ''

    @classmethod
    def from_metadata(cls, filename, metadata):
        """Build a recipe info object from filename's parsed metadata."""
        tasks = metadata.getVar('__BBTASKS', False)

        packages = cls.listvar('PACKAGES', metadata)
        # Use cls rather than hardcoding RecipeInfo so that subclasses
        # get instances of their own type from this alternate constructor.
        return cls(
            tasks=tasks,
            basetaskhashes=cls.taskvar('BB_BASEHASH', tasks, metadata),
            hashfilename=cls.getvar('BB_HASHFILENAME', metadata),

            file_depends=metadata.getVar('__depends', False),
            task_deps=metadata.getVar('_task_deps', False) or
                      {'tasks': [], 'parents': {}},
            variants=cls.listvar('__VARIANTS', metadata) + [''],

            skipped=cls.getvar('__SKIPPED', metadata),
            timestamp=bb.parse.cached_mtime(filename),
            packages=packages,
            pn=cls.getvar('PN', metadata),
            pe=cls.getvar('PE', metadata),
            pv=cls.getvar('PV', metadata),
            pr=cls.getvar('PR', metadata),
            nocache=cls.getvar('__BB_DONT_CACHE', metadata),
            defaultpref=cls.intvar('DEFAULT_PREFERENCE', metadata),
            broken=cls.getvar('BROKEN', metadata),
            not_world=cls.getvar('EXCLUDE_FROM_WORLD', metadata),
            stamp=cls.getvar('STAMP', metadata),
            packages_dynamic=cls.listvar('PACKAGES_DYNAMIC', metadata),
            depends=cls.depvar('DEPENDS', metadata),
            provides=cls.depvar('PROVIDES', metadata),
            rdepends=cls.depvar('RDEPENDS', metadata),
            rprovides=cls.depvar('RPROVIDES', metadata),
            rrecommends=cls.depvar('RRECOMMENDS', metadata),
            rprovides_pkg=cls.pkgvar('RPROVIDES', packages, metadata),
            rdepends_pkg=cls.pkgvar('RDEPENDS', packages, metadata),
            rrecommends_pkg=cls.pkgvar('RRECOMMENDS', packages, metadata),
        )
|
2006-05-09 15:44:08 +00:00
|
|
|
|
2010-11-18 18:14:27 +00:00
|
|
|
|
|
|
|
class Cache(object):
    """
    BitBake Cache implementation
    """

    def __init__(self, data):
        """Initialise the cache and load any existing on-disk cache file.

        data is the parsed configuration metadata; its CACHE variable
        names the cache directory (caching is disabled when unset).
        """
        self.cachedir = bb.data.getVar("CACHE", data, True)
        self.clean = set()       # filenames verified clean this session
        self.checked = set()     # filenames already validity-checked
        self.depends_cache = {}  # (virtual) filename -> RecipeInfo
        self.data = None
        self.data_fn = None
        self.cacheclean = True   # nothing new parsed yet, nothing to save

        if self.cachedir in [None, '']:
            self.has_cache = False
            logger.info("Not using a cache. "
                        "Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        # If any of configuration.data's dependencies are newer than the
        # cache there isn't even any point in loading it...
        newest_mtime = 0
        deps = bb.data.getVar("__depends", data)

        old_mtimes = [old_mtime for _, old_mtime in deps]
        old_mtimes.append(newest_mtime)
        newest_mtime = max(old_mtimes)

        if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
            try:
                # Use a context manager so the handle is closed on every
                # path (previously opened via file() and never closed);
                # matches the style already used in sync().
                with open(self.cachefile, "rb") as cachefile:
                    p = pickle.Unpickler(cachefile)
                    self.depends_cache, version_data = p.load()
                if version_data['CACHE_VER'] != __cache_version__:
                    raise ValueError('Cache Version Mismatch')
                if version_data['BITBAKE_VER'] != bb.__version__:
                    raise ValueError('Bitbake Version Mismatch')
            except EOFError:
                logger.info("Truncated cache found, rebuilding...")
                self.depends_cache = {}
            except Exception:
                # Was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; any corrupt or
                # mismatched cache still lands here and is rebuilt.
                logger.info("Invalid cache found, rebuilding...")
                self.depends_cache = {}
        else:
            if os.path.isfile(self.cachefile):
                logger.info("Out of date cache found, rebuilding...")
|
2006-05-09 15:44:08 +00:00
|
|
|
|
2010-11-18 03:27:25 +00:00
|
|
|
@staticmethod
|
|
|
|
def virtualfn2realfn(virtualfn):
|
2009-01-01 14:43:54 +00:00
|
|
|
"""
|
|
|
|
Convert a virtual file name to a real one + the associated subclass keyword
|
|
|
|
"""
|
|
|
|
|
|
|
|
fn = virtualfn
|
|
|
|
cls = ""
|
|
|
|
if virtualfn.startswith('virtual:'):
|
|
|
|
cls = virtualfn.split(':', 2)[1]
|
|
|
|
fn = virtualfn.replace('virtual:' + cls + ':', '')
|
|
|
|
return (fn, cls)
|
|
|
|
|
2010-11-18 03:27:25 +00:00
|
|
|
@staticmethod
|
|
|
|
def realfn2virtual(realfn, cls):
|
2009-01-01 14:43:54 +00:00
|
|
|
"""
|
|
|
|
Convert a real filename + the associated subclass keyword to a virtual filename
|
|
|
|
"""
|
|
|
|
if cls == "":
|
|
|
|
return realfn
|
|
|
|
return "virtual:" + cls + ":" + realfn
|
2006-05-09 15:44:08 +00:00
|
|
|
|
2010-11-19 18:06:38 +00:00
|
|
|
    @classmethod
    def loadDataFull(cls, virtualfn, appends, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """

        (fn, virtual) = cls.virtualfn2realfn(virtualfn)

        logger.debug(1, "Parsing %s (full)", fn)

        # load_bbfile returns a mapping of variant name -> datastore;
        # pick the one the virtual filename asked for ('' = base recipe).
        bb_data = cls.load_bbfile(fn, appends, cfgData)
        return bb_data[virtual]
|
2006-05-09 15:44:08 +00:00
|
|
|
|
2010-11-19 03:21:54 +00:00
|
|
|
    @classmethod
    def parse(cls, filename, appends, configdata):
        """Parse the specified filename, returning the recipe information"""
        infos = []
        datastores = cls.load_bbfile(filename, appends, configdata)
        depends = set()
        # Iterate variants in reverse-sorted order so the base variant
        # ('', which sorts first) is processed last, after `depends` has
        # accumulated the file dependencies of every extended variant.
        for variant, data in sorted(datastores.iteritems(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = cls.realfn2virtual(filename, variant)
            depends |= (data.getVar("__depends", False) or set())
            # Attach the combined dependency set to the base recipe only.
            if depends and not variant:
                data.setVar("__depends", depends)
            info = RecipeInfo.from_metadata(filename, data)
            infos.append((virtualfn, info))
        return infos
|
|
|
|
|
|
|
|
def load(self, filename, appends, configdata):
|
|
|
|
"""Obtain the recipe information for the specified filename,
|
|
|
|
using cached values if available, otherwise parsing.
|
|
|
|
|
|
|
|
Note that if it does parse to obtain the info, it will not
|
|
|
|
automatically add the information to the cache or to your
|
|
|
|
CacheData. Use the add or add_info method to do so after
|
|
|
|
running this, or use loadData instead."""
|
|
|
|
cached = self.cacheValid(filename)
|
|
|
|
if cached:
|
|
|
|
infos = []
|
|
|
|
info = self.depends_cache[filename]
|
|
|
|
for variant in info.variants:
|
|
|
|
virtualfn = self.realfn2virtual(filename, variant)
|
|
|
|
infos.append((virtualfn, self.depends_cache[virtualfn]))
|
|
|
|
else:
|
|
|
|
logger.debug(1, "Parsing %s", filename)
|
|
|
|
return self.parse(filename, appends, configdata)
|
2010-01-20 18:46:02 +00:00
|
|
|
|
2010-11-19 03:21:54 +00:00
|
|
|
return cached, infos
|
2010-01-20 18:46:02 +00:00
|
|
|
|
2010-11-19 03:21:54 +00:00
|
|
|
def loadData(self, fn, appends, cfgData, cacheData):
|
|
|
|
"""Load the recipe info for the specified filename,
|
|
|
|
parsing and adding to the cache if necessary, and adding
|
|
|
|
the recipe information to the supplied CacheData instance."""
|
|
|
|
skipped, virtuals = 0, 0
|
2010-11-16 19:58:52 +00:00
|
|
|
|
2010-11-19 03:21:54 +00:00
|
|
|
cached, infos = self.load(fn, appends, cfgData)
|
|
|
|
for virtualfn, info in infos:
|
|
|
|
if info.skipped:
|
2010-06-10 17:35:31 +00:00
|
|
|
logger.debug(1, "Skipping %s", virtualfn)
|
2010-11-16 19:58:52 +00:00
|
|
|
skipped += 1
|
2010-01-20 18:46:02 +00:00
|
|
|
else:
|
2010-11-19 03:21:54 +00:00
|
|
|
self.add_info(virtualfn, info, cacheData, not cached)
|
2010-01-20 18:46:02 +00:00
|
|
|
virtuals += 1
|
2010-11-16 19:58:52 +00:00
|
|
|
|
|
|
|
return cached, skipped, virtuals
|
2009-01-01 14:43:54 +00:00
|
|
|
|
2006-05-09 15:44:08 +00:00
|
|
|
def cacheValid(self, fn):
|
|
|
|
"""
|
|
|
|
Is the cache valid for fn?
|
|
|
|
Fast version, no timestamps checked.
|
|
|
|
"""
|
2010-11-19 03:21:54 +00:00
|
|
|
if fn not in self.checked:
|
|
|
|
self.cacheValidUpdate(fn)
|
|
|
|
|
2006-05-09 15:44:08 +00:00
|
|
|
# Is cache enabled?
|
|
|
|
if not self.has_cache:
|
|
|
|
return False
|
|
|
|
if fn in self.clean:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
    def cacheValidUpdate(self, fn):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        # Record that the thorough check has run for this session.
        self.checked.add(fn)

        # Pretend we're clean so getVar works
        self.clean.add(fn)

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            logger.debug(2, "Cache: %s is not cached", fn)
            self.remove(fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists (an mtime of 0 means it doesn't)
        if mtime == 0:
            logger.debug(2, "Cache: %s no longer exists", fn)
            self.remove(fn)
            return False

        info = self.depends_cache[fn]
        # Check the file's timestamp against the one recorded at parse time
        if mtime != info.timestamp:
            logger.debug(2, "Cache: %s changed", fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid: each entry is a
        # (filename, mtime-at-parse-time) pair.
        depends = info.file_depends
        if depends:
            for f, old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    logger.debug(2, "Cache: %s's dependency %s was removed",
                                 fn, f)
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    logger.debug(2, "Cache: %s's dependency %s changed",
                                 fn, f)
                    self.remove(fn)
                    return False

        # All variants of this recipe must be cached for it to be usable;
        # optimistically mark each as clean, noting any that are missing.
        invalid = False
        for cls in info.variants:
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info.variants:
                virtualfn = self.realfn2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
            if fn in self.clean:
                logger.debug(2, "Cache: Marking %s as not clean", fn)
                self.clean.remove(fn)
            return False

        return True
|
2006-05-09 15:44:08 +00:00
|
|
|
|
|
|
|
def remove(self, fn):
|
|
|
|
"""
|
|
|
|
Remove a fn from the cache
|
|
|
|
Called from the parser in error cases
|
|
|
|
"""
|
|
|
|
if fn in self.depends_cache:
|
2010-11-13 23:36:37 +00:00
|
|
|
logger.debug(1, "Removing %s from cache", fn)
|
2006-05-09 15:44:08 +00:00
|
|
|
del self.depends_cache[fn]
|
|
|
|
if fn in self.clean:
|
2010-11-13 23:36:37 +00:00
|
|
|
logger.debug(1, "Marking %s as unclean", fn)
|
2010-11-17 00:43:33 +00:00
|
|
|
self.clean.remove(fn)
|
2006-05-09 15:44:08 +00:00
|
|
|
|
|
|
|
    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """

        if not self.has_cache:
            return

        # Skip the (potentially expensive) pickle when nothing was
        # added or changed this session.
        if self.cacheclean:
            logger.debug(2, "Cache is clean, not saving.")
            return

        # Stored alongside the data so a later run can detect
        # format/version mismatches and rebuild (see __init__).
        version_data = {
            'CACHE_VER': __cache_version__,
            'BITBAKE_VER': bb.__version__,
        }

        # -1 selects the highest available pickle protocol.
        with open(self.cachefile, "wb") as cachefile:
            pickle.Pickler(cachefile, -1).dump([self.depends_cache,
                                                version_data])

        # The cache is no longer needed in memory once written out.
        del self.depends_cache
|
2006-05-09 15:44:08 +00:00
|
|
|
|
2010-11-18 03:27:25 +00:00
|
|
|
    @staticmethod
    def mtime(cachefile):
        """Return cachefile's mtime via bb.parse's mtime cache.

        NOTE(review): a result of 0 is treated by callers elsewhere in
        this file as "file missing" — confirm against bb.parse.
        """
        return bb.parse.cached_mtime_noerror(cachefile)
|
2006-05-09 15:44:08 +00:00
|
|
|
|
2010-11-19 03:21:54 +00:00
|
|
|
def add_info(self, filename, info, cacheData, parsed=None):
|
|
|
|
cacheData.add_from_recipeinfo(filename, info)
|
2010-11-19 15:03:09 +00:00
|
|
|
if not self.has_cache:
|
|
|
|
return
|
|
|
|
|
2010-11-19 05:28:09 +00:00
|
|
|
if 'SRCREVINACTION' not in info.pv and not info.nocache:
|
|
|
|
if parsed:
|
|
|
|
self.cacheclean = False
|
|
|
|
self.depends_cache[filename] = info
|
2010-11-19 03:21:54 +00:00
|
|
|
|
|
|
|
    def add(self, file_name, data, cacheData, parsed=None):
        """
        Save data we need into the cache
        """
        # Strip any virtual: prefix to locate the real recipe file.
        realfn = self.virtualfn2realfn(file_name)[0]
        info = RecipeInfo.from_metadata(realfn, data)
        # Key the entry by the (possibly virtual) name the caller used.
        self.add_info(file_name, info, cacheData, parsed)
|
2006-11-16 15:02:15 +00:00
|
|
|
|
2010-11-18 03:27:25 +00:00
|
|
|
    @staticmethod
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file
        Returns the result of parse.handle() — presumably a mapping of
        variant name -> datastore (indexed as such by the callers in
        this file) — TODO confirm against bb.parse.
        """
        chdir_back = False

        from bb import data, parse

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        # Prime bb.parse's mtime cache for the recipe's directory.
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = data.init_db(config)
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not data.getVar('TOPDIR', bb_data):
            chdir_back = True
            data.setVar('TOPDIR', bbfile_loc, bb_data)
        try:
            if appends:
                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
            return bb_data
        except:
            # Restore the working directory on any error, then re-raise
            # (the bare except is deliberate: everything is propagated).
            if chdir_back:
                os.chdir(oldpath)
            raise
|
|
|
|
|
2010-11-18 03:27:25 +00:00
|
|
|
|
2006-05-09 15:44:08 +00:00
|
|
|
def init(cooker):
    """
    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipPackage exception

    Files causing parsing errors are evicted from the cache.

    """
    # The cooker carries the parsed configuration metadata that the
    # Cache keys its validity checks against.
    return Cache(cooker.configuration.data)
|
2006-05-09 15:44:08 +00:00
|
|
|
|
2006-11-16 15:02:15 +00:00
|
|
|
|
2010-11-18 18:14:27 +00:00
|
|
|
class CacheData(object):
    """
    The data structures we compile from the cached data
    """

    def __init__(self):
        """Initialise the (initially empty) lookup tables."""
        # Direct cache variables, filled in by add_from_recipeinfo().
        self.providers = defaultdict(list)
        self.rproviders = defaultdict(list)
        self.packages = defaultdict(list)
        self.packages_dynamic = defaultdict(list)
        self.possible_world = []
        self.pkg_pn = defaultdict(list)
        self.pkg_fn = {}
        self.pkg_pepvpr = {}
        self.pkg_dp = {}
        self.pn_provides = defaultdict(list)
        self.fn_provides = {}
        self.all_depends = []
        self.deps = defaultdict(list)
        self.rundeps = defaultdict(lambda: defaultdict(list))
        self.runrecs = defaultdict(lambda: defaultdict(list))
        self.task_queues = {}
        self.task_deps = {}
        self.stamp = {}
        self.preferred = {}
        self.tasks = {}
        self.basetaskhash = {}
        self.hashfn = {}

        # Indirect cache variables (set elsewhere).
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}
        self.bbfile_config_priorities = []

    def add_from_recipeinfo(self, fn, info):
        """Fold the RecipeInfo for fn into the lookup tables."""
        self.task_deps[fn] = info.task_deps
        self.pkg_fn[fn] = info.pn
        self.pkg_pn[info.pn].append(fn)
        self.pkg_pepvpr[fn] = (info.pe, info.pv, info.pr)
        self.pkg_dp[fn] = info.defaultpref
        self.stamp[fn] = info.stamp

        # A recipe always provides its own PN, plus any explicit PROVIDES.
        provides = [info.pn]
        for provide in info.provides:
            if provide not in provides:
                provides.append(provide)
        self.fn_provides[fn] = provides

        for provide in provides:
            self.providers[provide].append(fn)
            if provide not in self.pn_provides[info.pn]:
                self.pn_provides[info.pn].append(provide)

        for dep in info.depends:
            if dep not in self.deps[fn]:
                self.deps[fn].append(dep)
            if dep not in self.all_depends:
                self.all_depends.append(dep)

        # FIX: copy before extending — the old `rprovides = info.rprovides`
        # aliased the list stored inside the (cached) RecipeInfo, so the
        # += below mutated the cached recipe info in place. The rundeps
        # loop further down already copies with list(), matching this.
        rprovides = list(info.rprovides)
        for package in info.packages:
            self.packages[package].append(fn)
            rprovides += info.rprovides_pkg[package]

        for package in info.packages_dynamic:
            self.packages_dynamic[package].append(fn)

        for rprovide in rprovides:
            self.rproviders[rprovide].append(fn)

        # Build hash of runtime depends and recommends
        for package in info.packages + [info.pn]:
            rundeps, runrecs = list(info.rdepends), list(info.rrecommends)
            if package in info.packages:
                rundeps += info.rdepends_pkg[package]
                runrecs += info.rrecommends_pkg[package]
            self.rundeps[fn][package] = rundeps
            self.runrecs[fn][package] = runrecs

        # Collect files we may need for possible world-dep
        # calculations
        if not info.broken and not info.not_world:
            self.possible_world.append(fn)

        self.hashfn[fn] = info.hashfilename

        for task, taskhash in info.basetaskhashes.iteritems():
            identifier = '%s.%s' % (fn, task)
            self.basetaskhash[identifier] = taskhash
|