Currently we have a hierarchy of pkgdata directories and the code has to put together a search path and look through each in turn until it finds the data it needs. This has led to a number of hardcoded paths and file globbing which is unpredictable and undesirable. Worse, certain tricks that should be easy, like a GL-specific package architecture, become problematic with the current search paths. With the modern sstate code, we can do better and construct a single pkgdata directory for each machine in just the same way as we do for the sysroot. This is already tried and well tested. With such a single directory, all the code that iterated through multiple pkgdata directories can simply be removed, giving a significant simplification of the code. Even existing build directories adapt to the change well since the package contents don't change, just the location they're installed to and the stamp for them. The only complication is that we need a different shlibs directory for each multilib. These are only used by package.bbclass and the simple fix is to add MLPREFIX to the shlib directory name. This means the multilib packages will repackage and the sstate checksum will change, but an existing build directory will adapt to the changes safely. It is close to release; however, I believe the benefits this patch gives us are worth consideration for inclusion and give us more options for dealing with problems like the GL one. It also sets the groundwork well for shlibs improvements in 1.6. (From OE-Core rev: 1b8e4abd2d9c0901d38d89d0f944fe1ffd019379) Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
95 lines
2.4 KiB
Python
95 lines
2.4 KiB
Python
import codecs
|
|
|
|
def packaged(pkg, d):
    """Return True if a readable '.packaged' marker file exists for *pkg*."""
    marker = get_subpkgedata_fn(pkg, d) + '.packaged'
    return os.access(marker, os.R_OK)
|
|
|
|
def read_pkgdatafile(fn):
    """Parse a pkgdata file into a dictionary.

    Each line has the form "KEY: value"; values are stored escape-encoded
    and are decoded before being returned. If *fn* is not readable, an
    empty dictionary is returned.
    """
    pkgdata = {}

    def decode(value):
        # Values are written escape-encoded by package.bbclass. The
        # "string_escape" codec exists only on Python 2; fall back to
        # "unicode_escape" so the parser also works on Python 3.
        try:
            c = codecs.getdecoder("string_escape")
        except LookupError:
            c = codecs.getdecoder("unicode_escape")
        return c(value)[0]

    if os.access(fn, os.R_OK):
        import re
        # Raw string avoids an invalid-escape warning for \s.
        r = re.compile(r"([^:]+):\s*(.*)")
        # 'with' guarantees the file is closed even if parsing raises.
        with open(fn, 'r') as f:
            for l in f:
                m = r.match(l)
                if m:
                    pkgdata[m.group(1)] = decode(m.group(2))

    return pkgdata
|
|
|
|
def get_subpkgedata_fn(pkg, d):
    """Return the path of the per-package runtime pkgdata file for *pkg*."""
    template = '${PKGDATA_DIR}/runtime/%s' % pkg
    return d.expand(template)
|
|
|
|
def has_subpkgdata(pkg, d):
    """Return True if a readable runtime pkgdata file exists for *pkg*."""
    fn = get_subpkgedata_fn(pkg, d)
    return os.access(fn, os.R_OK)
|
|
|
|
def read_subpkgdata(pkg, d):
    """Parse the runtime pkgdata for *pkg* and return it as a dictionary."""
    fn = get_subpkgedata_fn(pkg, d)
    return read_pkgdatafile(fn)
|
|
|
|
def has_pkgdata(pn, d):
    """Return True if a readable recipe-level pkgdata file exists for *pn*."""
    return os.access(d.expand('${PKGDATA_DIR}/%s' % pn), os.R_OK)
|
|
|
|
def read_pkgdata(pn, d):
    """Parse the recipe-level pkgdata for *pn* and return it as a dictionary."""
    return read_pkgdatafile(d.expand('${PKGDATA_DIR}/%s' % pn))
|
|
|
|
#
# Collapse FOO_pkg variables into FOO
#
def read_subpkgdata_dict(pkg, d):
    """Return *pkg*'s runtime pkgdata with per-package key suffixes collapsed.

    A key such as "FOO_<pkg>" becomes "FOO". When both "FOO" and "FOO_<pkg>"
    are present, the unsuffixed entry is skipped so the per-package value
    always wins.
    """
    subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d))
    suffix = "_" + pkg
    collapsed = {}
    for key in subd:
        stripped = key.replace(suffix, "")
        # Skip an unsuffixed variable when a suffixed variant also exists.
        if stripped == key and key + suffix in subd:
            continue
        collapsed[stripped] = subd[key]
    return collapsed
|
|
|
|
def _pkgmap(d):
    """Return a dictionary mapping package to recipe name."""

    pkgdatadir = d.getVar("PKGDATA_DIR", True)

    pkgmap = {}
    try:
        entries = os.listdir(pkgdatadir)
    except OSError:
        bb.warn("No files in %s?" % pkgdatadir)
        entries = []

    # Recipe-level pkgdata files sit directly in PKGDATA_DIR;
    # subdirectories (e.g. runtime/) are not recipe files, so skip them.
    for pn in entries:
        if os.path.isdir(os.path.join(pkgdatadir, pn)):
            continue
        try:
            pkgdata = read_pkgdatafile(os.path.join(pkgdatadir, pn))
        except OSError:
            continue

        packages = pkgdata.get("PACKAGES") or ""
        for pkg in packages.split():
            pkgmap[pkg] = pn

    return pkgmap
|
|
|
|
def pkgmap(d):
    """Return a dictionary mapping package to recipe name.

    Cache the mapping in the metadata"""

    cached = d.getVar("__pkgmap_data", False)
    if cached is None:
        # First call for this datastore: build the map and memoize it.
        cached = _pkgmap(d)
        d.setVar("__pkgmap_data", cached)
    return cached
|
|
|
|
def recipename(pkg, d):
    """Return the recipe name for the given binary package name."""
    mapping = pkgmap(d)
    return mapping.get(pkg)
|