bitbake: Sync with 1.8 head. Adds locking to the fetcher to prevent parallel downloads, fixes key expansion issues and occasional missing dependency graph links

git-svn-id: https://svn.o-hand.com/repos/poky/trunk@2502 311d38ba-8fff-0310-9ca6-ca027cbcb966
Richard Purdie 2007-08-16 09:55:21 +00:00
parent 11ce59b501
commit 7611768e23
5 changed files with 64 additions and 26 deletions

ChangeLog

@@ -18,8 +18,14 @@ Changes in Bitbake 1.8.x:
- Fix local fetcher's localpath return values
- Apply OVERRIDES before performing immediate expansions
- Allow the -b -e option combination to take regular expressions
- Add plain message function to bb.msg
- Sort the list of providers before processing so dependency problems are
reproducible rather than effectively random
- Add locking for fetchers so only one tries to fetch a given file at a given time
- Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains
- Fix handling of variables with expansion in the name using _append/_prepend
e.g. RRECOMMENDS_${PN}_append_xyz = "abc"
Changes in Bitbake 1.8.6:
- Correctly redirect stdin when forking
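The provider-sorting entry in the 1.8.x list above is easy to see with a toy example: picking the first acceptable candidate out of an unordered container depends on hash iteration order, so the winner (and any dependency problem it triggers) can change from run to run. A minimal Python sketch with made-up provider names, not BitBake's real data structures:

# Stand-in for the candidate list BitBake builds; the names are invented.
providers = set(["meta/provider-c", "meta/provider-a", "meta/provider-b"])

# Taking the first acceptable candidate straight from a set depends on
# hash iteration order, so the result is effectively arbitrary:
arbitrary_pick = list(providers)[0]

# Sorting first makes the choice, and any dependency problem it triggers,
# identical on every run:
deterministic_pick = sorted(providers)[0]
print(deterministic_pick)   # always "meta/provider-a"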

lib/bb/data.py

@@ -96,6 +96,19 @@ def getVar(var, d, exp = 0):
"""
return d.getVar(var,exp)
def renameVar(key, newkey, d):
"""Renames a variable from key to newkey
Example:
>>> d = init()
>>> setVar('TEST', 'testcontents', d)
>>> renameVar('TEST', 'TEST2', d)
>>> print getVar('TEST2', d)
testcontents
"""
d.renameVar(key, newkey)
def delVar(var, d):
"""Removes a variable from the data set
@@ -276,24 +289,8 @@ def expandKeys(alterdata, readdata = None):
ekey = expand(key, readdata)
if key == ekey:
continue
val = getVar(key, alterdata)
if val is None:
continue
# import copy
# setVarFlags(ekey, copy.copy(getVarFlags(key, readdata)), alterdata)
setVar(ekey, val, alterdata)
for i in ('_append', '_prepend'):
dest = getVarFlag(ekey, i, alterdata) or []
src = getVarFlag(key, i, readdata) or []
dest.extend(src)
setVarFlag(ekey, i, dest, alterdata)
if key in alterdata._special_values[i]:
alterdata._special_values[i].remove(key)
alterdata._special_values[i].add(ekey)
delVar(key, alterdata)
renameVar(key, ekey, alterdata)
def expandData(alterdata, readdata = None):
"""For each variable in alterdata, expand it, and update the var contents.

lib/bb/data_smart.py

@@ -170,6 +170,28 @@ class DataSmart:
return self.expand(value,var)
return value
def renameVar(self, key, newkey):
"""
Rename the variable key to newkey
"""
val = self.getVar(key, 0)
if val is None:
return
self.setVar(newkey, val)
for i in ('_append', '_prepend'):
dest = self.getVarFlag(newkey, i) or []
src = self.getVarFlag(key, i) or []
dest.extend(src)
self.setVarFlag(newkey, i, dest)
if self._special_values.has_key(i) and key in self._special_values[i]:
self._special_values[i].remove(key)
self._special_values[i].add(newkey)
self.delVar(key)
def delVar(self,var):
self.expand_cache = {}
self.dict[var] = {}
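renameVar() can also be exercised directly on a DataSmart instance: the loop over ('_append', '_prepend') re-registers any queued appends against the new name, and delVar() drops the old key. A small illustrative sketch; constructing a bare DataSmart like this is only for demonstration:

from bb.data_smart import DataSmart

d = DataSmart()
d.setVar('OLD', 'value')
d.setVar('OLD_append', ' tail')        # stored as an _append flag against OLD

d.renameVar('OLD', 'NEW')

print(d.getVar('NEW', 0))              # "value"
print(d.getVarFlag('NEW', '_append'))  # the queued append now lives on NEW
print(d.getVar('OLD', 0))              # None, the old key was removed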

lib/bb/fetch/__init__.py

@@ -24,7 +24,7 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os, re
import os, re, fcntl
import bb
from bb import data
from bb import persist_data
@@ -140,9 +140,21 @@ def go(d):
# Touch md5 file to show activity
os.utime(ud.md5, None)
continue
lf = open(ud.lockfile, "a+")
fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
if ud.localfile and not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
# If someone else fetched this before we got the lock,
# notice and don't try again
os.utime(ud.md5, None)
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
lf.close()
continue
m.go(u, ud, d)
if ud.localfile and not m.forcefetch(u, ud, d):
Fetch.write_md5sum(u, ud, d)
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
lf.close()
def localpaths(d):
"""
@@ -264,6 +276,7 @@ class FetchData(object):
else:
self.localpath = self.method.localpath(self.url, self, d)
self.md5 = self.localpath + '.md5'
self.lockfile = self.localpath + '.lock'
# if user sets localpath for file, use it instead.
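The locking added to go() is the standard flock pattern: take an exclusive lock on a sidecar .lock file next to the download, re-check whether another process finished the fetch while we were waiting, do the work, then unlock. A stand-alone sketch of that pattern; fetch_once(), download() and the stamp handling are illustrative placeholders, not the fetcher's API:

import fcntl
import os

def fetch_once(localpath, download):
    lockfile = localpath + '.lock'
    stamp = localpath + '.md5'

    lf = open(lockfile, "a+")
    fcntl.flock(lf.fileno(), fcntl.LOCK_EX)   # blocks until we own the lock
    try:
        if os.path.exists(stamp):
            # Someone else fetched this while we waited for the lock;
            # just refresh the stamp and return.
            os.utime(stamp, None)
            return
        download(localpath)                   # the actual fetch, supplied by the caller
        open(stamp, "w").close()              # record completion
    finally:
        fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
        lf.close()

The try/finally is not in the hunk above; it is simply the tidier way to guarantee the unlock in an isolated example.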

lib/bb/runqueue.py

@@ -22,7 +22,7 @@ Handles preparation and execution of a queue of tasks
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from bb import msg, data, fetch, event, mkdirhier, utils
from bb import msg, data, event, mkdirhier, utils
from sets import Set
import bb, os, sys
import signal
@@ -113,7 +113,7 @@ class RunQueue:
# Won't be in build_targets if ASSUME_PROVIDED
if depid in taskData.build_targets:
depdata = taskData.build_targets[depid][0]
if depdata:
if depdata is not None:
dep = taskData.fn_index[depdata]
depends.append(taskData.gettask_id(dep, taskname))
@@ -123,7 +123,7 @@ class RunQueue:
for depid in taskData.rdepids[fnid]:
if depid in taskData.run_targets:
depdata = taskData.run_targets[depid][0]
if depdata:
if depdata is not None:
dep = taskData.fn_index[depdata]
depends.append(taskData.gettask_id(dep, taskname))
@@ -133,7 +133,7 @@ class RunQueue:
if depid in taskData.build_targets:
# Won't be in build_targets if ASSUME_PROVIDED
depdata = taskData.build_targets[depid][0]
if depdata:
if depdata is not None:
dep = taskData.fn_index[depdata]
depends.append(taskData.gettask_id(dep, idepend.split(":")[1]))
@@ -148,11 +148,11 @@ class RunQueue:
dep_seen.append(depid)
if depid in taskData.build_targets:
depdata = taskData.build_targets[depid][0]
if depdata:
if depdata is not None:
dep = taskData.fn_index[depdata]
# Need to avoid creating new tasks here
taskid = taskData.gettask_id(dep, taskname, False)
if taskid:
if taskid is not None:
depends.append(taskid)
fnid = taskData.tasks_fnid[taskid]
else:
@@ -180,11 +180,11 @@ class RunQueue:
rdep_seen.append(rdepid)
if rdepid in taskData.run_targets:
depdata = taskData.run_targets[rdepid][0]
if depdata:
if depdata is not None:
dep = taskData.fn_index[depdata]
# Need to avoid creating new tasks here
taskid = taskData.gettask_id(dep, taskname, False)
if taskid:
if taskid is not None:
depends.append(taskid)
fnid = taskData.tasks_fnid[taskid]
else:
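
The repeated change from "if depdata:" to "if depdata is not None:" is the int(0)/None fix named in the ChangeLog: build_targets and run_targets hold plain list indices, so a perfectly valid index 0 is falsy and its edge silently vanishes from the dependency graph. A two-line illustration with made-up contents:

# fn_index contents are made up for illustration.
fn_index = ["first.bb", "second.bb"]
depdata = 0                      # a real dependency, stored as list index 0

if depdata:                      # old test: 0 is falsy, so this edge was silently dropped
    print("old test keeps the dependency")

if depdata is not None:          # new test: only a genuinely absent entry (None) is skipped
    print("new test keeps the dependency: " + fn_index[depdata])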