bitbake: build/runqueue: Add noextra stamp file parameter to fix multiconfig builds
We can't execute the same task for the same package_arch multiple times, as the current setup has conflicting directories. Since these would usually have the same stamp/hash, we want to execute in sequence rather than in parallel; so, for the purposes of task execution, don't consider the "extra-info" on the stamp files. We need to add a parameter to the stamp function to achieve this.

This avoids multiple update-rc.d populate_sysroot tasks executing in parallel and breaking multiconfig builds.

(Bitbake rev: a9041fc96a14e718c0c1d1676e705343b9e872d3)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
parent
e7b2b7d40d
commit
412a26e154
|
@ -633,7 +633,7 @@ def exec_task(fn, task, d, profile = False):
|
||||||
event.fire(failedevent, d)
|
event.fire(failedevent, d)
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
def stamp_internal(taskname, d, file_name, baseonly=False):
|
def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
|
||||||
"""
|
"""
|
||||||
Internal stamp helper function
|
Internal stamp helper function
|
||||||
Makes sure the stamp directory exists
|
Makes sure the stamp directory exists
|
||||||
|
@ -656,6 +656,8 @@ def stamp_internal(taskname, d, file_name, baseonly=False):
|
||||||
|
|
||||||
if baseonly:
|
if baseonly:
|
||||||
return stamp
|
return stamp
|
||||||
|
if noextra:
|
||||||
|
extrainfo = ""
|
||||||
|
|
||||||
if not stamp:
|
if not stamp:
|
||||||
return
|
return
|
||||||
|
@ -751,12 +753,12 @@ def write_taint(task, d, file_name = None):
|
||||||
with open(taintfn, 'w') as taintf:
|
with open(taintfn, 'w') as taintf:
|
||||||
taintf.write(str(uuid.uuid4()))
|
taintf.write(str(uuid.uuid4()))
|
||||||
|
|
||||||
def stampfile(taskname, d, file_name = None):
|
def stampfile(taskname, d, file_name = None, noextra=False):
|
||||||
"""
|
"""
|
||||||
Return the stamp for a given task
|
Return the stamp for a given task
|
||||||
(d can be a data dict or dataCache)
|
(d can be a data dict or dataCache)
|
||||||
"""
|
"""
|
||||||
return stamp_internal(taskname, d, file_name)
|
return stamp_internal(taskname, d, file_name, noextra=noextra)
|
||||||
|
|
||||||
def add_tasks(tasklist, d):
|
def add_tasks(tasklist, d):
|
||||||
task_deps = d.getVar('_task_deps', False)
|
task_deps = d.getVar('_task_deps', False)
|
||||||
|
|
|
@ -137,7 +137,7 @@ class RunQueueScheduler(object):
|
||||||
for tid in self.rqdata.runtaskentries:
|
for tid in self.rqdata.runtaskentries:
|
||||||
(mc, fn, taskname) = split_tid(tid)
|
(mc, fn, taskname) = split_tid(tid)
|
||||||
taskfn = taskfn_fromtid(tid)
|
taskfn = taskfn_fromtid(tid)
|
||||||
self.stamps[tid] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn)
|
self.stamps[tid] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
|
||||||
if tid in self.rq.runq_buildable:
|
if tid in self.rq.runq_buildable:
|
||||||
self.buildable.append(tid)
|
self.buildable.append(tid)
|
||||||
|
|
||||||
|
@ -1805,7 +1805,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||||
self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata)) + b"</runtask>")
|
self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata)) + b"</runtask>")
|
||||||
self.rq.worker[mc].process.stdin.flush()
|
self.rq.worker[mc].process.stdin.flush()
|
||||||
|
|
||||||
self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn)
|
self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
|
||||||
self.build_stamps2.append(self.build_stamps[task])
|
self.build_stamps2.append(self.build_stamps[task])
|
||||||
self.runq_running.add(task)
|
self.runq_running.add(task)
|
||||||
self.stats.taskActive()
|
self.stats.taskActive()
|
||||||
|
|
Loading…
Reference in New Issue