bitbake/fetch2: Update forcefetch and mirror handling to clean up, simplify and bug fix the code

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
This commit is contained in:
Richard Purdie 2011-02-07 12:08:32 +00:00
parent 1d3fdc85c6
commit 37624b9745
5 changed files with 65 additions and 81 deletions

View File

@ -282,15 +282,6 @@ def subprocess_setup():
# SIGPIPE errors are known issues with gzip/bash # SIGPIPE errors are known issues with gzip/bash
signal.signal(signal.SIGPIPE, signal.SIG_DFL) signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def download_update(result, target):
if os.path.exists(target):
return
if not result or not os.path.exists(result):
return
if target != result:
os.symlink(result, target)
return
def get_autorev(d): def get_autorev(d):
# only not cache src rev in autorev case # only not cache src rev in autorev case
if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache": if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
@ -401,7 +392,7 @@ def check_network_access(d, info = ""):
else: else:
logger.debug(1, "Fetcher accessed the network with the command %s" % info) logger.debug(1, "Fetcher accessed the network with the command %s" % info)
def try_mirrors(d, uri, mirrors, check = False, force = False): def try_mirrors(d, uri, mirrors, check = False):
""" """
Try to use a mirrored version of the sources. Try to use a mirrored version of the sources.
This method will be automatically called before the fetchers go. This method will be automatically called before the fetchers go.
@ -410,41 +401,31 @@ def try_mirrors(d, uri, mirrors, check = False, force = False):
uri is the original uri we're trying to download uri is the original uri we're trying to download
mirrors is the list of mirrors we're going to try mirrors is the list of mirrors we're going to try
""" """
fpath = os.path.join(data.getVar("DL_DIR", d, True), os.path.basename(uri))
if not check and os.access(fpath, os.R_OK) and not force:
logger.debug(1, "%s already exists, skipping checkout.", fpath)
return fpath
ld = d.createCopy() ld = d.createCopy()
for (find, replace) in mirrors: for (find, replace) in mirrors:
newuri = uri_replace(uri, find, replace, ld) newuri = uri_replace(uri, find, replace, ld)
if newuri != uri: if newuri == uri:
try: continue
ud = FetchData(newuri, ld) try:
except bb.fetch2.NoMethodError: ud = FetchData(newuri, ld)
logger.debug(1, "No method for %s", uri)
continue
ud.setup_localpath(ld) ud.setup_localpath(ld)
try: if check:
if check: found = ud.method.checkstatus(newuri, ud, ld)
found = ud.method.checkstatus(newuri, ud, ld) if found:
if found: return found
return found else:
else: if not ud.method.need_update(newuri, ud, ld):
ud.method.download(newuri, ud, ld)
if hasattr(ud.method,"build_mirror_data"):
ud.method.build_mirror_data(newuri, ud, ld)
return ud.localpath return ud.localpath
except (bb.fetch2.MissingParameterError, ud.method.download(newuri, ud, ld)
bb.fetch2.FetchError, if hasattr(ud.method,"build_mirror_data"):
bb.fetch2.MD5SumError): ud.method.build_mirror_data(newuri, ud, ld)
import sys return ud.localpath
(type, value, traceback) = sys.exc_info()
logger.debug(2, "Mirror fetch failure: %s", value) except bb.fetch2.BBFetchException:
bb.utils.remove(ud.localpath) logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, uri))
continue bb.utils.remove(ud.localpath)
continue
return None return None
def srcrev_internal_helper(ud, d, name): def srcrev_internal_helper(ud, d, name):
@ -481,6 +462,7 @@ class FetchData(object):
A class which represents the fetcher state for a given URI. A class which represents the fetcher state for a given URI.
""" """
def __init__(self, url, d): def __init__(self, url, d):
# localpath is the location of a downloaded result. If not set, the file is local.
self.localfile = "" self.localfile = ""
self.localpath = None self.localpath = None
self.lockfile = None self.lockfile = None
@ -594,11 +576,13 @@ class FetchMethod(object):
urls = property(getUrls, setUrls, None, "Urls property") urls = property(getUrls, setUrls, None, "Urls property")
def forcefetch(self, url, urldata, d): def need_update(self, url, ud, d):
""" """
Force a fetch, even if localpath exists? Force a fetch, even if localpath exists?
""" """
return False if os.path.exists(ud.localpath):
return False
return True
def supports_srcrev(self): def supports_srcrev(self):
""" """
@ -694,12 +678,7 @@ class FetchMethod(object):
""" """
Should premirrors be used? Should premirrors be used?
""" """
if urldata.method.forcefetch(url, urldata, d): return True
return True
elif os.path.exists(urldata.donestamp) and os.path.exists(urldata.localfile):
return False
else:
return True
def checkstatus(self, url, urldata, d): def checkstatus(self, url, urldata, d):
""" """
@ -842,36 +821,32 @@ class Fetch(object):
lf = bb.utils.lockfile(ud.lockfile) lf = bb.utils.lockfile(ud.lockfile)
if m.try_premirror(u, ud, self.d): if not m.need_update(u, ud, self.d):
# First try fetching uri, u, from PREMIRRORS localpath = ud.localpath
elif m.try_premirror(u, ud, self.d):
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True)) mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
localpath = try_mirrors(self.d, u, mirrors, False, m.forcefetch(u, ud, self.d)) localpath = try_mirrors(self.d, u, mirrors, False)
elif os.path.exists(ud.localfile):
localpath = ud.localfile
download_update(localpath, ud.localpath) if bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) is None:
if not localpath and m.need_update(u, ud, self.d):
try:
m.download(u, ud, self.d)
if hasattr(m, "build_mirror_data"):
m.build_mirror_data(u, ud, self.d)
localpath = ud.localpath
# Need to re-test forcefetch() which will return true if our copy is too old except BBFetchException:
if m.forcefetch(u, ud, self.d) or not localpath: # Remove any incomplete file
# Next try fetching from the original uri, u bb.utils.remove(ud.localpath)
try: mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
m.download(u, ud, self.d) localpath = try_mirrors (self.d, u, mirrors)
if hasattr(m, "build_mirror_data"):
m.build_mirror_data(u, ud, self.d)
localpath = ud.localpath
download_update(localpath, ud.localpath)
except FetchError:
# Remove any incomplete file
bb.utils.remove(ud.localpath)
# Finally, try fetching uri, u, from MIRRORS
mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
localpath = try_mirrors (self.d, u, mirrors)
if not localpath or not os.path.exists(localpath): if not localpath or not os.path.exists(localpath):
raise FetchError("Unable to fetch URL %s from any source." % u, u) raise FetchError("Unable to fetch URL %s from any source." % u, u)
download_update(localpath, ud.localpath) # The local fetcher can return an alternate path so we symlink
if os.path.exists(localpath) and not os.path.exists(ud.localpath):
os.symlink(localpath, ud.localpath)
if os.path.exists(ud.donestamp): if os.path.exists(ud.donestamp):
# Touch the done stamp file to show active use of the download # Touch the done stamp file to show active use of the download

View File

@ -65,9 +65,11 @@ class Cvs(FetchMethod):
ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
def forcefetch(self, url, ud, d): def need_update(self, url, ud, d):
if (ud.date == "now"): if (ud.date == "now"):
return True return True
if not os.path.exists(ud.localpath):
return True
return False return False
def download(self, loc, ud, d): def download(self, loc, ud, d):

View File

@ -80,7 +80,7 @@ class Git(FetchMethod):
def localpath(self, url, ud, d): def localpath(self, url, ud, d):
return ud.clonedir return ud.clonedir
def forcefetch(self, url, ud, d): def need_update(self, u, ud, d):
if not os.path.exists(ud.clonedir): if not os.path.exists(ud.clonedir):
return True return True
os.chdir(ud.clonedir) os.chdir(ud.clonedir)
@ -90,13 +90,12 @@ class Git(FetchMethod):
return False return False
def try_premirror(self, u, ud, d): def try_premirror(self, u, ud, d):
if 'noclone' in ud.parm: # If we don't do this, updating an existing checkout with only premirrors
return False # is not possible
if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None:
return True
if os.path.exists(ud.clonedir): if os.path.exists(ud.clonedir):
return False return False
if os.path.exists(ud.localpath):
return False
return True return True
def download(self, loc, ud, d): def download(self, loc, ud, d):

View File

@ -64,9 +64,13 @@ class Hg(FetchMethod):
ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
def forcefetch(self, url, ud, d): def need_update(self, url, ud, d):
revTag = ud.parm.get('rev', 'tip') revTag = ud.parm.get('rev', 'tip')
return revTag == "tip" if revTag == "tip":
return True
if not os.path.exists(ud.localpath):
return True
return False
def _buildhgcommand(self, ud, d, command): def _buildhgcommand(self, ud, d, command):
""" """

View File

@ -53,8 +53,12 @@ class Svk(FetchMethod):
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d) ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
def forcefetch(self, url, ud, d): def need_update(self, url, ud, d):
return ud.date == "now" if ud.date == "now":
return True
if not os.path.exists(ud.localpath):
return True
return False
def download(self, loc, ud, d): def download(self, loc, ud, d):
"""Fetch urls""" """Fetch urls"""