bitbake/fetcher: Deal with a ton of different bugs

The more we try and patch up the fetcher code, the more things break. The
code blocks in question are practically unreadable and are full of corner
cases where fetching could fail. In summary, the issues noticed included:

a) Always fetching strange broken urls from the premirror for "noclone"
   git repositories
b) Not creating or updating .md5 stamp files consistently
c) Always fetching git source mirror tarballs from the premirror even
   if they already exist but the checkout directory does not
d) Passing "None" values to os.access() and os.path.exists() checks under
   certain circumstances
e) Not using already fetched git mirror tarballs if they preexist,
   instead always trying to fetch them again.

This patch rewrites the sections of code in question to be simpler and
more readable, fixing the above problems and most likely other odd
corner cases.

Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
Richard Purdie 2010-10-19 12:30:21 +01:00
parent 1077021f70
commit 05c8ee60f1
2 changed files with 37 additions and 35 deletions
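
For orientation before the diffs: the stamp handling the patch settles on is "touch the .md5 stamp if it already exists, otherwise write it", always done under the download lock. Below is a minimal standalone sketch of that idiom; refresh_stamp, stamp_path and write_stamp are illustrative names, not BitBake fetcher API.

    import os

    def refresh_stamp(stamp_path, write_stamp):
        # Touch an existing stamp to record active use of the download,
        # or create it if this is the first successful fetch.
        if os.path.exists(stamp_path):
            try:
                os.utime(stamp_path, None)
            except OSError:
                pass  # errors here aren't fatal, matching the fetcher's behaviour
        else:
            write_stamp()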


@@ -237,32 +237,14 @@ def go(d, urls = None):
     for u in urls:
         ud = urldata[u]
         m = ud.method
-        premirror_fetch = True
+        localpath = ""

-        if ud.localfile:
-            if not m.try_premirror(u, ud, d):
-                premirror_fetch = False
-                # File already present along with md5 stamp file
-                # Touch md5 file to show activity
-                try:
-                    os.utime(ud.md5, None)
-                except:
-                    # Errors aren't fatal here
-                    pass
+        if not ud.localfile:
+            continue

-            lf = bb.utils.lockfile(ud.lockfile)
+        lf = bb.utils.lockfile(ud.lockfile)

-            if not m.try_premirror(u, ud, d):
-                premirror_fetch = False
-                # If someone else fetched this before we got the lock,
-                # notice and don't try again
-                try:
-                    os.utime(ud.md5, None)
-                except:
-                    # Errors aren't fatal here
-                    pass
-
-        if premirror_fetch:
+        if m.try_premirror(u, ud, d):
             # First try fetching uri, u, from PREMIRRORS
             mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
             localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
@@ -282,14 +264,18 @@ def go(d, urls = None):
         if not localpath or not os.path.exists(localpath):
             raise FetchError("Unable to fetch URL %s from any source." % u)

-        if localpath:
-            ud.localpath = localpath
+        ud.localpath = localpath

-        if ud.localfile:
-            if not m.forcefetch(u, ud, d):
-                Fetch.write_md5sum(u, ud, d)
-            bb.utils.unlockfile(lf)
+        if os.path.exists(ud.md5):
+            # Touch the md5 file to show active use of the download
+            try:
+                os.utime(ud.md5, None)
+            except:
+                # Errors aren't fatal here
+                pass
+        else:
+            Fetch.write_md5sum(u, ud, d)
+
+        bb.utils.unlockfile(lf)

 def checkstatus(d):
     """


@@ -88,15 +88,21 @@ class Git(Fetch):
     def forcefetch(self, url, ud, d):
         if 'fullclone' in ud.parm:
             return True
-        if os.path.exists(self.localpath(url, ud, d)):
+        if 'noclone' in ud.parm:
+            return False
+        if os.path.exists(ud.localpath):
             return False
         if not self._contains_ref(ud.tag, d):
             return True
         return False

     def try_premirror(self, u, ud, d):
         if 'noclone' in ud.parm:
             return False
         if os.path.exists(ud.clonedir):
             return False
+        if os.path.exists(ud.localpath):
+            return False
+
         return True
@@ -113,16 +119,25 @@ class Git(Fetch):
         coname = '%s' % (ud.tag)
         codir = os.path.join(ud.clonedir, coname)

-        if not os.path.exists(ud.clonedir):
+        # If we have no existing clone and no mirror tarball, try and obtain one
+        if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
             try:
                 Fetch.try_mirrors(ud.mirrortarball)
-                bb.mkdirhier(ud.clonedir)
-                os.chdir(ud.clonedir)
-                runfetchcmd("tar -xzf %s" % (repofile), d)
             except:
-                runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
+                pass
+
+        # If the checkout doesn't exist and the mirror tarball does, extract it
+        if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
+            bb.mkdirhier(ud.clonedir)
+            os.chdir(ud.clonedir)
+            runfetchcmd("tar -xzf %s" % (repofile), d)
+
+        # If the repo still doesn't exist, fallback to cloning it
+        if not os.path.exists(ud.clonedir):
+            runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)

         os.chdir(ud.clonedir)
+        # Update the checkout if needed
         if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
             # Remove all but the .git directory
             runfetchcmd("rm * -Rf", d)
@@ -131,6 +146,7 @@ class Git(Fetch):
             runfetchcmd("%s prune-packed" % ud.basecmd, d)
             runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)

         # Generate a mirror tarball if needed
+        os.chdir(ud.clonedir)
         mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
         if mirror_tarballs != "0" or 'fullclone' in ud.parm:
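
The net effect of the git.py changes above is a fixed acquisition order for the clone directory: consult the premirror only when neither the clone nor the mirror tarball is present, prefer unpacking an existing tarball, and clone only as a last resort. A rough standalone sketch of that order follows; obtain_clonedir, fetch_tarball and clone_url are illustrative stand-ins, not fetcher API, and repofile is assumed to be an absolute path.

    import os
    import subprocess

    def obtain_clonedir(clonedir, repofile, fetch_tarball, clone_url):
        # Only hit the premirror when we have neither a clone nor a mirror tarball
        if not os.path.exists(clonedir) and not os.path.exists(repofile):
            try:
                fetch_tarball()
            except Exception:
                pass  # a failed premirror attempt is not fatal

        # Prefer unpacking an already-downloaded mirror tarball over re-fetching
        if not os.path.exists(clonedir) and os.path.exists(repofile):
            os.makedirs(clonedir, exist_ok=True)
            subprocess.check_call(["tar", "-xzf", repofile], cwd=clonedir)

        # Fall back to a fresh clone only if we still have nothing
        if not os.path.exists(clonedir):
            subprocess.check_call(["git", "clone", "-n", clone_url, clonedir])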