fetch2: Correct the clean() mechanism for the fetcher2 code

This creates a clean() method in each of the fetcher modules
and correctly cleans up the .done stamp and lock files.

Signed-off-by: Saul Wold <sgw@linux.intel.com>
Saul Wold 2011-02-09 14:30:29 -08:00 committed by Richard Purdie
parent f1bbea4ab0
commit 14dea89521
7 changed files with 83 additions and 15 deletions
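
As a rough illustration of the interface this commit introduces, the sketch below drives bb.fetch2.Fetch.clean() for a recipe's SRC_URI, mirroring the do_cleanall() change at the end of this commit. The helper name clean_recipe_downloads and the error handling around it are assumptions made for the example, not part of the commit.

import bb
import bb.fetch2

def clean_recipe_downloads(d):
    # `d` is assumed to be a populated BitBake datastore for one recipe.
    src_uri = (d.getVar('SRC_URI', True) or "").split()
    if len(src_uri) == 0:
        return

    try:
        fetcher = bb.fetch2.Fetch(src_uri, d)
        # For each URL: take the download lock, call the fetcher module's
        # clean(), remove the .done stamp, then release the lock.
        fetcher.clean()
    except bb.fetch2.BBFetchException as e:
        bb.fatal(str(e))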

@@ -170,3 +170,14 @@ class Cvs(Fetch):
             except OSError:
                 pass
             raise FetchError(ud.module)
+
+    def clean(self, ud, d):
+        """ Clean CVS Files and tarballs """
+        pkg = data.expand('${PN}', d)
+        localdata = data.createCopy(d)
+        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
+        data.update_data(localdata)
+        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
+
+        bb.utils.remove(pkgdir, True)
+        bb.utils.remove(ud.localpath)

@@ -491,6 +491,7 @@ class FetchData(object):
     """
     def __init__(self, url, d):
         # localpath is the location of a downloaded result. If not set, the file is local.
+        self.donestamp = None
         self.localfile = ""
         self.localpath = None
         self.lockfile = None
@@ -717,6 +718,12 @@ class FetchMethod(object):
         return
 
+    def clean(self, urldata, d):
+        """
+        Clean any existing full or partial download
+        """
+        bb.utils.remove(urldata.localpath)
+
     def try_premirror(self, url, urldata, d):
         """
         Should premirrors be used?
@@ -958,6 +965,33 @@ class Fetch(object):
             if ud.lockfile:
                 bb.utils.unlockfile(lf)
 
+    def clean(self, urls = []):
+        """
+        Clean files that the fetcher gets or places
+        """
+
+        if len(urls) == 0:
+            urls = self.urls
+
+        for url in urls:
+            if url not in self.ud:
+                self.ud[url] = FetchData(url, self.d)
+            ud = self.ud[url]
+            ud.setup_localpath(self.d)
+
+            if not ud.localfile or ud.localpath is None:
+                continue
+
+            if ud.lockfile:
+                lf = bb.utils.lockfile(ud.lockfile)
+
+            ud.method.clean(ud, self.d)
+            if ud.donestamp:
+                bb.utils.remove(ud.donestamp)
+
+            if ud.lockfile:
+                bb.utils.unlockfile(lf)
+
 from . import cvs
 from . import git
 from . import local
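
The FetchMethod.clean() default above only removes the downloaded file; fetcher modules that also keep an unpacked tree or mirror tarball override it, as the cvs, git and svn hunks below do. A minimal sketch of such an override for a hypothetical fetcher (the Example class and its moddir attribute are assumptions, not taken from this commit):

import bb.utils
from bb.fetch2 import FetchMethod

class Example(FetchMethod):
    def supports(self, url, urldata, d):
        # Claim URLs of the form example://...
        return urldata.type in ['example']

    def clean(self, ud, d):
        """ Remove the packed download and the module's work tree """
        bb.utils.remove(ud.localpath)       # the fetched/packed result
        bb.utils.remove(ud.moddir, True)    # hypothetical per-module checkout, removed recursively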

@@ -167,3 +167,15 @@ class Cvs(FetchMethod):
         runfetchcmd(cmd, d, cleanup = [ud.localpath])
+
+    def clean(self, ud, d):
+        """ Clean CVS Files and tarballs """
+
+        pkg = data.expand('${PN}', d)
+        localdata = data.createCopy(d)
+        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
+        data.update_data(localdata)
+        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
+
+        bb.utils.remove(pkgdir, True)
+        bb.utils.remove(ud.localpath)

@@ -172,6 +172,12 @@ class Git(FetchMethod):
         runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
         return True
 
+    def clean(self, ud, d):
+        """ clean the git directory """
+
+        bb.utils.remove(ud.localpath, True)
+        bb.utils.remove(ud.fullmirror)
+
     def supports_srcrev(self):
         return True

@@ -74,3 +74,7 @@ class Local(FetchMethod):
         if os.path.exists(urldata.localpath):
             return True
         return False
+
+    def clean(self, urldata, d):
+        return

@@ -138,6 +138,13 @@ class Svn(FetchMethod):
         # tar them up to a defined filename
         runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
 
+    def clean(self, ud, d):
+        """ Clean SVN specific files and dirs """
+
+        bb.utils.remove(ud.localpath)
+        bb.utils.remove(ud.moddir, True)
+
     def supports_srcrev(self):
         return True

@@ -464,24 +464,18 @@ addtask cleanall after do_clean
 python do_cleanall() {
     sstate_clean_cachefiles(d)
 
-    src_uri = (bb.data.getVar('SRC_URI', d, True) or "").split()
-    if len(src_uri) == 0:
-        return
-
     localdata = bb.data.createCopy(d)
     bb.data.update_data(localdata)
 
-    dl_dir = bb.data.getVar('DL_DIR', localdata, True)
-    dl_dir = os.path.realpath(dl_dir)
+    src_uri = (bb.data.getVar('SRC_URI', localdata, True) or "").split()
+    if len(src_uri) == 0:
+        return
 
-    fetcher = bb.fetch2.Fetch(src_uri, localdata)
-    for url in src_uri:
-        local = fetcher.localpath(url)
-        if local is None:
-            continue
-        local = os.path.realpath(local)
-        if local.startswith(dl_dir):
-            bb.note("Removing %s*" % local)
-            oe.path.remove(local + "*")
+    try:
+        fetcher = bb.fetch2.Fetch(src_uri, localdata)
+        fetcher.clean()
+    except bb.fetch2.BBFetchException, e:
+        raise bb.build.FuncFailed(e)
+
 }
 do_cleanall[nostamp] = "1"