# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.
"""

# Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

from __future__ import absolute_import
from __future__ import print_function
import os, re
import bb
from bb import data
from bb import persist_data

class MalformedUrl(Exception):
    """Exception raised when encountering an invalid url"""

class FetchError(Exception):
    """Exception raised when a download fails"""

class NoMethodError(Exception):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""

class MissingParameterError(Exception):
    """Exception raised when a fetch method is missing a critical parameter in the url"""

class ParameterError(Exception):
    """Exception raised when a url cannot be processed due to invalid parameters."""

class MD5SumError(Exception):
    """Exception raised when an MD5SUM of a file does not match the expected one"""

class InvalidSRCREV(Exception):
    """Exception raised when an invalid SRCREV is encountered"""

def decodeurl(url):
    """Decodes a URL into the tokens (scheme, network location, path,
    user, password, parameters).
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    locidx = location.find('/')
    if locidx != -1 and type.lower() != 'file':
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
        else:
            # Ensure pswd is always bound, even if the user string doesn't parse
            pswd = ''
    else:
        user = ''
        pswd = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            s1, s2 = s.split('=')
            p[s1] = s2

    return (type, host, path, user, pswd, p)
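
# An illustrative example (assuming a typical BitBake URI):
#   decodeurl("git://git.example.com/repo.git;protocol=git;tag=v1.0")
# returns
#   ('git', 'git.example.com', '/repo.git', '', '', {'protocol': 'git', 'tag': 'v1.0'})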

def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    (type, host, path, user, pswd, p) = decoded

    if not type or not path:
        raise MissingParameterError("Type or path url components missing when encoding %s" % decoded)
    url = '%s://' % type
    if user:
        url += "%s" % user
        if pswd:
            url += ":%s" % pswd
        url += "@"
    if host:
        url += "%s" % host
    url += "%s" % path
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url
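
# encodeurl() is the inverse of decodeurl(); for example
#   encodeurl(('http', 'example.com', '/releases/foo-1.0.tar.gz', '', '', {'name': 'foo'}))
# returns
#   'http://example.com/releases/foo-1.0.tar.gz;name=foo'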

def uri_replace(uri, uri_find, uri_replace, d):
#   bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
    if not uri or not uri_find or not uri_replace:
        bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
        return uri
    uri_decoded = list(decodeurl(uri))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    result_decoded = ['', '', '', '', '', {}]
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        if isinstance(i, basestring):
            if (re.match(i, uri_decoded[loc])):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    if d:
                        localfn = bb.fetch.localpath(uri, d)
                        if localfn:
                            result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
#               bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
            else:
#               bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
                return uri
#       else:
#           for j in i:
#               FIXME: apply replacements against options
    return encodeurl(result_decoded)
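
# uri_replace() is what try_mirrors() uses to rewrite a URI against each
# (find, replace) mirror pair; e.g. a hypothetical pair such as
#   ('git://.*/.*', 'http://mirror.example.com/sources/')
# maps a git URI onto a file of the same basename on the HTTP mirror.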

methods = []
urldata_cache = {}
saved_headrevs = {}
persistent_database_connection = {}

def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    pd = persist_data.PersistData(d, persistent_database_connection)
    # When to drop SCM head revisions is controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
    elif srcrev_policy == "clear":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
        try:
            bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS")
        except:
            pass
        pd.delDomain("BB_URI_HEADREVS")
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

    # Make sure our domains exist
    pd.addDomain("BB_URI_HEADREVS")
    pd.addDomain("BB_URI_LOCALCOUNT")

def fetcher_compare_revisons(d):
    """
    Compare the revisions in the persistent cache with current values and
    return true/false on whether they've changed.
    """

    pd = persist_data.PersistData(d, persistent_database_connection)
    data = pd.getKeyValues("BB_URI_HEADREVS")
    data2 = bb.fetch.saved_headrevs

    for key in data:
        if key not in data2 or data2[key] != data[key]:
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s changed" % key)
            return True
        else:
            bb.msg.debug(2, bb.msg.domain.Fetcher, "%s did not change" % key)
    return False

# Function call order is usually:
#   1. init
#   2. go
#   3. localpaths
# localpath can be called at any time

def init(urls, d, setup = True):
    urldata = {}

    fn = bb.data.getVar('FILE', d, 1)
    if fn in urldata_cache:
        urldata = urldata_cache[fn]

    for url in urls:
        if url not in urldata:
            urldata[url] = FetchData(url, d)

    if setup:
        for url in urldata:
            if not urldata[url].setup:
                urldata[url].setup_localpath(d)

    urldata_cache[fn] = urldata
    return urldata

def mirror_from_string(data):
    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
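
# For example, a PREMIRRORS/MIRRORS value of
#   "ftp://.*/.* http://mirror.example.com/sources/ \n git://.*/.* http://mirror.example.com/sources/"
# is turned into
#   [['ftp://.*/.*', 'http://mirror.example.com/sources/'],
#    ['git://.*/.*', 'http://mirror.example.com/sources/']]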

def go(d, urls = None):
    """
    Fetch all urls
    init must have previously been called
    """
    if not urls:
        urls = d.getVar("SRC_URI", 1).split()
    urldata = init(urls, d, True)

    for u in urls:
        ud = urldata[u]
        m = ud.method
        premirror_fetch = True
        localpath = ""

        if ud.localfile:
            if not m.try_premirror(u, ud, d):
                premirror_fetch = False
                # File already present along with md5 stamp file
                # Touch md5 file to show activity
                try:
                    os.utime(ud.md5, None)
                except:
                    # Errors aren't fatal here
                    pass

            lf = bb.utils.lockfile(ud.lockfile)
            if not m.try_premirror(u, ud, d):
                premirror_fetch = False
                # If someone else fetched this before we got the lock,
                # notice and don't try again
                try:
                    os.utime(ud.md5, None)
                except:
                    # Errors aren't fatal here
                    pass

        if premirror_fetch:
            # First try fetching uri, u, from PREMIRRORS
            mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
            localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
        elif os.path.exists(ud.localfile):
            localpath = ud.localfile

        # Need to re-test forcefetch() which will return true if our copy is too old
        if m.forcefetch(u, ud, d) or not localpath:
            # Next try fetching from the original uri, u
            try:
                m.go(u, ud, d)
                localpath = ud.localpath
            except FetchError:
                # Finally, try fetching uri, u, from MIRRORS
                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
                localpath = try_mirrors(d, u, mirrors)
                if not localpath or not os.path.exists(localpath):
                    raise FetchError("Unable to fetch URL %s from any source." % u)

        if localpath:
            ud.localpath = localpath

        if ud.localfile:
            if not m.forcefetch(u, ud, d):
                Fetch.write_md5sum(u, ud, d)
            bb.utils.unlockfile(lf)

def checkstatus(d):
    """
    Check all urls exist upstream
    init must have previously been called
    """
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        m = ud.method
        bb.msg.note(1, bb.msg.domain.Fetcher, "Testing URL %s" % u)
        # First try checking uri, u, from PREMIRRORS
        mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
        ret = try_mirrors(d, u, mirrors, True)
        if not ret:
            # Next try checking from the original uri, u
            try:
                ret = m.checkstatus(u, ud, d)
            except:
                # Finally, try checking uri, u, from MIRRORS
                mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
                ret = try_mirrors(d, u, mirrors, True)

        if not ret:
            raise FetchError("URL %s doesn't work" % u)

def localpaths(d):
    """
    Return a list of the local filenames, assuming successful fetch
    """
    local = []
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        local.append(ud.localpath)

    return local

srcrev_internal_call = False

def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages have only one SCM, so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    #
    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
    # could translate into a call to here. If it does, we need to catch this
    # and provide some way so it knows get_srcrev is active instead of being
    # some number etc. hence the srcrev_internal_call tracking and the magic
    # "SRCREVINACTION" return value.
    #
    # Neater solutions welcome!
    #
    if bb.fetch.srcrev_internal_call:
        return "SRCREVINACTION"

    scms = []

    # Only call setup_localpath on URIs which suppports_srcrev()
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
    for u in urldata:
        ud = urldata[u]
        if ud.method.suppports_srcrev():
            if not ud.setup:
                ud.setup_localpath(d)
            scms.append(u)

    if len(scms) == 0:
        bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
        bb.data.setVar('__BB_DONT_CACHE', '1', d)

    if len(scms) == 1:
        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)

    #
    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
    if not format:
        bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
        raise ParameterError

    for scm in scms:
        if 'name' in urldata[scm].parm:
            name = urldata[scm].parm["name"]
            rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
            format = format.replace(name, rev)

    return format
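
# For example (hypothetical recipe values), with
#   SRC_URI = "git://host/a.git;name=machine git://host/b.git;name=meta"
#   SRCREV_FORMAT = "machine_meta"
# each name in SRCREV_FORMAT is replaced by that SCM's sortable revision,
# giving something like "3+f00dcafe_7+deadbeef".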

def localpath(url, d, cache = True):
    """
    Called from the parser with cache=False since the cache isn't ready
    at this point. Also called from classes in OE, e.g. patch.bbclass
    """
    ud = init([url], d)
    if ud[url].method:
        return ud[url].localpath
    return url

def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export variables?
    exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
                  'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
                  'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']

    for var in exportvars:
        val = data.getVar(var, d, True)
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)

    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    while True:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print(line, end=' ')
        output += line

    status = stdout_handle.close() or 0
    # close() returns a wait()-style status: the exit code is in the high
    # byte and any terminating signal in the low byte
    exitstatus = status >> 8
    signal = status & 0x7f

    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
    elif status != 0:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))

    return output
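
# Illustrative usage from a fetcher implementation, e.g. a command built from
# the metadata's FETCHCOMMAND variable (names here are examples only):
#   output = runfetchcmd("%s '%s'" % (fetchcmd, uri), d, quiet=True)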

def try_mirrors(d, uri, mirrors, check = False, force = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.

    d Is a bb.data instance
    uri is the original uri we're trying to download
    mirrors is the list of mirrors we're going to try
    """
    fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
    if not check and os.access(fpath, os.R_OK) and not force:
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % fpath)
        return fpath

    ld = d.createCopy()
    for (find, replace) in mirrors:
        newuri = uri_replace(uri, find, replace, ld)
        if newuri != uri:
            try:
                ud = FetchData(newuri, ld)
            except bb.fetch.NoMethodError:
                bb.msg.debug(1, bb.msg.domain.Fetcher, "No method for %s" % uri)
                continue

            ud.setup_localpath(ld)

            try:
                if check:
                    ud.method.checkstatus(newuri, ud, ld)
                else:
                    ud.method.go(newuri, ud, ld)
                return ud.localpath
            except (bb.fetch.MissingParameterError,
                    bb.fetch.FetchError,
                    bb.fetch.MD5SumError):
                import sys
                (type, value, traceback) = sys.exc_info()
                bb.msg.debug(2, bb.msg.domain.Fetcher, "Mirror fetch failure: %s" % value)
                continue
    return None

class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        self.localfile = ""
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                return
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        else:
            premirrors = bb.data.getVar('PREMIRRORS', d, True)
            local = ""
            if premirrors and self.url:
                aurl = self.url.split(";")[0]
                mirrors = mirror_from_string(premirrors)
                for (find, replace) in mirrors:
                    if replace.startswith("file://"):
                        path = aurl.split("://")[1]
                        path = path.split(";")[0]
                        local = replace.split("://")[1] + os.path.basename(path)
                        if local == aurl or not os.path.exists(local) or os.path.isdir(local):
                            local = ""
                self.localpath = local
            if not local:
                try:
                    bb.fetch.srcrev_internal_call = True
                    self.localpath = self.method.localpath(self.url, self, d)
                finally:
                    bb.fetch.srcrev_internal_call = False
                # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
                # Horrible...
                bb.data.delVar("ISHOULDNEVEREXIST", d)

        if self.localpath is not None:
            # Note: These files should always be in DL_DIR whereas localpath may not be.
            basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
            self.md5 = basepath + '.md5'
            self.lockfile = basepath + '.lock'

class Fetch(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls = []):
        self.urls = []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def suppports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError("Missing implementation for url")

    def try_premirror(self, url, urldata, d):
        """
        Should premirrors be used?
        """
        if urldata.method.forcefetch(url, urldata, d):
            return True
        elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile):
            return False
        else:
            return True

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url)
        return True

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)
|
|
|
|
|
2007-11-13 23:03:21 +00:00
|
|
|
def srcrev_internal_helper(ud, d):
|
|
|
|
"""
|
|
|
|
Return:
|
|
|
|
a) a source revision if specified
|
2010-03-24 23:56:12 +00:00
|
|
|
b) True if auto srcrev is in action
|
|
|
|
c) False otherwise
|
2007-11-13 23:03:21 +00:00
|
|
|
"""
|
|
|
|
|
|
|
|
if 'rev' in ud.parm:
|
|
|
|
return ud.parm['rev']
|
|
|
|
|
|
|
|
if 'tag' in ud.parm:
|
|
|
|
return ud.parm['tag']
|
|
|
|
|
|
|
|
rev = None
|
|
|
|
if 'name' in ud.parm:
|
|
|
|
pn = data.getVar("PN", d, 1)
|
2010-09-09 14:09:50 +00:00
|
|
|
rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
|
|
|
|
if not rev:
|
|
|
|
rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
|
2010-10-09 16:24:16 +00:00
|
|
|
if not rev:
|
|
|
|
rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
|
2007-11-13 23:03:21 +00:00
|
|
|
if not rev:
|
|
|
|
rev = data.getVar("SRCREV", d, 1)
|
2008-07-22 09:13:51 +00:00
|
|
|
if rev == "INVALID":
|
|
|
|
raise InvalidSRCREV("Please set SRCREV to a valid value")
|
2007-11-13 23:03:21 +00:00
|
|
|
if not rev:
|
|
|
|
return False
|
|
|
|
if rev is "SRCREVINACTION":
|
|
|
|
return True
|
|
|
|
return rev
|
|
|
|
|
|
|
|
srcrev_internal_helper = staticmethod(srcrev_internal_helper)
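
    # Lookup order example: for SRC_URI = "git://host/repo.git;name=rt" in a
    # recipe with PN = "linux", SRCREV_rt_pn-linux is tried first, then
    # SRCREV_pn-linux_rt, then SRCREV_rt, and finally plain SRCREV.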

    def localcount_internal_helper(ud, d):
        """
        Return:
        a) a locked localcount if specified
        b) None otherwise
        """

        localcount = None
        if 'name' in ud.parm:
            pn = data.getVar("PN", d, 1)
            localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
        if not localcount:
            localcount = data.getVar("LOCALCOUNT", d, 1)
        return localcount

    localcount_internal_helper = staticmethod(localcount_internal_helper)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted with the one we got
        """
        wanted_sum = None
        if 'md5sum' in ud.parm:
            wanted_sum = ud.parm['md5sum']
        if not wanted_sum:
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        md5data = bb.utils.md5_file(ud.localpath)
        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        md5out = file(ud.md5, 'w')
        md5out.write(md5data)
        md5out.close()
    write_md5sum = staticmethod(write_md5sum)

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        pd = persist_data.PersistData(d, persistent_database_connection)
        key = self.generate_revision_key(url, ud, d)
        rev = pd.getValue("BB_URI_HEADREVS", key)
        if rev is not None:
            return str(rev)

        rev = self._latest_revision(url, ud, d)
        pd.setValue("BB_URI_HEADREVS", key, rev)
        return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a sortable revision identifier of the form "<count>+<rev>".
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        pd = persist_data.PersistData(d, persistent_database_connection)
        key = self.generate_revision_key(url, ud, d)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = Fetch.localcount_internal_helper(ud, d)
        if count is None:
            count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")

        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            count = str(int(count) + 1)

        pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
        pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)

        return str(count + "+" + latest_rev)
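
    # The returned value looks like e.g. "42+f00dcafe": a local count that
    # increases whenever the upstream revision changes, followed by the SCM
    # revision itself, so PV values derived from it remain sortable.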

    def generate_revision_key(self, url, ud, d):
        key = self._revision_key(url, ud, d)
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")

from . import cvs
from . import git
from . import local
from . import svn
from . import wget
from . import svk
from . import ssh
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())