path: root/poky/bitbake/lib/bb/fetch2
Diffstat (limited to 'poky/bitbake/lib/bb/fetch2')
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py  | 40
-rw-r--r--  poky/bitbake/lib/bb/fetch2/bzr.py       |  8
-rw-r--r--  poky/bitbake/lib/bb/fetch2/clearcase.py |  2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/cvs.py       |  4
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py       | 51
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py     |  4
-rw-r--r--  poky/bitbake/lib/bb/fetch2/hg.py        | 16
-rw-r--r--  poky/bitbake/lib/bb/fetch2/local.py     |  4
-rw-r--r--  poky/bitbake/lib/bb/fetch2/osc.py       |  6
-rw-r--r--  poky/bitbake/lib/bb/fetch2/perforce.py  | 13
-rw-r--r--  poky/bitbake/lib/bb/fetch2/repo.py      |  2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/svn.py       |  6
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py      | 17
13 files changed, 111 insertions(+), 62 deletions(-)
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 07b7ae41b..19169d780 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -290,7 +290,7 @@ class URI(object):
def _param_str_split(self, string, elmdelim, kvdelim="="):
ret = collections.OrderedDict()
- for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
+ for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
ret[k] = v
return ret
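
The added 'if x' guard is what this hunk is about: a parameter string with a trailing or doubled delimiter produces empty fragments, and ''.split('=', 1) returns a one-element list that cannot unpack into k, v, raising ValueError. A minimal standalone sketch of the fixed behaviour (function name and inputs are illustrative, not the fetcher's own):

    import collections

    def param_str_split(string, elmdelim, kvdelim="="):
        # Skip empty fragments (e.g. from a trailing ';') so the
        # two-element unpack below cannot raise ValueError.
        ret = collections.OrderedDict()
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
            ret[k] = v
        return ret

    print(param_str_split("protocol=https;branch=main;", ";"))
    # OrderedDict([('protocol', 'https'), ('branch', 'main')])
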
@@ -428,7 +428,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
uri_decoded = list(decodeurl(ud.url))
uri_find_decoded = list(decodeurl(uri_find))
uri_replace_decoded = list(decodeurl(uri_replace))
- logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
+ logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
result_decoded = ['', '', '', '', '', {}]
for loc, i in enumerate(uri_find_decoded):
result_decoded[loc] = uri_decoded[loc]
@@ -474,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
result = encodeurl(result_decoded)
if result == ud.url:
return None
- logger.debug(2, "For url %s returning %s" % (ud.url, result))
+ logger.debug2("For url %s returning %s" % (ud.url, result))
return result
methods = []
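
The logging changes that dominate this patch follow one rule: logger.debug(1, msg) becomes logger.debug(msg) and logger.debug(2, msg) becomes logger.debug2(msg). The old numeric first argument was a BitBake-specific verbosity level that collided with the standard logging.Logger.debug(msg, *args) signature. A rough sketch of how a debug2 channel can be grafted onto a stock Python logger (the exact level value is an assumption here; BitBake's own BBLogger carries the real definition):

    import logging

    # Assumed level one notch below DEBUG, standing in for the
    # second, more verbose debug channel.
    DEBUG2 = logging.DEBUG - 1
    logging.addLevelName(DEBUG2, "DEBUG2")

    class DemoLogger(logging.Logger):
        def debug2(self, msg, *args, **kwargs):
            if self.isEnabledFor(DEBUG2):
                self._log(DEBUG2, msg, args, **kwargs)

    logging.setLoggerClass(DemoLogger)
    logging.basicConfig(level=DEBUG2)
    logger = logging.getLogger("BitBake.Fetcher.Demo")
    logger.debug2("For url %s returning %s", "srcuri", "mirroruri")
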
@@ -499,9 +499,9 @@ def fetcher_init(d):
# When to drop SCM head revisions controlled by user policy
srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
if srcrev_policy == "cache":
- logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+ logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
elif srcrev_policy == "clear":
- logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+ logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
revs.clear()
else:
raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
@@ -857,9 +857,9 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
cmd = 'export PSEUDO_DISABLED=1; ' + cmd
if workdir:
- logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+ logger.debug("Running '%s' in %s" % (cmd, workdir))
else:
- logger.debug(1, "Running %s", cmd)
+ logger.debug("Running %s", cmd)
success = False
error_message = ""
@@ -900,7 +900,7 @@ def check_network_access(d, info, url):
elif not trusted_network(d, url):
raise UntrustedUrl(url, info)
else:
- logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+ logger.debug("Fetcher accessed the network with the command %s" % info)
def build_mirroruris(origud, mirrors, ld):
uris = []
@@ -926,7 +926,7 @@ def build_mirroruris(origud, mirrors, ld):
continue
if not trusted_network(ld, newuri):
- logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
+ logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
continue
# Create a local copy of the mirrors minus the current line
@@ -939,8 +939,8 @@ def build_mirroruris(origud, mirrors, ld):
newud = FetchData(newuri, ld)
newud.setup_localpath(ld)
except bb.fetch2.BBFetchException as e:
- logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
- logger.debug(1, str(e))
+ logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+ logger.debug(str(e))
try:
# setup_localpath of file:// urls may fail, we should still see
# if mirrors of the url exist
@@ -1043,8 +1043,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
elif isinstance(e, NoChecksumError):
raise
else:
- logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
- logger.debug(1, str(e))
+ logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
+ logger.debug(str(e))
try:
ud.method.clean(ud, ld)
except UnboundLocalError:
@@ -1688,7 +1688,7 @@ class Fetch(object):
if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
done = True
elif m.try_premirror(ud, self.d):
- logger.debug(1, "Trying PREMIRRORS")
+ logger.debug("Trying PREMIRRORS")
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
done = m.try_mirrors(self, ud, self.d, mirrors)
if done:
@@ -1698,7 +1698,7 @@ class Fetch(object):
m.update_donestamp(ud, self.d)
except ChecksumError as e:
logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
- logger.debug(1, str(e))
+ logger.debug(str(e))
done = False
if premirroronly:
@@ -1710,7 +1710,7 @@ class Fetch(object):
try:
if not trusted_network(self.d, ud.url):
raise UntrustedUrl(ud.url)
- logger.debug(1, "Trying Upstream")
+ logger.debug("Trying Upstream")
m.download(ud, self.d)
if hasattr(m, "build_mirror_data"):
m.build_mirror_data(ud, self.d)
@@ -1725,19 +1725,19 @@ class Fetch(object):
except BBFetchException as e:
if isinstance(e, ChecksumError):
logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
- logger.debug(1, str(e))
+ logger.debug(str(e))
if os.path.exists(ud.localpath):
rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError):
raise
else:
logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
- logger.debug(1, str(e))
+ logger.debug(str(e))
firsterr = e
# Remove any incomplete fetch
if not verified_stamp:
m.clean(ud, self.d)
- logger.debug(1, "Trying MIRRORS")
+ logger.debug("Trying MIRRORS")
mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
done = m.try_mirrors(self, ud, self.d, mirrors)
@@ -1774,7 +1774,7 @@ class Fetch(object):
ud = self.ud[u]
ud.setup_localpath(self.d)
m = ud.method
- logger.debug(1, "Testing URL %s", u)
+ logger.debug("Testing URL %s", u)
# First try checking uri, u, from PREMIRRORS
mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
ret = m.try_mirrors(self, ud, self.d, mirrors, True)
diff --git a/poky/bitbake/lib/bb/fetch2/bzr.py b/poky/bitbake/lib/bb/fetch2/bzr.py
index 566ace9f0..fc558f50b 100644
--- a/poky/bitbake/lib/bb/fetch2/bzr.py
+++ b/poky/bitbake/lib/bb/fetch2/bzr.py
@@ -74,16 +74,16 @@ class Bzr(FetchMethod):
if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
bzrcmd = self._buildbzrcommand(ud, d, "update")
- logger.debug(1, "BZR Update %s", ud.url)
+ logger.debug("BZR Update %s", ud.url)
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
else:
bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- logger.debug(1, "BZR Checkout %s", ud.url)
+ logger.debug("BZR Checkout %s", ud.url)
bb.utils.mkdirhier(ud.pkgdir)
- logger.debug(1, "Running %s", bzrcmd)
+ logger.debug("Running %s", bzrcmd)
runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
scmdata = ud.parm.get("scmdata", "")
@@ -109,7 +109,7 @@ class Bzr(FetchMethod):
"""
Return the latest upstream revision number
"""
- logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
+ logger.debug2("BZR fetcher hitting network for %s", ud.url)
bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
diff --git a/poky/bitbake/lib/bb/fetch2/clearcase.py b/poky/bitbake/lib/bb/fetch2/clearcase.py
index 49d7ae1b0..1a9c86376 100644
--- a/poky/bitbake/lib/bb/fetch2/clearcase.py
+++ b/poky/bitbake/lib/bb/fetch2/clearcase.py
@@ -70,7 +70,7 @@ class ClearCase(FetchMethod):
return ud.type in ['ccrc']
def debug(self, msg):
- logger.debug(1, "ClearCase: %s", msg)
+ logger.debug("ClearCase: %s", msg)
def urldata_init(self, ud, d):
"""
diff --git a/poky/bitbake/lib/bb/fetch2/cvs.py b/poky/bitbake/lib/bb/fetch2/cvs.py
index 22abdef79..01de5ff4c 100644
--- a/poky/bitbake/lib/bb/fetch2/cvs.py
+++ b/poky/bitbake/lib/bb/fetch2/cvs.py
@@ -109,7 +109,7 @@ class Cvs(FetchMethod):
cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
# create module directory
- logger.debug(2, "Fetch: checking for module directory")
+ logger.debug2("Fetch: checking for module directory")
moddir = os.path.join(ud.pkgdir, localdir)
workdir = None
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
@@ -123,7 +123,7 @@ class Cvs(FetchMethod):
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
workdir = ud.pkgdir
- logger.debug(1, "Running %s", cvscmd)
+ logger.debug("Running %s", cvscmd)
bb.fetch2.check_network_access(d, cvscmd, ud.url)
cmd = cvscmd
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 490d57fbb..e3ba80a3f 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -220,7 +220,12 @@ class Git(FetchMethod):
ud.shallow = False
if ud.usehead:
- ud.unresolvedrev['default'] = 'HEAD'
+ # When usehead is set let's associate 'HEAD' with the unresolved
+ # rev of this repository. This will get resolved into a revision
+ # later. If an actual revision happens to have also been provided
+ # then this setting will be overridden.
+ for name in ud.names:
+ ud.unresolvedrev[name] = 'HEAD'
ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
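
Previously only the literal 'default' name was pointed at HEAD, so a git URL carrying several name= entries with usehead=1 left every other name unresolved. A small illustration of what the loop produces, with invented names (in the real FetchData, ud.names is derived from the URL's name= parameter):

    # Hypothetical stand-ins for the FetchData fields involved.
    names = ["machine", "meta"]
    unresolvedrev = {}

    usehead = True
    if usehead:
        for name in names:
            unresolvedrev[name] = 'HEAD'  # resolved to a commit later

    print(unresolvedrev)   # {'machine': 'HEAD', 'meta': 'HEAD'}
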
@@ -379,6 +384,35 @@ class Git(FetchMethod):
if missing_rev:
raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
+ if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud):
+ # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching
+ # of all LFS blobs needed at the srcrev.
+ #
+ # It would be nice to just do this inline here by running 'git-lfs fetch'
+ # on the bare clonedir, but that operation requires a working copy on some
+ # releases of Git LFS.
+ tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
+ try:
+ # Do the checkout. This implicitly involves a Git LFS fetch.
+ self.unpack(ud, tmpdir, d)
+
+ # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
+ # the bare clonedir.
+ #
+ # As this procedure is invoked repeatedly on incremental fetches as
+ # a recipe's SRCREV is bumped throughout its lifetime, this will
+ # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs
+ # corresponding to all the blobs reachable from the different revs
+ # fetched across time.
+ #
+ # Only do this if the unpack resulted in a .git/lfs directory being
+ # created; this only happens if at least one blob needed to be
+ # downloaded.
+ if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
+ runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir)
+ finally:
+ bb.utils.remove(tmpdir, recurse=True)
+
def build_mirror_data(self, ud, d):
if ud.shallow and ud.write_shallow_tarballs:
if not os.path.exists(ud.fullshallow):
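
The tar-over-a-pipe step ('tar -cf - lfs | tar -xf - -C <clonedir>') streams the temporary checkout's .git/lfs tree into the bare clone, merging into any lfs/ directory already present instead of replacing it wholesale. A minimal reproduction of that merge outside the fetcher (both paths invented):

    import subprocess

    src = "/tmp/lfs-workcopy/git/.git"   # hypothetical unpacked checkout
    dst = "/tmp/bare-clone"              # hypothetical bare clonedir

    # Stream the lfs/ subtree from src into dst; extraction merges
    # file-by-file rather than clobbering the existing tree.
    subprocess.run("tar -cf - lfs | tar -xf - -C %s" % dst,
                   shell=True, cwd=src, check=True)
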
@@ -474,7 +508,7 @@ class Git(FetchMethod):
if os.path.exists(destdir):
bb.utils.prunedir(destdir)
- need_lfs = ud.parm.get("lfs", "1") == "1"
+ need_lfs = self._need_lfs(ud)
if not need_lfs:
ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd
@@ -563,6 +597,9 @@ class Git(FetchMethod):
raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
return output.split()[0] != "0"
+ def _need_lfs(self, ud):
+ return ud.parm.get("lfs", "1") == "1"
+
def _contains_lfs(self, ud, d, wd):
"""
Check if the repository has 'lfs' (large file) content
@@ -573,8 +610,14 @@ class Git(FetchMethod):
else:
branchname = "master"
- cmd = "%s grep lfs origin/%s:.gitattributes | wc -l" % (
- ud.basecmd, ud.branches[ud.names[0]])
+ # The bare clonedir doesn't use the remote names; it has the branch immediately.
+ if wd == ud.clonedir:
+ refname = ud.branches[ud.names[0]]
+ else:
+ refname = "origin/%s" % ud.branches[ud.names[0]]
+
+ cmd = "%s grep lfs %s:.gitattributes | wc -l" % (
+ ud.basecmd, refname)
try:
output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
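
git grep lfs <ref>:.gitattributes inspects the committed .gitattributes on a ref without needing a worktree, which is why the ref spelling must match the clone layout: a bare mirror clone keeps branches under their plain names, while a working copy sees them as origin/<branch>. A hedged sketch of the same probe as a standalone helper (repository path and refs are placeholders):

    import subprocess

    def contains_lfs(repo_dir, refname):
        # git grep exits 0 with matches, 1 with none; any hit in
        # .gitattributes suggests LFS-tracked patterns.
        cmd = ["git", "grep", "lfs", "%s:.gitattributes" % refname]
        result = subprocess.run(cmd, cwd=repo_dir,
                                capture_output=True, text=True)
        return result.returncode == 0 and bool(result.stdout.strip())

    # contains_lfs("/tmp/bare-clone", "master") on a bare clone, or
    # contains_lfs("/tmp/workcopy", "origin/master") on a checkout.
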
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index d6e5c5c05..a4527bf36 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -78,7 +78,7 @@ class GitSM(Git):
module_hash = ""
if not module_hash:
- logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
+ logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
continue
submodules.append(m)
@@ -179,7 +179,7 @@ class GitSM(Git):
(ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
if len(need_update_list) > 0:
- logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
+ logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
return True
return False
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
index 8f503701e..063e13008 100644
--- a/poky/bitbake/lib/bb/fetch2/hg.py
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -150,7 +150,7 @@ class Hg(FetchMethod):
def download(self, ud, d):
"""Fetch url"""
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+ logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
# If the checkout doesn't exist and the mirror tarball does, extract it
if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
@@ -160,7 +160,7 @@ class Hg(FetchMethod):
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
# Found the source, check whether need pull
updatecmd = self._buildhgcommand(ud, d, "update")
- logger.debug(1, "Running %s", updatecmd)
+ logger.debug("Running %s", updatecmd)
try:
runfetchcmd(updatecmd, d, workdir=ud.moddir)
except bb.fetch2.FetchError:
@@ -168,7 +168,7 @@ class Hg(FetchMethod):
pullcmd = self._buildhgcommand(ud, d, "pull")
logger.info("Pulling " + ud.url)
# update sources there
- logger.debug(1, "Running %s", pullcmd)
+ logger.debug("Running %s", pullcmd)
bb.fetch2.check_network_access(d, pullcmd, ud.url)
runfetchcmd(pullcmd, d, workdir=ud.moddir)
try:
@@ -183,14 +183,14 @@ class Hg(FetchMethod):
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- logger.debug(1, "Running %s", fetchcmd)
+ logger.debug("Running %s", fetchcmd)
bb.fetch2.check_network_access(d, fetchcmd, ud.url)
runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)
# Even when we clone (fetch), we still need to update as hg's clone
# won't checkout the specified revision if its on a branch
updatecmd = self._buildhgcommand(ud, d, "update")
- logger.debug(1, "Running %s", updatecmd)
+ logger.debug("Running %s", updatecmd)
runfetchcmd(updatecmd, d, workdir=ud.moddir)
def clean(self, ud, d):
@@ -247,9 +247,9 @@ class Hg(FetchMethod):
if scmdata != "nokeep":
proto = ud.parm.get('protocol', 'http')
if not os.access(os.path.join(codir, '.hg'), os.R_OK):
- logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
+ logger.debug2("Unpack: creating new hg repository in '" + codir + "'")
runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
- logger.debug(2, "Unpack: updating source in '" + codir + "'")
+ logger.debug2("Unpack: updating source in '" + codir + "'")
if ud.user and ud.pswd:
runfetchcmd("%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull %s" % (ud.basecmd, ud.user, ud.pswd, proto, ud.moddir), d, workdir=codir)
else:
@@ -259,5 +259,5 @@ class Hg(FetchMethod):
else:
runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
else:
- logger.debug(2, "Unpack: extracting source to '" + codir + "'")
+ logger.debug2("Unpack: extracting source to '" + codir + "'")
runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
diff --git a/poky/bitbake/lib/bb/fetch2/local.py b/poky/bitbake/lib/bb/fetch2/local.py
index 25d4557db..e7d1c8c58 100644
--- a/poky/bitbake/lib/bb/fetch2/local.py
+++ b/poky/bitbake/lib/bb/fetch2/local.py
@@ -54,12 +54,12 @@ class Local(FetchMethod):
return [path]
filespath = d.getVar('FILESPATH')
if filespath:
- logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
+ logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
newpath, hist = bb.utils.which(filespath, path, history=True)
searched.extend(hist)
if not os.path.exists(newpath):
dldirfile = os.path.join(d.getVar("DL_DIR"), path)
- logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+ logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
bb.utils.mkdirhier(os.path.dirname(dldirfile))
searched.append(dldirfile)
return searched
diff --git a/poky/bitbake/lib/bb/fetch2/osc.py b/poky/bitbake/lib/bb/fetch2/osc.py
index 3a6cd2951..d9ce44390 100644
--- a/poky/bitbake/lib/bb/fetch2/osc.py
+++ b/poky/bitbake/lib/bb/fetch2/osc.py
@@ -84,13 +84,13 @@ class Osc(FetchMethod):
Fetch url
"""
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+ logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
oscupdatecmd = self._buildosccommand(ud, d, "update")
logger.info("Update "+ ud.url)
# update sources there
- logger.debug(1, "Running %s", oscupdatecmd)
+ logger.debug("Running %s", oscupdatecmd)
bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
else:
@@ -98,7 +98,7 @@ class Osc(FetchMethod):
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- logger.debug(1, "Running %s", oscfetchcmd)
+ logger.debug("Running %s", oscfetchcmd)
bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index 6f3c95b6c..e2a41a4a1 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -90,16 +90,16 @@ class Perforce(FetchMethod):
p4port = d.getVar('P4PORT')
if p4port:
- logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
+ logger.debug('Using recipe provided P4PORT: %s' % p4port)
ud.host = p4port
else:
- logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
+ logger.debug('Trying to use P4CONFIG to automatically set P4PORT...')
ud.usingp4config = True
p4cmd = '%s info | grep "Server address"' % ud.basecmd
bb.fetch2.check_network_access(d, p4cmd, ud.url)
ud.host = runfetchcmd(p4cmd, d, True)
ud.host = ud.host.split(': ')[1].strip()
- logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
+ logger.debug('Determined P4PORT to be: %s' % ud.host)
if not ud.host:
raise FetchError('Could not determine P4PORT from P4CONFIG')
@@ -119,6 +119,7 @@ class Perforce(FetchMethod):
cleanedpath = ud.path.replace('/...', '').replace('/', '.')
cleanedhost = ud.host.replace(':', '.')
+ cleanedmodule = ""
# Merge the path and module into the final depot location
if ud.module:
if ud.module.find('/') == 0:
@@ -133,7 +134,7 @@ class Perforce(FetchMethod):
ud.setup_revisions(d)
- ud.localfile = d.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision))
+ ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision))
def _buildp4command(self, ud, d, command, depot_filename=None):
"""
@@ -207,7 +208,7 @@ class Perforce(FetchMethod):
for filename in p4fileslist:
item = filename.split(' - ')
lastaction = item[1].split()
- logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
+ logger.debug('File: %s Last Action: %s' % (item[0], lastaction[0]))
if lastaction[0] == 'delete':
continue
filelist.append(item[0])
@@ -254,7 +255,7 @@ class Perforce(FetchMethod):
raise FetchError('Could not determine the latest perforce changelist')
tipcset = tip.split(' ')[1]
- logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
+ logger.debug('p4 tip found to be changelist %s' % tipcset)
return tipcset
def sortable_revision(self, ud, d, name):
diff --git a/poky/bitbake/lib/bb/fetch2/repo.py b/poky/bitbake/lib/bb/fetch2/repo.py
index 2bdbbd409..fa4cb8149 100644
--- a/poky/bitbake/lib/bb/fetch2/repo.py
+++ b/poky/bitbake/lib/bb/fetch2/repo.py
@@ -47,7 +47,7 @@ class Repo(FetchMethod):
"""Fetch url"""
if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
- logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+ logger.debug("%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
return
repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
diff --git a/poky/bitbake/lib/bb/fetch2/svn.py b/poky/bitbake/lib/bb/fetch2/svn.py
index 971a5add4..8856ef1c6 100644
--- a/poky/bitbake/lib/bb/fetch2/svn.py
+++ b/poky/bitbake/lib/bb/fetch2/svn.py
@@ -116,7 +116,7 @@ class Svn(FetchMethod):
def download(self, ud, d):
"""Fetch url"""
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+ logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
lf = bb.utils.lockfile(ud.svnlock)
@@ -129,7 +129,7 @@ class Svn(FetchMethod):
runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
except FetchError:
pass
- logger.debug(1, "Running %s", svncmd)
+ logger.debug("Running %s", svncmd)
bb.fetch2.check_network_access(d, svncmd, ud.url)
runfetchcmd(svncmd, d, workdir=ud.moddir)
else:
@@ -137,7 +137,7 @@ class Svn(FetchMethod):
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(ud.pkgdir)
- logger.debug(1, "Running %s", svncmd)
+ logger.debug("Running %s", svncmd)
bb.fetch2.check_network_access(d, svncmd, ud.url)
runfetchcmd(svncmd, d, workdir=ud.pkgdir)
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index e6d9f528d..78a49676f 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -52,6 +52,12 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
class Wget(FetchMethod):
+
+ # CDNs like CloudFlare may do a 'browser integrity test' which can fail
+ # with the standard wget/urllib User-Agent, so pretend to be a modern
+ # browser.
+ user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
+
"""Class to fetch urls via 'wget'"""
def supports(self, ud, d):
"""
@@ -82,7 +88,7 @@ class Wget(FetchMethod):
progresshandler = WgetProgressHandler(d)
- logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+ logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
bb.fetch2.check_network_access(d, command, ud.url)
runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)
@@ -297,7 +303,7 @@ class Wget(FetchMethod):
# Some servers (FusionForge, as used on Alioth) require that the
# optional Accept header is set.
r.add_header("Accept", "*/*")
- r.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12")
+ r.add_header("User-Agent", self.user_agent)
def add_basic_auth(login_str, request):
'''Adds Basic auth to http request, pass in login:password as string'''
import base64
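
Hoisting the browser-style User-Agent into the single Wget.user_agent attribute keeps the wget command line and the urllib-based checkstatus() path serving the same string; before this, the dated Firefox 3.6 string was duplicated in both places. A hedged sketch of the urllib side, with a placeholder URL:

    import urllib.request

    user_agent = ("Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) "
                  "Gecko/20100101 Firefox/84.0")

    r = urllib.request.Request("https://downloads.example.com/pkg.tar.gz")
    # Some servers require an explicit Accept header, and CDN
    # 'browser integrity' checks may reject urllib's default agent.
    r.add_header("Accept", "*/*")
    r.add_header("User-Agent", user_agent)
    # urllib.request.urlopen(r) would then perform the actual probe.
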
@@ -320,11 +326,11 @@ class Wget(FetchMethod):
pass
except urllib.error.URLError as e:
if try_again:
- logger.debug(2, "checkstatus: trying again")
+ logger.debug2("checkstatus: trying again")
return self.checkstatus(fetch, ud, d, False)
else:
# debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+ logger.debug2("checkstatus() urlopen failed: %s" % e)
return False
return True
@@ -401,9 +407,8 @@ class Wget(FetchMethod):
"""
f = tempfile.NamedTemporaryFile()
with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
- agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
fetchcmd = self.basecmd
- fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+ fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'"
try:
self._runwget(ud, d, fetchcmd, True, workdir=workdir)
fetchresult = f.read()