Diffstat (limited to 'poky/bitbake/lib/bb/fetch2')
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py    28
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py         22
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py      263
-rw-r--r--  poky/bitbake/lib/bb/fetch2/hg.py           2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/npm.py          2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py        77
6 files changed, 187 insertions(+), 207 deletions(-)
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 709372e16..8fecc809d 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -524,7 +524,7 @@ def fetcher_parse_save():
def fetcher_parse_done():
_checksum_cache.save_merge()
-def fetcher_compare_revisions():
+def fetcher_compare_revisions(d):
"""
Compare the revisions in the persistant cache with current values and
return true/false on whether they've changed.
@@ -777,7 +777,8 @@ def get_srcrev(d, method_name='sortable_revision'):
#
format = d.getVar('SRCREV_FORMAT')
if not format:
- raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+ raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
+ "The SCMs are:\n%s" % '\n'.join(scms))
name_to_rev = {}
seenautoinc = False
@@ -858,7 +859,10 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
# Disable pseudo as it may affect ssh, potentially causing it to hang.
cmd = 'export PSEUDO_DISABLED=1; ' + cmd
- logger.debug(1, "Running %s", cmd)
+ if workdir:
+ logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
+ else:
+ logger.debug(1, "Running %s", cmd)
success = False
error_message = ""
@@ -894,7 +898,7 @@ def check_network_access(d, info, url):
log remote network access, and error if BB_NO_NETWORK is set or the given
URI is untrusted
"""
- if d.getVar("BB_NO_NETWORK") == "1":
+ if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
raise NetworkAccess(url, info)
elif not trusted_network(d, url):
raise UntrustedUrl(url, info)
@@ -1027,7 +1031,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
raise
except IOError as e:
- if e.errno in [os.errno.ESTALE]:
+ if e.errno in [errno.ESTALE]:
logger.warning("Stale Error Observed %s." % ud.url)
return False
raise
@@ -1094,7 +1098,7 @@ def trusted_network(d, url):
BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
Note: modifies SRC_URI & mirrors.
"""
- if d.getVar('BB_NO_NETWORK') == "1":
+ if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
return True
pkgname = d.expand(d.getVar('PN', False))
@@ -1403,7 +1407,7 @@ class FetchMethod(object):
Fetch urls
Assumes localpath was called first
"""
- raise NoMethodError(url)
+ raise NoMethodError(urldata.url)
def unpack(self, urldata, rootdir, data):
iterate = False
@@ -1547,7 +1551,7 @@ class FetchMethod(object):
Check the status of a URL
Assumes localpath was called first
"""
- logger.info("URL %s could not be checked for status since no method exists.", url)
+ logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
return True
def latest_revision(self, ud, d, name):
@@ -1555,7 +1559,7 @@ class FetchMethod(object):
Look in the cache for the latest revision, if not present ask the SCM.
"""
if not hasattr(self, "_latest_revision"):
- raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
+ raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
key = self.generate_revision_key(ud, d, name)
@@ -1638,7 +1642,7 @@ class Fetch(object):
urls = self.urls
network = self.d.getVar("BB_NO_NETWORK")
- premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
+ premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
for u in urls:
ud = self.ud[u]
@@ -1716,7 +1720,7 @@ class Fetch(object):
update_stamp(ud, self.d)
except IOError as e:
- if e.errno in [os.errno.ESTALE]:
+ if e.errno in [errno.ESTALE]:
logger.error("Stale Error Observed %s." % u)
raise ChecksumError("Stale Error Detected")
@@ -1786,7 +1790,7 @@ class Fetch(object):
for url in urls:
if url not in self.ud:
- self.ud[url] = FetchData(url, d)
+ self.ud[url] = FetchData(url, self.d)
ud = self.ud[url]
ud.setup_localpath(self.d)
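
[Note: several hunks above replace exact '== "1"' string comparisons on BB_NO_NETWORK and BB_FETCH_PREMIRRORONLY with bb.utils.to_boolean(). A minimal standalone sketch of the semantics this relies on, assuming the helper mirrors the one in bitbake's bb/utils.py:

    def to_boolean(string, default=None):
        # Interpret common boolean spellings the way bb.utils.to_boolean does.
        if not string:
            return default
        normalized = string.lower()
        if normalized in ("y", "yes", "1", "true"):
            return True
        if normalized in ("n", "no", "0", "false"):
            return False
        raise ValueError("Invalid value for to_boolean: %s" % string)

    # With this helper, BB_NO_NETWORK = "true" or "yes" now disables network
    # access; the old '== "1"' comparison silently treated those as false.
]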
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 59a2ee8f8..8185bf4db 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -199,7 +199,7 @@ class Git(FetchMethod):
depth_default = 1
ud.shallow_depths = collections.defaultdict(lambda: depth_default)
- revs_default = d.getVar("BB_GIT_SHALLOW_REVS", True)
+ revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
ud.shallow_revs = []
ud.branches = {}
for pos, name in enumerate(ud.names):
@@ -318,7 +318,7 @@ class Git(FetchMethod):
def try_premirror(self, ud, d):
# If we don't do this, updating an existing checkout with only premirrors
# is not possible
- if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+ if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
return True
if os.path.exists(ud.clonedir):
return False
@@ -522,9 +522,17 @@ class Git(FetchMethod):
def clean(self, ud, d):
""" clean the git directory """
- bb.utils.remove(ud.localpath, True)
- bb.utils.remove(ud.fullmirror)
- bb.utils.remove(ud.fullmirror + ".done")
+ to_remove = [ud.localpath, ud.fullmirror, ud.fullmirror + ".done"]
+ # The localpath is a symlink to clonedir when it is cloned from a
+ # mirror, so remove both of them.
+ if os.path.islink(ud.localpath):
+ clonedir = os.path.realpath(ud.localpath)
+ to_remove.append(clonedir)
+
+ for r in to_remove:
+ if os.path.exists(r):
+ bb.note('Removing %s' % r)
+ bb.utils.remove(r, True)
def supports_srcrev(self):
return True
@@ -615,7 +623,7 @@ class Git(FetchMethod):
"""
pupver = ('', '')
- tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)")
+ tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
try:
output = self._lsremote(ud, d, "refs/tags/*")
except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
@@ -630,7 +638,7 @@ class Git(FetchMethod):
tag_head = line.split("/")[-1]
# Ignore non-released branches
- m = re.search("(alpha|beta|rc|final)+", tag_head)
+ m = re.search(r"(alpha|beta|rc|final)+", tag_head)
if m:
continue
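
[Note: the regex literals in git.py gain r-prefixes so newer Python versions do not warn about invalid escape sequences; matching behaviour is unchanged. A quick illustration of the default UPSTREAM_CHECK_GITTAGREGEX pattern, with hypothetical tag names:

    import re

    tagregex = re.compile(r"(?P<pver>([0-9][\.|_]?)+)")
    for tag_head in ("v2.4.1", "1_0_3", "2.0-rc1"):
        m = tagregex.search(tag_head)
        if m:
            print(tag_head, "->", m.group("pver"))
    # v2.4.1  -> 2.4.1
    # 1_0_3   -> 1_0_3
    # 2.0-rc1 -> 2.0
]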
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index 35729dbc0..32389130b 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -45,60 +45,97 @@ class GitSM(Git):
"""
return ud.type in ['gitsm']
- @staticmethod
- def parse_gitmodules(gitmodules):
- modules = {}
- module = ""
- for line in gitmodules.splitlines():
- if line.startswith('[submodule'):
- module = line.split('"')[1]
- modules[module] = {}
- elif module and line.strip().startswith('path'):
- path = line.split('=')[1].strip()
- modules[module]['path'] = path
- elif module and line.strip().startswith('url'):
- url = line.split('=')[1].strip()
- modules[module]['url'] = url
- return modules
-
- def update_submodules(self, ud, d):
+ def process_submodules(self, ud, workdir, function, d):
+ """
+ Iterate over all of the submodules in this repository and execute
+ the 'function' for each of them.
+ """
+
submodules = []
paths = {}
+ revision = {}
uris = {}
- local_paths = {}
-
+ subrevision = {}
+
+ def parse_gitmodules(gitmodules):
+ modules = {}
+ module = ""
+ for line in gitmodules.splitlines():
+ if line.startswith('[submodule'):
+ module = line.split('"')[1]
+ modules[module] = {}
+ elif module and line.strip().startswith('path'):
+ path = line.split('=')[1].strip()
+ modules[module]['path'] = path
+ elif module and line.strip().startswith('url'):
+ url = line.split('=')[1].strip()
+ modules[module]['url'] = url
+ return modules
+
+ # Collect the defined submodules, and their attributes
for name in ud.names:
try:
- gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=ud.clonedir)
+ gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir)
except:
# No submodules to update
continue
- for m, md in self.parse_gitmodules(gitmodules).items():
+ for m, md in parse_gitmodules(gitmodules).items():
+ try:
+ module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir)
+ except:
+ # If the command fails, we don't have a valid file to check. If it doesn't
+ # fail -- it still might be a failure, see next check...
+ module_hash = ""
+
+ if not module_hash:
+ logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
+ continue
+
submodules.append(m)
paths[m] = md['path']
+ revision[m] = ud.revisions[name]
uris[m] = md['url']
+ subrevision[m] = module_hash.split()[2]
+
+ # Convert relative to absolute uri based on parent uri
if uris[m].startswith('..'):
newud = copy.copy(ud)
- newud.path = os.path.realpath(os.path.join(newud.path, md['url']))
+ newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
uris[m] = Git._get_repo_url(self, newud)
for module in submodules:
- module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], paths[module]), d, quiet=True, workdir=ud.clonedir)
- module_hash = module_hash.split()[2]
+ # Translate the module url into a SRC_URI
+
+ if "://" in uris[module]:
+ # Properly formated URL already
+ proto = uris[module].split(':', 1)[0]
+ url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
+ else:
+ if ":" in uris[module]:
+ # Most likely an SSH style reference
+ proto = "ssh"
+ if ":/" in uris[module]:
+ # Absolute reference, easy to convert..
+ url = "gitsm://" + uris[module].replace(':/', '/', 1)
+ else:
+ # Relative reference, no way to know if this is right!
+ logger.warning("Submodule included by %s refers to relative ssh reference %s. References may fail if not absolute." % (ud.url, uris[module]))
+ url = "gitsm://" + uris[module].replace(':', '/', 1)
+ else:
+ # This has to be a file reference
+ proto = "file"
+ url = "gitsm://" + uris[module]
- # Build new SRC_URI
- proto = uris[module].split(':', 1)[0]
- url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
url += ';protocol=%s' % proto
url += ";name=%s" % module
- url += ";bareclone=1;nocheckout=1;nobranch=1"
+ url += ";subpath=%s" % paths[module]
ld = d.createCopy()
# Not necessary to set SRC_URI, since we're passing the URI to
# Fetch.
#ld.setVar('SRC_URI', url)
- ld.setVar('SRCREV_%s' % module, module_hash)
+ ld.setVar('SRCREV_%s' % module, subrevision[module])
# Workaround for issues with SRCPV/SRCREV_FORMAT errors
# error refer to 'multiple' repositories. Only the repository
@@ -106,145 +143,83 @@ class GitSM(Git):
ld.setVar('SRCPV', d.getVar('SRCPV'))
ld.setVar('SRCREV_FORMAT', module)
- newfetch = Fetch([url], ld, cache=False)
- newfetch.download()
- local_paths[module] = newfetch.localpath(url)
-
- # Correct the submodule references to the local download version...
- runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.clonedir)
-
- symlink_path = os.path.join(ud.clonedir, 'modules', paths[module])
- if not os.path.exists(symlink_path):
- try:
- os.makedirs(os.path.dirname(symlink_path), exist_ok=True)
- except OSError:
- pass
- os.symlink(local_paths[module], symlink_path)
+ function(ud, url, module, paths[module], ld)
- return True
+ return submodules != []
def need_update(self, ud, d):
- main_repo_needs_update = Git.need_update(self, ud, d)
-
- # First check that the main repository has enough history fetched. If it doesn't, then we don't
- # even have the .gitmodules and gitlinks for the submodules to attempt asking whether the
- # submodules' histories are recent enough.
- if main_repo_needs_update:
+ if Git.need_update(self, ud, d):
return True
- # Now check that the submodule histories are new enough. The git-submodule command doesn't have
- # any clean interface for doing this aside from just attempting the checkout (with network
- # fetched disabled).
- return not self.update_submodules(ud, d)
+ try:
+ # Check for the nugget dropped by the download operation
+ known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
+ (ud.basecmd), d, workdir=ud.clonedir)
- def download(self, ud, d):
- Git.download(self, ud, d)
+ if ud.revisions[ud.names[0]] not in known_srcrevs.split():
+ return True
+ except bb.fetch2.FetchError:
+ # No srcrev nuggets, so this is new and needs to be updated
+ return True
- if not ud.shallow or ud.localpath != ud.fullshallow:
- self.update_submodules(ud, d)
+ return False
- def copy_submodules(self, submodules, ud, destdir, d):
- if ud.bareclone:
- repo_conf = destdir
- else:
- repo_conf = os.path.join(destdir, '.git')
+ def download(self, ud, d):
+ def download_submodule(ud, url, module, modpath, d):
+ url += ";bareclone=1;nobranch=1"
- if submodules and not os.path.exists(os.path.join(repo_conf, 'modules')):
- os.mkdir(os.path.join(repo_conf, 'modules'))
+ # Is the following still needed?
+ #url += ";nocheckout=1"
- for module, md in submodules.items():
- srcpath = os.path.join(ud.clonedir, 'modules', md['path'])
- modpath = os.path.join(repo_conf, 'modules', md['path'])
+ try:
+ newfetch = Fetch([url], d, cache=False)
+ newfetch.download()
+ # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
+ runfetchcmd("%s config --add bitbake.srcrev %s" % \
+ (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
+ except Exception as e:
+ logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
+ raise
- if os.path.exists(srcpath):
- if os.path.exists(os.path.join(srcpath, '.git')):
- srcpath = os.path.join(srcpath, '.git')
+ Git.download(self, ud, d)
+ self.process_submodules(ud, ud.clonedir, download_submodule, d)
- target = modpath
- if os.path.exists(modpath):
- target = os.path.dirname(modpath)
+ def unpack(self, ud, destdir, d):
+ def unpack_submodules(ud, url, module, modpath, d):
+ url += ";bareclone=1;nobranch=1"
- os.makedirs(os.path.dirname(target), exist_ok=True)
- runfetchcmd("cp -fpLR %s %s" % (srcpath, target), d)
- elif os.path.exists(modpath):
- # Module already exists, likely unpacked from a shallow mirror clone
- pass
+ # Figure out where we clone over the bare submodules...
+ if ud.bareclone:
+ repo_conf = ud.destdir
else:
- # This is fatal, as we do NOT want git-submodule to hit the network
- raise bb.fetch2.FetchError('Submodule %s does not exist in %s or %s.' % (module, srcpath, modpath))
-
- def clone_shallow_local(self, ud, dest, d):
- super(GitSM, self).clone_shallow_local(ud, dest, d)
-
- # Copy over the submodules' fetched histories too.
- repo_conf = os.path.join(dest, '.git')
+ repo_conf = os.path.join(ud.destdir, '.git')
- submodules = []
- for name in ud.names:
try:
- gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revision), d, quiet=True, workdir=dest)
- except:
- # No submodules to update
- continue
+ newfetch = Fetch([url], d, cache=False)
+ newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', modpath)))
+ except Exception as e:
+ logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
+ raise
- submodules = self.parse_gitmodules(gitmodules)
- self.copy_submodules(submodules, ud, dest, d)
+ local_path = newfetch.localpath(url)
- def unpack(self, ud, destdir, d):
- Git.unpack(self, ud, destdir, d)
+ # Correct the submodule references to the local download version...
+ runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_path}, d, workdir=ud.destdir)
- # Copy over the submodules' fetched histories too.
- if ud.bareclone:
- repo_conf = ud.destdir
- else:
- repo_conf = os.path.join(ud.destdir, '.git')
+ if ud.shallow:
+ runfetchcmd("%(basecmd)s config submodule.%(module)s.shallow true" % {'basecmd': ud.basecmd, 'module': module}, d, workdir=ud.destdir)
- update_submodules = False
- paths = {}
- uris = {}
- local_paths = {}
- for name in ud.names:
+ # Ensure the submodule repository is NOT set to bare, since we're checking it out...
try:
- gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+ runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', modpath))
except:
- # No submodules to update
- continue
+ logger.error("Unable to set git config core.bare to false for %s" % os.path.join(repo_conf, 'modules', modpath))
+ raise
- submodules = self.parse_gitmodules(gitmodules)
- self.copy_submodules(submodules, ud, ud.destdir, d)
-
- submodules_queue = [(module, os.path.join(repo_conf, 'modules', md['path'])) for module, md in submodules.items()]
- while len(submodules_queue) != 0:
- module, modpath = submodules_queue.pop()
-
- # add submodule children recursively
- try:
- gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=modpath)
- for m, md in self.parse_gitmodules(gitmodules).items():
- submodules_queue.append([m, os.path.join(modpath, 'modules', md['path'])])
- except:
- # no children
- pass
-
-
- # There are submodules to update
- update_submodules = True
-
- # Determine (from the submodule) the correct url to reference
- try:
- output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
- except bb.fetch2.FetchError as e:
- # No remote url defined in this submodule
- continue
-
- local_paths[module] = output
-
- # Setup the local URL properly (like git submodule init or sync would do...)
- runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.destdir)
+ Git.unpack(self, ud, destdir, d)
- # Ensure the submodule repository is NOT set to bare, since we're checking it out...
- runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)
+ ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)
- if update_submodules:
+ if not ud.bareclone and ret:
# Run submodule update, this sets up the directories -- without touching the config
runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
index 936d04311..5a2985e16 100644
--- a/poky/bitbake/lib/bb/fetch2/hg.py
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -99,7 +99,7 @@ class Hg(FetchMethod):
def try_premirror(self, ud, d):
# If we don't do this, updating an existing checkout with only premirrors
# is not possible
- if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+ if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
return True
if os.path.exists(ud.moddir):
return False
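
[Note: the hg.py change matches the git.py one above: try_premirror() used to return True whenever BB_FETCH_PREMIRRORONLY was set at all, even to "0". A small sketch of the behavioural difference; old_check/new_check are illustrative stand-ins, and new_check only approximates bb.utils.to_boolean() with a None default:

    def old_check(value):
        return value is not None

    def new_check(value):
        return value is not None and value.lower() in ("y", "yes", "1", "true")

    for v in (None, "0", "1"):
        print(repr(v), old_check(v), new_check(v))
    # None False False
    # '0'  True  False   <- old behaviour forced premirror-only mode
    # '1'  True  True
]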
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
index 408dfc3d0..65bf5a364 100644
--- a/poky/bitbake/lib/bb/fetch2/npm.py
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -226,7 +226,7 @@ class Npm(FetchMethod):
self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
return
outputurl = "invalid"
- if ('resolved' not in data) or (not data['resolved'].startswith('http')):
+ if ('resolved' not in data) or (not data['resolved'].startswith('http://') and not data['resolved'].startswith('https://')):
# will be the case for ${PN}
fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 8f505b6de..3bb3e3bb0 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -33,11 +33,14 @@ import logging
import errno
import bb
import bb.progress
+import socket
+import http.client
import urllib.request, urllib.parse, urllib.error
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
+from bb.fetch2 import FetchConnectionCache
from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
@@ -132,10 +135,6 @@ class Wget(FetchMethod):
return True
def checkstatus(self, fetch, ud, d, try_again=True):
- import urllib.request, urllib.error, urllib.parse, socket, http.client
- from urllib.response import addinfourl
- from bb.fetch2 import FetchConnectionCache
-
class HTTPConnectionCache(http.client.HTTPConnection):
if fetch.connection_cache:
def connect(self):
@@ -168,7 +167,7 @@ class Wget(FetchMethod):
"""
host = req.host
if not host:
- raise urlllib2.URLError('no host given')
+ raise urllib.error.URLError('no host given')
h = http_class(host, timeout=req.timeout) # will parse host:port
h.set_debuglevel(self._debuglevel)
@@ -185,7 +184,7 @@ class Wget(FetchMethod):
# request.
# Don't close connection when connection_cache is enabled,
- if fetch.connection_cache is None:
+ if fetch.connection_cache is None:
headers["Connection"] = "close"
else:
headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
@@ -252,7 +251,7 @@ class Wget(FetchMethod):
pass
closed = False
- resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
+ resp = urllib.response.addinfourl(fp_dummy(), r.msg, req.get_full_url())
resp.code = r.status
resp.msg = r.reason
@@ -271,17 +270,16 @@ class Wget(FetchMethod):
fp.read()
fp.close()
- newheaders = dict((k,v) for k,v in list(req.headers.items())
+ newheaders = dict((k, v) for k, v in list(req.headers.items())
if k.lower() not in ("content-length", "content-type"))
return self.parent.open(urllib.request.Request(req.get_full_url(),
headers=newheaders,
origin_req_host=req.origin_req_host,
unverifiable=True))
- """
- Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
- Forbidden when they actually mean 405 Method Not Allowed.
- """
+
+ # Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
+ # Forbidden when they actually mean 405 Method Not Allowed.
http_error_403 = http_error_405
@@ -292,15 +290,15 @@ class Wget(FetchMethod):
"""
def redirect_request(self, req, fp, code, msg, headers, newurl):
newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
- newreq.get_method = lambda: req.get_method()
+ newreq.get_method = req.get_method
return newreq
exported_proxies = export_proxies(d)
handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
- if export_proxies:
+ if exported_proxies:
handlers.append(urllib.request.ProxyHandler())
handlers.append(CacheHTTPHandler())
- # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
+ # Since Python 2.7.9 ssl cert validation is enabled by default
# see PEP-0476, this causes verification errors on some https servers
# so disable by default.
import ssl
@@ -319,19 +317,19 @@ class Wget(FetchMethod):
'''Adds Basic auth to http request, pass in login:password as string'''
import base64
encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
- authheader = "Basic %s" % encodeuser
+ authheader = "Basic %s" % encodeuser
r.add_header("Authorization", authheader)
- if ud.user:
- add_basic_auth(ud.user, r)
+ if ud.user and ud.pswd:
+ add_basic_auth(ud.user + ':' + ud.pswd, r)
try:
- import netrc, urllib.parse
+ import netrc
n = netrc.netrc()
login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
add_basic_auth("%s:%s" % (login, password), r)
except (TypeError, ImportError, IOError, netrc.NetrcParseError):
- pass
+ pass
with opener.open(r) as response:
pass
@@ -396,18 +394,14 @@ class Wget(FetchMethod):
(oldpn, oldpv, oldsuffix) = old
(newpn, newpv, newsuffix) = new
- """
- Check for a new suffix type that we have never heard of before
- """
- if (newsuffix):
+ # Check for a new suffix type that we have never heard of before
+ if newsuffix:
m = self.suffix_regex_comp.search(newsuffix)
if not m:
bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
return False
- """
- Not our package so ignore it
- """
+ # Not our package so ignore it
if oldpn != newpn:
return False
@@ -473,15 +467,14 @@ class Wget(FetchMethod):
return ""
- def _check_latest_version_by_dir(self, dirver, package, package_regex,
- current_version, ud, d):
+ def _check_latest_version_by_dir(self, dirver, package, package_regex, current_version, ud, d):
"""
- Scan every directory in order to get upstream version.
+ Scan every directory in order to get upstream version.
"""
version_dir = ['', '', '']
version = ['', '', '']
- dirver_regex = re.compile("(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
+ dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
s = dirver_regex.search(dirver)
if s:
version_dir[1] = s.group('ver')
@@ -541,26 +534,26 @@ class Wget(FetchMethod):
gst-fluendo-mp3
"""
# match most patterns which uses "-" as separator to version digits
- pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
+ pn_prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
# a loose pattern such as for unzip552.tar.gz
- pn_prefix2 = "[a-zA-Z]+"
+ pn_prefix2 = r"[a-zA-Z]+"
# a loose pattern such as for 80325-quicky-0.4.tar.gz
- pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
+ pn_prefix3 = r"[0-9]+[-]?[a-zA-Z]+"
# Save the Package Name (pn) Regex for use later
- pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
+ pn_regex = r"(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
# match version
- pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
+ pver_regex = r"(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
# match arch
parch_regex = "-source|_all_"
# src.rpm extension was added only for rpm package. Can be removed if the rpm
# packaged will always be considered as having to be manually upgraded
- psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+ psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
# match name, version and archive type of a package
- package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
+ package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
% (pn_regex, pver_regex, parch_regex, psuffix_regex))
self.suffix_regex_comp = re.compile(psuffix_regex)
@@ -572,7 +565,7 @@ class Wget(FetchMethod):
version = self._parse_path(package_regex_comp, package)
if version:
package_custom_regex_comp = re.compile(
- "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
+ r"(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
(re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
else:
package_custom_regex_comp = None
@@ -589,7 +582,7 @@ class Wget(FetchMethod):
current_version = ['', d.getVar('PV'), '']
"""possible to have no version in pkg name, such as spectrum-fw"""
- if not re.search("\d+", package):
+ if not re.search(r"\d+", package):
current_version[1] = re.sub('_', '.', current_version[1])
current_version[1] = re.sub('-', '.', current_version[1])
return (current_version[1], '')
@@ -607,13 +600,13 @@ class Wget(FetchMethod):
# search for version matches on folders inside the path, like:
# "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
- dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
+ dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
m = dirver_regex.search(path)
if m:
pn = d.getVar('PN')
dirver = m.group('dirver')
- dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
+ dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
if not dirver_pn_regex.search(dirver):
return (self._check_latest_version_by_dir(dirver,
package, package_regex, current_version, ud, d), '')
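
[Note: much of the wget.py hunk is the same r-prefix regex cleanup plus comment normalization, alongside real fixes: the urlllib2 typo, and basic auth now only added when both ud.user and ud.pswd are set. As an example of one converted pattern, the directory-version regex used when scanning upstream paths; the sample path is hypothetical:

    import re

    dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
    m = dirver_regex.search("sources/pkgname/5.7/")
    print(m.group("dirver"))  # 5.7
]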