Diffstat (limited to 'poky/bitbake/lib/bb/fetch2')

-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py  | 39
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py       | 28
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py     | 66
-rw-r--r--  poky/bitbake/lib/bb/fetch2/local.py     | 15
-rw-r--r--  poky/bitbake/lib/bb/fetch2/osc.py       |  3
-rw-r--r--  poky/bitbake/lib/bb/fetch2/perforce.py  | 81
-rw-r--r--  poky/bitbake/lib/bb/fetch2/ssh.py       |  7
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py      |  5

8 files changed, 192 insertions(+), 52 deletions(-)
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index eb112f069..551bfb70f 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -1195,8 +1195,6 @@ def get_checksum_file_list(d):
paths = ud.method.localpaths(ud, d)
for f in paths:
pth = ud.decodedurl
- if '*' in pth:
- f = os.path.join(os.path.abspath(f), pth)
if f.startswith(dl_dir):
# The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
if os.path.exists(f):
@@ -1365,9 +1363,6 @@ class FetchMethod(object):
# We cannot compute checksums for directories
if os.path.isdir(urldata.localpath):
return False
- if urldata.localpath.find("*") != -1:
- return False
-
return True
def recommends_checksum(self, urldata):
@@ -1430,11 +1425,6 @@ class FetchMethod(object):
iterate = False
file = urldata.localpath
- # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
- # but it must be corrected back for local files copying
- if urldata.basename == '*' and file.endswith('/.'):
- file = '%s/%s' % (file.rstrip('/.'), urldata.path)
-
try:
unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
except ValueError as exc:
@@ -1530,7 +1520,7 @@ class FetchMethod(object):
if urlpath.find("/") != -1:
destdir = urlpath.rsplit("/", 1)[0] + '/'
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
- cmd = 'cp -fpPRH %s %s' % (file, destdir)
+ cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
if not cmd:
return
@@ -1613,10 +1603,15 @@ class FetchMethod(object):
"""
if os.path.exists(ud.localpath):
return True
- if ud.localpath.find("*") != -1:
- return True
return False
+ def implicit_urldata(self, ud, d):
+ """
+ Get a list of FetchData objects for any implicit URLs that will also
+ be downloaded when we fetch the given URL.
+ """
+ return []
+
class Fetch(object):
def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
if localonly and cache:
@@ -1842,6 +1837,24 @@ class Fetch(object):
if ud.lockfile:
bb.utils.unlockfile(lf)
+ def expanded_urldata(self, urls=None):
+ """
+ Get an expanded list of FetchData objects covering both the given
+ URLs and any additional implicit URLs that are added automatically by
+ the appropriate FetchMethod.
+ """
+
+ if not urls:
+ urls = self.urls
+
+ urldata = []
+ for url in urls:
+ ud = self.ud[url]
+ urldata.append(ud)
+ urldata += ud.method.implicit_urldata(ud, self.d)
+
+ return urldata
+
class FetchConnectionCache(object):
"""
A class which represents a container for socket connections.
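
The new implicit_urldata()/expanded_urldata() pair lets callers enumerate every artifact a fetch would cover, including URLs a FetchMethod adds on its own (git submodules being the motivating case below). A minimal sketch of how a caller might use it, assuming a populated BitBake datastore `d`; the helper name is hypothetical:

    import bb.fetch2

    def list_fetch_artifacts(src_uri, d):
        # Hypothetical helper: walk every FetchData covered by SRC_URI,
        # including implicit URLs such as git submodules.
        fetcher = bb.fetch2.Fetch(src_uri.split(), d)
        for ud in fetcher.expanded_urldata():
            print(ud.url, getattr(ud, 'localpath', None))
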
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 5b3793a70..07064c694 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -236,7 +236,7 @@ class Git(FetchMethod):
ud.unresolvedrev[name] = ud.revisions[name]
ud.revisions[name] = self.latest_revision(ud, d, name)
- gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
+ gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_'))
if gitsrcname.startswith('.'):
gitsrcname = gitsrcname[1:]
@@ -342,7 +342,7 @@ class Git(FetchMethod):
# We do this since git will use a "-l" option automatically for local urls where possible
if repourl.startswith("file://"):
repourl = repourl[7:]
- clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
+ clone_cmd = "LANG=C %s clone --bare --mirror \"%s\" %s --progress" % (ud.basecmd, repourl, ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -354,8 +354,8 @@ class Git(FetchMethod):
if "origin" in output:
runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
- runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
- fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
+ runfetchcmd("%s remote add --mirror=fetch origin \"%s\"" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
+ fetch_cmd = "LANG=C %s fetch -f --progress \"%s\" refs/*:refs/*" % (ud.basecmd, repourl)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
progresshandler = GitProgressHandler(d)
@@ -475,6 +475,9 @@ class Git(FetchMethod):
need_lfs = ud.parm.get("lfs", "1") == "1"
+ if not need_lfs:
+ ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd
+
source_found = False
source_error = []
@@ -501,12 +504,12 @@ class Git(FetchMethod):
raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
repourl = self._get_repo_url(ud)
- runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
+ runfetchcmd("%s remote set-url origin \"%s\"" % (ud.basecmd, repourl), d, workdir=destdir)
if self._contains_lfs(ud, d, destdir):
if need_lfs and not self._find_git_lfs(d):
raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
- else:
+ elif not need_lfs:
bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
if not ud.nocheckout:
@@ -563,8 +566,15 @@ class Git(FetchMethod):
"""
Check if the repository has 'lfs' (large file) content
"""
- cmd = "%s grep lfs HEAD:.gitattributes | wc -l" % (
- ud.basecmd)
+
+ if not ud.nobranch:
+ branchname = ud.branches[ud.names[0]]
+ else:
+ branchname = "master"
+
+ cmd = "%s grep lfs origin/%s:.gitattributes | wc -l" % (
+ ud.basecmd, branchname)
+
try:
output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
if int(output) > 0:
@@ -613,7 +623,7 @@ class Git(FetchMethod):
d.setVar('_BB_GIT_IN_LSREMOTE', '1')
try:
repourl = self._get_repo_url(ud)
- cmd = "%s ls-remote %s %s" % \
+ cmd = "%s ls-remote \"%s\" %s" % \
(ud.basecmd, repourl, search)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, cmd, repourl)
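
For reference, a rough sketch of the mirror-directory naming changed in the first hunk above (spaces in the path are now mapped to underscores, alongside the existing ':'/'/'/'*' substitutions); the host and path values here are purely illustrative:

    def gitsrcname_for(host, path):
        # Mirrors the gitsrcname expression in urldata_init() above.
        name = '%s%s' % (host.replace(':', '.'),
                         path.replace('/', '.').replace('*', '.').replace(' ', '_'))
        return name[1:] if name.startswith('.') else name

    # gitsrcname_for('example.com:9418', '/my repo/project.git')
    #   -> 'example.com.9418.my_repo.project.git'
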
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index e7083001d..d6e5c5c05 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -143,12 +143,43 @@ class GitSM(Git):
try:
# Check for the nugget dropped by the download operation
known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
- (ud.basecmd), d, workdir=ud.clonedir)
+ (ud.basecmd), d, workdir=ud.clonedir)
- if ud.revisions[ud.names[0]] not in known_srcrevs.split():
- return True
+ if ud.revisions[ud.names[0]] in known_srcrevs.split():
+ return False
except bb.fetch2.FetchError:
- # No srcrev nuggets, so this is new and needs to be updated
+ pass
+
+ need_update_list = []
+ def need_update_submodule(ud, url, module, modpath, workdir, d):
+ url += ";bareclone=1;nobranch=1"
+
+ try:
+ newfetch = Fetch([url], d, cache=False)
+ new_ud = newfetch.ud[url]
+ if new_ud.method.need_update(new_ud, d):
+ need_update_list.append(modpath)
+ except Exception as e:
+ logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
+ need_update_list.append(modpath)
+
+ # If we're using a shallow mirror tarball it needs to be unpacked
+ # temporarily so that we can examine the .gitmodules file
+ if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
+ tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+ runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
+ self.process_submodules(ud, tmpdir, need_update_submodule, d)
+ shutil.rmtree(tmpdir)
+ else:
+ self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
+ if len(need_update_list) == 0:
+ # We already have the required commits of all submodules. Drop
+ # a nugget so we don't need to check again.
+ runfetchcmd("%s config --add bitbake.srcrev %s" % \
+ (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
+
+ if len(need_update_list) > 0:
+ logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
return True
return False
@@ -163,9 +194,6 @@ class GitSM(Git):
try:
newfetch = Fetch([url], d, cache=False)
newfetch.download()
- # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
- runfetchcmd("%s config --add bitbake.srcrev %s" % \
- (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=workdir)
except Exception as e:
logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
raise
@@ -181,6 +209,9 @@ class GitSM(Git):
shutil.rmtree(tmpdir)
else:
self.process_submodules(ud, ud.clonedir, download_submodule, d)
+ # Drop a nugget for the srcrev we've fetched (used by need_update)
+ runfetchcmd("%s config --add bitbake.srcrev %s" % \
+ (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
def unpack(self, ud, destdir, d):
def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -223,3 +254,24 @@ class GitSM(Git):
# up the configuration and checks out the files. The main project config should remain
# unmodified, and no download from the internet should occur.
runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+
+ def implicit_urldata(self, ud, d):
+ import shutil, subprocess, tempfile
+
+ urldata = []
+ def add_submodule(ud, url, module, modpath, workdir, d):
+ url += ";bareclone=1;nobranch=1"
+ newfetch = Fetch([url], d, cache=False)
+ urldata.extend(newfetch.expanded_urldata())
+
+ # If we're using a shallow mirror tarball it needs to be unpacked
+ # temporarily so that we can examine the .gitmodules file
+ if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
+ tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+ subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
+ self.process_submodules(ud, tmpdir, add_submodule, d)
+ shutil.rmtree(tmpdir)
+ else:
+ self.process_submodules(ud, ud.clonedir, add_submodule, d)
+
+ return urldata
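
The gitsm callbacks above (need_update_submodule, download_submodule, add_submodule) all share the signature expected by process_submodules(). A minimal sketch of a custom walker under that assumption:

    submodule_urls = []

    def collect_submodule_urls(ud, url, module, modpath, workdir, d):
        # url is the submodule's fetch URL, modpath its path within the tree
        submodule_urls.append(url + ";bareclone=1;nobranch=1")

    # invoked as: self.process_submodules(ud, ud.clonedir, collect_submodule_urls, d)
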
diff --git a/poky/bitbake/lib/bb/fetch2/local.py b/poky/bitbake/lib/bb/fetch2/local.py
index 01d9ff9f8..25d4557db 100644
--- a/poky/bitbake/lib/bb/fetch2/local.py
+++ b/poky/bitbake/lib/bb/fetch2/local.py
@@ -17,7 +17,7 @@ import os
import urllib.request, urllib.parse, urllib.error
import bb
import bb.utils
-from bb.fetch2 import FetchMethod, FetchError
+from bb.fetch2 import FetchMethod, FetchError, ParameterError
from bb.fetch2 import logger
class Local(FetchMethod):
@@ -33,6 +33,8 @@ class Local(FetchMethod):
ud.basename = os.path.basename(ud.decodedurl)
ud.basepath = ud.decodedurl
ud.needdonestamp = False
+ if "*" in ud.decodedurl:
+ raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url)
return
def localpath(self, urldata, d):
@@ -55,12 +57,6 @@ class Local(FetchMethod):
logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
newpath, hist = bb.utils.which(filespath, path, history=True)
searched.extend(hist)
- if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
- # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
- newpath, hist = bb.utils.which(filespath, ".", history=True)
- searched.extend(hist)
- logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
- return searched
if not os.path.exists(newpath):
dldirfile = os.path.join(d.getVar("DL_DIR"), path)
logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
@@ -70,8 +66,6 @@ class Local(FetchMethod):
return searched
def need_update(self, ud, d):
- if ud.url.find("*") != -1:
- return False
if os.path.exists(ud.localpath):
return False
return True
@@ -95,9 +89,6 @@ class Local(FetchMethod):
"""
Check the status of the url
"""
- if urldata.localpath.find("*") != -1:
- logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
- return True
if os.path.exists(urldata.localpath):
return True
return False
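
With globbing removed, a file:// URL containing '*' now fails when the URL is parsed rather than silently falling back to the first existing FILESPATH directory. A minimal sketch of the new behaviour, assuming a populated datastore `d`:

    from bb.fetch2 import Fetch, ParameterError

    try:
        Fetch(["file://*"], d)
    except ParameterError as e:
        # Globbing file:// URLs are rejected up front; move the files into a
        # directory and reference that directory instead.
        print("rejected:", e)
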
diff --git a/poky/bitbake/lib/bb/fetch2/osc.py b/poky/bitbake/lib/bb/fetch2/osc.py
index 8f091efd0..3a6cd2951 100644
--- a/poky/bitbake/lib/bb/fetch2/osc.py
+++ b/poky/bitbake/lib/bb/fetch2/osc.py
@@ -8,12 +8,15 @@ Based on the svn "Fetch" implementation.
"""
import logging
+import os
import bb
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import MissingParameterError
from bb.fetch2 import runfetchcmd
+logger = logging.getLogger(__name__)
+
class Osc(FetchMethod):
"""Class to fetch a module or modules from Opensuse build server
repositories."""
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index f57c2a4f5..6f3c95b6c 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -1,6 +1,20 @@
"""
BitBake 'Fetch' implementation for perforce
+Supported SRC_URI options are:
+
+- module
+ The top-level location to fetch while preserving the remote paths
+
+ The value of module can point to either a directory or a file. The result,
+ in both cases, is that the fetcher will preserve all file paths starting
+ from the module path. That is, the top-level directory in the module value
+ will also be the top-level directory in P4DIR.
+
+- remotepath
+ If the value "keep" is given, the full depot location of each file is
+ preserved in P4DIR. This option overrides the effect of the module option.
+
"""
# Copyright (C) 2003, 2004 Chris Larson
@@ -17,6 +31,36 @@ from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
+class PerforceProgressHandler (bb.progress.BasicProgressHandler):
+ """
+ Implements basic progress information for perforce, based on the number of
+ files to be downloaded.
+
+ The p4 print command will print one line per file, therefore it can be used
+ to "count" the number of files already completed and give an indication of
+ the progress.
+ """
+ def __init__(self, d, num_files):
+ self._num_files = num_files
+ self._count = 0
+ super(PerforceProgressHandler, self).__init__(d)
+
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(-1)
+
+ def write(self, string):
+ self._count = self._count + 1
+
+ percent = int(100.0 * float(self._count) / float(self._num_files))
+
+ # In case something goes wrong, we try to preserve our sanity
+ if percent > 100:
+ percent = 100
+
+ self.update(percent)
+
+ super(PerforceProgressHandler, self).write(string)
+
class Perforce(FetchMethod):
""" Class to fetch from perforce repositories """
def supports(self, ud, d):
@@ -58,14 +102,33 @@ class Perforce(FetchMethod):
logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
if not ud.host:
raise FetchError('Could not determine P4PORT from P4CONFIG')
-
+
+ # Fetcher options
+ ud.module = ud.parm.get('module')
+ ud.keepremotepath = (ud.parm.get('remotepath', '') == 'keep')
+
if ud.path.find('/...') >= 0:
ud.pathisdir = True
else:
ud.pathisdir = False
+ # Avoid using the "/..." syntax in SRC_URI when a module value is given
+ if ud.pathisdir and ud.module:
+ raise FetchError('SRC_URI depot path cannot end in /... when a module value is given')
+
cleanedpath = ud.path.replace('/...', '').replace('/', '.')
cleanedhost = ud.host.replace(':', '.')
+
+ # Merge the path and module into the final depot location
+ if ud.module:
+ if ud.module.find('/') == 0:
+ raise FetchError('module cannot begin with /')
+ ud.path = os.path.join(ud.path, ud.module)
+
+ # Append the module path to the local pkg name
+ cleanedmodule = ud.module.replace('/...', '').replace('/', '.')
+ cleanedpath += '--%s' % cleanedmodule
+
ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)
ud.setup_revisions(d)
@@ -95,10 +158,20 @@ class Perforce(FetchMethod):
pathnrev = '%s' % (ud.path)
if depot_filename:
- if ud.pathisdir: # Remove leading path to obtain filename
+ if ud.keepremotepath:
+ # preserve everything, remove the leading //
+ filename = depot_filename.lstrip('/')
+ elif ud.module:
+ # remove everything up to the module path
+ modulepath = ud.module.rstrip('/...')
+ filename = depot_filename[depot_filename.rfind(modulepath):]
+ elif ud.pathisdir:
+ # Remove leading (visible) path to obtain the filepath
filename = depot_filename[len(ud.path)-1:]
else:
+ # Remove everything, except the filename
filename = depot_filename[depot_filename.rfind('/'):]
+
filename = filename[:filename.find('#')] # Remove trailing '#rev'
if command == 'changes':
@@ -150,10 +223,12 @@ class Perforce(FetchMethod):
bb.utils.remove(ud.pkgdir, True)
bb.utils.mkdirhier(ud.pkgdir)
+ progresshandler = PerforceProgressHandler(d, len(filelist))
+
for afile in filelist:
p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
bb.fetch2.check_network_access(d, p4fetchcmd, ud.url)
- runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)
+ runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir, log=progresshandler)
runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
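
A rough sketch of the three filename mappings added to _buildp4command() above, using an illustrative depot file and module value (not taken from the patch):

    depot_filename = '//depot/proj/src/main.c#4'

    # remotepath=keep: preserve the full depot path, minus the leading '//'
    keep = depot_filename.lstrip('/')                               # 'depot/proj/src/main.c#4'

    # module=src/...: keep everything from the module path onwards
    modulepath = 'src/...'.rstrip('/...')                           # 'src' (rstrip treats its argument as a character set)
    by_module = depot_filename[depot_filename.rfind(modulepath):]   # 'src/main.c#4'

    # default (single file): keep only the trailing path component
    by_file = depot_filename[depot_filename.rfind('/'):]            # '/main.c#4'

    # each result is then truncated at '#' to drop the '#rev' suffix
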
diff --git a/poky/bitbake/lib/bb/fetch2/ssh.py b/poky/bitbake/lib/bb/fetch2/ssh.py
index 5e982ecf3..2c8557e1f 100644
--- a/poky/bitbake/lib/bb/fetch2/ssh.py
+++ b/poky/bitbake/lib/bb/fetch2/ssh.py
@@ -31,8 +31,7 @@ IETF secsh internet draft:
#
import re, os
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import runfetchcmd
+from bb.fetch2 import check_network_access, FetchMethod, ParameterError, runfetchcmd
__pattern__ = re.compile(r'''
@@ -65,7 +64,7 @@ class SSH(FetchMethod):
def urldata_init(self, urldata, d):
if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
- raise bb.fetch2.ParameterError(
+ raise ParameterError(
"Invalid protocol - if you wish to fetch from a git " +
"repository using ssh, you need to use " +
"git:// prefix with protocol=ssh", urldata.url)
@@ -105,7 +104,7 @@ class SSH(FetchMethod):
dldir
)
- bb.fetch2.check_network_access(d, cmd, urldata.url)
+ check_network_access(d, cmd, urldata.url)
runfetchcmd(cmd, d)
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index f7d1de26b..e6d9f528d 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -208,10 +208,7 @@ class Wget(FetchMethod):
fetch.connection_cache.remove_connection(h.host, h.port)
raise urllib.error.URLError(err)
else:
- try:
- r = h.getresponse(buffering=True)
- except TypeError: # buffering kw not supported
- r = h.getresponse()
+ r = h.getresponse()
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
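
The removed fallback dates from Python 2, where httplib's getresponse() accepted a buffering keyword; in Python 3, http.client.HTTPConnection.getresponse() takes no arguments, so the try/except was dead code. A standalone sketch (example.com is illustrative):

    import http.client

    conn = http.client.HTTPConnection("example.com")
    conn.request("GET", "/")
    resp = conn.getresponse()   # no 'buffering' argument on Python 3
    print(resp.status)
    conn.close()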