Diffstat (limited to 'yocto-poky/bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r--  yocto-poky/bitbake/lib/bb/fetch2/__init__.py | 240
1 file changed, 110 insertions(+), 130 deletions(-)
diff --git a/yocto-poky/bitbake/lib/bb/fetch2/__init__.py b/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
index a9c044b6a..1fa67020c 100644
--- a/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/yocto-poky/bitbake/lib/bb/fetch2/__init__.py
@@ -29,11 +29,9 @@ from __future__ import absolute_import
from __future__ import print_function
import os, re
import signal
-import glob
import logging
import urllib
import urlparse
-import operator
import bb.persist_data, bb.utils
import bb.checksum
from bb import data
@@ -329,7 +327,7 @@ class URI(object):
def path(self, path):
self._path = path
- if re.compile("^/").match(path):
+ if not path or re.compile("^/").match(path):
self.relative = False
else:
self.relative = True
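
The fix above also treats an empty path as non-relative, where previously only a leading '/' cleared the flag. A minimal standalone sketch of the setter logic (a simplified stand-in, not the real bb.fetch2.URI class):

    import re

    class MiniURI(object):
        # Simplified stand-in for the URI class patched above.
        def __init__(self, path=""):
            self.relative = True
            self.path = path   # routed through the property setter below

        @property
        def path(self):
            return self._path

        @path.setter
        def path(self, path):
            self._path = path
            # Fixed behaviour: an empty path no longer counts as relative.
            if not path or re.compile("^/").match(path):
                self.relative = False
            else:
                self.relative = True

    assert MiniURI("").relative is False       # the case this hunk fixes
    assert MiniURI("/srv/www").relative is False
    assert MiniURI("srv/www").relative is True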
@@ -377,9 +375,12 @@ def decodeurl(url):
if locidx != -1 and type.lower() != 'file':
host = location[:locidx]
path = location[locidx:]
- else:
+ elif type.lower() == 'file':
host = ""
path = location
+ else:
+ host = location
+ path = ""
if user:
m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
if m:
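
The new branches fix non-file URLs whose location contains no '/' at all (e.g. a bare registry host): previously they fell into the file-style arm, leaving host empty and putting the hostname into path. A standalone sketch of just the three-way split (a hypothetical helper, not the full decodeurl):

    def split_location(scheme, location):
        # Sketch of the host/path split in the hunk above.
        locidx = location.find('/')
        if locidx != -1 and scheme.lower() != 'file':
            return location[:locidx], location[locidx:]   # host, path
        elif scheme.lower() == 'file':
            return "", location                           # file URLs are all path
        else:
            return location, ""                           # new case: host-only URL

    assert split_location("http", "example.com/a.tar.gz") == ("example.com", "/a.tar.gz")
    assert split_location("file", "/srv/mirror/a.tar.gz") == ("", "/srv/mirror/a.tar.gz")
    assert split_location("npm", "registry.npmjs.org") == ("registry.npmjs.org", "")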
@@ -515,13 +516,13 @@ def fetcher_init(d):
if hasattr(m, "init"):
m.init(d)
-def fetcher_parse_save(d):
- _checksum_cache.save_extras(d)
+def fetcher_parse_save():
+ _checksum_cache.save_extras()
-def fetcher_parse_done(d):
- _checksum_cache.save_merge(d)
+def fetcher_parse_done():
+ _checksum_cache.save_merge()
-def fetcher_compare_revisions(d):
+def fetcher_compare_revisions():
"""
Compare the revisions in the persistent cache with current values and
return true/false on whether they've changed.
@@ -574,10 +575,10 @@ def verify_checksum(ud, d, precomputed={}):
else:
sha256data = bb.utils.sha256_file(ud.localpath)
- if ud.method.recommends_checksum(ud):
+ if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
# If strict checking enabled and neither sum defined, raise error
strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
- if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
+ if strict == "1":
logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
(ud.localpath, ud.md5_name, md5data,
@@ -585,34 +586,22 @@ def verify_checksum(ud, d, precomputed={}):
raise NoChecksumError('Missing SRC_URI checksum', ud.url)
# Log missing sums so user can more easily add them
- if not ud.md5_expected:
- logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"',
- ud.localpath, ud.md5_name, md5data)
-
- if not ud.sha256_expected:
- logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"',
- ud.localpath, ud.sha256_name, sha256data)
-
- md5mismatch = False
- sha256mismatch = False
-
- if ud.md5_expected != md5data:
- md5mismatch = True
-
- if ud.sha256_expected != sha256data:
- sha256mismatch = True
+ logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"',
+ ud.localpath, ud.md5_name, md5data)
+ logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"',
+ ud.localpath, ud.sha256_name, sha256data)
# We want to alert the user if a checksum is defined in the recipe but
# it does not match.
msg = ""
mismatch = False
- if md5mismatch and ud.md5_expected:
+ if ud.md5_expected and ud.md5_expected != md5data:
msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
mismatch = True
- if sha256mismatch and ud.sha256_expected:
+ if ud.sha256_expected and ud.sha256_expected != sha256data:
msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
mismatch = True
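
Restructured, verify_checksum now follows one clear path: when the fetcher recommends checksums and the recipe sets neither, it errors out under BB_STRICT_CHECKSUM or warns about both missing sums; the mismatch checks then only compare sums the recipe actually defines. A condensed sketch of that flow (plain ValueError standing in for NoChecksumError/ChecksumError):

    def check_sums(md5_expected, sha256_expected, md5data, sha256data,
                   recommends=True, strict=False):
        # Condensed sketch of the reworked verify_checksum control flow.
        if recommends and not md5_expected and not sha256_expected:
            if strict:
                raise ValueError("Missing SRC_URI checksum")
            print("warn: missing md5 checksum, consider adding %s" % md5data)
            print("warn: missing sha256 checksum, consider adding %s" % sha256data)
        msg = ""
        if md5_expected and md5_expected != md5data:
            msg += "\nmd5 is %s, expected %s" % (md5data, md5_expected)
        if sha256_expected and sha256_expected != sha256data:
            msg += "\nsha256 is %s, expected %s" % (sha256data, sha256_expected)
        if msg:
            raise ValueError("Checksum mismatch:" + msg)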
@@ -637,6 +626,9 @@ def verify_donestamp(ud, d, origud=None):
Returns True if the donestamp exists and is valid, False otherwise. When
returning False, any existing done stamps are removed.
"""
+ if not ud.needdonestamp:
+ return True
+
if not os.path.exists(ud.donestamp):
return False
@@ -685,7 +677,8 @@ def verify_donestamp(ud, d, origud=None):
# incorrect stamp file.
logger.warn("Checksum mismatch for local file %s\n"
"Cleaning and trying again." % ud.localpath)
- rename_bad_checksum(ud, e.checksum)
+ if os.path.exists(ud.localpath):
+ rename_bad_checksum(ud, e.checksum)
bb.utils.remove(ud.donestamp)
return False
@@ -695,6 +688,9 @@ def update_stamp(ud, d):
donestamp is a file stamp indicating that the whole fetch is done;
this function updates the stamp after verifying the checksum
"""
+ if not ud.needdonestamp:
+ return
+
if os.path.exists(ud.donestamp):
# Touch the done stamp file to show active use of the download
try:
@@ -703,11 +699,21 @@ def update_stamp(ud, d):
# Errors aren't fatal here
pass
else:
- checksums = verify_checksum(ud, d)
- # Store the checksums for later re-verification against the recipe
- with open(ud.donestamp, "wb") as cachefile:
- p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
- p.dump(checksums)
+ try:
+ checksums = verify_checksum(ud, d)
+ # Store the checksums for later re-verification against the recipe
+ with open(ud.donestamp, "wb") as cachefile:
+ p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+ p.dump(checksums)
+ except ChecksumError as e:
+ # Checksums failed to verify, trigger re-download and remove the
+ # incorrect stamp file.
+ logger.warn("Checksum mismatch for local file %s\n"
+ "Cleaning and trying again." % ud.localpath)
+ if os.path.exists(ud.localpath):
+ rename_bad_checksum(ud, e.checksum)
+ bb.utils.remove(ud.donestamp)
+ raise
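
update_stamp now mirrors verify_donestamp's recovery path: verification runs inside try/except, a failing download is renamed aside and its stamp removed before the error propagates, so the next run re-fetches. A rough standalone sketch of the verify-then-persist pattern (ValueError in place of ChecksumError; the rename suffix is illustrative):

    import os
    import pickle

    def write_donestamp(stampfile, localpath, verify):
        # Sketch: persist the stamp only once verification has succeeded.
        try:
            checksums = verify()   # may raise on a checksum mismatch
            with open(stampfile, "wb") as cachefile:
                pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL).dump(checksums)
        except ValueError:
            # Clean up the bad artifact and the stamp, then re-raise.
            if os.path.exists(localpath):
                os.rename(localpath, localpath + "_bad-checksum")
            if os.path.exists(stampfile):
                os.remove(stampfile)
            raise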
def subprocess_setup():
# Python installs a SIGPIPE handler by default. This is usually not what
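
The comment above is truncated by the diff context; for reference, a minimal sketch of the conventional pre-exec hook it describes (an assumption about the elided body, not a quote of it): restore the default SIGPIPE disposition so children feeding shell pipelines, such as the 7z-to-tar commands added below, terminate cleanly.

    import signal

    def subprocess_setup():
        # Python ignores SIGPIPE by default; children running shell
        # pipelines want the POSIX default behaviour back.
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)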
@@ -718,7 +724,7 @@ def subprocess_setup():
def get_autorev(d):
# only skip caching the src rev in the autorev case
if d.getVar('BB_SRCREV_POLICY', True) != "cache":
- d.setVar('__BB_DONT_CACHE', '1')
+ d.setVar('BB_DONT_CACHE', '1')
return "AUTOINC"
def get_srcrev(d, method_name='sortable_revision'):
@@ -920,6 +926,10 @@ def rename_bad_checksum(ud, suffix):
def try_mirror_url(fetch, origud, ud, ld, check = False):
# Return of None or a value means we're finished
# False means try another url
+
+ if ud.lockfile and ud.lockfile != origud.lockfile:
+ lf = bb.utils.lockfile(ud.lockfile)
+
try:
if check:
found = ud.method.checkstatus(fetch, ud, ld)
@@ -946,8 +956,9 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
# Create donestamp in old format to avoid triggering a re-download
- bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
- open(ud.donestamp, 'w').close()
+ if ud.donestamp:
+ bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
+ open(ud.donestamp, 'w').close()
dest = os.path.join(dldir, os.path.basename(ud.localpath))
if not os.path.exists(dest):
os.symlink(ud.localpath, dest)
@@ -973,7 +984,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
if isinstance(e, ChecksumError):
logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
logger.warn(str(e))
- rename_bad_checksum(ud, e.checksum)
+ if os.path.exists(ud.localpath):
+ rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError):
raise
else:
@@ -984,6 +996,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
except UnboundLocalError:
pass
return False
+ finally:
+ if ud.lockfile and ud.lockfile != origud.lockfile:
+ bb.utils.unlockfile(lf)
+
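
A mirror URL may carry its own lockfile; it is taken before the try block and released in the new finally clause, so every exit path (success, fall-through to the next mirror, or exception) unlocks. The same pattern, sketched with a plain flock standing in for bb.utils.lockfile/unlockfile:

    import fcntl

    def run_with_mirror_lock(ud_lockfile, origud_lockfile, action):
        # Acquire-before-try, release-in-finally, as in the hunk above.
        lf = None
        if ud_lockfile and ud_lockfile != origud_lockfile:
            lf = open(ud_lockfile, "a+")
            fcntl.flock(lf, fcntl.LOCK_EX)
        try:
            return action()
        finally:
            if lf is not None:
                fcntl.flock(lf, fcntl.LOCK_UN)
                lf.close()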
def try_mirrors(fetch, d, origud, mirrors, check = False):
"""
@@ -1014,7 +1030,7 @@ def trusted_network(d, url):
return True
pkgname = d.expand(d.getVar('PN', False))
- trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname)
+ trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
if not trusted_hosts:
trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)
@@ -1028,6 +1044,7 @@ def trusted_network(d, url):
if not network:
return True
+ network = network.split(':')[0]
network = network.lower()
for host in trusted_hosts.split(" "):
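
The added split lets a URL that pins a port (host:port) still match a bare hostname listed in BB_ALLOWED_NETWORKS. A tiny sketch of the normalisation:

    def normalise_host(network):
        # Drop an optional :port, then lower-case, as the hunk above does.
        return network.split(':')[0].lower()

    assert normalise_host("Example.COM:8080") == "example.com"
    assert normalise_host("example.com") == "example.com"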
@@ -1120,48 +1137,7 @@ def get_file_checksums(filelist, pn):
it proceeds
"""
-
- def checksum_file(f):
- try:
- checksum = _checksum_cache.get_checksum(f)
- except OSError as e:
- bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
- return None
- return checksum
-
- def checksum_dir(pth):
- # Handle directories recursively
- dirchecksums = []
- for root, dirs, files in os.walk(pth):
- for name in files:
- fullpth = os.path.join(root, name)
- checksum = checksum_file(fullpth)
- if checksum:
- dirchecksums.append((fullpth, checksum))
- return dirchecksums
-
- checksums = []
- for pth in filelist.split():
- exist = pth.split(":")[1]
- if exist == "False":
- continue
- pth = pth.split(":")[0]
- if '*' in pth:
- # Handle globs
- for f in glob.glob(pth):
- if os.path.isdir(f):
- checksums.extend(checksum_dir(f))
- else:
- checksum = checksum_file(f)
- checksums.append((f, checksum))
- elif os.path.isdir(pth):
- checksums.extend(checksum_dir(pth))
- else:
- checksum = checksum_file(pth)
- checksums.append((pth, checksum))
-
- checksums.sort(key=operator.itemgetter(1))
- return checksums
+ return _checksum_cache.get_checksums(filelist, pn)
class FetchData(object):
@@ -1171,6 +1147,7 @@ class FetchData(object):
def __init__(self, url, d, localonly = False):
# localpath is the location of a downloaded result. If not set, the file is local.
self.donestamp = None
+ self.needdonestamp = True
self.localfile = ""
self.localpath = None
self.lockfile = None
@@ -1197,13 +1174,13 @@ class FetchData(object):
elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
self.md5_expected = None
else:
- self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
+ self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name, True)
if self.sha256_name in self.parm:
self.sha256_expected = self.parm[self.sha256_name]
elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]:
self.sha256_expected = None
else:
- self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
+ self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name, True)
self.ignore_checksums = False
self.names = self.parm.get("name",'default').split(',')
@@ -1235,13 +1212,20 @@ class FetchData(object):
self.localpath = self.method.localpath(self, d)
dldir = d.getVar("DL_DIR", True)
+
+ if not self.needdonestamp:
+ return
+
# Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
if self.localpath and self.localpath.startswith(dldir):
basepath = self.localpath
elif self.localpath:
basepath = dldir + os.sep + os.path.basename(self.localpath)
- else:
+ elif self.basepath or self.basename:
basepath = dldir + os.sep + (self.basepath or self.basename)
+ else:
+ bb.fatal("Can't determine lock path for url %s" % url)
+
self.donestamp = basepath + '.done'
self.lockfile = basepath + '.lock'
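
The .done and .lock files must always land in DL_DIR even when localpath lives elsewhere, and the new else branch turns the "no usable name at all" case into an immediate bb.fatal instead of a confusing failure later (the whole block is skipped when no donestamp is needed). A standalone sketch of the selection:

    import os

    def stamp_basepath(dldir, localpath, basepath=None, basename=None):
        # Sketch of how the .done/.lock base path is chosen above.
        if localpath and localpath.startswith(dldir):
            return localpath
        elif localpath:
            return os.path.join(dldir, os.path.basename(localpath))
        elif basepath or basename:
            return os.path.join(dldir, basepath or basename)
        raise RuntimeError("Can't determine lock path")

    assert stamp_basepath("/dl", "/dl/a.tar.gz") == "/dl/a.tar.gz"
    assert stamp_basepath("/dl", "/work/a.tar.gz") == "/dl/a.tar.gz"
    assert stamp_basepath("/dl", None, basename="a.tar.gz") + ".done" == "/dl/a.tar.gz.done"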
@@ -1355,6 +1339,11 @@ class FetchMethod(object):
iterate = False
file = urldata.localpath
+ # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
+ # but it must be corrected back when copying local files
+ if urldata.basename == '*' and file.endswith('/.'):
+ file = '%s/%s' % (file.rstrip('/.'), urldata.path)
+
try:
unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
except ValueError as exc:
@@ -1407,51 +1396,40 @@ class FetchMethod(object):
cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
elif file.endswith('.deb') or file.endswith('.ipk'):
cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
+ elif file.endswith('.tar.7z'):
+ cmd = '7z x -so %s | tar xf - ' % file
+ elif file.endswith('.7z'):
+ cmd = '7za x -y %s 1>/dev/null' % file
+
+ # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
+ if 'subdir' in urldata.parm:
+ unpackdir = '%s/%s' % (rootdir, urldata.parm.get('subdir'))
+ bb.utils.mkdirhier(unpackdir)
+ else:
+ unpackdir = rootdir
if not unpack or not cmd:
# If file == dest, then avoid any copies, as we already put the file into dest!
- dest = os.path.join(rootdir, os.path.basename(file))
- if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
- if os.path.isdir(file):
- # If for example we're asked to copy file://foo/bar, we need to unpack the result into foo/bar
- basepath = getattr(urldata, "basepath", None)
- destdir = "."
- if basepath and basepath.endswith("/"):
- basepath = basepath.rstrip("/")
- elif basepath:
- basepath = os.path.dirname(basepath)
- if basepath and basepath.find("/") != -1:
- destdir = basepath[:basepath.rfind('/')]
- destdir = destdir.strip('/')
- if destdir != "." and not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
- os.makedirs("%s/%s" % (rootdir, destdir))
- cmd = 'cp -fpPR %s %s/%s/' % (file, rootdir, destdir)
- #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir)
- else:
- # The "destdir" handling was specifically done for FILESPATH
- # items. So, only do so for file:// entries.
- if urldata.type == "file" and urldata.path.find("/") != -1:
- destdir = urldata.path.rsplit("/", 1)[0]
- if urldata.parm.get('subdir') != None:
- destdir = urldata.parm.get('subdir') + "/" + destdir
- else:
- if urldata.parm.get('subdir') != None:
- destdir = urldata.parm.get('subdir')
- else:
- destdir = "."
- bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
- cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir)
+ dest = os.path.join(unpackdir, os.path.basename(file))
+ if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
+ destdir = '.'
+ # For file:// entries all intermediate dirs in path must be created at destination
+ if urldata.type == "file":
+ # A trailing '/' would send the copy to the wrong place
+ urlpath = urldata.path.rstrip('/')
+ # Want files placed relative to cwd, so no leading '/'
+ urlpath = urlpath.lstrip('/')
+ if urlpath.find("/") != -1:
+ destdir = urlpath.rsplit("/", 1)[0] + '/'
+ bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
+ cmd = 'cp -fpPR %s %s' % (file, destdir)
if not cmd:
return
- # Change to subdir before executing command
+ # Change to unpackdir before executing command
save_cwd = os.getcwd()
- os.chdir(rootdir)
- if 'subdir' in urldata.parm:
- newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
- bb.utils.mkdirhier(newdir)
- os.chdir(newdir)
+ os.chdir(unpackdir)
path = data.getVar('PATH', True)
if path:
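
With unpackdir computed up front, the archive commands and the plain-copy fallback now share one 'subdir=' code path, and file:// copies recreate the URL's intermediate directories relative to the unpack directory. A sketch of the two destination computations (simplified; the real code shells out to cp):

    import os

    def unpack_destdir(rootdir, subdir=None):
        # A 'subdir=' URL parameter redirects the whole unpack.
        unpackdir = os.path.join(rootdir, subdir) if subdir else rootdir
        if not os.path.isdir(unpackdir):
            os.makedirs(unpackdir)   # bb.utils.mkdirhier in the real code
        return unpackdir

    def file_copy_destdir(urlpath):
        # Destination subdir for a file:// entry, per the hunk above.
        urlpath = urlpath.rstrip('/')   # a trailing '/' would misplace the copy
        urlpath = urlpath.lstrip('/')   # keep the copy relative to the unpack dir
        if '/' in urlpath:
            return urlpath.rsplit('/', 1)[0] + '/'
        return '.'

    assert file_copy_destdir("/foo/bar/baz.patch") == "foo/bar/"
    assert file_copy_destdir("baz.patch") == '.'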
@@ -1578,7 +1556,8 @@ class Fetch(object):
m = ud.method
localpath = ""
- lf = bb.utils.lockfile(ud.lockfile)
+ if ud.lockfile:
+ lf = bb.utils.lockfile(ud.lockfile)
try:
self.d.setVar("BB_NO_NETWORK", network)
@@ -1617,7 +1596,8 @@ class Fetch(object):
if isinstance(e, ChecksumError):
logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
logger.debug(1, str(e))
- rename_bad_checksum(ud, e.checksum)
+ if os.path.exists(ud.localpath):
+ rename_bad_checksum(ud, e.checksum)
elif isinstance(e, NoChecksumError):
raise
else:
@@ -1644,7 +1624,8 @@ class Fetch(object):
raise
finally:
- bb.utils.unlockfile(lf)
+ if ud.lockfile:
+ bb.utils.unlockfile(lf)
def checkstatus(self, urls=None):
"""
@@ -1686,9 +1667,6 @@ class Fetch(object):
ud = self.ud[u]
ud.setup_localpath(self.d)
- if self.d.expand(self.localpath) is None:
- continue
-
if ud.lockfile:
lf = bb.utils.lockfile(ud.lockfile)
@@ -1775,6 +1753,7 @@ from . import hg
from . import osc
from . import repo
from . import clearcase
+from . import npm
methods.append(local.Local())
methods.append(wget.Wget())
@@ -1791,3 +1770,4 @@ methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
+methods.append(npm.Npm())
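
The two added lines plug the new npm fetcher into the same method table every other scheme uses; URL dispatch walks the list asking each method whether it supports a URL. A toy sketch of that registry pattern (the real supports() also takes the URL data and datastore, so the signature here is simplified):

    class Npm(object):
        # Toy stand-in for bb.fetch2.npm.Npm.
        def supports(self, scheme):
            return scheme == "npm"

    methods = []
    methods.append(Npm())

    def find_method(scheme):
        for m in methods:
            if m.supports(scheme):
                return m
        raise RuntimeError("no fetcher for scheme %r" % scheme)

    assert find_method("npm").__class__.__name__ == "Npm"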