Diffstat (limited to 'poky/bitbake/lib/bb/fetch2')
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py   1864
-rw-r--r--  poky/bitbake/lib/bb/fetch2/bzr.py          139
-rw-r--r--  poky/bitbake/lib/bb/fetch2/clearcase.py    260
-rw-r--r--  poky/bitbake/lib/bb/fetch2/cvs.py          172
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py          664
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitannex.py      91
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py        135
-rw-r--r--  poky/bitbake/lib/bb/fetch2/hg.py           270
-rw-r--r--  poky/bitbake/lib/bb/fetch2/local.py        119
-rw-r--r--  poky/bitbake/lib/bb/fetch2/npm.py          309
-rw-r--r--  poky/bitbake/lib/bb/fetch2/osc.py          132
-rw-r--r--  poky/bitbake/lib/bb/fetch2/perforce.py     209
-rw-r--r--  poky/bitbake/lib/bb/fetch2/repo.py          97
-rw-r--r--  poky/bitbake/lib/bb/fetch2/s3.py            98
-rw-r--r--  poky/bitbake/lib/bb/fetch2/sftp.py         125
-rw-r--r--  poky/bitbake/lib/bb/fetch2/ssh.py          125
-rw-r--r--  poky/bitbake/lib/bb/fetch2/svn.py          193
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py         626
18 files changed, 5628 insertions, 0 deletions
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
new file mode 100644
index 000000000..72d6092de
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -0,0 +1,1864 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2012 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os, re
+import signal
+import logging
+import urllib.request, urllib.parse, urllib.error
+if 'git' not in urllib.parse.uses_netloc:
+ urllib.parse.uses_netloc.append('git')
+import operator
+import collections
+import subprocess
+import pickle
+import errno
+import bb.persist_data, bb.utils
+import bb.checksum
+import bb.process
+import bb.event
+
+__version__ = "2"
+_checksum_cache = bb.checksum.FileChecksumCache()
+
+logger = logging.getLogger("BitBake.Fetcher")
+
+class BBFetchException(Exception):
+ """Class all fetch exceptions inherit from"""
+ def __init__(self, message):
+ self.msg = message
+ Exception.__init__(self, message)
+
+ def __str__(self):
+ return self.msg
+
+class UntrustedUrl(BBFetchException):
+ """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
+ def __init__(self, url, message=''):
+ if message:
+ msg = message
+ else:
+ msg = "The URL: '%s' is not trusted and cannot be used" % url
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = (url,)
+
+class MalformedUrl(BBFetchException):
+ """Exception raised when encountering an invalid url"""
+ def __init__(self, url, message=''):
+ if message:
+ msg = message
+ else:
+ msg = "The URL: '%s' is invalid and cannot be interpreted" % url
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = (url,)
+
+class FetchError(BBFetchException):
+ """General fetcher exception when something happens incorrectly"""
+ def __init__(self, message, url = None):
+ if url:
+ msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
+ else:
+ msg = "Fetcher failure: %s" % message
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = (message, url)
+
+class ChecksumError(FetchError):
+ """Exception when mismatched checksum encountered"""
+ def __init__(self, message, url = None, checksum = None):
+ self.checksum = checksum
+ FetchError.__init__(self, message, url)
+
+class NoChecksumError(FetchError):
+ """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
+
+class UnpackError(BBFetchException):
+ """General fetcher exception when something happens incorrectly when unpacking"""
+ def __init__(self, message, url):
+ msg = "Unpack failure for URL: '%s'. %s" % (url, message)
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = (message, url)
+
+class NoMethodError(BBFetchException):
+ """Exception raised when there is no method to obtain a supplied url or set of urls"""
+ def __init__(self, url):
+ msg = "Could not find a fetcher which supports the URL: '%s'" % url
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = (url,)
+
+class MissingParameterError(BBFetchException):
+ """Exception raised when a fetch method is missing a critical parameter in the url"""
+ def __init__(self, missing, url):
+ msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
+ self.url = url
+ self.missing = missing
+ BBFetchException.__init__(self, msg)
+ self.args = (missing, url)
+
+class ParameterError(BBFetchException):
+ """Exception raised when a url cannot be proccessed due to invalid parameters."""
+ def __init__(self, message, url):
+ msg = "URL: '%s' has invalid parameters. %s" % (url, message)
+ self.url = url
+ BBFetchException.__init__(self, msg)
+ self.args = (message, url)
+
+class NetworkAccess(BBFetchException):
+ """Exception raised when network access is disabled but it is required."""
+ def __init__(self, url, cmd):
+ msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
+ self.url = url
+ self.cmd = cmd
+ BBFetchException.__init__(self, msg)
+ self.args = (url, cmd)
+
+class NonLocalMethod(Exception):
+ def __init__(self):
+ Exception.__init__(self)
+
+class MissingChecksumEvent(bb.event.Event):
+ def __init__(self, url, md5sum, sha256sum):
+ self.url = url
+ self.checksums = {'md5sum': md5sum,
+ 'sha256sum': sha256sum}
+ bb.event.Event.__init__(self)
+
+
+class URI(object):
+ """
+ A class representing a generic URI, with methods for
+ accessing the URI components; instances stringify to the
+ URI itself.
+
+ It is constructed by calling it with a URI, or setting
+ the attributes manually:
+
+ uri = URI("http://example.com/")
+
+ uri = URI()
+ uri.scheme = 'http'
+ uri.hostname = 'example.com'
+ uri.path = '/'
+
+ It has the following attributes:
+
+ * scheme (read/write)
+ * userinfo (authentication information) (read/write)
+ * username (read/write)
+ * password (read/write)
+
+ Note, password is deprecated as of RFC 3986.
+
+ * hostname (read/write)
+ * port (read/write)
+ * hostport (read only)
+ "hostname:port", if both are set, otherwise just "hostname"
+ * path (read/write)
+ * path_quoted (read/write)
+ A URI quoted version of path
+ * params (dict) (read/write)
+ * query (dict) (read/write)
+ * relative (bool) (read only)
+ True if this is a "relative URI", (e.g. file:foo.diff)
+
+ It stringifies to the URI itself.
+
+ Some notes about relative URIs: while it's specified that
+ a URI beginning with <scheme>:// should either be directly
+ followed by a hostname or a /, the old URI handling of the
+ fetch2 library did not conform to this. Therefore, this URI
+ class has some kludges to make sure that URIs are parsed in
+ a way conforming to bitbake's current usage. This URI class
+ supports the following:
+
+ file:relative/path.diff (IETF compliant)
+ git:relative/path.git (IETF compliant)
+ git:///absolute/path.git (IETF compliant)
+ file:///absolute/path.diff (IETF compliant)
+
+ file://relative/path.diff (not IETF compliant)
+
+ But it does not support the following:
+
+ file://hostname/absolute/path.diff (would be IETF compliant)
+
+ Note that the last case only applies to a list of
+ "whitelisted" schemes (currently only file://), which are
+ required not to have a network location.
+ """
+
+ _relative_schemes = ['file', 'git']
+ _netloc_forbidden = ['file']
+
+ def __init__(self, uri=None):
+ self.scheme = ''
+ self.userinfo = ''
+ self.hostname = ''
+ self.port = None
+ self._path = ''
+ self.params = {}
+ self.query = {}
+ self.relative = False
+
+ if not uri:
+ return
+
+ # We hijack the URL parameters, since the way bitbake uses
+ # them are not quite RFC compliant.
+ uri, param_str = (uri.split(";", 1) + [None])[:2]
+
+ urlp = urllib.parse.urlparse(uri)
+ self.scheme = urlp.scheme
+
+ reparse = 0
+
+ # Coerce urlparse to make URI scheme use netloc
+ if self.scheme not in urllib.parse.uses_netloc:
+ urllib.parse.uses_netloc.append(self.scheme)
+ reparse = 1
+
+ # Make urlparse happy(/ier) by converting local resources
+ # to RFC compliant URL format. E.g.:
+ # file://foo.diff -> file:foo.diff
+ if urlp.scheme in self._netloc_forbidden:
+ uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
+ reparse = 1
+
+ if reparse:
+ urlp = urllib.parse.urlparse(uri)
+
+ # Identify if the URI is relative or not
+ if urlp.scheme in self._relative_schemes and \
+ re.compile(r"^\w+:(?!//)").match(uri):
+ self.relative = True
+
+ if not self.relative:
+ self.hostname = urlp.hostname or ''
+ self.port = urlp.port
+
+ self.userinfo += urlp.username or ''
+
+ if urlp.password:
+ self.userinfo += ':%s' % urlp.password
+
+ self.path = urllib.parse.unquote(urlp.path)
+
+ if param_str:
+ self.params = self._param_str_split(param_str, ";")
+ if urlp.query:
+ self.query = self._param_str_split(urlp.query, "&")
+
+ def __str__(self):
+ userinfo = self.userinfo
+ if userinfo:
+ userinfo += '@'
+
+ return "%s:%s%s%s%s%s%s" % (
+ self.scheme,
+ '' if self.relative else '//',
+ userinfo,
+ self.hostport,
+ self.path_quoted,
+ self._query_str(),
+ self._param_str())
+
+ def _param_str(self):
+ return (
+ ''.join([';', self._param_str_join(self.params, ";")])
+ if self.params else '')
+
+ def _query_str(self):
+ return (
+ ''.join(['?', self._param_str_join(self.query, "&")])
+ if self.query else '')
+
+ def _param_str_split(self, string, elmdelim, kvdelim="="):
+ ret = collections.OrderedDict()
+ for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
+ ret[k] = v
+ return ret
+
+ def _param_str_join(self, dict_, elmdelim, kvdelim="="):
+ return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
+
+ @property
+ def hostport(self):
+ if not self.port:
+ return self.hostname
+ return "%s:%d" % (self.hostname, self.port)
+
+ @property
+ def path_quoted(self):
+ return urllib.parse.quote(self.path)
+
+ @path_quoted.setter
+ def path_quoted(self, path):
+ self.path = urllib.parse.unquote(path)
+
+ @property
+ def path(self):
+ return self._path
+
+ @path.setter
+ def path(self, path):
+ self._path = path
+
+ if not path or re.compile("^/").match(path):
+ self.relative = False
+ else:
+ self.relative = True
+
+ @property
+ def username(self):
+ if self.userinfo:
+ return (self.userinfo.split(":", 1))[0]
+ return ''
+
+ @username.setter
+ def username(self, username):
+ password = self.password
+ self.userinfo = username
+ if password:
+ self.userinfo += ":%s" % password
+
+ @property
+ def password(self):
+ if self.userinfo and ":" in self.userinfo:
+ return (self.userinfo.split(":", 1))[1]
+ return ''
+
+ @password.setter
+ def password(self, password):
+ self.userinfo = "%s:%s" % (self.username, password)
+
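+# Illustrative sketch (not part of the original sources; the values are
+# assumptions) of constructing and mutating a URI:
+#
+#   u = URI("http://user:pass@example.com:8080/path;a=1")
+#   u.scheme, u.hostport, u.path   # 'http', 'example.com:8080', '/path'
+#   u.params                       # {'a': '1'}
+#   u.username = 'other'
+#   str(u)                         # 'http://other:pass@example.com:8080/path;a=1'
+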
+def decodeurl(url):
+ """Decodes an URL into the tokens (scheme, network location, path,
+ user, password, parameters).
+ """
+
+ m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
+ if not m:
+ raise MalformedUrl(url)
+
+ type = m.group('type')
+ location = m.group('location')
+ if not location:
+ raise MalformedUrl(url)
+ user = m.group('user')
+ parm = m.group('parm')
+
+ locidx = location.find('/')
+ if locidx != -1 and type.lower() != 'file':
+ host = location[:locidx]
+ path = location[locidx:]
+ elif type.lower() == 'file':
+ host = ""
+ path = location
+ else:
+ host = location
+ path = ""
+ if user:
+ m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
+ if m:
+ user = m.group('user')
+ pswd = m.group('pswd')
+ else:
+ user = ''
+ pswd = ''
+
+ p = collections.OrderedDict()
+ if parm:
+ for s in parm.split(';'):
+ if s:
+ if '=' not in s:
+ raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
+ s1, s2 = s.split('=', 1)
+ p[s1] = s2
+
+ return type, host, urllib.parse.unquote(path), user, pswd, p
+
+def encodeurl(decoded):
+ """Encodes a URL from tokens (scheme, network location, path,
+ user, password, parameters).
+ """
+
+ type, host, path, user, pswd, p = decoded
+
+ if not type:
+ raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
+ url = '%s://' % type
+ if user and type != "file":
+ url += "%s" % user
+ if pswd:
+ url += ":%s" % pswd
+ url += "@"
+ if host and type != "file":
+ url += "%s" % host
+ if path:
+ # Standardise path to ensure comparisons work
+ while '//' in path:
+ path = path.replace("//", "/")
+ url += "%s" % urllib.parse.quote(path)
+ if p:
+ for parm in p:
+ url += ";%s=%s" % (parm, p[parm])
+
+ return url
+
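+# Illustrative round-trip (not from the original sources; the URL is an
+# assumption):
+#
+#   decodeurl("git://git.example.com/repo.git;protocol=https;branch=main")
+#   # -> ('git', 'git.example.com', '/repo.git', '', '',
+#   #     {'protocol': 'https', 'branch': 'main'})   (an OrderedDict in practice)
+#
+#   encodeurl(('git', 'git.example.com', '/repo.git', '', '', {'protocol': 'https'}))
+#   # -> 'git://git.example.com/repo.git;protocol=https'
+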
+def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
+ if not ud.url or not uri_find or not uri_replace:
+ logger.error("uri_replace: passed an undefined value, not replacing")
+ return None
+ uri_decoded = list(decodeurl(ud.url))
+ uri_find_decoded = list(decodeurl(uri_find))
+ uri_replace_decoded = list(decodeurl(uri_replace))
+ logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
+ result_decoded = ['', '', '', '', '', {}]
+ for loc, i in enumerate(uri_find_decoded):
+ result_decoded[loc] = uri_decoded[loc]
+ regexp = i
+ if loc == 0 and regexp and not regexp.endswith("$"):
+ # Leaving the type unanchored can mean "https" matching "file" can become "files"
+ # which is clearly undesirable.
+ regexp += "$"
+ if loc == 5:
+ # Handle URL parameters
+ if i:
+ # Any specified URL parameters must match
+ for k in uri_replace_decoded[loc]:
+ if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
+ return None
+ # Overwrite any specified replacement parameters
+ for k in uri_replace_decoded[loc]:
+ for l in replacements:
+ uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
+ result_decoded[loc][k] = uri_replace_decoded[loc][k]
+ elif (re.match(regexp, uri_decoded[loc])):
+ if not uri_replace_decoded[loc]:
+ result_decoded[loc] = ""
+ else:
+ for k in replacements:
+ uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
+ #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
+ result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
+ if loc == 2:
+ # Handle path manipulations
+ basename = None
+ if uri_decoded[0] != uri_replace_decoded[0] and mirrortarball:
+ # If the source and destination url types differ, must be a mirrortarball mapping
+ basename = os.path.basename(mirrortarball)
+ # Kill parameters, they make no sense for mirror tarballs
+ uri_decoded[5] = {}
+ elif ud.localpath and ud.method.supports_checksum(ud):
+ basename = os.path.basename(ud.localpath)
+ if basename and not result_decoded[loc].endswith(basename):
+ result_decoded[loc] = os.path.join(result_decoded[loc], basename)
+ else:
+ return None
+ result = encodeurl(result_decoded)
+ if result == ud.url:
+ return None
+ logger.debug(2, "For url %s returning %s" % (ud.url, result))
+ return result
+
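+# Illustrative (the URLs are assumptions): uri_replace() is what maps a SRC_URI
+# entry onto a PREMIRRORS/MIRRORS line. With the pair
+#   ('https://.*/.*', 'file:///local/mirror/')
+# a url such as https://example.com/releases/foo-1.0.tar.gz is rewritten to
+# file:///local/mirror/foo-1.0.tar.gz, the basename being appended by the
+# loc == 2 path handling above (when the fetcher supports checksums).
+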
+methods = []
+urldata_cache = {}
+saved_headrevs = {}
+
+def fetcher_init(d):
+ """
+ Called to initialize the fetchers once the configuration data is known.
+ Calls before this must not hit the cache.
+ """
+ # When to drop SCM head revisions is controlled by user policy
+ srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
+ if srcrev_policy == "cache":
+ logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
+ elif srcrev_policy == "clear":
+ logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
+ revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
+ try:
+ bb.fetch2.saved_headrevs = revs.items()
+ except:
+ pass
+ revs.clear()
+ else:
+ raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
+
+ _checksum_cache.init_cache(d)
+
+ for m in methods:
+ if hasattr(m, "init"):
+ m.init(d)
+
+def fetcher_parse_save():
+ _checksum_cache.save_extras()
+
+def fetcher_parse_done():
+ _checksum_cache.save_merge()
+
+def fetcher_compare_revisions(d):
+ """
+ Compare the revisions in the persistent cache with the current values and
+ return true/false depending on whether they've changed.
+ """
+
+ data = dict(bb.persist_data.persist('BB_URI_HEADREVS', d).items())
+ data2 = dict(bb.fetch2.saved_headrevs)
+
+ for key in data:
+ if key not in data2 or data2[key] != data[key]:
+ logger.debug(1, "%s changed", key)
+ return True
+ else:
+ logger.debug(2, "%s did not change", key)
+ return False
+
+def mirror_from_string(data):
+ mirrors = (data or "").replace('\\n',' ').split()
+ # Split into pairs
+ if len(mirrors) % 2 != 0:
+ bb.warn('Invalid mirror data %s, should have paired members.' % data)
+ return list(zip(*[iter(mirrors)]*2))
+
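+# Illustrative (the value is an assumption): a typical PREMIRRORS/MIRRORS string
+# decodes into (find, replace) pairs, e.g.
+#
+#   mirror_from_string("https://.*/.* file:///local/mirror/ \\n git://.*/.* https://mirror.example.com/git/")
+#   # -> [('https://.*/.*', 'file:///local/mirror/'),
+#   #     ('git://.*/.*', 'https://mirror.example.com/git/')]
+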
+def verify_checksum(ud, d, precomputed={}):
+ """
+ Verify the MD5 and SHA256 checksums for the downloaded source
+
+ Raises a FetchError if one or both of the SRC_URI checksums do not match
+ the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
+ checksums specified.
+
+ Returns a dict of checksums that can be stored in a done stamp file and
+ passed in as precomputed parameter in a later call to avoid re-computing
+ the checksums from the file. This allows verifying the checksums of the
+ file against those in the recipe each time, rather than only after
+ downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
+ """
+
+ _MD5_KEY = "md5"
+ _SHA256_KEY = "sha256"
+
+ if ud.ignore_checksums or not ud.method.supports_checksum(ud):
+ return {}
+
+ if _MD5_KEY in precomputed:
+ md5data = precomputed[_MD5_KEY]
+ else:
+ md5data = bb.utils.md5_file(ud.localpath)
+
+ if _SHA256_KEY in precomputed:
+ sha256data = precomputed[_SHA256_KEY]
+ else:
+ sha256data = bb.utils.sha256_file(ud.localpath)
+
+ if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
+ # If strict checking enabled and neither sum defined, raise error
+ strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
+ if strict == "1":
+ logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
+ 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
+ (ud.localpath, ud.md5_name, md5data,
+ ud.sha256_name, sha256data))
+ raise NoChecksumError('Missing SRC_URI checksum', ud.url)
+
+ bb.event.fire(MissingChecksumEvent(ud.url, md5data, sha256data), d)
+
+ if strict == "ignore":
+ return {
+ _MD5_KEY: md5data,
+ _SHA256_KEY: sha256data
+ }
+
+ # Log missing sums so user can more easily add them
+ logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"',
+ ud.localpath, ud.md5_name, md5data)
+ logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
+ 'SRC_URI[%s] = "%s"',
+ ud.localpath, ud.sha256_name, sha256data)
+
+ # We want to alert the user if a checksum is defined in the recipe but
+ # it does not match.
+ msg = ""
+ mismatch = False
+ if ud.md5_expected and ud.md5_expected != md5data:
+ msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
+ mismatch = True
+
+ if ud.sha256_expected and ud.sha256_expected != sha256data:
+ msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
+ mismatch = True
+
+ if mismatch:
+ msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)
+
+ if len(msg):
+ raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
+
+ return {
+ _MD5_KEY: md5data,
+ _SHA256_KEY: sha256data
+ }
+
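+# Illustrative sketch (not from the sources): the returned dict is what gets
+# pickled into the .done stamp, so a later call can re-verify against the recipe
+# without re-hashing the file:
+#
+#   sums = verify_checksum(ud, d)     # {'md5': ..., 'sha256': ...}
+#   verify_checksum(ud, d, sums)      # re-checks the recipe values, skips hashing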
+
+def verify_donestamp(ud, d, origud=None):
+ """
+ Check whether the done stamp file has the right checksums (if the fetch
+ method supports them). If it doesn't, delete the done stamp and force
+ a re-download.
+
+ Returns True, if the donestamp exists and is valid, False otherwise. When
+ returning False, any existing done stamps are removed.
+ """
+ if not ud.needdonestamp or (origud and not origud.needdonestamp):
+ return True
+
+ if not os.path.exists(ud.localpath):
+ # local path does not exist
+ if os.path.exists(ud.donestamp):
+ # done stamp exists, but the downloaded file does not; the done stamp
+ # must be incorrect, re-trigger the download
+ bb.utils.remove(ud.donestamp)
+ return False
+
+ if (not ud.method.supports_checksum(ud) or
+ (origud and not origud.method.supports_checksum(origud))):
+ # if done stamp exists and checksums not supported; assume the local
+ # file is current
+ return os.path.exists(ud.donestamp)
+
+ precomputed_checksums = {}
+ # Only re-use the precomputed checksums if the donestamp is newer than the
+ # file. Do not rely on the mtime of directories, though. If ud.localpath is
+ # a directory, there will probably not be any checksums anyway.
+ if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
+ os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
+ try:
+ with open(ud.donestamp, "rb") as cachefile:
+ pickled = pickle.Unpickler(cachefile)
+ precomputed_checksums.update(pickled.load())
+ except Exception as e:
+ # Avoid the warnings on the upgrade path from empty done stamp
+ # files to those containing the checksums.
+ if not isinstance(e, EOFError):
+ # Ignore errors, they aren't fatal
+ logger.warning("Couldn't load checksums from donestamp %s: %s "
+ "(msg: %s)" % (ud.donestamp, type(e).__name__,
+ str(e)))
+
+ try:
+ checksums = verify_checksum(ud, d, precomputed_checksums)
+ # If the cache file did not have the checksums, compute and store them
+ # as an upgrade path from the previous done stamp file format.
+ if checksums != precomputed_checksums:
+ with open(ud.donestamp, "wb") as cachefile:
+ p = pickle.Pickler(cachefile, 2)
+ p.dump(checksums)
+ return True
+ except ChecksumError as e:
+ # Checksums failed to verify, trigger re-download and remove the
+ # incorrect stamp file.
+ logger.warning("Checksum mismatch for local file %s\n"
+ "Cleaning and trying again." % ud.localpath)
+ if os.path.exists(ud.localpath):
+ rename_bad_checksum(ud, e.checksum)
+ bb.utils.remove(ud.donestamp)
+ return False
+
+
+def update_stamp(ud, d):
+ """
+ donestamp is file stamp indicating the whole fetching is done
+ this function update the stamp after verifying the checksum
+ """
+ if not ud.needdonestamp:
+ return
+
+ if os.path.exists(ud.donestamp):
+ # Touch the done stamp file to show active use of the download
+ try:
+ os.utime(ud.donestamp, None)
+ except:
+ # Errors aren't fatal here
+ pass
+ else:
+ try:
+ checksums = verify_checksum(ud, d)
+ # Store the checksums for later re-verification against the recipe
+ with open(ud.donestamp, "wb") as cachefile:
+ p = pickle.Pickler(cachefile, 2)
+ p.dump(checksums)
+ except ChecksumError as e:
+ # Checksums failed to verify, trigger re-download and remove the
+ # incorrect stamp file.
+ logger.warning("Checksum mismatch for local file %s\n"
+ "Cleaning and trying again." % ud.localpath)
+ if os.path.exists(ud.localpath):
+ rename_bad_checksum(ud, e.checksum)
+ bb.utils.remove(ud.donestamp)
+ raise
+
+def subprocess_setup():
+ # Python installs a SIGPIPE handler by default. This is usually not what
+ # non-Python subprocesses expect.
+ # SIGPIPE errors are known issues with gzip/bash
+ signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
+def get_autorev(d):
+ # Only cache the source revision in the autorev case if the cache policy allows it
+ if d.getVar('BB_SRCREV_POLICY') != "cache":
+ d.setVar('BB_DONT_CACHE', '1')
+ return "AUTOINC"
+
+def get_srcrev(d, method_name='sortable_revision'):
+ """
+ Return the revision string, usually for use in the version string (PV) of the current package
+ Most packages usually only have one SCM so we just pass on the call.
+ In the multi SCM case, we build a value based on SRCREV_FORMAT which must
+ have been set.
+
+ The idea here is that we put the string "AUTOINC+" into the return value if the
+ revisions are not incremental; other code is then responsible for turning that
+ into an increasing value (if needed)
+
+ A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
+ that fetcher provides a method with the given name and the same signature as sortable_revision.
+ """
+
+ scms = []
+ fetcher = Fetch(d.getVar('SRC_URI').split(), d)
+ urldata = fetcher.ud
+ for u in urldata:
+ if urldata[u].method.supports_srcrev():
+ scms.append(u)
+
+ if len(scms) == 0:
+ raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+
+ if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
+ autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
+ if len(rev) > 10:
+ rev = rev[:10]
+ if autoinc:
+ return "AUTOINC+" + rev
+ return rev
+
+ #
+ # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
+ #
+ format = d.getVar('SRCREV_FORMAT')
+ if not format:
+ raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
+
+ name_to_rev = {}
+ seenautoinc = False
+ for scm in scms:
+ ud = urldata[scm]
+ for name in ud.names:
+ autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
+ seenautoinc = seenautoinc or autoinc
+ if len(rev) > 10:
+ rev = rev[:10]
+ name_to_rev[name] = rev
+ # Replace names by revisions in the SRCREV_FORMAT string. The approach used
+ # here can handle names being prefixes of other names and names appearing
+ # as substrings in revisions (in which case the name should not be
+ # expanded). The '|' regular expression operator tries matches from left to
+ # right, so we need to sort the names with the longest ones first.
+ names_descending_len = sorted(name_to_rev, key=len, reverse=True)
+ name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
+ format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)
+
+ if seenautoinc:
+ format = "AUTOINC+" + format
+
+ return format
+
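+# Illustrative recipe fragment (assumed, not from the sources) for the
+# multi-SCM case handled above:
+#
+#   SRC_URI = "git://host/a.git;name=machine git://host/b.git;name=meta"
+#   SRCREV_machine = "abcdef123456..."
+#   SRCREV_meta = "fedcba654321..."
+#   SRCREV_FORMAT = "machine_meta"
+#
+# get_srcrev() would then return something like "abcdef1234_fedcba6543",
+# each revision truncated to 10 characters.
+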
+def localpath(url, d):
+ fetcher = bb.fetch2.Fetch([url], d)
+ return fetcher.localpath(url)
+
+def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
+ """
+ Run cmd returning the command output
+ Raise an error if interrupted or cmd fails
+ Optionally echo command output to stdout
+ Optionally remove the files/directories listed in cleanup upon failure
+ """
+
+ # Need to export PATH as binary could be in metadata paths
+ # rather than host provided
+ # Also include some other variables.
+ # FIXME: Should we really include all exported variables?
+ exportvars = ['HOME', 'PATH',
+ 'HTTP_PROXY', 'http_proxy',
+ 'HTTPS_PROXY', 'https_proxy',
+ 'FTP_PROXY', 'ftp_proxy',
+ 'FTPS_PROXY', 'ftps_proxy',
+ 'NO_PROXY', 'no_proxy',
+ 'ALL_PROXY', 'all_proxy',
+ 'GIT_PROXY_COMMAND',
+ 'GIT_SSL_CAINFO',
+ 'GIT_SMART_HTTP',
+ 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
+ 'SOCKS5_USER', 'SOCKS5_PASSWD',
+ 'DBUS_SESSION_BUS_ADDRESS',
+ 'P4CONFIG']
+
+ if not cleanup:
+ cleanup = []
+
+ # If PATH contains WORKDIR which contains PV which contains SRCPV we
+ # can end up in circular recursion here so give the option of breaking it
+ # in a data store copy.
+ try:
+ d.getVar("PV")
+ except bb.data_smart.ExpansionError:
+ d = bb.data.createCopy(d)
+ d.setVar("PV", "fetcheravoidrecurse")
+
+ origenv = d.getVar("BB_ORIGENV", False)
+ for var in exportvars:
+ val = d.getVar(var) or (origenv and origenv.getVar(var))
+ if val:
+ cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
+
+ # Disable pseudo as it may affect ssh, potentially causing it to hang.
+ cmd = 'export PSEUDO_DISABLED=1; ' + cmd
+
+ logger.debug(1, "Running %s", cmd)
+
+ success = False
+ error_message = ""
+
+ try:
+ (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
+ success = True
+ except bb.process.NotFoundError as e:
+ error_message = "Fetch command %s" % (e.command)
+ except bb.process.ExecutionError as e:
+ if e.stdout:
+ output = "output:\n%s\n%s" % (e.stdout, e.stderr)
+ elif e.stderr:
+ output = "output:\n%s" % e.stderr
+ else:
+ output = "no output"
+ error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
+ except bb.process.CmdError as e:
+ error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
+ if not success:
+ for f in cleanup:
+ try:
+ bb.utils.remove(f, True)
+ except OSError:
+ pass
+
+ raise FetchError(error_message)
+
+ return output
+
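+# Illustrative usage (the command is an assumption): fetchers wrap their SCM
+# invocations in runfetchcmd so the proxy/ssh environment is exported and
+# failures raise FetchError, e.g.
+#
+#   output = runfetchcmd("git ls-remote https://git.example.com/repo.git", d, quiet=True)
+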
+def check_network_access(d, info, url):
+ """
+ log remote network access, and error if BB_NO_NETWORK is set or the given
+ URI is untrusted
+ """
+ if d.getVar("BB_NO_NETWORK") == "1":
+ raise NetworkAccess(url, info)
+ elif not trusted_network(d, url):
+ raise UntrustedUrl(url, info)
+ else:
+ logger.debug(1, "Fetcher accessed the network with the command %s" % info)
+
+def build_mirroruris(origud, mirrors, ld):
+ uris = []
+ uds = []
+
+ replacements = {}
+ replacements["TYPE"] = origud.type
+ replacements["HOST"] = origud.host
+ replacements["PATH"] = origud.path
+ replacements["BASENAME"] = origud.path.split("/")[-1]
+ replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
+
+ def adduri(ud, uris, uds, mirrors, tarballs):
+ for line in mirrors:
+ try:
+ (find, replace) = line
+ except ValueError:
+ continue
+
+ for tarball in tarballs:
+ newuri = uri_replace(ud, find, replace, replacements, ld, tarball)
+ if not newuri or newuri in uris or newuri == origud.url:
+ continue
+
+ if not trusted_network(ld, newuri):
+ logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
+ continue
+
+ # Create a local copy of the mirrors minus the current line
+ # this will prevent us from recursively processing the same line
+ # as well as indirect recursion A -> B -> C -> A
+ localmirrors = list(mirrors)
+ localmirrors.remove(line)
+
+ try:
+ newud = FetchData(newuri, ld)
+ newud.setup_localpath(ld)
+ except bb.fetch2.BBFetchException as e:
+ logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
+ logger.debug(1, str(e))
+ try:
+ # setup_localpath of file:// urls may fail, we should still see
+ # if mirrors of the url exist
+ adduri(newud, uris, uds, localmirrors, tarballs)
+ except UnboundLocalError:
+ pass
+ continue
+ uris.append(newuri)
+ uds.append(newud)
+
+ adduri(newud, uris, uds, localmirrors, tarballs)
+
+ adduri(origud, uris, uds, mirrors, origud.mirrortarballs or [None])
+
+ return uris, uds
+
+def rename_bad_checksum(ud, suffix):
+ """
+ Renames files to have suffix from parameter
+ """
+
+ if ud.localpath is None:
+ return
+
+ new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
+ bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
+ bb.utils.movefile(ud.localpath, new_localpath)
+
+
+def try_mirror_url(fetch, origud, ud, ld, check = False):
+ # Return of None or a value means we're finished
+ # False means try another url
+
+ if ud.lockfile and ud.lockfile != origud.lockfile:
+ lf = bb.utils.lockfile(ud.lockfile)
+
+ try:
+ if check:
+ found = ud.method.checkstatus(fetch, ud, ld)
+ if found:
+ return found
+ return False
+
+ if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
+ ud.method.download(ud, ld)
+ if hasattr(ud.method,"build_mirror_data"):
+ ud.method.build_mirror_data(ud, ld)
+
+ if not ud.localpath or not os.path.exists(ud.localpath):
+ return False
+
+ if ud.localpath == origud.localpath:
+ return ud.localpath
+
+ # We may be obtaining a mirror tarball which needs further processing by the real fetcher
+ # If that tarball is a local file:// we need to provide a symlink to it
+ dldir = ld.getVar("DL_DIR")
+
+ if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
+ # Create donestamp in old format to avoid triggering a re-download
+ if ud.donestamp:
+ bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
+ open(ud.donestamp, 'w').close()
+ dest = os.path.join(dldir, os.path.basename(ud.localpath))
+ if not os.path.exists(dest):
+ # In case this is executing without any file locks held (as is
+ # the case for file:// URLs), two tasks may end up here at the
+ # same time, in which case we do not want the second task to
+ # fail when the link has already been created by the first task.
+ try:
+ os.symlink(ud.localpath, dest)
+ except FileExistsError:
+ pass
+ if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
+ origud.method.download(origud, ld)
+ if hasattr(origud.method, "build_mirror_data"):
+ origud.method.build_mirror_data(origud, ld)
+ return origud.localpath
+ # Otherwise the result is a local file:// and we symlink to it
+ if not os.path.exists(origud.localpath):
+ if os.path.islink(origud.localpath):
+ # Broken symbolic link
+ os.unlink(origud.localpath)
+
+ # As per above, in case two tasks end up here simultaneously.
+ try:
+ os.symlink(ud.localpath, origud.localpath)
+ except FileExistsError:
+ pass
+ update_stamp(origud, ld)
+ return ud.localpath
+
+ except bb.fetch2.NetworkAccess:
+ raise
+
+ except IOError as e:
+ if e.errno in [errno.ESTALE]:
+ logger.warning("Stale Error Observed %s." % ud.url)
+ return False
+ raise
+
+ except bb.fetch2.BBFetchException as e:
+ if isinstance(e, ChecksumError):
+ logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
+ logger.warning(str(e))
+ if os.path.exists(ud.localpath):
+ rename_bad_checksum(ud, e.checksum)
+ elif isinstance(e, NoChecksumError):
+ raise
+ else:
+ logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
+ logger.debug(1, str(e))
+ try:
+ ud.method.clean(ud, ld)
+ except UnboundLocalError:
+ pass
+ return False
+ finally:
+ if ud.lockfile and ud.lockfile != origud.lockfile:
+ bb.utils.unlockfile(lf)
+
+
+def try_mirrors(fetch, d, origud, mirrors, check = False):
+ """
+ Try to use a mirrored version of the sources.
+ This method will be automatically called before the fetchers go.
+
+ d is a bb.data instance
+ origud is the FetchData for the original uri we're trying to download
+ mirrors is the list of mirrors we're going to try
+ """
+ ld = d.createCopy()
+
+ uris, uds = build_mirroruris(origud, mirrors, ld)
+
+ for index, uri in enumerate(uris):
+ ret = try_mirror_url(fetch, origud, uds[index], ld, check)
+ if ret != False:
+ return ret
+ return None
+
+def trusted_network(d, url):
+ """
+ Check whether the host of the given url is in BB_ALLOWED_NETWORKS, set
+ globally or for a specific recipe. Returns True if the host is trusted,
+ or if no network restrictions apply.
+ """
+ if d.getVar('BB_NO_NETWORK') == "1":
+ return True
+
+ pkgname = d.expand(d.getVar('PN', False))
+ trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
+
+ if not trusted_hosts:
+ trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
+
+ # Not enabled.
+ if not trusted_hosts:
+ return True
+
+ scheme, network, path, user, passwd, param = decodeurl(url)
+
+ if not network:
+ return True
+
+ network = network.split(':')[0]
+ network = network.lower()
+
+ for host in trusted_hosts.split(" "):
+ host = host.lower()
+ if host.startswith("*.") and ("." + network).endswith(host[1:]):
+ return True
+ if host == network:
+ return True
+
+ return False
+
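+# Illustrative (assumed values): with BB_ALLOWED_NETWORKS = "*.example.com git.other.org",
+# trusted_network() accepts git://sub.example.com/repo.git and
+# https://git.other.org/repo.git but rejects http://evil.com/repo.git.
+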
+def srcrev_internal_helper(ud, d, name):
+ """
+ Return:
+ a) a source revision if specified
+ b) latest revision if SRCREV="AUTOINC"
+ c) None if not specified
+ """
+
+ srcrev = None
+ pn = d.getVar("PN")
+ attempts = []
+ if name != '' and pn:
+ attempts.append("SRCREV_%s_pn-%s" % (name, pn))
+ if name != '':
+ attempts.append("SRCREV_%s" % name)
+ if pn:
+ attempts.append("SRCREV_pn-%s" % pn)
+ attempts.append("SRCREV")
+
+ for a in attempts:
+ srcrev = d.getVar(a)
+ if srcrev and srcrev != "INVALID":
+ break
+
+ if 'rev' in ud.parm and 'tag' in ud.parm:
+ raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
+
+ if 'rev' in ud.parm or 'tag' in ud.parm:
+ if 'rev' in ud.parm:
+ parmrev = ud.parm['rev']
+ else:
+ parmrev = ud.parm['tag']
+ if srcrev == "INVALID" or not srcrev:
+ return parmrev
+ if srcrev != parmrev:
+ raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
+ return parmrev
+
+ if srcrev == "INVALID" or not srcrev:
+ raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
+ if srcrev == "AUTOINC":
+ srcrev = ud.method.latest_revision(ud, d, name)
+
+ return srcrev
+
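+# Illustrative (assumed recipe): for PN = "linux-yocto" and name = "machine",
+# the variables tried are, in order: SRCREV_machine_pn-linux-yocto,
+# SRCREV_machine, SRCREV_pn-linux-yocto and finally SRCREV.
+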
+def get_checksum_file_list(d):
+ """ Get a list of files checksum in SRC_URI
+
+ Returns the resolved local paths of all local file entries in
+ SRC_URI as a space-separated string
+ """
+ fetch = Fetch([], d, cache = False, localonly = True)
+
+ dl_dir = d.getVar('DL_DIR')
+ filelist = []
+ for u in fetch.urls:
+ ud = fetch.ud[u]
+
+ if ud and isinstance(ud.method, local.Local):
+ paths = ud.method.localpaths(ud, d)
+ for f in paths:
+ pth = ud.decodedurl
+ if '*' in pth:
+ f = os.path.join(os.path.abspath(f), pth)
+ if f.startswith(dl_dir):
+ # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
+ if os.path.exists(f):
+ bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
+ else:
+ bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
+ filelist.append(f + ":" + str(os.path.exists(f)))
+
+ return " ".join(filelist)
+
+def get_file_checksums(filelist, pn):
+ """Get a list of the checksums for a list of local files
+
+ Returns the checksums for a list of local files, caching the results as
+ it proceeds
+
+ """
+ return _checksum_cache.get_checksums(filelist, pn)
+
+
+class FetchData(object):
+ """
+ A class which represents the fetcher state for a given URI.
+ """
+ def __init__(self, url, d, localonly = False):
+ # localpath is the location of a downloaded result. If not set, the file is local.
+ self.donestamp = None
+ self.needdonestamp = True
+ self.localfile = ""
+ self.localpath = None
+ self.lockfile = None
+ self.mirrortarballs = []
+ self.basename = None
+ self.basepath = None
+ (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
+ self.date = self.getSRCDate(d)
+ self.url = url
+ if not self.user and "user" in self.parm:
+ self.user = self.parm["user"]
+ if not self.pswd and "pswd" in self.parm:
+ self.pswd = self.parm["pswd"]
+ self.setup = False
+
+ if "name" in self.parm:
+ self.md5_name = "%s.md5sum" % self.parm["name"]
+ self.sha256_name = "%s.sha256sum" % self.parm["name"]
+ else:
+ self.md5_name = "md5sum"
+ self.sha256_name = "sha256sum"
+ if self.md5_name in self.parm:
+ self.md5_expected = self.parm[self.md5_name]
+ elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
+ self.md5_expected = None
+ else:
+ self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
+ if self.sha256_name in self.parm:
+ self.sha256_expected = self.parm[self.sha256_name]
+ elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
+ self.sha256_expected = None
+ else:
+ self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
+ self.ignore_checksums = False
+
+ self.names = self.parm.get("name",'default').split(',')
+
+ self.method = None
+ for m in methods:
+ if m.supports(self, d):
+ self.method = m
+ break
+
+ if not self.method:
+ raise NoMethodError(url)
+
+ if localonly and not isinstance(self.method, local.Local):
+ raise NonLocalMethod()
+
+ if self.parm.get("proto", None) and "protocol" not in self.parm:
+ logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
+ self.parm["protocol"] = self.parm.get("proto", None)
+
+ if hasattr(self.method, "urldata_init"):
+ self.method.urldata_init(self, d)
+
+ if "localpath" in self.parm:
+ # if user sets localpath for file, use it instead.
+ self.localpath = self.parm["localpath"]
+ self.basename = os.path.basename(self.localpath)
+ elif self.localfile:
+ self.localpath = self.method.localpath(self, d)
+
+ dldir = d.getVar("DL_DIR")
+
+ if not self.needdonestamp:
+ return
+
+ # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
+ if self.localpath and self.localpath.startswith(dldir):
+ basepath = self.localpath
+ elif self.localpath:
+ basepath = dldir + os.sep + os.path.basename(self.localpath)
+ elif self.basepath or self.basename:
+ basepath = dldir + os.sep + (self.basepath or self.basename)
+ else:
+ bb.fatal("Can't determine lock path for url %s" % url)
+
+ self.donestamp = basepath + '.done'
+ self.lockfile = basepath + '.lock'
+
+ def setup_revisions(self, d):
+ self.revisions = {}
+ for name in self.names:
+ self.revisions[name] = srcrev_internal_helper(self, d, name)
+
+ # add compatibility code for non name specified case
+ if len(self.names) == 1:
+ self.revision = self.revisions[self.names[0]]
+
+ def setup_localpath(self, d):
+ if not self.localpath:
+ self.localpath = self.method.localpath(self, d)
+
+ def getSRCDate(self, d):
+ """
+ Return the SRC Date for the component
+
+ d the bb.data instance
+ """
+ if "srcdate" in self.parm:
+ return self.parm['srcdate']
+
+ pn = d.getVar("PN")
+
+ if pn:
+ return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
+
+ return d.getVar("SRCDATE") or d.getVar("DATE")
+
+class FetchMethod(object):
+ """Base class for 'fetch'ing data"""
+
+ def __init__(self, urls=None):
+ self.urls = []
+
+ def supports(self, urldata, d):
+ """
+ Check to see if this fetch class supports a given url.
+ """
+ return False
+
+ def localpath(self, urldata, d):
+ """
+ Return the local filename of a given url assuming a successful fetch.
+ Can also set up variables in urldata for use in go (saving code duplication
+ and duplicate code execution)
+ """
+ return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
+
+ def supports_checksum(self, urldata):
+ """
+ Is localpath something that can be represented by a checksum?
+ """
+
+ # We cannot compute checksums for directories
+ if os.path.isdir(urldata.localpath):
+ return False
+ if urldata.localpath.find("*") != -1:
+ return False
+
+ return True
+
+ def recommends_checksum(self, urldata):
+ """
+ Is this a backend for which checksums are recommended (i.e. should
+ warnings be displayed if there is no checksum)?
+ """
+ return False
+
+ def _strip_leading_slashes(self, relpath):
+ """
+ Remove leading slash as os.path.join can't cope
+ """
+ while os.path.isabs(relpath):
+ relpath = relpath[1:]
+ return relpath
+
+ def setUrls(self, urls):
+ self.__urls = urls
+
+ def getUrls(self):
+ return self.__urls
+
+ urls = property(getUrls, setUrls, None, "Urls property")
+
+ def need_update(self, ud, d):
+ """
+ Force a fetch, even if localpath exists?
+ """
+ if os.path.exists(ud.localpath):
+ return False
+ return True
+
+ def supports_srcrev(self):
+ """
+ The fetcher supports auto source revisions (SRCREV)
+ """
+ return False
+
+ def download(self, urldata, d):
+ """
+ Fetch urls
+ Assumes localpath was called first
+ """
+ raise NoMethodError(urldata.url)
+
+ def unpack(self, urldata, rootdir, data):
+ iterate = False
+ file = urldata.localpath
+
+ # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
+ # but this must be corrected back when copying local files
+ if urldata.basename == '*' and file.endswith('/.'):
+ file = '%s/%s' % (file.rstrip('/.'), urldata.path)
+
+ try:
+ unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
+ except ValueError as exc:
+ bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
+ (file, urldata.parm.get('unpack')))
+
+ base, ext = os.path.splitext(file)
+ if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
+ efile = os.path.join(rootdir, os.path.basename(base))
+ else:
+ efile = file
+ cmd = None
+
+ if unpack:
+ if file.endswith('.tar'):
+ cmd = 'tar x --no-same-owner -f %s' % file
+ elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
+ cmd = 'tar xz --no-same-owner -f %s' % file
+ elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
+ cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
+ elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
+ cmd = 'gzip -dc %s > %s' % (file, efile)
+ elif file.endswith('.bz2'):
+ cmd = 'bzip2 -dc %s > %s' % (file, efile)
+ elif file.endswith('.txz') or file.endswith('.tar.xz'):
+ cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
+ elif file.endswith('.xz'):
+ cmd = 'xz -dc %s > %s' % (file, efile)
+ elif file.endswith('.tar.lz'):
+ cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+ elif file.endswith('.lz'):
+ cmd = 'lzip -dc %s > %s' % (file, efile)
+ elif file.endswith('.tar.7z'):
+ cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
+ elif file.endswith('.7z'):
+ cmd = '7za x -y %s 1>/dev/null' % file
+ elif file.endswith('.zip') or file.endswith('.jar'):
+ try:
+ dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
+ except ValueError as exc:
+ bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
+ (file, urldata.parm.get('dos')))
+ cmd = 'unzip -q -o'
+ if dos:
+ cmd = '%s -a' % cmd
+ cmd = "%s '%s'" % (cmd, file)
+ elif file.endswith('.rpm') or file.endswith('.srpm'):
+ if 'extract' in urldata.parm:
+ unpack_file = urldata.parm.get('extract')
+ cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
+ iterate = True
+ iterate_file = unpack_file
+ else:
+ cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
+ elif file.endswith('.deb') or file.endswith('.ipk'):
+ output = subprocess.check_output('ar -t %s' % file, preexec_fn=subprocess_setup, shell=True)
+ datafile = None
+ if output:
+ for line in output.decode().splitlines():
+ if line.startswith('data.tar.'):
+ datafile = line
+ break
+ else:
+ raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
+ else:
+ raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
+ cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
+
+ # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
+ if 'subdir' in urldata.parm:
+ subdir = urldata.parm.get('subdir')
+ if os.path.isabs(subdir):
+ if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
+ raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
+ unpackdir = subdir
+ else:
+ unpackdir = os.path.join(rootdir, subdir)
+ bb.utils.mkdirhier(unpackdir)
+ else:
+ unpackdir = rootdir
+
+ if not unpack or not cmd:
+ # If file == dest, then avoid any copies, as we already put the file into dest!
+ dest = os.path.join(unpackdir, os.path.basename(file))
+ if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
+ destdir = '.'
+ # For file:// entries all intermediate dirs in path must be created at destination
+ if urldata.type == "file":
+ # A trailing '/' causes the copy to go to the wrong place
+ urlpath = urldata.path.rstrip('/')
+ # We want files placed relative to cwd, so strip any leading '/'
+ urlpath = urlpath.lstrip('/')
+ if urlpath.find("/") != -1:
+ destdir = urlpath.rsplit("/", 1)[0] + '/'
+ bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
+ cmd = 'cp -fpPRH %s %s' % (file, destdir)
+
+ if not cmd:
+ return
+
+ path = data.getVar('PATH')
+ if path:
+ cmd = "PATH=\"%s\" %s" % (path, cmd)
+ bb.note("Unpacking %s to %s/" % (file, unpackdir))
+ ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)
+
+ if ret != 0:
+ raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
+
+ if iterate is True:
+ iterate_urldata = urldata
+ iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
+ self.unpack(urldata, rootdir, data)
+
+ return
+
+ def clean(self, urldata, d):
+ """
+ Clean any existing full or partial download
+ """
+ bb.utils.remove(urldata.localpath)
+
+ def try_premirror(self, urldata, d):
+ """
+ Should premirrors be used?
+ """
+ return True
+
+ def checkstatus(self, fetch, urldata, d):
+ """
+ Check the status of a URL
+ Assumes localpath was called first
+ """
+ logger.info("URL %s could not be checked for status since no method exists.", url)
+ return True
+
+ def latest_revision(self, ud, d, name):
+ """
+ Look in the cache for the latest revision, if not present ask the SCM.
+ """
+ if not hasattr(self, "_latest_revision"):
+ raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
+
+ revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
+ key = self.generate_revision_key(ud, d, name)
+ try:
+ return revs[key]
+ except KeyError:
+ revs[key] = rev = self._latest_revision(ud, d, name)
+ return rev
+
+ def sortable_revision(self, ud, d, name):
+ latest_rev = self._build_revision(ud, d, name)
+ return True, str(latest_rev)
+
+ def generate_revision_key(self, ud, d, name):
+ key = self._revision_key(ud, d, name)
+ return "%s-%s" % (key, d.getVar("PN") or "")
+
+ def latest_versionstring(self, ud, d):
+ """
+ Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
+ by searching through the tags output of ls-remote, comparing
+ versions and returning the highest match as a (version, revision) pair.
+ """
+ return ('', '')
+
+class Fetch(object):
+ def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
+ if localonly and cache:
+ raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
+
+ if len(urls) == 0:
+ urls = d.getVar("SRC_URI").split()
+ self.urls = urls
+ self.d = d
+ self.ud = {}
+ self.connection_cache = connection_cache
+
+ fn = d.getVar('FILE')
+ mc = d.getVar('__BBMULTICONFIG') or ""
+ if cache and fn and mc + fn in urldata_cache:
+ self.ud = urldata_cache[mc + fn]
+
+ for url in urls:
+ if url not in self.ud:
+ try:
+ self.ud[url] = FetchData(url, d, localonly)
+ except NonLocalMethod:
+ if localonly:
+ self.ud[url] = None
+
+ if fn and cache:
+ urldata_cache[mc + fn] = self.ud
+
+ def localpath(self, url):
+ if url not in self.urls:
+ self.ud[url] = FetchData(url, self.d)
+
+ self.ud[url].setup_localpath(self.d)
+ return self.d.expand(self.ud[url].localpath)
+
+ def localpaths(self):
+ """
+ Return a list of the local filenames, assuming successful fetch
+ """
+ local = []
+
+ for u in self.urls:
+ ud = self.ud[u]
+ ud.setup_localpath(self.d)
+ local.append(ud.localpath)
+
+ return local
+
+ def download(self, urls=None):
+ """
+ Fetch all urls
+ """
+ if not urls:
+ urls = self.urls
+
+ network = self.d.getVar("BB_NO_NETWORK")
+ premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1")
+
+ for u in urls:
+ ud = self.ud[u]
+ ud.setup_localpath(self.d)
+ m = ud.method
+ localpath = ""
+
+ if ud.lockfile:
+ lf = bb.utils.lockfile(ud.lockfile)
+
+ try:
+ self.d.setVar("BB_NO_NETWORK", network)
+
+ if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
+ localpath = ud.localpath
+ elif m.try_premirror(ud, self.d):
+ logger.debug(1, "Trying PREMIRRORS")
+ mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
+ localpath = try_mirrors(self, self.d, ud, mirrors, False)
+ if localpath:
+ try:
+ # early checksum verification so that if the checksum of the premirror
+ # contents mismatches, the fetcher can still try upstream and mirrors
+ update_stamp(ud, self.d)
+ except ChecksumError as e:
+ logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
+ logger.debug(1, str(e))
+ localpath = ""
+
+ if premirroronly:
+ self.d.setVar("BB_NO_NETWORK", "1")
+
+ firsterr = None
+ verified_stamp = verify_donestamp(ud, self.d)
+ if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
+ try:
+ if not trusted_network(self.d, ud.url):
+ raise UntrustedUrl(ud.url)
+ logger.debug(1, "Trying Upstream")
+ m.download(ud, self.d)
+ if hasattr(m, "build_mirror_data"):
+ m.build_mirror_data(ud, self.d)
+ localpath = ud.localpath
+ # early checksum verification, so that if the checksum mismatches,
+ # the fetcher still has a chance to fetch from a mirror
+ update_stamp(ud, self.d)
+
+ except bb.fetch2.NetworkAccess:
+ raise
+
+ except BBFetchException as e:
+ if isinstance(e, ChecksumError):
+ logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
+ logger.debug(1, str(e))
+ if os.path.exists(ud.localpath):
+ rename_bad_checksum(ud, e.checksum)
+ elif isinstance(e, NoChecksumError):
+ raise
+ else:
+ logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
+ logger.debug(1, str(e))
+ firsterr = e
+ # Remove any incomplete fetch
+ if not verified_stamp:
+ m.clean(ud, self.d)
+ logger.debug(1, "Trying MIRRORS")
+ mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
+ localpath = try_mirrors(self, self.d, ud, mirrors)
+
+ if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
+ if firsterr:
+ logger.error(str(firsterr))
+ raise FetchError("Unable to fetch URL from any source.", u)
+
+ update_stamp(ud, self.d)
+
+ except IOError as e:
+ if e.errno in [errno.ESTALE]:
+ logger.error("Stale Error Observed %s." % u)
+ raise ChecksumError("Stale Error Detected")
+
+ except BBFetchException as e:
+ if isinstance(e, ChecksumError):
+ logger.error("Checksum failure fetching %s" % u)
+ raise
+
+ finally:
+ if ud.lockfile:
+ bb.utils.unlockfile(lf)
+
+ def checkstatus(self, urls=None):
+ """
+ Check all urls exist upstream
+ """
+
+ if not urls:
+ urls = self.urls
+
+ for u in urls:
+ ud = self.ud[u]
+ ud.setup_localpath(self.d)
+ m = ud.method
+ logger.debug(1, "Testing URL %s", u)
+ # First try checking uri, u, from PREMIRRORS
+ mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
+ ret = try_mirrors(self, self.d, ud, mirrors, True)
+ if not ret:
+ # Next try checking from the original uri, u
+ ret = m.checkstatus(self, ud, self.d)
+ if not ret:
+ # Finally, try checking uri, u, from MIRRORS
+ mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
+ ret = try_mirrors(self, self.d, ud, mirrors, True)
+
+ if not ret:
+ raise FetchError("URL %s doesn't work" % u, u)
+
+ def unpack(self, root, urls=None):
+ """
+ Unpack urls to root
+ """
+
+ if not urls:
+ urls = self.urls
+
+ for u in urls:
+ ud = self.ud[u]
+ ud.setup_localpath(self.d)
+
+ if ud.lockfile:
+ lf = bb.utils.lockfile(ud.lockfile)
+
+ ud.method.unpack(ud, root, self.d)
+
+ if ud.lockfile:
+ bb.utils.unlockfile(lf)
+
+ def clean(self, urls=None):
+ """
+ Clean files that the fetcher gets or places
+ """
+
+ if not urls:
+ urls = self.urls
+
+ for url in urls:
+ if url not in self.ud:
+ self.ud[url] = FetchData(url, self.d)
+ ud = self.ud[url]
+ ud.setup_localpath(self.d)
+
+ if not ud.localfile and ud.localpath is None:
+ continue
+
+ if ud.lockfile:
+ lf = bb.utils.lockfile(ud.lockfile)
+
+ ud.method.clean(ud, self.d)
+ if ud.donestamp:
+ bb.utils.remove(ud.donestamp)
+
+ if ud.lockfile:
+ bb.utils.unlockfile(lf)
+
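+# Illustrative driver code (a sketch of typical use; not part of the sources):
+#
+#   fetcher = bb.fetch2.Fetch(d.getVar('SRC_URI').split(), d)
+#   fetcher.download()                   # PREMIRRORS -> upstream -> MIRRORS
+#   fetcher.unpack(d.getVar('WORKDIR'))
+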
+class FetchConnectionCache(object):
+ """
+ A class which represents a container for socket connections.
+ """
+ def __init__(self):
+ self.cache = {}
+
+ def get_connection_name(self, host, port):
+ return host + ':' + str(port)
+
+ def add_connection(self, host, port, connection):
+ cn = self.get_connection_name(host, port)
+
+ if cn not in self.cache:
+ self.cache[cn] = connection
+
+ def get_connection(self, host, port):
+ connection = None
+
+ cn = self.get_connection_name(host, port)
+ if cn in self.cache:
+ connection = self.cache[cn]
+
+ return connection
+
+ def remove_connection(self, host, port):
+ cn = self.get_connection_name(host, port)
+ if cn in self.cache:
+ self.cache[cn].close()
+ del self.cache[cn]
+
+ def close_connections(self):
+ for cn in list(self.cache.keys()):
+ self.cache[cn].close()
+ del self.cache[cn]
+
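+# Illustrative use of FetchConnectionCache (a sketch, not taken from this
+# file): the cached object is assumed to be anything exposing close(), e.g.
+# an http.client.HTTPConnection as used by the wget fetcher's status checks:
+#
+#   connection_cache = FetchConnectionCache()
+#   conn = connection_cache.get_connection(host, port)
+#   if conn is None:
+#       conn = http.client.HTTPConnection(host, port)
+#       connection_cache.add_connection(host, port, conn)
+#   ...issue request on conn...
+#   connection_cache.close_connections()
+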
+from . import cvs
+from . import git
+from . import gitsm
+from . import gitannex
+from . import local
+from . import svn
+from . import wget
+from . import ssh
+from . import sftp
+from . import s3
+from . import perforce
+from . import bzr
+from . import hg
+from . import osc
+from . import repo
+from . import clearcase
+from . import npm
+
+methods.append(local.Local())
+methods.append(wget.Wget())
+methods.append(svn.Svn())
+methods.append(git.Git())
+methods.append(gitsm.GitSM())
+methods.append(gitannex.GitANNEX())
+methods.append(cvs.Cvs())
+methods.append(ssh.SSH())
+methods.append(sftp.SFTP())
+methods.append(s3.S3())
+methods.append(perforce.Perforce())
+methods.append(bzr.Bzr())
+methods.append(hg.Hg())
+methods.append(osc.Osc())
+methods.append(repo.Repo())
+methods.append(clearcase.ClearCase())
+methods.append(npm.Npm())
diff --git a/poky/bitbake/lib/bb/fetch2/bzr.py b/poky/bitbake/lib/bb/fetch2/bzr.py
new file mode 100644
index 000000000..16123f8af
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/bzr.py
@@ -0,0 +1,139 @@
+"""
+BitBake 'Fetch' implementation for bzr.
+
+"""
+
+# Copyright (C) 2007 Ross Burton
+# Copyright (C) 2007 Richard Purdie
+#
+# Classes for obtaining upstream sources for the
+# BitBake build tools.
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import logging
+import bb
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Bzr(FetchMethod):
+ def supports(self, ud, d):
+ return ud.type in ['bzr']
+
+ def urldata_init(self, ud, d):
+ """
+        Init bzr-specific variables within url data
+ """
+ # Create paths to bzr checkouts
+ relpath = self._strip_leading_slashes(ud.path)
+ ud.pkgdir = os.path.join(d.expand('${BZRDIR}'), ud.host, relpath)
+
+ ud.setup_revisions(d)
+
+ if not ud.revision:
+ ud.revision = self.latest_revision(ud, d)
+
+ ud.localfile = d.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision))
+
+ def _buildbzrcommand(self, ud, d, command):
+ """
+        Build up a bzr command line based on ud
+ command is "fetch", "update", "revno"
+ """
+
+ basecmd = d.expand('${FETCHCMD_bzr}')
+
+ proto = ud.parm.get('protocol', 'http')
+
+ bzrroot = ud.host + ud.path
+
+ options = []
+
+ if command == "revno":
+ bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
+ else:
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+
+ if command == "fetch":
+ bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
+ elif command == "update":
+ bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
+ else:
+ raise FetchError("Invalid bzr command %s" % command, ud.url)
+
+ return bzrcmd
+
+ def download(self, ud, d):
+ """Fetch url"""
+
+ if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
+ bzrcmd = self._buildbzrcommand(ud, d, "update")
+ logger.debug(1, "BZR Update %s", ud.url)
+ bb.fetch2.check_network_access(d, bzrcmd, ud.url)
+ runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
+ else:
+ bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
+ bzrcmd = self._buildbzrcommand(ud, d, "fetch")
+ bb.fetch2.check_network_access(d, bzrcmd, ud.url)
+ logger.debug(1, "BZR Checkout %s", ud.url)
+ bb.utils.mkdirhier(ud.pkgdir)
+ logger.debug(1, "Running %s", bzrcmd)
+ runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude='.bzr' --exclude='.bzrtags'"
+
+ # tar them up to a defined filename
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)),
+ d, cleanup=[ud.localpath], workdir=ud.pkgdir)
+
+ def supports_srcrev(self):
+ return True
+
+ def _revision_key(self, ud, d, name):
+ """
+ Return a unique key for the url
+ """
+ return "bzr:" + ud.pkgdir
+
+ def _latest_revision(self, ud, d, name):
+ """
+ Return the latest upstream revision number
+ """
+ logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
+
+ bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
+
+ output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
+
+ return output.strip()
+
+ def sortable_revision(self, ud, d, name):
+ """
+        Return a sortable revision number, which for bzr is simply the revision number
+ """
+
+ return False, self._build_revision(ud, d)
+
+ def _build_revision(self, ud, d):
+ return ud.revision
diff --git a/poky/bitbake/lib/bb/fetch2/clearcase.py b/poky/bitbake/lib/bb/fetch2/clearcase.py
new file mode 100644
index 000000000..36beab6a5
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/clearcase.py
@@ -0,0 +1,260 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' clearcase implementation
+
+The clearcase fetcher is used to retrieve files from a ClearCase repository.
+
+Usage in the recipe:
+
+ SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
+ SRCREV = "EXAMPLE_CLEARCASE_TAG"
+ PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
+
+The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.
+
+Supported SRC_URI options are:
+
+- vob
+    (required) The name of the ClearCase VOB (with a leading "/")
+
+- module
+    The module in the selected VOB (with a leading "/")
+
+ The module and vob parameters are combined to create
+ the following load rule in the view config spec:
+ load <vob><module>
+
+- proto
+ http or https
+
+Related variables:
+
+ CCASE_CUSTOM_CONFIG_SPEC
+ Write a config spec to this variable in your recipe to use it instead
+ of the default config spec generated by this fetcher.
+ Please note that the SRCREV loses its functionality if you specify
+ this variable. SRCREV is still used to label the archive after a fetch,
+ but it doesn't define what's fetched.
+
+User credentials:
+ cleartool:
+ The login of cleartool is handled by the system. No special steps needed.
+
+ rcleartool:
+ In order to use rcleartool with authenticated users an `rcleartool login` is
+ necessary before using the fetcher.
+"""
+# Copyright (C) 2014 Siemens AG
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+
+import os
+import sys
+import shutil
+import bb
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import ParameterError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+from distutils import spawn
+
+class ClearCase(FetchMethod):
+ """Class to fetch urls via 'clearcase'"""
+ def init(self, d):
+ pass
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with Clearcase.
+ """
+ return ud.type in ['ccrc']
+
+ def debug(self, msg):
+ logger.debug(1, "ClearCase: %s", msg)
+
+ def urldata_init(self, ud, d):
+ """
+        Init ClearCase-specific variables within url data
+ """
+ ud.proto = "https"
+ if 'protocol' in ud.parm:
+ ud.proto = ud.parm['protocol']
+        if ud.proto not in ('http', 'https'):
+            raise ParameterError("Invalid protocol type", ud.url)
+
+ ud.vob = ''
+ if 'vob' in ud.parm:
+ ud.vob = ud.parm['vob']
+ else:
+ msg = ud.url+": vob must be defined so the fetcher knows what to get."
+ raise MissingParameterError('vob', msg)
+
+ if 'module' in ud.parm:
+ ud.module = ud.parm['module']
+ else:
+ ud.module = ""
+
+ ud.basecmd = d.getVar("FETCHCMD_ccrc") or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
+
+ if d.getVar("SRCREV") == "INVALID":
+ raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
+
+ ud.label = d.getVar("SRCREV", False)
+ ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC")
+
+ ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)
+
+ ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
+ ud.module.replace("/", "."),
+ ud.label.replace("/", "."))
+
+        ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME"))
+ ud.csname = "%s-config-spec" % (ud.identifier)
+ ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
+ ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
+ ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
+ ud.localfile = "%s.tar.gz" % (ud.identifier)
+
+ self.debug("host = %s" % ud.host)
+ self.debug("path = %s" % ud.path)
+ self.debug("server = %s" % ud.server)
+ self.debug("proto = %s" % ud.proto)
+ self.debug("type = %s" % ud.type)
+ self.debug("vob = %s" % ud.vob)
+ self.debug("module = %s" % ud.module)
+ self.debug("basecmd = %s" % ud.basecmd)
+ self.debug("label = %s" % ud.label)
+ self.debug("ccasedir = %s" % ud.ccasedir)
+ self.debug("viewdir = %s" % ud.viewdir)
+ self.debug("viewname = %s" % ud.viewname)
+ self.debug("configspecfile = %s" % ud.configspecfile)
+ self.debug("localfile = %s" % ud.localfile)
+
+ ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)
+
+ def _build_ccase_command(self, ud, command):
+ """
+ Build up a commandline based on ud
+ command is: mkview, setcs, rmview
+ """
+ options = []
+
+ if "rcleartool" in ud.basecmd:
+ options.append("-server %s" % ud.server)
+
+ basecmd = "%s %s" % (ud.basecmd, command)
+
+        if command == 'mkview':
+ if not "rcleartool" in ud.basecmd:
+ # Cleartool needs a -snapshot view
+ options.append("-snapshot")
+ options.append("-tag %s" % ud.viewname)
+ options.append(ud.viewdir)
+
+        elif command == 'rmview':
+ options.append("-force")
+ options.append("%s" % ud.viewdir)
+
+        elif command == 'setcs':
+ options.append("-overwrite")
+ options.append(ud.configspecfile)
+
+ else:
+ raise FetchError("Invalid ccase command %s" % command)
+
+ ccasecmd = "%s %s" % (basecmd, " ".join(options))
+ self.debug("ccasecmd = %s" % ccasecmd)
+ return ccasecmd
+
+ def _write_configspec(self, ud, d):
+ """
+ Create config spec file (ud.configspecfile) for ccase view
+ """
+ config_spec = ""
+        custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC")
+ if custom_config_spec is not None:
+ for line in custom_config_spec.split("\\n"):
+ config_spec += line+"\n"
+ bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
+ else:
+ config_spec += "element * CHECKEDOUT\n"
+ config_spec += "element * %s\n" % ud.label
+ config_spec += "load %s%s\n" % (ud.vob, ud.module)
+
+ logger.info("Using config spec: \n%s" % config_spec)
+
+ with open(ud.configspecfile, 'w') as f:
+ f.write(config_spec)
+
+ def _remove_view(self, ud, d):
+ if os.path.exists(ud.viewdir):
+            cmd = self._build_ccase_command(ud, 'rmview')
+ logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
+ bb.fetch2.check_network_access(d, cmd, ud.url)
+ output = runfetchcmd(cmd, d, workdir=ud.ccasedir)
+ logger.info("rmview output: %s", output)
+
+ def need_update(self, ud, d):
+ if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
+            ud.identifier += "-%s" % d.getVar("DATETIME")
+ return True
+ if os.path.exists(ud.localpath):
+ return False
+ return True
+
+ def supports_srcrev(self):
+ return True
+
+ def sortable_revision(self, ud, d, name):
+ return False, ud.identifier
+
+ def download(self, ud, d):
+ """Fetch url"""
+
+ # Make a fresh view
+ bb.utils.mkdirhier(ud.ccasedir)
+ self._write_configspec(ud, d)
+ cmd = self._build_ccase_command(ud, 'mkview')
+ logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
+ bb.fetch2.check_network_access(d, cmd, ud.url)
+ try:
+ runfetchcmd(cmd, d)
+ except FetchError as e:
+ if "CRCLI2008E" in e.msg:
+ raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
+ else:
+ raise e
+
+ # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
+        cmd = self._build_ccase_command(ud, 'setcs')
+ logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
+ bb.fetch2.check_network_access(d, cmd, ud.url)
+ output = runfetchcmd(cmd, d, workdir=ud.viewdir)
+ logger.info("%s", output)
+
+ # Copy the configspec to the viewdir so we have it in our source tarball later
+ shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))
+
+ # Clean clearcase meta-data before tar
+
+ runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])
+
+ # Clean up so we can create a new view next time
+        self.clean(ud, d)
+
+ def clean(self, ud, d):
+ self._remove_view(ud, d)
+ bb.utils.remove(ud.configspecfile)
diff --git a/poky/bitbake/lib/bb/fetch2/cvs.py b/poky/bitbake/lib/bb/fetch2/cvs.py
new file mode 100644
index 000000000..490c95471
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/cvs.py
@@ -0,0 +1,172 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+#Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#
+
+import os
+import logging
+import bb
+from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
+from bb.fetch2 import runfetchcmd
+
+class Cvs(FetchMethod):
+ """
+ Class to fetch a module or modules from cvs repositories
+ """
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with cvs.
+ """
+ return ud.type in ['cvs']
+
+ def urldata_init(self, ud, d):
+ if not "module" in ud.parm:
+ raise MissingParameterError("module", ud.url)
+ ud.module = ud.parm["module"]
+
+ ud.tag = ud.parm.get('tag', "")
+
+ # Override the default date in certain cases
+ if 'date' in ud.parm:
+ ud.date = ud.parm['date']
+ elif ud.tag:
+ ud.date = ""
+
+ norecurse = ''
+ if 'norecurse' in ud.parm:
+ norecurse = '_norecurse'
+
+ fullpath = ''
+ if 'fullpath' in ud.parm:
+ fullpath = '_fullpath'
+
+ ud.localfile = d.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath))
+
+ def need_update(self, ud, d):
+        if ud.date == "now":
+ return True
+ if not os.path.exists(ud.localpath):
+ return True
+ return False
+
+ def download(self, ud, d):
+
+ method = ud.parm.get('method', 'pserver')
+ localdir = ud.parm.get('localdir', ud.module)
+ cvs_port = ud.parm.get('port', '')
+
+ cvs_rsh = None
+ if method == "ext":
+ if "rsh" in ud.parm:
+ cvs_rsh = ud.parm["rsh"]
+
+ if method == "dir":
+ cvsroot = ud.path
+ else:
+ cvsroot = ":" + method
+ cvsproxyhost = d.getVar('CVS_PROXY_HOST')
+ if cvsproxyhost:
+ cvsroot += ";proxy=" + cvsproxyhost
+ cvsproxyport = d.getVar('CVS_PROXY_PORT')
+ if cvsproxyport:
+ cvsroot += ";proxyport=" + cvsproxyport
+ cvsroot += ":" + ud.user
+ if ud.pswd:
+ cvsroot += ":" + ud.pswd
+ cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
+
+ options = []
+ if 'norecurse' in ud.parm:
+ options.append("-l")
+ if ud.date:
+ # treat YYYYMMDDHHMM specially for CVS
+ if len(ud.date) == 12:
+ options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
+ else:
+ options.append("-D \"%s UTC\"" % ud.date)
+ if ud.tag:
+ options.append("-r %s" % ud.tag)
+
+ cvsbasecmd = d.getVar("FETCHCMD_cvs")
+ cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
+ cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
+
+ if cvs_rsh:
+ cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
+ cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
+
+ # create module directory
+ logger.debug(2, "Fetch: checking for module directory")
+ pkg = d.getVar('PN')
+ pkgdir = os.path.join(d.getVar('CVSDIR'), pkg)
+ moddir = os.path.join(pkgdir, localdir)
+ workdir = None
+ if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
+ logger.info("Update " + ud.url)
+ bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
+ # update sources there
+ workdir = moddir
+ cmd = cvsupdatecmd
+ else:
+ logger.info("Fetch " + ud.url)
+ # check out sources there
+ bb.utils.mkdirhier(pkgdir)
+ workdir = pkgdir
+ logger.debug(1, "Running %s", cvscmd)
+ bb.fetch2.check_network_access(d, cvscmd, ud.url)
+ cmd = cvscmd
+
+ runfetchcmd(cmd, d, cleanup=[moddir], workdir=workdir)
+
+ if not os.access(moddir, os.R_OK):
+ raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude='CVS'"
+
+ # tar them up to a defined filename
+ workdir = None
+ if 'fullpath' in ud.parm:
+ workdir = pkgdir
+ cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
+ else:
+ workdir = os.path.dirname(os.path.realpath(moddir))
+ cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
+
+ runfetchcmd(cmd, d, cleanup=[ud.localpath], workdir=workdir)
+
+ def clean(self, ud, d):
+ """ Clean CVS Files and tarballs """
+
+ pkg = d.getVar('PN')
+ pkgdir = os.path.join(d.getVar("CVSDIR"), pkg)
+
+ bb.utils.remove(pkgdir, True)
+ bb.utils.remove(ud.localpath)
+
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
new file mode 100644
index 000000000..3de83bed1
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -0,0 +1,664 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git implementation
+
+The git fetcher supports SRC_URI entries of the form:
+SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
+
+Supported SRC_URI options are:
+
+- branch
+ The git branch to retrieve from. The default is "master"
+
+    This option also supports fetching multiple branches, separated by
+    commas. In that case, the name option must supply a matching number
+    of names, which are used to specify the SRCREV for each branch
+ e.g:
+ SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
+ SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
+ SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
+
+- tag
+ The git tag to retrieve. The default is "master"
+
+- protocol
+ The method to use to access the repository. Common options are "git",
+ "http", "https", "file", "ssh" and "rsync". The default is "git".
+
+- rebaseable
+    rebaseable indicates that the upstream git repo may rebase in the future,
+    so the current revision may disappear from the upstream repo. This option
+    tells the fetcher to preserve the local cache carefully for future use.
+ The default value is "0", set rebaseable=1 for rebaseable git repo.
+
+- nocheckout
+    Don't check out source code when unpacking. Set this option for recipes
+    that have their own routine to check out code.
+ The default is "0", set nocheckout=1 if needed.
+
+- bareclone
+    Create a bare clone of the source code and don't check out the source code
+    when unpacking. Set this option for recipes that have their own routine to
+    check out code and track branch requirements.
+ The default is "0", set bareclone=1 if needed.
+
+- nobranch
+    Don't check SHA validation for the branch. Set this option for recipes
+    that refer to a commit which is valid in a tag instead of a branch.
+ The default is "0", set nobranch=1 if needed.
+
+- usehead
+ For local git:// urls to use the current branch HEAD as the revision for use with
+ AUTOREV. Implies nobranch.
+
+"""
+
+#Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import collections
+import errno
+import fnmatch
+import os
+import re
+import subprocess
+import tempfile
+import bb
+import bb.progress
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+
+class GitProgressHandler(bb.progress.LineFilterProgressHandler):
+ """Extract progress information from git output"""
+ def __init__(self, d):
+ self._buffer = ''
+ self._count = 0
+ super(GitProgressHandler, self).__init__(d)
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(-1)
+
+ def write(self, string):
+ self._buffer += string
+ stages = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
+ stage_weights = [0.2, 0.05, 0.5, 0.25]
+ stagenum = 0
+ for i, stage in reversed(list(enumerate(stages))):
+ if stage in self._buffer:
+ stagenum = i
+ self._buffer = ''
+ break
+ self._status = stages[stagenum]
+ percs = re.findall(r'(\d+)%', string)
+ if percs:
+ progress = int(round((int(percs[-1]) * stage_weights[stagenum]) + (sum(stage_weights[:stagenum]) * 100)))
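+            # e.g. (illustrative) "Receiving objects:  42%" is stage 2, so the
+            # overall figure is round(42 * 0.5 + (0.2 + 0.05) * 100) = 46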
+ rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
+ if rates:
+ rate = rates[-1]
+ else:
+ rate = None
+ self.update(progress, rate)
+ else:
+ if stagenum == 0:
+ percs = re.findall(r': (\d+)', string)
+ if percs:
+ count = int(percs[-1])
+ if count > self._count:
+ self._count = count
+ self._fire_progress(-count)
+ super(GitProgressHandler, self).write(string)
+
+
+class Git(FetchMethod):
+    bitbake_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
+ make_shallow_path = os.path.join(bitbake_dir, 'bin', 'git-make-shallow')
+
+ """Class to fetch a module or modules from git repositories"""
+ def init(self, d):
+ pass
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with git.
+ """
+ return ud.type in ['git']
+
+ def supports_checksum(self, urldata):
+ return False
+
+ def urldata_init(self, ud, d):
+ """
+        Init git-specific variables within url data
+        so that git methods like latest_revision() can work
+ """
+ if 'protocol' in ud.parm:
+ ud.proto = ud.parm['protocol']
+ elif not ud.host:
+ ud.proto = 'file'
+ else:
+ ud.proto = "git"
+
+        if ud.proto not in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
+ raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
+
+ ud.nocheckout = ud.parm.get("nocheckout","0") == "1"
+
+ ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
+
+ ud.nobranch = ud.parm.get("nobranch","0") == "1"
+
+ # usehead implies nobranch
+ ud.usehead = ud.parm.get("usehead","0") == "1"
+ if ud.usehead:
+ if ud.proto != "file":
+ raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
+ ud.nobranch = 1
+
+ # bareclone implies nocheckout
+ ud.bareclone = ud.parm.get("bareclone","0") == "1"
+ if ud.bareclone:
+ ud.nocheckout = 1
+
+ ud.unresolvedrev = {}
+ branches = ud.parm.get("branch", "master").split(',')
+ if len(branches) != len(ud.names):
+ raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
+
+ ud.cloneflags = "-s -n"
+ if ud.bareclone:
+ ud.cloneflags += " --mirror"
+
+ ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
+ ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()
+
+ depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
+ if depth_default is not None:
+ try:
+ depth_default = int(depth_default or 0)
+ except ValueError:
+ raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
+ else:
+ if depth_default < 0:
+ raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH: %s" % depth_default)
+ else:
+ depth_default = 1
+ ud.shallow_depths = collections.defaultdict(lambda: depth_default)
+
+ revs_default = d.getVar("BB_GIT_SHALLOW_REVS", True)
+ ud.shallow_revs = []
+ ud.branches = {}
+ for pos, name in enumerate(ud.names):
+ branch = branches[pos]
+ ud.branches[name] = branch
+ ud.unresolvedrev[name] = branch
+
+ shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name)
+ if shallow_depth is not None:
+ try:
+ shallow_depth = int(shallow_depth or 0)
+ except ValueError:
+ raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
+ else:
+ if shallow_depth < 0:
+ raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth))
+ ud.shallow_depths[name] = shallow_depth
+
+ revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name)
+ if revs is not None:
+ ud.shallow_revs.extend(revs.split())
+ elif revs_default is not None:
+ ud.shallow_revs.extend(revs_default.split())
+
+ if (ud.shallow and
+ not ud.shallow_revs and
+ all(ud.shallow_depths[n] == 0 for n in ud.names)):
+ # Shallow disabled for this URL
+ ud.shallow = False
+
+ if ud.usehead:
+ ud.unresolvedrev['default'] = 'HEAD'
+
+ ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
+
+ write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
+ ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
+ ud.write_shallow_tarballs = (d.getVar("BB_GENERATE_SHALLOW_TARBALLS") or write_tarballs) != "0"
+
+ ud.setup_revisions(d)
+
+ for name in ud.names:
+            # Ensure anything that doesn't look like a sha-1 revision (40 hex chars) is translated into one
+ if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
+ if ud.revisions[name]:
+ ud.unresolvedrev[name] = ud.revisions[name]
+ ud.revisions[name] = self.latest_revision(ud, d, name)
+
+ gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
+ if gitsrcname.startswith('.'):
+ gitsrcname = gitsrcname[1:]
+
+ # for rebaseable git repo, it is necessary to keep mirror tar ball
+ # per revision, so that even the revision disappears from the
+ # upstream repo in the future, the mirror will remain intact and still
+ # contains the revision
+ if ud.rebaseable:
+ for name in ud.names:
+ gitsrcname = gitsrcname + '_' + ud.revisions[name]
+
+ dl_dir = d.getVar("DL_DIR")
+ gitdir = d.getVar("GITDIR") or (dl_dir + "/git2/")
+ ud.clonedir = os.path.join(gitdir, gitsrcname)
+ ud.localfile = ud.clonedir
+
+ mirrortarball = 'git2_%s.tar.gz' % gitsrcname
+ ud.fullmirror = os.path.join(dl_dir, mirrortarball)
+ ud.mirrortarballs = [mirrortarball]
+ if ud.shallow:
+ tarballname = gitsrcname
+ if ud.bareclone:
+ tarballname = "%s_bare" % tarballname
+
+ if ud.shallow_revs:
+ tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))
+
+ for name, revision in sorted(ud.revisions.items()):
+ tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7])
+ depth = ud.shallow_depths[name]
+ if depth:
+ tarballname = "%s-%s" % (tarballname, depth)
+
+ shallow_refs = []
+ if not ud.nobranch:
+ shallow_refs.extend(ud.branches.values())
+ if ud.shallow_extra_refs:
+ shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
+ if shallow_refs:
+ tarballname = "%s_%s" % (tarballname, "_".join(sorted(shallow_refs)).replace('/', '.'))
+
+ fetcher = self.__class__.__name__.lower()
+ ud.shallowtarball = '%sshallow_%s.tar.gz' % (fetcher, tarballname)
+ ud.fullshallow = os.path.join(dl_dir, ud.shallowtarball)
+ ud.mirrortarballs.insert(0, ud.shallowtarball)
+
+ def localpath(self, ud, d):
+ return ud.clonedir
+
+ def need_update(self, ud, d):
+ if not os.path.exists(ud.clonedir):
+ return True
+ for name in ud.names:
+ if not self._contains_ref(ud, d, name, ud.clonedir):
+ return True
+ if ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow):
+ return True
+ if ud.write_tarballs and not os.path.exists(ud.fullmirror):
+ return True
+ return False
+
+ def try_premirror(self, ud, d):
+ # If we don't do this, updating an existing checkout with only premirrors
+ # is not possible
+ if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+ return True
+ if os.path.exists(ud.clonedir):
+ return False
+ return True
+
+ def download(self, ud, d):
+ """Fetch url"""
+
+ no_clone = not os.path.exists(ud.clonedir)
+ need_update = no_clone or self.need_update(ud, d)
+
+ # A current clone is preferred to either tarball, a shallow tarball is
+ # preferred to an out of date clone, and a missing clone will use
+ # either tarball.
+ if ud.shallow and os.path.exists(ud.fullshallow) and need_update:
+ ud.localpath = ud.fullshallow
+ return
+ elif os.path.exists(ud.fullmirror) and no_clone:
+ bb.utils.mkdirhier(ud.clonedir)
+ runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
+
+ repourl = self._get_repo_url(ud)
+
+ # If the repo still doesn't exist, fallback to cloning it
+ if not os.path.exists(ud.clonedir):
+ # We do this since git will use a "-l" option automatically for local urls where possible
+ if repourl.startswith("file://"):
+ repourl = repourl[7:]
+ clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
+ if ud.proto.lower() != 'file':
+ bb.fetch2.check_network_access(d, clone_cmd, ud.url)
+ progresshandler = GitProgressHandler(d)
+ runfetchcmd(clone_cmd, d, log=progresshandler)
+
+ # Update the checkout if needed
+ needupdate = False
+ for name in ud.names:
+ if not self._contains_ref(ud, d, name, ud.clonedir):
+ needupdate = True
+ if needupdate:
+ try:
+ runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
+ except bb.fetch2.FetchError:
+ logger.debug(1, "No Origin")
+
+ runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
+ fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
+ if ud.proto.lower() != 'file':
+ bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
+ progresshandler = GitProgressHandler(d)
+ runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
+ runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
+ runfetchcmd("%s pack-refs --all" % ud.basecmd, d, workdir=ud.clonedir)
+ runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
+ try:
+ os.unlink(ud.fullmirror)
+ except OSError as exc:
+ if exc.errno != errno.ENOENT:
+ raise
+ for name in ud.names:
+ if not self._contains_ref(ud, d, name, ud.clonedir):
+ raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
+
+ def build_mirror_data(self, ud, d):
+ if ud.shallow and ud.write_shallow_tarballs:
+ if not os.path.exists(ud.fullshallow):
+ if os.path.islink(ud.fullshallow):
+ os.unlink(ud.fullshallow)
+ tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
+ shallowclone = os.path.join(tempdir, 'git')
+ try:
+ self.clone_shallow_local(ud, shallowclone, d)
+
+ logger.info("Creating tarball of git repository")
+ runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
+ runfetchcmd("touch %s.done" % ud.fullshallow, d)
+ finally:
+ bb.utils.remove(tempdir, recurse=True)
+ elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
+ if os.path.islink(ud.fullmirror):
+ os.unlink(ud.fullmirror)
+
+ logger.info("Creating tarball of git repository")
+ runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
+ runfetchcmd("touch %s.done" % ud.fullmirror, d)
+
+ def clone_shallow_local(self, ud, dest, d):
+ """Clone the repo and make it shallow.
+
+ The upstream url of the new clone isn't set at this time, as it'll be
+ set correctly when unpacked."""
+ runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)
+
+ to_parse, shallow_branches = [], []
+ for name in ud.names:
+ revision = ud.revisions[name]
+ depth = ud.shallow_depths[name]
+ if depth:
+ to_parse.append('%s~%d^{}' % (revision, depth - 1))
+
+ # For nobranch, we need a ref, otherwise the commits will be
+ # removed, and for non-nobranch, we truncate the branch to our
+ # srcrev, to avoid keeping unnecessary history beyond that.
+ branch = ud.branches[name]
+ if ud.nobranch:
+ ref = "refs/shallow/%s" % name
+ elif ud.bareclone:
+ ref = "refs/heads/%s" % branch
+ else:
+ ref = "refs/remotes/origin/%s" % branch
+
+ shallow_branches.append(ref)
+ runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
+
+ # Map srcrev+depths to revisions
+ parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest)
+
+ # Resolve specified revisions
+ parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest)
+ shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines()
+
+ # Apply extra ref wildcards
+ all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd,
+ d, workdir=dest).splitlines()
+ for r in ud.shallow_extra_refs:
+ if not ud.bareclone:
+ r = r.replace('refs/heads/', 'refs/remotes/origin/')
+
+ if '*' in r:
+ matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
+ shallow_branches.extend(matches)
+ else:
+ shallow_branches.append(r)
+
+ # Make the repository shallow
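+        # using bitbake's bundled git-make-shallow script; the final command
+        # resembles (illustrative): git-make-shallow -s -r refs/remotes/origin/master <sha1>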
+ shallow_cmd = [self.make_shallow_path, '-s']
+ for b in shallow_branches:
+ shallow_cmd.append('-r')
+ shallow_cmd.append(b)
+ shallow_cmd.extend(shallow_revisions)
+ runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
+
+ def unpack(self, ud, destdir, d):
+ """ unpack the downloaded src to destdir"""
+
+ subdir = ud.parm.get("subpath", "")
+ if subdir != "":
+ readpathspec = ":%s" % subdir
+ def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
+ else:
+ readpathspec = ""
+ def_destsuffix = "git/"
+
+ destsuffix = ud.parm.get("destsuffix", def_destsuffix)
+ destdir = ud.destdir = os.path.join(destdir, destsuffix)
+ if os.path.exists(destdir):
+ bb.utils.prunedir(destdir)
+
+ if ud.shallow and (not os.path.exists(ud.clonedir) or self.need_update(ud, d)):
+ bb.utils.mkdirhier(destdir)
+ runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
+ else:
+ runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+
+ repourl = self._get_repo_url(ud)
+ runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
+ if not ud.nocheckout:
+ if subdir != "":
+ runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
+ workdir=destdir)
+ runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
+ elif not ud.nobranch:
+ branchname = ud.branches[ud.names[0]]
+ runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
+ ud.revisions[ud.names[0]]), d, workdir=destdir)
+ runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
+ branchname), d, workdir=destdir)
+ else:
+ runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)
+
+ return True
+
+ def clean(self, ud, d):
+ """ clean the git directory """
+
+ bb.utils.remove(ud.localpath, True)
+ bb.utils.remove(ud.fullmirror)
+ bb.utils.remove(ud.fullmirror + ".done")
+
+ def supports_srcrev(self):
+ return True
+
+ def _contains_ref(self, ud, d, name, wd):
+ cmd = ""
+ if ud.nobranch:
+ cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
+ ud.basecmd, ud.revisions[name])
+ else:
+ cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
+ ud.basecmd, ud.revisions[name], ud.branches[name])
+ try:
+ output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
+ except bb.fetch2.FetchError:
+ return False
+ if len(output.split()) > 1:
+ raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
+ return output.split()[0] != "0"
+
+ def _get_repo_url(self, ud):
+ """
+ Return the repository URL
+ """
+ if ud.user:
+ username = ud.user + '@'
+ else:
+ username = ""
+ return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
+
+ def _revision_key(self, ud, d, name):
+ """
+ Return a unique key for the url
+ """
+ return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
+
+ def _lsremote(self, ud, d, search):
+ """
+ Run git ls-remote with the specified search string
+ """
+ # Prevent recursion e.g. in OE if SRCPV is in PV, PV is in WORKDIR,
+ # and WORKDIR is in PATH (as a result of RSS), our call to
+ # runfetchcmd() exports PATH so this function will get called again (!)
+ # In this scenario the return call of the function isn't actually
+ # important - WORKDIR isn't needed in PATH to call git ls-remote
+ # anyway.
+ if d.getVar('_BB_GIT_IN_LSREMOTE', False):
+ return ''
+ d.setVar('_BB_GIT_IN_LSREMOTE', '1')
+ try:
+ repourl = self._get_repo_url(ud)
+ cmd = "%s ls-remote %s %s" % \
+ (ud.basecmd, repourl, search)
+ if ud.proto.lower() != 'file':
+ bb.fetch2.check_network_access(d, cmd, repourl)
+ output = runfetchcmd(cmd, d, True)
+ if not output:
+ raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
+ finally:
+ d.delVar('_BB_GIT_IN_LSREMOTE')
+ return output
+
+ def _latest_revision(self, ud, d, name):
+ """
+ Compute the HEAD revision for the url
+ """
+ output = self._lsremote(ud, d, "")
+        # Tags of the form ^{} may not work; we need to fall back to the other form
+ if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
+ head = ud.unresolvedrev[name]
+ tag = ud.unresolvedrev[name]
+ else:
+ head = "refs/heads/%s" % ud.unresolvedrev[name]
+ tag = "refs/tags/%s" % ud.unresolvedrev[name]
+ for s in [head, tag + "^{}", tag]:
+ for l in output.strip().split('\n'):
+ sha1, ref = l.split()
+ if s == ref:
+ return sha1
+ raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
+ (ud.unresolvedrev[name], ud.host+ud.path))
+
+ def latest_versionstring(self, ud, d):
+ """
+ Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
+ by searching through the tags output of ls-remote, comparing
+ versions and returning the highest match.
+ """
+ pupver = ('', '')
+
+        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
+ try:
+ output = self._lsremote(ud, d, "refs/tags/*")
+ except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
+ bb.note("Could not list remote: %s" % str(e))
+ return pupver
+
+ verstring = ""
+ revision = ""
+ for line in output.split("\n"):
+ if not line:
+ break
+
+ tag_head = line.split("/")[-1]
+            # Ignore tags that don't look like plain releases (alpha/beta/rc/final)
+ m = re.search("(alpha|beta|rc|final)+", tag_head)
+ if m:
+ continue
+
+ # search for version in the line
+ tag = tagregex.search(tag_head)
+            if tag is None:
+ continue
+
+ tag = tag.group('pver')
+ tag = tag.replace("_", ".")
+
+ if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
+ continue
+
+ verstring = tag
+ revision = line.split()[0]
+ pupver = (verstring, revision)
+
+ return pupver
+
+ def _build_revision(self, ud, d, name):
+ return ud.revisions[name]
+
+ def gitpkgv_revision(self, ud, d, name):
+ """
+ Return a sortable revision number by counting commits in the history
+        Based on gitpkgv.bbclass in meta-openembedded
+ """
+ rev = self._build_revision(ud, d, name)
+ localpath = ud.localpath
+ rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
+ if not os.path.exists(localpath):
+ commits = None
+ else:
+ if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
+ from pipes import quote
+ commits = bb.fetch2.runfetchcmd(
+ "git rev-list %s -- | wc -l" % quote(rev),
+ d, quiet=True).strip().lstrip('0')
+ if commits:
+ open(rev_file, "w").write("%d\n" % int(commits))
+ else:
+            with open(rev_file, "r") as f:
+                commits = f.readline(128).strip()
+ if commits:
+ return False, "%s+%s" % (commits, rev[:7])
+ else:
+ return True, str(rev)
+
+ def checkstatus(self, fetch, ud, d):
+ try:
+ self._lsremote(ud, d, "")
+ return True
+ except bb.fetch2.FetchError:
+ return False
diff --git a/poky/bitbake/lib/bb/fetch2/gitannex.py b/poky/bitbake/lib/bb/fetch2/gitannex.py
new file mode 100644
index 000000000..a9b69caab
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/gitannex.py
@@ -0,0 +1,91 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git annex implementation
+"""
+
+# Copyright (C) 2014 Otavio Salvador
+# Copyright (C) 2014 O.S. Systems Software LTDA.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from bb.fetch2.git import Git
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class GitANNEX(Git):
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with git.
+ """
+ return ud.type in ['gitannex']
+
+ def urldata_init(self, ud, d):
+ super(GitANNEX, self).urldata_init(ud, d)
+ if ud.shallow:
+ ud.shallow_extra_refs += ['refs/heads/git-annex', 'refs/heads/synced/*']
+
+ def uses_annex(self, ud, d, wd):
+ for name in ud.names:
+ try:
+ runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True, workdir=wd)
+ return True
+            except bb.fetch2.FetchError:
+ pass
+
+ return False
+
+ def update_annex(self, ud, d, wd):
+ try:
+ runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True, workdir=wd)
+        except bb.fetch2.FetchError:
+ return False
+ runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True, workdir=wd)
+
+ return True
+
+ def download(self, ud, d):
+ Git.download(self, ud, d)
+
+ if not ud.shallow or ud.localpath != ud.fullshallow:
+ if self.uses_annex(ud, d, ud.clonedir):
+ self.update_annex(ud, d, ud.clonedir)
+
+ def clone_shallow_local(self, ud, dest, d):
+ super(GitANNEX, self).clone_shallow_local(ud, dest, d)
+
+ try:
+ runfetchcmd("%s annex init" % ud.basecmd, d, workdir=dest)
+        except bb.fetch2.FetchError:
+ pass
+
+ if self.uses_annex(ud, d, dest):
+ runfetchcmd("%s annex get" % ud.basecmd, d, workdir=dest)
+ runfetchcmd("chmod u+w -R %s/.git/annex" % (dest), d, quiet=True, workdir=dest)
+
+ def unpack(self, ud, destdir, d):
+ Git.unpack(self, ud, destdir, d)
+
+ try:
+ runfetchcmd("%s annex init" % (ud.basecmd), d, workdir=ud.destdir)
+        except bb.fetch2.FetchError:
+ pass
+
+ annex = self.uses_annex(ud, d, ud.destdir)
+ if annex:
+ runfetchcmd("%s annex get" % (ud.basecmd), d, workdir=ud.destdir)
+ runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True, workdir=ud.destdir)
+
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
new file mode 100644
index 000000000..0aff1008e
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -0,0 +1,135 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' git submodules implementation
+
+Inherits from and extends the Git fetcher to retrieve submodules of a git repository
+after cloning.
+
+SRC_URI = "gitsm://<see Git fetcher for syntax>"
+
+See the Git fetcher, git://, for usage documentation.
+
+NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.
+
+"""
+
+# Copyright (C) 2013 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from bb.fetch2.git import Git
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class GitSM(Git):
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with git.
+ """
+ return ud.type in ['gitsm']
+
+ def uses_submodules(self, ud, d, wd):
+ for name in ud.names:
+ try:
+ runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=wd)
+ return True
+            except bb.fetch2.FetchError:
+ pass
+ return False
+
+ def _set_relative_paths(self, repopath):
+ """
+ Fix submodule paths to be relative instead of absolute,
+ so that when we move the repo it doesn't break
+ (In Git 1.7.10+ this is done automatically)
+ """
+ submodules = []
+ with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
+ for line in f.readlines():
+ if line.startswith('[submodule'):
+ submodules.append(line.split('"')[1])
+
+ for module in submodules:
+ repo_conf = os.path.join(repopath, module, '.git')
+ if os.path.exists(repo_conf):
+ with open(repo_conf, 'r') as f:
+ lines = f.readlines()
+ newpath = ''
+ for i, line in enumerate(lines):
+ if line.startswith('gitdir:'):
+ oldpath = line.split(': ')[-1].rstrip()
+ if oldpath.startswith('/'):
+ newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
+ lines[i] = 'gitdir: %s\n' % newpath
+ break
+ if newpath:
+ with open(repo_conf, 'w') as f:
+ for line in lines:
+ f.write(line)
+
+ repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
+ if os.path.exists(repo_conf2):
+ with open(repo_conf2, 'r') as f:
+ lines = f.readlines()
+ newpath = ''
+ for i, line in enumerate(lines):
+ if line.lstrip().startswith('worktree = '):
+ oldpath = line.split(' = ')[-1].rstrip()
+ if oldpath.startswith('/'):
+ newpath = '../' * (module.count('/') + 3) + module
+ lines[i] = '\tworktree = %s\n' % newpath
+ break
+ if newpath:
+ with open(repo_conf2, 'w') as f:
+ for line in lines:
+ f.write(line)
+
+ def update_submodules(self, ud, d):
+ # We have to convert bare -> full repo, do the submodule bit, then convert back
+ tmpclonedir = ud.clonedir + ".tmp"
+ gitdir = tmpclonedir + os.sep + ".git"
+ bb.utils.remove(tmpclonedir, True)
+ os.mkdir(tmpclonedir)
+ os.rename(ud.clonedir, gitdir)
+ runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
+ runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
+ runfetchcmd(ud.basecmd + " checkout -f " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
+ runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
+ self._set_relative_paths(tmpclonedir)
+ runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
+        os.rename(gitdir, ud.clonedir)
+ bb.utils.remove(tmpclonedir, True)
+
+ def download(self, ud, d):
+ Git.download(self, ud, d)
+
+ if not ud.shallow or ud.localpath != ud.fullshallow:
+ submodules = self.uses_submodules(ud, d, ud.clonedir)
+ if submodules:
+ self.update_submodules(ud, d)
+
+ def clone_shallow_local(self, ud, dest, d):
+ super(GitSM, self).clone_shallow_local(ud, dest, d)
+
+ runfetchcmd('cp -fpPRH "%s/modules" "%s/"' % (ud.clonedir, os.path.join(dest, '.git')), d)
+
+ def unpack(self, ud, destdir, d):
+ Git.unpack(self, ud, destdir, d)
+
+ if self.uses_submodules(ud, d, ud.destdir):
+ runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
+ runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
diff --git a/poky/bitbake/lib/bb/fetch2/hg.py b/poky/bitbake/lib/bb/fetch2/hg.py
new file mode 100644
index 000000000..d0857e63f
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/hg.py
@@ -0,0 +1,270 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for mercurial DRCS (hg).
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2004 Marcin Juszkiewicz
+# Copyright (C) 2007 Robert Schuster
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import sys
+import logging
+import bb
+import errno
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Hg(FetchMethod):
+ """Class to fetch from mercurial repositories"""
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with mercurial.
+ """
+ return ud.type in ['hg']
+
+ def supports_checksum(self, urldata):
+ """
+ Don't require checksums for local archives created from
+ repository checkouts.
+ """
+ return False
+
+ def urldata_init(self, ud, d):
+ """
+        Init hg-specific variables within url data
+ """
+ if not "module" in ud.parm:
+ raise MissingParameterError('module', ud.url)
+
+ ud.module = ud.parm["module"]
+
+ if 'protocol' in ud.parm:
+ ud.proto = ud.parm['protocol']
+ elif not ud.host:
+ ud.proto = 'file'
+ else:
+ ud.proto = "hg"
+
+ ud.setup_revisions(d)
+
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
+ elif not ud.revision:
+ ud.revision = self.latest_revision(ud, d)
+
+ # Create paths to mercurial checkouts
+ hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
+ ud.host, ud.path.replace('/', '.'))
+ mirrortarball = 'hg_%s.tar.gz' % hgsrcname
+ ud.fullmirror = os.path.join(d.getVar("DL_DIR"), mirrortarball)
+ ud.mirrortarballs = [mirrortarball]
+
+ hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg/")
+ ud.pkgdir = os.path.join(hgdir, hgsrcname)
+ ud.moddir = os.path.join(ud.pkgdir, ud.module)
+ ud.localfile = ud.moddir
+ ud.basecmd = d.getVar("FETCHCMD_hg") or "/usr/bin/env hg"
+
+ ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS")
+
+ def need_update(self, ud, d):
+ revTag = ud.parm.get('rev', 'tip')
+ if revTag == "tip":
+ return True
+ if not os.path.exists(ud.localpath):
+ return True
+ return False
+
+ def try_premirror(self, ud, d):
+ # If we don't do this, updating an existing checkout with only premirrors
+ # is not possible
+ if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
+ return True
+ if os.path.exists(ud.moddir):
+ return False
+ return True
+
+ def _buildhgcommand(self, ud, d, command):
+ """
+ Build up an hg commandline based on ud
+ command is "fetch", "update", "info"
+ """
+
+ proto = ud.parm.get('protocol', 'http')
+
+ host = ud.host
+ if proto == "file":
+ host = "/"
+ ud.host = "localhost"
+
+ if not ud.user:
+ hgroot = host + ud.path
+ else:
+ if ud.pswd:
+ hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
+ else:
+ hgroot = ud.user + "@" + host + ud.path
+
+ if command == "info":
+ return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)
+
+        options = []
+
+        # Don't specify a revision for the fetch; clone the entire repo.
+        # This avoids an issue if the specified revision is a tag, because
+        # hg records the tag in the revision after the tagged one, so the tag
+        # wouldn't be available to any successive commands.
+ if ud.revision and command != "fetch":
+ options.append("-r %s" % ud.revision)
+
+ if command == "fetch":
+ if ud.user and ud.pswd:
+ cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
+ else:
+ cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
+ elif command == "pull":
+ # do not pass options list; limiting pull to rev causes the local
+ # repo not to contain it and immediately following "update" command
+ # will crash
+ if ud.user and ud.pswd:
+ cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
+ else:
+ cmd = "%s pull" % (ud.basecmd)
+ elif command == "update":
+ if ud.user and ud.pswd:
+ cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
+ else:
+ cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
+ else:
+ raise FetchError("Invalid hg command %s" % command, ud.url)
+
+ return cmd
+
+ def download(self, ud, d):
+ """Fetch url"""
+
+ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+ # If the checkout doesn't exist and the mirror tarball does, extract it
+ if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
+ bb.utils.mkdirhier(ud.pkgdir)
+ runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.pkgdir)
+
+ if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
+            # Found the source, check whether we need to pull
+ updatecmd = self._buildhgcommand(ud, d, "update")
+ logger.debug(1, "Running %s", updatecmd)
+ try:
+ runfetchcmd(updatecmd, d, workdir=ud.moddir)
+ except bb.fetch2.FetchError:
+                # Running pull in the repo
+ pullcmd = self._buildhgcommand(ud, d, "pull")
+ logger.info("Pulling " + ud.url)
+ # update sources there
+ logger.debug(1, "Running %s", pullcmd)
+ bb.fetch2.check_network_access(d, pullcmd, ud.url)
+ runfetchcmd(pullcmd, d, workdir=ud.moddir)
+ try:
+ os.unlink(ud.fullmirror)
+ except OSError as exc:
+ if exc.errno != errno.ENOENT:
+ raise
+
+ # No source found, clone it.
+ if not os.path.exists(ud.moddir):
+ fetchcmd = self._buildhgcommand(ud, d, "fetch")
+ logger.info("Fetch " + ud.url)
+ # check out sources there
+ bb.utils.mkdirhier(ud.pkgdir)
+ logger.debug(1, "Running %s", fetchcmd)
+ bb.fetch2.check_network_access(d, fetchcmd, ud.url)
+ runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)
+
+ # Even when we clone (fetch), we still need to update as hg's clone
+        # won't check out the specified revision if it's on a branch
+ updatecmd = self._buildhgcommand(ud, d, "update")
+ logger.debug(1, "Running %s", updatecmd)
+ runfetchcmd(updatecmd, d, workdir=ud.moddir)
+
+ def clean(self, ud, d):
+ """ Clean the hg dir """
+
+ bb.utils.remove(ud.localpath, True)
+ bb.utils.remove(ud.fullmirror)
+ bb.utils.remove(ud.fullmirror + ".done")
+
+ def supports_srcrev(self):
+ return True
+
+ def _latest_revision(self, ud, d, name):
+ """
+ Compute tip revision for the url
+ """
+ bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"), ud.url)
+ output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
+ return output.strip()
+
+ def _build_revision(self, ud, d, name):
+ return ud.revision
+
+ def _revision_key(self, ud, d, name):
+ """
+ Return a unique key for the url
+ """
+ return "hg:" + ud.moddir
+
+ def build_mirror_data(self, ud, d):
+ # Generate a mirror tarball if needed
+ if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
+            # it's possible that this symlink points to a read-only filesystem with PREMIRROR
+ if os.path.islink(ud.fullmirror):
+ os.unlink(ud.fullmirror)
+
+ logger.info("Creating tarball of hg repository")
+ runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d, workdir=ud.pkgdir)
+ runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.pkgdir)
+
+ def localpath(self, ud, d):
+ return ud.pkgdir
+
+ def unpack(self, ud, destdir, d):
+ """
+ Make a local clone or export for the url
+ """
+
+ revflag = "-r %s" % ud.revision
+ subdir = ud.parm.get("destsuffix", ud.module)
+ codir = "%s/%s" % (destdir, subdir)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata != "nokeep":
+ if not os.access(os.path.join(codir, '.hg'), os.R_OK):
+ logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
+ runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
+ logger.debug(2, "Unpack: updating source in '" + codir + "'")
+ runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d, workdir=codir)
+ runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
+ else:
+ logger.debug(2, "Unpack: extracting source to '" + codir + "'")
+ runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
diff --git a/poky/bitbake/lib/bb/fetch2/local.py b/poky/bitbake/lib/bb/fetch2/local.py
new file mode 100644
index 000000000..a114ac12e
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/local.py
@@ -0,0 +1,119 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import urllib.request, urllib.parse, urllib.error
+import bb
+import bb.utils
+from bb.fetch2 import FetchMethod, FetchError
+from bb.fetch2 import logger
+
+class Local(FetchMethod):
+ def supports(self, urldata, d):
+ """
+ Check to see if a given url represents a local fetch.
+ """
+ return urldata.type in ['file']
+
+ def urldata_init(self, ud, d):
+ # We don't set localfile as for this fetcher the file is already local!
+ ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
+ ud.basename = os.path.basename(ud.decodedurl)
+ ud.basepath = ud.decodedurl
+ ud.needdonestamp = False
+ return
+
+ def localpath(self, urldata, d):
+ """
+ Return the local filename of a given url assuming a successful fetch.
+ """
+ return self.localpaths(urldata, d)[-1]
+
+ def localpaths(self, urldata, d):
+ """
+        Return the locations searched for a given url; the last entry is the
+        local filename assuming a successful fetch.
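+
+        For example (illustrative), for file://defconfig with
+        FILESPATH="/layer/recipe/files:/layer/files", the candidates
+        /layer/recipe/files/defconfig and /layer/files/defconfig are
+        searched in turn.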
+ """
+ searched = []
+ path = urldata.decodedurl
+ newpath = path
+ if path[0] == "/":
+ return [path]
+ filespath = d.getVar('FILESPATH')
+ if filespath:
+ logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
+ newpath, hist = bb.utils.which(filespath, path, history=True)
+ searched.extend(hist)
+ if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
+ # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
+ newpath, hist = bb.utils.which(filespath, ".", history=True)
+ searched.extend(hist)
+ logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
+ return searched
+ if not os.path.exists(newpath):
+ dldirfile = os.path.join(d.getVar("DL_DIR"), path)
+ logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+ bb.utils.mkdirhier(os.path.dirname(dldirfile))
+ searched.append(dldirfile)
+ return searched
+ return searched
+
+ def need_update(self, ud, d):
+ if ud.url.find("*") != -1:
+ return False
+ if os.path.exists(ud.localpath):
+ return False
+ return True
+
+ def download(self, urldata, d):
+ """Fetch urls (no-op for Local method)"""
+ # no need to fetch local files, we'll deal with them in place.
+ if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
+ locations = []
+ filespath = d.getVar('FILESPATH')
+ if filespath:
+ locations = filespath.split(":")
+ locations.append(d.getVar("DL_DIR"))
+
+ msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
+ raise FetchError(msg)
+
+ return True
+
+ def checkstatus(self, fetch, urldata, d):
+ """
+ Check the status of the url
+ """
+ if urldata.localpath.find("*") != -1:
+ logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
+ return True
+ if os.path.exists(urldata.localpath):
+ return True
+ return False
+
+ def clean(self, urldata, d):
+ return
+
diff --git a/poky/bitbake/lib/bb/fetch2/npm.py b/poky/bitbake/lib/bb/fetch2/npm.py
new file mode 100644
index 000000000..730c346a9
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/npm.py
@@ -0,0 +1,309 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' NPM implementation
+
+The NPM fetcher is used to retrieve files from the npmjs repository
+
+Usage in the recipe:
+
+ SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}"
+    Supported SRC_URI options are:
+
+ - name
+ - version
+
+ npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};version=${PV}
+    The fetcher triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, the fetch is assumed to be good/done.
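+
+    Example recipe usage (illustrative; the shrinkwrap/lockdown paths are
+    recipe-specific):
+
+        SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}"
+        NPM_SHRINKWRAP = "${THISDIR}/${PN}/npm-shrinkwrap.json"
+        NPM_LOCKDOWN = "${THISDIR}/${PN}/lockdown.json"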
+
+"""
+
+import os
+import sys
+import urllib.request, urllib.parse, urllib.error
+import json
+import subprocess
+import signal
+import bb
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import ChecksumError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+from bb.fetch2 import UnpackError
+from bb.fetch2 import ParameterError
+from distutils import spawn
+
+def subprocess_setup():
+ # Python installs a SIGPIPE handler by default. This is usually not what
+ # non-Python subprocesses expect.
+ # SIGPIPE errors are known issues with gzip/bash
+ signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
+class Npm(FetchMethod):
+
+ """Class to fetch urls via 'npm'"""
+ def init(self, d):
+ pass
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with npm
+ """
+ return ud.type in ['npm']
+
+ def debug(self, msg):
+ logger.debug(1, "NpmFetch: %s", msg)
+
+ def clean(self, ud, d):
+ logger.debug(2, "Calling cleanup %s" % ud.pkgname)
+ bb.utils.remove(ud.localpath, False)
+ bb.utils.remove(ud.pkgdatadir, True)
+ bb.utils.remove(ud.fullmirror, False)
+
+ def urldata_init(self, ud, d):
+ """
+ init NPM specific variable within url data
+ """
+ if 'downloadfilename' in ud.parm:
+ ud.basename = ud.parm['downloadfilename']
+ else:
+ ud.basename = os.path.basename(ud.path)
+
+ # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
+ # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
+ ud.pkgname = ud.parm.get("name", None)
+ if not ud.pkgname:
+ raise ParameterError("NPM fetcher requires a name parameter", ud.url)
+ ud.version = ud.parm.get("version", None)
+ if not ud.version:
+ raise ParameterError("NPM fetcher requires a version parameter", ud.url)
+ ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
+ ud.bbnpmmanifest = ud.bbnpmmanifest.replace('/', '-')
+ ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
+ prefixdir = "npm/%s" % ud.pkgname
+ ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
+ if not os.path.exists(ud.pkgdatadir):
+ bb.utils.mkdirhier(ud.pkgdatadir)
+ ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)
+
+ self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
+ ud.prefixdir = prefixdir
+
+ ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0")
+ mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
+ mirrortarball = mirrortarball.replace('/', '-')
+ ud.fullmirror = os.path.join(d.getVar("DL_DIR"), mirrortarball)
+ ud.mirrortarballs = [mirrortarball]
+
+ def need_update(self, ud, d):
+ if os.path.exists(ud.localpath):
+ return False
+ return True
+
+ def _runwget(self, ud, d, command, quiet):
+ logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+ bb.fetch2.check_network_access(d, command, ud.url)
+ dldir = d.getVar("DL_DIR")
+ runfetchcmd(command, d, quiet, workdir=dldir)
+
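+    # The dependency manifest built by _getdependencies() /
+    # _getshrinkeddependencies() maps each package name to an entry of
+    # the form (illustrative):
+    #   "express": { "tgz": "express-4.16.3.tgz",
+    #                "deps": { "accepts": { "tgz": "...", "deps": {} } } }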
+ def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
+ file = data[pkg]['tgz']
+ logger.debug(2, "file to extract is %s" % file)
+ if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
+ cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, file)
+ else:
+ bb.fatal("NPM package %s downloaded not a tarball!" % file)
+
+ # Change to subdir before executing command
+ if not os.path.exists(destdir):
+ os.makedirs(destdir)
+ path = d.getVar('PATH')
+ if path:
+ cmd = "PATH=\"%s\" %s" % (path, cmd)
+ bb.note("Unpacking %s to %s/" % (file, destdir))
+ ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=destdir)
+
+ if ret != 0:
+ raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
+
+ if 'deps' not in data[pkg]:
+ return
+ for dep in data[pkg]['deps']:
+ self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)
+
+
+ def unpack(self, ud, destdir, d):
+ dldir = d.getVar("DL_DIR")
+ with open("%s/npm/%s" % (dldir, ud.bbnpmmanifest)) as datafile:
+ workobj = json.load(datafile)
+ dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)
+
+ if 'subdir' in ud.parm:
+ unpackdir = '%s/%s' % (destdir, ud.parm.get('subdir'))
+ else:
+ unpackdir = '%s/npmpkg' % destdir
+
+ self._unpackdep(ud, ud.pkgname, workobj, unpackdir, dldir, d)
+
+ def _parse_view(self, output):
+ '''
+ Parse the output of npm view --json; the last JSON result
+ is assumed to be the one that we're interested in.
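+
+        For example (illustrative), given output such as:
+            npm WARN deprecated some warning text
+            { "name": "foo", "version": "1.0.0" }
+            { "name": "foo", "version": "1.0.1" }
+        this returns the dict parsed from the last complete JSON object,
+        i.e. {"name": "foo", "version": "1.0.1"}.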
+ '''
+ pdata = None
+ outdeps = {}
+ datalines = []
+ bracelevel = 0
+ for line in output.splitlines():
+ if bracelevel:
+ datalines.append(line)
+ elif '{' in line:
+ datalines = []
+ datalines.append(line)
+ bracelevel = bracelevel + line.count('{') - line.count('}')
+ if datalines:
+ pdata = json.loads('\n'.join(datalines))
+ return pdata
+
+ def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
+ if fetchedlist is None:
+ fetchedlist = []
+ pkgfullname = pkg
+ if version != '*' and not '/' in version:
+ pkgfullname += "@'%s'" % version
+ logger.debug(2, "Calling getdeps on %s" % pkg)
+ fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
+ output = runfetchcmd(fetchcmd, d, True)
+ pdata = self._parse_view(output)
+ if not pdata:
+ raise FetchError("The command '%s' returned no output" % fetchcmd)
+ if optional:
+ pkg_os = pdata.get('os', None)
+ if pkg_os:
+ if not isinstance(pkg_os, list):
+ pkg_os = [pkg_os]
+ blacklist = False
+ for item in pkg_os:
+ if item.startswith('!'):
+ blacklist = True
+ break
+ if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
+ logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
+ return
+ #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
+ outputurl = pdata['dist']['tarball']
+ data[pkg] = {}
+ data[pkg]['tgz'] = os.path.basename(outputurl)
+ if outputurl in fetchedlist:
+ return
+
+ self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
+ fetchedlist.append(outputurl)
+
+ dependencies = pdata.get('dependencies', {})
+ optionalDependencies = pdata.get('optionalDependencies', {})
+ dependencies.update(optionalDependencies)
+ depsfound = {}
+ optdepsfound = {}
+ data[pkg]['deps'] = {}
+ for dep in dependencies:
+ if dep in optionalDependencies:
+ optdepsfound[dep] = dependencies[dep]
+ else:
+ depsfound[dep] = dependencies[dep]
+ for dep, version in optdepsfound.items():
+ self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
+ for dep, version in depsfound.items():
+ self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)
+
+ def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest, toplevel=True):
+ logger.debug(2, "NPM shrinkwrap file is %s" % data)
+ if toplevel:
+ name = data.get('name', None)
+ if name and name != pkg:
+ for obj in data.get('dependencies', []):
+ if obj == pkg:
+ self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
+ return
+ outputurl = "invalid"
+ if ('resolved' not in data) or (not data['resolved'].startswith('http')):
+ # will be the case for ${PN}
+ fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
+ logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
+ outputurl = runfetchcmd(fetchcmd, d, True)
+ else:
+ outputurl = data['resolved']
+ self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
+ manifest[pkg] = {}
+ manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
+ manifest[pkg]['deps'] = {}
+
+ if pkg in lockdown:
+ sha1_expected = lockdown[pkg][version]
+ sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
+ if sha1_expected != sha1_data:
+ msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
+ raise ChecksumError('Checksum mismatch!%s' % msg)
+ else:
+ logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))
+
+ if 'dependencies' in data:
+ for obj in data['dependencies']:
+ logger.debug(2, "Found dep is %s" % str(obj))
+ self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'], False)
+
+ def download(self, ud, d):
+ """Fetch url"""
+ jsondepobj = {}
+ shrinkobj = {}
+ lockdown = {}
+
+ if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
+ dest = d.getVar("DL_DIR")
+ bb.utils.mkdirhier(dest)
+ runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
+ return
+
+ if ud.parm.get("noverify", None) != '1':
+ shwrf = d.getVar('NPM_SHRINKWRAP')
+ logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
+ if shwrf:
+ try:
+ with open(shwrf) as datafile:
+ shrinkobj = json.load(datafile)
+ except Exception as e:
+ raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
+ elif not ud.ignore_checksums:
+ logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
+ lckdf = d.getVar('NPM_LOCKDOWN')
+ logger.debug(2, "NPM lockdown file is %s" % lckdf)
+ if lckdf:
+ try:
+ with open(lckdf) as datafile:
+ lockdown = json.load(datafile)
+ except Exception as e:
+ raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lckdf, ud.pkgname, str(e)))
+ elif not ud.ignore_checksums:
+ logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
+
+ if ('name' not in shrinkobj):
+ self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
+ else:
+ self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)
+
+ with open(ud.localpath, 'w') as outfile:
+ json.dump(jsondepobj, outfile)
+
+ def build_mirror_data(self, ud, d):
+ # Generate a mirror tarball if needed
+ if ud.write_tarballs and not os.path.exists(ud.fullmirror):
+            # it's possible that this symlink points to a read-only filesystem with PREMIRROR
+ if os.path.islink(ud.fullmirror):
+ os.unlink(ud.fullmirror)
+
+ dldir = d.getVar("DL_DIR")
+ logger.info("Creating tarball of npm data")
+ runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
+ workdir=dldir)
+ runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
diff --git a/poky/bitbake/lib/bb/fetch2/osc.py b/poky/bitbake/lib/bb/fetch2/osc.py
new file mode 100644
index 000000000..2b4f7d9c1
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/osc.py
@@ -0,0 +1,132 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+Bitbake "Fetch" implementation for osc (Opensuse build service client).
+Based on the svn "Fetch" implementation.
+
+"""
+
+import os
+import sys
+import logging
+import bb
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Osc(FetchMethod):
+ """Class to fetch a module or modules from Opensuse build server
+ repositories."""
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with osc.
+ """
+ return ud.type in ['osc']
+
+ def urldata_init(self, ud, d):
+ if not "module" in ud.parm:
+ raise MissingParameterError('module', ud.url)
+
+ ud.module = ud.parm["module"]
+
+ # Create paths to osc checkouts
+ relpath = self._strip_leading_slashes(ud.path)
+ ud.pkgdir = os.path.join(d.getVar('OSCDIR'), ud.host)
+ ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
+
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
+ else:
+ pv = d.getVar("PV", False)
+ rev = bb.fetch2.srcrev_internal_helper(ud, d)
+ if rev and rev != True:
+ ud.revision = rev
+ else:
+ ud.revision = ""
+
+ ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision))
+
+ def _buildosccommand(self, ud, d, command):
+ """
+        Build up an osc commandline based on ud
+        command is "fetch" or "update"
+ """
+
+ basecmd = d.expand('${FETCHCMD_osc}')
+
+        proto = ud.parm.get('protocol', 'osc')
+
+ options = []
+
+ config = "-c %s" % self.generate_config(ud, d)
+
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+
+ coroot = self._strip_leading_slashes(ud.path)
+
+ if command == "fetch":
+ osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
+ elif command == "update":
+ osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
+ else:
+ raise FetchError("Invalid osc command %s" % command, ud.url)
+
+ return osccmd
+
+ def download(self, ud, d):
+ """
+ Fetch url
+ """
+
+ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+ if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
+ oscupdatecmd = self._buildosccommand(ud, d, "update")
+ logger.info("Update "+ ud.url)
+ # update sources there
+ logger.debug(1, "Running %s", oscupdatecmd)
+ bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
+ runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
+ else:
+ oscfetchcmd = self._buildosccommand(ud, d, "fetch")
+ logger.info("Fetch " + ud.url)
+ # check out sources there
+ bb.utils.mkdirhier(ud.pkgdir)
+ logger.debug(1, "Running %s", oscfetchcmd)
+ bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
+ runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
+
+ # tar them up to a defined filename
+ runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d,
+                    cleanup=[ud.localpath], workdir=ud.pkgdir + ud.path)
+
+ def supports_srcrev(self):
+ return False
+
+ def generate_config(self, ud, d):
+ """
+ Generate a .oscrc to be used for this run.
+ """
+
+ config_path = os.path.join(d.getVar('OSCDIR'), "oscrc")
+        if os.path.exists(config_path):
+ os.remove(config_path)
+
+        with open(config_path, 'w') as f:
+            f.write("[general]\n")
+            f.write("apisrv = %s\n" % ud.host)
+            f.write("scheme = http\n")
+            f.write("su-wrapper = su -c\n")
+            f.write("build-root = %s\n" % d.getVar('WORKDIR'))
+            f.write("urllist = %s\n" % d.getVar("OSCURLLIST"))
+            f.write("extra-pkgs = gzip\n")
+            f.write("\n")
+            f.write("[%s]\n" % ud.host)
+            f.write("user = %s\n" % ud.parm["user"])
+            f.write("pass = %s\n" % ud.parm["pswd"])
+
+ return config_path
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
new file mode 100644
index 000000000..3debad59f
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -0,0 +1,209 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for perforce
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2016 Kodak Alaris, Inc.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import logging
+import bb
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
+
+class Perforce(FetchMethod):
+ """ Class to fetch from perforce repositories """
+ def supports(self, ud, d):
+ """ Check to see if a given url can be fetched with perforce. """
+ return ud.type in ['p4']
+
+ def urldata_init(self, ud, d):
+ """
+ Initialize perforce specific variables within url data. If P4CONFIG is
+ provided by the env, use it. If P4PORT is specified by the recipe, use
+ its values, which may override the settings in P4CONFIG.
+ """
+ ud.basecmd = d.getVar('FETCHCMD_p4')
+ if not ud.basecmd:
+ ud.basecmd = "/usr/bin/env p4"
+
+ ud.dldir = d.getVar('P4DIR')
+ if not ud.dldir:
+ ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4')
+
+ path = ud.url.split('://')[1]
+ path = path.split(';')[0]
+        delim = path.find('@')
+ if delim != -1:
+ (ud.user, ud.pswd) = path.split('@')[0].split(':')
+ ud.path = path.split('@')[1]
+ else:
+ ud.path = path
+
+ ud.usingp4config = False
+ p4port = d.getVar('P4PORT')
+
+ if p4port:
+ logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
+ ud.host = p4port
+ else:
+ logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
+ ud.usingp4config = True
+ p4cmd = '%s info | grep "Server address"' % ud.basecmd
+ bb.fetch2.check_network_access(d, p4cmd, ud.url)
+ ud.host = runfetchcmd(p4cmd, d, True)
+ ud.host = ud.host.split(': ')[1].strip()
+ logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
+ if not ud.host:
+ raise FetchError('Could not determine P4PORT from P4CONFIG')
+
+ if ud.path.find('/...') >= 0:
+ ud.pathisdir = True
+ else:
+ ud.pathisdir = False
+
+ cleanedpath = ud.path.replace('/...', '').replace('/', '.')
+ cleanedhost = ud.host.replace(':', '.')
+ ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)
+
+ ud.setup_revisions(d)
+
+ ud.localfile = d.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision))
+
+ def _buildp4command(self, ud, d, command, depot_filename=None):
+ """
+ Build a p4 commandline. Valid commands are "changes", "print", and
+ "files". depot_filename is the full path to the file in the depot
+ including the trailing '#rev' value.
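+
+        For example (illustrative values), the "changes" command expands
+        to roughly:
+            p4 -u "user" -P "passwd" -p perforce.example.com:1666 changes -m 1 //depot/project/...@12345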
+ """
+ p4opt = ""
+
+ if ud.user:
+ p4opt += ' -u "%s"' % (ud.user)
+
+ if ud.pswd:
+ p4opt += ' -P "%s"' % (ud.pswd)
+
+ if ud.host and not ud.usingp4config:
+ p4opt += ' -p %s' % (ud.host)
+
+ if hasattr(ud, 'revision') and ud.revision:
+ pathnrev = '%s@%s' % (ud.path, ud.revision)
+ else:
+ pathnrev = '%s' % (ud.path)
+
+ if depot_filename:
+ if ud.pathisdir: # Remove leading path to obtain filename
+ filename = depot_filename[len(ud.path)-1:]
+ else:
+ filename = depot_filename[depot_filename.rfind('/'):]
+ filename = filename[:filename.find('#')] # Remove trailing '#rev'
+
+ if command == 'changes':
+ p4cmd = '%s%s changes -m 1 //%s' % (ud.basecmd, p4opt, pathnrev)
+ elif command == 'print':
+            if depot_filename is not None:
+ p4cmd = '%s%s print -o "p4/%s" "%s"' % (ud.basecmd, p4opt, filename, depot_filename)
+ else:
+ raise FetchError('No depot file name provided to p4 %s' % command, ud.url)
+ elif command == 'files':
+ p4cmd = '%s%s files //%s' % (ud.basecmd, p4opt, pathnrev)
+ else:
+ raise FetchError('Invalid p4 command %s' % command, ud.url)
+
+ return p4cmd
+
+ def _p4listfiles(self, ud, d):
+ """
+ Return a list of the file names which are present in the depot using the
+ 'p4 files' command, including trailing '#rev' file revision indicator
+ """
+ p4cmd = self._buildp4command(ud, d, 'files')
+ bb.fetch2.check_network_access(d, p4cmd, ud.url)
+ p4fileslist = runfetchcmd(p4cmd, d, True)
+ p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()]
+
+ if not p4fileslist:
+ raise FetchError('Unable to fetch listing of p4 files from %s@%s' % (ud.host, ud.path))
+
+ count = 0
+ filelist = []
+
+ for filename in p4fileslist:
+ item = filename.split(' - ')
+ lastaction = item[1].split()
+ logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
+ if lastaction[0] == 'delete':
+ continue
+ filelist.append(item[0])
+
+ return filelist
+
+ def download(self, ud, d):
+ """ Get the list of files, fetch each one """
+ filelist = self._p4listfiles(ud, d)
+ if not filelist:
+ raise FetchError('No files found in depot %s@%s' % (ud.host, ud.path))
+
+ bb.utils.remove(ud.pkgdir, True)
+ bb.utils.mkdirhier(ud.pkgdir)
+
+ for afile in filelist:
+ p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
+ bb.fetch2.check_network_access(d, p4fetchcmd, ud.url)
+ runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)
+
+ runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
+
+ def clean(self, ud, d):
+ """ Cleanup p4 specific files and dirs"""
+ bb.utils.remove(ud.localpath)
+ bb.utils.remove(ud.pkgdir, True)
+
+ def supports_srcrev(self):
+ return True
+
+ def _revision_key(self, ud, d, name):
+ """ Return a unique key for the url """
+ return 'p4:%s' % ud.pkgdir
+
+ def _latest_revision(self, ud, d, name):
+ """ Return the latest upstream scm revision number """
+ p4cmd = self._buildp4command(ud, d, "changes")
+ bb.fetch2.check_network_access(d, p4cmd, ud.url)
+ tip = runfetchcmd(p4cmd, d, True)
+
+ if not tip:
+ raise FetchError('Could not determine the latest perforce changelist')
+
+ tipcset = tip.split(' ')[1]
+ logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
+ return tipcset
+
+ def sortable_revision(self, ud, d, name):
+ """ Return a sortable revision number """
+ return False, self._build_revision(ud, d)
+
+ def _build_revision(self, ud, d):
+ return ud.revision
+
diff --git a/poky/bitbake/lib/bb/fetch2/repo.py b/poky/bitbake/lib/bb/fetch2/repo.py
new file mode 100644
index 000000000..c22d9b557
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/repo.py
@@ -0,0 +1,97 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake "Fetch" repo (git) implementation
+
+"""
+
+# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
+#
+# Based on git.py which is:
+#Copyright (C) 2005 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import bb
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Repo(FetchMethod):
+ """Class to fetch a module or modules from repo (git) repositories"""
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with repo.
+ """
+ return ud.type in ["repo"]
+
+ def urldata_init(self, ud, d):
+ """
+ We don"t care about the git rev of the manifests repository, but
+ we do care about the manifest to use. The default is "default".
+ We also care about the branch or tag to be used. The default is
+ "master".
+ """
+
+ ud.proto = ud.parm.get('protocol', 'git')
+ ud.branch = ud.parm.get('branch', 'master')
+ ud.manifest = ud.parm.get('manifest', 'default.xml')
+ if not ud.manifest.endswith('.xml'):
+ ud.manifest += '.xml'
+
+ ud.localfile = d.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch))
+
+ def download(self, ud, d):
+ """Fetch url"""
+
+ if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
+ logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
+ return
+
+ gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
+ repodir = d.getVar("REPODIR") or os.path.join(d.getVar("DL_DIR"), "repo")
+ codir = os.path.join(repodir, gitsrcname, ud.manifest)
+
+ if ud.user:
+ username = ud.user + "@"
+ else:
+ username = ""
+
+ repodir = os.path.join(codir, "repo")
+ bb.utils.mkdirhier(repodir)
+ if not os.path.exists(os.path.join(repodir, ".repo")):
+ bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
+ runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
+
+ bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
+ runfetchcmd("repo sync", d, workdir=repodir)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude='.repo' --exclude='.git'"
+
+ # Create a cache
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d, workdir=codir)
+
+ def supports_srcrev(self):
+ return False
+
+ def _build_revision(self, ud, d):
+ return ud.manifest
+
+ def _want_sortable_revision(self, ud, d):
+ return False
diff --git a/poky/bitbake/lib/bb/fetch2/s3.py b/poky/bitbake/lib/bb/fetch2/s3.py
new file mode 100644
index 000000000..162928862
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/s3.py
@@ -0,0 +1,98 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for Amazon AWS S3.
+
+Class for fetching files from Amazon S3 using the AWS Command Line Interface.
+The aws tool must be correctly installed and configured prior to use.
+
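+Example SRC_URI (illustrative; the fetcher runs "aws s3 cp s3://<host><path> <localpath>"):
+
+    SRC_URI = "s3://example-bucket/path/to/file.tar.gz"
+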
+"""
+
+# Copyright (C) 2017, Andre McCurdy <armccurdy@gmail.com>
+#
+# Based in part on bb.fetch2.wget:
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib.request, urllib.parse, urllib.error
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import runfetchcmd
+
+class S3(FetchMethod):
+ """Class to fetch urls via 'aws s3'"""
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with s3.
+ """
+ return ud.type in ['s3']
+
+ def recommends_checksum(self, urldata):
+ return True
+
+ def urldata_init(self, ud, d):
+ if 'downloadfilename' in ud.parm:
+ ud.basename = ud.parm['downloadfilename']
+ else:
+ ud.basename = os.path.basename(ud.path)
+
+ ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+
+ ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"
+
+ def download(self, ud, d):
+ """
+ Fetch urls
+ Assumes localpath was called first
+ """
+
+ cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath)
+ bb.fetch2.check_network_access(d, cmd, ud.url)
+ runfetchcmd(cmd, d)
+
+ # Additional sanity checks copied from the wget class (although there
+ # are no known issues which mean these are required, treat the aws cli
+ # tool with a little healthy suspicion).
+
+ if not os.path.exists(ud.localpath):
+ raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath))
+
+ if os.path.getsize(ud.localpath) == 0:
+ os.remove(ud.localpath)
+ raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." % (ud.host, ud.path))
+
+ return True
+
+ def checkstatus(self, fetch, ud, d):
+ """
+ Check the status of a URL
+ """
+
+ cmd = '%s ls s3://%s%s' % (ud.basecmd, ud.host, ud.path)
+ bb.fetch2.check_network_access(d, cmd, ud.url)
+ output = runfetchcmd(cmd, d)
+
+ # "aws s3 ls s3://mybucket/foo" will exit with success even if the file
+ # is not found, so check output of the command to confirm success.
+
+ if not output:
+ raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))
+
+ return True
diff --git a/poky/bitbake/lib/bb/fetch2/sftp.py b/poky/bitbake/lib/bb/fetch2/sftp.py
new file mode 100644
index 000000000..81884a6aa
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/sftp.py
@@ -0,0 +1,125 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake SFTP Fetch implementation
+
+Class for fetching files via SFTP. It tries to adhere to the (now
+expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
+Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
+(SSH)" (SECSH URI).
+
+It uses SFTP (so as to adhere to the SECSH URI specification). It only
+supports key based authentication, not password. This class, unlike
+the SSH fetcher, does not support fetching a directory tree from the
+remote.
+
+ http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
+ https://www.iana.org/assignments/uri-schemes/prov/sftp
+ https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13
+
+Please note that '/' is used as host path seperator, and not ":"
+as you may be used to from the scp/sftp commands. You can use a
+~ (tilde) to specify a path relative to your home directory.
+(The /~user/ syntax, for specyfing a path relative to another
+user's home directory is not supported.) Note that the tilde must
+still follow the host path seperator ("/"). See exampels below.
+
+Example SRC_URIs:
+
+SRC_URI = "sftp://host.example.com/dir/path.file.txt"
+
+A path relative to your home directory:
+
+SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"
+
+You can also specify a username (specifying a password in the
+URI is not supported, use SSH keys to authenticate):
+
+SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
+
+"""
+
+# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
+#
+# Based in part on bb.fetch2.wget:
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import bb
+import urllib.request, urllib.parse, urllib.error
+from bb.fetch2 import URI
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import runfetchcmd
+
+class SFTP(FetchMethod):
+ """Class to fetch urls via 'sftp'"""
+
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with sftp.
+ """
+ return ud.type in ['sftp']
+
+ def recommends_checksum(self, urldata):
+ return True
+
+ def urldata_init(self, ud, d):
+ if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
+ raise bb.fetch2.ParameterError(
+ "Invalid protocol - if you wish to fetch from a " +
+ "git repository using ssh, you need to use the " +
+ "git:// prefix with protocol=ssh", ud.url)
+
+ if 'downloadfilename' in ud.parm:
+ ud.basename = ud.parm['downloadfilename']
+ else:
+ ud.basename = os.path.basename(ud.path)
+
+ ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+
+ def download(self, ud, d):
+ """Fetch urls"""
+
+ urlo = URI(ud.url)
+ basecmd = 'sftp -oBatchMode=yes'
+ port = ''
+ if urlo.port:
+ port = '-P %d' % urlo.port
+ urlo.port = None
+
+ dldir = d.getVar('DL_DIR')
+ lpath = os.path.join(dldir, ud.localfile)
+
+ user = ''
+ if urlo.userinfo:
+ user = urlo.userinfo + '@'
+
+ path = urlo.path
+
+        # Support URIs relative to the user's home directory, with
+ # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
+ if path[:3] == '/~/':
+ path = path[3:]
+
+ remote = '%s%s:%s' % (user, urlo.hostname, path)
+
+ cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
+
+ bb.fetch2.check_network_access(d, cmd, ud.url)
+ runfetchcmd(cmd, d)
+ return True
diff --git a/poky/bitbake/lib/bb/fetch2/ssh.py b/poky/bitbake/lib/bb/fetch2/ssh.py
new file mode 100644
index 000000000..6047ee417
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/ssh.py
@@ -0,0 +1,125 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+'''
+BitBake 'Fetch' implementations
+
+This implementation is for Secure Shell (SSH), and attempts to comply with the
+IETF secsh internet draft:
+ http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
+
+ Currently does not support the sftp parameters, as this uses scp.
+ Also does not support the 'fingerprint' connection parameter.
+
+ Please note that '/' is used as the host/path separator, not ':' as you
+ may be used to; also '~' can be used to specify the user's HOME, but
+ again only after '/'.
+
+ Example SRC_URI:
+ SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
+ SRC_URI = "ssh://user@host.example.com/~/file.txt"
+'''
+
+# Copyright (C) 2006 OpenedHand Ltd.
+#
+#
+# Based in part on svk.py:
+# Copyright (C) 2006 Holger Hans Peter Freyther
+# Based on svn.py:
+# Copyright (C) 2003, 2004 Chris Larson
+# Based on functions from the base bb module:
+# Copyright 2003 Holger Schurig
+#
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import re, os
+import bb
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
+
+
+__pattern__ = re.compile(r'''
+ \s* # Skip leading whitespace
+ ssh:// # scheme
+ ( # Optional username/password block
+ (?P<user>\S+) # username
+ (:(?P<pass>\S+))? # colon followed by the password (optional)
+ )?
+ (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
+ @
+ (?P<host>\S+?) # non-greedy match of the host
+ (:(?P<port>[0-9]+))? # colon followed by the port (optional)
+ /
+ (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
+ # and may include the use of '~' to reference the remote home
+ # directory
+ (?P<sparam>(;[^;]+)*)? # parameters block (optional)
+ $
+''', re.VERBOSE)
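+
+# For example (illustrative), for the URL
+#   ssh://user@host.example.com:2222/~/file.txt
+# the pattern yields user='user', host='host.example.com', port='2222'
+# and path='~/file.txt'.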
+
+class SSH(FetchMethod):
+ '''Class to fetch a module or modules via Secure Shell'''
+
+ def supports(self, urldata, d):
+        return __pattern__.match(urldata.url) is not None
+
+ def supports_checksum(self, urldata):
+ return False
+
+ def urldata_init(self, urldata, d):
+ if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
+ raise bb.fetch2.ParameterError(
+ "Invalid protocol - if you wish to fetch from a git " +
+ "repository using ssh, you need to use " +
+ "git:// prefix with protocol=ssh", urldata.url)
+ m = __pattern__.match(urldata.url)
+ path = m.group('path')
+ host = m.group('host')
+ urldata.localpath = os.path.join(d.getVar('DL_DIR'),
+ os.path.basename(os.path.normpath(path)))
+
+ def download(self, urldata, d):
+ dldir = d.getVar('DL_DIR')
+
+ m = __pattern__.match(urldata.url)
+ path = m.group('path')
+ host = m.group('host')
+ port = m.group('port')
+ user = m.group('user')
+ password = m.group('pass')
+
+ if port:
+ portarg = '-P %s' % port
+ else:
+ portarg = ''
+
+ if user:
+ fr = user
+ if password:
+ fr += ':%s' % password
+ fr += '@%s' % host
+ else:
+ fr = host
+ fr += ':%s' % path
+
+ cmd = 'scp -B -r %s %s %s/' % (
+ portarg,
+ fr,
+ dldir
+ )
+
+ bb.fetch2.check_network_access(d, cmd, urldata.url)
+
+ runfetchcmd(cmd, d)
+
diff --git a/poky/bitbake/lib/bb/fetch2/svn.py b/poky/bitbake/lib/bb/fetch2/svn.py
new file mode 100644
index 000000000..3f172eec9
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/svn.py
@@ -0,0 +1,193 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for svn.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2004 Marcin Juszkiewicz
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import os
+import sys
+import logging
+import bb
+import re
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import logger
+
+class Svn(FetchMethod):
+ """Class to fetch a module or modules from svn repositories"""
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with svn.
+ """
+ return ud.type in ['svn']
+
+ def urldata_init(self, ud, d):
+ """
+ init svn specific variable within url data
+ """
+ if not "module" in ud.parm:
+ raise MissingParameterError('module', ud.url)
+
+ ud.basecmd = d.getVar('FETCHCMD_svn')
+
+ ud.module = ud.parm["module"]
+
+ if not "path_spec" in ud.parm:
+ ud.path_spec = ud.module
+ else:
+ ud.path_spec = ud.parm["path_spec"]
+
+ # Create paths to svn checkouts
+ relpath = self._strip_leading_slashes(ud.path)
+ ud.pkgdir = os.path.join(d.expand('${SVNDIR}'), ud.host, relpath)
+ ud.moddir = os.path.join(ud.pkgdir, ud.module)
+
+ ud.setup_revisions(d)
+
+ if 'rev' in ud.parm:
+ ud.revision = ud.parm['rev']
+
+ ud.localfile = d.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision))
+
+ def _buildsvncommand(self, ud, d, command):
+ """
+ Build up an svn commandline based on ud
+ command is "fetch", "update", "info"
+ """
+
+ proto = ud.parm.get('protocol', 'svn')
+
+ svn_ssh = None
+ if proto == "svn+ssh" and "ssh" in ud.parm:
+ svn_ssh = ud.parm["ssh"]
+
+ svnroot = ud.host + ud.path
+
+ options = []
+
+ options.append("--no-auth-cache")
+
+ if ud.user:
+ options.append("--username %s" % ud.user)
+
+ if ud.pswd:
+ options.append("--password %s" % ud.pswd)
+
+ if command == "info":
+ svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
+ elif command == "log1":
+ svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
+ else:
+ suffix = ""
+ if ud.revision:
+ options.append("-r %s" % ud.revision)
+ suffix = "@%s" % (ud.revision)
+
+ if command == "fetch":
+ transportuser = ud.parm.get("transportuser", "")
+ svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.path_spec)
+ elif command == "update":
+ svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
+ else:
+ raise FetchError("Invalid svn command %s" % command, ud.url)
+
+ if svn_ssh:
+ svncmd = "SVN_SSH=\"%s\" %s" % (svn_ssh, svncmd)
+
+ return svncmd
+
+ def download(self, ud, d):
+ """Fetch url"""
+
+ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
+
+ if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
+ svnupdatecmd = self._buildsvncommand(ud, d, "update")
+ logger.info("Update " + ud.url)
+            # We need to attempt to run "svn upgrade" first in case it's an older working copy format
+ try:
+ runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
+ except FetchError:
+ pass
+ logger.debug(1, "Running %s", svnupdatecmd)
+ bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
+ runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
+ else:
+ svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
+ logger.info("Fetch " + ud.url)
+ # check out sources there
+ bb.utils.mkdirhier(ud.pkgdir)
+ logger.debug(1, "Running %s", svnfetchcmd)
+ bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
+ runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
+
+ scmdata = ud.parm.get("scmdata", "")
+ if scmdata == "keep":
+ tar_flags = ""
+ else:
+ tar_flags = "--exclude='.svn'"
+
+ # tar them up to a defined filename
+ runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
+ cleanup=[ud.localpath], workdir=ud.pkgdir)
+
+ def clean(self, ud, d):
+ """ Clean SVN specific files and dirs """
+
+ bb.utils.remove(ud.localpath)
+ bb.utils.remove(ud.moddir, True)
+
+
+ def supports_srcrev(self):
+ return True
+
+ def _revision_key(self, ud, d, name):
+ """
+ Return a unique key for the url
+ """
+ return "svn:" + ud.moddir
+
+ def _latest_revision(self, ud, d, name):
+ """
+ Return the latest upstream revision number
+ """
+ bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"), ud.url)
+
+ output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True)
+
+ # skip the first line, as per output of svn log
+ # then we expect the revision on the 2nd line
+ revision = re.search('^r([0-9]*)', output.splitlines()[1]).group(1)
+
+ return revision
+
+ def sortable_revision(self, ud, d, name):
+ """
+ Return a sortable revision number which in our case is the revision number
+ """
+
+ return False, self._build_revision(ud, d)
+
+ def _build_revision(self, ud, d):
+ return ud.revision
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
new file mode 100644
index 000000000..8f505b6de
--- /dev/null
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -0,0 +1,626 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementations
+
+Classes for obtaining upstream sources for the
+BitBake build tools.
+
+"""
+
+# Copyright (C) 2003, 2004 Chris Larson
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import re
+import tempfile
+import subprocess
+import os
+import logging
+import errno
+import bb
+import bb.progress
+import urllib.request, urllib.parse, urllib.error
+from bb.fetch2 import FetchMethod
+from bb.fetch2 import FetchError
+from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
+from bb.utils import export_proxies
+from bs4 import BeautifulSoup
+from bs4 import SoupStrainer
+
+class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
+ """
+ Extract progress information from wget output.
+ Note: relies on --progress=dot (with -v or without -q/-nv) being
+ specified on the wget command line.
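+
+    A typical dot-progress line looks like (illustrative):
+        1950K .......... .......... 98% 1.23M 0s
+    from which writeline() extracts the percentage and rate.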
+ """
+ def __init__(self, d):
+ super(WgetProgressHandler, self).__init__(d)
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(0)
+
+ def writeline(self, line):
+ percs = re.findall(r'(\d+)%\s+([\d.]+[A-Z])', line)
+ if percs:
+ progress = int(percs[-1][0])
+ rate = percs[-1][1] + '/s'
+ self.update(progress, rate)
+ return False
+ return True
+
+
+class Wget(FetchMethod):
+ """Class to fetch urls via 'wget'"""
+ def supports(self, ud, d):
+ """
+ Check to see if a given url can be fetched with wget.
+ """
+ return ud.type in ['http', 'https', 'ftp']
+
+ def recommends_checksum(self, urldata):
+ return True
+
+ def urldata_init(self, ud, d):
+ if 'protocol' in ud.parm:
+ if ud.parm['protocol'] == 'git':
+ raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)
+
+ if 'downloadfilename' in ud.parm:
+ ud.basename = ud.parm['downloadfilename']
+ else:
+ ud.basename = os.path.basename(ud.path)
+
+ ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+ if not ud.localfile:
+ ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
+
+ self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
+
+ def _runwget(self, ud, d, command, quiet, workdir=None):
+
+ progresshandler = WgetProgressHandler(d)
+
+ logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
+ bb.fetch2.check_network_access(d, command, ud.url)
+ runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)
+
+ def download(self, ud, d):
+ """Fetch urls"""
+
+ fetchcmd = self.basecmd
+
+ if 'downloadfilename' in ud.parm:
+ dldir = d.getVar("DL_DIR")
+ bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
+ fetchcmd += " -O " + dldir + os.sep + ud.localfile
+
+ if ud.user and ud.pswd:
+ fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
+
+ uri = ud.url.split(";")[0]
+ if os.path.exists(ud.localpath):
+            # The file exists but we didn't complete it; try to resume the download.
+ fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
+ else:
+ fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
+
+ self._runwget(ud, d, fetchcmd, False)
+
+        # Sanity check since wget can pretend it succeeded when it didn't.
+        # Also, this used to happen if sourceforge sent us to the mirror page.
+ if not os.path.exists(ud.localpath):
+ raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
+
+ if os.path.getsize(ud.localpath) == 0:
+ os.remove(ud.localpath)
+ raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
+
+ return True
+
+ def checkstatus(self, fetch, ud, d, try_again=True):
+ import urllib.request, urllib.error, urllib.parse, socket, http.client
+ from urllib.response import addinfourl
+ from bb.fetch2 import FetchConnectionCache
+
+ class HTTPConnectionCache(http.client.HTTPConnection):
+ if fetch.connection_cache:
+ def connect(self):
+ """Connect to the host and port specified in __init__."""
+
+ sock = fetch.connection_cache.get_connection(self.host, self.port)
+ if sock:
+ self.sock = sock
+ else:
+ self.sock = socket.create_connection((self.host, self.port),
+ self.timeout, self.source_address)
+ fetch.connection_cache.add_connection(self.host, self.port, self.sock)
+
+ if self._tunnel_host:
+ self._tunnel()
+
+ class CacheHTTPHandler(urllib.request.HTTPHandler):
+ def http_open(self, req):
+ return self.do_open(HTTPConnectionCache, req)
+
+ def do_open(self, http_class, req):
+ """Return an addinfourl object for the request, using http_class.
+
+ http_class must implement the HTTPConnection API from httplib.
+ The addinfourl return value is a file-like object. It also
+ has methods and attributes including:
+ - info(): return a mimetools.Message object for the headers
+ - geturl(): return the original request URL
+ - code: HTTP status code
+ """
+ host = req.host
+ if not host:
+                    raise urllib.error.URLError('no host given')
+
+ h = http_class(host, timeout=req.timeout) # will parse host:port
+ h.set_debuglevel(self._debuglevel)
+
+ headers = dict(req.unredirected_hdrs)
+ headers.update(dict((k, v) for k, v in list(req.headers.items())
+ if k not in headers))
+
+ # We want to make an HTTP/1.1 request, but the addinfourl
+ # class isn't prepared to deal with a persistent connection.
+ # It will try to read all remaining data from the socket,
+ # which will block while the server waits for the next request.
+ # So make sure the connection gets closed after the (only)
+ # request.
+
+ # Don't close connection when connection_cache is enabled,
+ if fetch.connection_cache is None:
+ headers["Connection"] = "close"
+ else:
+ headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
+
+ headers = dict(
+ (name.title(), val) for name, val in list(headers.items()))
+
+ if req._tunnel_host:
+ tunnel_headers = {}
+ proxy_auth_hdr = "Proxy-Authorization"
+ if proxy_auth_hdr in headers:
+ tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
+ # Proxy-Authorization should not be sent to origin
+ # server.
+ del headers[proxy_auth_hdr]
+ h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
+
+ try:
+ h.request(req.get_method(), req.selector, req.data, headers)
+ except socket.error as err: # XXX what error?
+ # Don't close connection when cache is enabled.
+ # Instead, try to detect connections that are no longer
+ # usable (for example, closed unexpectedly) and remove
+ # them from the cache.
+ if fetch.connection_cache is None:
+ h.close()
+ elif isinstance(err, OSError) and err.errno == errno.EBADF:
+ # This happens when the server closes the connection despite the Keep-Alive.
+ # Apparently urllib then uses the file descriptor, expecting it to be
+ # connected, when in reality the connection is already gone.
+ # We let the request fail and expect it to be
+ # tried once more ("try_again" in check_status()),
+ # with the dead connection removed from the cache.
+                    # If it still fails, we give up, which can happen with bad
+ # HTTP proxy settings.
+ fetch.connection_cache.remove_connection(h.host, h.port)
+ raise urllib.error.URLError(err)
+ else:
+ try:
+ r = h.getresponse(buffering=True)
+ except TypeError: # buffering kw not supported
+ r = h.getresponse()
+
+ # Pick apart the HTTPResponse object to get the addinfourl
+ # object initialized properly.
+
+ # Wrap the HTTPResponse object in socket's file object adapter
+ # for Windows. That adapter calls recv(), so delegate recv()
+ # to read(). This weird wrapping allows the returned object to
+ # have readline() and readlines() methods.
+
+ # XXX It might be better to extract the read buffering code
+ # out of socket._fileobject() and into a base class.
+ r.recv = r.read
+
+ # no data, just have to read
+ r.read()
+ class fp_dummy(object):
+ def read(self):
+ return ""
+ def readline(self):
+ return ""
+ def close(self):
+ pass
+ closed = False
+
+ resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
+ resp.code = r.status
+ resp.msg = r.reason
+
+                # Close the connection when the server requests it.
+ if fetch.connection_cache is not None:
+ if 'Connection' in r.msg and r.msg['Connection'] == 'close':
+ fetch.connection_cache.remove_connection(h.host, h.port)
+
+ return resp
+
+ class HTTPMethodFallback(urllib.request.BaseHandler):
+ """
+ Fallback to GET if HEAD is not allowed (405 HTTP error)
+ """
+ def http_error_405(self, req, fp, code, msg, headers):
+ fp.read()
+ fp.close()
+
+ newheaders = dict((k,v) for k,v in list(req.headers.items())
+ if k.lower() not in ("content-length", "content-type"))
+ return self.parent.open(urllib.request.Request(req.get_full_url(),
+ headers=newheaders,
+ origin_req_host=req.origin_req_host,
+ unverifiable=True))
+
+ """
+ Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
+ Forbidden when they actually mean 405 Method Not Allowed.
+ """
+ http_error_403 = http_error_405
+
+
+        class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
+            """
+            urllib.request.HTTPRedirectHandler resets the method to GET on
+            redirect, whereas we want to follow redirects using the original
+            method.
+            """
+            def redirect_request(self, req, fp, code, msg, headers, newurl):
+                newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
+                newreq.get_method = lambda: req.get_method()
+                return newreq
+
+        exported_proxies = export_proxies(d)
+
+        handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
+        if exported_proxies:
+            handlers.append(urllib.request.ProxyHandler())
+ handlers.append(CacheHTTPHandler())
+        # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
+        # (see PEP 476). This causes verification errors on some https servers,
+        # so disable it by default.
+ import ssl
+ if hasattr(ssl, '_create_unverified_context'):
+ handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
+ opener = urllib.request.build_opener(*handlers)
+
+ try:
+ uri = ud.url.split(";")[0]
+ r = urllib.request.Request(uri)
+ r.get_method = lambda: "HEAD"
+ # Some servers (FusionForge, as used on Alioth) require that the
+ # optional Accept header is set.
+ r.add_header("Accept", "*/*")
+            def add_basic_auth(login_str, request):
+                '''Add Basic auth to an HTTP request; pass in login:password as a string.'''
+                import base64
+                encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
+                authheader = "Basic %s" % encodeuser
+                request.add_header("Authorization", authheader)
+
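+            # For example (illustrative credentials): add_basic_auth("user:pass", r)
+            # base64-encodes the string to "dXNlcjpwYXNz" and sends the header
+            # "Authorization: Basic dXNlcjpwYXNz".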
+            if ud.user and ud.pswd:
+                add_basic_auth(ud.user + ':' + ud.pswd, r)
+
+ try:
+ import netrc, urllib.parse
+ n = netrc.netrc()
+ login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
+ add_basic_auth("%s:%s" % (login, password), r)
+ except (TypeError, ImportError, IOError, netrc.NetrcParseError):
+ pass
+
+ with opener.open(r) as response:
+ pass
+ except urllib.error.URLError as e:
+ if try_again:
+ logger.debug(2, "checkstatus: trying again")
+ return self.checkstatus(fetch, ud, d, False)
+ else:
+ # debug for now to avoid spamming the logs in e.g. remote sstate searches
+ logger.debug(2, "checkstatus() urlopen failed: %s" % e)
+ return False
+ return True
+
+ def _parse_path(self, regex, s):
+ """
+ Find and group name, version and archive type in the given string s
+ """
+
+ m = regex.search(s)
+ if m:
+ pname = ''
+ pver = ''
+ ptype = ''
+
+ mdict = m.groupdict()
+            if 'name' in mdict:
+                pname = mdict['name']
+            if 'pver' in mdict:
+                pver = mdict['pver']
+            if 'type' in mdict:
+                ptype = mdict['type']
+
+ bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))
+
+ return (pname, pver, ptype)
+
+ return None
+
+ def _modelate_version(self, version):
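+        # A few illustrative transformations (computed from the rules below):
+        #   "1.0rc1"   -> "1.0.1000.1"   (rc candidates sort above beta/alpha)
+        #   "1.0beta2" -> "1.0.100.2"
+        #   "v2.1"     -> "2.1"          (a leading 'v' is stripped)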
+        if version[0] in ['.', '-']:
+            if version[1].isdigit():
+                version = version[1] + version[0] + version[2:]
+            else:
+                version = version[1:]
+
+        version = re.sub('-', '.', version)
+        version = re.sub('_', '.', version)
+        version = re.sub('(rc)+', '.1000.', version)
+        version = re.sub('(beta)+', '.100.', version)
+        version = re.sub('(alpha)+', '.10.', version)
+        if version[0] == 'v':
+            version = version[1:]
+        return version
+
+ def _vercmp(self, old, new):
+ """
+        Check whether 'new' is newer than 'old'. We use the existing vercmp() for
+        this purpose. PE is cleared in the comparison as it's not used for the build,
+        and PR is cleared too for simplicity, as it's difficult to extract from the
+        various upstream formats.
+ """
+
+ (oldpn, oldpv, oldsuffix) = old
+ (newpn, newpv, newsuffix) = new
+
+ """
+ Check for a new suffix type that we have never heard of before
+ """
+ if (newsuffix):
+ m = self.suffix_regex_comp.search(newsuffix)
+ if not m:
+ bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
+ return False
+
+ """
+ Not our package so ignore it
+ """
+ if oldpn != newpn:
+ return False
+
+ oldpv = self._modelate_version(oldpv)
+ newpv = self._modelate_version(newpv)
+
+ return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))
+
+ def _fetch_index(self, uri, ud, d):
+ """
+ Run fetch checkstatus to get directory information
+ """
+ f = tempfile.NamedTemporaryFile()
+ with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
+ agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
+ fetchcmd = self.basecmd
+ fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+ try:
+ self._runwget(ud, d, fetchcmd, True, workdir=workdir)
+ fetchresult = f.read()
+ except bb.fetch2.BBFetchException:
+ fetchresult = ""
+
+ return fetchresult
+
+ def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
+ """
+        Return the latest version of a package inside a given directory path.
+        If there is an error or no version is found, return "".
+ """
+ valid = 0
+ version = ['', '', '']
+
+ bb.debug(3, "VersionURL: %s" % (url))
+ soup = BeautifulSoup(self._fetch_index(url, ud, d), "html.parser", parse_only=SoupStrainer("a"))
+ if not soup:
+ bb.debug(3, "*** %s NO SOUP" % (url))
+ return ""
+
+ for line in soup.find_all('a', href=True):
+ bb.debug(3, "line['href'] = '%s'" % (line['href']))
+ bb.debug(3, "line = '%s'" % (str(line)))
+
+ newver = self._parse_path(package_regex, line['href'])
+ if not newver:
+ newver = self._parse_path(package_regex, str(line))
+
+ if newver:
+ bb.debug(3, "Upstream version found: %s" % newver[1])
+ if valid == 0:
+ version = newver
+ valid = 1
+ elif self._vercmp(version, newver) < 0:
+ version = newver
+
+ pupver = re.sub('_', '.', version[1])
+
+ bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
+ (package, pupver or "N/A", current_version[1]))
+
+ if valid:
+ return pupver
+
+ return ""
+
+ def _check_latest_version_by_dir(self, dirver, package, package_regex,
+ current_version, ud, d):
+ """
+ Scan every directory in order to get upstream version.
+ """
+ version_dir = ['', '', '']
+ version = ['', '', '']
+
+        dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
+ s = dirver_regex.search(dirver)
+ if s:
+ version_dir[1] = s.group('ver')
+ else:
+ version_dir[1] = dirver
+
+ dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
+ ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
+ bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))
+
+ soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
+ if not soup:
+ return version[1]
+
+ for line in soup.find_all('a', href=True):
+ s = dirver_regex.search(line['href'].strip("/"))
+ if s:
+ sver = s.group('ver')
+
+                # When the prefix is part of the version directory, we need to
+                # make sure that only the version directory is used, so remove
+                # any preceding directories.
+                #
+                # Example: pfx = '/dir1/dir2/v' and version = '2.5'; the
+                # expected result is 'v2.5'.
+ spfx = s.group('pfx').split('/')[-1]
+
+ version_dir_new = ['', sver, '']
+ if self._vercmp(version_dir, version_dir_new) <= 0:
+ dirver_new = spfx + sver
+ path = ud.path.replace(dirver, dirver_new, True) \
+ .split(package)[0]
+ uri = bb.fetch.encodeurl([ud.type, ud.host, path,
+ ud.user, ud.pswd, {}])
+
+ pupver = self._check_latest_version(uri,
+ package, package_regex, current_version, ud, d)
+ if pupver:
+ version[1] = pupver
+
+ version_dir = version_dir_new
+
+ return version[1]
+
+ def _init_regexes(self, package, ud, d):
+ """
+ Match as many patterns as possible such as:
+ gnome-common-2.20.0.tar.gz (most common format)
+ gtk+-2.90.1.tar.gz
+ xf86-input-synaptics-12.6.9.tar.gz
+ dri2proto-2.3.tar.gz
+ blktool_4.orig.tar.gz
+ libid3tag-0.15.1b.tar.gz
+ unzip552.tar.gz
+ icu4c-3_6-src.tgz
+ genext2fs_1.3.orig.tar.gz
+ gst-fluendo-mp3
+ """
+        # match most patterns, which use "-" as the separator before the version digits
+        pn_prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
+        # a loose pattern such as for unzip552.tar.gz
+        pn_prefix2 = "[a-zA-Z]+"
+        # a loose pattern such as for 80325-quicky-0.4.tar.gz
+        pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
+        # Save the Package Name (pn) Regex for use later
+        pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
+
+        # match version
+        pver_regex = r"(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
+
+ # match arch
+ parch_regex = "-source|_all_"
+
+        # The src.rpm extension was added only for rpm packages. It can be removed
+        # if rpm packages will always be considered as having to be manually upgraded.
+        psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+
+ # match name, version and archive type of a package
+        package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
+ % (pn_regex, pver_regex, parch_regex, psuffix_regex))
+ self.suffix_regex_comp = re.compile(psuffix_regex)
+
+        # Compile the regex; it can be package-specific or the generic one.
+ pn_regex = d.getVar('UPSTREAM_CHECK_REGEX')
+ if pn_regex:
+ package_custom_regex_comp = re.compile(pn_regex)
+ else:
+ version = self._parse_path(package_regex_comp, package)
+ if version:
+ package_custom_regex_comp = re.compile(
+ "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
+ (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
+ else:
+ package_custom_regex_comp = None
+
+ return package_custom_regex_comp
+
+ def latest_versionstring(self, ud, d):
+ """
+        Manipulate the URL and try to obtain the latest package version.
+
+        Sanity check to ensure the same name and type.
+ """
+ package = ud.path.split("/")[-1]
+ current_version = ['', d.getVar('PV'), '']
+
+ """possible to have no version in pkg name, such as spectrum-fw"""
+ if not re.search("\d+", package):
+ current_version[1] = re.sub('_', '.', current_version[1])
+ current_version[1] = re.sub('-', '.', current_version[1])
+ return (current_version[1], '')
+
+ package_regex = self._init_regexes(package, ud, d)
+ if package_regex is None:
+ bb.warn("latest_versionstring: package %s don't match pattern" % (package))
+ return ('', '')
+ bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))
+
+ uri = ""
+ regex_uri = d.getVar("UPSTREAM_CHECK_URI")
+ if not regex_uri:
+ path = ud.path.split(package)[0]
+
+ # search for version matches on folders inside the path, like:
+ # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
+            dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
+ m = dirver_regex.search(path)
+ if m:
+ pn = d.getVar('PN')
+ dirver = m.group('dirver')
+
+                dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
+ if not dirver_pn_regex.search(dirver):
+ return (self._check_latest_version_by_dir(dirver,
+ package, package_regex, current_version, ud, d), '')
+
+ uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
+ else:
+ uri = regex_uri
+
+ return (self._check_latest_version(uri, package, package_regex,
+ current_version, ud, d), '')