Diffstat (limited to 'poky/bitbake/lib')
-rw-r--r--  poky/bitbake/lib/bb/__init__.py          2
-rw-r--r--  poky/bitbake/lib/bb/cooker.py           54
-rw-r--r--  poky/bitbake/lib/bb/cookerdata.py       17
-rw-r--r--  poky/bitbake/lib/bb/data.py             10
-rw-r--r--  poky/bitbake/lib/bb/event.py            11
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py   4
-rw-r--r--  poky/bitbake/lib/bb/fetch2/git.py       10
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py      2
-rw-r--r--  poky/bitbake/lib/bb/fetch2/npmsw.py     26
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py      10
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py         18
-rw-r--r--  poky/bitbake/lib/bb/server/process.py   10
-rw-r--r--  poky/bitbake/lib/bb/siggen.py            2
-rw-r--r--  poky/bitbake/lib/bb/tests/fetch.py     123
-rw-r--r--  poky/bitbake/lib/bb/utils.py            19
-rw-r--r--  poky/bitbake/lib/bblayers/action.py      5
16 files changed, 220 insertions(+), 103 deletions(-)
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index 4e90964173..56be5a831e 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "2.2.0"
+__version__ = "2.3.1"
import sys
if sys.version_info < (3, 8, 0):
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index c5e9fa2941..1797a1d4ca 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -229,24 +229,26 @@ class BBCooker:
self.handlePRServ()
def setupConfigWatcher(self):
- if self.configwatcher:
- self.configwatcher.close()
- self.confignotifier = None
- self.configwatcher = None
- self.configwatcher = pyinotify.WatchManager()
- self.configwatcher.bbseen = set()
- self.configwatcher.bbwatchedfiles = set()
- self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
+ with bb.utils.lock_timeout(self.inotify_threadlock):
+ if self.configwatcher:
+ self.configwatcher.close()
+ self.confignotifier = None
+ self.configwatcher = None
+ self.configwatcher = pyinotify.WatchManager()
+ self.configwatcher.bbseen = set()
+ self.configwatcher.bbwatchedfiles = set()
+ self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
def setupParserWatcher(self):
- if self.watcher:
- self.watcher.close()
- self.notifier = None
- self.watcher = None
- self.watcher = pyinotify.WatchManager()
- self.watcher.bbseen = set()
- self.watcher.bbwatchedfiles = set()
- self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
+ with bb.utils.lock_timeout(self.inotify_threadlock):
+ if self.watcher:
+ self.watcher.close()
+ self.notifier = None
+ self.watcher = None
+ self.watcher = pyinotify.WatchManager()
+ self.watcher.bbseen = set()
+ self.watcher.bbwatchedfiles = set()
+ self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
def process_inotify_updates(self):
with bb.utils.lock_timeout(self.inotify_threadlock):
@@ -337,12 +339,21 @@ class BBCooker:
providerlog.error("Root privilege is required to modify max_user_watches.")
raise
+ def handle_inotify_updates(self):
+ # reload files for which we got notifications
+ for p in self.inotify_modified_files:
+ bb.parse.update_cache(p)
+ if p in bb.parse.BBHandler.cached_statements:
+ del bb.parse.BBHandler.cached_statements[p]
+ self.inotify_modified_files = []
+
def sigterm_exception(self, signum, stackframe):
if signum == signal.SIGTERM:
bb.warn("Cooker received SIGTERM, shutting down...")
elif signum == signal.SIGHUP:
bb.warn("Cooker received SIGHUP, shutting down...")
self.state = state.forceshutdown
+ bb.event._should_exit.set()
def setFeatures(self, features):
# we only accept a new feature set if we're in state initial, so we can reset without problems
@@ -365,6 +376,7 @@ class BBCooker:
if mod not in self.orig_sysmodules:
del sys.modules[mod]
+ self.handle_inotify_updates()
self.setupConfigWatcher()
# Need to preserve BB_CONSOLELOG over resets
@@ -1518,6 +1530,7 @@ class BBCooker:
msg = None
interrupted = 0
if halt or self.state == state.forceshutdown:
+ bb.event._should_exit.set()
rq.finish_runqueue(True)
msg = "Forced shutdown"
interrupted = 2
@@ -1610,12 +1623,7 @@ class BBCooker:
if self.state == state.running:
return
- # reload files for which we got notifications
- for p in self.inotify_modified_files:
- bb.parse.update_cache(p)
- if p in bb.parse.BBHandler.cached_statements:
- del bb.parse.BBHandler.cached_statements[p]
- self.inotify_modified_files = []
+ self.handle_inotify_updates()
if not self.baseconfig_valid:
logger.debug("Reloading base configuration data")
@@ -1758,6 +1766,7 @@ class BBCooker:
self.state = state.forceshutdown
else:
self.state = state.shutdown
+ bb.event._should_exit.set()
if self.parser:
self.parser.shutdown(clean=False)
@@ -1768,6 +1777,7 @@ class BBCooker:
self.parser.shutdown(clean=False)
self.parser.final_cleanup()
self.state = state.initial
+ bb.event._should_exit.clear()
def reset(self):
if hasattr(bb.parse, "siggen"):
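The watcher setup above is now serialized with process_inotify_updates() via bb.utils.lock_timeout, so the idle thread can never observe a half-torn-down notifier. A minimal sketch of the lock_timeout pattern, assuming a timeout-then-abort policy (the exact timeout and failure handling in bb/utils.py may differ):

    import os
    import threading
    from contextlib import contextmanager

    @contextmanager
    def lock_timeout(lock, timeout=300):
        # Acquire with a timeout so a wedged lock holder cannot deadlock
        # the server; failing hard beats hanging forever.
        held = lock.acquire(timeout=timeout)
        try:
            if not held:
                os._exit(1)
            yield
        finally:
            if held:
                lock.release()

With that in place, setupConfigWatcher(), setupParserWatcher() and process_inotify_updates() all become critical sections on the same inotify_threadlock.
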
diff --git a/poky/bitbake/lib/bb/cookerdata.py b/poky/bitbake/lib/bb/cookerdata.py
index 1658bee93c..adde0e7444 100644
--- a/poky/bitbake/lib/bb/cookerdata.py
+++ b/poky/bitbake/lib/bb/cookerdata.py
@@ -160,12 +160,7 @@ def catch_parse_error(func):
def wrapped(fn, *args):
try:
return func(fn, *args)
- except IOError as exc:
- import traceback
- parselog.critical(traceback.format_exc())
- parselog.critical("Unable to parse %s: %s" % (fn, exc))
- raise bb.BBHandledException()
- except bb.data_smart.ExpansionError as exc:
+ except Exception as exc:
import traceback
bbdir = os.path.dirname(__file__) + os.sep
@@ -177,9 +172,6 @@ def catch_parse_error(func):
break
parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
raise bb.BBHandledException()
- except bb.parse.ParseError as exc:
- parselog.critical(str(exc))
- raise bb.BBHandledException()
return wrapped
@catch_parse_error
@@ -302,14 +294,9 @@ class CookerDataBuilder(object):
bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data)
self.data_hash = data_hash.hexdigest()
- except (SyntaxError, bb.BBHandledException):
- raise bb.BBHandledException()
except bb.data_smart.ExpansionError as e:
logger.error(str(e))
raise bb.BBHandledException()
- except Exception:
- logger.exception("Error parsing configuration files")
- raise bb.BBHandledException()
bb.codeparser.update_module_dependencies(self.data)
@@ -473,7 +460,7 @@ class CookerDataBuilder(object):
msg += (" and bitbake did not find a conf/bblayers.conf file in"
" the expected location.\nMaybe you accidentally"
" invoked bitbake from the wrong directory?")
- raise SystemExit(msg)
+ bb.fatal(msg)
if not data.getVar("TOPDIR"):
data.setVar("TOPDIR", os.path.abspath(os.getcwd()))
diff --git a/poky/bitbake/lib/bb/data.py b/poky/bitbake/lib/bb/data.py
index f3ae062022..3ee8f5e7db 100644
--- a/poky/bitbake/lib/bb/data.py
+++ b/poky/bitbake/lib/bb/data.py
@@ -114,8 +114,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
if d.getVarFlag(var, 'python', False) and func:
return False
- export = d.getVarFlag(var, "export", False)
- unexport = d.getVarFlag(var, "unexport", False)
+ export = bb.utils.to_boolean(d.getVarFlag(var, "export"))
+ unexport = bb.utils.to_boolean(d.getVarFlag(var, "unexport"))
if not all and not export and not unexport and not func:
return False
@@ -188,8 +188,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False):
def exported_keys(d):
return (key for key in d.keys() if not key.startswith('__') and
- d.getVarFlag(key, 'export', False) and
- not d.getVarFlag(key, 'unexport', False))
+ bb.utils.to_boolean(d.getVarFlag(key, 'export')) and
+ not bb.utils.to_boolean(d.getVarFlag(key, 'unexport')))
def exported_vars(d):
k = list(exported_keys(d))
@@ -375,7 +375,7 @@ def generate_dependencies(d, ignored_vars):
mod_funcs = set(bb.codeparser.modulecode_deps.keys())
keys = set(key for key in d if not key.startswith("__")) | mod_funcs
- shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
+ shelldeps = set(key for key in d.getVar("__exportlist", False) if bb.utils.to_boolean(d.getVarFlag(key, "export")) and not bb.utils.to_boolean(d.getVarFlag(key, "unexport")))
varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
codeparserd = d.createCopy()
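With this change an export/unexport flag is interpreted as a boolean rather than merely tested for presence, so export = "0" no longer exports a variable. A sketch of the to_boolean semantics assumed here (bb.utils.to_boolean behaves along these lines):

    def to_boolean(string, default=None):
        # Empty/None falls back to the default; recognized spellings map
        # explicitly; anything else is a hard error rather than a guess.
        if not string:
            return default
        normalized = str(string).lower()
        if normalized in ("y", "yes", "1", "true"):
            return True
        if normalized in ("n", "no", "0", "false"):
            return False
        raise ValueError("Invalid boolean value '%s'" % string)

    # Previously any non-empty flag value counted as set, so a recipe
    # writing 'export = "0"' still exported the variable; now it does not.
    assert to_boolean("0") is False and to_boolean("True") is True
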
diff --git a/poky/bitbake/lib/bb/event.py b/poky/bitbake/lib/bb/event.py
index 8b05f93e2f..37cc630c63 100644
--- a/poky/bitbake/lib/bb/event.py
+++ b/poky/bitbake/lib/bb/event.py
@@ -69,6 +69,7 @@ _eventfilter = None
_uiready = False
_thread_lock = threading.Lock()
_heartbeat_enabled = False
+_should_exit = threading.Event()
def enable_threadlock():
# Always needed now
@@ -86,6 +87,16 @@ def disable_heartbeat():
global _heartbeat_enabled
_heartbeat_enabled = False
+#
+# In long-running code, this function should be called periodically
+# to check if we should exit due to an interruption (e.g. Ctrl+C from the UI)
+#
+def check_for_interrupts(d):
+ global _should_exit
+ if _should_exit.is_set():
+ bb.warn("Exiting due to interrupt.")
+ raise bb.BBHandledException()
+
def execute_handler(name, handler, event, d):
event.data = d
try:
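The new _should_exit event gives long-running loops a cooperative cancellation point: the server sets it on shutdown or Ctrl+C, and callers of check_for_interrupts() unwind via an exception. A minimal sketch of the pattern (HandledException stands in for bb.BBHandledException):

    import threading

    _should_exit = threading.Event()

    class HandledException(Exception):
        """Stand-in for bb.BBHandledException."""

    def check_for_interrupts():
        if _should_exit.is_set():
            raise HandledException("Exiting due to interrupt.")

    def long_running_work(items):
        for item in items:
            check_for_interrupts()  # cancellation point between work units
            # ... process one item here ...
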
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 747356dfa1..cf65727a20 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -855,7 +855,9 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH',
'AWS_PROFILE',
'AWS_ACCESS_KEY_ID',
'AWS_SECRET_ACCESS_KEY',
- 'AWS_DEFAULT_REGION']
+ 'AWS_DEFAULT_REGION',
+ 'GIT_CACHE_PATH',
+ 'SSL_CERT_DIR']
def get_fetcher_environment(d):
newenv = {}
diff --git a/poky/bitbake/lib/bb/fetch2/git.py b/poky/bitbake/lib/bb/fetch2/git.py
index 2e3d32515f..5bb8393133 100644
--- a/poky/bitbake/lib/bb/fetch2/git.py
+++ b/poky/bitbake/lib/bb/fetch2/git.py
@@ -417,8 +417,7 @@ class Git(FetchMethod):
# It would be nice to just do this inline here by running 'git-lfs fetch'
# on the bare clonedir, but that operation requires a working copy on some
# releases of Git LFS.
- tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
- try:
+ with tempfile.TemporaryDirectory(dir=d.getVar('DL_DIR')) as tmpdir:
# Do the checkout. This implicitly involves a Git LFS fetch.
Git.unpack(self, ud, tmpdir, d)
@@ -436,8 +435,6 @@ class Git(FetchMethod):
# downloaded.
if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir)
- finally:
- bb.utils.remove(tmpdir, recurse=True)
def build_mirror_data(self, ud, d):
@@ -660,11 +657,6 @@ class Git(FetchMethod):
Check if the repository has 'lfs' (large file) content
"""
- if not ud.nobranch:
- branchname = ud.branches[ud.names[0]]
- else:
- branchname = "master"
-
# The bare clonedir doesn't use the remote names; it has the branch immediately.
if wd == ud.clonedir:
refname = ud.branches[ud.names[0]]
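The mkdtemp/try/finally dance above collapses into tempfile.TemporaryDirectory, which removes the tree on exit from the with block even when an exception escapes. An equivalent standalone example:

    import os
    import tempfile

    with tempfile.TemporaryDirectory(dir=tempfile.gettempdir()) as tmpdir:
        scratch = os.path.join(tmpdir, "scratch.txt")
        with open(scratch, "w") as f:
            f.write("transient data")
        # work with the checkout here; cleanup needs no finally block
    assert not os.path.exists(tmpdir)  # removed automatically
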
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index fee40cdcb4..f8e239bc5d 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -90,7 +90,7 @@ class GitSM(Git):
# Convert relative to absolute uri based on parent uri
if uris[m].startswith('..') or uris[m].startswith('./'):
newud = copy.copy(ud)
- newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
+ newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
uris[m] = Git._get_repo_url(self, newud)
for module in submodules:
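The realpath-to-normpath switch matters because the path here is a component of a remote URL, not a local file. A short illustration of the difference (normpath is a pure string operation; realpath consults the local filesystem):

    import os.path

    p = os.path.join("/group/project.git", "../sibling.git")
    print(os.path.normpath(p))  # "/group/sibling.git", computed from the string
    # os.path.realpath(p) would additionally resolve any local symlinks under
    # /group, silently rewriting a remote repository path it should not touch.
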
diff --git a/poky/bitbake/lib/bb/fetch2/npmsw.py b/poky/bitbake/lib/bb/fetch2/npmsw.py
index a8c4d3528f..36fcbfba15 100644
--- a/poky/bitbake/lib/bb/fetch2/npmsw.py
+++ b/poky/bitbake/lib/bb/fetch2/npmsw.py
@@ -129,10 +129,28 @@ class NpmShrinkWrap(FetchMethod):
localpath = os.path.join(d.getVar("DL_DIR"), localfile)
+ # Handle local tarball and link sources
+ elif version.startswith("file"):
+ localpath = version[5:]
+ if not version.endswith(".tgz"):
+ unpack = False
+
# Handle git sources
- elif version.startswith("git"):
+ elif version.startswith(("git", "bitbucket","gist")) or (
+ not version.endswith((".tgz", ".tar", ".tar.gz"))
+ and not version.startswith((".", "@", "/"))
+ and "/" in version
+ ):
if version.startswith("github:"):
version = "git+https://github.com/" + version[len("github:"):]
+ elif version.startswith("gist:"):
+ version = "git+https://gist.github.com/" + version[len("gist:"):]
+ elif version.startswith("bitbucket:"):
+ version = "git+https://bitbucket.org/" + version[len("bitbucket:"):]
+ elif version.startswith("gitlab:"):
+ version = "git+https://gitlab.com/" + version[len("gitlab:"):]
+ elif not version.startswith(("git+","git:")):
+ version = "git+https://github.com/" + version
regex = re.compile(r"""
^
git\+
@@ -158,12 +176,6 @@ class NpmShrinkWrap(FetchMethod):
url = str(uri)
- # Handle local tarball and link sources
- elif version.startswith("file"):
- localpath = version[5:]
- if not version.endswith(".tgz"):
- unpack = False
-
else:
raise ParameterError("Unsupported dependency: %s" % name, ud.url)
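The widened git branch above accepts npm's hosted-repo shorthands (github:, gist:, bitbucket:, gitlab:) plus the bare owner/repo form, which npm defaults to GitHub. A sketch of just the expansion step (the surrounding dispatch condition in the hunk decides when this runs):

    def expand_npm_git_shorthand(version):
        prefixes = {
            "github:": "git+https://github.com/",
            "gist:": "git+https://gist.github.com/",
            "bitbucket:": "git+https://bitbucket.org/",
            "gitlab:": "git+https://gitlab.com/",
        }
        for prefix, base in prefixes.items():
            if version.startswith(prefix):
                return base + version[len(prefix):]
        if not version.startswith(("git+", "git:")):
            # Bare "owner/repo[#rev]" defaults to GitHub, as npm does.
            return "git+https://github.com/" + version
        return version

    assert (expand_npm_git_shorthand("jshttp/cookie.git#aec1177")
            == "git+https://github.com/jshttp/cookie.git#aec1177")
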
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 696e918030..859b4f99e2 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -26,7 +26,6 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
-from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
@@ -361,10 +360,11 @@ class Wget(FetchMethod):
try:
import netrc
- n = netrc.netrc()
- login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
- add_basic_auth("%s:%s" % (login, password), r)
- except (TypeError, ImportError, IOError, netrc.NetrcParseError):
+ auth_data = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
+ if auth_data:
+ login, _, password = auth_data
+ add_basic_auth("%s:%s" % (login, password), r)
+ except (FileNotFoundError, netrc.NetrcParseError):
pass
with opener.open(r, timeout=30) as response:
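netrc.netrc().authenticators() returns None when the host has no entry; the old code relied on the TypeError raised by unpacking None. The rewrite checks for that explicitly and narrows the except clause to the errors netrc can actually raise. A standalone sketch of the lookup:

    import netrc
    import urllib.parse

    def netrc_credentials(uri):
        try:
            host = urllib.parse.urlparse(uri).hostname
            auth = netrc.netrc().authenticators(host)
        except (FileNotFoundError, netrc.NetrcParseError):
            return None  # no ~/.netrc, or an unparsable one
        if auth:
            login, _, password = auth
            return login, password
        return None  # host not listed
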
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index ce711b6252..e629ab7e7b 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -655,6 +655,7 @@ class RunQueueData:
self.init_progress_reporter.start()
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Step A - Work out a list of tasks to run
#
@@ -803,6 +804,7 @@ class RunQueueData:
#self.dump_data()
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Resolve recursive 'recrdeptask' dependencies (Part B)
#
@@ -899,6 +901,7 @@ class RunQueueData:
self.runtaskentries[tid].depends.difference_update(recursivetasksselfref)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
#self.dump_data()
@@ -980,6 +983,7 @@ class RunQueueData:
mark_active(tid, 1)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Step C - Prune all inactive tasks
#
@@ -1019,6 +1023,7 @@ class RunQueueData:
bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Handle runonly
if self.cooker.configuration.runonly:
@@ -1059,6 +1064,7 @@ class RunQueueData:
logger.verbose("Assign Weightings")
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Generate a list of reverse dependencies to ease future calculations
for tid in self.runtaskentries:
@@ -1066,6 +1072,7 @@ class RunQueueData:
self.runtaskentries[dep].revdeps.add(tid)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Identify tasks at the end of dependency chains
# Error on circular dependency loops (length two)
@@ -1082,12 +1089,14 @@ class RunQueueData:
logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Calculate task weights
# Check for higher-length circular dependencies
self.runq_weight = self.calculate_task_weights(endpoints)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Sanity Check - Check for multiple tasks building the same provider
for mc in self.dataCaches:
@@ -1188,6 +1197,7 @@ class RunQueueData:
self.init_progress_reporter.next_stage()
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Iterate over the task list looking for tasks with a 'setscene' function
self.runq_setscene_tids = set()
@@ -1200,6 +1210,7 @@ class RunQueueData:
self.runq_setscene_tids.add(tid)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Invalidate task if force mode active
if self.cooker.configuration.force:
@@ -1216,6 +1227,7 @@ class RunQueueData:
invalidate_task(fn + ":" + st, True)
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
# Create and print to the logs a virtual/xxxx -> PN (fn) table
for mc in taskData:
@@ -1228,6 +1240,7 @@ class RunQueueData:
bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc])
self.init_progress_reporter.next_stage()
+ bb.event.check_for_interrupts(self.cooker.data)
bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
@@ -1240,6 +1253,7 @@ class RunQueueData:
dealtwith.add(tid)
todeal.remove(tid)
self.prepare_task_hash(tid)
+ bb.event.check_for_interrupts(self.cooker.data)
bb.parse.siggen.writeout_file_checksum_cache()
@@ -1483,6 +1497,7 @@ class RunQueue:
"""
retval = True
+ bb.event.check_for_interrupts(self.cooker.data)
if self.state is runQueuePrepare:
# NOTE: if you add, remove or significantly refactor the stages of this
@@ -1941,8 +1956,7 @@ class RunQueueExecute:
try:
module = __import__(modname, fromlist=(name,))
except ImportError as exc:
- logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
- raise SystemExit(1)
+ bb.fatal("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
else:
schedulers.add(getattr(module, name))
return schedulers
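Besides threading bb.event.check_for_interrupts() between the stages above, the scheduler loader now reports import failures through bb.fatal rather than raising SystemExit directly. The __import__ with fromlist idiom it uses loads a class from a dotted name; a minimal sketch (names illustrative):

    def load_class(dotted_name):
        # "pkg.module.ClassName" -> the class object; fromlist makes
        # __import__ return the leaf module instead of the top package.
        modname, _, clsname = dotted_name.rpartition(".")
        module = __import__(modname, fromlist=(clsname,))
        return getattr(module, clsname)

    OrderedDict = load_class("collections.OrderedDict")
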
diff --git a/poky/bitbake/lib/bb/server/process.py b/poky/bitbake/lib/bb/server/process.py
index 916ee0a0e5..db417c8428 100644
--- a/poky/bitbake/lib/bb/server/process.py
+++ b/poky/bitbake/lib/bb/server/process.py
@@ -405,7 +405,11 @@ class ProcessServer():
nextsleep = 0.1
fds = []
- self.cooker.process_inotify_updates()
+ try:
+ self.cooker.process_inotify_updates()
+ except Exception as exc:
+ serverlog("Exception %s in inofify updates broke the idle_thread, exiting" % traceback.format_exc())
+ self.quit = True
with bb.utils.lock_timeout(self._idlefuncsLock):
items = list(self._idlefuns.items())
@@ -473,6 +477,10 @@ class ProcessServer():
if not self.idle:
self.idle = threading.Thread(target=self.idle_thread)
self.idle.start()
+ elif self.idle and not self.idle.is_alive():
+ serverlog("Idle thread terminated, main thread exiting too")
+ bb.error("Idle thread terminated, main thread exiting too")
+ self.quit = True
if nextsleep is not None:
if self.xmlrpc:
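The second hunk is a liveness watchdog: if the idle thread dies from an unhandled exception, the main loop now notices and shuts the server down instead of spinning forever with half of it gone. The shape of the pattern, reduced to essentials:

    import threading
    import time

    def serve(idle_body, quit_flag):
        idle = threading.Thread(target=idle_body, daemon=True)
        idle.start()
        while not quit_flag.is_set():
            if not idle.is_alive():
                # The worker died; a silently half-running server is
                # worse than a clean exit.
                quit_flag.set()
                break
            time.sleep(0.1)
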
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 26e0243b00..c4ff9d8de1 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -598,7 +598,7 @@ class SignatureGeneratorUniHashMixIn(object):
# A unique hash equal to the taskhash is not very interesting,
# so it is reported at debug level 2. If they differ, that
# is much more interesting, so it is reported at debug level 1
- hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
+ hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
else:
hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
except ConnectionError as e:
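The fix matters because hashequiv_logger.debug() has the stdlib signature taking a message, while the (1, 2)[cond] expression selects a bitbake debug verbosity; bbdebug() is the bitbake API that accepts one. A sketch of a bbdebug-style helper, assuming bitbake's convention of mapping level N onto a log level at or just below DEBUG (the exact mapping lives in bb/msg.py):

    import logging

    def bbdebug(logger, level, msg):
        # Level 1 -> logging.DEBUG, level 2 -> logging.DEBUG - 1, etc.
        logger.log(logging.DEBUG - level + 1, msg)

    log = logging.getLogger("BitBake.SigGen")
    unihash, taskhash = "abc", "abc"
    bbdebug(log, (1, 2)[unihash == taskhash], "Found unihash ...")
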
diff --git a/poky/bitbake/lib/bb/tests/fetch.py b/poky/bitbake/lib/bb/tests/fetch.py
index f3890321d6..73eefc5938 100644
--- a/poky/bitbake/lib/bb/tests/fetch.py
+++ b/poky/bitbake/lib/bb/tests/fetch.py
@@ -2199,6 +2199,12 @@ class GitShallowTest(FetcherTest):
self.assertIn("fstests.doap", dir)
class GitLfsTest(FetcherTest):
+ def skipIfNoGitLFS():
+ import shutil
+ if not shutil.which('git-lfs'):
+ return unittest.skip('git-lfs not installed')
+ return lambda f: f
+
def setUp(self):
FetcherTest.setUp(self)
@@ -2232,6 +2238,44 @@ class GitLfsTest(FetcherTest):
ud = fetcher.ud[uri]
return fetcher, ud
+ def get_real_git_lfs_file(self):
+ self.d.setVar('PATH', os.environ.get('PATH'))
+ fetcher, ud = self.fetch()
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ unpacked_lfs_file = os.path.join(self.d.getVar('WORKDIR'), 'git', "Cat_poster_1.jpg")
+ return unpacked_lfs_file
+
+ @skipIfNoGitLFS()
+ @skipIfNoNetwork()
+ def test_real_git_lfs_repo_succeeds_without_lfs_param(self):
+ self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master")
+ f = self.get_real_git_lfs_file()
+ self.assertTrue(os.path.exists(f))
+ self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
+
+ @skipIfNoGitLFS()
+ @skipIfNoNetwork()
+ def test_real_git_lfs_repo_succeeds(self):
+ self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=1")
+ f = self.get_real_git_lfs_file()
+ self.assertTrue(os.path.exists(f))
+ self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
+
+ @skipIfNoGitLFS()
+ @skipIfNoNetwork()
+ def test_real_git_lfs_repo_skips(self):
+ self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=0")
+ f = self.get_real_git_lfs_file()
+ # This is the actual non-smudged placeholder file on the repo if git-lfs does not run
+ lfs_file = (
+ 'version https://git-lfs.github.com/spec/v1\n'
+ 'oid sha256:34be66b1a39a1955b46a12588df9d5f6fc1da790e05cf01f3c7422f4bbbdc26b\n'
+ 'size 11423554\n'
+ )
+
+ with open(f) as fh:
+ self.assertEqual(lfs_file, fh.read())
+
def test_lfs_enabled(self):
import shutil
@@ -2250,12 +2294,16 @@ class GitLfsTest(FetcherTest):
shutil.rmtree(self.gitdir, ignore_errors=True)
fetcher.unpack(self.d.getVar('WORKDIR'))
- # If git-lfs cannot be found, the unpack should throw an error
- with self.assertRaises(bb.fetch2.FetchError):
- fetcher.download()
- ud.method._find_git_lfs = lambda d: False
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
+ old_find_git_lfs = ud.method._find_git_lfs
+ try:
+ # If git-lfs cannot be found, the unpack should throw an error
+ with self.assertRaises(bb.fetch2.FetchError):
+ fetcher.download()
+ ud.method._find_git_lfs = lambda d: False
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ finally:
+ ud.method._find_git_lfs = old_find_git_lfs
def test_lfs_disabled(self):
import shutil
@@ -2270,17 +2318,21 @@ class GitLfsTest(FetcherTest):
fetcher, ud = self.fetch()
self.assertIsNotNone(ud.method._find_git_lfs)
- # If git-lfs can be found, the unpack should be successful. A
- # live copy of git-lfs is not required for this case, so
- # unconditionally forge its presence.
- ud.method._find_git_lfs = lambda d: True
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
+ old_find_git_lfs = ud.method._find_git_lfs
+ try:
+ # If git-lfs can be found, the unpack should be successful. A
+ # live copy of git-lfs is not required for this case, so
+ # unconditionally forge its presence.
+ ud.method._find_git_lfs = lambda d: True
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ # If git-lfs cannot be found, the unpack should be successful
- # If git-lfs cannot be found, the unpack should be successful
- ud.method._find_git_lfs = lambda d: False
- shutil.rmtree(self.gitdir, ignore_errors=True)
- fetcher.unpack(self.d.getVar('WORKDIR'))
+ ud.method._find_git_lfs = lambda d: False
+ shutil.rmtree(self.gitdir, ignore_errors=True)
+ fetcher.unpack(self.d.getVar('WORKDIR'))
+ finally:
+ ud.method._find_git_lfs = old_find_git_lfs
class GitURLWithSpacesTest(FetcherTest):
test_git_urls = {
@@ -2614,6 +2666,45 @@ class NPMTest(FetcherTest):
@skipIfNoNpm()
@skipIfNoNetwork()
+ def test_npmsw_git(self):
+ swfile = self.create_shrinkwrap_file({
+ 'dependencies': {
+ 'cookie': {
+ 'version': 'github:jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
+ 'from': 'github:jshttp/cookie.git'
+ }
+ }
+ })
+ fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+ fetcher.download()
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
+
+ swfile = self.create_shrinkwrap_file({
+ 'dependencies': {
+ 'cookie': {
+ 'version': 'jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
+ 'from': 'jshttp/cookie.git'
+ }
+ }
+ })
+ fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+ fetcher.download()
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
+
+ swfile = self.create_shrinkwrap_file({
+ 'dependencies': {
+ 'nodejs': {
+ 'version': 'gitlab:gitlab-examples/nodejs.git#892a1f16725e56cc3a2cb0d677be42935c8fc262',
+ 'from': 'gitlab:gitlab-examples/nodejs'
+ }
+ }
+ })
+ fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+ fetcher.download()
+ self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'gitlab.com.gitlab-examples.nodejs.git')))
+
+ @skipIfNoNpm()
+ @skipIfNoNetwork()
def test_npmsw_dev(self):
swfile = self.create_shrinkwrap_file({
'dependencies': {
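skipIfNoGitLFS above shows the conditional-skip decorator pattern: evaluated once at class-definition time, it returns unittest.skip when the tool is missing and an identity decorator otherwise. A generalized sketch:

    import shutil
    import unittest

    def skipIfNoTool(tool):
        if not shutil.which(tool):
            return unittest.skip('%s not installed' % tool)
        return lambda f: f  # tool present: leave the test untouched

    class Example(unittest.TestCase):
        @skipIfNoTool('git-lfs')
        def test_needs_lfs(self):
            self.assertTrue(True)
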
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index 4446997e42..0624a4f3e9 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -1698,22 +1698,11 @@ def disable_network(uid=None, gid=None):
f.write("%s %s 1" % (gid, gid))
def export_proxies(d):
+ from bb.fetch2 import get_fetcher_environment
""" export common proxies variables from datastore to environment """
-
- variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
- 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
- 'GIT_PROXY_COMMAND', 'SSL_CERT_FILE', 'SSL_CERT_DIR']
-
- origenv = d.getVar("BB_ORIGENV")
-
- for name in variables:
- value = d.getVar(name)
- if not value and origenv:
- value = origenv.getVar(name)
- if value:
- os.environ[name] = value
-
-
+ newenv = get_fetcher_environment(d)
+ for v in newenv:
+ os.environ[v] = newenv[v]
def load_plugins(logger, plugins, pluginpath):
def load_plugin(name):
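export_proxies is now a thin wrapper over get_fetcher_environment, so the proxy/certificate allowlist lives in one place (FETCH_EXPORT_VARS, extended earlier in this diff). A simplified sketch of the consolidation, with the datastore reduced to a dict and names otherwise illustrative:

    import os

    FETCH_EXPORT_VARS = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                         'no_proxy', 'NO_PROXY', 'SSL_CERT_FILE', 'SSL_CERT_DIR',
                         'GIT_CACHE_PATH']

    def get_fetcher_environment(datastore, origenv):
        newenv = {}
        for name in FETCH_EXPORT_VARS:
            # Datastore value wins; fall back to the environment captured
            # at startup (BB_ORIGENV in the real code).
            value = datastore.get(name) or origenv.get(name)
            if value:
                newenv[name] = value
        return newenv

    def export_proxies(datastore, origenv=os.environ):
        for name, value in get_fetcher_environment(datastore, origenv).items():
            os.environ[name] = value
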
diff --git a/poky/bitbake/lib/bblayers/action.py b/poky/bitbake/lib/bblayers/action.py
index 454c251410..0d7fd6edd1 100644
--- a/poky/bitbake/lib/bblayers/action.py
+++ b/poky/bitbake/lib/bblayers/action.py
@@ -11,6 +11,7 @@ import shutil
import sys
import tempfile
+from bb.cookerdata import findTopdir
import bb.utils
from bblayers.common import LayerPlugin
@@ -37,7 +38,7 @@ class ActionPlugin(LayerPlugin):
sys.stderr.write("Specified layer directory %s doesn't contain a conf/layer.conf file\n" % layerdir)
return 1
- bblayers_conf = os.path.join('conf', 'bblayers.conf')
+ bblayers_conf = os.path.join(findTopdir(), 'conf', 'bblayers.conf')
if not os.path.exists(bblayers_conf):
sys.stderr.write("Unable to find bblayers.conf\n")
return 1
@@ -65,7 +66,7 @@ class ActionPlugin(LayerPlugin):
def do_remove_layer(self, args):
"""Remove one or more layers from bblayers.conf."""
- bblayers_conf = os.path.join('conf', 'bblayers.conf')
+ bblayers_conf = os.path.join(findTopdir(), 'conf', 'bblayers.conf')
if not os.path.exists(bblayers_conf):
sys.stderr.write("Unable to find bblayers.conf\n")
return 1