author     jmbills <jason.m.bills@intel.com>  2021-10-04 22:42:48 +0300
committer  GitHub <noreply@github.com>  2021-10-04 22:42:48 +0300
commit     0c9e31989c615598b5d042ffab385606660c93c0 (patch)
tree       8019999b0ca042482e5193d6cabc06220c71d776 /poky/bitbake/lib
parent     04cd92067d2481643df5010cb39b2134b648cf4d (diff)
parent     ffe6d597d9e3d4407cf8062b5d6505a80ce08f41 (diff)
download   openbmc-0c9e31989c615598b5d042ffab385606660c93c0.tar.xz
Merge pull request #72 from Intel-BMC/update2021-0.751-0.75
Update
Diffstat (limited to 'poky/bitbake/lib')
-rw-r--r--  poky/bitbake/lib/bb/__init__.py  6
-rw-r--r--  poky/bitbake/lib/bb/asyncrpc/client.py  21
-rw-r--r--  poky/bitbake/lib/bb/asyncrpc/serv.py  149
-rw-r--r--  poky/bitbake/lib/bb/cache.py  4
-rw-r--r--  poky/bitbake/lib/bb/command.py  14
-rw-r--r--  poky/bitbake/lib/bb/cooker.py  5
-rw-r--r--  poky/bitbake/lib/bb/data_smart.py  123
-rw-r--r--  poky/bitbake/lib/bb/fetch2/__init__.py  50
-rw-r--r--  poky/bitbake/lib/bb/fetch2/wget.py  151
-rw-r--r--  poky/bitbake/lib/bb/parse/ast.py  4
-rw-r--r--  poky/bitbake/lib/bb/providers.py  2
-rw-r--r--  poky/bitbake/lib/bb/runqueue.py  58
-rw-r--r--  poky/bitbake/lib/bb/server/process.py  8
-rw-r--r--  poky/bitbake/lib/bb/siggen.py  4
-rw-r--r--  poky/bitbake/lib/bb/tests/codeparser.py  4
-rw-r--r--  poky/bitbake/lib/bb/tests/data.py  95
-rw-r--r--  poky/bitbake/lib/bb/tests/parse.py  18
-rw-r--r--  poky/bitbake/lib/bb/tests/utils.py  18
-rw-r--r--  poky/bitbake/lib/bb/ui/taskexp.py  5
-rw-r--r--  poky/bitbake/lib/bb/utils.py  18
-rw-r--r--  poky/bitbake/lib/bblayers/query.py  2
-rw-r--r--  poky/bitbake/lib/hashserv/server.py  4
-rw-r--r--  poky/bitbake/lib/hashserv/tests.py  54
-rw-r--r--  poky/bitbake/lib/prserv/client.py  48
-rw-r--r--  poky/bitbake/lib/prserv/db.py  65
-rw-r--r--  poky/bitbake/lib/prserv/serv.py  257
-rw-r--r--  poky/bitbake/lib/toaster/orm/models.py  2
-rw-r--r--  poky/bitbake/lib/toaster/toastergui/views.py  4
-rw-r--r--  poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py  2
29 files changed, 733 insertions, 462 deletions
diff --git a/poky/bitbake/lib/bb/__init__.py b/poky/bitbake/lib/bb/__init__.py
index a144bd609..5c248d365 100644
--- a/poky/bitbake/lib/bb/__init__.py
+++ b/poky/bitbake/lib/bb/__init__.py
@@ -9,11 +9,11 @@
# SPDX-License-Identifier: GPL-2.0-only
#
-__version__ = "1.51.0"
+__version__ = "1.51.1"
import sys
-if sys.version_info < (3, 5, 0):
- raise RuntimeError("Sorry, python 3.5.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 6, 0):
+ raise RuntimeError("Sorry, python 3.6.0 or later is required for this version of bitbake")
class BBHandledException(Exception):
diff --git a/poky/bitbake/lib/bb/asyncrpc/client.py b/poky/bitbake/lib/bb/asyncrpc/client.py
index 79919c5be..50e60d5c3 100644
--- a/poky/bitbake/lib/bb/asyncrpc/client.py
+++ b/poky/bitbake/lib/bb/asyncrpc/client.py
@@ -11,13 +11,14 @@ from . import chunkify, DEFAULT_MAX_CHUNK
class AsyncClient(object):
- def __init__(self, proto_name, proto_version, logger):
+ def __init__(self, proto_name, proto_version, logger, timeout=30):
self.reader = None
self.writer = None
self.max_chunk = DEFAULT_MAX_CHUNK
self.proto_name = proto_name
self.proto_version = proto_version
self.logger = logger
+ self.timeout = timeout
async def connect_tcp(self, address, port):
async def connect_sock():
@@ -70,14 +71,18 @@ class AsyncClient(object):
async def send_message(self, msg):
async def get_line():
- line = await self.reader.readline()
+ try:
+ line = await asyncio.wait_for(self.reader.readline(), self.timeout)
+ except asyncio.TimeoutError:
+ raise ConnectionError("Timed out waiting for server")
+
if not line:
raise ConnectionError("Connection closed")
line = line.decode("utf-8")
if not line.endswith("\n"):
- raise ConnectionError("Bad message %r" % msg)
+ raise ConnectionError("Bad message %r" % (line))
return line
@@ -114,6 +119,16 @@ class Client(object):
self.client = self._get_async_client()
self.loop = asyncio.new_event_loop()
+ # Override any pre-existing loop.
+ # Without this, the PR server export selftest triggers a hang
+ # when running with Python 3.7. The drawback is that there is
+ # potential for issues if the PR and hash equiv (or some new)
+ # clients need to both be instantiated in the same process.
+ # This should be revisited if/when Python 3.9 becomes the
+ # minimum required version for BitBake, as it seems not
+ # required (but harmless) with it.
+ asyncio.set_event_loop(self.loop)
+
self._add_methods('connect_tcp', 'close', 'ping')
@abc.abstractmethod
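
The timeout added to get_line() above is the stock asyncio idiom: bound the read with asyncio.wait_for() and convert the TimeoutError into the ConnectionError that callers already handle. A minimal standalone sketch of the same idiom (not BitBake code, just the pattern):

    import asyncio

    async def read_line_with_timeout(reader, timeout=30):
        # Bound the read so a stalled server cannot hang the client forever;
        # surface the timeout as the ConnectionError callers already handle.
        try:
            line = await asyncio.wait_for(reader.readline(), timeout)
        except asyncio.TimeoutError:
            raise ConnectionError("Timed out waiting for server")
        if not line:
            raise ConnectionError("Connection closed")
        return line.decode("utf-8")
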
diff --git a/poky/bitbake/lib/bb/asyncrpc/serv.py b/poky/bitbake/lib/bb/asyncrpc/serv.py
index ef20cb71d..b4cffff21 100644
--- a/poky/bitbake/lib/bb/asyncrpc/serv.py
+++ b/poky/bitbake/lib/bb/asyncrpc/serv.py
@@ -9,6 +9,7 @@ import os
import signal
import socket
import sys
+import multiprocessing
from . import chunkify, DEFAULT_MAX_CHUNK
@@ -130,53 +131,55 @@ class AsyncServerConnection(object):
class AsyncServer(object):
- def __init__(self, logger, loop=None):
- if loop is None:
- self.loop = asyncio.new_event_loop()
- self.close_loop = True
- else:
- self.loop = loop
- self.close_loop = False
-
+ def __init__(self, logger):
self._cleanup_socket = None
self.logger = logger
+ self.start = None
+ self.address = None
+ self.loop = None
def start_tcp_server(self, host, port):
- self.server = self.loop.run_until_complete(
- asyncio.start_server(self.handle_client, host, port, loop=self.loop)
- )
-
- for s in self.server.sockets:
- self.logger.debug('Listening on %r' % (s.getsockname(),))
- # Newer python does this automatically. Do it manually here for
- # maximum compatibility
- s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
- s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
-
- name = self.server.sockets[0].getsockname()
- if self.server.sockets[0].family == socket.AF_INET6:
- self.address = "[%s]:%d" % (name[0], name[1])
- else:
- self.address = "%s:%d" % (name[0], name[1])
+ def start_tcp():
+ self.server = self.loop.run_until_complete(
+ asyncio.start_server(self.handle_client, host, port)
+ )
+
+ for s in self.server.sockets:
+ self.logger.debug('Listening on %r' % (s.getsockname(),))
+ # Newer python does this automatically. Do it manually here for
+ # maximum compatibility
+ s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
+ s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
+
+ name = self.server.sockets[0].getsockname()
+ if self.server.sockets[0].family == socket.AF_INET6:
+ self.address = "[%s]:%d" % (name[0], name[1])
+ else:
+ self.address = "%s:%d" % (name[0], name[1])
+
+ self.start = start_tcp
def start_unix_server(self, path):
def cleanup():
os.unlink(path)
- cwd = os.getcwd()
- try:
- # Work around path length limits in AF_UNIX
- os.chdir(os.path.dirname(path))
- self.server = self.loop.run_until_complete(
- asyncio.start_unix_server(self.handle_client, os.path.basename(path), loop=self.loop)
- )
- finally:
- os.chdir(cwd)
+ def start_unix():
+ cwd = os.getcwd()
+ try:
+ # Work around path length limits in AF_UNIX
+ os.chdir(os.path.dirname(path))
+ self.server = self.loop.run_until_complete(
+ asyncio.start_unix_server(self.handle_client, os.path.basename(path))
+ )
+ finally:
+ os.chdir(cwd)
- self.logger.debug('Listening on %r' % path)
+ self.logger.debug('Listening on %r' % path)
- self._cleanup_socket = cleanup
- self.address = "unix://%s" % os.path.abspath(path)
+ self._cleanup_socket = cleanup
+ self.address = "unix://%s" % os.path.abspath(path)
+
+ self.start = start_unix
@abc.abstractmethod
def accept_client(self, reader, writer):
@@ -201,12 +204,13 @@ class AsyncServer(object):
pass
def signal_handler(self):
+ self.logger.debug("Got exit signal")
self.loop.stop()
- def serve_forever(self):
- asyncio.set_event_loop(self.loop)
+ def _serve_forever(self):
try:
self.loop.add_signal_handler(signal.SIGTERM, self.signal_handler)
+ signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])
self.run_loop_forever()
self.server.close()
@@ -214,10 +218,69 @@ class AsyncServer(object):
self.loop.run_until_complete(self.server.wait_closed())
self.logger.debug('Server shutting down')
finally:
- if self.close_loop:
- if sys.version_info >= (3, 6):
- self.loop.run_until_complete(self.loop.shutdown_asyncgens())
- self.loop.close()
-
if self._cleanup_socket is not None:
self._cleanup_socket()
+
+ def serve_forever(self):
+ """
+ Serve requests in the current process
+ """
+ # Create loop and override any loop that may have existed in
+ # a parent process. It is possible that the usecases of
+ # serve_forever might be constrained enough to allow using
+ # get_event_loop here, but better safe than sorry for now.
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+ self.start()
+ self._serve_forever()
+
+ def serve_as_process(self, *, prefunc=None, args=()):
+ """
+ Serve requests in a child process
+ """
+ def run(queue):
+ # Create loop and override any loop that may have existed
+ # in a parent process. Without doing this and instead
+ # using get_event_loop, at the very minimum the hashserv
+ # unit tests will hang when running the second test.
+ # This happens since get_event_loop in the spawned server
+ # process for the second testcase ends up with the loop
+ # from the hashserv client created in the unit test process
+ # when running the first testcase. The problem is somewhat
+ # more general, though, as any potential use of asyncio in
+ # Cooker could create a loop that needs to replaced in this
+ # new process.
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+ try:
+ self.start()
+ finally:
+ queue.put(self.address)
+ queue.close()
+
+ if prefunc is not None:
+ prefunc(self, *args)
+
+ self._serve_forever()
+
+ if sys.version_info >= (3, 6):
+ self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+ self.loop.close()
+
+ queue = multiprocessing.Queue()
+
+ # Temporarily block SIGTERM. The server process will inherit this
+ # block which will ensure it doesn't receive the SIGTERM until the
+ # handler is ready for it
+ mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGTERM])
+ try:
+ self.process = multiprocessing.Process(target=run, args=(queue,))
+ self.process.start()
+
+ self.address = queue.get()
+ queue.close()
+ queue.join_thread()
+
+ return self.process
+ finally:
+ signal.pthread_sigmask(signal.SIG_SETMASK, mask)
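
The signal choreography in serve_as_process() is the subtle part: the parent blocks SIGTERM before forking so the child cannot be killed between start and handler installation, the child unblocks it once _serve_forever() has registered the handler, and the parent restores its own mask either way. A self-contained sketch of that handoff, assuming Linux and the fork start method:

    import multiprocessing
    import signal
    import sys
    import time

    def child():
        # The blocked mask is inherited; install a handler first, then
        # unblock, so a SIGTERM sent during startup is delivered only
        # once we can act on it.
        signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0))
        signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])
        time.sleep(60)

    if __name__ == "__main__":
        mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGTERM])
        try:
            p = multiprocessing.Process(target=child)
            p.start()  # SIGTERM sent from here on is queued, not lost
        finally:
            signal.pthread_sigmask(signal.SIG_SETMASK, mask)
        p.terminate()
        p.join()
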
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index 27eb27179..73bc6e966 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -53,12 +53,12 @@ class RecipeInfoCommon(object):
@classmethod
def pkgvar(cls, var, packages, metadata):
- return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
+ return dict((pkg, cls.depvar("%s:%s" % (var, pkg), metadata))
for pkg in packages)
@classmethod
def taskvar(cls, var, tasks, metadata):
- return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
+ return dict((task, cls.getvar("%s:task-%s" % (var, task), metadata))
for task in tasks)
@classmethod
diff --git a/poky/bitbake/lib/bb/command.py b/poky/bitbake/lib/bb/command.py
index f530cf844..a81dcb136 100644
--- a/poky/bitbake/lib/bb/command.py
+++ b/poky/bitbake/lib/bb/command.py
@@ -65,9 +65,17 @@ class Command:
# Ensure cooker is ready for commands
if command != "updateConfig" and command != "setFeatures":
- self.cooker.init_configdata()
- if not self.remotedatastores:
- self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
+ try:
+ self.cooker.init_configdata()
+ if not self.remotedatastores:
+ self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
+ except (Exception, SystemExit) as exc:
+ import traceback
+ if isinstance(exc, bb.BBHandledException):
+ # We need to start returning real exceptions here. Until we do, we can't
+ # tell if an exception is an instance of bb.BBHandledException
+ return None, "bb.BBHandledException()\n" + traceback.format_exc()
+ return None, traceback.format_exc()
if hasattr(CommandsSync, command):
# Can run synchronous commands straight away
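
Because runCommand() replies travel over a pipe as plain data, the failure above is marshalled as a string: a recognisable prefix for handled exceptions plus the formatted traceback, which the receiving side can branch on. A standalone sketch of that convention, with a stand-in for bb.BBHandledException:

    import traceback

    class HandledException(Exception):
        """Stand-in for bb.BBHandledException."""

    def marshal_failure(exc):
        # Prefix handled exceptions so the client can distinguish them
        # without needing to unpickle a real exception object.
        text = traceback.format_exc()
        if isinstance(exc, HandledException):
            return "HandledException()\n" + text
        return text

    try:
        raise HandledException("configuration failed")
    except Exception as exc:
        reply = marshal_failure(exc)
    print(reply.splitlines()[0])  # HandledException()
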
diff --git a/poky/bitbake/lib/bb/cooker.py b/poky/bitbake/lib/bb/cooker.py
index 39e10e613..db991702e 100644
--- a/poky/bitbake/lib/bb/cooker.py
+++ b/poky/bitbake/lib/bb/cooker.py
@@ -382,7 +382,7 @@ class BBCooker:
try:
self.prhost = prserv.serv.auto_start(self.data)
except prserv.serv.PRServiceConfigError as e:
- bb.fatal("Unable to start PR Server, exitting")
+ bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")
if self.data.getVar("BB_HASHSERVE") == "auto":
# Create a new hash server bound to a unix domain socket
@@ -390,8 +390,7 @@ class BBCooker:
dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False)
- self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever)
- self.hashserv.process.start()
+ self.hashserv.serve_as_process()
self.data.setVar("BB_HASHSERVE", self.hashservaddr)
self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr)
self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr)
diff --git a/poky/bitbake/lib/bb/data_smart.py b/poky/bitbake/lib/bb/data_smart.py
index f48726a34..65528c6ae 100644
--- a/poky/bitbake/lib/bb/data_smart.py
+++ b/poky/bitbake/lib/bb/data_smart.py
@@ -26,9 +26,9 @@ from bb.COW import COWDictBase
logger = logging.getLogger("BitBake.Data")
-__setvar_keyword__ = ["_append", "_prepend", "_remove"]
-__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$')
-__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}")
+__setvar_keyword__ = [":append", ":prepend", ":remove"]
+__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
+__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
__whitespace_split__ = re.compile(r'(\s)')
__override_regexp__ = re.compile(r'[a-z0-9]+')
@@ -277,7 +277,7 @@ class VariableHistory(object):
for (r, override) in d.overridedata[var]:
for event in self.variable(r):
loginfo = event.copy()
- if 'flag' in loginfo and not loginfo['flag'].startswith("_"):
+ if 'flag' in loginfo and not loginfo['flag'].startswith(("_", ":")):
continue
loginfo['variable'] = var
loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
@@ -342,7 +342,7 @@ class VariableHistory(object):
for event in history:
if 'flag' in event:
continue
- if event['op'] == '_remove':
+ if event['op'] == ':remove':
continue
if isset and event['op'] == 'set?':
continue
@@ -481,7 +481,15 @@ class DataSmart(MutableMapping):
def setVar(self, var, value, **loginfo):
#print("var=" + str(var) + " val=" + str(value))
- var = var.replace(":", "_")
+
+ if "_append" in var or "_prepend" in var or "_remove" in var:
+ info = "%s" % var
+ if "filename" in loginfo:
+ info += " file: %s" % loginfo['filename']
+ if "lineno" in loginfo:
+ info += " line: %s" % loginfo['lineno']
+ bb.fatal("Variable %s contains an operation using the old override syntax. Please convert this layer/metadata before attempting to use with a newer bitbake." % info)
+
self.expand_cache = {}
parsing=False
if 'parsing' in loginfo:
@@ -510,7 +518,7 @@ class DataSmart(MutableMapping):
# pay the cookie monster
# more cookies for the cookie monster
- if '_' in var:
+ if ':' in var:
self._setvar_update_overrides(base, **loginfo)
if base in self.overridevars:
@@ -521,27 +529,27 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var)
if not parsing:
- if "_append" in self.dict[var]:
- del self.dict[var]["_append"]
- if "_prepend" in self.dict[var]:
- del self.dict[var]["_prepend"]
- if "_remove" in self.dict[var]:
- del self.dict[var]["_remove"]
+ if ":append" in self.dict[var]:
+ del self.dict[var][":append"]
+ if ":prepend" in self.dict[var]:
+ del self.dict[var][":prepend"]
+ if ":remove" in self.dict[var]:
+ del self.dict[var][":remove"]
if var in self.overridedata:
active = []
self.need_overrides()
for (r, o) in self.overridedata[var]:
if o in self.overridesset:
active.append(r)
- elif "_" in o:
- if set(o.split("_")).issubset(self.overridesset):
+ elif ":" in o:
+ if set(o.split(":")).issubset(self.overridesset):
active.append(r)
for a in active:
self.delVar(a)
del self.overridedata[var]
# more cookies for the cookie monster
- if '_' in var:
+ if ':' in var:
self._setvar_update_overrides(var, **loginfo)
# setting var
@@ -567,8 +575,8 @@ class DataSmart(MutableMapping):
def _setvar_update_overrides(self, var, **loginfo):
# aka pay the cookie monster
- override = var[var.rfind('_')+1:]
- shortvar = var[:var.rfind('_')]
+ override = var[var.rfind(':')+1:]
+ shortvar = var[:var.rfind(':')]
while override and __override_regexp__.match(override):
if shortvar not in self.overridedata:
self.overridedata[shortvar] = []
@@ -577,9 +585,9 @@ class DataSmart(MutableMapping):
self.overridedata[shortvar] = list(self.overridedata[shortvar])
self.overridedata[shortvar].append([var, override])
override = None
- if "_" in shortvar:
- override = var[shortvar.rfind('_')+1:]
- shortvar = var[:shortvar.rfind('_')]
+ if ":" in shortvar:
+ override = var[shortvar.rfind(':')+1:]
+ shortvar = var[:shortvar.rfind(':')]
if len(shortvar) == 0:
override = None
@@ -590,8 +598,6 @@ class DataSmart(MutableMapping):
"""
Rename the variable key to newkey
"""
- key = key.replace(":", "_")
- newkey = newkey.replace(":", "_")
if key == newkey:
bb.warn("Calling renameVar with equivalent keys (%s) is invalid" % key)
return
@@ -620,7 +626,7 @@ class DataSmart(MutableMapping):
self.overridedata[newkey].append([v.replace(key, newkey), o])
self.renameVar(v, v.replace(key, newkey))
- if '_' in newkey and val is None:
+ if ':' in newkey and val is None:
self._setvar_update_overrides(newkey, **loginfo)
loginfo['variable'] = key
@@ -632,15 +638,14 @@ class DataSmart(MutableMapping):
def appendVar(self, var, value, **loginfo):
loginfo['op'] = 'append'
self.varhistory.record(**loginfo)
- self.setVar(var + "_append", value, ignore=True, parsing=True)
+ self.setVar(var + ":append", value, ignore=True, parsing=True)
def prependVar(self, var, value, **loginfo):
loginfo['op'] = 'prepend'
self.varhistory.record(**loginfo)
- self.setVar(var + "_prepend", value, ignore=True, parsing=True)
+ self.setVar(var + ":prepend", value, ignore=True, parsing=True)
def delVar(self, var, **loginfo):
- var = var.replace(":", "_")
self.expand_cache = {}
loginfo['detail'] = ""
@@ -649,9 +654,9 @@ class DataSmart(MutableMapping):
self.dict[var] = {}
if var in self.overridedata:
del self.overridedata[var]
- if '_' in var:
- override = var[var.rfind('_')+1:]
- shortvar = var[:var.rfind('_')]
+ if ':' in var:
+ override = var[var.rfind(':')+1:]
+ shortvar = var[:var.rfind(':')]
while override and override.islower():
try:
if shortvar in self.overridedata:
@@ -661,14 +666,13 @@ class DataSmart(MutableMapping):
except ValueError as e:
pass
override = None
- if "_" in shortvar:
- override = var[shortvar.rfind('_')+1:]
- shortvar = var[:shortvar.rfind('_')]
+ if ":" in shortvar:
+ override = var[shortvar.rfind(':')+1:]
+ shortvar = var[:shortvar.rfind(':')]
if len(shortvar) == 0:
override = None
def setVarFlag(self, var, flag, value, **loginfo):
- var = var.replace(":", "_")
self.expand_cache = {}
if 'op' not in loginfo:
@@ -679,7 +683,7 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var)
self.dict[var][flag] = value
- if flag == "_defaultval" and '_' in var:
+ if flag == "_defaultval" and ':' in var:
self._setvar_update_overrides(var, **loginfo)
if flag == "_defaultval" and var in self.overridevars:
self._setvar_update_overridevars(var, value)
@@ -692,7 +696,6 @@ class DataSmart(MutableMapping):
self.dict["__exportlist"]["_content"].add(var)
def getVarFlag(self, var, flag, expand=True, noweakdefault=False, parsing=False, retparser=False):
- var = var.replace(":", "_")
if flag == "_content":
cachename = var
else:
@@ -712,11 +715,11 @@ class DataSmart(MutableMapping):
active = {}
self.need_overrides()
for (r, o) in overridedata:
- # What about double overrides both with "_" in the name?
+ # FIXME What about double overrides both with "_" in the name?
if o in self.overridesset:
active[o] = r
- elif "_" in o:
- if set(o.split("_")).issubset(self.overridesset):
+ elif ":" in o:
+ if set(o.split(":")).issubset(self.overridesset):
active[o] = r
mod = True
@@ -724,10 +727,10 @@ class DataSmart(MutableMapping):
mod = False
for o in self.overrides:
for a in active.copy():
- if a.endswith("_" + o):
+ if a.endswith(":" + o):
t = active[a]
del active[a]
- active[a.replace("_" + o, "")] = t
+ active[a.replace(":" + o, "")] = t
mod = True
elif a == o:
match = active[a]
@@ -746,31 +749,31 @@ class DataSmart(MutableMapping):
value = copy.copy(local_var["_defaultval"])
- if flag == "_content" and local_var is not None and "_append" in local_var and not parsing:
- if not value:
- value = ""
+ if flag == "_content" and local_var is not None and ":append" in local_var and not parsing:
self.need_overrides()
- for (r, o) in local_var["_append"]:
+ for (r, o) in local_var[":append"]:
match = True
if o:
- for o2 in o.split("_"):
+ for o2 in o.split(":"):
if not o2 in self.overrides:
match = False
if match:
+ if value is None:
+ value = ""
value = value + r
- if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing:
- if not value:
- value = ""
+ if flag == "_content" and local_var is not None and ":prepend" in local_var and not parsing:
self.need_overrides()
- for (r, o) in local_var["_prepend"]:
+ for (r, o) in local_var[":prepend"]:
match = True
if o:
- for o2 in o.split("_"):
+ for o2 in o.split(":"):
if not o2 in self.overrides:
match = False
if match:
+ if value is None:
+ value = ""
value = r + value
parser = None
@@ -779,12 +782,12 @@ class DataSmart(MutableMapping):
if expand:
value = parser.value
- if value and flag == "_content" and local_var is not None and "_remove" in local_var and not parsing:
+ if value and flag == "_content" and local_var is not None and ":remove" in local_var and not parsing:
self.need_overrides()
- for (r, o) in local_var["_remove"]:
+ for (r, o) in local_var[":remove"]:
match = True
if o:
- for o2 in o.split("_"):
+ for o2 in o.split(":"):
if not o2 in self.overrides:
match = False
if match:
@@ -820,7 +823,6 @@ class DataSmart(MutableMapping):
return value
def delVarFlag(self, var, flag, **loginfo):
- var = var.replace(":", "_")
self.expand_cache = {}
local_var, _ = self._findVar(var)
@@ -838,7 +840,6 @@ class DataSmart(MutableMapping):
del self.dict[var][flag]
def appendVarFlag(self, var, flag, value, **loginfo):
- var = var.replace(":", "_")
loginfo['op'] = 'append'
loginfo['flag'] = flag
self.varhistory.record(**loginfo)
@@ -846,7 +847,6 @@ class DataSmart(MutableMapping):
self.setVarFlag(var, flag, newvalue, ignore=True)
def prependVarFlag(self, var, flag, value, **loginfo):
- var = var.replace(":", "_")
loginfo['op'] = 'prepend'
loginfo['flag'] = flag
self.varhistory.record(**loginfo)
@@ -854,7 +854,6 @@ class DataSmart(MutableMapping):
self.setVarFlag(var, flag, newvalue, ignore=True)
def setVarFlags(self, var, flags, **loginfo):
- var = var.replace(":", "_")
self.expand_cache = {}
infer_caller_details(loginfo)
if not var in self.dict:
@@ -869,13 +868,12 @@ class DataSmart(MutableMapping):
self.dict[var][i] = flags[i]
def getVarFlags(self, var, expand = False, internalflags=False):
- var = var.replace(":", "_")
local_var, _ = self._findVar(var)
flags = {}
if local_var:
for i in local_var:
- if i.startswith("_") and not internalflags:
+ if i.startswith(("_", ":")) and not internalflags:
continue
flags[i] = local_var[i]
if expand and i in expand:
@@ -886,7 +884,6 @@ class DataSmart(MutableMapping):
def delVarFlags(self, var, **loginfo):
- var = var.replace(":", "_")
self.expand_cache = {}
if not var in self.dict:
self._makeShadowCopy(var)
@@ -974,8 +971,8 @@ class DataSmart(MutableMapping):
for (r, o) in self.overridedata[var]:
if o in self.overridesset:
overrides.add(var)
- elif "_" in o:
- if set(o.split("_")).issubset(self.overridesset):
+ elif ":" in o:
+ if set(o.split(":")).issubset(self.overridesset):
overrides.add(var)
for k in keylist(self.dict):
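
This is the core of the old-to-new override syntax migration: the `_append`/`_prepend`/`_remove` keywords and the `_` override separator become `:`-based, setVar() now hard-fails on the old spelling, and the `var.replace(":", "_")` shims are dropped. The updated __setvar_regexp__ does the heavy lifting; a quick standalone check of how it decomposes a variable name (regex copied from the hunk above):

    import re

    setvar_regexp = re.compile(
        r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')

    m = setvar_regexp.match("SRC_URI:append:qemux86-64")
    print(m.group("base"), m.group("keyword"), m.group("add"))
    # SRC_URI :append qemux86-64
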
diff --git a/poky/bitbake/lib/bb/fetch2/__init__.py b/poky/bitbake/lib/bb/fetch2/__init__.py
index 0d49e1da3..914fa5c02 100644
--- a/poky/bitbake/lib/bb/fetch2/__init__.py
+++ b/poky/bitbake/lib/bb/fetch2/__init__.py
@@ -808,6 +808,29 @@ def localpath(url, d):
fetcher = bb.fetch2.Fetch([url], d)
return fetcher.localpath(url)
+# Need to export PATH as binary could be in metadata paths
+# rather than host provided
+# Also include some other variables.
+FETCH_EXPORT_VARS = ['HOME', 'PATH',
+ 'HTTP_PROXY', 'http_proxy',
+ 'HTTPS_PROXY', 'https_proxy',
+ 'FTP_PROXY', 'ftp_proxy',
+ 'FTPS_PROXY', 'ftps_proxy',
+ 'NO_PROXY', 'no_proxy',
+ 'ALL_PROXY', 'all_proxy',
+ 'GIT_PROXY_COMMAND',
+ 'GIT_SSH',
+ 'GIT_SSL_CAINFO',
+ 'GIT_SMART_HTTP',
+ 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
+ 'SOCKS5_USER', 'SOCKS5_PASSWD',
+ 'DBUS_SESSION_BUS_ADDRESS',
+ 'P4CONFIG',
+ 'SSL_CERT_FILE',
+ 'AWS_ACCESS_KEY_ID',
+ 'AWS_SECRET_ACCESS_KEY',
+ 'AWS_DEFAULT_REGION']
+
def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
"""
Run cmd returning the command output
@@ -816,28 +839,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
Optionally remove the files/directories listed in cleanup upon failure
"""
- # Need to export PATH as binary could be in metadata paths
- # rather than host provided
- # Also include some other variables.
- # FIXME: Should really include all export varaiables?
- exportvars = ['HOME', 'PATH',
- 'HTTP_PROXY', 'http_proxy',
- 'HTTPS_PROXY', 'https_proxy',
- 'FTP_PROXY', 'ftp_proxy',
- 'FTPS_PROXY', 'ftps_proxy',
- 'NO_PROXY', 'no_proxy',
- 'ALL_PROXY', 'all_proxy',
- 'GIT_PROXY_COMMAND',
- 'GIT_SSH',
- 'GIT_SSL_CAINFO',
- 'GIT_SMART_HTTP',
- 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
- 'SOCKS5_USER', 'SOCKS5_PASSWD',
- 'DBUS_SESSION_BUS_ADDRESS',
- 'P4CONFIG',
- 'AWS_ACCESS_KEY_ID',
- 'AWS_SECRET_ACCESS_KEY',
- 'AWS_DEFAULT_REGION']
+ exportvars = FETCH_EXPORT_VARS
if not cleanup:
cleanup = []
@@ -1146,11 +1148,11 @@ def srcrev_internal_helper(ud, d, name):
pn = d.getVar("PN")
attempts = []
if name != '' and pn:
- attempts.append("SRCREV_%s_pn-%s" % (name, pn))
+ attempts.append("SRCREV_%s:pn-%s" % (name, pn))
if name != '':
attempts.append("SRCREV_%s" % name)
if pn:
- attempts.append("SRCREV_pn-%s" % pn)
+ attempts.append("SRCREV:pn-%s" % pn)
attempts.append("SRCREV")
for a in attempts:
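
Hoisting the export list out of runfetchcmd() into module-level FETCH_EXPORT_VARS lets the pure-Python wget checkstatus() path (next diff) reuse it. Consumption is a datastore-first, saved-environment-fallback lookup; a hedged sketch with a plain dict standing in for the BitBake datastore:

    import os

    FETCH_EXPORT_VARS = ['HOME', 'PATH', 'http_proxy', 'SSL_CERT_FILE']  # abridged

    def gather_fetch_env(datastore, origenv):
        # Prefer the datastore value; fall back to the environment BitBake
        # saved before pruning (BB_ORIGENV in the real code).
        newenv = {}
        for name in FETCH_EXPORT_VARS:
            value = datastore.get(name) or origenv.get(name)
            if value:
                newenv[name] = value
        return newenv

    print(gather_fetch_env({'PATH': '/opt/tools/bin'}, os.environ))
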
diff --git a/poky/bitbake/lib/bb/fetch2/wget.py b/poky/bitbake/lib/bb/fetch2/wget.py
index 784df70c9..9a49e64a0 100644
--- a/poky/bitbake/lib/bb/fetch2/wget.py
+++ b/poky/bitbake/lib/bb/fetch2/wget.py
@@ -52,13 +52,19 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
class Wget(FetchMethod):
+ """Class to fetch urls via 'wget'"""
# CDNs like CloudFlare may do a 'browser integrity test' which can fail
# with the standard wget/urllib User-Agent, so pretend to be a modern
# browser.
user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
- """Class to fetch urls via 'wget'"""
+ def check_certs(self, d):
+ """
+ Should certificates be checked?
+ """
+ return (d.getVar("BB_CHECK_SSL_CERTS") or "1") != "0"
+
def supports(self, ud, d):
"""
Check to see if a given url can be fetched with wget.
@@ -82,7 +88,10 @@ class Wget(FetchMethod):
if not ud.localfile:
ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
- self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
+ self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp"
+
+ if not self.check_certs(d):
+ self.basecmd += " --no-check-certificate"
def _runwget(self, ud, d, command, quiet, workdir=None):
@@ -282,64 +291,90 @@ class Wget(FetchMethod):
newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
newreq.get_method = req.get_method
return newreq
- exported_proxies = export_proxies(d)
-
- handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
- if exported_proxies:
- handlers.append(urllib.request.ProxyHandler())
- handlers.append(CacheHTTPHandler())
- # Since Python 2.7.9 ssl cert validation is enabled by default
- # see PEP-0476, this causes verification errors on some https servers
- # so disable by default.
- import ssl
- if hasattr(ssl, '_create_unverified_context'):
- handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
- opener = urllib.request.build_opener(*handlers)
-
- try:
- uri = ud.url.split(";")[0]
- r = urllib.request.Request(uri)
- r.get_method = lambda: "HEAD"
- # Some servers (FusionForge, as used on Alioth) require that the
- # optional Accept header is set.
- r.add_header("Accept", "*/*")
- r.add_header("User-Agent", self.user_agent)
- def add_basic_auth(login_str, request):
- '''Adds Basic auth to http request, pass in login:password as string'''
- import base64
- encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
- authheader = "Basic %s" % encodeuser
- r.add_header("Authorization", authheader)
-
- if ud.user and ud.pswd:
- add_basic_auth(ud.user + ':' + ud.pswd, r)
- try:
- import netrc
- n = netrc.netrc()
- login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
- add_basic_auth("%s:%s" % (login, password), r)
- except (TypeError, ImportError, IOError, netrc.NetrcParseError):
- pass
-
- with opener.open(r) as response:
- pass
- except urllib.error.URLError as e:
- if try_again:
- logger.debug2("checkstatus: trying again")
- return self.checkstatus(fetch, ud, d, False)
- else:
- # debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug2("checkstatus() urlopen failed: %s" % e)
- return False
- except ConnectionResetError as e:
- if try_again:
- logger.debug2("checkstatus: trying again")
- return self.checkstatus(fetch, ud, d, False)
+ # We need to update the environment here as both the proxy and HTTPS
+ # handlers need variables set. The proxy needs http_proxy and friends to
+ # be set, and HTTPSHandler ends up calling into openssl to load the
+ # certificates. In buildtools configurations this will be looking at the
+ # wrong place for certificates by default: we set SSL_CERT_FILE to the
+ # right location in the buildtools environment script but as BitBake
+ # prunes the environment this is lost. When binaries are executed
+ # runfetchcmd ensures these values are in the environment, but this is
+ # pure Python so we need to update the environment.
+ #
+ # Avoid trampling the environment too much by using bb.utils.environment
+ # to scope the changes to the build_opener request, which is when the
+ # environment lookups happen.
+ newenv = {}
+ for name in bb.fetch2.FETCH_EXPORT_VARS:
+ value = d.getVar(name)
+ if not value:
+ origenv = d.getVar("BB_ORIGENV")
+ if origenv:
+ value = origenv.getVar(name)
+ if value:
+ newenv[name] = value
+
+ with bb.utils.environment(**newenv):
+ import ssl
+
+ if self.check_certs(d):
+ context = ssl.create_default_context()
else:
- # debug for now to avoid spamming the logs in e.g. remote sstate searches
- logger.debug2("checkstatus() urlopen failed: %s" % e)
- return False
+ context = ssl._create_unverified_context()
+
+ handlers = [FixedHTTPRedirectHandler,
+ HTTPMethodFallback,
+ urllib.request.ProxyHandler(),
+ CacheHTTPHandler(),
+ urllib.request.HTTPSHandler(context=context)]
+ opener = urllib.request.build_opener(*handlers)
+
+ try:
+ uri = ud.url.split(";")[0]
+ r = urllib.request.Request(uri)
+ r.get_method = lambda: "HEAD"
+ # Some servers (FusionForge, as used on Alioth) require that the
+ # optional Accept header is set.
+ r.add_header("Accept", "*/*")
+ r.add_header("User-Agent", self.user_agent)
+ def add_basic_auth(login_str, request):
+ '''Adds Basic auth to http request, pass in login:password as string'''
+ import base64
+ encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
+ authheader = "Basic %s" % encodeuser
+ r.add_header("Authorization", authheader)
+
+ if ud.user and ud.pswd:
+ add_basic_auth(ud.user + ':' + ud.pswd, r)
+
+ try:
+ import netrc
+ n = netrc.netrc()
+ login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname)
+ add_basic_auth("%s:%s" % (login, password), r)
+ except (TypeError, ImportError, IOError, netrc.NetrcParseError):
+ pass
+
+ with opener.open(r) as response:
+ pass
+ except urllib.error.URLError as e:
+ if try_again:
+ logger.debug2("checkstatus: trying again")
+ return self.checkstatus(fetch, ud, d, False)
+ else:
+ # debug for now to avoid spamming the logs in e.g. remote sstate searches
+ logger.debug2("checkstatus() urlopen failed: %s" % e)
+ return False
+ except ConnectionResetError as e:
+ if try_again:
+ logger.debug2("checkstatus: trying again")
+ return self.checkstatus(fetch, ud, d, False)
+ else:
+ # debug for now to avoid spamming the logs in e.g. remote sstate searches
+ logger.debug2("checkstatus() urlopen failed: %s" % e)
+ return False
+
return True
def _parse_path(self, regex, s):
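
Two things change together in wget.py: certificate verification is now on by default and opt-out via BB_CHECK_SSL_CERTS, and the HEAD-request opener runs inside bb.utils.environment() so proxy and SSL_CERT_FILE settings are visible to urllib and openssl. The context selection itself is a two-line toggle; in isolation:

    import ssl

    def make_context(check_certs):
        if check_certs:
            # Verifies chains and hostnames against the trust store,
            # honouring SSL_CERT_FILE if it is exported.
            return ssl.create_default_context()
        # Private helper, but the long-standing escape hatch for
        # "no verification".
        return ssl._create_unverified_context()

    print(make_context(True).verify_mode)   # VerifyMode.CERT_REQUIRED
    print(make_context(False).verify_mode)  # VerifyMode.CERT_NONE
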
diff --git a/poky/bitbake/lib/bb/parse/ast.py b/poky/bitbake/lib/bb/parse/ast.py
index db2bdc35e..743ea0dfc 100644
--- a/poky/bitbake/lib/bb/parse/ast.py
+++ b/poky/bitbake/lib/bb/parse/ast.py
@@ -97,7 +97,6 @@ class DataNode(AstNode):
def eval(self, data):
groupd = self.groupd
key = groupd["var"]
- key = key.replace(":", "_")
loginfo = {
'variable': key,
'file': self.filename,
@@ -146,7 +145,7 @@ class DataNode(AstNode):
data.setVar(key, val, parsing=True, **loginfo)
class MethodNode(AstNode):
- tr_tbl = str.maketrans('/.+-@%&', '_______')
+ tr_tbl = str.maketrans('/.+-@%&~', '________')
def __init__(self, filename, lineno, func_name, body, python, fakeroot):
AstNode.__init__(self, filename, lineno)
@@ -208,7 +207,6 @@ class ExportFuncsNode(AstNode):
def eval(self, data):
for func in self.n:
- func = func.replace(":", "_")
calledfunc = self.classname + "_" + func
if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False):
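
Adding `~` to MethodNode's translation table keeps function names derived from paths containing a tilde valid as Python identifiers. The table in isolation:

    tr_tbl = str.maketrans('/.+-@%&~', '________')
    print("gtk+-3.0~beta".translate(tr_tbl))  # gtk__3_0_beta
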
diff --git a/poky/bitbake/lib/bb/providers.py b/poky/bitbake/lib/bb/providers.py
index 516d45e4a..8c1c31a5c 100644
--- a/poky/bitbake/lib/bb/providers.py
+++ b/poky/bitbake/lib/bb/providers.py
@@ -94,7 +94,7 @@ def versionVariableMatch(cfgData, keyword, pn):
# pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
# hence we do this manually rather than use OVERRIDES
- ver = cfgData.getVar("%s_VERSION_pn-%s" % (keyword, pn))
+ ver = cfgData.getVar("%s_VERSION:pn-%s" % (keyword, pn))
if not ver:
ver = cfgData.getVar("%s_VERSION_%s" % (keyword, pn))
if not ver:
diff --git a/poky/bitbake/lib/bb/runqueue.py b/poky/bitbake/lib/bb/runqueue.py
index 6c41fe6d4..25e012125 100644
--- a/poky/bitbake/lib/bb/runqueue.py
+++ b/poky/bitbake/lib/bb/runqueue.py
@@ -2443,6 +2443,11 @@ class RunQueueExecute:
if update_tasks:
self.sqdone = False
+ for tid in [t[0] for t in update_tasks]:
+ h = pending_hash_index(tid, self.rqdata)
+ if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
+ self.sq_deferred[tid] = self.sqdata.hashes[h]
+ bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
for (tid, harddepfail, origvalid) in update_tasks:
@@ -2786,6 +2791,19 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
sqdata.stamppresent = set()
sqdata.valid = set()
+ sqdata.hashes = {}
+ sqrq.sq_deferred = {}
+ for mc in sorted(sqdata.multiconfigs):
+ for tid in sorted(sqdata.sq_revdeps):
+ if mc_from_tid(tid) != mc:
+ continue
+ h = pending_hash_index(tid, rqdata)
+ if h not in sqdata.hashes:
+ sqdata.hashes[h] = tid
+ else:
+ sqrq.sq_deferred[tid] = sqdata.hashes[h]
+ bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
+
update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True)
# Compute a list of 'stale' sstate tasks where the current hash does not match the one
@@ -2850,32 +2868,20 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary)
- sqdata.hashes = {}
- sqrq.sq_deferred = {}
- for mc in sorted(sqdata.multiconfigs):
- for tid in sorted(sqdata.sq_revdeps):
- if mc_from_tid(tid) != mc:
- continue
- if tid in sqdata.stamppresent:
- continue
- if tid in sqdata.valid:
- continue
- if tid in sqdata.noexec:
- continue
- if tid in sqrq.scenequeue_notcovered:
- continue
- if tid in sqrq.scenequeue_covered:
- continue
-
- h = pending_hash_index(tid, rqdata)
- if h not in sqdata.hashes:
- if tid in tids:
- sqdata.outrightfail.add(tid)
- sqdata.hashes[h] = tid
- else:
- sqrq.sq_deferred[tid] = sqdata.hashes[h]
- bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
-
+ for tid in tids:
+ if tid in sqdata.stamppresent:
+ continue
+ if tid in sqdata.valid:
+ continue
+ if tid in sqdata.noexec:
+ continue
+ if tid in sqrq.scenequeue_covered:
+ continue
+ if tid in sqrq.scenequeue_notcovered:
+ continue
+ if tid in sqrq.sq_deferred:
+ continue
+ sqdata.outrightfail.add(tid)
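
The deferral bookkeeping moves from update_scenequeue_data() up into build_scenequeue_data(): the first task seen for a given hash claims it, and any later task with the same hash is deferred behind the claimant. Stripped of the runqueue plumbing, the core of that bookkeeping looks like the sketch below (pending_hash_index is passed in as a stand-in for the real hash lookup):

    def compute_deferrals(tids, pending_hash_index):
        hashes, deferred = {}, {}
        for tid in sorted(tids):
            h = pending_hash_index(tid)
            if h not in hashes:
                hashes[h] = tid            # first claimant runs the task
            else:
                deferred[tid] = hashes[h]  # duplicates wait on the claimant
        return hashes, deferred

    _, deferred = compute_deferrals(
        ["mc1:foo:do_populate_sysroot", "mc2:foo:do_populate_sysroot"],
        lambda tid: tid.split(":", 1)[1])
    print(deferred)
    # {'mc2:foo:do_populate_sysroot': 'mc1:foo:do_populate_sysroot'}
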
class TaskFailure(Exception):
"""
diff --git a/poky/bitbake/lib/bb/server/process.py b/poky/bitbake/lib/bb/server/process.py
index a0955722e..b593830cc 100644
--- a/poky/bitbake/lib/bb/server/process.py
+++ b/poky/bitbake/lib/bb/server/process.py
@@ -26,6 +26,7 @@ import errno
import re
import datetime
import pickle
+import traceback
import bb.server.xmlrpcserver
from bb import daemonize
from multiprocessing import queues
@@ -217,8 +218,9 @@ class ProcessServer():
self.command_channel_reply.send(self.cooker.command.runCommand(command))
serverlog("Command Completed")
except Exception as e:
- serverlog('Exception in server main event loop running command %s (%s)' % (command, str(e)))
- logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e)))
+ stack = traceback.format_exc()
+ serverlog('Exception in server main event loop running command %s (%s)' % (command, stack))
+ logger.exception('Exception in server main event loop running command %s (%s)' % (command, stack))
if self.xmlrpc in ready:
self.xmlrpc.handle_requests()
@@ -471,7 +473,7 @@ class BitBakeServer(object):
try:
r = ready.get()
except EOFError:
- # Trap the child exitting/closing the pipe and error out
+ # Trap the child exiting/closing the pipe and error out
r = None
if not r or r[0] != "r":
ready.close()
diff --git a/poky/bitbake/lib/bb/siggen.py b/poky/bitbake/lib/bb/siggen.py
index 07692e673..3f9fe5064 100644
--- a/poky/bitbake/lib/bb/siggen.py
+++ b/poky/bitbake/lib/bb/siggen.py
@@ -228,7 +228,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
# self.dump_sigtask(fn, task, d.getVar("STAMP"), False)
for task in taskdeps:
- d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + ":" + task])
+ d.setVar("BB_BASEHASH:task-%s" % task, self.basehash[fn + ":" + task])
def postparsing_clean_cache(self):
#
@@ -325,7 +325,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
h = hashlib.sha256(data.encode("utf-8")).hexdigest()
self.taskhash[tid] = h
- #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
+ #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task])
return h
def writeout_file_checksum_cache(self):
diff --git a/poky/bitbake/lib/bb/tests/codeparser.py b/poky/bitbake/lib/bb/tests/codeparser.py
index 826a2d2f6..f48520479 100644
--- a/poky/bitbake/lib/bb/tests/codeparser.py
+++ b/poky/bitbake/lib/bb/tests/codeparser.py
@@ -111,9 +111,9 @@ ${D}${libdir}/pkgconfig/*.pc
self.assertExecs(set(["sed"]))
def test_parameter_expansion_modifiers(self):
- # - and + are also valid modifiers for parameter expansion, but are
+ # -,+ and : are also valid modifiers for parameter expansion, but are
# valid characters in bitbake variable names, so are not included here
- for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'):
+ for i in ('=', '?', '#', '%', '##', '%%'):
name = "foo%sbar" % i
self.parseExpression("${%s}" % name)
self.assertNotIn(name, self.references)
diff --git a/poky/bitbake/lib/bb/tests/data.py b/poky/bitbake/lib/bb/tests/data.py
index 1d4a64b10..e667c7c7d 100644
--- a/poky/bitbake/lib/bb/tests/data.py
+++ b/poky/bitbake/lib/bb/tests/data.py
@@ -245,35 +245,35 @@ class TestConcatOverride(unittest.TestCase):
def test_prepend(self):
self.d.setVar("TEST", "${VAL}")
- self.d.setVar("TEST_prepend", "${FOO}:")
+ self.d.setVar("TEST:prepend", "${FOO}:")
self.assertEqual(self.d.getVar("TEST"), "foo:val")
def test_append(self):
self.d.setVar("TEST", "${VAL}")
- self.d.setVar("TEST_append", ":${BAR}")
+ self.d.setVar("TEST:append", ":${BAR}")
self.assertEqual(self.d.getVar("TEST"), "val:bar")
def test_multiple_append(self):
self.d.setVar("TEST", "${VAL}")
- self.d.setVar("TEST_prepend", "${FOO}:")
- self.d.setVar("TEST_append", ":val2")
- self.d.setVar("TEST_append", ":${BAR}")
+ self.d.setVar("TEST:prepend", "${FOO}:")
+ self.d.setVar("TEST:append", ":val2")
+ self.d.setVar("TEST:append", ":${BAR}")
self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar")
def test_append_unset(self):
- self.d.setVar("TEST_prepend", "${FOO}:")
- self.d.setVar("TEST_append", ":val2")
- self.d.setVar("TEST_append", ":${BAR}")
+ self.d.setVar("TEST:prepend", "${FOO}:")
+ self.d.setVar("TEST:append", ":val2")
+ self.d.setVar("TEST:append", ":${BAR}")
self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar")
def test_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
- self.d.setVar("TEST_remove", "val")
+ self.d.setVar("TEST:remove", "val")
self.assertEqual(self.d.getVar("TEST"), " bar")
def test_remove_cleared(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
- self.d.setVar("TEST_remove", "val")
+ self.d.setVar("TEST:remove", "val")
self.d.setVar("TEST", "${VAL} ${BAR}")
self.assertEqual(self.d.getVar("TEST"), "val bar")
@@ -281,42 +281,42 @@ class TestConcatOverride(unittest.TestCase):
# (including that whitespace is preserved)
def test_remove_inactive_override(self):
self.d.setVar("TEST", "${VAL} ${BAR} 123")
- self.d.setVar("TEST_remove_inactiveoverride", "val")
+ self.d.setVar("TEST:remove:inactiveoverride", "val")
self.assertEqual(self.d.getVar("TEST"), "val bar 123")
def test_doubleref_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
- self.d.setVar("TEST_remove", "val")
+ self.d.setVar("TEST:remove", "val")
self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar")
def test_empty_remove(self):
self.d.setVar("TEST", "")
- self.d.setVar("TEST_remove", "val")
+ self.d.setVar("TEST:remove", "val")
self.assertEqual(self.d.getVar("TEST"), "")
def test_remove_expansion(self):
self.d.setVar("BAR", "Z")
self.d.setVar("TEST", "${BAR}/X Y")
- self.d.setVar("TEST_remove", "${BAR}/X")
+ self.d.setVar("TEST:remove", "${BAR}/X")
self.assertEqual(self.d.getVar("TEST"), " Y")
def test_remove_expansion_items(self):
self.d.setVar("TEST", "A B C D")
self.d.setVar("BAR", "B D")
- self.d.setVar("TEST_remove", "${BAR}")
+ self.d.setVar("TEST:remove", "${BAR}")
self.assertEqual(self.d.getVar("TEST"), "A C ")
def test_remove_preserve_whitespace(self):
# When the removal isn't active, the original value should be preserved
self.d.setVar("TEST", " A B")
- self.d.setVar("TEST_remove", "C")
+ self.d.setVar("TEST:remove", "C")
self.assertEqual(self.d.getVar("TEST"), " A B")
def test_remove_preserve_whitespace2(self):
# When the removal is active preserve the whitespace
self.d.setVar("TEST", " A B")
- self.d.setVar("TEST_remove", "B")
+ self.d.setVar("TEST:remove", "B")
self.assertEqual(self.d.getVar("TEST"), " A ")
class TestOverrides(unittest.TestCase):
@@ -329,70 +329,70 @@ class TestOverrides(unittest.TestCase):
self.assertEqual(self.d.getVar("TEST"), "testvalue")
def test_one_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
+ self.d.setVar("TEST:bar", "testvalue2")
self.assertEqual(self.d.getVar("TEST"), "testvalue2")
def test_one_override_unset(self):
- self.d.setVar("TEST2_bar", "testvalue2")
+ self.d.setVar("TEST2:bar", "testvalue2")
self.assertEqual(self.d.getVar("TEST2"), "testvalue2")
- self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
+ self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2:bar'])
def test_multiple_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_local", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
+ self.d.setVar("TEST:bar", "testvalue2")
+ self.d.setVar("TEST:local", "testvalue3")
+ self.d.setVar("TEST:foo", "testvalue4")
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
- self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
+ self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST:foo', 'OVERRIDES', 'TEST:bar', 'TEST:local'])
def test_multiple_combined_overrides(self):
- self.d.setVar("TEST_local_foo_bar", "testvalue3")
+ self.d.setVar("TEST:local:foo:bar", "testvalue3")
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
def test_multiple_overrides_unset(self):
- self.d.setVar("TEST2_local_foo_bar", "testvalue3")
+ self.d.setVar("TEST2:local:foo:bar", "testvalue3")
self.assertEqual(self.d.getVar("TEST2"), "testvalue3")
def test_keyexpansion_override(self):
self.d.setVar("LOCAL", "local")
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_${LOCAL}", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
+ self.d.setVar("TEST:bar", "testvalue2")
+ self.d.setVar("TEST:${LOCAL}", "testvalue3")
+ self.d.setVar("TEST:foo", "testvalue4")
bb.data.expandKeys(self.d)
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
def test_rename_override(self):
- self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a")
+ self.d.setVar("ALTERNATIVE:ncurses-tools:class-target", "a")
self.d.setVar("OVERRIDES", "class-target")
- self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools")
- self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a")
+ self.d.renameVar("ALTERNATIVE:ncurses-tools", "ALTERNATIVE:lib32-ncurses-tools")
+ self.assertEqual(self.d.getVar("ALTERNATIVE:lib32-ncurses-tools"), "a")
def test_underscore_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_some_val", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
+ self.d.setVar("TEST:bar", "testvalue2")
+ self.d.setVar("TEST:some_val", "testvalue3")
+ self.d.setVar("TEST:foo", "testvalue4")
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST"), "testvalue3")
def test_remove_with_override(self):
- self.d.setVar("TEST_bar", "testvalue2")
- self.d.setVar("TEST_some_val", "testvalue3 testvalue5")
- self.d.setVar("TEST_some_val_remove", "testvalue3")
- self.d.setVar("TEST_foo", "testvalue4")
+ self.d.setVar("TEST:bar", "testvalue2")
+ self.d.setVar("TEST:some_val", "testvalue3 testvalue5")
+ self.d.setVar("TEST:some_val:remove", "testvalue3")
+ self.d.setVar("TEST:foo", "testvalue4")
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST"), " testvalue5")
def test_append_and_override_1(self):
- self.d.setVar("TEST_append", "testvalue2")
- self.d.setVar("TEST_bar", "testvalue3")
+ self.d.setVar("TEST:append", "testvalue2")
+ self.d.setVar("TEST:bar", "testvalue3")
self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2")
def test_append_and_override_2(self):
- self.d.setVar("TEST_append_bar", "testvalue2")
+ self.d.setVar("TEST:append:bar", "testvalue2")
self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2")
def test_append_and_override_3(self):
- self.d.setVar("TEST_bar_append", "testvalue2")
+ self.d.setVar("TEST:bar:append", "testvalue2")
self.assertEqual(self.d.getVar("TEST"), "testvalue2")
# Test an override with _<numeric> in it based on a real world OE issue
@@ -400,11 +400,16 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("TARGET_ARCH", "x86_64")
self.d.setVar("PN", "test-${TARGET_ARCH}")
self.d.setVar("VERSION", "1")
- self.d.setVar("VERSION_pn-test-${TARGET_ARCH}", "2")
+ self.d.setVar("VERSION:pn-test-${TARGET_ARCH}", "2")
self.d.setVar("OVERRIDES", "pn-${PN}")
bb.data.expandKeys(self.d)
self.assertEqual(self.d.getVar("VERSION"), "2")
+ def test_append_and_unused_override(self):
+ # Had a bug where an unused override append could return "" instead of None
+ self.d.setVar("BAR:append:unusedoverride", "testvalue2")
+ self.assertEqual(self.d.getVar("BAR"), None)
+
class TestKeyExpansion(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
@@ -498,7 +503,7 @@ class TaskHash(unittest.TestCase):
d.setVar("VAR", "val")
# Adding an inactive removal shouldn't change the hash
d.setVar("BAR", "notbar")
- d.setVar("MYCOMMAND_remove", "${BAR}")
+ d.setVar("MYCOMMAND:remove", "${BAR}")
nexthash = gettask_bashhash("mytask", d)
self.assertEqual(orighash, nexthash)
diff --git a/poky/bitbake/lib/bb/tests/parse.py b/poky/bitbake/lib/bb/tests/parse.py
index 9e21e1842..4d17f82ed 100644
--- a/poky/bitbake/lib/bb/tests/parse.py
+++ b/poky/bitbake/lib/bb/tests/parse.py
@@ -98,8 +98,8 @@ exportD = "d"
overridetest = """
-RRECOMMENDS_${PN} = "a"
-RRECOMMENDS_${PN}_libc = "b"
+RRECOMMENDS:${PN} = "a"
+RRECOMMENDS:${PN}:libc = "b"
OVERRIDES = "libc:${PN}"
PN = "gtk+"
"""
@@ -110,13 +110,13 @@ PN = "gtk+"
self.assertEqual(d.getVar("RRECOMMENDS"), "b")
bb.data.expandKeys(d)
self.assertEqual(d.getVar("RRECOMMENDS"), "b")
- d.setVar("RRECOMMENDS_gtk+", "c")
+ d.setVar("RRECOMMENDS:gtk+", "c")
self.assertEqual(d.getVar("RRECOMMENDS"), "c")
overridetest2 = """
EXTRA_OECONF = ""
-EXTRA_OECONF_class-target = "b"
-EXTRA_OECONF_append = " c"
+EXTRA_OECONF:class-target = "b"
+EXTRA_OECONF:append = " c"
"""
def test_parse_overrides(self):
@@ -128,7 +128,7 @@ EXTRA_OECONF_append = " c"
overridetest3 = """
DESCRIPTION = "A"
-DESCRIPTION_${PN}-dev = "${DESCRIPTION} B"
+DESCRIPTION:${PN}-dev = "${DESCRIPTION} B"
PN = "bc"
"""
@@ -136,15 +136,15 @@ PN = "bc"
f = self.parsehelper(self.overridetest3)
d = bb.parse.handle(f.name, self.d)['']
bb.data.expandKeys(d)
- self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B")
+ self.assertEqual(d.getVar("DESCRIPTION:bc-dev"), "A B")
d.setVar("DESCRIPTION", "E")
- d.setVar("DESCRIPTION_bc-dev", "C D")
+ d.setVar("DESCRIPTION:bc-dev", "C D")
d.setVar("OVERRIDES", "bc-dev")
self.assertEqual(d.getVar("DESCRIPTION"), "C D")
classextend = """
-VAR_var_override1 = "B"
+VAR_var:override1 = "B"
EXTRA = ":override1"
OVERRIDES = "nothing${EXTRA}"
diff --git a/poky/bitbake/lib/bb/tests/utils.py b/poky/bitbake/lib/bb/tests/utils.py
index a7ff33db5..4d5e21b99 100644
--- a/poky/bitbake/lib/bb/tests/utils.py
+++ b/poky/bitbake/lib/bb/tests/utils.py
@@ -666,3 +666,21 @@ class GetReferencedVars(unittest.TestCase):
layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}]
self.check_referenced("${SRC_URI}", layers)
+
+
+class EnvironmentTests(unittest.TestCase):
+ def test_environment(self):
+ os.environ["A"] = "this is A"
+ self.assertIn("A", os.environ)
+ self.assertEqual(os.environ["A"], "this is A")
+ self.assertNotIn("B", os.environ)
+
+ with bb.utils.environment(B="this is B"):
+ self.assertIn("A", os.environ)
+ self.assertEqual(os.environ["A"], "this is A")
+ self.assertIn("B", os.environ)
+ self.assertEqual(os.environ["B"], "this is B")
+
+ self.assertIn("A", os.environ)
+ self.assertEqual(os.environ["A"], "this is A")
+ self.assertNotIn("B", os.environ)
diff --git a/poky/bitbake/lib/bb/ui/taskexp.py b/poky/bitbake/lib/bb/ui/taskexp.py
index 2b246710c..c00eaf663 100644
--- a/poky/bitbake/lib/bb/ui/taskexp.py
+++ b/poky/bitbake/lib/bb/ui/taskexp.py
@@ -8,6 +8,7 @@
#
import sys
+import traceback
try:
import gi
@@ -196,6 +197,7 @@ def main(server, eventHandler, params):
gtkgui.start()
try:
+ params.updateToServer(server, os.environ.copy())
params.updateFromServer(server)
cmdline = params.parseActions()
if not cmdline:
@@ -218,6 +220,9 @@ def main(server, eventHandler, params):
except client.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)
return
+ except Exception as e:
+ print("Exception in startup:\n %s" % traceback.format_exc())
+ return
if gtkthread.quit.isSet():
return
diff --git a/poky/bitbake/lib/bb/utils.py b/poky/bitbake/lib/bb/utils.py
index 6ba1d2a37..70634910f 100644
--- a/poky/bitbake/lib/bb/utils.py
+++ b/poky/bitbake/lib/bb/utils.py
@@ -1178,7 +1178,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
variables: a list of variable names to look for. Functions
may also be specified, but must be specified with '()' at
the end of the name. Note that the function doesn't have
- any intrinsic understanding of _append, _prepend, _remove,
+ any intrinsic understanding of :append, :prepend, :remove,
or overrides, so these are considered as part of the name.
These values go into a regular expression, so regular
expression syntax is allowed.
@@ -1681,3 +1681,19 @@ def rename(src, dst):
shutil.move(src, dst)
else:
raise err
+
+@contextmanager
+def environment(**envvars):
+ """
+ Context manager to selectively update the environment with the specified mapping.
+ """
+ backup = dict(os.environ)
+ try:
+ os.environ.update(envvars)
+ yield
+ finally:
+ for var in envvars:
+ if var in backup:
+ os.environ[var] = backup[var]
+ else:
+ del os.environ[var]
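
Usage mirrors the new EnvironmentTests case above: variables set inside the with block are visible to anything reading os.environ (including C code calling getenv()), and on exit each one is restored or removed. For example, assuming BitBake's lib/ directory is on sys.path:

    import os
    import bb.utils

    assert "MY_PROXY" not in os.environ
    with bb.utils.environment(MY_PROXY="http://proxy.example:8080"):
        print(os.environ["MY_PROXY"])   # http://proxy.example:8080
    assert "MY_PROXY" not in os.environ
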
diff --git a/poky/bitbake/lib/bblayers/query.py b/poky/bitbake/lib/bblayers/query.py
index 947422a72..6e94c8307 100644
--- a/poky/bitbake/lib/bblayers/query.py
+++ b/poky/bitbake/lib/bblayers/query.py
@@ -154,7 +154,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.
def print_item(f, pn, ver, layer, ispref):
if not selected_layer or layer == selected_layer:
if not bare and f in skiplist:
- skipped = ' (skipped)'
+ skipped = ' (skipped: %s)' % self.tinfoil.cooker.skiplist[f].skipreason
else:
skipped = ''
if show_filenames:
diff --git a/poky/bitbake/lib/hashserv/server.py b/poky/bitbake/lib/hashserv/server.py
index 8e8498973..a059e5211 100644
--- a/poky/bitbake/lib/hashserv/server.py
+++ b/poky/bitbake/lib/hashserv/server.py
@@ -410,11 +410,11 @@ class ServerClient(bb.asyncrpc.AsyncServerConnection):
class Server(bb.asyncrpc.AsyncServer):
- def __init__(self, db, loop=None, upstream=None, read_only=False):
+ def __init__(self, db, upstream=None, read_only=False):
if upstream and read_only:
raise bb.asyncrpc.ServerError("Read-only hashserv cannot pull from an upstream server")
- super().__init__(logger, loop)
+ super().__init__(logger)
self.request_stats = Stats()
self.db = db
diff --git a/poky/bitbake/lib/hashserv/tests.py b/poky/bitbake/lib/hashserv/tests.py
index e2b762dbf..e851535c5 100644
--- a/poky/bitbake/lib/hashserv/tests.py
+++ b/poky/bitbake/lib/hashserv/tests.py
@@ -15,28 +15,32 @@ import tempfile
import threading
import unittest
import socket
+import time
+import signal
-def _run_server(server, idx):
- # logging.basicConfig(level=logging.DEBUG, filename='bbhashserv.log', filemode='w',
- # format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
+def server_prefunc(server, idx):
+ logging.basicConfig(level=logging.DEBUG, filename='bbhashserv.log', filemode='w',
+ format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
+ server.logger.debug("Running server %d" % idx)
sys.stdout = open('bbhashserv-%d.log' % idx, 'w')
sys.stderr = sys.stdout
- server.serve_forever()
-
class HashEquivalenceTestSetup(object):
METHOD = 'TestMethod'
server_index = 0
- def start_server(self, dbpath=None, upstream=None, read_only=False):
+ def start_server(self, dbpath=None, upstream=None, read_only=False, prefunc=server_prefunc):
self.server_index += 1
if dbpath is None:
dbpath = os.path.join(self.temp_dir.name, "db%d.sqlite" % self.server_index)
- def cleanup_thread(thread):
- thread.terminate()
- thread.join()
+ def cleanup_server(server):
+ if server.process.exitcode is not None:
+ return
+
+ server.process.terminate()
+ server.process.join()
server = create_server(self.get_server_addr(self.server_index),
dbpath,
@@ -44,9 +48,8 @@ class HashEquivalenceTestSetup(object):
read_only=read_only)
server.dbpath = dbpath
- server.thread = multiprocessing.Process(target=_run_server, args=(server, self.server_index))
- server.thread.start()
- self.addCleanup(cleanup_thread, server.thread)
+ server.serve_as_process(prefunc=prefunc, args=(self.server_index,))
+ self.addCleanup(cleanup_server, server)
def cleanup_client(client):
client.close()
@@ -283,6 +286,33 @@ class HashEquivalenceCommonTests(object):
self.assertClientGetHash(self.client, taskhash2, None)
+ def test_slow_server_start(self):
+ """
+ Ensures that the server will exit correctly even if it gets a SIGTERM
+ before entering the main loop
+ """
+
+ event = multiprocessing.Event()
+
+ def prefunc(server, idx):
+ nonlocal event
+ server_prefunc(server, idx)
+ event.wait()
+
+ def do_nothing(signum, frame):
+ pass
+
+ old_signal = signal.signal(signal.SIGTERM, do_nothing)
+ self.addCleanup(signal.signal, signal.SIGTERM, old_signal)
+
+ _, server = self.start_server(prefunc=prefunc)
+ server.process.terminate()
+ time.sleep(30)
+ event.set()
+ server.process.join(300)
+ self.assertIsNotNone(server.process.exitcode, "Server did not exit in a timely manner!")
+
+
class TestHashEquivalenceUnixServer(HashEquivalenceTestSetup, HashEquivalenceCommonTests, unittest.TestCase):
def get_server_addr(self, server_idx):
return "unix://" + os.path.join(self.temp_dir.name, 'sock%d' % server_idx)
diff --git a/poky/bitbake/lib/prserv/client.py b/poky/bitbake/lib/prserv/client.py
new file mode 100644
index 000000000..a3f19ddaf
--- /dev/null
+++ b/poky/bitbake/lib/prserv/client.py
@@ -0,0 +1,48 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import logging
+import bb.asyncrpc
+
+logger = logging.getLogger("BitBake.PRserv")
+
+class PRAsyncClient(bb.asyncrpc.AsyncClient):
+ def __init__(self):
+ super().__init__('PRSERVICE', '1.0', logger)
+
+ async def getPR(self, version, pkgarch, checksum):
+ response = await self.send_message(
+ {'get-pr': {'version': version, 'pkgarch': pkgarch, 'checksum': checksum}}
+ )
+ if response:
+ return response['value']
+
+ async def importone(self, version, pkgarch, checksum, value):
+ response = await self.send_message(
+ {'import-one': {'version': version, 'pkgarch': pkgarch, 'checksum': checksum, 'value': value}}
+ )
+ if response:
+ return response['value']
+
+ async def export(self, version, pkgarch, checksum, colinfo):
+ response = await self.send_message(
+ {'export': {'version': version, 'pkgarch': pkgarch, 'checksum': checksum, 'colinfo': colinfo}}
+ )
+ if response:
+ return (response['metainfo'], response['datainfo'])
+
+ async def is_readonly(self):
+ response = await self.send_message(
+ {'is-readonly': {}}
+ )
+ if response:
+ return response['readonly']
+
+class PRClient(bb.asyncrpc.Client):
+ def __init__(self):
+ super().__init__()
+ self._add_methods('getPR', 'importone', 'export', 'is_readonly')
+
+ def _get_async_client(self):
+ return PRAsyncClient()
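
PRAsyncClient speaks the wire protocol over an asyncio stream, while PRClient wraps the four listed methods in synchronous proxies via _add_methods, matching the hashserv client split. A usage sketch; the host, port, and argument values are examples, and close() is assumed from the base asyncrpc client as used by the hashserv tests:

from prserv.client import PRClient

conn = PRClient()
conn.connect_tcp("localhost", 8585)
# Each proxied method blocks until the async reply arrives.
value = conn.getPR("1.0-r0", "qemux86_64", "deadbeef")
if not conn.is_readonly():
    conn.importone("1.0-r0", "qemux86_64", "deadbeef", value)
conn.close()
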
diff --git a/poky/bitbake/lib/prserv/db.py b/poky/bitbake/lib/prserv/db.py
index cb2a2461e..2710d4a22 100644
--- a/poky/bitbake/lib/prserv/db.py
+++ b/poky/bitbake/lib/prserv/db.py
@@ -30,21 +30,29 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
#
class PRTable(object):
- def __init__(self, conn, table, nohist):
+ def __init__(self, conn, table, nohist, read_only):
self.conn = conn
self.nohist = nohist
+ self.read_only = read_only
self.dirty = False
if nohist:
self.table = "%s_nohist" % table
else:
self.table = "%s_hist" % table
- self._execute("CREATE TABLE IF NOT EXISTS %s \
- (version TEXT NOT NULL, \
- pkgarch TEXT NOT NULL, \
- checksum TEXT NOT NULL, \
- value INTEGER, \
- PRIMARY KEY (version, pkgarch, checksum));" % self.table)
+ if self.read_only:
+ table_exists = self._execute(
+ "SELECT count(*) FROM sqlite_master \
+ WHERE type='table' AND name='%s'" % (self.table))
+ if not table_exists:
+ raise prserv.NotFoundError
+ else:
+ self._execute("CREATE TABLE IF NOT EXISTS %s \
+ (version TEXT NOT NULL, \
+ pkgarch TEXT NOT NULL, \
+ checksum TEXT NOT NULL, \
+ value INTEGER, \
+ PRIMARY KEY (version, pkgarch, checksum));" % self.table)
def _execute(self, *query):
"""Execute a query, waiting to acquire a lock if necessary"""
@@ -59,8 +67,9 @@ class PRTable(object):
raise exc
def sync(self):
- self.conn.commit()
- self._execute("BEGIN EXCLUSIVE TRANSACTION")
+ if not self.read_only:
+ self.conn.commit()
+ self._execute("BEGIN EXCLUSIVE TRANSACTION")
def sync_if_dirty(self):
if self.dirty:
@@ -75,6 +84,15 @@ class PRTable(object):
return row[0]
else:
#no value found, try to insert
+ if self.read_only:
+ data = self._execute("SELECT ifnull(max(value)+1,0) FROM %s where version=? AND pkgarch=?;" % (self.table),
+ (version, pkgarch))
+ row = data.fetchone()
+ if row is not None:
+ return row[0]
+ else:
+ return 0
+
try:
self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"
% (self.table,self.table),
@@ -103,6 +121,15 @@ class PRTable(object):
return row[0]
else:
#no value found, try to insert
+ if self.read_only:
+ data = self._execute("SELECT ifnull(max(value)+1,0) FROM %s where version=? AND pkgarch=?;" % (self.table),
+ (version, pkgarch))
+ row = data.fetchone()
+ if row is not None:
+ return row[0]
+ else:
+ return 0
+
try:
self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1,0) from %s where version=? AND pkgarch=?));"
% (self.table,self.table),
@@ -128,6 +155,9 @@ class PRTable(object):
return self._getValueHist(version, pkgarch, checksum)
def _importHist(self, version, pkgarch, checksum, value):
+ if self.read_only:
+ return None
+
val = None
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
(version, pkgarch, checksum))
@@ -152,6 +182,9 @@ class PRTable(object):
return val
def _importNohist(self, version, pkgarch, checksum, value):
+ if self.read_only:
+ return None
+
try:
#try to insert
self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table),
@@ -245,19 +278,23 @@ class PRTable(object):
class PRData(object):
"""Object representing the PR database"""
- def __init__(self, filename, nohist=True):
+ def __init__(self, filename, nohist=True, read_only=False):
self.filename=os.path.abspath(filename)
self.nohist=nohist
+ self.read_only = read_only
#build directory hierarchy
try:
os.makedirs(os.path.dirname(self.filename))
except OSError as e:
if e.errno != errno.EEXIST:
raise e
- self.connection=sqlite3.connect(self.filename, isolation_level="EXCLUSIVE", check_same_thread = False)
+ uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "")
+ logger.debug("Opening PRServ database '%s'" % (uri))
+ self.connection=sqlite3.connect(uri, uri=True, isolation_level="EXCLUSIVE", check_same_thread = False)
self.connection.row_factory=sqlite3.Row
- self.connection.execute("pragma synchronous = off;")
- self.connection.execute("PRAGMA journal_mode = MEMORY;")
+ if not self.read_only:
+ self.connection.execute("pragma synchronous = off;")
+ self.connection.execute("PRAGMA journal_mode = MEMORY;")
self._tables={}
def disconnect(self):
@@ -270,7 +307,7 @@ class PRData(object):
if tblname in self._tables:
return self._tables[tblname]
else:
- tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist)
+ tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist, self.read_only)
return tableobj
def __delitem__(self, tblname):
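
The read-only mode leans on sqlite's URI filenames: opening with ?mode=ro makes the connection itself reject writes, so PRTable only needs the early-return guards above rather than checks around every statement. A standalone sketch of the mechanism (the path is a placeholder and the database file must already exist):

import sqlite3

uri = "file:/tmp/prserv.sqlite?mode=ro"
conn = sqlite3.connect(uri, uri=True)
try:
    conn.execute("INSERT INTO PRMAIN_nohist VALUES ('1.0-r0', 'x86_64', 'abc', 0)")
except sqlite3.OperationalError as exc:
    # e.g. "attempt to write a readonly database"
    print("write rejected:", exc)
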
diff --git a/poky/bitbake/lib/prserv/serv.py b/poky/bitbake/lib/prserv/serv.py
index 5e322bf83..0a20b927c 100644
--- a/poky/bitbake/lib/prserv/serv.py
+++ b/poky/bitbake/lib/prserv/serv.py
@@ -4,157 +4,135 @@
import os,sys,logging
import signal, time
-from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import socket
import io
import sqlite3
-import bb.server.xmlrpcclient
import prserv
import prserv.db
import errno
-import multiprocessing
+import bb.asyncrpc
logger = logging.getLogger("BitBake.PRserv")
-class Handler(SimpleXMLRPCRequestHandler):
- def _dispatch(self,method,params):
- try:
- value=self.server.funcs[method](*params)
- except:
- import traceback
- traceback.print_exc()
- raise
- return value
-
PIDPREFIX = "/tmp/PRServer_%s_%s.pid"
singleton = None
-
-class PRServer(SimpleXMLRPCServer):
- def __init__(self, dbfile, logfile, interface):
- ''' constructor '''
+class PRServerClient(bb.asyncrpc.AsyncServerConnection):
+ def __init__(self, reader, writer, table, read_only):
+ super().__init__(reader, writer, 'PRSERVICE', logger)
+ self.handlers.update({
+ 'get-pr': self.handle_get_pr,
+ 'import-one': self.handle_import_one,
+ 'export': self.handle_export,
+ 'is-readonly': self.handle_is_readonly,
+ })
+ self.table = table
+ self.read_only = read_only
+
+ def validate_proto_version(self):
+ return (self.proto_version == (1, 0))
+
+ async def dispatch_message(self, msg):
try:
- SimpleXMLRPCServer.__init__(self, interface,
- logRequests=False, allow_none=True)
- except socket.error:
- ip=socket.gethostbyname(interface[0])
- port=interface[1]
- msg="PR Server unable to bind to %s:%s\n" % (ip, port)
- sys.stderr.write(msg)
- raise PRServiceConfigError
-
- self.dbfile=dbfile
- self.logfile=logfile
- self.host, self.port = self.socket.getsockname()
-
- self.register_function(self.getPR, "getPR")
- self.register_function(self.ping, "ping")
- self.register_function(self.export, "export")
- self.register_function(self.importone, "importone")
- self.register_introspection_functions()
-
- self.iter_count = 0
- # 60 iterations between syncs or sync if dirty every ~30 seconds
- self.iterations_between_sync = 60
-
- def sigint_handler(self, signum, stack):
- if self.table:
- self.table.sync()
-
- def sigterm_handler(self, signum, stack):
- if self.table:
- self.table.sync()
- raise(SystemExit)
-
- def process_request(self, request, client_address):
- if request is None:
- return
- try:
- self.finish_request(request, client_address)
- self.shutdown_request(request)
- self.iter_count = (self.iter_count + 1) % self.iterations_between_sync
- if self.iter_count == 0:
- self.table.sync_if_dirty()
+ await super().dispatch_message(msg)
except:
- self.handle_error(request, client_address)
- self.shutdown_request(request)
self.table.sync()
- self.table.sync_if_dirty()
+ raise
- def serve_forever(self, poll_interval=0.5):
- signal.signal(signal.SIGINT, self.sigint_handler)
- signal.signal(signal.SIGTERM, self.sigterm_handler)
+ self.table.sync_if_dirty()
- self.db = prserv.db.PRData(self.dbfile)
- self.table = self.db["PRMAIN"]
- return super().serve_forever(poll_interval)
+ async def handle_get_pr(self, request):
+ version = request['version']
+ pkgarch = request['pkgarch']
+ checksum = request['checksum']
- def export(self, version=None, pkgarch=None, checksum=None, colinfo=True):
+ response = None
try:
- return self.table.export(version, pkgarch, checksum, colinfo)
+ value = self.table.getValue(version, pkgarch, checksum)
+ response = {'value': value}
+ except prserv.NotFoundError:
+ logger.error("can not find value for (%s, %s)",version, checksum)
except sqlite3.Error as exc:
logger.error(str(exc))
- return None
- def importone(self, version, pkgarch, checksum, value):
- return self.table.importone(version, pkgarch, checksum, value)
+ self.write_message(response)
- def ping(self):
- return True
+ async def handle_import_one(self, request):
+ response = None
+ if not self.read_only:
+ version = request['version']
+ pkgarch = request['pkgarch']
+ checksum = request['checksum']
+ value = request['value']
- def getinfo(self):
- return (self.host, self.port)
+ value = self.table.importone(version, pkgarch, checksum, value)
+ if value is not None:
+ response = {'value': value}
+
+ self.write_message(response)
+
+ async def handle_export(self, request):
+ version = request['version']
+ pkgarch = request['pkgarch']
+ checksum = request['checksum']
+ colinfo = request['colinfo']
- def getPR(self, version, pkgarch, checksum):
try:
- return self.table.getValue(version, pkgarch, checksum)
- except prserv.NotFoundError:
- logger.error("can not find value for (%s, %s)",version, checksum)
- return None
+ (metainfo, datainfo) = self.table.export(version, pkgarch, checksum, colinfo)
except sqlite3.Error as exc:
logger.error(str(exc))
- return None
+ metainfo = datainfo = None
-class PRServSingleton(object):
- def __init__(self, dbfile, logfile, interface):
+ response = {'metainfo': metainfo, 'datainfo': datainfo}
+ self.write_message(response)
+
+ async def handle_is_readonly(self, request):
+ response = {'readonly': self.read_only}
+ self.write_message(response)
+
+class PRServer(bb.asyncrpc.AsyncServer):
+ def __init__(self, dbfile, read_only=False):
+ super().__init__(logger)
self.dbfile = dbfile
- self.logfile = logfile
- self.interface = interface
- self.host = None
- self.port = None
+ self.table = None
+ self.read_only = read_only
- def start(self):
- self.prserv = PRServer(self.dbfile, self.logfile, self.interface)
- self.process = multiprocessing.Process(target=self.prserv.serve_forever)
- self.process.start()
+ def accept_client(self, reader, writer):
+ return PRServerClient(reader, writer, self.table, self.read_only)
- self.host, self.port = self.prserv.getinfo()
+ def _serve_forever(self):
+ self.db = prserv.db.PRData(self.dbfile, read_only=self.read_only)
+ self.table = self.db["PRMAIN"]
- def getinfo(self):
- return (self.host, self.port)
+ logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" %
+ (self.dbfile, self.address, str(os.getpid())))
-class PRServerConnection(object):
- def __init__(self, host, port):
- if is_local_special(host, port):
- host, port = singleton.getinfo()
- self.host = host
- self.port = port
- self.connection, self.transport = bb.server.xmlrpcclient._create_server(self.host, self.port)
+ super()._serve_forever()
- def getPR(self, version, pkgarch, checksum):
- return self.connection.getPR(version, pkgarch, checksum)
+ self.table.sync_if_dirty()
+ self.db.disconnect()
- def ping(self):
- return self.connection.ping()
+ def signal_handler(self):
+ super().signal_handler()
+ if self.table:
+ self.table.sync()
- def export(self,version=None, pkgarch=None, checksum=None, colinfo=True):
- return self.connection.export(version, pkgarch, checksum, colinfo)
+class PRServSingleton(object):
+ def __init__(self, dbfile, logfile, host, port):
+ self.dbfile = dbfile
+ self.logfile = logfile
+ self.host = host
+ self.port = port
- def importone(self, version, pkgarch, checksum, value):
- return self.connection.importone(version, pkgarch, checksum, value)
+ def start(self):
+ self.prserv = PRServer(self.dbfile)
+ self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port)
+ self.process = self.prserv.serve_as_process()
- def getinfo(self):
- return self.host, self.port
+ if not self.prserv.address:
+ raise PRServiceConfigError
+ if not self.port:
+ self.port = int(self.prserv.address.rsplit(':', 1)[1])
def run_as_daemon(func, pidfile, logfile):
"""
@@ -226,7 +204,7 @@ def run_as_daemon(func, pidfile, logfile):
os.remove(pidfile)
os._exit(0)
-def start_daemon(dbfile, host, port, logfile):
+def start_daemon(dbfile, host, port, logfile, read_only=False):
ip = socket.gethostbyname(host)
pidfile = PIDPREFIX % (ip, port)
try:
@@ -240,15 +218,13 @@ def start_daemon(dbfile, host, port, logfile):
% pidfile)
return 1
- server = PRServer(os.path.abspath(dbfile), os.path.abspath(logfile), (ip,port))
- run_as_daemon(server.serve_forever, pidfile, os.path.abspath(logfile))
+ dbfile = os.path.abspath(dbfile)
+ def daemon_main():
+ server = PRServer(dbfile, read_only=read_only)
+ server.start_tcp_server(ip, port)
+ server.serve_forever()
- # Sometimes, the port (i.e. localhost:0) indicated by the user does not match with
- # the one the server actually is listening, so at least warn the user about it
- _,rport = server.getinfo()
- if port != rport:
- sys.stdout.write("Server is listening at port %s instead of %s\n"
- % (rport,port))
+ run_as_daemon(daemon_main, pidfile, os.path.abspath(logfile))
return 0
def stop_daemon(host, port):
@@ -302,7 +278,7 @@ def is_running(pid):
return True
def is_local_special(host, port):
- if host.strip().upper() == 'localhost'.upper() and (not port):
+ if (host == 'localhost' or host == '127.0.0.1') and not port:
return True
else:
return False
@@ -326,7 +302,9 @@ def auto_start(d):
'Usage: PRSERV_HOST = "<hostname>:<port>"']))
raise PRServiceConfigError
- if is_local_special(host_params[0], int(host_params[1])):
+ host = host_params[0].strip().lower()
+ port = int(host_params[1])
+ if is_local_special(host, port):
import bb.utils
cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE"))
if not cachedir:
@@ -340,20 +318,16 @@ def auto_start(d):
auto_shutdown()
if not singleton:
bb.utils.mkdirhier(cachedir)
- singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), ("localhost",0))
+ singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port)
singleton.start()
if singleton:
- host, port = singleton.getinfo()
- else:
- host = host_params[0]
- port = int(host_params[1])
+ host = singleton.host
+ port = singleton.port
try:
- connection = PRServerConnection(host,port)
- connection.ping()
- realhost, realport = connection.getinfo()
- return str(realhost) + ":" + str(realport)
-
+ ping(host, port)
+ return str(host) + ":" + str(port)
+
except Exception:
logger.critical("PRservice %s:%d not available" % (host, port))
raise PRServiceConfigError
@@ -366,8 +340,21 @@ def auto_shutdown():
singleton = None
def ping(host, port):
- conn=PRServerConnection(host, port)
+ from . import client
+
+ conn = client.PRClient()
+ conn.connect_tcp(host, port)
return conn.ping()
def connect(host, port):
- return PRServerConnection(host, port)
+ from . import client
+
+ global singleton
+
+ if host.strip().lower() == 'localhost' and not port:
+ host = 'localhost'
+ port = singleton.port
+
+ conn = client.PRClient()
+ conn.connect_tcp(host, port)
+ return conn
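
Taken together, the rewrite replaces the XML-RPC stack with the same asyncrpc protocol hashserv uses: each request is a single-key dict naming a handler ('get-pr', 'import-one', 'export', 'is-readonly') and each reply is a dict or None. A hedged end-to-end sketch using only the module-level helpers defined above (paths and port are examples; a read-only server needs a pre-populated database, and start_daemon forks, so the ping may need a retry in practice):

from prserv import serv

# Launch a read-only PR server as a daemon, check it answers, shut it down.
serv.start_daemon("cache/prserv.sqlite", "localhost", 8585, "prserv.log",
                  read_only=True)
print(serv.ping("localhost", 8585))
serv.stop_daemon("localhost", 8585)
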
diff --git a/poky/bitbake/lib/toaster/orm/models.py b/poky/bitbake/lib/toaster/orm/models.py
index 7f7e922ad..4c94b407d 100644
--- a/poky/bitbake/lib/toaster/orm/models.py
+++ b/poky/bitbake/lib/toaster/orm/models.py
@@ -1719,7 +1719,7 @@ class CustomImageRecipe(Recipe):
"""Generate the contents for the recipe file."""
# If we have no excluded packages we only need to _append
if self.excludes_set.count() == 0:
- packages_conf = "IMAGE_INSTALL_append = \" "
+ packages_conf = "IMAGE_INSTALL:append = \" "
for pkg in self.appends_set.all():
packages_conf += pkg.name+' '
diff --git a/poky/bitbake/lib/toaster/toastergui/views.py b/poky/bitbake/lib/toaster/toastergui/views.py
index 9a5e48e3b..74f9d5696 100644
--- a/poky/bitbake/lib/toaster/toastergui/views.py
+++ b/poky/bitbake/lib/toaster/toastergui/views.py
@@ -1708,7 +1708,7 @@ if True:
except ProjectVariable.DoesNotExist:
pass
try:
- return_data['image_install_append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL_append").value,
+ return_data['image_install:append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL:append").value,
except ProjectVariable.DoesNotExist:
pass
try:
@@ -1839,7 +1839,7 @@ if True:
except ProjectVariable.DoesNotExist:
pass
try:
- context['image_install_append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL_append").value
+ context['image_install:append'] = ProjectVariable.objects.get(project = prj, name = "IMAGE_INSTALL:append").value
context['image_install_append_defined'] = "1"
except ProjectVariable.DoesNotExist:
pass
diff --git a/poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py b/poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
index 59da6ff7a..e25b55e5a 100644
--- a/poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
+++ b/poky/bitbake/lib/toaster/toastermain/management/commands/buildimport.py
@@ -451,7 +451,7 @@ class Command(BaseCommand):
# Catch vars relevant to Toaster (in case no Toaster section)
self.update_project_vars(project,'DISTRO')
self.update_project_vars(project,'MACHINE')
- self.update_project_vars(project,'IMAGE_INSTALL_append')
+ self.update_project_vars(project,'IMAGE_INSTALL:append')
self.update_project_vars(project,'IMAGE_FSTYPES')
self.update_project_vars(project,'PACKAGE_CLASSES')
# These vars are typically only assigned by Toaster
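
The remaining Toaster hunks track BitBake's new override syntax, in which the underscore spelling IMAGE_INSTALL_append becomes IMAGE_INSTALL:append. A hedged illustration of the rename for the common suffixes (this regex is illustrative, not the project's actual conversion script):

import re

def modernize(varname):
    # Rewrite trailing underscore override suffixes to the new colon syntax.
    return re.sub(r'_(append|prepend|remove)$', r':\1', varname)

assert modernize("IMAGE_INSTALL_append") == "IMAGE_INSTALL:append"
assert modernize("PACKAGE_CLASSES") == "PACKAGE_CLASSES"  # untouched
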