Diffstat (limited to 'poky/bitbake/lib/bb/cache.py')
 -rw-r--r--  poky/bitbake/lib/bb/cache.py  238
 1 file changed, 165 insertions(+), 73 deletions(-)
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index d1be83617..9e0c931a0 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -19,16 +19,21 @@
import os
import logging
import pickle
from collections import defaultdict
+from collections.abc import Mapping
import bb.utils
+from bb import PrefixLoggerAdapter
import re
logger = logging.getLogger("BitBake.Cache")
-__cache_version__ = "152"
+__cache_version__ = "153"
-def getCacheFile(path, filename, data_hash):
- return os.path.join(path, filename + "." + data_hash)
+def getCacheFile(path, filename, mc, data_hash):
+ mcspec = ''
+ if mc:
+ mcspec = ".%s" % mc
+ return os.path.join(path, filename + mcspec + "." + data_hash)
# RecipeInfoCommon defines common data retrieving methods
# from meta data for caches. CoreRecipeInfo as well as other
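
With the new mc parameter, cache file names gain the multiconfig name as a suffix before the data hash. A quick illustration of the paths getCacheFile() produces (directory and hash values are hypothetical):

    getCacheFile("/build/cache", "bb_cache.dat", "", "abc123")
    # -> "/build/cache/bb_cache.dat.abc123"
    getCacheFile("/build/cache", "bb_cache.dat", "mymc", "abc123")
    # -> "/build/cache/bb_cache.dat.mymc.abc123"
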
@@ -324,7 +328,7 @@ class NoCache(object):
bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
return bb_data[virtual]
- def load_bbfile(self, bbfile, appends, virtonly = False):
+ def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
"""
Load and parse one .bb build file
Return the data and whether parsing resulted in the file being skipped
@@ -337,6 +341,10 @@ class NoCache(object):
datastores = parse_recipe(bb_data, bbfile, appends, mc)
return datastores
+ if mc is not None:
+ bb_data = self.databuilder.mcdata[mc].createCopy()
+ return parse_recipe(bb_data, bbfile, appends, mc)
+
bb_data = self.data.createCopy()
datastores = parse_recipe(bb_data, bbfile, appends)
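
Passing mc selects that multiconfig's datastore copy as the parsing base instead of the default one. A minimal call-site sketch (the cache object and recipe path are hypothetical):

    # parse one recipe against the "mymc" multiconfig configuration
    datastores = cache.load_bbfile("recipes-core/foo/foo_1.0.bb",
                                   appends=[], mc="mymc")
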
@@ -354,14 +362,15 @@ class Cache(NoCache):
"""
BitBake Cache implementation
"""
-
- def __init__(self, databuilder, data_hash, caches_array):
+ def __init__(self, databuilder, mc, data_hash, caches_array):
super().__init__(databuilder)
data = databuilder.data
# Pass caches_array information into Cache Constructor
# It will be used later for deciding whether we
# need extra cache file dump/load support
+ self.mc = mc
+ self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
self.caches_array = caches_array
self.cachedir = data.getVar("CACHE")
self.clean = set()
@@ -374,31 +383,47 @@ class Cache(NoCache):
if self.cachedir in [None, '']:
self.has_cache = False
- logger.info("Not using a cache. "
- "Set CACHE = <directory> to enable.")
+ self.logger.info("Not using a cache. "
+ "Set CACHE = <directory> to enable.")
return
self.has_cache = True
- self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)
- logger.debug(1, "Cache dir: %s", self.cachedir)
+ def getCacheFile(self, cachefile):
+ return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
+
+ def prepare_cache(self, progress):
+ if not self.has_cache:
+ return 0
+
+ loaded = 0
+
+ self.cachefile = self.getCacheFile("bb_cache.dat")
+
+ self.logger.debug(1, "Cache dir: %s", self.cachedir)
bb.utils.mkdirhier(self.cachedir)
cache_ok = True
if self.caches_array:
for cache_class in self.caches_array:
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- cache_ok = cache_ok and os.path.exists(cachefile)
+ cachefile = self.getCacheFile(cache_class.cachefile)
+ cache_exists = os.path.exists(cachefile)
+ self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)
+ cache_ok = cache_ok and cache_exists
cache_class.init_cacheData(self)
if cache_ok:
- self.load_cachefile()
+ loaded = self.load_cachefile(progress)
elif os.path.isfile(self.cachefile):
- logger.info("Out of date cache found, rebuilding...")
+ self.logger.info("Out of date cache found, rebuilding...")
else:
- logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
+ self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
# We don't use the symlink, it's just for debugging convenience
- symlink = os.path.join(self.cachedir, "bb_cache.dat")
+ if self.mc:
+ symlink = os.path.join(self.cachedir, "bb_cache.dat.%s" % self.mc)
+ else:
+ symlink = os.path.join(self.cachedir, "bb_cache.dat")
+
if os.path.exists(symlink):
bb.utils.remove(symlink)
try:
@@ -406,22 +431,31 @@ class Cache(NoCache):
except OSError:
pass
- def load_cachefile(self):
- cachesize = 0
- previous_progress = 0
- previous_percent = 0
+ return loaded
+
+ def cachesize(self):
+ if not self.has_cache:
+ return 0
- # Calculate the correct cachesize of all those cache files
+ cachesize = 0
for cache_class in self.caches_array:
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- with open(cachefile, "rb") as cachefile:
- cachesize += os.fstat(cachefile.fileno()).st_size
+ cachefile = self.getCacheFile(cache_class.cachefile)
+ try:
+ with open(cachefile, "rb") as cachefile:
+ cachesize += os.fstat(cachefile.fileno()).st_size
+ except FileNotFoundError:
+ pass
+
+ return cachesize
- bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
+ def load_cachefile(self, progress):
+ cachesize = self.cachesize()
+ previous_progress = 0
+ previous_percent = 0
for cache_class in self.caches_array:
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
- logger.debug(1, 'Loading cache file: %s' % cachefile)
+ cachefile = self.getCacheFile(cache_class.cachefile)
+ self.logger.debug(1, 'Loading cache file: %s' % cachefile)
with open(cachefile, "rb") as cachefile:
pickled = pickle.Unpickler(cachefile)
# Check cache version information
@@ -429,15 +463,15 @@ class Cache(NoCache):
cache_ver = pickled.load()
bitbake_ver = pickled.load()
except Exception:
- logger.info('Invalid cache, rebuilding...')
- return
+ self.logger.info('Invalid cache, rebuilding...')
+ return 0
if cache_ver != __cache_version__:
- logger.info('Cache version mismatch, rebuilding...')
- return
+ self.logger.info('Cache version mismatch, rebuilding...')
+ return 0
elif bitbake_ver != bb.__version__:
- logger.info('Bitbake version mismatch, rebuilding...')
- return
+ self.logger.info('Bitbake version mismatch, rebuilding...')
+ return 0
# Load the rest of the cache file
current_progress = 0
@@ -460,29 +494,17 @@ class Cache(NoCache):
self.depends_cache[key] = [value]
# only fire events on even percentage boundaries
current_progress = cachefile.tell() + previous_progress
- if current_progress > cachesize:
- # we might have calculated incorrect total size because a file
- # might've been written out just after we checked its size
- cachesize = current_progress
- current_percent = 100 * current_progress / cachesize
- if current_percent > previous_percent:
- previous_percent = current_percent
- bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
- self.data)
+ progress(current_progress)
previous_progress = current_progress
- # Note: depends cache number is corresponding to the parsing file numbers.
- # The same file has several caches, still regarded as one item in the cache
- bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
- len(self.depends_cache)),
- self.data)
+ return len(self.depends_cache)
def parse(self, filename, appends):
"""Parse the specified filename, returning the recipe information"""
- logger.debug(1, "Parsing %s", filename)
+ self.logger.debug(1, "Parsing %s", filename)
infos = []
- datastores = self.load_bbfile(filename, appends)
+ datastores = self.load_bbfile(filename, appends, mc=self.mc)
depends = []
variants = []
# Process the "real" fn last so we can store variants list
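
The progress-event bookkeeping moves out of load_cachefile(): the loader now just reports cumulative bytes read to the supplied callback and returns the number of entries loaded (0 when the cache is absent or stale). A minimal sketch of a compatible caller, assuming a Cache instance named cache (print stands in for real event handling):

    def progress(bytes_read):
        # receives the cumulative bytes read across this cache's files
        print("cache load: %d bytes" % bytes_read)

    loaded = cache.prepare_cache(progress)   # 0 if the cache had to be rebuilt
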
@@ -534,7 +556,7 @@ class Cache(NoCache):
cached, infos = self.load(fn, appends)
for virtualfn, info_array in infos:
if info_array[0].skipped:
- logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
+ self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
skipped += 1
else:
self.add_info(virtualfn, info_array, cacheData, not cached)
@@ -570,21 +592,21 @@ class Cache(NoCache):
# File isn't in depends_cache
if not fn in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", fn)
+ self.logger.debug(2, "%s is not cached", fn)
return False
mtime = bb.parse.cached_mtime_noerror(fn)
# Check file still exists
if mtime == 0:
- logger.debug(2, "Cache: %s no longer exists", fn)
+ self.logger.debug(2, "%s no longer exists", fn)
self.remove(fn)
return False
info_array = self.depends_cache[fn]
# Check the file's timestamp
if mtime != info_array[0].timestamp:
- logger.debug(2, "Cache: %s changed", fn)
+ self.logger.debug(2, "%s changed", fn)
self.remove(fn)
return False
@@ -595,14 +617,14 @@ class Cache(NoCache):
fmtime = bb.parse.cached_mtime_noerror(f)
# Check if file still exists
if old_mtime != 0 and fmtime == 0:
- logger.debug(2, "Cache: %s's dependency %s was removed",
- fn, f)
+ self.logger.debug(2, "%s's dependency %s was removed",
+ fn, f)
self.remove(fn)
return False
if (fmtime != old_mtime):
- logger.debug(2, "Cache: %s's dependency %s changed",
- fn, f)
+ self.logger.debug(2, "%s's dependency %s changed",
+ fn, f)
self.remove(fn)
return False
@@ -614,18 +636,18 @@ class Cache(NoCache):
# Have to be careful about spaces and colons in filenames
flist = self.filelist_regex.split(fl)
for f in flist:
- if not f or "*" in f:
+ if not f:
continue
f, exist = f.split(":")
if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
- logger.debug(2, "Cache: %s's file checksum list file %s changed",
- fn, f)
+ self.logger.debug(2, "%s's file checksum list file %s changed",
+ fn, f)
self.remove(fn)
return False
- if appends != info_array[0].appends:
- logger.debug(2, "Cache: appends for %s changed", fn)
- logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
+ if tuple(appends) != tuple(info_array[0].appends):
+ self.logger.debug(2, "appends for %s changed", fn)
+ self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
self.remove(fn)
return False
@@ -634,10 +656,10 @@ class Cache(NoCache):
virtualfn = variant2virtual(fn, cls)
self.clean.add(virtualfn)
if virtualfn not in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", virtualfn)
+ self.logger.debug(2, "%s is not cached", virtualfn)
invalid = True
elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
- logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
+ self.logger.debug(2, "Extra caches missing for %s?" % virtualfn)
invalid = True
# If any one of the variants is not present, mark as invalid for all
@@ -645,10 +667,10 @@ class Cache(NoCache):
for cls in info_array[0].variants:
virtualfn = variant2virtual(fn, cls)
if virtualfn in self.clean:
- logger.debug(2, "Cache: Removing %s from cache", virtualfn)
+ self.logger.debug(2, "Removing %s from cache", virtualfn)
self.clean.remove(virtualfn)
if fn in self.clean:
- logger.debug(2, "Cache: Marking %s as not clean", fn)
+ self.logger.debug(2, "Marking %s as not clean", fn)
self.clean.remove(fn)
return False
@@ -661,10 +683,10 @@ class Cache(NoCache):
Called from the parser in error cases
"""
if fn in self.depends_cache:
- logger.debug(1, "Removing %s from cache", fn)
+ self.logger.debug(1, "Removing %s from cache", fn)
del self.depends_cache[fn]
if fn in self.clean:
- logger.debug(1, "Marking %s as unclean", fn)
+ self.logger.debug(1, "Marking %s as unclean", fn)
self.clean.remove(fn)
def sync(self):
@@ -677,12 +699,13 @@ class Cache(NoCache):
return
if self.cacheclean:
- logger.debug(2, "Cache is clean, not saving.")
+ self.logger.debug(2, "Cache is clean, not saving.")
return
for cache_class in self.caches_array:
cache_class_name = cache_class.__name__
- cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
+ cachefile = self.getCacheFile(cache_class.cachefile)
+ self.logger.debug(2, "Writing %s", cachefile)
with open(cachefile, "wb") as f:
p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
p.dump(__cache_version__)
@@ -701,8 +724,18 @@ class Cache(NoCache):
return bb.parse.cached_mtime_noerror(cachefile)
def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
+ if self.mc is not None:
+ (fn, cls, mc) = virtualfn2realfn(filename)
+ if mc:
+ self.logger.error("Unexpected multiconfig %s", filename)
+ return
+
+ vfn = realfn2virtual(fn, cls, self.mc)
+ else:
+ vfn = filename
+
if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
- cacheData.add_from_recipeinfo(filename, info_array)
+ cacheData.add_from_recipeinfo(vfn, info_array)
if watcher:
watcher(info_array[0].file_depends)
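
add_info() now re-qualifies the incoming filename with this cache's mc before storing it. For reference, the virtual/real filename mapping this relies on, with illustrative values (see virtualfn2realfn() and realfn2virtual() defined earlier in this file):

    virtualfn2realfn("virtual:native:/p/foo.bb")   # -> ("/p/foo.bb", "native", "")
    realfn2virtual("/p/foo.bb", "native", "mymc")  # -> "mc:mymc:virtual:native:/p/foo.bb"
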
@@ -727,6 +760,65 @@ class Cache(NoCache):
info_array.append(cache_class(realfn, data))
self.add_info(file_name, info_array, cacheData, parsed)
+class MulticonfigCache(Mapping):
+ def __init__(self, databuilder, data_hash, caches_array):
+ def progress(p):
+ nonlocal current_progress
+ nonlocal previous_progress
+ nonlocal previous_percent
+ nonlocal cachesize
+
+ current_progress = previous_progress + p
+
+ if current_progress > cachesize:
+ # we might have calculated incorrect total size because a file
+ # might've been written out just after we checked its size
+ cachesize = current_progress
+ current_percent = 100 * current_progress / cachesize
+ if current_percent > previous_percent:
+ previous_percent = current_percent
+ bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
+ databuilder.data)
+
+
+ cachesize = 0
+ current_progress = 0
+ previous_progress = 0
+ previous_percent = 0
+ self.__caches = {}
+
+ for mc, mcdata in databuilder.mcdata.items():
+ self.__caches[mc] = Cache(databuilder, mc, data_hash, caches_array)
+
+ cachesize += self.__caches[mc].cachesize()
+
+ bb.event.fire(bb.event.CacheLoadStarted(cachesize), databuilder.data)
+ loaded = 0
+
+ for c in self.__caches.values():
+ loaded += c.prepare_cache(progress)
+ previous_progress = current_progress
+
+ # Note: the depends cache count corresponds to the number of parsed files.
+ # The same file may appear in several caches but still counts as one item.
+ bb.event.fire(bb.event.CacheLoadCompleted(cachesize, loaded), databuilder.data)
+
+ def __len__(self):
+ return len(self.__caches)
+
+ def __getitem__(self, key):
+ return self.__caches[key]
+
+ def __contains__(self, key):
+ return key in self.__caches
+
+ def __iter__(self):
+ for k in self.__caches:
+ yield k
+
+ def keys(self):
+ return self.__caches.keys()
+
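
MulticonfigCache wraps one Cache per multiconfig and exposes them as a read-only mapping keyed by mc name ('' for the default configuration); constructing it performs the cache loading and fires the CacheLoadStarted/CacheLoadCompleted events itself. A hedged usage sketch, assuming databuilder, data_hash and caches_array are already set up by the cooker:

    caches = MulticonfigCache(databuilder, data_hash, caches_array)
    for mc in caches:            # iterate multiconfig names
        cache = caches[mc]       # the per-multiconfig Cache instance
        cached, infos = cache.load(fn, appends)
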
def init(cooker):
"""