Diffstat (limited to 'poky/bitbake/lib/bb/cache.py')
-rw-r--r-- poky/bitbake/lib/bb/cache.py | 60
1 file changed, 30 insertions(+), 30 deletions(-)
diff --git a/poky/bitbake/lib/bb/cache.py b/poky/bitbake/lib/bb/cache.py
index b8054e028..aea2b8bc1 100644
--- a/poky/bitbake/lib/bb/cache.py
+++ b/poky/bitbake/lib/bb/cache.py
@@ -26,7 +26,7 @@ import re
logger = logging.getLogger("BitBake.Cache")
-__cache_version__ = "153"
+__cache_version__ = "154"
def getCacheFile(path, filename, mc, data_hash):
mcspec = ''
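
The version string is bumped because the layout of the pickled recipe data changes further down (rprovides_pkg is recorded earlier, including for skipped recipes), so caches written by older BitBake versions must be discarded. A minimal sketch of the invalidation rule, not the actual check in Cache.load_cachefile (which also compares bb.__version__); the helper name is hypothetical:

    import pickle

    __cache_version__ = "154"

    def load_cache_if_current(cachefile):
        """Hypothetical helper: return cached data only if the on-disk cache
        was written with the same cache layout version, else None (reparse)."""
        try:
            with open(cachefile, "rb") as f:
                unpickler = pickle.Unpickler(f)
                if unpickler.load() != __cache_version__:
                    return None  # layout changed (e.g. fields reordered): discard
                return unpickler.load()
        except (OSError, EOFError, pickle.UnpicklingError):
            return None  # missing or truncated cache: also reparse
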
@@ -94,6 +94,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
if not self.packages:
self.packages.append(self.pn)
self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
+ self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
self.skipreason = self.getvar('__SKIPPED', metadata)
if self.skipreason:
@@ -120,7 +121,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
self.depends = self.depvar('DEPENDS', metadata)
self.rdepends = self.depvar('RDEPENDS', metadata)
self.rrecommends = self.depvar('RRECOMMENDS', metadata)
- self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
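
Moving the rprovides_pkg assignment above the __SKIPPED check, and deleting the later copy here, presumably makes per-package RPROVIDES data available even for recipes whose constructor bails out early because they were skipped. A rough sketch of what a per-package lookup such as pkgvar amounts to, assuming the underscore override syntax this BitBake release uses; the function name and fallback handling here are illustrative, not the real RecipeInfoCommon code:

    def pkgvar_sketch(var, packages, metadata):
        # For each package, collect the package-specific value, e.g. the
        # RPROVIDES_<pkg> override, defaulting to an empty list when unset.
        # metadata is assumed to expose getVar() like the BitBake datastore.
        result = {}
        for pkg in packages:
            value = metadata.getVar("%s_%s" % (var, pkg)) or ""
            result[pkg] = value.split()
        return result
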
@@ -215,7 +215,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
if not self.not_world:
cachedata.possible_world.append(fn)
#else:
- # logger.debug(2, "EXCLUDE FROM WORLD: %s", fn)
+ # logger.debug2("EXCLUDE FROM WORLD: %s", fn)
# create a collection of all targets for sanity checking
# tasks, such as upstream versions, license, and tools for
@@ -238,7 +238,7 @@ def virtualfn2realfn(virtualfn):
Convert a virtual file name to a real one + the associated subclass keyword
"""
mc = ""
- if virtualfn.startswith('mc:'):
+ if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
elems = virtualfn.split(':')
mc = elems[1]
virtualfn = ":".join(elems[2:])
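
The added count(':') >= 2 check guards against names that merely start with "mc:" but have no multiconfig name and real path after it; previously such a string was split anyway, leaving an empty real filename. Illustrative expectations, based on the code above and the (fn, cls, mc) return order visible later in this diff:

    # Well-formed virtual name: the multiconfig prefix is peeled off.
    assert virtualfn2realfn("mc:tgt1:/recipes/foo_1.0.bb") == ("/recipes/foo_1.0.bb", "", "tgt1")
    # A degenerate "mc:" string with no second colon is now left untouched
    # instead of being mangled into an empty filename.
    assert virtualfn2realfn("mc:odd.bb") == ("mc:odd.bb", "", "")
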
@@ -268,7 +268,7 @@ def variant2virtual(realfn, variant):
"""
if variant == "":
return realfn
- if variant.startswith("mc:"):
+ if variant.startswith("mc:") and variant.count(':') >= 2:
elems = variant.split(":")
if elems[2]:
return "mc:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
@@ -323,7 +323,7 @@ class NoCache(object):
Return a complete set of data for fn.
To do this, we need to parse the file.
"""
- logger.debug(1, "Parsing %s (full)" % virtualfn)
+ logger.debug("Parsing %s (full)" % virtualfn)
(fn, virtual, mc) = virtualfn2realfn(virtualfn)
bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
return bb_data[virtual]
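
Most of the remaining hunks are mechanical: BitBake's old logger.debug(level, msg, ...) convention is replaced by the standard logging signature, with logger.debug(msg, ...) for level 1 and a new logger.debug2(msg, ...) helper for level 2. A rough idea of how such a helper can be attached to a standard logger; this is a sketch, not BitBake's actual BBLogger implementation, and the level value and class name are assumptions:

    import logging

    DEBUG2 = logging.DEBUG - 1          # one notch more verbose than DEBUG
    logging.addLevelName(DEBUG2, "DEBUG2")

    class Debug2Logger(logging.Logger):
        def debug2(self, msg, *args, **kwargs):
            # Equivalent of the old logger.debug(2, msg, ...) calls.
            if self.isEnabledFor(DEBUG2):
                self._log(DEBUG2, msg, args, **kwargs)

    logging.setLoggerClass(Debug2Logger)
    logger = logging.getLogger("BitBake.Cache")
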
@@ -400,7 +400,7 @@ class Cache(NoCache):
self.cachefile = self.getCacheFile("bb_cache.dat")
- self.logger.debug(1, "Cache dir: %s", self.cachedir)
+ self.logger.debug("Cache dir: %s", self.cachedir)
bb.utils.mkdirhier(self.cachedir)
cache_ok = True
@@ -408,7 +408,7 @@ class Cache(NoCache):
for cache_class in self.caches_array:
cachefile = self.getCacheFile(cache_class.cachefile)
cache_exists = os.path.exists(cachefile)
- self.logger.debug(2, "Checking if %s exists: %r", cachefile, cache_exists)
+ self.logger.debug2("Checking if %s exists: %r", cachefile, cache_exists)
cache_ok = cache_ok and cache_exists
cache_class.init_cacheData(self)
if cache_ok:
@@ -416,7 +416,7 @@ class Cache(NoCache):
elif os.path.isfile(self.cachefile):
self.logger.info("Out of date cache found, rebuilding...")
else:
- self.logger.debug(1, "Cache file %s not found, building..." % self.cachefile)
+ self.logger.debug("Cache file %s not found, building..." % self.cachefile)
# We don't use the symlink, its just for debugging convinience
if self.mc:
@@ -453,7 +453,7 @@ class Cache(NoCache):
for cache_class in self.caches_array:
cachefile = self.getCacheFile(cache_class.cachefile)
- self.logger.debug(1, 'Loading cache file: %s' % cachefile)
+ self.logger.debug('Loading cache file: %s' % cachefile)
with open(cachefile, "rb") as cachefile:
pickled = pickle.Unpickler(cachefile)
# Check cache version information
@@ -500,7 +500,7 @@ class Cache(NoCache):
def parse(self, filename, appends):
"""Parse the specified filename, returning the recipe information"""
- self.logger.debug(1, "Parsing %s", filename)
+ self.logger.debug("Parsing %s", filename)
infos = []
datastores = self.load_bbfile(filename, appends, mc=self.mc)
depends = []
@@ -554,7 +554,7 @@ class Cache(NoCache):
cached, infos = self.load(fn, appends)
for virtualfn, info_array in infos:
if info_array[0].skipped:
- self.logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
+ self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason)
skipped += 1
else:
self.add_info(virtualfn, info_array, cacheData, not cached)
@@ -590,21 +590,21 @@ class Cache(NoCache):
# File isn't in depends_cache
if not fn in self.depends_cache:
- self.logger.debug(2, "%s is not cached", fn)
+ self.logger.debug2("%s is not cached", fn)
return False
mtime = bb.parse.cached_mtime_noerror(fn)
# Check file still exists
if mtime == 0:
- self.logger.debug(2, "%s no longer exists", fn)
+ self.logger.debug2("%s no longer exists", fn)
self.remove(fn)
return False
info_array = self.depends_cache[fn]
# Check the file's timestamp
if mtime != info_array[0].timestamp:
- self.logger.debug(2, "%s changed", fn)
+ self.logger.debug2("%s changed", fn)
self.remove(fn)
return False
@@ -615,13 +615,13 @@ class Cache(NoCache):
fmtime = bb.parse.cached_mtime_noerror(f)
# Check if file still exists
if old_mtime != 0 and fmtime == 0:
- self.logger.debug(2, "%s's dependency %s was removed",
+ self.logger.debug2("%s's dependency %s was removed",
fn, f)
self.remove(fn)
return False
if (fmtime != old_mtime):
- self.logger.debug(2, "%s's dependency %s changed",
+ self.logger.debug2("%s's dependency %s changed",
fn, f)
self.remove(fn)
return False
@@ -638,14 +638,14 @@ class Cache(NoCache):
continue
f, exist = f.split(":")
if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
- self.logger.debug(2, "%s's file checksum list file %s changed",
+ self.logger.debug2("%s's file checksum list file %s changed",
fn, f)
self.remove(fn)
return False
if tuple(appends) != tuple(info_array[0].appends):
- self.logger.debug(2, "appends for %s changed", fn)
- self.logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
+ self.logger.debug2("appends for %s changed", fn)
+ self.logger.debug2("%s to %s" % (str(appends), str(info_array[0].appends)))
self.remove(fn)
return False
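
Taken together, these checks decide whether a cached entry can be reused: the recipe's own mtime, the mtimes of every file it depends on, the existence flags recorded in the file checksum list, and the tuple of applied appends must all still match. A condensed, hypothetical sketch of that decision; the attribute names mirror the CoreRecipeInfo fields used above, and the real code distinguishes removed versus modified dependencies more finely:

    import bb.parse

    def entry_is_stale(fn, cached, appends):
        # cached is assumed to carry .timestamp, .file_depends and .appends.
        if bb.parse.cached_mtime_noerror(fn) != cached.timestamp:
            return True                                   # recipe changed or removed
        for dep, old_mtime in cached.file_depends:
            if bb.parse.cached_mtime_noerror(dep) != old_mtime:
                return True                               # include/require changed
        if tuple(appends) != tuple(cached.appends):
            return True                                   # .bbappend set changed
        return False
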
@@ -654,10 +654,10 @@ class Cache(NoCache):
virtualfn = variant2virtual(fn, cls)
self.clean.add(virtualfn)
if virtualfn not in self.depends_cache:
- self.logger.debug(2, "%s is not cached", virtualfn)
+ self.logger.debug2("%s is not cached", virtualfn)
invalid = True
elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
- self.logger.debug(2, "Extra caches missing for %s?" % virtualfn)
+ self.logger.debug2("Extra caches missing for %s?" % virtualfn)
invalid = True
# If any one of the variants is not present, mark as invalid for all
@@ -665,10 +665,10 @@ class Cache(NoCache):
for cls in info_array[0].variants:
virtualfn = variant2virtual(fn, cls)
if virtualfn in self.clean:
- self.logger.debug(2, "Removing %s from cache", virtualfn)
+ self.logger.debug2("Removing %s from cache", virtualfn)
self.clean.remove(virtualfn)
if fn in self.clean:
- self.logger.debug(2, "Marking %s as not clean", fn)
+ self.logger.debug2("Marking %s as not clean", fn)
self.clean.remove(fn)
return False
@@ -681,10 +681,10 @@ class Cache(NoCache):
Called from the parser in error cases
"""
if fn in self.depends_cache:
- self.logger.debug(1, "Removing %s from cache", fn)
+ self.logger.debug("Removing %s from cache", fn)
del self.depends_cache[fn]
if fn in self.clean:
- self.logger.debug(1, "Marking %s as unclean", fn)
+ self.logger.debug("Marking %s as unclean", fn)
self.clean.remove(fn)
def sync(self):
@@ -697,13 +697,13 @@ class Cache(NoCache):
return
if self.cacheclean:
- self.logger.debug(2, "Cache is clean, not saving.")
+ self.logger.debug2("Cache is clean, not saving.")
return
for cache_class in self.caches_array:
cache_class_name = cache_class.__name__
cachefile = self.getCacheFile(cache_class.cachefile)
- self.logger.debug(2, "Writing %s", cachefile)
+ self.logger.debug2("Writing %s", cachefile)
with open(cachefile, "wb") as f:
p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
p.dump(__cache_version__)
@@ -879,7 +879,7 @@ class MultiProcessCache(object):
bb.utils.mkdirhier(cachedir)
self.cachefile = os.path.join(cachedir,
cache_file_name or self.__class__.cache_file_name)
- logger.debug(1, "Using cache in '%s'", self.cachefile)
+ logger.debug("Using cache in '%s'", self.cachefile)
glf = bb.utils.lockfile(self.cachefile + ".lock")
@@ -985,7 +985,7 @@ class SimpleCache(object):
bb.utils.mkdirhier(cachedir)
self.cachefile = os.path.join(cachedir,
cache_file_name or self.__class__.cache_file_name)
- logger.debug(1, "Using cache in '%s'", self.cachefile)
+ logger.debug("Using cache in '%s'", self.cachefile)
glf = bb.utils.lockfile(self.cachefile + ".lock")
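
Both MultiProcessCache and SimpleCache serialize access to their pickle files through bb.utils.lockfile, since several processes may touch the same cache. The usual pattern looks roughly like this; a sketch only, as the real classes keep the data in memory and write it back later rather than dumping it immediately:

    import pickle
    import bb.utils

    def write_cache(cachefile, data):
        # Hold the lock for the whole write to avoid torn or interleaved caches.
        lock = bb.utils.lockfile(cachefile + ".lock")
        try:
            with open(cachefile, "wb") as f:
                pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
        finally:
            bb.utils.unlockfile(lock)
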