From 82c905dc58a36aeae40b1b273a12f63fb1973cf4 Mon Sep 17 00:00:00 2001
From: Andrew Geissler
Date: Mon, 13 Apr 2020 13:39:40 -0500
Subject: meta-openembedded and poky: subtree updates

Squash of the following due to dependencies among them and OpenBMC changes:

meta-openembedded: subtree update:d0748372d2..9201611135
meta-openembedded: subtree update:9201611135..17fd382f34
poky: subtree update:9052e5b32a..2e11d97b6c
poky: subtree update:2e11d97b6c..a8544811d7

The change log was too large for the jenkins plugin to handle therefore it
has been removed. Here is the first and last commit of each subtree:

meta-openembedded:d0748372d2 cppzmq: bump to version 4.6.0
meta-openembedded:17fd382f34 mpv: Remove X11 dependency
poky:9052e5b32a package_ipk: Remove pointless comment to trigger rebuild
poky:a8544811d7 pbzip2: Fix license warning

Change-Id: If0fc6c37629642ee207a4ca2f7aa501a2c673cd6
Signed-off-by: Andrew Geissler
---
 poky/bitbake/lib/bb/codeparser.py | 26 ++------------------------
 1 file changed, 2 insertions(+), 24 deletions(-)

(limited to 'poky/bitbake/lib/bb/codeparser.py')

diff --git a/poky/bitbake/lib/bb/codeparser.py b/poky/bitbake/lib/bb/codeparser.py
index fd2c4734f..25a7ac69d 100644
--- a/poky/bitbake/lib/bb/codeparser.py
+++ b/poky/bitbake/lib/bb/codeparser.py
@@ -25,13 +25,11 @@ import ast
 import sys
 import codegen
 import logging
-import pickle
 import bb.pysh as pysh
-import os.path
 import bb.utils, bb.data
 import hashlib
 from itertools import chain
-from bb.pysh import pyshyacc, pyshlex, sherrors
+from bb.pysh import pyshyacc, pyshlex
 from bb.cache import MultiProcessCache
 
 logger = logging.getLogger('BitBake.CodeParser')
@@ -58,30 +56,10 @@ def check_indent(codestr):
 
     return codestr
 
-
-# Basically pickle, in python 2.7.3 at least, does badly with data duplication
-# upon pickling and unpickling. Combine this with duplicate objects and things
-# are a mess.
-#
-# When the sets are originally created, python calls intern() on the set keys
-# which significantly improves memory usage. Sadly the pickle/unpickle process
-# doesn't call intern() on the keys and results in the same strings being duplicated
-# in memory. This also means pickle will save the same string multiple times in
-# the cache file.
-#
-# By having shell and python cacheline objects with setstate/getstate, we force
-# the object creation through our own routine where we can call intern (via internSet).
-#
-# We also use hashable frozensets and ensure we use references to these so that
-# duplicates can be removed, both in memory and in the resulting pickled data.
-#
-# By playing these games, the size of the cache file shrinks dramatically
-# meaning faster load times and the reloaded cache files also consume much less
-# memory. Smaller cache files, faster load times and lower memory usage is good.
-#
 # A custom getstate/setstate using tuples is actually worth 15% cachesize by
 # avoiding duplication of the attribute names!
+
 
 class SetCache(object):
     def __init__(self):
         self.setcache = {}
--
cgit v1.2.3
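
The getstate/setstate comment kept by the second hunk refers to the shell and
python cacheline objects that the deleted comment block describes: they pickle
themselves as plain tuples and re-intern their strings on load, so attribute
names and duplicate strings are not written into the codeparser cache over and
over. Below is a rough, self-contained sketch of that idea; the class and
attribute names are simplified illustrations, not BitBake's actual cacheline
implementation.

    # Sketch only: simplified stand-in for the cacheline technique described
    # in the comment above, not BitBake's real classes.
    import pickle
    import sys


    class CacheLine:
        def __init__(self, refs, execs):
            self.refs = frozenset(refs)
            self.execs = frozenset(execs)

        def __getstate__(self):
            # Return a bare tuple instead of the default __dict__, so the
            # attribute-name strings are not stored once per instance.
            return (self.refs, self.execs)

        def __setstate__(self, state):
            refs, execs = state
            # Re-intern on load so equal strings are shared in memory,
            # mirroring the interning done when the sets are first built.
            self.refs = frozenset(sys.intern(r) for r in refs)
            self.execs = frozenset(sys.intern(e) for e in execs)


    if __name__ == "__main__":
        lines = [CacheLine({"DEPENDS", "PN"}, {"bb.utils.contains"}) for _ in range(1000)]
        blob = pickle.dumps(lines)
        restored = pickle.loads(blob)
        print(len(blob), restored[0].refs == lines[0].refs)

By default pickle would serialize each instance's __dict__, repeating the
attribute-name strings for every object in the cache file; returning a tuple
avoids that, which is the roughly 15% cache-size saving the surviving comment
mentions, and the intern() call in __setstate__ keeps reloaded strings shared
in memory.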