summaryrefslogtreecommitdiffstats
path: root/bitbake/lib
diff options
context:
space:
mode:
authorRichard Purdie <richard.purdie@linuxfoundation.org>2011-02-28 15:31:20 +0000
committerRichard Purdie <richard.purdie@linuxfoundation.org>2011-02-28 20:48:08 +0000
commit1b08a7eb8b708f4d0fc119cf89deb450fa62fea1 (patch)
treecdd4c9656d94264e6c55d8632788cb39a04d1d65 /bitbake/lib
parentd5e12a1bfbb4d00b49209c602a68628b1b626898 (diff)
downloadast2050-yocto-poky-1b08a7eb8b708f4d0fc119cf89deb450fa62fea1.zip
ast2050-yocto-poky-1b08a7eb8b708f4d0fc119cf89deb450fa62fea1.tar.gz
bitbake/cache/runqueue.py: Move workload for recipe parsing to the child process
Parsing the recipe in the parent before forking off the child worker can mean the parent doesn't hit the idle loop and becomes a bottleneck when launching many short lived processes. The reason we need this in the parent is to figure out the fakeroot environmental options. To address this, add the fakeroot variables to the cache and move recipe loadData into the child task. For a poky-image-sato build this results in about a 2 minute speedup (1.8%). Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib')
-rw-r--r--bitbake/lib/bb/cache.py10
-rw-r--r--bitbake/lib/bb/runqueue.py35
2 files changed, 32 insertions, 13 deletions
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 421bd79..c56b4b4 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -43,7 +43,7 @@ except ImportError:
logger.info("Importing cPickle failed. "
"Falling back to a very slow implementation.")
-__cache_version__ = "137"
+__cache_version__ = "138"
recipe_fields = (
'pn',
@@ -78,6 +78,8 @@ recipe_fields = (
'summary',
'license',
'section',
+ 'fakerootenv',
+ 'fakerootdirs'
)
@@ -172,6 +174,8 @@ class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
summary = cls.getvar('SUMMARY', metadata),
license = cls.getvar('LICENSE', metadata),
section = cls.getvar('SECTION', metadata),
+ fakerootenv = cls.getvar('FAKEROOTENV', metadata),
+ fakerootdirs = cls.getvar('FAKEROOTDIRS', metadata),
)
@@ -584,6 +588,8 @@ class CacheData(object):
self.summary = {}
self.license = {}
self.section = {}
+ self.fakerootenv = {}
+ self.fakerootdirs = {}
# Indirect Cache variables (set elsewhere)
self.ignored_dependencies = []
@@ -647,3 +653,5 @@ class CacheData(object):
self.summary[fn] = info.summary
self.license[fn] = info.license
self.section[fn] = info.section
+ self.fakerootenv[fn] = info.fakerootenv
+ self.fakerootdirs[fn] = info.fakerootdirs
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 172e591..d7d67fd 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -1060,27 +1060,23 @@ class RunQueueExecute:
return
def fork_off_task(self, fn, task, taskname, quieterrors=False):
- the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
- env = bb.data.export_vars(the_data)
- env = bb.data.export_envvars(env, the_data)
+ envbackup = os.environ.copy()
+ env = {}
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot']:
- envvars = the_data.getVar("FAKEROOTENV", True).split()
+ envvars = (self.rqdata.dataCache.fakerootenv[fn] or "").split()
for var in envvars:
comps = var.split("=")
env[comps[0]] = comps[1]
- fakedirs = (the_data.getVar("FAKEROOTDIRS", True) or "").split()
+
+ fakedirs = (self.rqdata.dataCache.fakerootdirs[fn] or "").split()
for p in fakedirs:
bb.mkdirhier(p)
logger.debug(2, "Running %s:%s under fakeroot, state dir is %s" % (fn, taskname, fakedirs))
-
- envbackup = os.environ.copy()
- for e in envbackup:
- os.unsetenv(e)
- for e in env:
- os.putenv(e, env[e])
+ for e in env:
+ os.putenv(e, env[e])
sys.stdout.flush()
sys.stderr.flush()
@@ -1111,6 +1107,20 @@ class RunQueueExecute:
# No stdin
newsi = os.open(os.devnull, os.O_RDWR)
os.dup2(newsi, sys.stdin.fileno())
+
+
+ the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
+
+ env2 = bb.data.export_vars(the_data)
+ env2 = bb.data.export_envvars(env2, the_data)
+
+ for e in os.environ:
+ os.unsetenv(e)
+ for e in env2:
+ os.putenv(e, env2[e])
+ for e in env:
+ os.putenv(e, env[e])
+
if quieterrors:
the_data.setVarFlag(taskname, "quieterrors", "1")
@@ -1137,7 +1147,8 @@ class RunQueueExecute:
for e in env:
os.unsetenv(e)
for e in envbackup:
- os.putenv(e, envbackup[e])
+ if e in env:
+ os.putenv(e, envbackup[e])
return pid, pipein, pipeout
OpenPOWER on IntegriCloud