summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorRichard Purdie <rpurdie@linux.intel.com>2006-04-15 22:52:28 +0000
committerRichard Purdie <rpurdie@linux.intel.com>2006-04-15 22:52:28 +0000
commitb03a424879367fd4118f60abda7f78bc5f27b957 (patch)
tree22bb4fe4d2a558db0a78d86f2be8492b280c0e77
parent67e3bddeb1dc82ab58a856b9d763b21858b1c819 (diff)
downloadbitbake-b03a424879367fd4118f60abda7f78bc5f27b957.tar.gz
bitbake/lib/bb/data.py:
bitbake/lib/bb/__init__.py: bitbake/lib/bb/data_smart.py: bitbake/lib/bb/cache.py: bitbake/lib/bb/shell.py: bitbake/bin/bitbake: * Major cache refactoring. Change the cache to store only the data bitbake needs for dependency calculations instead of all the metadata. * Separate the cache code into its own file. * Update the rest of the code to work with the cache changes. * Temporarily break the shell's poke command.
-rwxr-xr-xbin/bitbake213
-rw-r--r--lib/bb/__init__.py3
-rw-r--r--lib/bb/cache.py212
-rw-r--r--lib/bb/data.py3
-rw-r--r--lib/bb/data_smart.py69
-rw-r--r--lib/bb/shell.py31
6 files changed, 317 insertions, 214 deletions
diff --git a/bin/bitbake b/bin/bitbake
index 81bbb40ff..508d34c93 100755
--- a/bin/bitbake
+++ b/bin/bitbake
@@ -24,7 +24,7 @@
import sys, os, getopt, glob, copy, os.path, re, time
sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
import bb
-from bb import utils, data, parse, debug, event, fatal
+from bb import utils, data, parse, debug, event, fatal, cache
from sets import Set
import itertools, optparse
@@ -44,7 +44,6 @@ class BBParsingStatus:
"""
def __init__(self):
- self.cache_dirty = False
self.providers = {}
self.rproviders = {}
self.packages = {}
@@ -60,34 +59,35 @@ class BBParsingStatus:
self.pkg_dp = {}
self.pn_provides = {}
self.all_depends = Set()
+ self.build_all = {}
+ self.rundeps = {}
+ self.runrecs = {}
+ self.stamp = {}
- def handle_bb_data(self, file_name, bb_data, cached):
+ def handle_bb_data(self, file_name, bb_cache, cached):
"""
We will fill the dictionaries with the stuff we
need for building the tree more fast
"""
- if bb_data == None:
- return
-
- if not cached:
- self.cache_dirty = True
-
- pn = bb.data.getVar('PN', bb_data, True)
- pv = bb.data.getVar('PV', bb_data, True)
- pr = bb.data.getVar('PR', bb_data, True)
- dp = int(bb.data.getVar('DEFAULT_PREFERENCE', bb_data, True) or "0")
- provides = Set([pn] + (bb.data.getVar("PROVIDES", bb_data, 1) or "").split())
- depends = (bb.data.getVar("DEPENDS", bb_data, True) or "").split()
- packages = (bb.data.getVar('PACKAGES', bb_data, True) or "").split()
- packages_dynamic = (bb.data.getVar('PACKAGES_DYNAMIC', bb_data, True) or "").split()
- rprovides = (bb.data.getVar("RPROVIDES", bb_data, 1) or "").split()
+ pn = bb_cache.getVar('PN', file_name, True)
+ pv = bb_cache.getVar('PV', file_name, True)
+ pr = bb_cache.getVar('PR', file_name, True)
+ dp = int(bb_cache.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
+ provides = Set([pn] + (bb_cache.getVar("PROVIDES", file_name, True) or "").split())
+ depends = (bb_cache.getVar("DEPENDS", file_name, True) or "").split()
+ packages = (bb_cache.getVar('PACKAGES', file_name, True) or "").split()
+ packages_dynamic = (bb_cache.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
+ rprovides = (bb_cache.getVar("RPROVIDES", file_name, True) or "").split()
# build PackageName to FileName lookup table
if pn not in self.pkg_pn:
self.pkg_pn[pn] = []
self.pkg_pn[pn].append(file_name)
+ self.build_all[file_name] = int(bb_cache.getVar('BUILD_ALL_DEPS', file_name, True) or "0")
+ self.stamp[file_name] = bb_cache.getVar('STAMP', file_name, True)
+
# build FileName to PackageName lookup table
self.pkg_fn[file_name] = pn
self.pkg_pvpr[file_name] = (pv,pr)
@@ -114,7 +114,7 @@ class BBParsingStatus:
if not package in self.packages:
self.packages[package] = []
self.packages[package].append(file_name)
- rprovides += (bb.data.getVar("RPROVIDES_%s" % package, bb_data, 1) or "").split()
+ rprovides += (bb_cache.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()
for package in packages_dynamic:
if not package in self.packages_dynamic:
@@ -126,9 +126,27 @@ class BBParsingStatus:
self.rproviders[rprovide] = []
self.rproviders[rprovide].append(file_name)
+ # Build hash of runtime depends and recommends
+
+ def add_dep(deplist, deps):
+ for dep in deps:
+ if not dep in deplist:
+ deplist[dep] = ""
+
+ for package in packages + [pn]:
+ if not package in self.rundeps:
+ self.rundeps[package] = {}
+ if not package in self.runrecs:
+ self.runrecs[package] = {}
+
+ add_dep(self.rundeps[package], bb.utils.explode_deps(bb_cache.getVar('RDEPENDS', file_name, True) or ""))
+ add_dep(self.runrecs[package], bb.utils.explode_deps(bb_cache.getVar('RRECOMMENDS', file_name, True) or ""))
+ add_dep(self.rundeps[package], bb.utils.explode_deps(bb_cache.getVar("RDEPENDS_%s" % package, file_name, True) or ""))
+ add_dep(self.runrecs[package], bb.utils.explode_deps(bb_cache.getVar("RRECOMMENDS_%s" % package, file_name, True) or ""))
+
# Collect files we may need for possible world-dep
# calculations
- if not bb.data.getVar('BROKEN', bb_data, True) and not bb.data.getVar('EXCLUDE_FROM_WORLD', bb_data, True):
+ if not bb_cache.getVar('BROKEN', file_name, True) and not bb_cache.getVar('EXCLUDE_FROM_WORLD', file_name, True):
self.possible_world.append(file_name)
@@ -190,8 +208,8 @@ class BBCooker:
self.stats = BBStatistics()
self.status = None
- self.pkgdata = None
self.cache = None
+ self.bb_cache = None
def tryBuildPackage( self, fn, item, the_data ):
"""Build one package"""
@@ -226,10 +244,10 @@ class BBCooker:
If build_depends is empty, we're dealing with a runtime depends
"""
- the_data = self.pkgdata[fn]
+ the_data, fromCache = self.bb_cache.loadDataFull(fn, self)
if not buildAllDeps:
- buildAllDeps = bb.data.getVar('BUILD_ALL_DEPS', the_data, True) or False
+ buildAllDeps = self.status.build_all[fn]
# Error on build time dependency loops
if build_depends and build_depends.count(fn) > 1:
@@ -402,12 +420,14 @@ class BBCooker:
print "%-30s %20s %20s" % (p, latest[0][0] + "-" + latest[0][1],
prefstr)
+
def showEnvironment( self ):
"""Show the outer or per-package environment"""
if self.configuration.buildfile:
+ self.bb_cache = bb.cache.init(self)
try:
- self.configuration.data, fromCache = self.load_bbfile( self.configuration.buildfile )
+ self.configuration.data, fromCache = self.bb_cache.loadDataFull(self.configuration.buildfile, self)
except IOError, e:
fatal("Unable to read %s: %s" % ( self.configuration.buildfile, e ))
except Exception, e:
@@ -457,11 +477,10 @@ class BBCooker:
# look to see if one of them is already staged, or marked as preferred.
# if so, bump it to the head of the queue
for p in providers:
- the_data = self.pkgdata[p]
- pn = bb.data.getVar('PN', the_data, 1)
- pv = bb.data.getVar('PV', the_data, 1)
- pr = bb.data.getVar('PR', the_data, 1)
- stamp = '%s.do_populate_staging' % bb.data.getVar('STAMP', the_data, 1)
+ pn = self.status.pkg_fn[p]
+ pv, pr = self.status.pkg_pvpr[p]
+
+ stamp = '%s.do_populate_staging' % self.status.stamp[p]
if os.path.exists(stamp):
(newvers, fn) = preferred_versions[pn]
if not fn in eligible:
@@ -656,20 +675,11 @@ class BBCooker:
rdepends = []
self.rbuild_cache.append(item)
- the_data = self.pkgdata[fn]
- pn = self.status.pkg_fn[fn]
-
- if (item == pn):
- rdepends += bb.utils.explode_deps(bb.data.getVar('RDEPENDS', the_data, True) or "")
- rdepends += bb.utils.explode_deps(bb.data.getVar('RRECOMMENDS', the_data, True) or "")
- rdepends += bb.utils.explode_deps(bb.data.getVar("RDEPENDS_%s" % pn, the_data, True) or "")
- rdepends += bb.utils.explode_deps(bb.data.getVar('RRECOMMENDS_%s' % pn, the_data, True) or "")
- else:
- packages = (bb.data.getVar('PACKAGES', the_data, 1).split() or "")
- for package in packages:
- if package == item:
- rdepends += bb.utils.explode_deps(bb.data.getVar("RDEPENDS_%s" % package, the_data, True) or "")
- rdepends += bb.utils.explode_deps(bb.data.getVar("RRECOMMENDS_%s" % package, the_data, True) or "")
+
+ if item in self.status.rundeps:
+ rdepends += self.status.rundeps[item].keys()
+ if item in self.status.runrecs:
+ rdepends += self.status.runrecs[item].keys()
bb.debug(2, "Additional runtime dependencies for %s are: %s" % (item, " ".join(rdepends)))
@@ -698,7 +708,7 @@ class BBCooker:
self.preferred[providee] = provider
# Calculate priorities for each file
- for p in self.pkgdata.keys():
+ for p in self.status.pkg_fn.keys():
self.status.bbfile_priority[p] = calc_bbfile_priority(p)
def buildWorldTargetList(self):
@@ -729,9 +739,10 @@ class BBCooker:
self.status.possible_world = None
self.status.all_depends = None
- def myProgressCallback( self, x, y, f, file_data, from_cache ):
+ def myProgressCallback( self, x, y, f, bb_cache, from_cache ):
# feed the status with new input
- self.status.handle_bb_data(f, file_data, from_cache)
+
+ self.status.handle_bb_data(f, bb_cache, from_cache)
if bbdebug > 0:
return
@@ -938,77 +949,12 @@ class BBCooker:
return []
return finddata.readlines()
- def deps_clean(self, d):
- depstr = data.getVar('__depends', d)
- if depstr:
- deps = depstr.split(" ")
- for dep in deps:
- (f,old_mtime_s) = dep.split("@")
- old_mtime = int(old_mtime_s)
- new_mtime = parse.cached_mtime(f)
- if (new_mtime > old_mtime):
- return False
- return True
-
- def load_bbfile( self, bbfile ):
- """Load and parse one .bb build file"""
-
- if not self.cache in [None, '']:
- # get the times
- cache_mtime = data.init_db_mtime(self.cache, bbfile)
- file_mtime = parse.cached_mtime(bbfile)
-
- if file_mtime > cache_mtime:
- #print " : '%s' dirty. reparsing..." % bbfile
- pass
- else:
- #print " : '%s' clean. loading from cache..." % bbfile
- cache_data = data.init_db( self.cache, bbfile, False )
- if self.deps_clean(cache_data):
- return cache_data, True
-
- topdir = data.getVar('TOPDIR', self.configuration.data)
- if not topdir:
- topdir = os.path.abspath(os.getcwd())
- # set topdir to here
- data.setVar('TOPDIR', topdir, self.configuration)
- bbfile = os.path.abspath(bbfile)
- bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
- # expand tmpdir to include this topdir
- data.setVar('TMPDIR', data.getVar('TMPDIR', self.configuration.data, 1) or "", self.configuration.data)
- # set topdir to location of .bb file
- topdir = bbfile_loc
- #data.setVar('TOPDIR', topdir, cfg)
- # go there
- oldpath = os.path.abspath(os.getcwd())
- os.chdir(topdir)
- bb = data.init_db(self.cache,bbfile, True, self.configuration.data)
- try:
- parse.handle(bbfile, bb) # read .bb data
- if not self.cache in [None, '']:
- bb.commit(parse.cached_mtime(bbfile)) # write cache
- os.chdir(oldpath)
- return bb, False
- finally:
- os.chdir(oldpath)
-
def collect_bbfiles( self, progressCallback ):
"""Collect all available .bb build files"""
self.cb = progressCallback
parsed, cached, skipped, masked = 0, 0, 0, 0
- self.cache = bb.data.getVar( "CACHE", self.configuration.data, 1 )
- self.pkgdata = data.pkgdata( not self.cache in [None, ''], self.cache, self.configuration.data )
+ self.bb_cache = bb.cache.init(self)
- if not self.cache in [None, '']:
- if self.cb is not None:
- print "NOTE: Using cache in '%s'" % self.cache
- try:
- os.stat( self.cache )
- except OSError:
- bb.mkdirhier( self.cache )
- else:
- if self.cb is not None:
- print "NOTE: Not using a cache. Set CACHE = <directory> to enable."
files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
data.setVar("BBFILES", " ".join(files), self.configuration.data)
@@ -1043,40 +989,49 @@ class BBCooker:
# read a file's metadata
try:
- bb_data, fromCache = self.load_bbfile(f)
- if fromCache: cached += 1
+ fromCache, skip = self.bb_cache.loadData(f, self)
+ if skip:
+ skipped += 1
+ #bb.note("Skipping %s" % f)
+ self.bb_cache.skip(f)
+ continue
+ elif fromCache: cached += 1
else: parsed += 1
deps = None
- if bb_data is not None:
- # allow metadata files to add items to BBFILES
- #data.update_data(self.pkgdata[f])
- addbbfiles = data.getVar('BBFILES', bb_data) or None
- if addbbfiles:
- for aof in addbbfiles.split():
- if not files.count(aof):
- if not os.path.isabs(aof):
- aof = os.path.join(os.path.dirname(f),aof)
- files.append(aof)
- self.pkgdata[f] = bb_data
+
+ # allow metadata files to add items to BBFILES
+ #data.update_data(self.pkgdata[f])
+ addbbfiles = self.bb_cache.getVar('BBFILES', f, False) or None
+ if addbbfiles:
+ for aof in addbbfiles.split():
+ if not files.count(aof):
+ if not os.path.isabs(aof):
+ aof = os.path.join(os.path.dirname(f),aof)
+ files.append(aof)
# now inform the caller
if self.cb is not None:
- self.cb( i + 1, len( newfiles ), f, bb_data, fromCache )
+ self.cb( i + 1, len( newfiles ), f, self.bb_cache, fromCache )
except IOError, e:
+ self.bb_cache.remove(f)
bb.error("opening %s: %s" % (f, e))
pass
- except bb.parse.SkipPackage:
- skipped += 1
- pass
except KeyboardInterrupt:
+ self.bb_cache.sync()
raise
except Exception, e:
+ self.bb_cache.remove(f)
bb.error("%s while parsing %s" % (e, f))
+ except:
+ self.bb_cache.remove(f)
+ raise
if self.cb is not None:
print "\rNOTE: Parsing finished. %d cached, %d parsed, %d skipped, %d masked." % ( cached, parsed, skipped, masked ),
+ self.bb_cache.sync()
+
#============================================================================#
# main
#============================================================================#
diff --git a/lib/bb/__init__.py b/lib/bb/__init__.py
index 1206905af..449f3bc81 100644
--- a/lib/bb/__init__.py
+++ b/lib/bb/__init__.py
@@ -61,7 +61,8 @@ __all__ = [
"build",
"fetch",
"manifest",
- "methodpool"
+ "methodpool",
+ "cache",
]
whitespace = '\t\n\x0b\x0c\r '
diff --git a/lib/bb/cache.py b/lib/bb/cache.py
new file mode 100644
index 000000000..e5c5c8ef7
--- /dev/null
+++ b/lib/bb/cache.py
@@ -0,0 +1,212 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Cache' implementation
+
+Caching of bitbake variables before task execution
+
+# Copyright (C) 2006 Richard Purdie
+
+# but small sections based on code from bin/bitbake:
+# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2003, 2004 Phil Blundell
+# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
+# Copyright (C) 2005 Holger Hans Peter Freyther
+# Copyright (C) 2005 ROAD GmbH
+
+This program is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation; either version 2 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+Place, Suite 330, Boston, MA 02111-1307 USA.
+
+"""
+
+import os, re
+import bb.data
+import bb.utils
+
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+ print "NOTE: Importing cPickle failed. Falling back to a very slow implementation."
+
+class Cache:
+
+ def __init__(self, cooker):
+
+ self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
+ self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")
+ self.clean = {}
+ self.depends_cache = {}
+ self.data = None
+ self.data_fn = None
+
+ if self.cachedir in [None, '']:
+ if cooker.cb is not None:
+ print "NOTE: Not using a cache. Set CACHE = <directory> to enable."
+ else:
+ if cooker.cb is not None:
+ print "NOTE: Using cache in '%s'" % self.cachedir
+ try:
+ os.stat( self.cachedir )
+ except OSError:
+ bb.mkdirhier( self.cachedir )
+
+ if (self.mtime(self.cachefile)):
+ p = pickle.Unpickler( file(self.cachefile,"rb"))
+ self.depends_cache = p.load()
+ if self.depends_cache:
+ for fn in self.depends_cache.keys():
+ self.clean[fn] = ""
+ self.cacheValidUpdate(fn)
+
+ def getVar(self, var, fn, exp = 0):
+ if fn in self.clean:
+ return self.depends_cache[fn][var]
+
+ if not fn in self.depends_cache:
+ self.depends_cache[fn] = {}
+
+ if fn != self.data_fn:
+ bb.fatal("Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))
+
+ result = bb.data.getVar(var, self.data, exp)
+ self.depends_cache[fn][var] = result
+ return result
+
+ def setData(self, fn, data):
+ self.data_fn = fn
+ self.data = data
+
+ # Make sure __depends makes the depends_cache
+ self.getVar("__depends", fn, True)
+ self.depends_cache[fn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
+
+ def loadDataFull(self, fn, cooker):
+
+ bb_data, skipped = self.load_bbfile(fn, cooker)
+ return bb_data, False
+
+ def loadData(self, fn, cooker):
+ if self.cacheValid(fn):
+ if "SKIPPED" in self.depends_cache[fn]:
+ return True, True
+ return True, False
+
+ bb_data, skipped = self.load_bbfile(fn, cooker)
+ self.setData(fn, bb_data)
+ return False, skipped
+
+ def cacheValid(self, fn):
+ # Is cache enabled?
+ if self.cachedir in [None, '']:
+ return False
+ if fn in self.clean:
+ return True
+ return False
+
+ def cacheValidUpdate(self, fn):
+ # Is cache enabled?
+ if self.cachedir in [None, '']:
+ return False
+
+ # File isn't in depends_cache
+ if not fn in self.depends_cache:
+ bb.note("Cache: %s is not cached" % fn)
+ if fn in self.clean:
+ del self.clean[fn]
+ return False
+
+ # Check the file's timestamp
+ if bb.parse.cached_mtime(fn) > self.getVar("CACHETIMESTAMP", fn, True):
+ bb.note("Cache: %s changed" % fn)
+ if fn in self.clean:
+ del self.clean[fn]
+ return False
+
+ # Check dependencies are still valid
+ depends = self.getVar("__depends", fn, True)
+ if depends:
+ deps = depends.split(" ")
+ for dep in deps:
+ (f,old_mtime_s) = dep.split("@")
+ old_mtime = int(old_mtime_s)
+ new_mtime = bb.parse.cached_mtime(f)
+ if (new_mtime > old_mtime):
+ bb.note("Cache: %s's dependency %s changed" % (fn, f))
+ if fn in self.clean:
+ del self.clean[fn]
+ return False
+
+ #bb.note("Depends Cache: %s is clean" % fn)
+ if not fn in self.clean:
+ self.clean[fn] = ""
+
+ return True
+
+ def skip(self, fn):
+ if not fn in self.depends_cache:
+ self.depends_cache[fn] = {}
+ self.depends_cache[fn]["SKIPPED"] = "1"
+
+ def remove(self, fn):
+ bb.note("Removing %s from cache" % fn)
+ if fn in self.depends_cache:
+ del self.depends_cache[fn]
+
+ def sync(self):
+ p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
+ p.dump(self.depends_cache)
+
+ def mtime(self, cachefile):
+ try:
+ return os.stat(cachefile)[8]
+ except OSError:
+ return 0
+
+ def load_bbfile( self, bbfile , cooker):
+ """Load and parse one .bb build file"""
+
+ import bb
+ from bb import utils, data, parse, debug, event, fatal
+
+ topdir = data.getVar('TOPDIR', cooker.configuration.data)
+ if not topdir:
+ topdir = os.path.abspath(os.getcwd())
+ # set topdir to here
+ data.setVar('TOPDIR', topdir, cooker.configuration)
+ bbfile = os.path.abspath(bbfile)
+ bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
+ # expand tmpdir to include this topdir
+ data.setVar('TMPDIR', data.getVar('TMPDIR', cooker.configuration.data, 1) or "", cooker.configuration.data)
+ # set topdir to location of .bb file
+ topdir = bbfile_loc
+ #data.setVar('TOPDIR', topdir, cfg)
+ # go there
+ oldpath = os.path.abspath(os.getcwd())
+ os.chdir(topdir)
+ bb_data = data.init_db(self.cachedir,bbfile, True, cooker.configuration.data)
+ try:
+ parse.handle(bbfile, bb_data) # read .bb data
+ os.chdir(oldpath)
+ return bb_data, False
+ except bb.parse.SkipPackage:
+ os.chdir(oldpath)
+ return bb_data, True
+ except:
+ os.chdir(oldpath)
+ raise
+
+def init(cooker):
+ return Cache(cooker)
+
diff --git a/lib/bb/data.py b/lib/bb/data.py
index 6cac3ea19..1c1eefe9d 100644
--- a/lib/bb/data.py
+++ b/lib/bb/data.py
@@ -120,9 +120,6 @@ def init():
def init_db(cache,name,clean,parent = None):
return _dict_p_type(cache,name,clean,parent)
-def init_db_mtime(cache,cache_bbfile):
- return _dict_p_type.mtime(cache,cache_bbfile)
-
def pkgdata(use_cache, cache, config = None):
"""
Return some sort of dictionary to lookup parsed dictionaires
diff --git a/lib/bb/data_smart.py b/lib/bb/data_smart.py
index 0fd0d5706..4d426f428 100644
--- a/lib/bb/data_smart.py
+++ b/lib/bb/data_smart.py
@@ -274,32 +274,6 @@ class DataSmartPackage(DataSmart):
"""
Persistent Data Storage
"""
- def sanitize_filename(bbfile):
- return bbfile.replace( '/', '_' )
- sanitize_filename = staticmethod(sanitize_filename)
-
- def unpickle(self):
- """
- Restore the dict from memory
- """
- cache_bbfile = self.sanitize_filename(self.bbfile)
- p = pickle.Unpickler( file("%s/%s"%(self.cache,cache_bbfile),"rb"))
- (self.dict, self._seen_overrides, self._special_values) = p.load()
- self.unpickle_prep()
-
- # compile the functions into global scope
- funcs = self.getVar('__functions__', 0) or {}
- for key in funcs.keys():
- methodpool.check_insert_method( key, funcs[key], self.bbfile )
- methodpool.parsed_module( key )
-
- # now add the handlers which were present
- handlers = self.getVar('__all_handlers__', 0) or {}
- import bb.event
- for key in handlers.keys():
- bb.event.register(key, handlers[key])
-
-
def linkDataSet(self):
if not self.parent == None:
# assume parent is a DataSmartInstance
@@ -317,45 +291,6 @@ class DataSmartPackage(DataSmart):
self.bbfile = os.path.abspath( name )
self.parent = parent
- # Either unpickle the data or do copy on write
- if clean:
- self.linkDataSet()
- self._seen_overrides = copy.copy(parent._seen_overrides)
- self._special_values = copy.copy(parent._special_values)
- else:
- self.unpickle()
+ # Do Copy on Write
+ self.linkDataSet()
- def commit(self, mtime):
- """
- Save the package to a permanent storage
- """
- self.pickle_prep()
-
- cache_bbfile = self.sanitize_filename(self.bbfile)
- p = pickle.Pickler(file("%s/%s" %(self.cache,cache_bbfile), "wb" ), -1 )
- p.dump( (self.dict,self._seen_overrides,self._special_values) )
-
- self.unpickle_prep()
-
- def mtime(cache,bbfile):
- cache_bbfile = DataSmartPackage.sanitize_filename(bbfile)
- try:
- return os.stat( "%s/%s" % (cache,cache_bbfile) )[8]
- except OSError:
- return 0
- mtime = staticmethod(mtime)
-
- def pickle_prep(self):
- """
- If self.dict contains a _data key and it is a configuration
- we will remember we had a configuration instance attached
- """
- if "_data" in self.dict and self.dict["_data"] == self.parent:
- dest["_data"] = "cfg"
-
- def unpickle_prep(self):
- """
- If we had a configuration instance attached, we will reattach it
- """
- if "_data" in self.dict and self.dict["_data"] == "cfg":
- self.dict["_data"] = self.parent
diff --git a/lib/bb/shell.py b/lib/bb/shell.py
index b86dc9753..f16f0f477 100644
--- a/lib/bb/shell.py
+++ b/lib/bb/shell.py
@@ -263,8 +263,9 @@ class BitBakeShellCommands:
bbfile = params[0]
print "SHELL: Parsing '%s'" % bbfile
parse.update_mtime( bbfile )
- bb_data, fromCache = cooker.load_bbfile( bbfile )
- cooker.pkgdata[bbfile] = bb_data
+ cooker.bb_cache.cacheValidUpdate(bbfile)
+ fromCache = cooker.bb_cache.loadData(bbfile, cooker)
+ cooker.bb_cache.sync()
if fromCache:
print "SHELL: File has not been updated, not reparsing"
else:
@@ -307,7 +308,7 @@ class BitBakeShellCommands:
what, globexpr = params
if what == "files":
self._checkParsed()
- for key in globfilter( cooker.pkgdata.keys(), globexpr ): print key
+ for key in globfilter( cooker.status.pkg_fn.keys(), globexpr ): print key
elif what == "providers":
self._checkParsed()
for key in globfilter( cooker.status.pkg_pn.keys(), globexpr ): print key
@@ -432,7 +433,8 @@ SRC_URI = ""
name, var = params
bbfile = self._findProvider( name )
if bbfile is not None:
- value = cooker.pkgdata[bbfile].getVar( var, 1 )
+ the_data = cooker.bb_cache.loadDataFull(bbfile, cooker)
+ value = the_data.getVar( var, 1 )
print value
else:
print "ERROR: Nothing provides '%s'" % name
@@ -442,13 +444,14 @@ SRC_URI = ""
"""Set contents of variable defined in providee's metadata"""
name, var, value = params
bbfile = self._findProvider( name )
- d = cooker.pkgdata[bbfile]
if bbfile is not None:
- data.setVar( var, value, d )
+ print "ERROR: Sorry, this functionality is currently broken"
+ #d = cooker.pkgdata[bbfile]
+ #data.setVar( var, value, d )
# mark the change semi persistant
- cooker.pkgdata.setDirty(bbfile, d)
- print "OK"
+ #cooker.pkgdata.setDirty(bbfile, d)
+ #print "OK"
else:
print "ERROR: Nothing provides '%s'" % name
poke.usage = "<providee> <variable> <value>"
@@ -458,7 +461,7 @@ SRC_URI = ""
what = params[0]
if what == "files":
self._checkParsed()
- for key in cooker.pkgdata.keys(): print key
+ for key in cooker.status.pkg_fn.keys(): print key
elif what == "providers":
self._checkParsed()
for key in cooker.status.providers.keys(): print key
@@ -555,8 +558,8 @@ SRC_URI = ""
def completeFilePath( bbfile ):
"""Get the complete bbfile path"""
- if not cooker.pkgdata: return bbfile
- for key in cooker.pkgdata.keys():
+ if not cooker.status.pkg_fn: return bbfile
+ for key in cooker.status.pkg_fn.keys():
if key.endswith( bbfile ):
return key
return bbfile
@@ -594,10 +597,10 @@ def completer( text, state ):
if u == "<variable>":
allmatches = cooker.configuration.data.keys()
elif u == "<bbfile>":
- if cooker.pkgdata is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
- else: allmatches = [ x.split("/")[-1] for x in cooker.pkgdata.keys() ]
+ if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
+ else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn.keys() ]
elif u == "<providee>":
- if cooker.pkgdata is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
+ if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
else: allmatches = cooker.status.providers.iterkeys()
else: allmatches = [ "(No tab completion available for this command)" ]
else: allmatches = [ "(No tab completion available for this command)" ]