Diffstat (limited to 'classes/base.bbclass')
-rw-r--r--  classes/base.bbclass | 222
1 file changed, 90 insertions, 132 deletions
diff --git a/classes/base.bbclass b/classes/base.bbclass
index 3c854c6e7b..299e875191 100644
--- a/classes/base.bbclass
+++ b/classes/base.bbclass
@@ -9,31 +9,27 @@ inherit utils
inherit utility-tasks
inherit metadata_scm
-python sys_path_eh () {
- if isinstance(e, bb.event.ConfigParsed):
- import sys
- import os
- import time
+OE_IMPORTS += "oe.path oe.utils sys os time"
+python oe_import () {
+ if isinstance(e, bb.event.ConfigParsed):
+ import os, sys
bbpath = e.data.getVar("BBPATH", True).split(":")
sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath]
def inject(name, value):
- """Make a python object accessible from everywhere for the metadata"""
+ """Make a python object accessible from the metadata"""
if hasattr(bb.utils, "_context"):
bb.utils._context[name] = value
else:
__builtins__[name] = value
- import oe.path
- import oe.utils
- inject("bb", bb)
- inject("sys", sys)
- inject("time", time)
- inject("oe", oe)
+ for toimport in e.data.getVar("OE_IMPORTS", True).split():
+ imported = __import__(toimport)
+ inject(toimport.split(".", 1)[0], imported)
}
-addhandler sys_path_eh
+addhandler oe_import
die() {
oefatal "$*"
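
Note: the new oe_import handler replaces the hard-coded injections of sys_path_eh with a configurable OE_IMPORTS list. A minimal standalone sketch of the same import-and-inject pattern, with a plain dict standing in for bb.utils._context / __builtins__ (fake_metadata_context and the literal OE_IMPORTS string are stand-ins, not part of the diff):

# Sketch only: the import-and-inject pattern used by oe_import.
fake_metadata_context = {}

def inject(name, value):
    """Make a python object accessible from the metadata (sketch)."""
    fake_metadata_context[name] = value

OE_IMPORTS = "os sys time"   # stand-in for e.data.getVar("OE_IMPORTS", True)

for toimport in OE_IMPORTS.split():
    imported = __import__(toimport)
    # __import__("oe.path") returns the top-level package "oe", so only the
    # first component of a dotted name is injected.
    inject(toimport.split(".", 1)[0], imported)

print(sorted(fake_metadata_context))   # ['os', 'sys', 'time']
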
@@ -57,7 +53,7 @@ oe_runmake() {
${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
}
-def base_dep_prepend(d):
+def base_deps(d):
#
# Ideally this will check a flag so we will operate properly in
# the case where host == build == target, for now we don't work in
@@ -75,11 +71,17 @@ def base_dep_prepend(d):
if (bb.data.getVar('HOST_SYS', d, 1) !=
bb.data.getVar('BUILD_SYS', d, 1)):
deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
+ elif bb.data.inherits_class('native', d) and \
+ bb.data.getVar('PN', d, True) not in \
+ ("linux-libc-headers-native", "quilt-native",
+ "unifdef-native", "shasum-native",
+ "stagemanager-native", "coreutils-native"):
+ deps += " linux-libc-headers-native"
return deps
-DEPENDS_prepend="${@base_dep_prepend(d)} "
-DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
-DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
+DEPENDS_prepend="${@base_deps(d)} "
+DEPENDS_virtclass-native_prepend="${@base_deps(d)} "
+DEPENDS_virtclass-nativesdk_prepend="${@base_deps(d)} "
SCENEFUNCS += "base_scenefunction"
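
Note: the base_deps() change makes every native recipe outside a small bootstrap whitelist depend on linux-libc-headers-native. A rough standalone sketch of that decision with plain arguments instead of datastore lookups (sketch_base_deps and its parameters are illustrative; the real cross branch uses virtual/${TARGET_PREFIX}gcc):

# Illustrative sketch of the native-recipe branch added in base_deps().
BOOTSTRAP_NATIVES = ("linux-libc-headers-native", "quilt-native",
                     "unifdef-native", "shasum-native",
                     "stagemanager-native", "coreutils-native")

def sketch_base_deps(pn, is_native, host_sys, build_sys):
    deps = ""
    if host_sys != build_sys:
        # cross build: pull in the cross toolchain
        deps += " virtual/gcc virtual/libc "
    elif is_native and pn not in BOOTSTRAP_NATIVES:
        # native recipes outside the bootstrap set depend on the
        # staged native libc headers
        deps += " linux-libc-headers-native"
    return deps

print(sketch_base_deps("zlib-native", True, "i686-linux", "i686-linux"))
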
@@ -161,108 +163,68 @@ python base_do_fetch() {
raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
}
-def oe_unpack_file(file, data, url = None):
- import subprocess
- if not url:
- url = "file://%s" % file
- dots = file.split(".")
- if dots[-1] in ['gz', 'bz2', 'Z']:
- efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
- else:
- efile = file
- cmd = None
- if file.endswith('.tar'):
- cmd = 'tar x --no-same-owner -f %s' % file
- elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
- cmd = 'tar xz --no-same-owner -f %s' % file
- elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
- cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
- elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
- cmd = 'gzip -dc %s > %s' % (file, efile)
- elif file.endswith('.bz2'):
- cmd = 'bzip2 -dc %s > %s' % (file, efile)
- elif file.endswith('.tar.xz'):
- cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
- elif file.endswith('.xz'):
- cmd = 'xz -dc %s > %s' % (file, efile)
- elif file.endswith('.zip') or file.endswith('.jar'):
- cmd = 'unzip -q -o'
- (type, host, path, user, pswd, parm) = bb.decodeurl(url)
- if 'dos' in parm:
- cmd = '%s -a' % cmd
- cmd = "%s '%s'" % (cmd, file)
- elif os.path.isdir(file):
- destdir = "."
- filespath = bb.data.getVar("FILESPATH", data, 1).split(":")
- for fp in filespath:
- if file[0:len(fp)] == fp:
- destdir = file[len(fp):file.rfind('/')]
- destdir = destdir.strip('/')
- if len(destdir) < 1:
- destdir = "."
- elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
- os.makedirs("%s/%s" % (os.getcwd(), destdir))
- break
-
- cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
- else:
- (type, host, path, user, pswd, parm) = bb.decodeurl(url)
- if not 'patch' in parm:
- # The "destdir" handling was specifically done for FILESPATH
- # items. So, only do so for file:// entries.
- if type == "file":
- destdir = bb.decodeurl(url)[1] or "."
- else:
- destdir = "."
- bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
- cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
-
- if not cmd:
- return True
-
- dest = os.path.join(os.getcwd(), os.path.basename(file))
- if os.path.exists(dest):
- if os.path.samefile(file, dest):
- return True
-
- # Change to subdir before executing command
- save_cwd = os.getcwd();
- parm = bb.decodeurl(url)[5]
- if 'subdir' in parm:
- newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
- bb.mkdirhier(newdir)
- os.chdir(newdir)
-
- cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
- bb.note("Unpacking %s to %s/" % (base_path_out(file, data), base_path_out(os.getcwd(), data)))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
-
- os.chdir(save_cwd)
-
- return ret == 0
+def oe_unpack(d, local, urldata):
+ from oe.unpack import unpack_file, is_patch, UnpackError
+ if is_patch(local, urldata.parm):
+ return
+
+ subdirs = []
+ if "subdir" in urldata.parm:
+ subdirs.append(urldata.parm["subdir"])
+
+ if urldata.type == "file":
+ if not urldata.host:
+ urlpath = urldata.path
+ else:
+ urlpath = oe.path.join(urldata.host, urldata.path)
+
+ if not os.path.isabs(urlpath):
+ subdirs.append(os.path.dirname(urlpath))
+
+ workdir = d.getVar("WORKDIR", True)
+ if subdirs:
+ destdir = oe.path.join(workdir, *subdirs)
+ bb.mkdirhier(destdir)
+ else:
+ destdir = workdir
+ dos = urldata.parm.get("dos")
+
+ bb.note("Unpacking %s to %s/" % (base_path_out(local, d),
+ base_path_out(destdir, d)))
+ try:
+ unpack_file(local, destdir, env={"PATH": d.getVar("PATH", True)}, dos=dos)
+ except UnpackError, exc:
+ bb.fatal(str(exc))
addtask unpack after do_fetch
do_unpack[dirs] = "${WORKDIR}"
python base_do_unpack() {
- import re
+ from glob import glob
+
+ srcurldata = bb.fetch.init(d.getVar("SRC_URI", True).split(), d, True)
+ filespath = d.getVar("FILESPATH", True).split(":")
+
+ for url, urldata in srcurldata.iteritems():
+ if urldata.type == "file" and "*" in urldata.path:
+ # The fetch code doesn't know how to handle globs, so
+ # we need to handle the local bits ourselves
+ for path in filespath:
+ srcdir = oe.path.join(path, urldata.host,
+ os.path.dirname(urldata.path))
+ if os.path.exists(srcdir):
+ break
+ else:
+ bb.fatal("Unable to locate files for %s" % url)
- localdata = bb.data.createCopy(d)
- bb.data.update_data(localdata)
+ for filename in glob(oe.path.join(srcdir,
+ os.path.basename(urldata.path))):
+ oe_unpack(d, filename, urldata)
+ else:
+ local = urldata.localpath
+ if not local:
+ raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
- src_uri = bb.data.getVar('SRC_URI', localdata, True)
- if not src_uri:
- return
- for url in src_uri.split():
- try:
- local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
- except bb.MalformedUrl, e:
- raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
- if not local:
- raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
- local = os.path.realpath(local)
- ret = oe_unpack_file(local, localdata, url)
- if not ret:
- raise bb.build.FuncFailed()
+ oe_unpack(d, local, urldata)
}
addhandler base_eventhandler
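
Note: base_do_unpack now resolves file:// globs itself, since the fetcher cannot: it walks FILESPATH for the first entry containing the glob's parent directory, then expands the pattern there and unpacks each match. A simplified stdlib-only sketch of that lookup (find_glob_matches is an illustrative name; the real code also joins in urldata.host):

import os
from glob import glob

def find_glob_matches(filespath, relative_pattern):
    """Sketch of the FILESPATH glob handling in base_do_unpack: pick the
    first search-path entry whose directory for the pattern exists, then
    expand the glob inside it."""
    for path in filespath:
        srcdir = os.path.join(path, os.path.dirname(relative_pattern))
        if os.path.exists(srcdir):
            break
    else:
        raise RuntimeError("Unable to locate files for %s" % relative_pattern)
    return glob(os.path.join(srcdir, os.path.basename(relative_pattern)))

# e.g. find_glob_matches(["/recipe/files", "/common/files"], "patches/*.patch")
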
@@ -333,7 +295,7 @@ base_do_configure() {
addtask compile after do_configure
do_compile[dirs] = "${S} ${B}"
base_do_compile() {
- if [ -e Makefile -o -e makefile ]; then
+ if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
oe_runmake || die "make failed"
else
oenote "nothing to compile"
@@ -374,7 +336,16 @@ python () {
import re
this_machine = bb.data.getVar('MACHINE', d, 1)
if this_machine and not re.match(need_machine, this_machine):
- raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
+ this_soc_family = bb.data.getVar('SOC_FAMILY', d, 1)
+ if this_soc_family and not re.match(need_machine, this_soc_family):
+ raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
+
+ need_target = bb.data.getVar('COMPATIBLE_TARGET_SYS', d, 1)
+ if need_target:
+ import re
+ this_target = bb.data.getVar('TARGET_SYS', d, 1)
+ if this_target and not re.match(need_target, this_target):
+ raise bb.parse.SkipPackage("incompatible with target system %s" % this_target)
pn = bb.data.getVar('PN', d, 1)
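
Note: the COMPATIBLE_MACHINE check now consults SOC_FAMILY before skipping a recipe, and a parallel COMPATIBLE_TARGET_SYS check is added. A condensed sketch of the machine/SoC fallback with plain strings in place of datastore lookups (the function name is illustrative):

import re

def sketch_skip_for_machine(need_machine, machine, soc_family=None):
    # Mirrors the new logic: skip only when MACHINE fails the
    # COMPATIBLE_MACHINE regex AND a set SOC_FAMILY also fails it; with
    # SOC_FAMILY unset, the recipe is not skipped here (as written in the diff).
    if machine and not re.match(need_machine, machine):
        if soc_family and not re.match(need_machine, soc_family):
            return True
    return False

print(sketch_skip_for_machine("omap3", "beagleboard", soc_family="omap3"))  # False
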
@@ -418,23 +389,10 @@ python () {
# unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
#
override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
- if override != '0':
- paths = []
- for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
- path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
- if os.path.isdir(path):
- paths.append(path)
- if len(paths) != 0:
- for s in srcuri.split():
- if not s.startswith("file://"):
- continue
- local = bb.data.expand(bb.fetch.localpath(s, d), d)
- for mp in paths:
- if local.startswith(mp):
- #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
- bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
- bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
- return
+ if override != '0' and is_machine_specific(d):
+ bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
+ bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
+ return
multiarch = pkg_arch
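
Note: the open-coded PACKAGE_ARCH override is collapsed into a single is_machine_specific(d) call, whose definition is not shown in this hunk. Based on the block being removed, a hedged reconstruction of what that check amounts to (illustrative names, simplified list of subdirectories):

import os

def sketch_is_machine_specific(file_dirname, machine, local_files):
    """Illustrative reconstruction of the removed inline check, not the real
    is_machine_specific() implementation: a recipe counts as machine specific
    when one of its resolved file:// sources lives beneath a per-MACHINE
    directory next to the recipe. The real code also tries the expanded
    ${PF}, ${P} and ${PN} subdirectories."""
    machine_dirs = [os.path.join(file_dirname, sub, machine)
                    for sub in ("files", "")]
    machine_dirs = [d for d in machine_dirs if os.path.isdir(d)]
    return any(local.startswith(md)
               for local in local_files
               for md in machine_dirs)
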