author     Richard Purdie <rpurdie@linux.intel.com>   2009-11-16 10:45:23 +0000
committer  Richard Purdie <rpurdie@linux.intel.com>   2009-11-16 10:45:23 +0000
commit     bc465d83ad8665211b6f9664b418f4eafcc5ca6c (patch)
tree       6e46f3cb3096127f0df9825f1670482da199fc9e /classes
parent     d762c7504032becb9d05cd48c86dabb1ec764911 (diff)
download   openembedded-bc465d83ad8665211b6f9664b418f4eafcc5ca6c.tar.gz
classes: Drop a number of unneeded import calls (from Poky)
Signed-off-by: Richard Purdie <rpurdie@linux.intel.com>
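
For reference, the imports being dropped are redundant because BitBake already injects the bb and os modules into the namespace of Python code defined in classes and recipes; only modules BitBake does not provide (re, tarfile, commands, and so on) still need an explicit import, as the hunks below show. A minimal illustrative sketch, not taken from this commit (the function and variable names are hypothetical):

    # Hypothetical .bbclass snippet: BitBake supplies 'bb' and 'os' when it
    # executes this code, so the commented-out import is unnecessary.
    def example_dep_prepend(d):
        # import bb, os    <-- redundant, provided by BitBake
        if bb.data.getVar('INHIBIT_EXAMPLE_DEPS', d, 1):
            return ''
        return "example-native "

    python do_example_check () {
        # os.path and bb.note likewise work here without an explicit import.
        workdir = bb.data.getVar('WORKDIR', d, True)
        bb.note("checking %s" % os.path.join(workdir, "config.log"))
    }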
Diffstat (limited to 'classes')
-rw-r--r--  classes/autotools.bbclass            |  3
-rw-r--r--  classes/autotools_stage.bbclass      |  1
-rw-r--r--  classes/base.bbclass                 |  1
-rw-r--r--  classes/cpan_build.bbclass           |  1
-rw-r--r--  classes/gettext.bbclass              |  2
-rw-r--r--  classes/gtk-icon-cache.bbclass       |  1
-rw-r--r--  classes/icecc.bbclass                | 10
-rw-r--r--  classes/insane.bbclass               | 13
-rw-r--r--  classes/kernel-arch.bbclass          |  2
-rw-r--r--  classes/kernel.bbclass               | 11
-rw-r--r--  classes/linux-kernel-base.bbclass    |  3
-rw-r--r--  classes/openmoko-base.bbclass        |  1
-rw-r--r--  classes/openmoko2.bbclass            |  2
-rw-r--r--  classes/package.bbclass              | 18
-rw-r--r--  classes/package_deb.bbclass          |  8
-rw-r--r--  classes/package_ipk.bbclass          | 15
-rw-r--r--  classes/package_rpm.bbclass          | 11
-rw-r--r--  classes/package_tar.bbclass          |  6
-rw-r--r--  classes/packaged-staging.bbclass     | 12
-rw-r--r--  classes/packagehistory.bbclass       |  6
-rw-r--r--  classes/patch.bbclass                |  7
-rw-r--r--  classes/pkgconfig.bbclass            |  2
-rw-r--r--  classes/rm_work.bbclass              |  5
-rw-r--r--  classes/sanity.bbclass               | 12
-rw-r--r--  classes/siteinfo.bbclass             |  4
-rw-r--r--  classes/sourcepkg.bbclass            |  4
-rw-r--r--  classes/src_distribute.bbclass       |  2
-rw-r--r--  classes/task.bbclass                 |  1
-rw-r--r--  classes/tinderclient.bbclass         |  4
-rw-r--r--  classes/update-alternatives.bbclass  |  1
-rw-r--r--  classes/update-rc.d.bbclass          |  1
31 files changed, 38 insertions, 132 deletions
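
A quick way to spot any remaining candidates after a change like this is to scan the classes directory for bare import lines. The script below is only an illustrative sketch, not something shipped in the tree:

    #!/usr/bin/env python
    # Illustrative sketch: report .bbclass files that still carry a bare
    # "import bb" or "import os" (or a simple "from bb import ..."), which
    # BitBake makes unnecessary by pre-loading those modules.
    import os
    import re

    pattern = re.compile(r'^\s*(import\s+(bb|os)\s*;?|from\s+bb\s+import\s+[\w, ]+)\s*$')

    for root, dirs, files in os.walk('classes'):
        for name in files:
            if not name.endswith('.bbclass'):
                continue
            path = os.path.join(root, name)
            for lineno, line in enumerate(open(path), 1):
                if pattern.match(line):
                    print("%s:%d: %s" % (path, lineno, line.strip()))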
diff --git a/classes/autotools.bbclass b/classes/autotools.bbclass
index a944f0ec9f..1ea4b6f1d0 100644
--- a/classes/autotools.bbclass
+++ b/classes/autotools.bbclass
@@ -2,8 +2,6 @@
AUTOTOOLS_NATIVE_STAGE_INSTALL = "1"
def autotools_dep_prepend(d):
- import bb;
-
if bb.data.getVar('INHIBIT_AUTOTOOLS_DEPS', d, 1):
return ''
@@ -34,7 +32,6 @@ acpaths = "default"
EXTRA_AUTORECONF = "--exclude=autopoint"
def autotools_set_crosscompiling(d):
- import bb
if not bb.data.inherits_class('native', d):
return " cross_compiling=yes"
return ""
diff --git a/classes/autotools_stage.bbclass b/classes/autotools_stage.bbclass
index 3007eef969..ff0f4cd880 100644
--- a/classes/autotools_stage.bbclass
+++ b/classes/autotools_stage.bbclass
@@ -3,4 +3,3 @@ inherit autotools
do_stage () {
autotools_stage_all
}
-
diff --git a/classes/base.bbclass b/classes/base.bbclass
index 3602801f50..384a67d3d6 100644
--- a/classes/base.bbclass
+++ b/classes/base.bbclass
@@ -1072,7 +1072,6 @@ sysroot_stage_all() {
}
def is_legacy_staging(d):
- import bb
stagefunc = bb.data.getVar('do_stage', d, True)
legacy = True
if stagefunc is None:
diff --git a/classes/cpan_build.bbclass b/classes/cpan_build.bbclass
index 2ca01996a9..b2ec8255de 100644
--- a/classes/cpan_build.bbclass
+++ b/classes/cpan_build.bbclass
@@ -12,7 +12,6 @@ INHIBIT_NATIVE_STAGE_INSTALL = "1"
# libmodule-build-perl)
#
def cpan_build_dep_prepend(d):
- import bb;
if bb.data.getVar('CPAN_BUILD_DEPS', d, 1):
return ''
pn = bb.data.getVar('PN', d, 1)
diff --git a/classes/gettext.bbclass b/classes/gettext.bbclass
index bc0a1dfaba..b2ff2849fa 100644
--- a/classes/gettext.bbclass
+++ b/classes/gettext.bbclass
@@ -1,6 +1,5 @@
DEPENDS =+ "gettext-native"
def gettext_after_parse(d):
- import bb
# Remove the NLS bits if USE_NLS is no.
if bb.data.getVar('USE_NLS', d, 1) == 'no':
cfg = oe_filter_out('^--(dis|en)able-nls$', bb.data.getVar('EXTRA_OECONF', d, 1) or "", d)
@@ -15,4 +14,3 @@ python () {
DEPENDS =+ "gettext-native"
EXTRA_OECONF += "--enable-nls"
-
diff --git a/classes/gtk-icon-cache.bbclass b/classes/gtk-icon-cache.bbclass
index d6c86a383e..524c2f0c46 100644
--- a/classes/gtk-icon-cache.bbclass
+++ b/classes/gtk-icon-cache.bbclass
@@ -27,7 +27,6 @@ done
}
python populate_packages_append () {
- import os.path
packages = bb.data.getVar('PACKAGES', d, 1).split()
pkgdest = bb.data.getVar('PKGDEST', d, 1)
diff --git a/classes/icecc.bbclass b/classes/icecc.bbclass
index 0eb2d9feee..4962fcb7e6 100644
--- a/classes/icecc.bbclass
+++ b/classes/icecc.bbclass
@@ -33,10 +33,8 @@ def icc_determine_gcc_version(gcc):
'i686-apple-darwin8-gcc-4.0.1 (GCC) 4.0.1 (Apple Computer, Inc. build 5363)'
"""
- import os
return os.popen("%s --version" % gcc ).readline().split()[2]
-
def create_cross_env(bb,d):
"""
Create a tar.bz2 of the current toolchain
@@ -47,7 +45,7 @@ def create_cross_env(bb,d):
if len(prefix) == 0:
return ""
- import tarfile, socket, time, os
+ import tarfile, socket, time
ice_dir = bb.data.expand('${CROSS_DIR}', d)
prefix = bb.data.expand('${HOST_PREFIX}' , d)
distro = bb.data.expand('${DISTRO}', d)
@@ -97,7 +95,7 @@ def create_cross_env(bb,d):
def create_native_env(bb,d):
- import tarfile, socket, time, os
+ import tarfile, socket, time
ice_dir = bb.data.expand('${CROSS_DIR}', d)
prefix = bb.data.expand('${HOST_PREFIX}' , d)
distro = bb.data.expand('${DISTRO}', d)
@@ -144,7 +142,7 @@ def get_cross_kernel_cc(bb,d):
def create_cross_kernel_env(bb,d):
- import tarfile, socket, time, os
+ import tarfile, socket, time
ice_dir = bb.data.expand('${CROSS_DIR}', d)
prefix = bb.data.expand('${HOST_PREFIX}' , d)
distro = bb.data.expand('${DISTRO}', d)
@@ -210,8 +208,6 @@ def create_path(compilers, type, bb, d):
"""
Create Symlinks for the icecc in the staging directory
"""
- import os
-
staging = os.path.join(bb.data.expand('${STAGING_DIR}', d), "ice", type)
#check if the icecc path is set by the user
diff --git a/classes/insane.bbclass b/classes/insane.bbclass
index 950bb9771e..49cfc095d3 100644
--- a/classes/insane.bbclass
+++ b/classes/insane.bbclass
@@ -181,7 +181,6 @@ def package_qa_get_elf(path, bits32):
def package_qa_clean_path(path,d):
""" Remove the common prefix from the path. In this case it is the TMPDIR"""
- import bb
return path.replace(bb.data.getVar('TMPDIR',d,True),"")
def package_qa_make_fatal_error(error_class, name, path,d):
@@ -196,7 +195,6 @@ def package_qa_write_error(error_class, name, path, d):
"""
Log the error
"""
- import bb, os
if not bb.data.getVar('QA_LOG', d):
bb.note("a QA error occured but will not be logged because QA_LOG is not set")
return
@@ -221,7 +219,6 @@ def package_qa_write_error(error_class, name, path, d):
f.close()
def package_qa_handle_error(error_class, error_msg, name, path, d):
- import bb
bb.error("QA Issue with %s: %s" % (name, error_msg))
package_qa_write_error(error_class, name, path, d)
return not package_qa_make_fatal_error(error_class, name, path, d)
@@ -258,7 +255,6 @@ def package_qa_check_dev(path, name,d, elf):
Check for ".so" library symlinks in non-dev packages
"""
- import bb, os
sane = True
# SDK packages are special.
@@ -279,7 +275,6 @@ def package_qa_check_dbg(path, name,d, elf):
Check for ".debug" files or directories outside of the dbg package
"""
- import bb, os
sane = True
if not "-dbg" in name:
@@ -304,7 +299,6 @@ def package_qa_check_arch(path,name,d, elf):
if not elf:
return True
- import bb, os
sane = True
target_os = bb.data.getVar('TARGET_OS', d, True)
target_arch = bb.data.getVar('TARGET_ARCH', d, True)
@@ -339,7 +333,6 @@ def package_qa_check_desktop(path, name, d, elf):
"""
Run all desktop files through desktop-file-validate.
"""
- import bb, os
sane = True
if path.endswith(".desktop"):
output = os.popen("desktop-file-validate %s" % path)
@@ -353,7 +346,6 @@ def package_qa_hash_style(path, name, d, elf):
"""
Check if the binary has the right hash style...
"""
- import bb, os
if not elf:
return True
@@ -400,7 +392,6 @@ def package_qa_check_staged(path,d):
to find the one responsible for the errors easily even
if we look at every .pc and .la file
"""
- import os, bb
sane = True
tmpdir = bb.data.getVar('TMPDIR', d, True)
@@ -440,7 +431,6 @@ def package_qa_check_staged(path,d):
# Walk over all files in a directory and call func
def package_qa_walk(path, funcs, package,d):
- import bb, os
sane = True
#if this will throw an exception, then fix the dict above
@@ -464,7 +454,6 @@ def package_qa_walk(path, funcs, package,d):
return sane
def package_qa_check_rdepends(pkg, workdir, d):
- import bb
sane = True
if not "-dbg" in pkg and not "task-" in pkg and not "-image" in pkg:
# Copied from package_ipk.bbclass
@@ -501,7 +490,6 @@ def package_qa_check_rdepends(pkg, workdir, d):
# The PACKAGE FUNC to scan each package
python do_package_qa () {
- import bb
bb.debug(2, "DO PACKAGE QA")
workdir = bb.data.getVar('WORKDIR', d, True)
packages = bb.data.getVar('PACKAGES',d, True)
@@ -547,7 +535,6 @@ python do_qa_staging() {
addtask qa_configure after do_configure before do_compile
python do_qa_configure() {
bb.debug(1, "Checking sanity of the config.log file")
- import os
for root, dirs, files in os.walk(bb.data.getVar('WORKDIR', d, True)):
statement = "grep 'CROSS COMPILE Badness:' %s > /dev/null" % \
os.path.join(root,"config.log")
diff --git a/classes/kernel-arch.bbclass b/classes/kernel-arch.bbclass
index 65d220063e..8894fa554c 100644
--- a/classes/kernel-arch.bbclass
+++ b/classes/kernel-arch.bbclass
@@ -15,7 +15,7 @@ valid_archs = "alpha cris ia64 \
avr32 blackfin"
def map_kernel_arch(a, d):
- import bb, re
+ import re
valid_archs = bb.data.getVar('valid_archs', d, 1).split()
diff --git a/classes/kernel.bbclass b/classes/kernel.bbclass
index 0dde2d33ca..8714aed0fd 100644
--- a/classes/kernel.bbclass
+++ b/classes/kernel.bbclass
@@ -8,9 +8,6 @@ KERNEL_IMAGETYPE ?= "zImage"
# Add dependency on mkimage for kernels that build a uImage
python __anonymous () {
-
- import bb
-
kerneltype = bb.data.getVar('KERNEL_IMAGETYPE', d, 1) or ''
if kerneltype == 'uImage':
depends = bb.data.getVar("DEPENDS", d, 1)
@@ -310,7 +307,7 @@ module_conf_rfcomm = "alias bt-proto-3 rfcomm"
python populate_packages_prepend () {
def extract_modinfo(file):
- import tempfile, os, re
+ import tempfile, re
tempfile.tempdir = bb.data.getVar("WORKDIR", d, 1)
tf = tempfile.mkstemp()
tmpfile = tf[1]
@@ -331,7 +328,7 @@ python populate_packages_prepend () {
return vals
def parse_depmod():
- import os, re
+ import re
dvar = bb.data.getVar('PKGD', d, 1)
if not dvar:
@@ -385,7 +382,7 @@ python populate_packages_prepend () {
file = file.replace(bb.data.getVar('PKGD', d, 1) or '', '', 1)
if module_deps.has_key(file):
- import os.path, re
+ import re
dependencies = []
for i in module_deps[file]:
m = re.match(pattern, os.path.basename(i))
@@ -463,7 +460,7 @@ python populate_packages_prepend () {
do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='%skernel-%s' % (maybe_update_modules, bb.data.getVar("KERNEL_VERSION", d, 1)))
- import re, os
+ import re
metapkg = "kernel-modules"
bb.data.setVar('ALLOW_EMPTY_' + metapkg, "1", d)
bb.data.setVar('FILES_' + metapkg, "", d)
diff --git a/classes/linux-kernel-base.bbclass b/classes/linux-kernel-base.bbclass
index 4e2e2da373..b3e0fdad7a 100644
--- a/classes/linux-kernel-base.bbclass
+++ b/classes/linux-kernel-base.bbclass
@@ -1,6 +1,6 @@
# parse kernel ABI version out of <linux/version.h>
def get_kernelversion(p):
- import re, os
+ import re
fn = p + '/include/linux/utsrelease.h'
if not os.path.isfile(fn):
@@ -30,7 +30,6 @@ def get_kernelmajorversion(p):
return None
def linux_module_packages(s, d):
- import bb, os.path
suffix = ""
return " ".join(map(lambda s: "kernel-module-%s%s" % (s.lower().replace('_', '-').replace('@', '+'), suffix), s.split()))
diff --git a/classes/openmoko-base.bbclass b/classes/openmoko-base.bbclass
index 184477b1c0..8cbf7df884 100644
--- a/classes/openmoko-base.bbclass
+++ b/classes/openmoko-base.bbclass
@@ -4,7 +4,6 @@ OPENMOKO_RELEASE ?= "OM-2007.2"
OPENMOKO_MIRROR ?= "svn://svn.openmoko.org/trunk"
def openmoko_base_get_subdir(d):
- import bb
openmoko, section = bb.data.getVar('SECTION', d, 1).split("/")
if section == 'base' or section == 'libs': return ""
elif section in 'apps tools pim'.split(): return "applications"
diff --git a/classes/openmoko2.bbclass b/classes/openmoko2.bbclass
index ef734e4311..233c721ff7 100644
--- a/classes/openmoko2.bbclass
+++ b/classes/openmoko2.bbclass
@@ -5,12 +5,10 @@ OPENMOKO_RELEASE ?= "OM-2007.2"
OPENMOKO_MIRROR ?= "svn://svn.openmoko.org/trunk"
def openmoko_two_get_license(d):
- import bb
openmoko, section = bb.data.getVar('SECTION', d, 1).split("/")
return "LGPL GPL".split()[section != "libs"]
def openmoko_two_get_subdir(d):
- import bb
openmoko, section = bb.data.getVar('SECTION', d, 1).split("/")
if section == 'base': return ""
elif section == 'libs': return "libraries"
diff --git a/classes/package.bbclass b/classes/package.bbclass
index 1c14908f1a..5de48d6bfb 100644
--- a/classes/package.bbclass
+++ b/classes/package.bbclass
@@ -27,7 +27,6 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
Used in .bb files to split up dynamically generated subpackages of a
given package, usually plugins or modules.
"""
- import os, os.path, bb
dvar = bb.data.getVar('PKGD', d, True)
@@ -130,7 +129,6 @@ def package_stash_hook(func, name, d):
f.close()
python () {
- import bb
if bb.data.getVar('PACKAGES', d, True) != '':
deps = bb.data.getVarFlag('do_package', 'depends', d) or ""
for dep in (bb.data.getVar('PACKAGE_DEPENDS', d, True) or "").split():
@@ -148,7 +146,7 @@ def runstrip(file, d):
# A working 'file' (one which works on the target architecture)
# is necessary for this stuff to work, hence the addition to do_package[depends]
- import bb, os, commands, stat
+ import commands, stat
pathprefix = "export PATH=%s; " % bb.data.getVar('PATH', d, True)
@@ -253,8 +251,6 @@ def write_package_md5sums (root, outfile, ignorepaths):
#
def get_package_mapping (pkg, d):
- import bb, os
-
data = read_subpkgdata(pkg, d)
key = "PKG_%s" % pkg
@@ -264,8 +260,6 @@ def get_package_mapping (pkg, d):
return pkg
def runtime_mapping_rename (varname, d):
- import bb, os
-
#bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, True)))
new_depends = []
@@ -287,8 +281,6 @@ def runtime_mapping_rename (varname, d):
#
python package_do_split_locales() {
- import os
-
if (bb.data.getVar('PACKAGE_NO_LOCALE', d, True) == '1'):
bb.debug(1, "package requested not splitting locales")
return
@@ -335,8 +327,6 @@ python package_do_split_locales() {
}
python perform_packagecopy () {
- import os
-
dest = bb.data.getVar('D', d, True)
dvar = bb.data.getVar('PKGD', d, True)
@@ -348,7 +338,7 @@ python perform_packagecopy () {
}
python populate_packages () {
- import os, glob, stat, errno, re
+ import glob, stat, errno, re
workdir = bb.data.getVar('WORKDIR', d, True)
outdir = bb.data.getVar('DEPLOY_DIR', d, True)
@@ -595,7 +585,7 @@ fi
SHLIBSDIR = "${STAGING_DIR_HOST}/shlibs"
python package_do_shlibs() {
- import os, re, os.path
+ import re
exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
if exclude_shlibs:
@@ -768,7 +758,7 @@ python package_do_shlibs() {
}
python package_do_pkgconfig () {
- import re, os
+ import re
packages = bb.data.getVar('PACKAGES', d, True)
workdir = bb.data.getVar('WORKDIR', d, True)
diff --git a/classes/package_deb.bbclass b/classes/package_deb.bbclass
index 4a17010538..2a9bf76a79 100644
--- a/classes/package_deb.bbclass
+++ b/classes/package_deb.bbclass
@@ -17,13 +17,11 @@ DPKG_ARCH_i686 ?= "i386"
DPKG_ARCH_pentium ?= "i386"
python package_deb_fn () {
- from bb import data
bb.data.setVar('PKGFN', bb.data.getVar('PKG',d), d)
}
addtask package_deb_install
python do_package_deb_install () {
- import os, sys
pkg = bb.data.getVar('PKG', d, 1)
pkgfn = bb.data.getVar('PKGFN', d, 1)
rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
@@ -39,6 +37,7 @@ python do_package_deb_install () {
os.makedirs(rootfs)
os.chdir(rootfs)
except OSError:
+ import sys
raise bb.build.FuncFailed(str(sys.exc_value))
# update packages file
@@ -69,14 +68,13 @@ python do_package_deb_install () {
}
python do_package_deb () {
- import sys, re, copy
+ import re, copy
workdir = bb.data.getVar('WORKDIR', d, 1)
if not workdir:
bb.error("WORKDIR not defined, unable to package")
return
- import os # path manipulations
outdir = bb.data.getVar('DEPLOY_DIR_DEB', d, 1)
if not outdir:
bb.error("DEPLOY_DIR_DEB not defined, unable to package")
@@ -188,6 +186,7 @@ python do_package_deb () {
for (c, fs) in fields:
ctrlfile.write(unicode(c % tuple(pullData(fs, localdata))))
except KeyError:
+ import sys
(type, value, traceback) = sys.exc_info()
bb.utils.unlockfile(lf)
ctrlfile.close()
@@ -261,7 +260,6 @@ python do_package_deb () {
}
python () {
- import bb
if bb.data.getVar('PACKAGES', d, True) != '':
deps = (bb.data.getVarFlag('do_package_write_deb', 'depends', d) or "").split()
deps.append('dpkg-native:do_populate_staging')
diff --git a/classes/package_ipk.bbclass b/classes/package_ipk.bbclass
index b48f8781eb..3582195393 100644
--- a/classes/package_ipk.bbclass
+++ b/classes/package_ipk.bbclass
@@ -17,7 +17,6 @@ python package_ipk_install () {
# Warning - this function is not multimachine safe (see stagingdir reference)!
#
- import os, sys, bb
pkg = bb.data.getVar('PKG', d, 1)
pkgfn = bb.data.getVar('PKGFN', d, 1)
rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
@@ -31,6 +30,7 @@ python package_ipk_install () {
bb.mkdirhier(rootfs)
os.chdir(rootfs)
except OSError:
+ import sys
(type, value, traceback) = sys.exc_info()
print value
raise bb.build.FuncFailed
@@ -135,14 +135,14 @@ package_generate_ipkg_conf () {
}
python do_package_ipk () {
- import sys, re, copy, bb
+ import re, copy
workdir = bb.data.getVar('WORKDIR', d, 1)
if not workdir:
bb.error("WORKDIR not defined, unable to package")
return
- import os # path manipulations
+
outdir = bb.data.getVar('DEPLOY_DIR_IPK', d, 1)
if not outdir:
bb.error("DEPLOY_DIR_IPK not defined, unable to package")
@@ -174,10 +174,9 @@ python do_package_ipk () {
pkgname = pkg
bb.data.setVar('PKG', pkgname, localdata)
- overrides = bb.data.getVar('OVERRIDES', localdata)
+ overrides = bb.data.getVar('OVERRIDES', localdata, True)
if not overrides:
raise bb.build.FuncFailed('OVERRIDES not defined')
- overrides = bb.data.expand(overrides, localdata)
bb.data.setVar('OVERRIDES', overrides + ':' + pkg, localdata)
bb.data.update_data(localdata)
@@ -194,8 +193,7 @@ python do_package_ipk () {
except ValueError:
pass
if not g and bb.data.getVar('ALLOW_EMPTY', localdata) != "1":
- from bb import note
- note("Not creating empty archive for %s-%s" % (pkg, bb.data.expand('${PV}-${PR}${DISTRO_PR}', localdata, True)))
+ bb.note("Not creating empty archive for %s-%s" % (pkg, bb.data.expand('${PV}-${PR}${DISTRO_PR}', localdata, True)))
bb.utils.unlockfile(lf)
continue
@@ -237,6 +235,7 @@ python do_package_ipk () {
raise KeyError(f)
ctrlfile.write(c % tuple(pullData(fs, localdata)))
except KeyError:
+ import sys
(type, value, traceback) = sys.exc_info()
ctrlfile.close()
bb.utils.unlockfile(lf)
@@ -308,7 +307,6 @@ python do_package_ipk () {
}
python () {
- import bb
if bb.data.getVar('PACKAGES', d, True) != '':
deps = (bb.data.getVarFlag('do_package_write_ipk', 'depends', d) or "").split()
deps.append('ipkg-utils-native:do_populate_staging')
@@ -317,7 +315,6 @@ python () {
}
python do_package_write_ipk () {
- import bb
packages = bb.data.getVar('PACKAGES', d, True)
if not packages:
bb.debug(1, "No PACKAGES defined, nothing to package")
diff --git a/classes/package_rpm.bbclass b/classes/package_rpm.bbclass
index 13cb8773af..ab09bb24d3 100644
--- a/classes/package_rpm.bbclass
+++ b/classes/package_rpm.bbclass
@@ -10,9 +10,6 @@ RPMOPTS="--rcfile=${WORKDIR}/rpmrc --target ${TARGET_SYS}"
RPM="rpm ${RPMOPTS}"
python write_specfile() {
- from bb import data, build
- import sys
-
version = bb.data.getVar('PV', d, 1)
version = version.replace('-', '+')
bb.data.setVar('RPMPV', version, d)
@@ -50,9 +47,9 @@ python write_specfile() {
del files[files.index(r)]
except ValueError:
pass
+
if not files and bb.data.getVar('ALLOW_EMPTY', d) != "1":
- from bb import note
- note("Not creating empty archive for %s" % (bb.data.expand('${PKG}-${PV}-${PR}${DISTRO_PR}', d, True)))
+ bb.note("Not creating empty archive for %s" % (bb.data.expand('${PKG}-${PV}-${PR}${DISTRO_PR}', d, True)))
return
# output .spec using this metadata store
@@ -154,7 +151,6 @@ python do_package_rpm () {
bb.error("WORKDIR not defined, unable to package")
return
- import os # path manipulations
outdir = bb.data.getVar('DEPLOY_DIR_RPM', d, 1)
if not outdir:
bb.error("DEPLOY_DIR_RPM not defined, unable to package")
@@ -209,7 +205,6 @@ python do_package_rpm () {
}
python () {
- import bb
if bb.data.getVar('PACKAGES', d, True) != '' and \
not bb.data.inherits_class('native', d) and \
not bb.data.inherits_class('cross', d):
@@ -225,5 +220,7 @@ python do_package_write_rpm () {
bb.build.exec_func("rpm_prep", d)
bb.build.exec_func("do_package_rpm", d)
}
+
do_package_write_rpm[dirs] = "${D}"
addtask package_write_rpm before do_package_write after do_package
+
diff --git a/classes/package_tar.bbclass b/classes/package_tar.bbclass
index a0eacf27f5..9c9ae4ff0a 100644
--- a/classes/package_tar.bbclass
+++ b/classes/package_tar.bbclass
@@ -1,14 +1,11 @@
inherit package
python package_tar_fn () {
- import os
- from bb import data
fn = os.path.join(bb.data.getVar('DEPLOY_DIR_TAR', d, True), bb.data.expand('${PKG}-${PV}-${PR}${DISTRO_PR}.tar.gz', d, True))
bb.data.setVar('PKGFN', fn, d)
}
python package_tar_install () {
- import os, sys
pkg = bb.data.getVar('PKG', d, 1)
pkgfn = bb.data.getVar('PKGFN', d, 1)
rootfs = bb.data.getVar('IMAGE_ROOTFS', d, 1)
@@ -20,6 +17,7 @@ python package_tar_install () {
bb.mkdirhier(rootfs)
os.chdir(rootfs)
except OSError:
+ import sys
(type, value, traceback) = sys.exc_info()
print value
raise bb.build.FuncFailed
@@ -39,7 +37,6 @@ python do_package_tar () {
bb.error("WORKDIR not defined, unable to package")
return
- import os # path manipulations
outdir = bb.data.getVar('DEPLOY_DIR_TAR', d, 1)
if not outdir:
bb.error("DEPLOY_DIR_TAR not defined, unable to package")
@@ -91,7 +88,6 @@ python do_package_tar () {
}
python () {
- import bb
if bb.data.getVar('PACKAGES', d, True) != '':
deps = (bb.data.getVarFlag('do_package_write_tar', 'depends', d) or "").split()
deps.append('tar-native:do_populate_staging')
diff --git a/classes/packaged-staging.bbclass b/classes/packaged-staging.bbclass
index d6e01da1ce..3b99e39ed9 100644
--- a/classes/packaged-staging.bbclass
+++ b/classes/packaged-staging.bbclass
@@ -80,8 +80,6 @@ PSTAGE_LIST_CMD = "${PSTAGE_PKGMANAGER} -f ${PSTAGE_MACHCONFIG} -o ${TMP
PSTAGE_TMPDIR_STAGE = "${WORKDIR}/staging-pkg"
def pstage_manualclean(srcname, destvarname, d):
- import os, bb
-
src = os.path.join(bb.data.getVar('PSTAGE_TMPDIR_STAGE', d, True), srcname)
dest = bb.data.getVar(destvarname, d, True)
@@ -92,7 +90,6 @@ def pstage_manualclean(srcname, destvarname, d):
os.system("rm %s" % filepath)
def pstage_set_pkgmanager(d):
- import bb
path = bb.data.getVar("PATH", d, 1)
pkgmanager = bb.which(path, 'opkg-cl')
if pkgmanager == "":
@@ -102,8 +99,6 @@ def pstage_set_pkgmanager(d):
def pstage_cleanpackage(pkgname, d):
- import os, bb
-
path = bb.data.getVar("PATH", d, 1)
pstage_set_pkgmanager(d)
list_cmd = bb.data.getVar("PSTAGE_LIST_CMD", d, True)
@@ -164,8 +159,6 @@ PSTAGE_TASKS_COVERED = "fetch unpack munge patch configure qa_configure rig_loca
SCENEFUNCS += "packagestage_scenefunc"
python packagestage_scenefunc () {
- import os
-
if bb.data.getVar("PSTAGING_ACTIVE", d, 1) == "0":
return
@@ -243,10 +236,7 @@ packagestage_scenefunc[dirs] = "${STAGING_DIR}"
addhandler packagedstage_stampfixing_eventhandler
python packagedstage_stampfixing_eventhandler() {
- from bb.event import getName
- import os
-
- if getName(e) == "StampUpdate":
+ if bb.event.getName(e) == "StampUpdate":
taskscovered = bb.data.getVar("PSTAGE_TASKS_COVERED", e.data, 1).split()
for (fn, task) in e.targets:
# strip off 'do_'
diff --git a/classes/packagehistory.bbclass b/classes/packagehistory.bbclass
index b435149d22..492bbac218 100644
--- a/classes/packagehistory.bbclass
+++ b/classes/packagehistory.bbclass
@@ -4,6 +4,7 @@ PACKAGEFUNCS += "emit_pkghistory"
PKGHIST_DIR = "${TMPDIR}/pkghistory/${BASEPKG_TARGET_SYS}/"
+
#
# Called during do_package to write out metadata about this package
# for comparision when writing future packages
@@ -60,8 +61,6 @@ python emit_pkghistory() {
def check_pkghistory(pkg, pe, pv, pr, lastversion):
- import bb
-
(last_pe, last_pv, last_pr) = lastversion
bb.debug(2, "Checking package history")
@@ -71,7 +70,6 @@ def check_pkghistory(pkg, pe, pv, pr, lastversion):
def write_pkghistory(pkg, pe, pv, pr, d):
- import bb, os
bb.debug(2, "Writing package history")
pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True)
@@ -81,8 +79,6 @@ def write_pkghistory(pkg, pe, pv, pr, d):
os.makedirs(verpath)
def write_latestlink(pkg, pe, pv, pr, d):
- import bb, os
-
pkghistdir = bb.data.getVar('PKGHIST_DIR', d, True)
def rm_link(path):
diff --git a/classes/patch.bbclass b/classes/patch.bbclass
index bb49fed7a1..33184df0d0 100644
--- a/classes/patch.bbclass
+++ b/classes/patch.bbclass
@@ -4,8 +4,6 @@
QUILTRCFILE ?= "${STAGING_BINDIR_NATIVE}/quiltrc"
def patch_init(d):
- import os, sys
-
class NotFoundError(Exception):
def __init__(self, path):
self.path = path
@@ -13,8 +11,6 @@ def patch_init(d):
return "Error: %s not found." % self.path
def md5sum(fname):
- import sys
-
# when we move to Python 2.5 as minimal supported
# we can kill that try/except as hashlib is 2.5+
try:
@@ -251,6 +247,7 @@ def patch_init(d):
try:
output = runcmd(["quilt", "applied"], self.dir)
except CmdError:
+ import sys
if sys.exc_value.output.strip() == "No patches applied":
return
else:
@@ -364,6 +361,7 @@ def patch_init(d):
try:
self.patchset.Push()
except Exception:
+ import sys
os.chdir(olddir)
raise sys.exc_value
@@ -454,7 +452,6 @@ PATCHDEPENDENCY = "${PATCHTOOL}-native:do_populate_staging"
do_patch[depends] = "${PATCHDEPENDENCY}"
python patch_do_patch() {
- import re
patch_init(d)
src_uri = (bb.data.getVar('SRC_URI', d, 1) or '').split()
diff --git a/classes/pkgconfig.bbclass b/classes/pkgconfig.bbclass
index de892b5413..5844461fb4 100644
--- a/classes/pkgconfig.bbclass
+++ b/classes/pkgconfig.bbclass
@@ -1,5 +1,3 @@
-inherit base
-
DEPENDS_prepend = "pkgconfig-native "
do_install_prepend () {
diff --git a/classes/rm_work.bbclass b/classes/rm_work.bbclass
index 094f9167bb..a53d12baf3 100644
--- a/classes/rm_work.bbclass
+++ b/classes/rm_work.bbclass
@@ -35,10 +35,7 @@ addtask rm_work_all after do_rm_work
addhandler rmwork_stampfixing_eventhandler
python rmwork_stampfixing_eventhandler() {
- from bb.event import getName
- import os
-
- if getName(e) == "StampUpdate":
+ if bb.event.getName(e) == "StampUpdate":
for (fn, task) in e.targets:
if task == 'rm_work_all':
continue
diff --git a/classes/sanity.bbclass b/classes/sanity.bbclass
index 83c03299bf..f65df61c1d 100644
--- a/classes/sanity.bbclass
+++ b/classes/sanity.bbclass
@@ -11,8 +11,6 @@ def raise_sanity_error(msg):
%s""" % msg)
def check_conf_exists(fn, data):
- import bb, os
-
bbpath = []
fn = bb.data.expand(fn, data)
vbbpath = bb.data.getVar("BBPATH", data)
@@ -26,12 +24,12 @@ def check_conf_exists(fn, data):
def check_sanity(e):
from bb import note, error, data, __version__
- from bb.event import Handled, NotHandled, getName
+
try:
from distutils.version import LooseVersion
except ImportError:
def LooseVersion(v): print "WARNING: sanity.bbclass can't compare versions without python-distutils"; return 1
- import os, commands
+ import commands
# Check the bitbake version meets minimum requirements
minversion = data.getVar('BB_MIN_VERSION', e.data , True)
@@ -193,10 +191,8 @@ def check_sanity(e):
addhandler check_sanity_eventhandler
python check_sanity_eventhandler() {
- from bb import note, error, data, __version__
- from bb.event import getName
-
- if getName(e) == "ConfigParsed":
+ from bb.event import Handled, NotHandled
+ if bb.event.getName(e) == "ConfigParsed":
check_sanity(e)
return NotHandled
diff --git a/classes/siteinfo.bbclass b/classes/siteinfo.bbclass
index 5c929dffcd..93cee4f890 100644
--- a/classes/siteinfo.bbclass
+++ b/classes/siteinfo.bbclass
@@ -16,8 +16,6 @@
# If 'what' doesn't exist then an empty value is returned
#
def get_siteinfo_list(d):
- import bb
-
target = bb.data.getVar('HOST_ARCH', d, 1) + "-" + bb.data.getVar('HOST_OS', d, 1)
targetinfo = {\
@@ -86,8 +84,6 @@ def get_siteinfo_list(d):
# 2) ${FILE_DIRNAME}/site-${PV} - app version specific
#
def siteinfo_get_files(d):
- import bb, os
-
sitefiles = ""
# Determine which site files to look for
diff --git a/classes/sourcepkg.bbclass b/classes/sourcepkg.bbclass
index 200ff8c228..5aacf92d10 100644
--- a/classes/sourcepkg.bbclass
+++ b/classes/sourcepkg.bbclass
@@ -5,8 +5,6 @@ EXCLUDE_FROM ?= ".pc autom4te.cache"
DISTRO ?= "openembedded"
def get_src_tree(d):
- import bb
- import os, os.path
workdir = bb.data.getVar('WORKDIR', d, 1)
if not workdir:
@@ -56,8 +54,6 @@ sourcepkg_do_archive_bb() {
}
python sourcepkg_do_dumpdata() {
- import os
- import os.path
workdir = bb.data.getVar('WORKDIR', d, 1)
distro = bb.data.getVar('DISTRO', d, 1)
diff --git a/classes/src_distribute.bbclass b/classes/src_distribute.bbclass
index 7d75c9e16f..22044752ef 100644
--- a/classes/src_distribute.bbclass
+++ b/classes/src_distribute.bbclass
@@ -3,7 +3,7 @@ SRC_DISTRIBUTECOMMAND[func] = "1"
addtask distribute_sources before do_build after do_fetch
python do_distribute_sources () {
- import os, re, bb
+ import re
bb.build.exec_func("do_fetch", d)
diff --git a/classes/task.bbclass b/classes/task.bbclass
index f3287ebf73..4edd704829 100644
--- a/classes/task.bbclass
+++ b/classes/task.bbclass
@@ -17,7 +17,6 @@ PACKAGE_ARCH = "all"
# to the list. Their dependencies (RRECOMMENDS) are handled as usual
# by package_depchains in a following step.
python () {
- import bb
packages = bb.data.getVar('PACKAGES', d, 1).split()
genpackages = []
for pkg in packages:
diff --git a/classes/tinderclient.bbclass b/classes/tinderclient.bbclass
index d2b2b33898..6c86d44a21 100644
--- a/classes/tinderclient.bbclass
+++ b/classes/tinderclient.bbclass
@@ -1,6 +1,5 @@
def tinder_http_post(d, server, selector, content_type, body):
import httplib
- from bb import data
# now post it
for i in range(0,5):
try:
@@ -61,8 +60,7 @@ def tinder_format_http_post(d,status,log):
for the tinderbox to be happy.
"""
- from bb import data, build
- import os,random
+ import random
# the variables we will need to send on this form post
variables = {
diff --git a/classes/update-alternatives.bbclass b/classes/update-alternatives.bbclass
index c63581c5d1..ddbf4c1947 100644
--- a/classes/update-alternatives.bbclass
+++ b/classes/update-alternatives.bbclass
@@ -11,7 +11,6 @@ update-alternatives --remove ${ALTERNATIVE_NAME} ${ALTERNATIVE_PATH}
}
def update_alternatives_after_parse(d):
- import bb
if bb.data.getVar('ALTERNATIVE_NAME', d) == None:
raise bb.build.FuncFailed, "%s inherits update-alternatives but doesn't set ALTERNATIVE_NAME" % bb.data.getVar('FILE', d)
if bb.data.getVar('ALTERNATIVE_PATH', d) == None:
diff --git a/classes/update-rc.d.bbclass b/classes/update-rc.d.bbclass
index 00ec37cfb8..b6491ed9d6 100644
--- a/classes/update-rc.d.bbclass
+++ b/classes/update-rc.d.bbclass
@@ -41,7 +41,6 @@ fi
def update_rc_after_parse(d):
- import bb
if bb.data.getVar('INITSCRIPT_PACKAGES', d) == None:
if bb.data.getVar('INITSCRIPT_NAME', d) == None:
raise bb.build.FuncFailed, "%s inherits update-rc.d but doesn't set INITSCRIPT_NAME" % bb.data.getVar('FILE', d)