Diffstat (limited to 'meta/classes')
-rw-r--r--  meta/classes/archiver.bbclass            |   64
-rw-r--r--  meta/classes/buildhistory.bbclass        |   26
-rw-r--r--  meta/classes/ccache.bbclass              |   11
-rw-r--r--  meta/classes/chrpath.bbclass             |    2
-rw-r--r--  meta/classes/create-spdx-2.2.bbclass     | 1164
-rw-r--r--  meta/classes/create-spdx.bbclass         | 1024
-rw-r--r--  meta/classes/cve-check.bbclass           |  104
-rw-r--r--  meta/classes/devtool-source.bbclass      |   77
-rw-r--r--  meta/classes/externalsrc.bbclass         |  274
-rw-r--r--  meta/classes/extrausers.bbclass          |    2
-rw-r--r--  meta/classes/go-vendor.bbclass           |  211
-rw-r--r--  meta/classes/icecc.bbclass               |   10
-rw-r--r--  meta/classes/image-buildinfo.bbclass     |    4
-rw-r--r--  meta/classes/multilib.bbclass            |    5
-rw-r--r--  meta/classes/multilib_global.bbclass     |    1
-rw-r--r--  meta/classes/own-mirrors.bbclass         |    2
-rw-r--r--  meta/classes/recipe_sanity.bbclass       |    2
-rw-r--r--  meta/classes/report-error.bbclass        |   57
-rw-r--r--  meta/classes/rm_work.bbclass             |  116
-rw-r--r--  meta/classes/siteconfig.bbclass          |   39
-rw-r--r--  meta/classes/testexport.bbclass          |  180
-rw-r--r--  meta/classes/useradd-staticids.bbclass   |    2
-rw-r--r--  meta/classes/useradd.bbclass             |   75
-rw-r--r--  meta/classes/useradd_base.bbclass        |    2
24 files changed, 1989 insertions, 1465 deletions
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index 0710c1ec5e..2d0bbfbd42 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -76,33 +76,39 @@ do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"
# This is a convenience for the shell script to use it
-
-python () {
- pn = d.getVar('PN')
- assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
- if pn in assume_provided:
- for p in d.getVar("PROVIDES").split():
- if p != pn:
- pn = p
- break
+def include_package(d, pn):
included, reason = copyleft_should_include(d)
if not included:
bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
- return
+ return False
+
else:
bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))
-
# glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
# so avoid archiving source here.
if pn.startswith('glibc-locale'):
- return
+ return False
# We just archive gcc-source for all the gcc related recipes
if d.getVar('BPN') in ['gcc', 'libgcc'] \
and not pn.startswith('gcc-source'):
bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
+ return False
+
+ return True
+
+python () {
+ pn = d.getVar('PN')
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+
+ if not include_package(d, pn):
return
# TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
@@ -395,19 +401,11 @@ python do_ar_mirror() {
subprocess.check_call(cmd, shell=True)
}
-def exclude_useless_paths(tarinfo):
- if tarinfo.isdir():
- if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
- return None
- elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
- return None
- return tarinfo
-
def create_tarball(d, srcdir, suffix, ar_outdir):
"""
create the tarball from srcdir
"""
- import tarfile
+ import subprocess
# Make sure we are only creating a single tarball for gcc sources
if (d.getVar('SRC_URI') == ""):
@@ -419,6 +417,16 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
srcdir = os.path.realpath(srcdir)
compression_method = d.getVarFlag('ARCHIVER_MODE', 'compression')
+ if compression_method == "xz":
+ compression_cmd = "xz %s" % d.getVar('XZ_DEFAULTS')
+ # To keep compatibility with ARCHIVER_MODE[compression]
+ elif compression_method == "gz":
+ compression_cmd = "gzip"
+ elif compression_method == "bz2":
+ compression_cmd = "bzip2"
+ else:
+ bb.fatal("Unsupported compression_method: %s" % compression_method)
+
bb.utils.mkdirhier(ar_outdir)
if suffix:
filename = '%s-%s.tar.%s' % (d.getVar('PF'), suffix, compression_method)
@@ -427,9 +435,11 @@ def create_tarball(d, srcdir, suffix, ar_outdir):
tarname = os.path.join(ar_outdir, filename)
bb.note('Creating %s' % tarname)
- tar = tarfile.open(tarname, 'w:%s' % compression_method)
- tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
- tar.close()
+ dirname = os.path.dirname(srcdir)
+ basename = os.path.basename(srcdir)
+ exclude = "--exclude=temp --exclude=patches --exclude='.pc'"
+ tar_cmd = "tar %s -cf - %s | %s > %s" % (exclude, basename, compression_cmd, tarname)
+ subprocess.check_call(tar_cmd, cwd=dirname, shell=True)
# creating .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):
@@ -462,10 +472,8 @@ def create_diff_gz(d, src_orig, src, ar_outdir):
os.chdir(cwd)
def is_work_shared(d):
- pn = d.getVar('PN')
- return pn.startswith('gcc-source') or \
- bb.data.inherits_class('kernel', d) or \
- (bb.data.inherits_class('kernelsrc', d) and d.getVar('S') == d.getVar('STAGING_KERNEL_DIR'))
+ sharedworkdir = os.path.join(d.getVar('TMPDIR'), 'work-shared')
+ return d.getVar('S').startswith(sharedworkdir)
# Run do_unpack and do_patch
python do_unpack_and_patch() {
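For reference, the compression selection introduced above is driven entirely by the ARCHIVER_MODE[compression] varflag. A minimal local.conf fragment exercising it might look like the following sketch (values are illustrative; only the methods handled in create_tarball() are valid):

    INHERIT += "archiver"
    # must be one of the methods handled above: xz, gz or bz2
    ARCHIVER_MODE[compression] = "xz"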
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index 395f594278..fd53e92402 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -691,28 +691,28 @@ python buildhistory_get_extra_sdkinfo() {
# By using ROOTFS_POSTUNINSTALL_COMMAND we get in after uninstallation of
# unneeded packages but before the removal of packaging files
-ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_list_installed_image ;"
-ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_get_image_installed ;"
-ROOTFS_POSTUNINSTALL_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_image ;| buildhistory_get_image_installed ;"
+ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_list_installed_image"
+ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_get_image_installed"
+ROOTFS_POSTUNINSTALL_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_image| buildhistory_get_image_installed"
ROOTFS_POSTUNINSTALL_COMMAND[vardepsexclude] += "buildhistory_list_installed_image buildhistory_get_image_installed"
-IMAGE_POSTPROCESS_COMMAND += "buildhistory_get_imageinfo ;"
-IMAGE_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_imageinfo ;"
+IMAGE_POSTPROCESS_COMMAND += "buildhistory_get_imageinfo"
+IMAGE_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_imageinfo"
IMAGE_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_imageinfo"
# We want these to be the last run so that we get called after complementary package installation
-POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_list_installed_sdk_target;"
-POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_get_sdk_installed_target;"
-POPULATE_SDK_POST_TARGET_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_target;| buildhistory_get_sdk_installed_target;"
+POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_list_installed_sdk_target"
+POPULATE_SDK_POST_TARGET_COMMAND:append = " buildhistory_get_sdk_installed_target"
+POPULATE_SDK_POST_TARGET_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_target| buildhistory_get_sdk_installed_target"
POPULATE_SDK_POST_TARGET_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_target buildhistory_get_sdk_installed_target"
-POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_list_installed_sdk_host;"
-POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_get_sdk_installed_host;"
-POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_host;| buildhistory_get_sdk_installed_host;"
+POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_list_installed_sdk_host"
+POPULATE_SDK_POST_HOST_COMMAND:append = " buildhistory_get_sdk_installed_host"
+POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_sdk_host| buildhistory_get_sdk_installed_host"
POPULATE_SDK_POST_HOST_COMMAND[vardepsexclude] += "buildhistory_list_installed_sdk_host buildhistory_get_sdk_installed_host"
-SDK_POSTPROCESS_COMMAND:append = " buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "
-SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; "
+SDK_POSTPROCESS_COMMAND:append = " buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
+SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
SDK_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_sdkinfo buildhistory_get_extra_sdkinfo"
python buildhistory_write_sigs() {
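The trailing semicolons are dropped above, which is consistent with these *_COMMAND hooks being treated as space-separated lists of functions rather than literal shell fragments. Appending a custom hook (the function name below is purely illustrative) therefore no longer needs a terminating " ;":

    IMAGE_POSTPROCESS_COMMAND += "my_image_postprocess_hook"
    # old, shell-fragment style that the change above retires:
    # IMAGE_POSTPROCESS_COMMAND += "my_image_postprocess_hook ;"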
diff --git a/meta/classes/ccache.bbclass b/meta/classes/ccache.bbclass
index 34becb69d1..262db6672c 100644
--- a/meta/classes/ccache.bbclass
+++ b/meta/classes/ccache.bbclass
@@ -28,6 +28,11 @@
# be shared between different builds.
CCACHE_TOP_DIR ?= "${TMPDIR}/ccache"
+# ccache-native and cmake-native have a circular dependency that
+# affects some (but not all) other native recipes, so ccache is not
+# enabled for native recipes by default. This variable lists native
+# recipes for which ccache may still be used.
+CCACHE_NATIVE_RECIPES_ALLOWED ?= ""
+
# ccache removes CCACHE_BASEDIR from file path, so that hashes will be the same
# in different builds.
export CCACHE_BASEDIR ?= "${TMPDIR}"
@@ -54,9 +59,9 @@ python() {
Enable ccache for the recipe
"""
pn = d.getVar('PN')
- # quilt-native doesn't need ccache since no c files
- if not (bb.data.inherits_class("native", d) or
- bb.utils.to_boolean(d.getVar('CCACHE_DISABLE'))):
+ if (pn in d.getVar('CCACHE_NATIVE_RECIPES_ALLOWED') or
+ not (bb.data.inherits_class("native", d) or
+ bb.utils.to_boolean(d.getVar('CCACHE_DISABLE')))):
d.appendVar('DEPENDS', ' ccache-native')
d.setVar('CCACHE', 'ccache ')
}
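With the new CCACHE_NATIVE_RECIPES_ALLOWED variable, ccache can be opted back in for selected native recipes from local.conf or a distro configuration. A sketch, with recipe names that are only examples:

    # allow ccache for these native recipes despite the general native exclusion
    CCACHE_NATIVE_RECIPES_ALLOWED = "quilt-native m4-native"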
diff --git a/meta/classes/chrpath.bbclass b/meta/classes/chrpath.bbclass
index 1aecb4df66..16729dcf61 100644
--- a/meta/classes/chrpath.bbclass
+++ b/meta/classes/chrpath.bbclass
@@ -62,7 +62,7 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlin
def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks = False):
import subprocess as sub
- p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', fpath],stdout=sub.PIPE,stderr=sub.PIPE)
+ p = sub.Popen([d.expand("${HOST_PREFIX}otool"), '-L', fpath],stdout=sub.PIPE,stderr=sub.PIPE, text=True)
out, err = p.communicate()
# If returned successfully, process stdout for results
if p.returncode != 0:
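The added text=True opens the pipes in text mode, so communicate() returns str objects and the otool output can be parsed without an explicit decode. A small standalone Python sketch of the behaviour:

    import subprocess

    p = subprocess.Popen(["echo", "hello"], stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, text=True)
    out, err = p.communicate()
    # out is the str "hello\n"; without text=True it would be the bytes
    # b"hello\n" and would need out.decode() before any string handling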
diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass
new file mode 100644
index 0000000000..7c8a0b8b0f
--- /dev/null
+++ b/meta/classes/create-spdx-2.2.bbclass
@@ -0,0 +1,1164 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx"
+
+# The product name that the CVE database uses. Defaults to BPN, but may need to
+# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
+CVE_PRODUCT ??= "${BPN}"
+CVE_VERSION ??= "${PV}"
+
+SPDXDIR ??= "${WORKDIR}/spdx"
+SPDXDEPLOY = "${SPDXDIR}/deploy"
+SPDXWORK = "${SPDXDIR}/work"
+SPDXIMAGEWORK = "${SPDXDIR}/image-work"
+SPDXSDKWORK = "${SPDXDIR}/sdk-work"
+SPDXDEPS = "${SPDXDIR}/deps.json"
+
+SPDX_TOOL_NAME ??= "oe-spdx-creator"
+SPDX_TOOL_VERSION ??= "1.0"
+
+SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
+
+SPDX_INCLUDE_SOURCES ??= "0"
+SPDX_ARCHIVE_SOURCES ??= "0"
+SPDX_ARCHIVE_PACKAGED ??= "0"
+
+SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
+SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdocs"
+SPDX_PRETTY ??= "0"
+
+SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
+
+SPDX_CUSTOM_ANNOTATION_VARS ??= ""
+
+SPDX_ORG ??= "OpenEmbedded ()"
+SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
+SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
+    this recipe. For SPDX documents created using this class during the build, this \
+ is the contact information for the person or organization who is doing the \
+ build."
+
+def extract_licenses(filename):
+ import re
+
+ lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
+
+ try:
+ with open(filename, 'rb') as f:
+ size = min(15000, os.stat(filename).st_size)
+ txt = f.read(size)
+ licenses = re.findall(lic_regex, txt)
+ if licenses:
+ ascii_licenses = [lic.decode('ascii') for lic in licenses]
+ return ascii_licenses
+ except Exception as e:
+ bb.warn(f"Exception reading {filename}: {e}")
+ return None
+
+def get_doc_namespace(d, doc):
+ import uuid
+ namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
+ return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(uuid.uuid5(namespace_uuid, doc.name)))
+
+def create_annotation(d, comment):
+ from datetime import datetime, timezone
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+ annotation = oe.spdx.SPDXAnnotation()
+ annotation.annotationDate = creation_time
+ annotation.annotationType = "OTHER"
+ annotation.annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
+ annotation.comment = comment
+ return annotation
+
+def recipe_spdx_is_native(d, recipe):
+ return any(a.annotationType == "OTHER" and
+ a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
+ a.comment == "isNative" for a in recipe.annotations)
+
+def is_work_shared_spdx(d):
+ return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
+
+def get_json_indent(d):
+ if d.getVar("SPDX_PRETTY") == "1":
+ return 2
+ return None
+
+python() {
+ import json
+ if d.getVar("SPDX_LICENSE_DATA"):
+ return
+
+ with open(d.getVar("SPDX_LICENSES"), "r") as f:
+ data = json.load(f)
+ # Transform the license array to a dictionary
+ data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
+ d.setVar("SPDX_LICENSE_DATA", data)
+}
+
+def convert_license_to_spdx(lic, document, d, existing={}):
+ from pathlib import Path
+ import oe.spdx
+
+ license_data = d.getVar("SPDX_LICENSE_DATA")
+ extracted = {}
+
+ def add_extracted_license(ident, name):
+ nonlocal document
+
+ if name in extracted:
+ return
+
+ extracted_info = oe.spdx.SPDXExtractedLicensingInfo()
+ extracted_info.name = name
+ extracted_info.licenseId = ident
+ extracted_info.extractedText = None
+
+ if name == "PD":
+ # Special-case this.
+ extracted_info.extractedText = "Software released to the public domain"
+ else:
+            # Search for the license in COMMON_LICENSE_DIR and LICENSE_PATH
+ for directory in [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or '').split():
+ try:
+ with (Path(directory) / name).open(errors="replace") as f:
+ extracted_info.extractedText = f.read()
+ break
+ except FileNotFoundError:
+ pass
+ if extracted_info.extractedText is None:
+ # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
+ filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
+ if filename:
+ filename = d.expand("${S}/" + filename)
+ with open(filename, errors="replace") as f:
+ extracted_info.extractedText = f.read()
+ else:
+ bb.fatal("Cannot find any text for license %s" % name)
+
+ extracted[name] = extracted_info
+ document.hasExtractedLicensingInfos.append(extracted_info)
+
+ def convert(l):
+ if l == "(" or l == ")":
+ return l
+
+ if l == "&":
+ return "AND"
+
+ if l == "|":
+ return "OR"
+
+ if l == "CLOSED":
+ return "NONE"
+
+ spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
+ if spdx_license in license_data["licenses"]:
+ return spdx_license
+
+ try:
+ spdx_license = existing[l]
+ except KeyError:
+ spdx_license = "LicenseRef-" + l
+ add_extracted_license(spdx_license, l)
+
+ return spdx_license
+
+ lic_split = lic.replace("(", " ( ").replace(")", " ) ").replace("|", " | ").replace("&", " & ").split()
+
+ return ' '.join(convert(l) for l in lic_split)
+
+def process_sources(d):
+ pn = d.getVar('PN')
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+
+ # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+ # so avoid archiving source here.
+ if pn.startswith('glibc-locale'):
+ return False
+ if d.getVar('PN') == "libtool-cross":
+ return False
+ if d.getVar('PN') == "libgcc-initial":
+ return False
+ if d.getVar('PN') == "shadow-sysroot":
+ return False
+
+ # We just archive gcc-source for all the gcc related recipes
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+        bb.debug(1, 'spdx: There is a bug in the scan of %s, do nothing' % pn)
+ return False
+
+ return True
+
+
+def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
+ from pathlib import Path
+ import oe.spdx
+ import hashlib
+
+ source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
+ if source_date_epoch:
+ source_date_epoch = int(source_date_epoch)
+
+ sha1s = []
+ spdx_files = []
+
+ file_counter = 1
+ for subdir, dirs, files in os.walk(topdir):
+ dirs[:] = [d for d in dirs if d not in ignore_dirs]
+ if subdir == str(topdir):
+ dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
+
+ for file in files:
+ filepath = Path(subdir) / file
+ filename = str(filepath.relative_to(topdir))
+
+ if not filepath.is_symlink() and filepath.is_file():
+ spdx_file = oe.spdx.SPDXFile()
+ spdx_file.SPDXID = get_spdxid(file_counter)
+ for t in get_types(filepath):
+ spdx_file.fileTypes.append(t)
+ spdx_file.fileName = filename
+
+ if archive is not None:
+ with filepath.open("rb") as f:
+ info = archive.gettarinfo(fileobj=f)
+ info.name = filename
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ if source_date_epoch is not None and info.mtime > source_date_epoch:
+ info.mtime = source_date_epoch
+
+ archive.addfile(info, f)
+
+ sha1 = bb.utils.sha1_file(filepath)
+ sha1s.append(sha1)
+ spdx_file.checksums.append(oe.spdx.SPDXChecksum(
+ algorithm="SHA1",
+ checksumValue=sha1,
+ ))
+ spdx_file.checksums.append(oe.spdx.SPDXChecksum(
+ algorithm="SHA256",
+ checksumValue=bb.utils.sha256_file(filepath),
+ ))
+
+ if "SOURCE" in spdx_file.fileTypes:
+ extracted_lics = extract_licenses(filepath)
+ if extracted_lics:
+ spdx_file.licenseInfoInFiles = extracted_lics
+
+ doc.files.append(spdx_file)
+ doc.add_relationship(spdx_pkg, "CONTAINS", spdx_file)
+ spdx_pkg.hasFiles.append(spdx_file.SPDXID)
+
+ spdx_files.append(spdx_file)
+
+ file_counter += 1
+
+ sha1s.sort()
+ verifier = hashlib.sha1()
+ for v in sha1s:
+ verifier.update(v.encode("utf-8"))
+ spdx_pkg.packageVerificationCode.packageVerificationCodeValue = verifier.hexdigest()
+
+ return spdx_files
+
+
+def add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources):
+ from pathlib import Path
+ import hashlib
+ import oe.packagedata
+ import oe.spdx
+
+ debug_search_paths = [
+ Path(d.getVar('PKGD')),
+ Path(d.getVar('STAGING_DIR_TARGET')),
+ Path(d.getVar('STAGING_DIR_NATIVE')),
+ Path(d.getVar('STAGING_KERNEL_DIR')),
+ ]
+
+ pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
+
+ if pkg_data is None:
+ return
+
+ for file_path, file_data in pkg_data["files_info"].items():
+ if not "debugsrc" in file_data:
+ continue
+
+ for pkg_file in package_files:
+ if file_path.lstrip("/") == pkg_file.fileName.lstrip("/"):
+ break
+ else:
+ bb.fatal("No package file found for %s in %s; SPDX found: %s" % (str(file_path), package,
+ " ".join(p.fileName for p in package_files)))
+ continue
+
+ for debugsrc in file_data["debugsrc"]:
+ ref_id = "NOASSERTION"
+ for search in debug_search_paths:
+ if debugsrc.startswith("/usr/src/kernel"):
+ debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
+ else:
+ debugsrc_path = search / debugsrc.lstrip("/")
+ if not debugsrc_path.exists():
+ continue
+
+ file_sha256 = bb.utils.sha256_file(debugsrc_path)
+
+ if file_sha256 in sources:
+ source_file = sources[file_sha256]
+
+ doc_ref = package_doc.find_external_document_ref(source_file.doc.documentNamespace)
+ if doc_ref is None:
+ doc_ref = oe.spdx.SPDXExternalDocumentRef()
+ doc_ref.externalDocumentId = "DocumentRef-dependency-" + source_file.doc.name
+ doc_ref.spdxDocument = source_file.doc.documentNamespace
+ doc_ref.checksum.algorithm = "SHA1"
+ doc_ref.checksum.checksumValue = source_file.doc_sha1
+ package_doc.externalDocumentRefs.append(doc_ref)
+
+ ref_id = "%s:%s" % (doc_ref.externalDocumentId, source_file.file.SPDXID)
+ else:
+ bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
+ break
+ else:
+ bb.debug(1, "Debug source %s not found" % debugsrc)
+
+ package_doc.add_relationship(pkg_file, "GENERATED_FROM", ref_id, comment=debugsrc)
+
+add_package_sources_from_debug[vardepsexclude] += "STAGING_KERNEL_DIR"
+
+def collect_dep_recipes(d, doc, spdx_recipe):
+ import json
+ from pathlib import Path
+ import oe.sbom
+ import oe.spdx
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+ package_archs = d.getVar("SSTATE_ARCHS").split()
+ package_archs.reverse()
+
+ dep_recipes = []
+
+ with spdx_deps_file.open("r") as f:
+ deps = json.load(f)
+
+ for dep_pn, dep_hashfn, in_taskhash in deps:
+ # If this dependency is not calculated in the taskhash skip it.
+ # Otherwise, it can result in broken links since this task won't
+ # rebuild and see the new SPDX ID if the dependency changes
+ if not in_taskhash:
+ continue
+
+ dep_recipe_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "recipe-" + dep_pn, dep_hashfn)
+ if not dep_recipe_path:
+ bb.fatal("Cannot find any SPDX file for recipe %s, %s" % (dep_pn, dep_hashfn))
+
+ spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)
+
+ for pkg in spdx_dep_doc.packages:
+ if pkg.name == dep_pn:
+ spdx_dep_recipe = pkg
+ break
+ else:
+ continue
+
+ dep_recipes.append(oe.sbom.DepRecipe(spdx_dep_doc, spdx_dep_sha1, spdx_dep_recipe))
+
+ dep_recipe_ref = oe.spdx.SPDXExternalDocumentRef()
+ dep_recipe_ref.externalDocumentId = "DocumentRef-dependency-" + spdx_dep_doc.name
+ dep_recipe_ref.spdxDocument = spdx_dep_doc.documentNamespace
+ dep_recipe_ref.checksum.algorithm = "SHA1"
+ dep_recipe_ref.checksum.checksumValue = spdx_dep_sha1
+
+ doc.externalDocumentRefs.append(dep_recipe_ref)
+
+ doc.add_relationship(
+ "%s:%s" % (dep_recipe_ref.externalDocumentId, spdx_dep_recipe.SPDXID),
+ "BUILD_DEPENDENCY_OF",
+ spdx_recipe
+ )
+
+ return dep_recipes
+
+collect_dep_recipes[vardepsexclude] = "SSTATE_ARCHS"
+
+def collect_dep_sources(d, dep_recipes):
+ import oe.sbom
+
+ sources = {}
+ for dep in dep_recipes:
+ # Don't collect sources from native recipes as they
+ # match non-native sources also.
+ if recipe_spdx_is_native(d, dep.recipe):
+ continue
+ recipe_files = set(dep.recipe.hasFiles)
+
+ for spdx_file in dep.doc.files:
+ if spdx_file.SPDXID not in recipe_files:
+ continue
+
+ if "SOURCE" in spdx_file.fileTypes:
+ for checksum in spdx_file.checksums:
+ if checksum.algorithm == "SHA256":
+ sources[checksum.checksumValue] = oe.sbom.DepSource(dep.doc, dep.doc_sha1, dep.recipe, spdx_file)
+ break
+
+ return sources
+
+def add_download_packages(d, doc, recipe):
+ import os.path
+ from bb.fetch2 import decodeurl, CHECKSUM_LIST
+ import bb.process
+ import oe.spdx
+ import oe.sbom
+
+ for download_idx, src_uri in enumerate(d.getVar('SRC_URI').split()):
+ f = bb.fetch2.FetchData(src_uri, d)
+
+ for name in f.names:
+ package = oe.spdx.SPDXPackage()
+ package.name = "%s-source-%d" % (d.getVar("PN"), download_idx + 1)
+ package.SPDXID = oe.sbom.get_download_spdxid(d, download_idx + 1)
+
+ if f.type == "file":
+ continue
+
+ uri = f.type
+ proto = getattr(f, "proto", None)
+ if proto is not None:
+ uri = uri + "+" + proto
+ uri = uri + "://" + f.host + f.path
+
+ if f.method.supports_srcrev():
+ uri = uri + "@" + f.revisions[name]
+
+ if f.method.supports_checksum(f):
+ for checksum_id in CHECKSUM_LIST:
+ if checksum_id.upper() not in oe.spdx.SPDXPackage.ALLOWED_CHECKSUMS:
+ continue
+
+ expected_checksum = getattr(f, "%s_expected" % checksum_id)
+ if expected_checksum is None:
+ continue
+
+ c = oe.spdx.SPDXChecksum()
+ c.algorithm = checksum_id.upper()
+ c.checksumValue = expected_checksum
+ package.checksums.append(c)
+
+ package.downloadLocation = uri
+ doc.packages.append(package)
+ doc.add_relationship(doc, "DESCRIBES", package)
+ # In the future, we might be able to do more fancy dependencies,
+ # but this should be sufficient for now
+ doc.add_relationship(package, "BUILD_DEPENDENCY_OF", recipe)
+
+def collect_direct_deps(d, dep_task):
+ current_task = "do_" + d.getVar("BB_CURRENTTASK")
+ pn = d.getVar("PN")
+
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+
+ for this_dep in taskdepdata.values():
+ if this_dep[0] == pn and this_dep[1] == current_task:
+ break
+ else:
+ bb.fatal(f"Unable to find this {pn}:{current_task} in taskdepdata")
+
+ deps = set()
+ for dep_name in this_dep[3]:
+ dep_data = taskdepdata[dep_name]
+ if dep_data[1] == dep_task and dep_data[0] != pn:
+ deps.add((dep_data[0], dep_data[7], dep_name in this_dep[8]))
+
+ return sorted(deps)
+
+collect_direct_deps[vardepsexclude] += "BB_TASKDEPDATA"
+collect_direct_deps[vardeps] += "DEPENDS"
+
+python do_collect_spdx_deps() {
+ # This task calculates the build time dependencies of the recipe, and is
+ # required because while a task can deptask on itself, those dependencies
+ # do not show up in BB_TASKDEPDATA. To work around that, this task does the
+ # deptask on do_create_spdx and writes out the dependencies it finds, then
+ # do_create_spdx reads in the found dependencies when writing the actual
+ # SPDX document
+ import json
+ from pathlib import Path
+
+ spdx_deps_file = Path(d.getVar("SPDXDEPS"))
+
+ deps = collect_direct_deps(d, "do_create_spdx")
+
+ with spdx_deps_file.open("w") as f:
+ json.dump(deps, f)
+}
+# NOTE: depending on do_unpack is a hack that is necessary to get its dependencies in order to archive the source
+addtask do_collect_spdx_deps after do_unpack
+do_collect_spdx_deps[depends] += "${PATCHDEPENDENCY}"
+do_collect_spdx_deps[deptask] = "do_create_spdx"
+do_collect_spdx_deps[dirs] = "${SPDXDIR}"
+
+python do_create_spdx() {
+ from datetime import datetime, timezone
+ import oe.sbom
+ import oe.spdx
+ import uuid
+ from pathlib import Path
+ from contextlib import contextmanager
+ import oe.cve_check
+
+ @contextmanager
+ def optional_tarfile(name, guard, mode="w"):
+ import tarfile
+ import bb.compress.zstd
+
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+
+ if guard:
+ name.parent.mkdir(parents=True, exist_ok=True)
+ with bb.compress.zstd.open(name, mode=mode + "b", num_threads=num_threads) as f:
+ with tarfile.open(fileobj=f, mode=mode + "|") as tf:
+ yield tf
+ else:
+ yield None
+
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ spdx_workdir = Path(d.getVar("SPDXWORK"))
+ include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
+ archive_sources = d.getVar("SPDX_ARCHIVE_SOURCES") == "1"
+ archive_packaged = d.getVar("SPDX_ARCHIVE_PACKAGED") == "1"
+ pkg_arch = d.getVar("SSTATE_PKGARCH")
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+ doc = oe.spdx.SPDXDocument()
+
+ doc.name = "recipe-" + d.getVar("PN")
+ doc.documentNamespace = get_doc_namespace(d, doc)
+ doc.creationInfo.created = creation_time
+ doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
+ doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
+ doc.creationInfo.creators.append("Person: N/A ()")
+
+ recipe = oe.spdx.SPDXPackage()
+ recipe.name = d.getVar("PN")
+ recipe.versionInfo = d.getVar("PV")
+ recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
+ recipe.supplier = d.getVar("SPDX_SUPPLIER")
+ if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
+ recipe.annotations.append(create_annotation(d, "isNative"))
+
+ homepage = d.getVar("HOMEPAGE")
+ if homepage:
+ recipe.homepage = homepage
+
+ license = d.getVar("LICENSE")
+ if license:
+ recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)
+
+ summary = d.getVar("SUMMARY")
+ if summary:
+ recipe.summary = summary
+
+ description = d.getVar("DESCRIPTION")
+ if description:
+ recipe.description = description
+
+ if d.getVar("SPDX_CUSTOM_ANNOTATION_VARS"):
+ for var in d.getVar('SPDX_CUSTOM_ANNOTATION_VARS').split():
+ recipe.annotations.append(create_annotation(d, var + "=" + d.getVar(var)))
+
+ # Some CVEs may be patched during the build process without incrementing the version number,
+ # so querying for CVEs based on the CPE id can lead to false positives. To account for this,
+    # save the CVEs fixed by patches to the source information field of the SPDX document.
+ patched_cves = oe.cve_check.get_patched_cves(d)
+ patched_cves = list(patched_cves)
+ patched_cves = ' '.join(patched_cves)
+ if patched_cves:
+ recipe.sourceInfo = "CVEs fixed: " + patched_cves
+
+ cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
+ if cpe_ids:
+ for cpe_id in cpe_ids:
+ cpe = oe.spdx.SPDXExternalReference()
+ cpe.referenceCategory = "SECURITY"
+ cpe.referenceType = "http://spdx.org/rdf/references/cpe23Type"
+ cpe.referenceLocator = cpe_id
+ recipe.externalRefs.append(cpe)
+
+ doc.packages.append(recipe)
+ doc.add_relationship(doc, "DESCRIBES", recipe)
+
+ add_download_packages(d, doc, recipe)
+
+ if process_sources(d) and include_sources:
+ recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
+ with optional_tarfile(recipe_archive, archive_sources) as archive:
+ spdx_get_src(d)
+
+ add_package_files(
+ d,
+ doc,
+ recipe,
+ spdx_workdir,
+ lambda file_counter: "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), file_counter),
+ lambda filepath: ["SOURCE"],
+ ignore_dirs=[".git"],
+ ignore_top_level_dirs=["temp"],
+ archive=archive,
+ )
+
+ if archive is not None:
+ recipe.packageFileName = str(recipe_archive.name)
+
+ dep_recipes = collect_dep_recipes(d, doc, recipe)
+
+ doc_sha1 = oe.sbom.write_doc(d, doc, pkg_arch, "recipes", indent=get_json_indent(d))
+ dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))
+
+ recipe_ref = oe.spdx.SPDXExternalDocumentRef()
+ recipe_ref.externalDocumentId = "DocumentRef-recipe-" + recipe.name
+ recipe_ref.spdxDocument = doc.documentNamespace
+ recipe_ref.checksum.algorithm = "SHA1"
+ recipe_ref.checksum.checksumValue = doc_sha1
+
+ sources = collect_dep_sources(d, dep_recipes)
+ found_licenses = {license.name:recipe_ref.externalDocumentId + ":" + license.licenseId for license in doc.hasExtractedLicensingInfos}
+
+ if not recipe_spdx_is_native(d, recipe):
+ bb.build.exec_func("read_subpackage_metadata", d)
+
+ pkgdest = Path(d.getVar("PKGDEST"))
+ for package in d.getVar("PACKAGES").split():
+ if not oe.packagedata.packaged(package, d):
+ continue
+
+ package_doc = oe.spdx.SPDXDocument()
+ pkg_name = d.getVar("PKG:%s" % package) or package
+ package_doc.name = pkg_name
+ package_doc.documentNamespace = get_doc_namespace(d, package_doc)
+ package_doc.creationInfo.created = creation_time
+ package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
+ package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ package_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
+ package_doc.creationInfo.creators.append("Person: N/A ()")
+ package_doc.externalDocumentRefs.append(recipe_ref)
+
+ package_license = d.getVar("LICENSE:%s" % package) or d.getVar("LICENSE")
+
+ spdx_package = oe.spdx.SPDXPackage()
+
+ spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
+ spdx_package.name = pkg_name
+ spdx_package.versionInfo = d.getVar("PV")
+ spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
+ spdx_package.supplier = d.getVar("SPDX_SUPPLIER")
+
+ package_doc.packages.append(spdx_package)
+
+ package_doc.add_relationship(spdx_package, "GENERATED_FROM", "%s:%s" % (recipe_ref.externalDocumentId, recipe.SPDXID))
+ package_doc.add_relationship(package_doc, "DESCRIBES", spdx_package)
+
+ package_archive = deploy_dir_spdx / "packages" / (package_doc.name + ".tar.zst")
+ with optional_tarfile(package_archive, archive_packaged) as archive:
+ package_files = add_package_files(
+ d,
+ package_doc,
+ spdx_package,
+ pkgdest / package,
+ lambda file_counter: oe.sbom.get_packaged_file_spdxid(pkg_name, file_counter),
+ lambda filepath: ["BINARY"],
+ ignore_top_level_dirs=['CONTROL', 'DEBIAN'],
+ archive=archive,
+ )
+
+ if archive is not None:
+ spdx_package.packageFileName = str(package_archive.name)
+
+ add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)
+
+ oe.sbom.write_doc(d, package_doc, pkg_arch, "packages", indent=get_json_indent(d))
+}
+do_create_spdx[vardepsexclude] += "BB_NUMBER_THREADS"
+# NOTE: depending on do_unpack is a hack that is necessary to get its dependencies in order to archive the source
+addtask do_create_spdx after do_package do_packagedata do_unpack do_collect_spdx_deps before do_populate_sdk do_build do_rm_work
+
+SSTATETASKS += "do_create_spdx"
+do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
+do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_spdx_setscene () {
+ sstate_setscene(d)
+}
+addtask do_create_spdx_setscene
+
+do_create_spdx[dirs] = "${SPDXWORK}"
+do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
+do_create_spdx[depends] += "${PATCHDEPENDENCY}"
+
+def collect_package_providers(d):
+ from pathlib import Path
+ import oe.sbom
+ import oe.spdx
+ import json
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+ providers = {}
+
+ deps = collect_direct_deps(d, "do_create_spdx")
+ deps.append((d.getVar("PN"), d.getVar("BB_HASHFILENAME"), True))
+
+ for dep_pn, dep_hashfn, _ in deps:
+ localdata = d
+ recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+ if not recipe_data:
+ localdata = bb.data.createCopy(d)
+ localdata.setVar("PKGDATA_DIR", "${PKGDATA_DIR_SDK}")
+ recipe_data = oe.packagedata.read_pkgdata(dep_pn, localdata)
+
+ for pkg in recipe_data.get("PACKAGES", "").split():
+
+ pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, localdata)
+ rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
+ rprovides.add(pkg)
+
+ if "PKG" in pkg_data:
+ pkg = pkg_data["PKG"]
+ rprovides.add(pkg)
+
+ for r in rprovides:
+ providers[r] = (pkg, dep_hashfn)
+
+ return providers
+
+collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
+
+python do_create_runtime_spdx() {
+ from datetime import datetime, timezone
+ import oe.sbom
+ import oe.spdx
+ import oe.packagedata
+ from pathlib import Path
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ spdx_deploy = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
+ is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+ providers = collect_package_providers(d)
+ pkg_arch = d.getVar("SSTATE_PKGARCH")
+ package_archs = d.getVar("SSTATE_ARCHS").split()
+ package_archs.reverse()
+
+ if not is_native:
+ bb.build.exec_func("read_subpackage_metadata", d)
+
+ dep_package_cache = {}
+
+ pkgdest = Path(d.getVar("PKGDEST"))
+ for package in d.getVar("PACKAGES").split():
+ localdata = bb.data.createCopy(d)
+ pkg_name = d.getVar("PKG:%s" % package) or package
+ localdata.setVar("PKG", pkg_name)
+ localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)
+
+ if not oe.packagedata.packaged(package, localdata):
+ continue
+
+ pkg_spdx_path = oe.sbom.doc_path(deploy_dir_spdx, pkg_name, pkg_arch, "packages")
+
+ package_doc, package_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
+
+ for p in package_doc.packages:
+ if p.name == pkg_name:
+ spdx_package = p
+ break
+ else:
+ bb.fatal("Package '%s' not found in %s" % (pkg_name, pkg_spdx_path))
+
+ runtime_doc = oe.spdx.SPDXDocument()
+ runtime_doc.name = "runtime-" + pkg_name
+ runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
+ runtime_doc.creationInfo.created = creation_time
+ runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
+ runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ runtime_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
+ runtime_doc.creationInfo.creators.append("Person: N/A ()")
+
+ package_ref = oe.spdx.SPDXExternalDocumentRef()
+ package_ref.externalDocumentId = "DocumentRef-package-" + package
+ package_ref.spdxDocument = package_doc.documentNamespace
+ package_ref.checksum.algorithm = "SHA1"
+ package_ref.checksum.checksumValue = package_doc_sha1
+
+ runtime_doc.externalDocumentRefs.append(package_ref)
+
+ runtime_doc.add_relationship(
+ runtime_doc.SPDXID,
+ "AMENDS",
+ "%s:%s" % (package_ref.externalDocumentId, package_doc.SPDXID)
+ )
+
+ deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
+ seen_deps = set()
+ for dep, _ in deps.items():
+ if dep in seen_deps:
+ continue
+
+ if dep not in providers:
+ continue
+
+ (dep, dep_hashfn) = providers[dep]
+
+ if not oe.packagedata.packaged(dep, localdata):
+ continue
+
+ dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
+ dep_pkg = dep_pkg_data["PKG"]
+
+ if dep in dep_package_cache:
+ (dep_spdx_package, dep_package_ref) = dep_package_cache[dep]
+ else:
+ dep_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, dep_pkg, dep_hashfn)
+ if not dep_path:
+ bb.fatal("No SPDX file found for package %s, %s" % (dep_pkg, dep_hashfn))
+
+ spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_path)
+
+ for pkg in spdx_dep_doc.packages:
+ if pkg.name == dep_pkg:
+ dep_spdx_package = pkg
+ break
+ else:
+ bb.fatal("Package '%s' not found in %s" % (dep_pkg, dep_path))
+
+ dep_package_ref = oe.spdx.SPDXExternalDocumentRef()
+ dep_package_ref.externalDocumentId = "DocumentRef-runtime-dependency-" + spdx_dep_doc.name
+ dep_package_ref.spdxDocument = spdx_dep_doc.documentNamespace
+ dep_package_ref.checksum.algorithm = "SHA1"
+ dep_package_ref.checksum.checksumValue = spdx_dep_sha1
+
+ dep_package_cache[dep] = (dep_spdx_package, dep_package_ref)
+
+ runtime_doc.externalDocumentRefs.append(dep_package_ref)
+
+ runtime_doc.add_relationship(
+ "%s:%s" % (dep_package_ref.externalDocumentId, dep_spdx_package.SPDXID),
+ "RUNTIME_DEPENDENCY_OF",
+ "%s:%s" % (package_ref.externalDocumentId, spdx_package.SPDXID)
+ )
+ seen_deps.add(dep)
+
+ oe.sbom.write_doc(d, runtime_doc, pkg_arch, "runtime", spdx_deploy, indent=get_json_indent(d))
+}
+
+do_create_runtime_spdx[vardepsexclude] += "OVERRIDES SSTATE_ARCHS"
+
+addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
+SSTATETASKS += "do_create_runtime_spdx"
+do_create_runtime_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_runtime_spdx_setscene () {
+ sstate_setscene(d)
+}
+addtask do_create_runtime_spdx_setscene
+
+do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[rdeptask] = "do_create_spdx"
+
+def spdx_get_src(d):
+ """
+    Save the patched source of the recipe in SPDXWORK.
+ """
+ import shutil
+ spdx_workdir = d.getVar('SPDXWORK')
+ spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
+ pn = d.getVar('PN')
+
+ workdir = d.getVar("WORKDIR")
+
+ try:
+        # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
+ if not is_work_shared_spdx(d):
+ # Change the WORKDIR to make do_unpack do_patch run in another dir.
+ d.setVar('WORKDIR', spdx_workdir)
+ # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
+ d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+
+            # Changing 'WORKDIR' also changes 'B', so create the 'B' directory in
+            # case any of the following tasks need it to exist (for example, some
+            # recipes' do_patch expects 'B' to be present).
+ bb.utils.mkdirhier(d.getVar('B'))
+
+ bb.build.exec_func('do_unpack', d)
+ # Copy source of kernel to spdx_workdir
+ if is_work_shared_spdx(d):
+ share_src = d.getVar('WORKDIR')
+ d.setVar('WORKDIR', spdx_workdir)
+ d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+ src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
+ bb.utils.mkdirhier(src_dir)
+ if bb.data.inherits_class('kernel',d):
+ share_src = d.getVar('STAGING_KERNEL_DIR')
+ cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+ cmd_copy_shared_res = os.popen(cmd_copy_share).read()
+ bb.note("cmd_copy_shared_result = " + cmd_copy_shared_res)
+
+ git_path = src_dir + "/.git"
+ if os.path.exists(git_path):
+                shutil.rmtree(git_path)
+
+ # Make sure gcc and kernel sources are patched only once
+ if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
+ bb.build.exec_func('do_patch', d)
+
+ # Some userland has no source.
+ if not os.path.exists( spdx_workdir ):
+ bb.utils.mkdirhier(spdx_workdir)
+ finally:
+ d.setVar("WORKDIR", workdir)
+
+spdx_get_src[vardepsexclude] += "STAGING_KERNEL_DIR"
+
+do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
+do_rootfs[cleandirs] += "${SPDXIMAGEWORK}"
+
+ROOTFS_POSTUNINSTALL_COMMAND =+ "image_combine_spdx"
+
+do_populate_sdk[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
+do_populate_sdk[cleandirs] += "${SPDXSDKWORK}"
+POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " sdk_host_combine_spdx"
+POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " sdk_target_combine_spdx"
+
+python image_combine_spdx() {
+ import os
+ import oe.sbom
+ from pathlib import Path
+ from oe.rootfs import image_list_installed_packages
+
+ image_name = d.getVar("IMAGE_NAME")
+ image_link_name = d.getVar("IMAGE_LINK_NAME")
+ imgdeploydir = Path(d.getVar("IMGDEPLOYDIR"))
+ img_spdxid = oe.sbom.get_image_spdxid(image_name)
+ packages = image_list_installed_packages(d)
+
+ combine_spdx(d, image_name, imgdeploydir, img_spdxid, packages, Path(d.getVar("SPDXIMAGEWORK")))
+
+ def make_image_link(target_path, suffix):
+ if image_link_name:
+ link = imgdeploydir / (image_link_name + suffix)
+ if link != target_path:
+ link.symlink_to(os.path.relpath(target_path, link.parent))
+
+ spdx_tar_path = imgdeploydir / (image_name + ".spdx.tar.zst")
+ make_image_link(spdx_tar_path, ".spdx.tar.zst")
+}
+
+python sdk_host_combine_spdx() {
+ sdk_combine_spdx(d, "host")
+}
+
+python sdk_target_combine_spdx() {
+ sdk_combine_spdx(d, "target")
+}
+
+def sdk_combine_spdx(d, sdk_type):
+ import oe.sbom
+ from pathlib import Path
+ from oe.sdk import sdk_list_installed_packages
+
+ sdk_name = d.getVar("TOOLCHAIN_OUTPUTNAME") + "-" + sdk_type
+ sdk_deploydir = Path(d.getVar("SDKDEPLOYDIR"))
+ sdk_spdxid = oe.sbom.get_sdk_spdxid(sdk_name)
+ sdk_packages = sdk_list_installed_packages(d, sdk_type == "target")
+ combine_spdx(d, sdk_name, sdk_deploydir, sdk_spdxid, sdk_packages, Path(d.getVar('SPDXSDKWORK')))
+
+def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages, spdx_workdir):
+ import os
+ import oe.spdx
+ import oe.sbom
+ import io
+ import json
+ from datetime import timezone, datetime
+ from pathlib import Path
+ import tarfile
+ import bb.compress.zstd
+
+ providers = collect_package_providers(d)
+ package_archs = d.getVar("SSTATE_ARCHS").split()
+ package_archs.reverse()
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
+
+ doc = oe.spdx.SPDXDocument()
+ doc.name = rootfs_name
+ doc.documentNamespace = get_doc_namespace(d, doc)
+ doc.creationInfo.created = creation_time
+ doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
+ doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
+ doc.creationInfo.creators.append("Person: N/A ()")
+
+ image = oe.spdx.SPDXPackage()
+ image.name = d.getVar("PN")
+ image.versionInfo = d.getVar("PV")
+ image.SPDXID = rootfs_spdxid
+ image.supplier = d.getVar("SPDX_SUPPLIER")
+
+ doc.packages.append(image)
+
+ for name in sorted(packages.keys()):
+ if name not in providers:
+ bb.fatal("Unable to find SPDX provider for '%s'" % name)
+
+ pkg_name, pkg_hashfn = providers[name]
+
+ pkg_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, pkg_name, pkg_hashfn)
+ if not pkg_spdx_path:
+ bb.fatal("No SPDX file found for package %s, %s" % (pkg_name, pkg_hashfn))
+
+ pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
+
+ for p in pkg_doc.packages:
+ if p.name == name:
+ pkg_ref = oe.spdx.SPDXExternalDocumentRef()
+ pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
+ pkg_ref.spdxDocument = pkg_doc.documentNamespace
+ pkg_ref.checksum.algorithm = "SHA1"
+ pkg_ref.checksum.checksumValue = pkg_doc_sha1
+
+ doc.externalDocumentRefs.append(pkg_ref)
+ doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
+ break
+ else:
+ bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))
+
+ runtime_spdx_path = oe.sbom.doc_find_by_hashfn(deploy_dir_spdx, package_archs, "runtime-" + name, pkg_hashfn)
+ if not runtime_spdx_path:
+ bb.fatal("No runtime SPDX document found for %s, %s" % (name, pkg_hashfn))
+
+ runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)
+
+ runtime_ref = oe.spdx.SPDXExternalDocumentRef()
+ runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
+ runtime_ref.spdxDocument = runtime_doc.documentNamespace
+ runtime_ref.checksum.algorithm = "SHA1"
+ runtime_ref.checksum.checksumValue = runtime_doc_sha1
+
+ # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
+ doc.externalDocumentRefs.append(runtime_ref)
+ doc.add_relationship(
+ image,
+ "OTHER",
+ "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
+ comment="Runtime dependencies for %s" % name
+ )
+ bb.utils.mkdirhier(spdx_workdir)
+ image_spdx_path = spdx_workdir / (rootfs_name + ".spdx.json")
+
+ with image_spdx_path.open("wb") as f:
+ doc.to_json(f, sort_keys=True, indent=get_json_indent(d))
+
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+
+ visited_docs = set()
+
+ index = {"documents": []}
+
+ spdx_tar_path = rootfs_deploydir / (rootfs_name + ".spdx.tar.zst")
+ with bb.compress.zstd.open(spdx_tar_path, "w", num_threads=num_threads) as f:
+ with tarfile.open(fileobj=f, mode="w|") as tar:
+ def collect_spdx_document(path):
+ nonlocal tar
+ nonlocal deploy_dir_spdx
+ nonlocal source_date_epoch
+ nonlocal index
+
+ if path in visited_docs:
+ return
+
+ visited_docs.add(path)
+
+ with path.open("rb") as f:
+ doc, sha1 = oe.sbom.read_doc(f)
+ f.seek(0)
+
+ if doc.documentNamespace in visited_docs:
+ return
+
+ bb.note("Adding SPDX document %s" % path)
+ visited_docs.add(doc.documentNamespace)
+ info = tar.gettarinfo(fileobj=f)
+
+ info.name = doc.name + ".spdx.json"
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ if source_date_epoch is not None and info.mtime > int(source_date_epoch):
+ info.mtime = int(source_date_epoch)
+
+ tar.addfile(info, f)
+
+ index["documents"].append({
+ "filename": info.name,
+ "documentNamespace": doc.documentNamespace,
+ "sha1": sha1,
+ })
+
+ for ref in doc.externalDocumentRefs:
+ ref_path = oe.sbom.doc_find_by_namespace(deploy_dir_spdx, package_archs, ref.spdxDocument)
+ if not ref_path:
+ bb.fatal("Cannot find any SPDX file for document %s" % ref.spdxDocument)
+ collect_spdx_document(ref_path)
+
+ collect_spdx_document(image_spdx_path)
+
+ index["documents"].sort(key=lambda x: x["filename"])
+
+ index_str = io.BytesIO(json.dumps(
+ index,
+ sort_keys=True,
+ indent=get_json_indent(d),
+ ).encode("utf-8"))
+
+ info = tarfile.TarInfo()
+ info.name = "index.json"
+ info.size = len(index_str.getvalue())
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ tar.addfile(info, fileobj=index_str)
+
+combine_spdx[vardepsexclude] += "BB_NUMBER_THREADS SSTATE_ARCHS"
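The new class is configured through the SPDX_* variables declared at its top. A local.conf sketch with illustrative values, assuming create-spdx remains the class name to inherit:

    INHERIT += "create-spdx"
    SPDX_PRETTY = "1"             # indent the generated JSON documents
    SPDX_INCLUDE_SOURCES = "1"    # describe source files in the recipe SBOM
    SPDX_ARCHIVE_SOURCES = "0"    # do not tar up the patched sources
    SPDX_ORG = "Example Org ()"   # recorded in creator and supplier fields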
diff --git a/meta/classes/create-spdx.bbclass b/meta/classes/create-spdx.bbclass
index 47dd12c383..19c6c0ff0b 100644
--- a/meta/classes/create-spdx.bbclass
+++ b/meta/classes/create-spdx.bbclass
@@ -3,1024 +3,6 @@
#
# SPDX-License-Identifier: GPL-2.0-only
#
-
-DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx/${MACHINE}"
-
-# The product name that the CVE database uses. Defaults to BPN, but may need to
-# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
-CVE_PRODUCT ??= "${BPN}"
-CVE_VERSION ??= "${PV}"
-
-SPDXDIR ??= "${WORKDIR}/spdx"
-SPDXDEPLOY = "${SPDXDIR}/deploy"
-SPDXWORK = "${SPDXDIR}/work"
-
-SPDX_TOOL_NAME ??= "oe-spdx-creator"
-SPDX_TOOL_VERSION ??= "1.0"
-
-SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
-
-SPDX_INCLUDE_SOURCES ??= "0"
-SPDX_INCLUDE_PACKAGED ??= "0"
-SPDX_ARCHIVE_SOURCES ??= "0"
-SPDX_ARCHIVE_PACKAGED ??= "0"
-
-SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
-SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdoc"
-SPDX_PRETTY ??= "0"
-
-SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
-
-SPDX_ORG ??= "OpenEmbedded ()"
-SPDX_SUPPLIER ??= "Organization: ${SPDX_ORG}"
-SPDX_SUPPLIER[doc] = "The SPDX PackageSupplier field for SPDX packages created from \
- this recipe. For SPDX documents create using this class during the build, this \
- is the contact information for the person or organization who is doing the \
- build."
-
-def extract_licenses(filename):
- import re
-
- lic_regex = re.compile(rb'^\W*SPDX-License-Identifier:\s*([ \w\d.()+-]+?)(?:\s+\W*)?$', re.MULTILINE)
-
- try:
- with open(filename, 'rb') as f:
- size = min(15000, os.stat(filename).st_size)
- txt = f.read(size)
- licenses = re.findall(lic_regex, txt)
- if licenses:
- ascii_licenses = [lic.decode('ascii') for lic in licenses]
- return ascii_licenses
- except Exception as e:
- bb.warn(f"Exception reading {filename}: {e}")
- return None
-
-def get_doc_namespace(d, doc):
- import uuid
- namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
- return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(uuid.uuid5(namespace_uuid, doc.name)))
-
-def create_annotation(d, comment):
- from datetime import datetime, timezone
-
- creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
- annotation = oe.spdx.SPDXAnnotation()
- annotation.annotationDate = creation_time
- annotation.annotationType = "OTHER"
- annotation.annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
- annotation.comment = comment
- return annotation
-
-def recipe_spdx_is_native(d, recipe):
- return any(a.annotationType == "OTHER" and
- a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
- a.comment == "isNative" for a in recipe.annotations)
-
-def is_work_shared_spdx(d):
- return bb.data.inherits_class('kernel', d) or ('work-shared' in d.getVar('WORKDIR'))
-
-def get_json_indent(d):
- if d.getVar("SPDX_PRETTY") == "1":
- return 2
- return None
-
-python() {
- import json
- if d.getVar("SPDX_LICENSE_DATA"):
- return
-
- with open(d.getVar("SPDX_LICENSES"), "r") as f:
- data = json.load(f)
- # Transform the license array to a dictionary
- data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
- d.setVar("SPDX_LICENSE_DATA", data)
-}
-
-def convert_license_to_spdx(lic, document, d, existing={}):
- from pathlib import Path
- import oe.spdx
-
- license_data = d.getVar("SPDX_LICENSE_DATA")
- extracted = {}
-
- def add_extracted_license(ident, name):
- nonlocal document
-
- if name in extracted:
- return
-
- extracted_info = oe.spdx.SPDXExtractedLicensingInfo()
- extracted_info.name = name
- extracted_info.licenseId = ident
- extracted_info.extractedText = None
-
- if name == "PD":
- # Special-case this.
- extracted_info.extractedText = "Software released to the public domain"
- else:
- # Seach for the license in COMMON_LICENSE_DIR and LICENSE_PATH
- for directory in [d.getVar('COMMON_LICENSE_DIR')] + (d.getVar('LICENSE_PATH') or '').split():
- try:
- with (Path(directory) / name).open(errors="replace") as f:
- extracted_info.extractedText = f.read()
- break
- except FileNotFoundError:
- pass
- if extracted_info.extractedText is None:
- # If it's not SPDX or PD, then NO_GENERIC_LICENSE must be set
- filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
- if filename:
- filename = d.expand("${S}/" + filename)
- with open(filename, errors="replace") as f:
- extracted_info.extractedText = f.read()
- else:
- bb.error("Cannot find any text for license %s" % name)
-
- extracted[name] = extracted_info
- document.hasExtractedLicensingInfos.append(extracted_info)
-
- def convert(l):
- if l == "(" or l == ")":
- return l
-
- if l == "&":
- return "AND"
-
- if l == "|":
- return "OR"
-
- if l == "CLOSED":
- return "NONE"
-
- spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
- if spdx_license in license_data["licenses"]:
- return spdx_license
-
- try:
- spdx_license = existing[l]
- except KeyError:
- spdx_license = "LicenseRef-" + l
- add_extracted_license(spdx_license, l)
-
- return spdx_license
-
- lic_split = lic.replace("(", " ( ").replace(")", " ) ").split()
-
- return ' '.join(convert(l) for l in lic_split)
-
-def process_sources(d):
- pn = d.getVar('PN')
- assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
- if pn in assume_provided:
- for p in d.getVar("PROVIDES").split():
- if p != pn:
- pn = p
- break
-
- # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
- # so avoid archiving source here.
- if pn.startswith('glibc-locale'):
- return False
- if d.getVar('PN') == "libtool-cross":
- return False
- if d.getVar('PN') == "libgcc-initial":
- return False
- if d.getVar('PN') == "shadow-sysroot":
- return False
-
- # We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
- return False
-
- return True
-
-
-def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
- from pathlib import Path
- import oe.spdx
- import hashlib
-
- source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
- if source_date_epoch:
- source_date_epoch = int(source_date_epoch)
-
- sha1s = []
- spdx_files = []
-
- file_counter = 1
- for subdir, dirs, files in os.walk(topdir):
- dirs[:] = [d for d in dirs if d not in ignore_dirs]
- if subdir == str(topdir):
- dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
-
- for file in files:
- filepath = Path(subdir) / file
- filename = str(filepath.relative_to(topdir))
-
- if not filepath.is_symlink() and filepath.is_file():
- spdx_file = oe.spdx.SPDXFile()
- spdx_file.SPDXID = get_spdxid(file_counter)
- for t in get_types(filepath):
- spdx_file.fileTypes.append(t)
- spdx_file.fileName = filename
-
- if archive is not None:
- with filepath.open("rb") as f:
- info = archive.gettarinfo(fileobj=f)
- info.name = filename
- info.uid = 0
- info.gid = 0
- info.uname = "root"
- info.gname = "root"
-
- if source_date_epoch is not None and info.mtime > source_date_epoch:
- info.mtime = source_date_epoch
-
- archive.addfile(info, f)
-
- sha1 = bb.utils.sha1_file(filepath)
- sha1s.append(sha1)
- spdx_file.checksums.append(oe.spdx.SPDXChecksum(
- algorithm="SHA1",
- checksumValue=sha1,
- ))
- spdx_file.checksums.append(oe.spdx.SPDXChecksum(
- algorithm="SHA256",
- checksumValue=bb.utils.sha256_file(filepath),
- ))
-
- if "SOURCE" in spdx_file.fileTypes:
- extracted_lics = extract_licenses(filepath)
- if extracted_lics:
- spdx_file.licenseInfoInFiles = extracted_lics
-
- doc.files.append(spdx_file)
- doc.add_relationship(spdx_pkg, "CONTAINS", spdx_file)
- spdx_pkg.hasFiles.append(spdx_file.SPDXID)
-
- spdx_files.append(spdx_file)
-
- file_counter += 1
-
- sha1s.sort()
- verifier = hashlib.sha1()
- for v in sha1s:
- verifier.update(v.encode("utf-8"))
- spdx_pkg.packageVerificationCode.packageVerificationCodeValue = verifier.hexdigest()
-
- return spdx_files
-
-
-def add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources):
- from pathlib import Path
- import hashlib
- import oe.packagedata
- import oe.spdx
-
- debug_search_paths = [
- Path(d.getVar('PKGD')),
- Path(d.getVar('STAGING_DIR_TARGET')),
- Path(d.getVar('STAGING_DIR_NATIVE')),
- Path(d.getVar('STAGING_KERNEL_DIR')),
- ]
-
- pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
-
- if pkg_data is None:
- return
-
- for file_path, file_data in pkg_data["files_info"].items():
- if not "debugsrc" in file_data:
- continue
-
- for pkg_file in package_files:
- if file_path.lstrip("/") == pkg_file.fileName.lstrip("/"):
- break
- else:
- bb.fatal("No package file found for %s" % str(file_path))
- continue
-
- for debugsrc in file_data["debugsrc"]:
- ref_id = "NOASSERTION"
- for search in debug_search_paths:
- if debugsrc.startswith("/usr/src/kernel"):
- debugsrc_path = search / debugsrc.replace('/usr/src/kernel/', '')
- else:
- debugsrc_path = search / debugsrc.lstrip("/")
- if not debugsrc_path.exists():
- continue
-
- file_sha256 = bb.utils.sha256_file(debugsrc_path)
-
- if file_sha256 in sources:
- source_file = sources[file_sha256]
-
- doc_ref = package_doc.find_external_document_ref(source_file.doc.documentNamespace)
- if doc_ref is None:
- doc_ref = oe.spdx.SPDXExternalDocumentRef()
- doc_ref.externalDocumentId = "DocumentRef-dependency-" + source_file.doc.name
- doc_ref.spdxDocument = source_file.doc.documentNamespace
- doc_ref.checksum.algorithm = "SHA1"
- doc_ref.checksum.checksumValue = source_file.doc_sha1
- package_doc.externalDocumentRefs.append(doc_ref)
-
- ref_id = "%s:%s" % (doc_ref.externalDocumentId, source_file.file.SPDXID)
- else:
- bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
- break
- else:
- bb.debug(1, "Debug source %s not found" % debugsrc)
-
- package_doc.add_relationship(pkg_file, "GENERATED_FROM", ref_id, comment=debugsrc)
-
-def collect_dep_recipes(d, doc, spdx_recipe):
- from pathlib import Path
- import oe.sbom
- import oe.spdx
-
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-
- dep_recipes = []
- taskdepdata = d.getVar("BB_TASKDEPDATA", False)
- deps = sorted(set(
- dep[0] for dep in taskdepdata.values() if
- dep[1] == "do_create_spdx" and dep[0] != d.getVar("PN")
- ))
- for dep_pn in deps:
- dep_recipe_path = deploy_dir_spdx / "recipes" / ("recipe-%s.spdx.json" % dep_pn)
-
- spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)
-
- for pkg in spdx_dep_doc.packages:
- if pkg.name == dep_pn:
- spdx_dep_recipe = pkg
- break
- else:
- continue
-
- dep_recipes.append(oe.sbom.DepRecipe(spdx_dep_doc, spdx_dep_sha1, spdx_dep_recipe))
-
- dep_recipe_ref = oe.spdx.SPDXExternalDocumentRef()
- dep_recipe_ref.externalDocumentId = "DocumentRef-dependency-" + spdx_dep_doc.name
- dep_recipe_ref.spdxDocument = spdx_dep_doc.documentNamespace
- dep_recipe_ref.checksum.algorithm = "SHA1"
- dep_recipe_ref.checksum.checksumValue = spdx_dep_sha1
-
- doc.externalDocumentRefs.append(dep_recipe_ref)
-
- doc.add_relationship(
- "%s:%s" % (dep_recipe_ref.externalDocumentId, spdx_dep_recipe.SPDXID),
- "BUILD_DEPENDENCY_OF",
- spdx_recipe
- )
-
- return dep_recipes
-
-collect_dep_recipes[vardepsexclude] += "BB_TASKDEPDATA"
-
-
-def collect_dep_sources(d, dep_recipes):
- import oe.sbom
-
- sources = {}
- for dep in dep_recipes:
- # Don't collect sources from native recipes as they
- # match non-native sources also.
- if recipe_spdx_is_native(d, dep.recipe):
- continue
- recipe_files = set(dep.recipe.hasFiles)
-
- for spdx_file in dep.doc.files:
- if spdx_file.SPDXID not in recipe_files:
- continue
-
- if "SOURCE" in spdx_file.fileTypes:
- for checksum in spdx_file.checksums:
- if checksum.algorithm == "SHA256":
- sources[checksum.checksumValue] = oe.sbom.DepSource(dep.doc, dep.doc_sha1, dep.recipe, spdx_file)
- break
-
- return sources
-
-
-python do_create_spdx() {
- from datetime import datetime, timezone
- import oe.sbom
- import oe.spdx
- import uuid
- from pathlib import Path
- from contextlib import contextmanager
- import oe.cve_check
-
- @contextmanager
- def optional_tarfile(name, guard, mode="w"):
- import tarfile
- import bb.compress.zstd
-
- num_threads = int(d.getVar("BB_NUMBER_THREADS"))
-
- if guard:
- name.parent.mkdir(parents=True, exist_ok=True)
- with bb.compress.zstd.open(name, mode=mode + "b", num_threads=num_threads) as f:
- with tarfile.open(fileobj=f, mode=mode + "|") as tf:
- yield tf
- else:
- yield None
-
-
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
- spdx_workdir = Path(d.getVar("SPDXWORK"))
- include_packaged = d.getVar("SPDX_INCLUDE_PACKAGED") == "1"
- include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
- archive_sources = d.getVar("SPDX_ARCHIVE_SOURCES") == "1"
- archive_packaged = d.getVar("SPDX_ARCHIVE_PACKAGED") == "1"
-
- creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
-
- doc = oe.spdx.SPDXDocument()
-
- doc.name = "recipe-" + d.getVar("PN")
- doc.documentNamespace = get_doc_namespace(d, doc)
- doc.creationInfo.created = creation_time
- doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
- doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
- doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
- doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
- doc.creationInfo.creators.append("Person: N/A ()")
-
- recipe = oe.spdx.SPDXPackage()
- recipe.name = d.getVar("PN")
- recipe.versionInfo = d.getVar("PV")
- recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
- recipe.supplier = d.getVar("SPDX_SUPPLIER")
- if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d):
- recipe.annotations.append(create_annotation(d, "isNative"))
-
- for s in d.getVar('SRC_URI').split():
- if not s.startswith("file://"):
- recipe.downloadLocation = s
- break
- else:
- recipe.downloadLocation = "NOASSERTION"
-
- homepage = d.getVar("HOMEPAGE")
- if homepage:
- recipe.homepage = homepage
-
- license = d.getVar("LICENSE")
- if license:
- recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)
-
- summary = d.getVar("SUMMARY")
- if summary:
- recipe.summary = summary
-
- description = d.getVar("DESCRIPTION")
- if description:
- recipe.description = description
-
- # Some CVEs may be patched during the build process without incrementing the version number,
- # so querying for CVEs based on the CPE id can lead to false positives. To account for this,
- # save the CVEs fixed by patches to source information field in the SPDX.
- patched_cves = oe.cve_check.get_patched_cves(d)
- patched_cves = list(patched_cves)
- patched_cves = ' '.join(patched_cves)
- if patched_cves:
- recipe.sourceInfo = "CVEs fixed: " + patched_cves
-
- cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
- if cpe_ids:
- for cpe_id in cpe_ids:
- cpe = oe.spdx.SPDXExternalReference()
- cpe.referenceCategory = "SECURITY"
- cpe.referenceType = "http://spdx.org/rdf/references/cpe23Type"
- cpe.referenceLocator = cpe_id
- recipe.externalRefs.append(cpe)
-
- doc.packages.append(recipe)
- doc.add_relationship(doc, "DESCRIBES", recipe)
-
- if process_sources(d) and include_sources:
- recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
- with optional_tarfile(recipe_archive, archive_sources) as archive:
- spdx_get_src(d)
-
- add_package_files(
- d,
- doc,
- recipe,
- spdx_workdir,
- lambda file_counter: "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), file_counter),
- lambda filepath: ["SOURCE"],
- ignore_dirs=[".git"],
- ignore_top_level_dirs=["temp"],
- archive=archive,
- )
-
- if archive is not None:
- recipe.packageFileName = str(recipe_archive.name)
-
- dep_recipes = collect_dep_recipes(d, doc, recipe)
-
- doc_sha1 = oe.sbom.write_doc(d, doc, "recipes", indent=get_json_indent(d))
- dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))
-
- recipe_ref = oe.spdx.SPDXExternalDocumentRef()
- recipe_ref.externalDocumentId = "DocumentRef-recipe-" + recipe.name
- recipe_ref.spdxDocument = doc.documentNamespace
- recipe_ref.checksum.algorithm = "SHA1"
- recipe_ref.checksum.checksumValue = doc_sha1
-
- sources = collect_dep_sources(d, dep_recipes)
- found_licenses = {license.name:recipe_ref.externalDocumentId + ":" + license.licenseId for license in doc.hasExtractedLicensingInfos}
-
- if not recipe_spdx_is_native(d, recipe):
- bb.build.exec_func("read_subpackage_metadata", d)
-
- pkgdest = Path(d.getVar("PKGDEST"))
- for package in d.getVar("PACKAGES").split():
- if not oe.packagedata.packaged(package, d):
- continue
-
- package_doc = oe.spdx.SPDXDocument()
- pkg_name = d.getVar("PKG:%s" % package) or package
- package_doc.name = pkg_name
- package_doc.documentNamespace = get_doc_namespace(d, package_doc)
- package_doc.creationInfo.created = creation_time
- package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
- package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
- package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
- package_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
- package_doc.creationInfo.creators.append("Person: N/A ()")
- package_doc.externalDocumentRefs.append(recipe_ref)
-
- package_license = d.getVar("LICENSE:%s" % package) or d.getVar("LICENSE")
-
- spdx_package = oe.spdx.SPDXPackage()
-
- spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
- spdx_package.name = pkg_name
- spdx_package.versionInfo = d.getVar("PV")
- spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
- spdx_package.supplier = d.getVar("SPDX_SUPPLIER")
-
- package_doc.packages.append(spdx_package)
-
- package_doc.add_relationship(spdx_package, "GENERATED_FROM", "%s:%s" % (recipe_ref.externalDocumentId, recipe.SPDXID))
- package_doc.add_relationship(package_doc, "DESCRIBES", spdx_package)
-
- package_archive = deploy_dir_spdx / "packages" / (package_doc.name + ".tar.zst")
- with optional_tarfile(package_archive, archive_packaged) as archive:
- package_files = add_package_files(
- d,
- package_doc,
- spdx_package,
- pkgdest / package,
- lambda file_counter: oe.sbom.get_packaged_file_spdxid(pkg_name, file_counter),
- lambda filepath: ["BINARY"],
- ignore_top_level_dirs=['CONTROL', 'DEBIAN'],
- archive=archive,
- )
-
- if archive is not None:
- spdx_package.packageFileName = str(package_archive.name)
-
- add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)
-
- oe.sbom.write_doc(d, package_doc, "packages", indent=get_json_indent(d))
-}
-# NOTE: depending on do_unpack is a hack that is necessary to get it's dependencies for archive the source
-addtask do_create_spdx after do_package do_packagedata do_unpack before do_populate_sdk do_build do_rm_work
-
-SSTATETASKS += "do_create_spdx"
-do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
-do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
-
-python do_create_spdx_setscene () {
- sstate_setscene(d)
-}
-addtask do_create_spdx_setscene
-
-do_create_spdx[dirs] = "${SPDXWORK}"
-do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
-do_create_spdx[depends] += "${PATCHDEPENDENCY}"
-do_create_spdx[deptask] = "do_create_spdx"
-
-def collect_package_providers(d):
- from pathlib import Path
- import oe.sbom
- import oe.spdx
- import json
-
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
-
- providers = {}
-
- taskdepdata = d.getVar("BB_TASKDEPDATA", False)
- deps = sorted(set(
- dep[0] for dep in taskdepdata.values() if dep[0] != d.getVar("PN")
- ))
- deps.append(d.getVar("PN"))
-
- for dep_pn in deps:
- recipe_data = oe.packagedata.read_pkgdata(dep_pn, d)
-
- for pkg in recipe_data.get("PACKAGES", "").split():
-
- pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, d)
- rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
- rprovides.add(pkg)
-
- for r in rprovides:
- providers[r] = pkg
-
- return providers
-
-collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
-
-python do_create_runtime_spdx() {
- from datetime import datetime, timezone
- import oe.sbom
- import oe.spdx
- import oe.packagedata
- from pathlib import Path
-
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
- spdx_deploy = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
- is_native = bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d)
-
- creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
-
- providers = collect_package_providers(d)
-
- if not is_native:
- bb.build.exec_func("read_subpackage_metadata", d)
-
- dep_package_cache = {}
-
- pkgdest = Path(d.getVar("PKGDEST"))
- for package in d.getVar("PACKAGES").split():
- localdata = bb.data.createCopy(d)
- pkg_name = d.getVar("PKG:%s" % package) or package
- localdata.setVar("PKG", pkg_name)
- localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)
-
- if not oe.packagedata.packaged(package, localdata):
- continue
-
- pkg_spdx_path = deploy_dir_spdx / "packages" / (pkg_name + ".spdx.json")
-
- package_doc, package_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
-
- for p in package_doc.packages:
- if p.name == pkg_name:
- spdx_package = p
- break
- else:
- bb.fatal("Package '%s' not found in %s" % (pkg_name, pkg_spdx_path))
-
- runtime_doc = oe.spdx.SPDXDocument()
- runtime_doc.name = "runtime-" + pkg_name
- runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
- runtime_doc.creationInfo.created = creation_time
- runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
- runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
- runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
- runtime_doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
- runtime_doc.creationInfo.creators.append("Person: N/A ()")
-
- package_ref = oe.spdx.SPDXExternalDocumentRef()
- package_ref.externalDocumentId = "DocumentRef-package-" + package
- package_ref.spdxDocument = package_doc.documentNamespace
- package_ref.checksum.algorithm = "SHA1"
- package_ref.checksum.checksumValue = package_doc_sha1
-
- runtime_doc.externalDocumentRefs.append(package_ref)
-
- runtime_doc.add_relationship(
- runtime_doc.SPDXID,
- "AMENDS",
- "%s:%s" % (package_ref.externalDocumentId, package_doc.SPDXID)
- )
-
- deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
- seen_deps = set()
- for dep, _ in deps.items():
- if dep in seen_deps:
- continue
-
- if dep not in providers:
- continue
-
- dep = providers[dep]
-
- if not oe.packagedata.packaged(dep, localdata):
- continue
-
- dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
- dep_pkg = dep_pkg_data["PKG"]
-
- if dep in dep_package_cache:
- (dep_spdx_package, dep_package_ref) = dep_package_cache[dep]
- else:
- dep_path = deploy_dir_spdx / "packages" / ("%s.spdx.json" % dep_pkg)
-
- spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_path)
-
- for pkg in spdx_dep_doc.packages:
- if pkg.name == dep_pkg:
- dep_spdx_package = pkg
- break
- else:
- bb.fatal("Package '%s' not found in %s" % (dep_pkg, dep_path))
-
- dep_package_ref = oe.spdx.SPDXExternalDocumentRef()
- dep_package_ref.externalDocumentId = "DocumentRef-runtime-dependency-" + spdx_dep_doc.name
- dep_package_ref.spdxDocument = spdx_dep_doc.documentNamespace
- dep_package_ref.checksum.algorithm = "SHA1"
- dep_package_ref.checksum.checksumValue = spdx_dep_sha1
-
- dep_package_cache[dep] = (dep_spdx_package, dep_package_ref)
-
- runtime_doc.externalDocumentRefs.append(dep_package_ref)
-
- runtime_doc.add_relationship(
- "%s:%s" % (dep_package_ref.externalDocumentId, dep_spdx_package.SPDXID),
- "RUNTIME_DEPENDENCY_OF",
- "%s:%s" % (package_ref.externalDocumentId, spdx_package.SPDXID)
- )
- seen_deps.add(dep)
-
- oe.sbom.write_doc(d, runtime_doc, "runtime", spdx_deploy, indent=get_json_indent(d))
-}
-
-addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
-SSTATETASKS += "do_create_runtime_spdx"
-do_create_runtime_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
-do_create_runtime_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
-
-python do_create_runtime_spdx_setscene () {
- sstate_setscene(d)
-}
-addtask do_create_runtime_spdx_setscene
-
-do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
-do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
-do_create_runtime_spdx[rdeptask] = "do_create_spdx"
-
-def spdx_get_src(d):
- """
- save patched source of the recipe in SPDX_WORKDIR.
- """
- import shutil
- spdx_workdir = d.getVar('SPDXWORK')
- spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
- pn = d.getVar('PN')
-
- workdir = d.getVar("WORKDIR")
-
- try:
- # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
- if not is_work_shared_spdx(d):
- # Change the WORKDIR to make do_unpack do_patch run in another dir.
- d.setVar('WORKDIR', spdx_workdir)
- # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
- d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-
- # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
- # possibly requiring of the following tasks (such as some recipes's
- # do_patch required 'B' existed).
- bb.utils.mkdirhier(d.getVar('B'))
-
- bb.build.exec_func('do_unpack', d)
- # Copy source of kernel to spdx_workdir
- if is_work_shared_spdx(d):
- d.setVar('WORKDIR', spdx_workdir)
- d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
- src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
- bb.utils.mkdirhier(src_dir)
- if bb.data.inherits_class('kernel',d):
- share_src = d.getVar('STAGING_KERNEL_DIR')
- cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
- cmd_copy_kernel_result = os.popen(cmd_copy_share).read()
- bb.note("cmd_copy_kernel_result = " + cmd_copy_kernel_result)
-
- git_path = src_dir + "/.git"
- if os.path.exists(git_path):
- shutils.rmtree(git_path)
-
- # Make sure gcc and kernel sources are patched only once
- if not (d.getVar('SRC_URI') == "" or is_work_shared_spdx(d)):
- bb.build.exec_func('do_patch', d)
-
- # Some userland has no source.
- if not os.path.exists( spdx_workdir ):
- bb.utils.mkdirhier(spdx_workdir)
- finally:
- d.setVar("WORKDIR", workdir)
-
-do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
-
-ROOTFS_POSTUNINSTALL_COMMAND =+ "image_combine_spdx ; "
-
-do_populate_sdk[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
-POPULATE_SDK_POST_HOST_COMMAND:append:task-populate-sdk = " sdk_host_combine_spdx; "
-POPULATE_SDK_POST_TARGET_COMMAND:append:task-populate-sdk = " sdk_target_combine_spdx; "
-
-python image_combine_spdx() {
- import os
- import oe.sbom
- from pathlib import Path
- from oe.rootfs import image_list_installed_packages
-
- image_name = d.getVar("IMAGE_NAME")
- image_link_name = d.getVar("IMAGE_LINK_NAME")
- imgdeploydir = Path(d.getVar("IMGDEPLOYDIR"))
- img_spdxid = oe.sbom.get_image_spdxid(image_name)
- packages = image_list_installed_packages(d)
-
- combine_spdx(d, image_name, imgdeploydir, img_spdxid, packages)
-
- def make_image_link(target_path, suffix):
- if image_link_name:
- link = imgdeploydir / (image_link_name + suffix)
- if link != target_path:
- link.symlink_to(os.path.relpath(target_path, link.parent))
-
- image_spdx_path = imgdeploydir / (image_name + ".spdx.json")
- make_image_link(image_spdx_path, ".spdx.json")
- spdx_tar_path = imgdeploydir / (image_name + ".spdx.tar.zst")
- make_image_link(spdx_tar_path, ".spdx.tar.zst")
- spdx_index_path = imgdeploydir / (image_name + ".spdx.index.json")
- make_image_link(spdx_index_path, ".spdx.index.json")
-}
-
-python sdk_host_combine_spdx() {
- sdk_combine_spdx(d, "host")
-}
-
-python sdk_target_combine_spdx() {
- sdk_combine_spdx(d, "target")
-}
-
-def sdk_combine_spdx(d, sdk_type):
- import oe.sbom
- from pathlib import Path
- from oe.sdk import sdk_list_installed_packages
-
- sdk_name = d.getVar("SDK_NAME") + "-" + sdk_type
- sdk_deploydir = Path(d.getVar("SDKDEPLOYDIR"))
- sdk_spdxid = oe.sbom.get_sdk_spdxid(sdk_name)
- sdk_packages = sdk_list_installed_packages(d, sdk_type == "target")
- combine_spdx(d, sdk_name, sdk_deploydir, sdk_spdxid, sdk_packages)
-
-def combine_spdx(d, rootfs_name, rootfs_deploydir, rootfs_spdxid, packages):
- import os
- import oe.spdx
- import oe.sbom
- import io
- import json
- from datetime import timezone, datetime
- from pathlib import Path
- import tarfile
- import bb.compress.zstd
-
- creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
- deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
- source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
-
- doc = oe.spdx.SPDXDocument()
- doc.name = rootfs_name
- doc.documentNamespace = get_doc_namespace(d, doc)
- doc.creationInfo.created = creation_time
- doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
- doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
- doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
- doc.creationInfo.creators.append("Organization: %s" % d.getVar("SPDX_ORG"))
- doc.creationInfo.creators.append("Person: N/A ()")
-
- image = oe.spdx.SPDXPackage()
- image.name = d.getVar("PN")
- image.versionInfo = d.getVar("PV")
- image.SPDXID = rootfs_spdxid
- image.supplier = d.getVar("SPDX_SUPPLIER")
-
- doc.packages.append(image)
-
- for name in sorted(packages.keys()):
- pkg_spdx_path = deploy_dir_spdx / "packages" / (name + ".spdx.json")
- pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
-
- for p in pkg_doc.packages:
- if p.name == name:
- pkg_ref = oe.spdx.SPDXExternalDocumentRef()
- pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
- pkg_ref.spdxDocument = pkg_doc.documentNamespace
- pkg_ref.checksum.algorithm = "SHA1"
- pkg_ref.checksum.checksumValue = pkg_doc_sha1
-
- doc.externalDocumentRefs.append(pkg_ref)
- doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
- break
- else:
- bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))
-
- runtime_spdx_path = deploy_dir_spdx / "runtime" / ("runtime-" + name + ".spdx.json")
- runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)
-
- runtime_ref = oe.spdx.SPDXExternalDocumentRef()
- runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
- runtime_ref.spdxDocument = runtime_doc.documentNamespace
- runtime_ref.checksum.algorithm = "SHA1"
- runtime_ref.checksum.checksumValue = runtime_doc_sha1
-
- # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
- doc.externalDocumentRefs.append(runtime_ref)
- doc.add_relationship(
- image,
- "OTHER",
- "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
- comment="Runtime dependencies for %s" % name
- )
-
- image_spdx_path = rootfs_deploydir / (rootfs_name + ".spdx.json")
-
- with image_spdx_path.open("wb") as f:
- doc.to_json(f, sort_keys=True, indent=get_json_indent(d))
-
- num_threads = int(d.getVar("BB_NUMBER_THREADS"))
-
- visited_docs = set()
-
- index = {"documents": []}
-
- spdx_tar_path = rootfs_deploydir / (rootfs_name + ".spdx.tar.zst")
- with bb.compress.zstd.open(spdx_tar_path, "w", num_threads=num_threads) as f:
- with tarfile.open(fileobj=f, mode="w|") as tar:
- def collect_spdx_document(path):
- nonlocal tar
- nonlocal deploy_dir_spdx
- nonlocal source_date_epoch
- nonlocal index
-
- if path in visited_docs:
- return
-
- visited_docs.add(path)
-
- with path.open("rb") as f:
- doc, sha1 = oe.sbom.read_doc(f)
- f.seek(0)
-
- if doc.documentNamespace in visited_docs:
- return
-
- bb.note("Adding SPDX document %s" % path)
- visited_docs.add(doc.documentNamespace)
- info = tar.gettarinfo(fileobj=f)
-
- info.name = doc.name + ".spdx.json"
- info.uid = 0
- info.gid = 0
- info.uname = "root"
- info.gname = "root"
-
- if source_date_epoch is not None and info.mtime > int(source_date_epoch):
- info.mtime = int(source_date_epoch)
-
- tar.addfile(info, f)
-
- index["documents"].append({
- "filename": info.name,
- "documentNamespace": doc.documentNamespace,
- "sha1": sha1,
- })
-
- for ref in doc.externalDocumentRefs:
- ref_path = deploy_dir_spdx / "by-namespace" / ref.spdxDocument.replace("/", "_")
- collect_spdx_document(ref_path)
-
- collect_spdx_document(image_spdx_path)
-
- index["documents"].sort(key=lambda x: x["filename"])
-
- index_str = io.BytesIO(json.dumps(
- index,
- sort_keys=True,
- indent=get_json_indent(d),
- ).encode("utf-8"))
-
- info = tarfile.TarInfo()
- info.name = "index.json"
- info.size = len(index_str.getvalue())
- info.uid = 0
- info.gid = 0
- info.uname = "root"
- info.gname = "root"
-
- tar.addfile(info, fileobj=index_str)
-
- spdx_index_path = rootfs_deploydir / (rootfs_name + ".spdx.index.json")
- with spdx_index_path.open("w") as f:
- json.dump(index, f, sort_keys=True, indent=get_json_indent(d))
+# Include this class when you don't care what version of SPDX you get; it will
+# be updated to the latest stable version that is supported
+inherit create-spdx-2.2
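A usage note for context (illustrative, not part of the patch): with create-spdx reduced to a thin wrapper, SBOM generation is still enabled the same way as before, typically from local.conf or a distro config. A minimal sketch, assuming the standard INHERIT mechanism (SPDX_PRETTY is optional and only affects JSON formatting):

    INHERIT += "create-spdx"
    SPDX_PRETTY = "1"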
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 4b4ea7893e..56ba8bceef 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -32,7 +32,7 @@ CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"
CVE_CHECK_DB_DIR ?= "${DL_DIR}/CVE_CHECK"
-CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_1.1.db"
+CVE_CHECK_DB_FILE ?= "${CVE_CHECK_DB_DIR}/nvdcve_2-1.db"
CVE_CHECK_DB_FILE_LOCK ?= "${CVE_CHECK_DB_FILE}.lock"
CVE_CHECK_LOG ?= "${T}/cve.log"
@@ -48,8 +48,8 @@ CVE_CHECK_LOG_JSON ?= "${T}/cve.json"
CVE_CHECK_DIR ??= "${DEPLOY_DIR}/cve"
CVE_CHECK_RECIPE_FILE ?= "${CVE_CHECK_DIR}/${PN}"
CVE_CHECK_RECIPE_FILE_JSON ?= "${CVE_CHECK_DIR}/${PN}_cve.json"
-CVE_CHECK_MANIFEST ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.cve"
-CVE_CHECK_MANIFEST_JSON ?= "${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.json"
+CVE_CHECK_MANIFEST ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.cve"
+CVE_CHECK_MANIFEST_JSON ?= "${IMGDEPLOYDIR}/${IMAGE_NAME}.json"
CVE_CHECK_COPY_FILES ??= "1"
CVE_CHECK_CREATE_MANIFEST ??= "1"
@@ -70,12 +70,28 @@ CVE_CHECK_COVERAGE ??= "1"
# Skip CVE Check for packages (PN)
CVE_CHECK_SKIP_RECIPE ?= ""
-# Ingore the check for a given list of CVEs. If a CVE is found,
-# then it is considered patched. The value is a string containing
-# space separated CVE values:
+# Replace the NVD DB check status for a given CVE. Each CVE has to be mentioned
+# separately, with an optional detail and description for this status.
#
-# CVE_CHECK_IGNORE = 'CVE-2014-2524 CVE-2018-1234'
+# CVE_STATUS[CVE-1234-0001] = "not-applicable-platform: Issue only applies on Windows"
+# CVE_STATUS[CVE-1234-0002] = "fixed-version: Fixed externally"
#
+# Setting the same status and reason for multiple CVEs is possible
+# via the CVE_STATUS_GROUPS variable.
+#
+# CVE_STATUS_GROUPS = "CVE_STATUS_WIN CVE_STATUS_PATCHED"
+#
+# CVE_STATUS_WIN = "CVE-1234-0001 CVE-1234-0003"
+# CVE_STATUS_WIN[status] = "not-applicable-platform: Issue only applies on Windows"
+# CVE_STATUS_PATCHED = "CVE-1234-0002 CVE-1234-0004"
+# CVE_STATUS_PATCHED[status] = "fixed-version: Fixed externally"
+#
+# All possible CVE statuses can be found in cve-check-map.conf, e.g.:
+# CVE_CHECK_STATUSMAP[not-applicable-platform] = "Ignored"
+# CVE_CHECK_STATUSMAP[fixed-version] = "Patched"
+#
+# CVE_CHECK_IGNORE is deprecated and CVE_STATUS has to be used instead.
+# Keep CVE_CHECK_IGNORE until other layers have migrated to the new variables.
CVE_CHECK_IGNORE ?= ""
# Layers to be excluded
@@ -88,6 +104,24 @@ CVE_CHECK_LAYER_INCLUDELIST ??= ""
# set to "alphabetical" for version using single alphabetical character as increment release
CVE_VERSION_SUFFIX ??= ""
+python () {
+ # Fall back: convert all CVEs from the deprecated CVE_CHECK_IGNORE to CVE_STATUS
+ cve_check_ignore = d.getVar("CVE_CHECK_IGNORE")
+ if cve_check_ignore:
+ bb.warn("CVE_CHECK_IGNORE is deprecated in favor of CVE_STATUS")
+ for cve in (d.getVar("CVE_CHECK_IGNORE") or "").split():
+ d.setVarFlag("CVE_STATUS", cve, "ignored")
+
+ # Process CVE_STATUS_GROUPS to set multiple statuses and optional detail or description at once
+ for cve_status_group in (d.getVar("CVE_STATUS_GROUPS") or "").split():
+ cve_group = d.getVar(cve_status_group)
+ if cve_group is not None:
+ for cve in cve_group.split():
+ d.setVarFlag("CVE_STATUS", cve, d.getVarFlag(cve_status_group, "status"))
+ else:
+ bb.warn("CVE_STATUS_GROUPS contains undefined variable %s" % cve_status_group)
+}
+
def generate_json_report(d, out_path, link_path):
if os.path.exists(d.getVar("CVE_CHECK_SUMMARY_INDEX_PATH")):
import json
@@ -104,6 +138,8 @@ def generate_json_report(d, out_path, link_path):
cve_check_merge_jsons(summary, data)
filename = f.readline()
+ summary["package"].sort(key=lambda d: d['name'])
+
with open(out_path, "w") as f:
json.dump(summary, f, indent=2)
@@ -161,7 +197,7 @@ python do_cve_check () {
}
addtask cve_check before do_build
-do_cve_check[depends] = "cve-update-db-native:do_fetch"
+do_cve_check[depends] = "cve-update-nvd2-native:do_fetch"
do_cve_check[nostamp] = "1"
python cve_check_cleanup () {
@@ -202,7 +238,7 @@ python cve_check_write_rootfs_manifest () {
recipies.add(pkg_data["PN"])
bb.note("Writing rootfs CVE manifest")
- deploy_dir = d.getVar("DEPLOY_DIR_IMAGE")
+ deploy_dir = d.getVar("IMGDEPLOYDIR")
link_name = d.getVar("IMAGE_LINK_NAME")
json_data = {"version":"1", "package": []}
@@ -252,7 +288,7 @@ python cve_check_write_rootfs_manifest () {
bb.plain("Image CVE JSON report stored in: %s" % manifest_name)
}
-ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
+ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
do_populate_sdk[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
@@ -260,7 +296,7 @@ def check_cves(d, patched_cves):
"""
Connect to the NVD database and find unpatched cves.
"""
- from oe.cve_check import Version
+ from oe.cve_check import Version, convert_cve_version, decode_cve_status
pn = d.getVar("PN")
real_pv = d.getVar("PV")
@@ -282,7 +318,12 @@ def check_cves(d, patched_cves):
bb.note("Recipe has been skipped by cve-check")
return ([], [], [], [])
- cve_ignore = d.getVar("CVE_CHECK_IGNORE").split()
+ # Convert CVE_STATUS into ignored CVEs and check validity
+ cve_ignore = []
+ for cve in (d.getVarFlags("CVE_STATUS") or {}):
+ decoded_status, _, _ = decode_cve_status(d, cve)
+ if decoded_status == "Ignored":
+ cve_ignore.append(cve)
import sqlite3
db_file = d.expand("file:${CVE_CHECK_DB_FILE}?mode=ro")
@@ -324,6 +365,9 @@ def check_cves(d, patched_cves):
if cve in cve_ignore:
ignored = True
+ version_start = convert_cve_version(version_start)
+ version_end = convert_cve_version(version_end)
+
if (operator_start == '=' and pv == version_start) or version_start == '-':
vulnerable = True
else:
@@ -374,6 +418,9 @@ def check_cves(d, patched_cves):
cves_status.append([product, False])
conn.close()
+ diff_ignore = list(set(cve_ignore) - set(cves_ignored))
+ if diff_ignore:
+ oe.qa.handle_error("cve_status_not_in_db", "Found CVEs (%s) with CVE_STATUS set that are not found in the database for this component" % " ".join(diff_ignore), d)
if not cves_in_recipe:
bb.note("No CVE records for products in recipe %s" % (pn))
@@ -400,6 +447,7 @@ def get_cve_info(d, cves):
cve_data[row[0]]["scorev3"] = row[3]
cve_data[row[0]]["modified"] = row[4]
cve_data[row[0]]["vector"] = row[5]
+ cve_data[row[0]]["vectorString"] = row[6]
cursor.close()
conn.close()
return cve_data
@@ -410,6 +458,8 @@ def cve_write_data_text(d, patched, unpatched, ignored, cve_data):
CVE manifest if enabled.
"""
+ from oe.cve_check import decode_cve_status
+
cve_file = d.getVar("CVE_CHECK_LOG")
fdir_name = d.getVar("FILE_DIRNAME")
layer = fdir_name.split("/")[-3]
@@ -438,24 +488,32 @@ def cve_write_data_text(d, patched, unpatched, ignored, cve_data):
is_patched = cve in patched
is_ignored = cve in ignored
+ status = "Unpatched"
if (is_patched or is_ignored) and not report_all:
continue
+ if is_ignored:
+ status = "Ignored"
+ elif is_patched:
+ status = "Patched"
+ else:
+ # default value of status is Unpatched
+ unpatched_cves.append(cve)
write_string += "LAYER: %s\n" % layer
write_string += "PACKAGE NAME: %s\n" % d.getVar("PN")
write_string += "PACKAGE VERSION: %s%s\n" % (d.getVar("EXTENDPE"), d.getVar("PV"))
write_string += "CVE: %s\n" % cve
- if is_ignored:
- write_string += "CVE STATUS: Ignored\n"
- elif is_patched:
- write_string += "CVE STATUS: Patched\n"
- else:
- unpatched_cves.append(cve)
- write_string += "CVE STATUS: Unpatched\n"
+ write_string += "CVE STATUS: %s\n" % status
+ _, detail, description = decode_cve_status(d, cve)
+ if detail:
+ write_string += "CVE DETAIL: %s\n" % detail
+ if description:
+ write_string += "CVE DESCRIPTION: %s\n" % description
write_string += "CVE SUMMARY: %s\n" % cve_data[cve]["summary"]
write_string += "CVSS v2 BASE SCORE: %s\n" % cve_data[cve]["scorev2"]
write_string += "CVSS v3 BASE SCORE: %s\n" % cve_data[cve]["scorev3"]
write_string += "VECTOR: %s\n" % cve_data[cve]["vector"]
+ write_string += "VECTORSTRING: %s\n" % cve_data[cve]["vectorString"]
write_string += "MORE INFORMATION: %s%s\n\n" % (nvd_link, cve)
if unpatched_cves and d.getVar("CVE_CHECK_SHOW_WARNINGS") == "1":
@@ -513,6 +571,8 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
Prepare CVE data for the JSON format, then write it.
"""
+ from oe.cve_check import decode_cve_status
+
output = {"version":"1", "package": []}
nvd_link = "https://nvd.nist.gov/vuln/detail/"
@@ -570,9 +630,15 @@ def cve_write_data_json(d, patched, unpatched, ignored, cve_data, cve_status):
"scorev2" : cve_data[cve]["scorev2"],
"scorev3" : cve_data[cve]["scorev3"],
"vector" : cve_data[cve]["vector"],
+ "vectorString" : cve_data[cve]["vectorString"],
"status" : status,
"link": issue_link
}
+ _, detail, description = decode_cve_status(d, cve)
+ if detail:
+ cve_item["detail"] = detail
+ if description:
+ cve_item["description"] = description
cve_list.append(cve_item)
package_data["issue"] = cve_list
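To illustrate the variables documented in the comment block added above (the CVE identifiers below are placeholders; the status names not-applicable-platform and fixed-version are the ones quoted in the patch from cve-check-map.conf), a recipe could carry:

    CVE_STATUS[CVE-2014-2524] = "fixed-version: fixed in the version we ship"

    CVE_STATUS_GROUPS = "CVE_STATUS_NOT_OUR_PLATFORM"
    CVE_STATUS_NOT_OUR_PLATFORM = "CVE-2018-1234 CVE-2018-5678"
    CVE_STATUS_NOT_OUR_PLATFORM[status] = "not-applicable-platform: Issue only applies on Windows"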
diff --git a/meta/classes/devtool-source.bbclass b/meta/classes/devtool-source.bbclass
index a02b1e9b0e..3e24800dcb 100644
--- a/meta/classes/devtool-source.bbclass
+++ b/meta/classes/devtool-source.bbclass
@@ -26,8 +26,6 @@
DEVTOOL_TEMPDIR ?= ""
-DEVTOOL_PATCH_SRCDIR = "${DEVTOOL_TEMPDIR}/patchworkdir"
-
python() {
tempdir = d.getVar('DEVTOOL_TEMPDIR')
@@ -60,7 +58,6 @@ python() {
else:
unpacktask = 'do_unpack'
d.appendVarFlag(unpacktask, 'postfuncs', ' devtool_post_unpack')
- d.prependVarFlag('do_patch', 'prefuncs', ' devtool_pre_patch')
d.appendVarFlag('do_patch', 'postfuncs', ' devtool_post_patch')
# NOTE: in order for the patch stuff to be fully functional,
@@ -79,67 +76,23 @@ python devtool_post_unpack() {
tempdir = d.getVar('DEVTOOL_TEMPDIR')
workdir = d.getVar('WORKDIR')
+ unpackdir = d.getVar('UNPACKDIR')
srcsubdir = d.getVar('S')
- def _move_file(src, dst):
- """Move a file. Creates all the directory components of destination path."""
- dst_d = os.path.dirname(dst)
- if dst_d:
- bb.utils.mkdirhier(dst_d)
- shutil.move(src, dst)
-
- def _ls_tree(directory):
- """Recursive listing of files in a directory"""
- ret = []
- for root, dirs, files in os.walk(directory):
- ret.extend([os.path.relpath(os.path.join(root, fname), directory) for
- fname in files])
- return ret
-
- is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
- # Move local source files into separate subdir
- recipe_patches = [os.path.basename(patch) for patch in
- oe.recipeutils.get_recipe_patches(d)]
+ # Add locally copied files to .gitignore, as we will add them back to the metadata directly
local_files = oe.recipeutils.get_recipe_local_files(d)
-
- if is_kernel_yocto:
- for key in [f for f in local_files if f.endswith('scc')]:
- with open(local_files[key], 'r') as sccfile:
- for l in sccfile:
- line = l.split()
- if line and line[0] in ('kconf', 'patch'):
- cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
- if cfg not in local_files.values():
- local_files[line[-1]] = cfg
- shutil.copy2(cfg, workdir)
-
- # Ignore local files with subdir={BP}
srcabspath = os.path.abspath(srcsubdir)
local_files = [fname for fname in local_files if
- os.path.exists(os.path.join(workdir, fname)) and
- (srcabspath == workdir or not
- os.path.join(workdir, fname).startswith(srcabspath +
- os.sep))]
+ os.path.exists(os.path.join(unpackdir, fname)) and
+ srcabspath == unpackdir]
if local_files:
- for fname in local_files:
- _move_file(os.path.join(workdir, fname),
- os.path.join(tempdir, 'oe-local-files', fname))
- with open(os.path.join(tempdir, 'oe-local-files', '.gitignore'),
- 'w') as f:
- f.write('# Ignore local files, by default. Remove this file '
- 'if you want to commit the directory to Git\n*\n')
-
- if srcsubdir == workdir:
- # Find non-patch non-local sources that were "unpacked" to srctree
- # directory
- src_files = [fname for fname in _ls_tree(workdir) if
- os.path.basename(fname) not in recipe_patches]
- srcsubdir = d.getVar('DEVTOOL_PATCH_SRCDIR')
- # Move source files to S
- for path in src_files:
- _move_file(os.path.join(workdir, path),
- os.path.join(srcsubdir, path))
- elif os.path.dirname(srcsubdir) != workdir:
+ with open(os.path.join(tempdir, '.gitignore'), 'a+') as f:
+ f.write('# Ignore local files, by default. Remove the following lines '
+ 'if you want to commit the directory to Git\n')
+ for fname in local_files:
+ f.write('%s\n' % fname)
+
+ if os.path.dirname(srcsubdir) != workdir:
# Handle if S is set to a subdirectory of the source
srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0])
@@ -164,11 +117,6 @@ python devtool_post_unpack() {
f.write(srcsubdir)
}
-python devtool_pre_patch() {
- if d.getVar('S') == d.getVar('WORKDIR'):
- d.setVar('S', '${DEVTOOL_PATCH_SRCDIR}')
-}
-
python devtool_post_patch() {
import shutil
tempdir = d.getVar('DEVTOOL_TEMPDIR')
@@ -232,6 +180,9 @@ python devtool_post_patch() {
bb.process.run('git rebase devtool-no-overrides', cwd=srcsubdir)
bb.process.run('git checkout %s' % devbranch, cwd=srcsubdir)
bb.process.run('git tag -f devtool-patched', cwd=srcsubdir)
+ if os.path.exists(os.path.join(srcsubdir, '.gitmodules')):
+ bb.process.run('git submodule foreach --recursive "git tag -f devtool-patched"', cwd=srcsubdir)
+
}
python devtool_post_configure() {
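For illustration only: devtool_post_unpack() now appends the locally copied files to the source tree's .gitignore instead of moving them into a separate directory, so with two hypothetical local files in SRC_URI the appended block would look roughly like:

    # Ignore local files, by default. Remove the following lines if you want to commit the directory to Git
    defconfig
    0001-example.cfg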
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
new file mode 100644
index 0000000000..70e27a8d35
--- /dev/null
+++ b/meta/classes/externalsrc.bbclass
@@ -0,0 +1,274 @@
+# Copyright (C) 2012 Linux Foundation
+# Author: Richard Purdie
+# Some code and influence taken from srctree.bbclass:
+# Copyright (C) 2009 Chris Larson <clarson@kergoth.com>
+#
+# SPDX-License-Identifier: MIT
+#
+# externalsrc.bbclass enables use of an existing source tree, usually external to
+# the build system, to build a piece of software rather than using the usual
+# fetch/unpack/patch process.
+#
+# To use, add externalsrc to the global inherit and set EXTERNALSRC to point at the
+# directory containing the sources, e.g. from local.conf for a recipe
+# called "myrecipe" you would do:
+#
+# INHERIT += "externalsrc"
+# EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree"
+#
+# In order to make this class work for both target and native versions (or with
+# multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate
+# directory under the work directory (split source and build directories). This is
+# the default, but the build directory can be set to the source directory if
+# circumstances dictate by setting EXTERNALSRC_BUILD to the same value, e.g.:
+#
+# EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree"
+#
+
+SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
+EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"
+
+python () {
+ externalsrc = d.getVar('EXTERNALSRC')
+ externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')
+
+ if externalsrc and not externalsrc.startswith("/"):
+ bb.error("EXTERNALSRC must be an absolute path")
+ if externalsrcbuild and not externalsrcbuild.startswith("/"):
+ bb.error("EXTERNALSRC_BUILD must be an absolute path")
+
+ # If this is the base recipe and EXTERNALSRC is set for it or any of its
+ # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
+ # re-parsed so that the file-checksums function for do_compile is run every
+ # time.
+ bpn = d.getVar('BPN')
+ classextend = (d.getVar('BBCLASSEXTEND') or '').split()
+ if bpn == d.getVar('PN') or not classextend:
+ if (externalsrc or
+ ('native' in classextend and
+ d.getVar('EXTERNALSRC:pn-%s-native' % bpn)) or
+ ('nativesdk' in classextend and
+ d.getVar('EXTERNALSRC:pn-nativesdk-%s' % bpn)) or
+ ('cross' in classextend and
+ d.getVar('EXTERNALSRC:pn-%s-cross' % bpn))):
+ d.setVar('BB_DONT_CACHE', '1')
+
+ if externalsrc:
+ import oe.recipeutils
+ import oe.path
+
+ d.setVar('S', externalsrc)
+ if externalsrcbuild:
+ d.setVar('B', externalsrcbuild)
+ else:
+ d.setVar('B', '${WORKDIR}/${BPN}-${PV}')
+
+ bb.fetch.get_hashvalue(d)
+ local_srcuri = []
+ fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
+ for url in fetch.urls:
+ url_data = fetch.ud[url]
+ parm = url_data.parm
+ if url_data.type in ['file', 'npmsw', 'crate'] or parm.get('type') in ['kmeta', 'git-dependency']:
+ local_srcuri.append(url)
+
+ d.setVar('SRC_URI', ' '.join(local_srcuri))
+
+ # sstate is never going to work for external source trees, disable it
+ d.setVar('SSTATE_SKIP_CREATION', '1')
+
+ if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
+ d.setVar('CONFIGUREOPT_DEPTRACK', '')
+
+ tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())
+
+ for task in tasks:
+ if os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
+ # Since configure will likely touch ${S}, take a lock so only one task has access at a time
+ d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")
+
+ for v in d.keys():
+ cleandirs = d.getVarFlag(v, "cleandirs", False)
+ if cleandirs:
+ # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
+ cleandirs = oe.recipeutils.split_var_value(cleandirs)
+ setvalue = False
+ for cleandir in cleandirs[:]:
+ if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
+ cleandirs.remove(cleandir)
+ setvalue = True
+ if setvalue:
+ d.setVarFlag(v, 'cleandirs', ' '.join(cleandirs))
+
+ fetch_tasks = ['do_fetch', 'do_unpack']
+ # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
+ # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
+ d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])
+ d.setVarFlag('do_populate_lic', 'deps', (d.getVarFlag('do_populate_lic', 'deps', False) or []) + ['do_unpack'])
+
+ for task in d.getVar("SRCTREECOVEREDTASKS").split():
+ if local_srcuri and task in fetch_tasks:
+ continue
+ bb.build.deltask(task, d)
+ if task == 'do_unpack':
+ # The reproducible build create_source_date_epoch_stamp function must
+ # be run after the source is available and before the
+ # do_deploy_source_date_epoch task. In the normal case, it's attached
+ # to do_unpack as a postfuncs, but since we removed do_unpack (above)
+ # we need to move the function elsewhere. The easiest thing to do is
+ # move it into the prefuncs of the do_deploy_source_date_epoch task.
+ # This is safe, as externalsrc runs with the source already unpacked.
+ d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')
+
+ d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
+ d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")
+
+ d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
+ d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')
+
+ d.appendVarFlag('do_compile', 'prefuncs', ' fetcher_hashes_dummyfunc')
+ d.appendVarFlag('do_configure', 'prefuncs', ' fetcher_hashes_dummyfunc')
+
+ # We don't want the workdir to go away
+ d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))
+
+ bb.build.addtask('do_buildclean',
+ 'do_clean' if d.getVar('S') == d.getVar('B') else None,
+ None, d)
+
+ # If B=S the same builddir is used even for different architectures.
+ # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
+ # change of do_configure task hash is correctly detected and stamps are
+ # invalidated if e.g. MACHINE changes.
+ if d.getVar('S') == d.getVar('B'):
+ configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
+ d.setVar('CONFIGURESTAMPFILE', configstamp)
+ d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
+ d.setVar('STAMPCLEAN', '${STAMPS_DIR}/work-shared/${PN}/*-*')
+}
+
+python externalsrc_configure_prefunc() {
+ s_dir = d.getVar('S')
+ # Create desired symlinks
+ symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
+ newlinks = []
+ for symlink in symlinks:
+ symsplit = symlink.split(':', 1)
+ lnkfile = os.path.join(s_dir, symsplit[0])
+ target = d.expand(symsplit[1])
+ if len(symsplit) > 1:
+ if os.path.islink(lnkfile):
+ # Link already exists; leave it if it already points to the right location
+ if os.readlink(lnkfile) == target:
+ continue
+ os.unlink(lnkfile)
+ elif os.path.exists(lnkfile):
+ # File/dir exists with same name as link, just leave it alone
+ continue
+ os.symlink(target, lnkfile)
+ newlinks.append(symsplit[0])
+ # Hide the symlinks from git
+ try:
+ git_exclude_file = os.path.join(s_dir, '.git/info/exclude')
+ if os.path.exists(git_exclude_file):
+ with open(git_exclude_file, 'r+') as efile:
+ elines = efile.readlines()
+ for link in newlinks:
+ if link in elines or '/'+link in elines:
+ continue
+ efile.write('/' + link + '\n')
+ except IOError as ioe:
+ bb.note('Failed to hide EXTERNALSRC_SYMLINKS from git')
+}
+
+python externalsrc_compile_prefunc() {
+ # Make it obvious that this is happening, since forgetting about it could lead to much confusion
+ bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN'), d.getVar('EXTERNALSRC')))
+}
+
+do_buildclean[dirs] = "${S} ${B}"
+do_buildclean[nostamp] = "1"
+do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}"
+externalsrc_do_buildclean() {
+ if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
+ rm -f ${@' '.join([x.split(':')[0] for x in (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()])}
+ if [ "${CLEANBROKEN}" != "1" ]; then
+ oe_runmake clean || die "make failed"
+ fi
+ else
+ bbnote "nothing to do - no makefile found"
+ fi
+}
+
+def srctree_hash_files(d, srcdir=None):
+ import shutil
+ import subprocess
+ import tempfile
+ import hashlib
+
+ s_dir = srcdir or d.getVar('EXTERNALSRC')
+ git_dir = None
+
+ try:
+ git_dir = os.path.join(s_dir,
+ subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
+ top_git_dir = os.path.join(d.getVar("TOPDIR"),
+ subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
+ if git_dir == top_git_dir:
+ git_dir = None
+ except subprocess.CalledProcessError:
+ pass
+
+ ret = " "
+ if git_dir is not None:
+ oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1-%s' % d.getVar('PN'))
+ with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index:
+ # Clone index
+ shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name)
+ # Update our custom index
+ env = os.environ.copy()
+ env['GIT_INDEX_FILE'] = tmp_index.name
+ subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
+ git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
+ if os.path.exists(os.path.join(s_dir, ".gitmodules")) and os.path.getsize(os.path.join(s_dir, ".gitmodules")) > 0:
+ submodule_helper = subprocess.check_output(["git", "config", "--file", ".gitmodules", "--get-regexp", "path"], cwd=s_dir, env=env).decode("utf-8")
+ for line in submodule_helper.splitlines():
+ module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
+ if os.path.isdir(module_dir):
+ proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ proc.communicate()
+ proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
+ stdout, _ = proc.communicate()
+ git_sha1 += stdout.decode("utf-8")
+ sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
+ with open(oe_hash_file, 'w') as fobj:
+ fobj.write(sha1)
+ ret = oe_hash_file + ':True'
+ else:
+ ret = s_dir + '/*:True'
+ return ret
+
+def srctree_configure_hash_files(d):
+ """
+ Get the list of files that should trigger do_configure to re-execute,
+ based on the value of CONFIGURE_FILES
+ """
+ import fnmatch
+
+ in_files = (d.getVar('CONFIGURE_FILES') or '').split()
+ out_items = []
+ search_files = []
+ for entry in in_files:
+ if entry.startswith('/'):
+ out_items.append('%s:%s' % (entry, os.path.exists(entry)))
+ else:
+ search_files.append(entry)
+ if search_files:
+ s_dir = d.getVar('EXTERNALSRC')
+ for root, _, files in os.walk(s_dir):
+ for p in search_files:
+ for f in fnmatch.filter(files, p):
+ out_items.append('%s:True' % os.path.join(root, f))
+ return ' '.join(out_items)
+
+EXPORT_FUNCTIONS do_buildclean
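As a rough sketch of the file-checksums format produced by srctree_configure_hash_files() above (paths invented for illustration): absolute CONFIGURE_FILES entries are recorded directly with their existence flag, relative entries are matched by pattern under the external source tree, and each entry has the form path:exists, e.g.

    /src/myproject/configure.ac:True /src/myproject/m4/ax_foo.m4:True /etc/absolute-entry:False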
diff --git a/meta/classes/extrausers.bbclass b/meta/classes/extrausers.bbclass
index 94576b8872..c825c06df9 100644
--- a/meta/classes/extrausers.bbclass
+++ b/meta/classes/extrausers.bbclass
@@ -23,7 +23,7 @@ inherit useradd_base
PACKAGE_INSTALL:append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}"
# Image level user / group settings
-ROOTFS_POSTPROCESS_COMMAND:append = " set_user_group;"
+ROOTFS_POSTPROCESS_COMMAND:append = " set_user_group"
# Image level user / group settings
set_user_group () {
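For context (illustrative values, not part of the change): set_user_group above acts on EXTRA_USERS_PARAMS, which holds semicolon-separated useradd/groupadd/usermod style commands, e.g. from local.conf or an image recipe:

    EXTRA_USERS_PARAMS = "useradd -p '' tester; usermod -s /bin/sh tester;"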
diff --git a/meta/classes/go-vendor.bbclass b/meta/classes/go-vendor.bbclass
new file mode 100644
index 0000000000..1bbb99ac79
--- /dev/null
+++ b/meta/classes/go-vendor.bbclass
@@ -0,0 +1,211 @@
+#
+# Copyright 2023 (C) Weidmueller GmbH & Co KG
+# Author: Lukas Funke <lukas.funke@weidmueller.com>
+#
+# Handle Go vendor support for offline builds
+#
+# When importing Go modules, Go downloads the imported modules using
+# a network (proxy) connection ahead of the compile stage. This contradicts
+# the yocto build concept of fetching every source ahead of build-time
+# and supporting offline builds.
+#
+# To support offline builds, we use Go 'vendoring': module dependencies are
+# downloaded during the fetch phase and unpacked into the module's 'vendor'
+# folder. Additionally, a manifest file is generated for the 'vendor' folder.
+#
+
+inherit go-mod
+
+def go_src_uri(repo, version, path=None, subdir=None, \
+ vcs='git', replaces=None, pathmajor=None):
+
+ destsuffix = "git/src/import/vendor.fetch"
+ module_path = repo if not path else path
+
+ src_uri = "{}://{};name={}".format(vcs, repo, module_path.replace('/', '.'))
+ src_uri += ";destsuffix={}/{}@{}".format(destsuffix, repo, version)
+
+ if vcs == "git":
+ src_uri += ";nobranch=1;protocol=https"
+
+ src_uri += ";go_module_path={}".format(module_path)
+
+ if replaces:
+ src_uri += ";go_module_replacement={}".format(replaces)
+ if subdir:
+ src_uri += ";go_subdir={}".format(subdir)
+ if pathmajor:
+ src_uri += ";go_pathmajor={}".format(pathmajor)
+ src_uri += ";is_go_dependency=1"
+
+ return src_uri
+
+python do_vendor_unlink() {
+ go_import = d.getVar('GO_IMPORT')
+ source_dir = d.getVar('S')
+ linkname = os.path.join(source_dir, *['src', go_import, 'vendor'])
+
+ os.unlink(linkname)
+}
+
+addtask vendor_unlink before do_package after do_install
+
+python do_go_vendor() {
+ import shutil
+
+ src_uri = (d.getVar('SRC_URI') or "").split()
+
+ if not src_uri:
+ bb.fatal("SRC_URI is empty")
+
+ default_destsuffix = "git/src/import/vendor.fetch"
+ fetcher = bb.fetch2.Fetch(src_uri, d)
+ go_import = d.getVar('GO_IMPORT')
+ source_dir = d.getVar('S')
+
+ linkname = os.path.join(source_dir, *['src', go_import, 'vendor'])
+ vendor_dir = os.path.join(source_dir, *['src', 'import', 'vendor'])
+ import_dir = os.path.join(source_dir, *['src', 'import', 'vendor.fetch'])
+
+ if os.path.exists(vendor_dir):
+ # Nothing to do except re-establish link to actual vendor folder
+ if not os.path.exists(linkname):
+ os.symlink(vendor_dir, linkname)
+ return
+
+ bb.utils.mkdirhier(vendor_dir)
+
+ modules = {}
+
+ for url in fetcher.urls:
+ srcuri = fetcher.ud[url].host + fetcher.ud[url].path
+
+ # Skip non Go module src uris
+ if not fetcher.ud[url].parm.get('is_go_dependency'):
+ continue
+
+ destsuffix = fetcher.ud[url].parm.get('destsuffix')
+ # We derive the module repo / version in the following manner (example):
+ #
+ # destsuffix = git/src/import/vendor.fetch/github.com/foo/bar@v1.2.3
+ # p = github.com/foo/bar@v1.2.3
+ # repo = github.com/foo/bar
+ # version = v1.2.3
+
+ p = destsuffix[len(default_destsuffix)+1:]
+ repo, version = p.split('@')
+
+ module_path = fetcher.ud[url].parm.get('go_module_path')
+
+ subdir = fetcher.ud[url].parm.get('go_subdir')
+ subdir = None if not subdir else subdir
+
+ pathMajor = fetcher.ud[url].parm.get('go_pathmajor')
+ pathMajor = None if not pathMajor else pathMajor.strip('/')
+
+ if (repo, version) not in modules:
+ modules[(repo, version)] = {
+ "repo_path": os.path.join(import_dir, p),
+ "module_path": module_path,
+ "subdir": subdir,
+ "pathMajor": pathMajor }
+
+ for module_key, module in modules.items():
+
+ # only take the version which is explicitly listed
+ # as a dependency in the go.mod
+ module_path = module['module_path']
+ rootdir = module['repo_path']
+ subdir = module['subdir']
+ pathMajor = module['pathMajor']
+
+ src = rootdir
+
+ if subdir:
+ src = os.path.join(rootdir, subdir)
+
+ # If the module is released at major version 2 or higher, the module
+ # path must end with a major version suffix like /v2.
+ # This may or may not be part of the subdirectory name
+ #
+ # https://go.dev/ref/mod#modules-overview
+ if pathMajor:
+ tmp = os.path.join(src, pathMajor)
+ # source directory including major version path may or may not exist
+ if os.path.exists(tmp):
+ src = tmp
+
+ dst = os.path.join(vendor_dir, module_path)
+
+ bb.debug(1, "cp %s --> %s" % (src, dst))
+ shutil.copytree(src, dst, symlinks=True, dirs_exist_ok=True, \
+ ignore=shutil.ignore_patterns(".git", \
+ "vendor", \
+ "*._test.go"))
+
+ # If the root directory has a LICENSE file but not the subdir
+ # we copy the root license to the sub module since the license
+ # applies to all modules in the repository
+ # see https://go.dev/ref/mod#vcs-license
+ if subdir:
+ rootdirLicense = os.path.join(rootdir, "LICENSE")
+ subdirLicense = os.path.join(src, "LICENSE")
+
+ if not os.path.exists(subdirLicense) and \
+ os.path.exists(rootdirLicense):
+ shutil.copy2(rootdirLicense, subdirLicense)
+
+ # Copy vendor manifest
+ modules_txt_src = os.path.join(d.getVar('WORKDIR'), "modules.txt")
+ bb.debug(1, "cp %s --> %s" % (modules_txt_src, vendor_dir))
+ shutil.copy2(modules_txt_src, vendor_dir)
+
+ # Clean up vendor dir
+ # We only require the modules in the modules_txt file
+ fetched_paths = set([os.path.relpath(x[0], vendor_dir) for x in os.walk(vendor_dir)])
+
+ # Remove toplevel dir
+ fetched_paths.remove('.')
+
+ vendored_paths = set()
+ replaced_paths = dict()
+ with open(modules_txt_src) as f:
+ for line in f:
+ if not line.startswith("#"):
+ line = line.strip()
+ vendored_paths.add(line)
+
+ # Add toplevel dirs to the set of vendored paths, as we want to keep them
+ topdir = os.path.dirname(line)
+ while len(topdir):
+ if not topdir in vendored_paths:
+ vendored_paths.add(topdir)
+
+ topdir = os.path.dirname(topdir)
+ else:
+ replaced_module = line.split("=>")
+ if len(replaced_module) > 1:
+ # This module has been replaced, use a local path.
+ # We parse the line that has the pattern "# module-name [module-version] => local-path"
+ actual_path = replaced_module[1].strip()
+ vendored_name = replaced_module[0].split()[1]
+ bb.debug(1, "added vendored name %s for actual path %s" % (vendored_name, actual_path))
+ replaced_paths[vendored_name] = actual_path
+
+ for path in fetched_paths:
+ if path not in vendored_paths:
+ realpath = os.path.join(vendor_dir, path)
+ if os.path.exists(realpath):
+ shutil.rmtree(realpath)
+
+ for vendored_name, replaced_path in replaced_paths.items():
+ symlink_target = os.path.join(source_dir, *['src', go_import, replaced_path])
+ symlink_name = os.path.join(vendor_dir, vendored_name)
+ bb.debug(1, "vendored name %s, symlink name %s" % (vendored_name, symlink_name))
+ os.symlink(symlink_target, symlink_name)
+
+ # Create a symlink to the actual directory
+ os.symlink(vendor_dir, linkname)
+}
+
+addtask go_vendor before do_patch after do_unpack
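
For reference, the destsuffix handling above boils down to stripping the fixed prefix and splitting on '@'. A minimal standalone sketch of that mapping, assuming the same default destsuffix layout (the helper name parse_module_key and the example path are illustrative only, not part of the class):

def parse_module_key(destsuffix, default_destsuffix="git/src/import/vendor.fetch"):
    # Drop the fixed prefix plus the joining '/' to obtain "repo@version"
    p = destsuffix[len(default_destsuffix) + 1:]
    repo, version = p.split('@')
    return repo, version

# parse_module_key("git/src/import/vendor.fetch/github.com/foo/bar@v1.2.3")
# returns ("github.com/foo/bar", "v1.2.3")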
diff --git a/meta/classes/icecc.bbclass b/meta/classes/icecc.bbclass
index 312e0f17b5..159cae20f8 100644
--- a/meta/classes/icecc.bbclass
+++ b/meta/classes/icecc.bbclass
@@ -428,22 +428,18 @@ set_icecc_env() {
bbnote "Using icecc tarball: $ICECC_VERSION"
}
-do_configure[network] = "1"
do_configure:prepend() {
set_icecc_env
}
-do_compile[network] = "1"
do_compile:prepend() {
set_icecc_env
}
-do_compile_kernelmodules[network] = "1"
do_compile_kernelmodules:prepend() {
set_icecc_env
}
-do_install[network] = "1"
do_install:prepend() {
set_icecc_env
}
@@ -457,3 +453,9 @@ ICECC_SDK_HOST_TASK:pn-uninative-tarball = ""
# Add the toolchain scripts to the SDK
TOOLCHAIN_HOST_TASK:append = " ${ICECC_SDK_HOST_TASK}"
+
+python () {
+ if d.getVar('ICECC_DISABLED') != "1":
+ for task in ['do_configure', 'do_compile', 'do_compile_kernelmodules', 'do_install']:
+ d.setVarFlag(task, 'network', '1')
+}
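
The anonymous python block above only requests network access for the build tasks when icecc is actually in use. A minimal local.conf sketch of the toggle it keys on, assuming icecc is enabled globally (values are placeholders for a typical setup):

INHERIT += "icecc"
# Setting this to "1" disables icecc and, with the change above,
# also leaves the network flag unset on these tasks
ICECC_DISABLED = "1"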
diff --git a/meta/classes/image-buildinfo.bbclass b/meta/classes/image-buildinfo.bbclass
index 206cc9d57d..b83ce650ad 100644
--- a/meta/classes/image-buildinfo.bbclass
+++ b/meta/classes/image-buildinfo.bbclass
@@ -76,6 +76,6 @@ python buildinfo_sdk () {
bb.build.exec_func("buildinfo", d)
}
-IMAGE_PREPROCESS_COMMAND += "buildinfo_image;"
-POPULATE_SDK_PRE_TARGET_COMMAND += "buildinfo_sdk;"
+IMAGE_PREPROCESS_COMMAND += "buildinfo_image"
+POPULATE_SDK_PRE_TARGET_COMMAND += "buildinfo_sdk"
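
With the trailing semicolons gone, these variables are plain space-separated lists of function names. A minimal sketch of appending a further hook under that convention (my_extra_buildinfo is a hypothetical function name, not part of the class):

IMAGE_PREPROCESS_COMMAND += "my_extra_buildinfo"

python my_extra_buildinfo () {
    bb.note("extra build info for %s" % d.getVar("IMAGE_BASENAME"))
}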
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index 10a4ef9c37..b6c09969b1 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -30,6 +30,9 @@ python multilib_virtclass_handler () {
if val:
e.data.setVar(name + "_MULTILIB_ORIGINAL", val)
+ # We would prefer not to need this, but dependencies on NON_MULTILIB_RECIPES don't work without it
+ d.setVar("SSTATE_ARCHS_TUNEPKG", "${@all_multilib_tune_values(d, 'TUNE_PKGARCH')}")
+
overrides = e.data.getVar("OVERRIDES", False)
pn = e.data.getVar("PN", False)
overrides = overrides.replace("pn-${PN}", "pn-${PN}:pn-" + pn)
@@ -51,6 +54,7 @@ python multilib_virtclass_handler () {
e.data.setVar("RECIPE_SYSROOT", "${WORKDIR}/recipe-sysroot")
e.data.setVar("STAGING_DIR_TARGET", "${WORKDIR}/recipe-sysroot")
e.data.setVar("STAGING_DIR_HOST", "${WORKDIR}/recipe-sysroot")
+ e.data.setVar("RECIPE_SYSROOT_MANIFEST_SUBDIR", "nativesdk-" + variant)
e.data.setVar("MLPREFIX", variant + "-")
override = ":virtclass-multilib-" + variant
e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override)
@@ -136,6 +140,7 @@ python multilib_virtclass_handler_postkeyexp () {
return
clsextend.map_depends_variable("DEPENDS")
+ clsextend.map_depends_variable("PACKAGE_WRITE_DEPS")
clsextend.map_variable("PROVIDES")
if bb.data.inherits_class('cross-canadian', d):
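
For context, a minimal local.conf sketch of the multilib configuration these handlers operate on; the lib32/x86 pairing is only an example for an x86-64 machine:

MACHINE = "qemux86-64"
require conf/multilib.conf
MULTILIBS = "multilib:lib32"
DEFAULTTUNE:virtclass-multilib-lib32 = "x86"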
diff --git a/meta/classes/multilib_global.bbclass b/meta/classes/multilib_global.bbclass
index dcd89b2f63..6095d278dd 100644
--- a/meta/classes/multilib_global.bbclass
+++ b/meta/classes/multilib_global.bbclass
@@ -195,6 +195,7 @@ python multilib_virtclass_handler_global () {
# from a copy of the datastore
localdata = bb.data.createCopy(d)
localdata.delVar("KERNEL_VERSION")
+ localdata.delVar("KERNEL_VERSION_PKG_NAME")
variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split()
diff --git a/meta/classes/own-mirrors.bbclass b/meta/classes/own-mirrors.bbclass
index 2f24ff1830..36c7f8e3f3 100644
--- a/meta/classes/own-mirrors.bbclass
+++ b/meta/classes/own-mirrors.bbclass
@@ -17,4 +17,6 @@ https?://.*/.* ${SOURCE_MIRROR_URL} \
ftp://.*/.* ${SOURCE_MIRROR_URL} \
npm://.*/?.* ${SOURCE_MIRROR_URL} \
s3://.*/.* ${SOURCE_MIRROR_URL} \
+crate://.*/.* ${SOURCE_MIRROR_URL} \
+gs://.*/.* ${SOURCE_MIRROR_URL} \
"
diff --git a/meta/classes/recipe_sanity.bbclass b/meta/classes/recipe_sanity.bbclass
index 1c2e24c6a1..a5cc4315fb 100644
--- a/meta/classes/recipe_sanity.bbclass
+++ b/meta/classes/recipe_sanity.bbclass
@@ -16,7 +16,7 @@ def bad_runtime_vars(cfgdata, d):
for var in d.getVar("__recipe_sanity_badruntimevars").split():
val = d.getVar(var, False)
if val and val != cfgdata.get(var):
- __note("%s should be %s_${PN}" % (var, var), d)
+ __note("%s should be %s:${PN}" % (var, var), d)
__recipe_sanity_reqvars = "DESCRIPTION"
__recipe_sanity_reqdiffvars = ""
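
The corrected note points at the current override syntax for per-package runtime variables. A minimal recipe sketch of the form it now recommends (bash is just an example dependency):

# preferred, current override syntax
RDEPENDS:${PN} = "bash"
# rather than the old style
# RDEPENDS_${PN} = "bash"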
diff --git a/meta/classes/report-error.bbclass b/meta/classes/report-error.bbclass
index 2f692fbbcc..1452513a66 100644
--- a/meta/classes/report-error.bbclass
+++ b/meta/classes/report-error.bbclass
@@ -39,6 +39,19 @@ def get_conf_data(e, filename):
jsonstring=jsonstring + line
return jsonstring
+def get_common_data(e):
+ data = {}
+ data['machine'] = e.data.getVar("MACHINE")
+ data['build_sys'] = e.data.getVar("BUILD_SYS")
+ data['distro'] = e.data.getVar("DISTRO")
+ data['target_sys'] = e.data.getVar("TARGET_SYS")
+ data['branch_commit'] = str(oe.buildcfg.detect_branch(e.data)) + ": " + str(oe.buildcfg.detect_revision(e.data))
+ data['bitbake_version'] = e.data.getVar("BB_VERSION")
+ data['layer_version'] = get_layers_branch_rev(e.data)
+ data['local_conf'] = get_conf_data(e, 'local.conf')
+ data['auto_conf'] = get_conf_data(e, 'auto.conf')
+ return data
+
python errorreport_handler () {
import json
import codecs
@@ -56,19 +69,10 @@ python errorreport_handler () {
if isinstance(e, bb.event.BuildStarted):
bb.utils.mkdirhier(logpath)
data = {}
- machine = e.data.getVar("MACHINE")
- data['machine'] = machine
- data['build_sys'] = e.data.getVar("BUILD_SYS")
+ data = get_common_data(e)
data['nativelsb'] = nativelsb()
- data['distro'] = e.data.getVar("DISTRO")
- data['target_sys'] = e.data.getVar("TARGET_SYS")
data['failures'] = []
data['component'] = " ".join(e.getPkgs())
- data['branch_commit'] = str(oe.buildcfg.detect_branch(e.data)) + ": " + str(oe.buildcfg.detect_revision(e.data))
- data['bitbake_version'] = e.data.getVar("BB_VERSION")
- data['layer_version'] = get_layers_branch_rev(e.data)
- data['local_conf'] = get_conf_data(e, 'local.conf')
- data['auto_conf'] = get_conf_data(e, 'auto.conf')
lock = bb.utils.lockfile(datafile + '.lock')
errorreport_savedata(e, data, "error-report.txt")
bb.utils.unlockfile(lock)
@@ -107,6 +111,37 @@ python errorreport_handler () {
errorreport_savedata(e, jsondata, "error-report.txt")
bb.utils.unlockfile(lock)
+ elif isinstance(e, bb.event.NoProvider):
+ bb.utils.mkdirhier(logpath)
+ data = {}
+ data = get_common_data(e)
+ data['nativelsb'] = nativelsb()
+ data['failures'] = []
+ data['component'] = str(e._item)
+ taskdata={}
+ taskdata['log'] = str(e)
+ taskdata['package'] = str(e._item)
+ taskdata['task'] = "Nothing provides " + "'" + str(e._item) + "'"
+ data['failures'].append(taskdata)
+ lock = bb.utils.lockfile(datafile + '.lock')
+ errorreport_savedata(e, data, "error-report.txt")
+ bb.utils.unlockfile(lock)
+
+ elif isinstance(e, bb.event.ParseError):
+ bb.utils.mkdirhier(logpath)
+ data = {}
+ data = get_common_data(e)
+ data['nativelsb'] = nativelsb()
+ data['failures'] = []
+ data['component'] = "parse"
+ taskdata={}
+ taskdata['log'] = str(e._msg)
+ taskdata['task'] = str(e._msg)
+ data['failures'].append(taskdata)
+ lock = bb.utils.lockfile(datafile + '.lock')
+ errorreport_savedata(e, data, "error-report.txt")
+ bb.utils.unlockfile(lock)
+
elif isinstance(e, bb.event.BuildCompleted):
lock = bb.utils.lockfile(datafile + '.lock')
jsondata = json.loads(errorreport_getdata(e))
@@ -120,4 +155,4 @@ python errorreport_handler () {
}
addhandler errorreport_handler
-errorreport_handler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted bb.build.TaskFailed"
+errorreport_handler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted bb.build.TaskFailed bb.event.NoProvider bb.event.ParseError"
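
The handler now also fires for NoProvider and ParseError events. A minimal sketch of the same addhandler/eventmask pattern in isolation (my_event_logger is a hypothetical handler name, not part of this class):

python my_event_logger () {
    if isinstance(e, bb.event.NoProvider):
        bb.warn("nothing provides %s" % str(e._item))
    elif isinstance(e, bb.event.ParseError):
        bb.warn("parse error: %s" % str(e._msg))
}
addhandler my_event_logger
my_event_logger[eventmask] = "bb.event.NoProvider bb.event.ParseError"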
diff --git a/meta/classes/rm_work.bbclass b/meta/classes/rm_work.bbclass
index c493efff2f..52ecfafb72 100644
--- a/meta/classes/rm_work.bbclass
+++ b/meta/classes/rm_work.bbclass
@@ -33,6 +33,13 @@ BB_SCHEDULER ?= "completion"
BB_TASK_IONICE_LEVEL:task-rm_work = "3.0"
do_rm_work () {
+ # Force use of the HOSTTOOLS 'rm' - otherwise the SYSROOT_NATIVE 'rm' can be selected depending on PATH
+ # Avoids a race condition accessing 'rm' when deleting WORKDIR folders at the end of this function
+ RM_BIN="$(PATH=${HOSTTOOLS_DIR} command -v rm)"
+ if [ -z "${RM_BIN}" ]; then
+ bbfatal "Binary 'rm' not found in HOSTTOOLS_DIR, cannot remove WORKDIR data."
+ fi
+
# If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe.
for p in ${RM_WORK_EXCLUDE}; do
if [ "$p" = "${PN}" ]; then
@@ -50,55 +57,58 @@ do_rm_work () {
# Change normal stamps into setscene stamps as they better reflect the
# fact WORKDIR is now empty
# Also leave noexec stamps since setscene stamps don't cover them
- cd `dirname ${STAMP}`
- for i in `basename ${STAMP}`*
- do
- case $i in
- *sigdata*|*sigbasedata*)
- # Save/skip anything that looks like a signature data file.
- ;;
- *do_image_complete_setscene*|*do_image_qa_setscene*)
- # Ensure we don't 'stack' setscene extensions to these stamps with the sections below
- ;;
- *do_image_complete*)
- # Promote do_image_complete stamps to setscene versions (ahead of *do_image* below)
- mv $i `echo $i | sed -e "s#do_image_complete#do_image_complete_setscene#"`
- ;;
- *do_image_qa*)
- # Promote do_image_qa stamps to setscene versions (ahead of *do_image* below)
- mv $i `echo $i | sed -e "s#do_image_qa#do_image_qa_setscene#"`
- ;;
- *do_package_write*|*do_rootfs*|*do_image*|*do_bootimg*|*do_write_qemuboot_conf*|*do_build*)
- ;;
- *do_addto_recipe_sysroot*)
- # Preserve recipe-sysroot-native if do_addto_recipe_sysroot has been used
- excludes="$excludes recipe-sysroot-native"
- ;;
- *do_package|*do_package.*|*do_package_setscene.*)
- # We remove do_package entirely, including any
- # sstate version since otherwise we'd need to leave 'plaindirs' around
- # such as 'packages' and 'packages-split' and these can be large. No end
- # of chain tasks depend directly on do_package anymore.
- rm -f -- $i;
- ;;
- *_setscene*)
- # Skip stamps which are already setscene versions
- ;;
- *)
- # For everything else: if suitable, promote the stamp to a setscene
- # version, otherwise remove it
- for j in ${SSTATETASKS} do_shared_workdir
- do
- case $i in
- *$j|*$j.*)
- mv $i `echo $i | sed -e "s#${j}#${j}_setscene#"`
- break
- ;;
- esac
- done
- rm -f -- $i
- esac
- done
+ STAMPDIR=`dirname ${STAMP}`
+ if test -d $STAMPDIR; then
+ cd $STAMPDIR
+ for i in `basename ${STAMP}`*
+ do
+ case $i in
+ *sigdata*|*sigbasedata*)
+ # Save/skip anything that looks like a signature data file.
+ ;;
+ *do_image_complete_setscene*|*do_image_qa_setscene*)
+ # Ensure we don't 'stack' setscene extensions to these stamps with the sections below
+ ;;
+ *do_image_complete*)
+ # Promote do_image_complete stamps to setscene versions (ahead of *do_image* below)
+ mv $i `echo $i | sed -e "s#do_image_complete#do_image_complete_setscene#"`
+ ;;
+ *do_image_qa*)
+ # Promote do_image_qa stamps to setscene versions (ahead of *do_image* below)
+ mv $i `echo $i | sed -e "s#do_image_qa#do_image_qa_setscene#"`
+ ;;
+ *do_package_write*|*do_rootfs*|*do_image*|*do_bootimg*|*do_write_qemuboot_conf*|*do_build*)
+ ;;
+ *do_addto_recipe_sysroot*)
+ # Preserve recipe-sysroot-native if do_addto_recipe_sysroot has been used
+ excludes="$excludes recipe-sysroot-native"
+ ;;
+ *do_package|*do_package.*|*do_package_setscene.*)
+ # We remove do_package entirely, including any
+ # sstate version since otherwise we'd need to leave 'plaindirs' around
+ # such as 'packages' and 'packages-split' and these can be large. No end
+ # of chain tasks depend directly on do_package anymore.
+ "${RM_BIN}" -f -- $i;
+ ;;
+ *_setscene*)
+ # Skip stamps which are already setscene versions
+ ;;
+ *)
+ # For everything else: if suitable, promote the stamp to a setscene
+ # version, otherwise remove it
+ for j in ${SSTATETASKS} do_shared_workdir
+ do
+ case $i in
+ *$j|*$j.*)
+ mv $i `echo $i | sed -e "s#${j}#${j}_setscene#"`
+ break
+ ;;
+ esac
+ done
+ "${RM_BIN}" -f -- $i
+ esac
+ done
+ fi
cd ${WORKDIR}
for dir in *
@@ -106,12 +116,14 @@ do_rm_work () {
# Retain only logs and other files in temp, safely ignore
# failures of removing pseudo folders on NFS2/3 server.
if [ $dir = 'pseudo' ]; then
- rm -rf -- $dir 2> /dev/null || true
+ "${RM_BIN}" -rf -- $dir 2> /dev/null || true
elif ! echo "$excludes" | grep -q -w "$dir"; then
- rm -rf -- $dir
+ "${RM_BIN}" -rf -- $dir
fi
done
}
+do_rm_work[vardepsexclude] += "SSTATETASKS"
+
do_rm_work_all () {
:
}
@@ -178,7 +190,7 @@ python inject_rm_work() {
# other recipes and thus will typically run much later than completion of
# work in the recipe itself.
# In practice, addtask() here merely updates the dependencies.
- bb.build.addtask('do_rm_work', 'do_build', ' '.join(deps), d)
+ bb.build.addtask('do_rm_work', 'do_rm_work_all do_build', ' '.join(deps), d)
# Always update do_build_without_rm_work dependencies.
bb.build.addtask('do_build_without_rm_work', '', ' '.join(deps), d)
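
For reference, a minimal local.conf sketch of enabling rm_work and excluding a recipe, which is the path taken by the RM_WORK_EXCLUDE loop above (busybox is only an example recipe name):

INHERIT += "rm_work"
RM_WORK_EXCLUDE += "busybox"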
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
deleted file mode 100644
index 953cafd285..0000000000
--- a/meta/classes/siteconfig.bbclass
+++ /dev/null
@@ -1,39 +0,0 @@
-#
-# Copyright OpenEmbedded Contributors
-#
-# SPDX-License-Identifier: MIT
-#
-
-python siteconfig_do_siteconfig () {
- shared_state = sstate_state_fromvars(d)
- if shared_state['task'] != 'populate_sysroot':
- return
- if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')):
- bb.debug(1, "No site_config directory, skipping do_siteconfig")
- return
- sstate_install(shared_state, d)
- bb.build.exec_func('do_siteconfig_gencache', d)
- sstate_clean(shared_state, d)
-}
-
-EXTRASITECONFIG ?= ""
-
-siteconfig_do_siteconfig_gencache () {
- mkdir -p ${WORKDIR}/site_config_${MACHINE}
- gen-site-config ${FILE_DIRNAME}/site_config \
- >${WORKDIR}/site_config_${MACHINE}/configure.ac
- cd ${WORKDIR}/site_config_${MACHINE}
- autoconf
- rm -f ${BPN}_cache
- CONFIG_SITE="" ${EXTRASITECONFIG} ./configure ${CONFIGUREOPTS} --cache-file ${BPN}_cache
- sed -n -e "/ac_cv_c_bigendian/p" -e "/ac_cv_sizeof_/p" \
- -e "/ac_cv_type_/p" -e "/ac_cv_header_/p" -e "/ac_cv_func_/p" \
- < ${BPN}_cache > ${BPN}_config
- mkdir -p ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
- cp ${BPN}_config ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
-
-}
-
-do_populate_sysroot[sstate-interceptfuncs] += "do_siteconfig "
-
-EXPORT_FUNCTIONS do_siteconfig do_siteconfig_gencache
diff --git a/meta/classes/testexport.bbclass b/meta/classes/testexport.bbclass
deleted file mode 100644
index f7c5242dc5..0000000000
--- a/meta/classes/testexport.bbclass
+++ /dev/null
@@ -1,180 +0,0 @@
-# Copyright (C) 2016 Intel Corporation
-#
-# SPDX-License-Identifier: MIT
-#
-# testexport.bbclass allows to execute runtime test outside OE environment.
-# Most of the tests are commands run on target image over ssh.
-# To use it add testexport to global inherit and call your target image with -c testexport
-# You can try it out like this:
-# - First build an image. i.e. core-image-sato
-# - Add INHERIT += "testexport" in local.conf
-# - Then bitbake core-image-sato -c testexport. That will generate the directory structure
-# to execute the runtime tests using runexported.py.
-#
-# For more information on TEST_SUITES check testimage class.
-
-TEST_LOG_DIR ?= "${WORKDIR}/testexport"
-TEST_EXPORT_DIR ?= "${TMPDIR}/testexport/${PN}"
-TEST_EXPORT_PACKAGED_DIR ?= "packages/packaged"
-TEST_EXPORT_EXTRACTED_DIR ?= "packages/extracted"
-
-TEST_TARGET ?= "simpleremote"
-TEST_TARGET_IP ?= ""
-TEST_SERVER_IP ?= ""
-
-require conf/testexport.conf
-
-TEST_EXPORT_SDK_ENABLED ?= "0"
-
-TEST_EXPORT_DEPENDS = ""
-TEST_EXPORT_DEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'cpio-native:do_populate_sysroot', '', d)}"
-TEST_EXPORT_DEPENDS += "${@bb.utils.contains('TEST_EXPORT_SDK_ENABLED', '1', 'testexport-tarball:do_populate_sdk', '', d)}"
-TEST_EXPORT_LOCK = "${TMPDIR}/testimage.lock"
-
-addtask testexport
-do_testexport[nostamp] = "1"
-do_testexport[depends] += "${TEST_EXPORT_DEPENDS} ${TESTIMAGEDEPENDS}"
-do_testexport[lockfiles] += "${TEST_EXPORT_LOCK}"
-
-python do_testexport() {
- testexport_main(d)
-}
-
-def testexport_main(d):
- import json
- import logging
-
- from oeqa.runtime.context import OERuntimeTestContext
- from oeqa.runtime.context import OERuntimeTestContextExecutor
-
- image_name = ("%s/%s" % (d.getVar('DEPLOY_DIR_IMAGE'),
- d.getVar('IMAGE_LINK_NAME')))
-
- tdname = "%s.testdata.json" % image_name
- td = json.load(open(tdname, "r"))
-
- logger = logging.getLogger("BitBake")
-
- target = OERuntimeTestContextExecutor.getTarget(
- d.getVar("TEST_TARGET"), None, d.getVar("TEST_TARGET_IP"),
- d.getVar("TEST_SERVER_IP"))
-
- host_dumper = OERuntimeTestContextExecutor.getHostDumper(
- d.getVar("testimage_dump_host"), d.getVar("TESTIMAGE_DUMP_DIR"))
-
- image_manifest = "%s.manifest" % image_name
- image_packages = OERuntimeTestContextExecutor.readPackagesManifest(image_manifest)
-
- extract_dir = d.getVar("TEST_EXTRACTED_DIR")
-
- tc = OERuntimeTestContext(td, logger, target, host_dumper,
- image_packages, extract_dir)
-
- copy_needed_files(d, tc)
-
-def copy_needed_files(d, tc):
- import shutil
- import oe.path
-
- from oeqa.utils.package_manager import _get_json_file
- from oeqa.core.utils.test import getSuiteCasesFiles
-
- export_path = d.getVar('TEST_EXPORT_DIR')
- corebase_path = d.getVar('COREBASE')
-
- # Clean everything before starting
- oe.path.remove(export_path)
- bb.utils.mkdirhier(os.path.join(export_path, 'lib', 'oeqa'))
-
- # The source of files to copy are relative to 'COREBASE' directory
- # The destination is relative to 'TEST_EXPORT_DIR'
- # Because we are squashing the libraries, we need to remove
- # the layer/script directory
- files_to_copy = [ os.path.join('meta', 'lib', 'oeqa', 'core'),
- os.path.join('meta', 'lib', 'oeqa', 'runtime'),
- os.path.join('meta', 'lib', 'oeqa', 'files'),
- os.path.join('meta', 'lib', 'oeqa', 'utils'),
- os.path.join('scripts', 'oe-test'),
- os.path.join('scripts', 'lib', 'argparse_oe.py'),
- os.path.join('scripts', 'lib', 'scriptutils.py'), ]
-
- for f in files_to_copy:
- src = os.path.join(corebase_path, f)
- dst = os.path.join(export_path, f.split('/', 1)[-1])
- if os.path.isdir(src):
- oe.path.copytree(src, dst)
- else:
- shutil.copy2(src, dst)
-
- # Remove cases and just copy the ones specified
- cases_path = os.path.join(export_path, 'lib', 'oeqa', 'runtime', 'cases')
- oe.path.remove(cases_path)
- bb.utils.mkdirhier(cases_path)
- test_paths = get_runtime_paths(d)
- test_modules = d.getVar('TEST_SUITES').split()
- tc.loadTests(test_paths, modules=test_modules)
- for f in getSuiteCasesFiles(tc.suites):
- shutil.copy2(f, cases_path)
- json_file = _get_json_file(f)
- if json_file:
- shutil.copy2(json_file, cases_path)
-
- # Copy test data
- image_name = ("%s/%s" % (d.getVar('DEPLOY_DIR_IMAGE'),
- d.getVar('IMAGE_LINK_NAME')))
- image_manifest = "%s.manifest" % image_name
- tdname = "%s.testdata.json" % image_name
- test_data_path = os.path.join(export_path, 'data')
- bb.utils.mkdirhier(test_data_path)
- shutil.copy2(image_manifest, os.path.join(test_data_path, 'manifest'))
- shutil.copy2(tdname, os.path.join(test_data_path, 'testdata.json'))
-
- for subdir, dirs, files in os.walk(export_path):
- for dir in dirs:
- if dir == '__pycache__':
- shutil.rmtree(os.path.join(subdir, dir))
-
- # Create tar file for common parts of testexport
- testexport_create_tarball(d, "testexport.tar.gz", d.getVar("TEST_EXPORT_DIR"))
-
- # Copy packages needed for runtime testing
- package_extraction(d, tc.suites)
- test_pkg_dir = d.getVar("TEST_NEEDED_PACKAGES_DIR")
- if os.path.isdir(test_pkg_dir) and os.listdir(test_pkg_dir):
- export_pkg_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"), "packages")
- oe.path.copytree(test_pkg_dir, export_pkg_dir)
- # Create tar file for packages needed by the DUT
- testexport_create_tarball(d, "testexport_packages_%s.tar.gz" % d.getVar("MACHINE"), export_pkg_dir)
-
- # Copy SDK
- if d.getVar("TEST_EXPORT_SDK_ENABLED") == "1":
- sdk_deploy = d.getVar("SDK_DEPLOY")
- tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME")
- tarball_path = os.path.join(sdk_deploy, tarball_name)
- export_sdk_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"),
- d.getVar("TEST_EXPORT_SDK_DIR"))
- bb.utils.mkdirhier(export_sdk_dir)
- shutil.copy2(tarball_path, export_sdk_dir)
-
- # Create tar file for the sdk
- testexport_create_tarball(d, "testexport_sdk_%s.tar.gz" % d.getVar("SDK_ARCH"), export_sdk_dir)
-
- bb.plain("Exported tests to: %s" % export_path)
-
-def testexport_create_tarball(d, tar_name, src_dir):
-
- import tarfile
-
- tar_path = os.path.join(d.getVar("TEST_EXPORT_DIR"), tar_name)
- current_dir = os.getcwd()
- src_dir = src_dir.rstrip('/')
- dir_name = os.path.dirname(src_dir)
- base_name = os.path.basename(src_dir)
-
- os.chdir(dir_name)
- tar = tarfile.open(tar_path, "w:gz")
- tar.add(base_name)
- tar.close()
- os.chdir(current_dir)
-
-IMAGE_CLASSES += "testimage"
diff --git a/meta/classes/useradd-staticids.bbclass b/meta/classes/useradd-staticids.bbclass
index abe484eb46..1dbcba2bf1 100644
--- a/meta/classes/useradd-staticids.bbclass
+++ b/meta/classes/useradd-staticids.bbclass
@@ -47,7 +47,7 @@ def update_useradd_static_config(d):
def handle_missing_id(id, type, pkg, files, var, value):
# For backwards compatibility we accept "1" in addition to "error"
error_dynamic = d.getVar('USERADD_ERROR_DYNAMIC')
- msg = "%s - %s: %sname %s does not have a static ID defined." % (d.getVar('PN'), pkg, type, id)
+ msg = 'Recipe %s, package %s: %sname "%s" does not have a static ID defined.' % (d.getVar('PN'), pkg, type, id)
if files:
msg += " Add %s to one of these files: %s" % (id, files)
else:
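
The reworded message shows up when static IDs are enforced. A minimal local.conf sketch of the configuration that triggers it, assuming conventional table file names in your layer:

USERADDEXTENSION = "useradd-staticids"
USERADD_ERROR_DYNAMIC = "error"
USERADD_UID_TABLES = "files/passwd"
USERADD_GID_TABLES = "files/group"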
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass
index 4d3bd9a5f5..16a65ac323 100644
--- a/meta/classes/useradd.bbclass
+++ b/meta/classes/useradd.bbclass
@@ -103,6 +103,18 @@ fi
}
useradd_sysroot () {
+ user_group_groupmems_add_sysroot user
+}
+
+groupadd_sysroot () {
+ user_group_groupmems_add_sysroot group
+}
+
+groupmemsadd_sysroot () {
+ user_group_groupmems_add_sysroot groupmems
+}
+
+user_group_groupmems_add_sysroot () {
# Pseudo may (do_prepare_recipe_sysroot) or may not (do_populate_sysroot_setscene) be running
# at this point so we're explicit about the environment so pseudo can load if
# not already present.
@@ -131,9 +143,15 @@ useradd_sysroot () {
fi
# Add groups and users defined for all recipe packages
- GROUPADD_PARAM="${@get_all_cmd_params(d, 'groupadd')}"
- USERADD_PARAM="${@get_all_cmd_params(d, 'useradd')}"
- GROUPMEMS_PARAM="${@get_all_cmd_params(d, 'groupmems')}"
+ if test "$1" = "group"; then
+ GROUPADD_PARAM="${@get_all_cmd_params(d, 'groupadd')}"
+ elif test "$1" = "user"; then
+ USERADD_PARAM="${@get_all_cmd_params(d, 'useradd')}"
+ elif test "$1" = "groupmems"; then
+ GROUPMEMS_PARAM="${@get_all_cmd_params(d, 'groupmems')}"
+ elif test "x$1" = "x"; then
+ bbwarn "missing type of passwd db action"
+ fi
# Tell the system to use the environment vars
UA_SYSROOT=1
@@ -148,25 +166,30 @@ useradd_sysroot () {
EXTRA_STAGING_FIXMES += "PSEUDO_SYSROOT PSEUDO_LOCALSTATEDIR LOGFIFO"
python useradd_sysroot_sstate () {
- scriptfile = None
- task = d.getVar("BB_CURRENTTASK")
- if task == "package_setscene":
- bb.build.exec_func("useradd_sysroot", d)
- elif task == "prepare_recipe_sysroot":
- # Used to update this recipe's own sysroot so the user/groups are available to do_install
- scriptfile = d.expand("${RECIPE_SYSROOT}${bindir}/postinst-useradd-${PN}")
- bb.build.exec_func("useradd_sysroot", d)
- elif task == "populate_sysroot":
- # Used when installed in dependent task sysroots
- scriptfile = d.expand("${SYSROOT_DESTDIR}${bindir}/postinst-useradd-${PN}")
-
- if scriptfile:
- bb.utils.mkdirhier(os.path.dirname(scriptfile))
- with open(scriptfile, 'w') as script:
- script.write("#!/bin/sh\n")
- bb.data.emit_func("useradd_sysroot", script, d)
- script.write("useradd_sysroot\n")
- os.chmod(scriptfile, 0o755)
+ for type, sort_prefix in [("group", "01"), ("user", "02"), ("groupmems", "03")]:
+ scriptfile = None
+ task = d.getVar("BB_CURRENTTASK")
+ if task == "package_setscene":
+ bb.build.exec_func(type + "add_sysroot", d)
+ elif task == "prepare_recipe_sysroot":
+ # Used to update this recipe's own sysroot so the user/groups are available to do_install
+
+ # If do_populate_sysroot is triggered and we also wrote the file here, there would be
+ # overlapping files. See usergrouptests.UserGroupTests.test_add_task_between_p_sysroot_and_package
+ scriptfile = d.expand("${RECIPE_SYSROOT}${bindir}/postinst-useradd-" + sort_prefix + type + "-${PN}-recipedebug")
+
+ bb.build.exec_func(type + "add_sysroot", d)
+ elif task == "populate_sysroot":
+ # Used when installed in dependent task sysroots
+ scriptfile = d.expand("${SYSROOT_DESTDIR}${bindir}/postinst-useradd-" + sort_prefix + type + "-${PN}")
+
+ if scriptfile:
+ bb.utils.mkdirhier(os.path.dirname(scriptfile))
+ with open(scriptfile, 'w') as script:
+ script.write("#!/bin/sh -e\n")
+ bb.data.emit_func(type + "add_sysroot", script, d)
+ script.write(type + "add_sysroot\n")
+ os.chmod(scriptfile, 0o755)
}
do_prepare_recipe_sysroot[postfuncs] += "${SYSROOTFUNC}"
@@ -177,9 +200,11 @@ SYSROOT_PREPROCESS_FUNCS += "${SYSROOTFUNC}"
SSTATEPREINSTFUNCS:append:class-target = " useradd_sysroot_sstate"
+USERADD_DEPENDS ??= ""
+DEPENDS += "${USERADD_DEPENDS}"
do_package_setscene[depends] += "${USERADDSETSCENEDEPS}"
do_populate_sysroot_setscene[depends] += "${USERADDSETSCENEDEPS}"
-USERADDSETSCENEDEPS:class-target = "${MLPREFIX}base-passwd:do_populate_sysroot_setscene pseudo-native:do_populate_sysroot_setscene shadow-native:do_populate_sysroot_setscene ${MLPREFIX}shadow-sysroot:do_populate_sysroot_setscene"
+USERADDSETSCENEDEPS:class-target = "${MLPREFIX}base-passwd:do_populate_sysroot_setscene pseudo-native:do_populate_sysroot_setscene shadow-native:do_populate_sysroot_setscene ${MLPREFIX}shadow-sysroot:do_populate_sysroot_setscene ${@' '.join(['%s:do_populate_sysroot_setscene' % pkg for pkg in d.getVar("USERADD_DEPENDS").split()])}"
USERADDSETSCENEDEPS = ""
# Recipe parse-time sanity checks
@@ -190,7 +215,7 @@ def update_useradd_after_parse(d):
bb.fatal("%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE', False))
for pkg in useradd_packages.split():
- d.appendVarFlag("do_populate_sysroot", "vardeps", "USERADD_PARAM:%s GROUPADD_PARAM:%s GROUPMEMS_PARAM:%s" % (pkg, pkg, pkg))
+ d.appendVarFlag("do_populate_sysroot", "vardeps", " USERADD_PARAM:%s GROUPADD_PARAM:%s GROUPMEMS_PARAM:%s" % (pkg, pkg, pkg))
if not d.getVar('USERADD_PARAM:%s' % pkg) and not d.getVar('GROUPADD_PARAM:%s' % pkg) and not d.getVar('GROUPMEMS_PARAM:%s' % pkg):
bb.fatal("%s inherits useradd but doesn't set USERADD_PARAM, GROUPADD_PARAM or GROUPMEMS_PARAM for package %s" % (d.getVar('FILE', False), pkg))
@@ -262,4 +287,4 @@ fakeroot python populate_packages:prepend () {
# Use the following to extend the useradd with custom functions
USERADDEXTENSION ?= ""
-inherit ${USERADDEXTENSION}
+inherit_defer ${USERADDEXTENSION}
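
The new USERADD_DEPENDS hook lets a recipe wait for users and groups created by another recipe. A minimal recipe sketch of the class usage including that hook (the user, group and 'foo' dependency names are placeholders):

inherit useradd
USERADD_PACKAGES = "${PN}"
GROUPADD_PARAM:${PN} = "--system mygroup"
USERADD_PARAM:${PN} = "--system -g mygroup --home-dir /var/lib/myuser --shell /bin/false myuser"
# wait for users/groups added by the hypothetical recipe 'foo'
USERADD_DEPENDS = "foo"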
diff --git a/meta/classes/useradd_base.bbclass b/meta/classes/useradd_base.bbclass
index 863cb7b76c..5e1c699118 100644
--- a/meta/classes/useradd_base.bbclass
+++ b/meta/classes/useradd_base.bbclass
@@ -160,7 +160,7 @@ perform_passwd_expire () {
local username=`echo "$opts" | awk '{ print $NF }'`
local user_exists="`grep "^$username:" $rootdir/etc/passwd || true`"
if test "x$user_exists" != "x"; then
- eval flock -x $rootdir${sysconfdir} -c \"$PSEUDO sed -i \''s/^\('$username':[^:]*\):[^:]*:/\1:0:/'\' $rootdir/etc/shadow \" || true
+ eval flock -x $rootdir${sysconfdir} -c \"$PSEUDO sed --follow-symlinks -i \''s/^\('$username':[^:]*\):[^:]*:/\1:0:/'\' $rootdir/etc/shadow \" || true
local passwd_lastchanged="`grep "^$username:" $rootdir/etc/shadow | cut -d: -f3`"
if test "x$passwd_lastchanged" != "x0"; then
bbfatal "${PN}: passwd --expire operation did not succeed."