Diffstat
-rw-r--r--MAINTAINERS.md5
-rw-r--r--meta-selftest/recipes-devtools/python/python-async-test.inc2
-rw-r--r--meta-selftest/recipes-test/logging-test/logging-test.bb24
-rw-r--r--meta-selftest/recipes-test/overlayfs-user/overlayfs-user.bb17
-rw-r--r--meta-selftest/recipes-test/systemd-machine-units/systemd-machine-units_%.bbappend2
-rw-r--r--meta-selftest/wic/test_efi_plugin.wks6
-rw-r--r--meta-skeleton/recipes-baremetal/baremetal-examples/baremetal-helloworld_git.bb5
-rw-r--r--meta-skeleton/recipes-kernel/hello-mod/files/hello.c10
-rw-r--r--meta/classes/autotools.bbclass5
-rw-r--r--meta/classes/baremetal-image.bbclass13
-rw-r--r--meta/classes/base.bbclass56
-rw-r--r--meta/classes/buildhistory.bbclass29
-rw-r--r--meta/classes/cargo.bbclass89
-rw-r--r--meta/classes/cargo_common.bbclass125
-rw-r--r--meta/classes/cpan-base.bbclass2
-rw-r--r--meta/classes/crate-fetch.bbclass13
-rw-r--r--meta/classes/create-spdx.bbclass948
-rw-r--r--meta/classes/cross-canadian.bbclass4
-rw-r--r--meta/classes/cross.bbclass4
-rw-r--r--meta/classes/cve-check.bbclass72
-rw-r--r--meta/classes/devshell.bbclass10
-rw-r--r--meta/classes/devupstream.bbclass20
-rw-r--r--meta/classes/distrooverrides.bbclass2
-rw-r--r--meta/classes/externalsrc.bbclass9
-rw-r--r--meta/classes/features_check.bbclass4
-rw-r--r--meta/classes/goarch.bbclass14
-rw-r--r--meta/classes/image-artifact-names.bbclass9
-rw-r--r--meta/classes/image-container.bbclass2
-rw-r--r--meta/classes/image-live.bbclass6
-rw-r--r--meta/classes/image-prelink.bbclass13
-rw-r--r--meta/classes/image.bbclass29
-rw-r--r--meta/classes/image_types.bbclass60
-rw-r--r--meta/classes/image_types_wic.bbclass39
-rw-r--r--meta/classes/insane.bbclass204
-rw-r--r--meta/classes/kernel-fitimage.bbclass346
-rw-r--r--meta/classes/kernel-module-split.bbclass28
-rw-r--r--meta/classes/kernel-yocto.bbclass5
-rw-r--r--meta/classes/kernel.bbclass48
-rw-r--r--meta/classes/libc-package.bbclass2
-rw-r--r--meta/classes/license.bbclass26
-rw-r--r--meta/classes/license_image.bbclass13
-rw-r--r--meta/classes/manpages.bbclass4
-rw-r--r--meta/classes/meson.bbclass3
-rw-r--r--meta/classes/meta.bbclass4
-rw-r--r--meta/classes/mirrors.bbclass4
-rw-r--r--meta/classes/multilib.bbclass9
-rw-r--r--meta/classes/nativesdk.bbclass2
-rw-r--r--meta/classes/npm.bbclass27
-rw-r--r--meta/classes/overlayfs.bbclass111
-rw-r--r--meta/classes/package.bbclass105
-rw-r--r--meta/classes/package_deb.bbclass5
-rw-r--r--meta/classes/package_ipk.bbclass8
-rw-r--r--meta/classes/package_rpm.bbclass16
-rw-r--r--meta/classes/packagedata.bbclass2
-rw-r--r--meta/classes/patch.bbclass7
-rw-r--r--meta/classes/populate_sdk_base.bbclass4
-rw-r--r--meta/classes/populate_sdk_ext.bbclass2
-rw-r--r--meta/classes/ptest.bbclass10
-rw-r--r--meta/classes/pypi.bbclass6
-rw-r--r--meta/classes/python3-dir.bbclass2
-rw-r--r--meta/classes/reproducible_build.bbclass127
-rw-r--r--meta/classes/reproducible_build_simple.bbclass9
-rw-r--r--meta/classes/rm_work.bbclass8
-rw-r--r--meta/classes/rootfs-postcommands.bbclass29
-rw-r--r--meta/classes/rust-bin.bbclass149
-rw-r--r--meta/classes/rust-common.bbclass181
-rw-r--r--meta/classes/rust.bbclass45
-rw-r--r--meta/classes/siteinfo.bbclass34
-rw-r--r--meta/classes/sstate.bbclass99
-rw-r--r--meta/classes/staging.bbclass1
-rw-r--r--meta/classes/terminal.bbclass5
-rw-r--r--meta/classes/testimage.bbclass21
-rw-r--r--meta/classes/toaster.bbclass2
-rw-r--r--meta/classes/toolchain-scripts.bbclass5
-rw-r--r--meta/classes/uboot-config.bbclass20
-rw-r--r--meta/classes/uboot-extlinux-config.bbclass2
-rw-r--r--meta/classes/uboot-sign.bbclass56
-rw-r--r--meta/classes/uninative.bbclass2
-rw-r--r--meta/classes/update-alternatives.bbclass6
-rw-r--r--meta/classes/useradd.bbclass4
-rw-r--r--meta/classes/utils.bbclass37
-rw-r--r--meta/conf/abi_version.conf2
-rw-r--r--meta/conf/bitbake.conf51
-rw-r--r--meta/conf/distro/include/cve-extra-exclusions.inc4
-rw-r--r--meta/conf/distro/include/default-versions.inc2
-rw-r--r--meta/conf/distro/include/lto.inc2
-rw-r--r--meta/conf/distro/include/maintainers.inc39
-rw-r--r--meta/conf/distro/include/ptest-packagelists.inc3
-rw-r--r--meta/conf/distro/include/rust_security_flags.inc7
-rw-r--r--meta/conf/distro/include/tcmode-default.inc19
-rw-r--r--meta/conf/distro/include/yocto-uninative.inc10
-rw-r--r--meta/conf/documentation.conf2
-rw-r--r--meta/conf/layer.conf12
-rw-r--r--meta/conf/licenses.conf31
-rw-r--r--meta/conf/local.conf.sample5
-rw-r--r--meta/conf/local.conf.sample.extended6
-rw-r--r--meta/conf/machine/include/arm/arch-arm.inc4
-rw-r--r--meta/conf/machine/include/arm/arch-armv4.inc2
-rw-r--r--meta/conf/machine/include/arm/arch-armv5-dsp.inc1
-rw-r--r--meta/conf/machine/include/arm/arch-armv5.inc2
-rw-r--r--meta/conf/machine/include/arm/arch-armv6.inc2
-rw-r--r--meta/conf/machine/include/arm/arch-armv6m.inc2
-rw-r--r--meta/conf/machine/include/arm/arch-armv7a.inc3
-rw-r--r--meta/conf/machine/include/arm/arch-armv7em.inc17
-rw-r--r--meta/conf/machine/include/arm/arch-armv7m.inc17
-rw-r--r--meta/conf/machine/include/arm/arch-armv7r.inc22
-rw-r--r--meta/conf/machine/include/arm/arch-armv7ve.inc2
-rw-r--r--meta/conf/machine/include/arm/arch-armv8-1m-main.inc18
-rw-r--r--meta/conf/machine/include/arm/arch-armv8-2a.inc2
-rw-r--r--meta/conf/machine/include/arm/arch-armv8a.inc8
-rw-r--r--meta/conf/machine/include/arm/arch-armv8m-base.inc17
-rw-r--r--meta/conf/machine/include/arm/arch-armv8m-main.inc28
-rw-r--r--meta/conf/machine/include/arm/arch-armv8r.inc37
-rw-r--r--meta/conf/machine/include/arm/armv4/tune-arm920t.inc13
-rw-r--r--meta/conf/machine/include/arm/armv4/tune-arm9tdmi.inc13
-rw-r--r--meta/conf/machine/include/arm/armv4/tune-ep9312.inc (renamed from meta/conf/machine/include/tune-ep9312.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv4/tune-strongarm1100.inc12
-rw-r--r--meta/conf/machine/include/arm/armv5/tune-arm926ejs.inc (renamed from meta/conf/machine/include/tune-arm926ejs.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv5/tune-iwmmxt.inc (renamed from meta/conf/machine/include/tune-iwmmxt.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv5/tune-xscale.inc (renamed from meta/conf/machine/include/tune-xscale.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv6/tune-arm1136jf-s.inc (renamed from meta/conf/machine/include/tune-arm1136jf-s.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv6/tune-arm1176jz-s.inc (renamed from meta/conf/machine/include/tune-arm1176jz-s.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv6m/tune-cortexm0.inc (renamed from meta/conf/machine/include/tune-cortex-m0.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv6m/tune-cortexm0plus.inc (renamed from meta/conf/machine/include/tune-cortex-m0plus.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv6m/tune-cortexm1.inc14
-rw-r--r--meta/conf/machine/include/arm/armv7a/tune-cortexa15.inc51
-rw-r--r--meta/conf/machine/include/arm/armv7a/tune-cortexa17.inc51
-rw-r--r--meta/conf/machine/include/arm/armv7a/tune-cortexa5.inc51
-rw-r--r--meta/conf/machine/include/arm/armv7a/tune-cortexa7.inc51
-rw-r--r--meta/conf/machine/include/arm/armv7a/tune-cortexa8.inc39
-rw-r--r--meta/conf/machine/include/arm/armv7a/tune-cortexa9.inc55
-rw-r--r--meta/conf/machine/include/arm/armv7m/tune-cortexm3.inc14
-rw-r--r--meta/conf/machine/include/arm/armv7m/tune-cortexm4.inc14
-rw-r--r--meta/conf/machine/include/arm/armv7m/tune-cortexm7.inc14
-rw-r--r--meta/conf/machine/include/arm/armv7r/tune-cortexr4.inc14
-rw-r--r--meta/conf/machine/include/arm/armv7r/tune-cortexr4f.inc14
-rw-r--r--meta/conf/machine/include/arm/armv7r/tune-cortexr5.inc14
-rw-r--r--meta/conf/machine/include/arm/armv7r/tune-cortexr7.inc14
-rw-r--r--meta/conf/machine/include/arm/armv7r/tune-cortexr8.inc14
-rw-r--r--meta/conf/machine/include/arm/armv8-1m/tune-cortexm55.inc14
-rw-r--r--meta/conf/machine/include/arm/armv8-2a/tune-cortexa55.inc (renamed from meta/conf/machine/include/tune-cortexa55.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8-m/tune-cortexm23.inc14
-rw-r--r--meta/conf/machine/include/arm/armv8-m/tune-cortexm33.inc17
-rw-r--r--meta/conf/machine/include/arm/armv8-m/tune-cortexm35p.inc17
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa32.inc (renamed from meta/conf/machine/include/tune-cortexa32.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa35.inc (renamed from meta/conf/machine/include/tune-cortexa35.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa53.inc (renamed from meta/conf/machine/include/tune-cortexa53.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa57-cortexa53.inc (renamed from meta/conf/machine/include/tune-cortexa57-cortexa53.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa57.inc (renamed from meta/conf/machine/include/tune-cortexa57.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa72-cortexa53.inc (renamed from meta/conf/machine/include/tune-cortexa72-cortexa53.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa72.inc (renamed from meta/conf/machine/include/tune-cortexa72.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-cortexa73-cortexa53.inc (renamed from meta/conf/machine/include/tune-cortexa73-cortexa53.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8a/tune-thunderx.inc (renamed from meta/conf/machine/include/tune-thunderx.inc)0
-rw-r--r--meta/conf/machine/include/arm/armv8r/tune-cortexr52.inc14
-rw-r--r--meta/conf/machine/include/arm/feature-arm-crc.inc4
-rw-r--r--meta/conf/machine/include/arm/feature-arm-crypto.inc5
-rw-r--r--meta/conf/machine/include/arm/feature-arm-dsp.inc3
-rw-r--r--meta/conf/machine/include/arm/feature-arm-idiv.inc2
-rw-r--r--meta/conf/machine/include/arm/feature-arm-neon.inc5
-rw-r--r--meta/conf/machine/include/arm/feature-arm-simd.inc5
-rw-r--r--meta/conf/machine/include/m68k/tune-mcf5441x.inc (renamed from meta/conf/machine/include/tune-mcf5441x.inc)0
-rw-r--r--meta/conf/machine/include/microblaze/tune-microblaze.inc (renamed from meta/conf/machine/include/tune-microblaze.inc)0
-rw-r--r--meta/conf/machine/include/mips/feature-mips-mips16e.inc2
-rw-r--r--meta/conf/machine/include/mips/qemuboot-mips.inc (renamed from meta/conf/machine/include/qemuboot-mips.inc)0
-rw-r--r--meta/conf/machine/include/mips/tune-mips-24k.inc2
-rw-r--r--meta/conf/machine/include/mips/tune-mips-74k.inc2
-rw-r--r--meta/conf/machine/include/mips/tune-mips32.inc (renamed from meta/conf/machine/include/tune-mips32.inc)0
-rw-r--r--meta/conf/machine/include/mips/tune-mips32r2.inc29
-rw-r--r--meta/conf/machine/include/mips/tune-mips32r6.inc (renamed from meta/conf/machine/include/tune-mips32r6.inc)0
-rw-r--r--meta/conf/machine/include/mips/tune-mips64.inc3
-rw-r--r--meta/conf/machine/include/mips/tune-mips64r2.inc84
-rw-r--r--meta/conf/machine/include/mips/tune-mips64r6.inc (renamed from meta/conf/machine/include/tune-mips64r6.inc)0
-rw-r--r--meta/conf/machine/include/mips/tune-octeon.inc32
-rw-r--r--meta/conf/machine/include/powerpc/tune-power5.inc (renamed from meta/conf/machine/include/tune-power5.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-power6.inc (renamed from meta/conf/machine/include/tune-power6.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-power7.inc (renamed from meta/conf/machine/include/tune-power7.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-power9.inc (renamed from meta/conf/machine/include/tune-power9.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppc476.inc (renamed from meta/conf/machine/include/tune-ppc476.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppc603e.inc (renamed from meta/conf/machine/include/tune-ppc603e.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppc7400.inc (renamed from meta/conf/machine/include/tune-ppc7400.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppce300c2.inc (renamed from meta/conf/machine/include/tune-ppce300c2.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppce300c3.inc (renamed from meta/conf/machine/include/tune-ppce300c3.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppce500.inc (renamed from meta/conf/machine/include/tune-ppce500.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppce500mc.inc (renamed from meta/conf/machine/include/tune-ppce500mc.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppce500v2.inc (renamed from meta/conf/machine/include/tune-ppce500v2.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppce5500.inc (renamed from meta/conf/machine/include/tune-ppce5500.inc)0
-rw-r--r--meta/conf/machine/include/powerpc/tune-ppce6500.inc (renamed from meta/conf/machine/include/tune-ppce6500.inc)0
-rw-r--r--meta/conf/machine/include/qemu.inc2
-rw-r--r--meta/conf/machine/include/sh/tune-sh3.inc (renamed from meta/conf/machine/include/tune-sh3.inc)0
-rw-r--r--meta/conf/machine/include/sh/tune-sh4.inc34
-rw-r--r--meta/conf/machine/include/tune-arm920t.inc13
-rw-r--r--meta/conf/machine/include/tune-arm9tdmi.inc13
-rw-r--r--meta/conf/machine/include/tune-atom.inc2
-rw-r--r--meta/conf/machine/include/tune-core2.inc38
-rw-r--r--meta/conf/machine/include/tune-corei7.inc38
-rw-r--r--meta/conf/machine/include/tune-cortexa15.inc51
-rw-r--r--meta/conf/machine/include/tune-cortexa17.inc51
-rw-r--r--meta/conf/machine/include/tune-cortexa5.inc51
-rw-r--r--meta/conf/machine/include/tune-cortexa7.inc51
-rw-r--r--meta/conf/machine/include/tune-cortexa8.inc39
-rw-r--r--meta/conf/machine/include/tune-cortexa9.inc55
-rw-r--r--meta/conf/machine/include/tune-i686.inc27
-rw-r--r--meta/conf/machine/include/tune-mips32r2.inc29
-rw-r--r--meta/conf/machine/include/tune-mips64.inc3
-rw-r--r--meta/conf/machine/include/tune-mips64r2.inc84
-rw-r--r--meta/conf/machine/include/tune-octeon.inc32
-rw-r--r--meta/conf/machine/include/tune-sh4.inc34
-rw-r--r--meta/conf/machine/include/tune-strongarm1100.inc12
-rw-r--r--meta/conf/machine/include/x86-base.inc46
-rw-r--r--meta/conf/machine/include/x86/qemuboot-x86.inc (renamed from meta/conf/machine/include/qemuboot-x86.inc)0
-rw-r--r--meta/conf/machine/include/x86/tune-atom.inc2
-rw-r--r--meta/conf/machine/include/x86/tune-c3.inc (renamed from meta/conf/machine/include/tune-c3.inc)0
-rw-r--r--meta/conf/machine/include/x86/tune-core2.inc38
-rw-r--r--meta/conf/machine/include/x86/tune-corei7.inc38
-rw-r--r--meta/conf/machine/include/x86/tune-i586-nlp.inc (renamed from meta/conf/machine/include/tune-i586-nlp.inc)0
-rw-r--r--meta/conf/machine/include/x86/tune-i586.inc (renamed from meta/conf/machine/include/tune-i586.inc)0
-rw-r--r--meta/conf/machine/include/x86/tune-i686.inc27
-rw-r--r--meta/conf/machine/include/x86/x86-base.inc46
-rw-r--r--meta/conf/machine/qemuarm.conf6
-rw-r--r--meta/conf/machine/qemuarm64.conf6
-rw-r--r--meta/conf/machine/qemuarmv5.conf7
-rw-r--r--meta/conf/machine/qemumips.conf4
-rw-r--r--meta/conf/machine/qemumips64.conf4
-rw-r--r--meta/conf/machine/qemuppc.conf2
-rw-r--r--meta/conf/machine/qemuppc64.conf6
-rw-r--r--meta/conf/machine/qemux86-64.conf4
-rw-r--r--meta/conf/machine/qemux86.conf4
-rw-r--r--meta/conf/multilib.conf2
-rw-r--r--meta/files/common-licenses/0BSD5
-rw-r--r--meta/files/common-licenses/ADSL1
-rw-r--r--meta/files/common-licenses/AFL-1.127
-rw-r--r--meta/files/common-licenses/AGPL-1.0-only86
-rw-r--r--meta/files/common-licenses/AGPL-1.0-or-later86
-rw-r--r--meta/files/common-licenses/AMDPLPA20
-rw-r--r--meta/files/common-licenses/AML9
-rw-r--r--meta/files/common-licenses/AMPAS13
-rw-r--r--meta/files/common-licenses/ANTLR-PD-fallback7
-rw-r--r--meta/files/common-licenses/APAFML3
-rw-r--r--meta/files/common-licenses/Abstyles11
-rw-r--r--meta/files/common-licenses/Adobe-200612
-rw-r--r--meta/files/common-licenses/Adobe-Glyph10
-rw-r--r--meta/files/common-licenses/Afmparse10
-rw-r--r--meta/files/common-licenses/Aladdin62
-rw-r--r--meta/files/common-licenses/Artistic-1.0-Perl51
-rw-r--r--meta/files/common-licenses/Artistic-1.0-cl851
-rw-r--r--meta/files/common-licenses/BSD-0-Clause12
-rw-r--r--meta/files/common-licenses/BSD-2-Clause-Views11
-rw-r--r--meta/files/common-licenses/BSD-3-Clause-Attribution11
-rw-r--r--meta/files/common-licenses/BSD-3-Clause-LBNL12
-rw-r--r--meta/files/common-licenses/BSD-3-Clause-Modification35
-rw-r--r--meta/files/common-licenses/BSD-3-Clause-No-Military-License16
-rw-r--r--meta/files/common-licenses/BSD-3-Clause-No-Nuclear-License14
-rw-r--r--meta/files/common-licenses/BSD-3-Clause-No-Nuclear-License-201416
-rw-r--r--meta/files/common-licenses/BSD-3-Clause-No-Nuclear-Warranty14
-rw-r--r--meta/files/common-licenses/BSD-3-Clause-Open-MPI34
-rw-r--r--meta/files/common-licenses/BSD-4-Clause-Shortened13
-rw-r--r--meta/files/common-licenses/BSD-4-Clause-UC15
-rw-r--r--meta/files/common-licenses/BSD-Protection53
-rw-r--r--meta/files/common-licenses/BSD-Source-Code10
-rw-r--r--meta/files/common-licenses/BUSL-1.172
-rw-r--r--meta/files/common-licenses/Bahyph11
-rw-r--r--meta/files/common-licenses/Barr1
-rw-r--r--meta/files/common-licenses/Beerware1
-rw-r--r--meta/files/common-licenses/BitTorrent-1.0330
-rw-r--r--meta/files/common-licenses/BitTorrent-1.1137
-rw-r--r--meta/files/common-licenses/BlueOak-1.0.055
-rw-r--r--meta/files/common-licenses/Borceux19
-rw-r--r--meta/files/common-licenses/C-UDA-1.047
-rw-r--r--meta/files/common-licenses/CAL-1.0354
-rw-r--r--meta/files/common-licenses/CAL-1.0-Combined-Work-Exception354
-rw-r--r--meta/files/common-licenses/CC-BY-2.5-AU112
-rw-r--r--meta/files/common-licenses/CC-BY-3.0-AT111
-rw-r--r--meta/files/common-licenses/CC-BY-3.0-DE109
-rw-r--r--meta/files/common-licenses/CC-BY-3.0-NL97
-rw-r--r--meta/files/common-licenses/CC-BY-3.0-US83
-rw-r--r--meta/files/common-licenses/CC-BY-4.0156
-rw-r--r--meta/files/common-licenses/CC-BY-NC-3.0-DE110
-rw-r--r--meta/files/common-licenses/CC-BY-NC-4.0158
-rw-r--r--meta/files/common-licenses/CC-BY-NC-ND-3.0-DE101
-rw-r--r--meta/files/common-licenses/CC-BY-NC-ND-3.0-IGO99
-rw-r--r--meta/files/common-licenses/CC-BY-NC-ND-4.0155
-rw-r--r--meta/files/common-licenses/CC-BY-NC-SA-2.0-FR93
-rw-r--r--meta/files/common-licenses/CC-BY-NC-SA-2.0-UK149
-rw-r--r--meta/files/common-licenses/CC-BY-NC-SA-3.0-DE126
-rw-r--r--meta/files/common-licenses/CC-BY-NC-SA-3.0-IGO105
-rw-r--r--meta/files/common-licenses/CC-BY-NC-SA-4.0170
-rw-r--r--meta/files/common-licenses/CC-BY-ND-3.0-DE101
-rw-r--r--meta/files/common-licenses/CC-BY-ND-4.0154
-rw-r--r--meta/files/common-licenses/CC-BY-SA-2.0-UK147
-rw-r--r--meta/files/common-licenses/CC-BY-SA-2.1-JP83
-rw-r--r--meta/files/common-licenses/CC-BY-SA-3.0-AT139
-rw-r--r--meta/files/common-licenses/CC-BY-SA-3.0-DE136
-rw-r--r--meta/files/common-licenses/CC-PDDC8
-rw-r--r--meta/files/common-licenses/CDDL-1.1123
-rw-r--r--meta/files/common-licenses/CDL-1.053
-rw-r--r--meta/files/common-licenses/CDLA-Permissive-1.085
-rw-r--r--meta/files/common-licenses/CDLA-Permissive-2.035
-rw-r--r--meta/files/common-licenses/CDLA-Sharing-1.089
-rw-r--r--meta/files/common-licenses/CECILL-1.1229
-rw-r--r--meta/files/common-licenses/CECILL-2.1518
-rw-r--r--meta/files/common-licenses/CERN-OHL-1.147
-rw-r--r--meta/files/common-licenses/CERN-OHL-1.249
-rw-r--r--meta/files/common-licenses/CERN-OHL-P-2.0199
-rw-r--r--meta/files/common-licenses/CERN-OHL-S-2.0289
-rw-r--r--meta/files/common-licenses/CERN-OHL-W-2.0310
-rw-r--r--meta/files/common-licenses/CNRI-Jython12
-rw-r--r--meta/files/common-licenses/CNRI-Python25
-rw-r--r--meta/files/common-licenses/CNRI-Python-GPL-Compatible23
-rw-r--r--meta/files/common-licenses/CPOL-1.0298
-rw-r--r--meta/files/common-licenses/Caldera25
-rw-r--r--meta/files/common-licenses/Condor-1.140
-rw-r--r--meta/files/common-licenses/Crossword5
-rw-r--r--meta/files/common-licenses/CrystalStacker7
-rw-r--r--meta/files/common-licenses/Cube17
-rw-r--r--meta/files/common-licenses/D-FSL-1.0147
-rw-r--r--meta/files/common-licenses/DOC15
-rw-r--r--meta/files/common-licenses/DRL-1.012
-rw-r--r--meta/files/common-licenses/DSDP18
-rw-r--r--meta/files/common-licenses/Dotseqn5
-rw-r--r--meta/files/common-licenses/EPICS32
-rw-r--r--meta/files/common-licenses/EUPL-1.2190
-rw-r--r--meta/files/common-licenses/Eurosym18
-rw-r--r--meta/files/common-licenses/FSFAP1
-rw-r--r--meta/files/common-licenses/FSFUL3
-rw-r--r--meta/files/common-licenses/FSFULLR3
-rw-r--r--meta/files/common-licenses/FTL79
-rw-r--r--meta/files/common-licenses/FreeBSD-DOC23
-rw-r--r--meta/files/common-licenses/FreeImage117
-rw-r--r--meta/files/common-licenses/FreeType170
-rw-r--r--meta/files/common-licenses/GD24
-rw-r--r--meta/files/common-licenses/GFDL-1.1-invariants-only119
-rw-r--r--meta/files/common-licenses/GFDL-1.1-invariants-or-later119
-rw-r--r--meta/files/common-licenses/GFDL-1.1-no-invariants-only119
-rw-r--r--meta/files/common-licenses/GFDL-1.1-no-invariants-or-later119
-rw-r--r--meta/files/common-licenses/GFDL-1.1-only119
-rw-r--r--meta/files/common-licenses/GFDL-1.1-or-later119
-rw-r--r--meta/files/common-licenses/GFDL-1.2-invariants-only130
-rw-r--r--meta/files/common-licenses/GFDL-1.2-invariants-or-later130
-rw-r--r--meta/files/common-licenses/GFDL-1.2-no-invariants-only130
-rw-r--r--meta/files/common-licenses/GFDL-1.2-no-invariants-or-later130
-rw-r--r--meta/files/common-licenses/GFDL-1.2-only130
-rw-r--r--meta/files/common-licenses/GFDL-1.2-or-later130
-rw-r--r--meta/files/common-licenses/GFDL-1.3-invariants-only149
-rw-r--r--meta/files/common-licenses/GFDL-1.3-invariants-or-later149
-rw-r--r--meta/files/common-licenses/GFDL-1.3-no-invariants-only149
-rw-r--r--meta/files/common-licenses/GFDL-1.3-no-invariants-or-later149
-rw-r--r--meta/files/common-licenses/GFDL-1.3-only149
-rw-r--r--meta/files/common-licenses/GFDL-1.3-or-later149
-rw-r--r--meta/files/common-licenses/GL2PS13
-rw-r--r--meta/files/common-licenses/GLWTPL25
-rw-r--r--meta/files/common-licenses/Giftware9
-rw-r--r--meta/files/common-licenses/Glide95
-rw-r--r--meta/files/common-licenses/Glulxe3
-rw-r--r--meta/files/common-licenses/HPND-sell-variant19
-rw-r--r--meta/files/common-licenses/HTMLTIDY13
-rw-r--r--meta/files/common-licenses/HaskellReport6
-rw-r--r--meta/files/common-licenses/Hippocratic-2.133
-rw-r--r--meta/files/common-licenses/IBM-pibs8
-rw-r--r--meta/files/common-licenses/IJG38
-rw-r--r--meta/files/common-licenses/ImageMagick98
-rw-r--r--meta/files/common-licenses/Imlib29
-rw-r--r--meta/files/common-licenses/Info-ZIP16
-rw-r--r--meta/files/common-licenses/Intel-ACPI34
-rw-r--r--meta/files/common-licenses/Interbase-1.0199
-rw-r--r--meta/files/common-licenses/JPNIC40
-rw-r--r--meta/files/common-licenses/JSON11
-rw-r--r--meta/files/common-licenses/JasPer-2.017
-rw-r--r--meta/files/common-licenses/LAL-1.267
-rw-r--r--meta/files/common-licenses/LAL-1.388
-rw-r--r--meta/files/common-licenses/LGPLLR89
-rw-r--r--meta/files/common-licenses/LPL-1.081
-rw-r--r--meta/files/common-licenses/LPPL-1.3a175
-rw-r--r--meta/files/common-licenses/Latex2e9
-rw-r--r--meta/files/common-licenses/Leptonica9
-rw-r--r--meta/files/common-licenses/LiLiQ-P-1.170
-rw-r--r--meta/files/common-licenses/LiLiQ-R-1.194
-rw-r--r--meta/files/common-licenses/LiLiQ-Rplus-1.188
-rw-r--r--meta/files/common-licenses/Linux-OpenIB18
-rw-r--r--meta/files/common-licenses/MIT-016
-rw-r--r--meta/files/common-licenses/MIT-CMU7
-rw-r--r--meta/files/common-licenses/MIT-Modern-Variant17
-rw-r--r--meta/files/common-licenses/MIT-advertising7
-rw-r--r--meta/files/common-licenses/MIT-enna9
-rw-r--r--meta/files/common-licenses/MIT-feh5
-rw-r--r--meta/files/common-licenses/MIT-open-group23
-rw-r--r--meta/files/common-licenses/MITNFA7
-rw-r--r--meta/files/common-licenses/MPL-2.0-no-copyleft-exception144
-rw-r--r--meta/files/common-licenses/MTLL24
-rw-r--r--meta/files/common-licenses/MakeIndex19
-rw-r--r--meta/files/common-licenses/MulanPSL-1.0116
-rw-r--r--meta/files/common-licenses/MulanPSL-2.0131
-rw-r--r--meta/files/common-licenses/Mup13
-rw-r--r--meta/files/common-licenses/NAIST-200370
-rw-r--r--meta/files/common-licenses/NBPL-1.059
-rw-r--r--meta/files/common-licenses/NCGL-UK-2.067
-rw-r--r--meta/files/common-licenses/NIST-PD15
-rw-r--r--meta/files/common-licenses/NIST-PD-fallback5
-rw-r--r--meta/files/common-licenses/NLOD-1.079
-rw-r--r--meta/files/common-licenses/NLOD-2.080
-rw-r--r--meta/files/common-licenses/NLPL14
-rw-r--r--meta/files/common-licenses/NOSL150
-rw-r--r--meta/files/common-licenses/NPL-1.0102
-rw-r--r--meta/files/common-licenses/NPL-1.1186
-rw-r--r--meta/files/common-licenses/NRL28
-rw-r--r--meta/files/common-licenses/NTP-05
-rw-r--r--meta/files/common-licenses/Nauman50
-rw-r--r--meta/files/common-licenses/Naumen21
-rw-r--r--meta/files/common-licenses/Net-SNMP107
-rw-r--r--meta/files/common-licenses/NetCDF7
-rw-r--r--meta/files/common-licenses/Newsletr7
-rw-r--r--meta/files/common-licenses/Noweb9
-rw-r--r--meta/files/common-licenses/O-UDA-1.047
-rw-r--r--meta/files/common-licenses/OCCT-PL112
-rw-r--r--meta/files/common-licenses/ODC-By-1.0195
-rw-r--r--meta/files/common-licenses/OFL-1.049
-rw-r--r--meta/files/common-licenses/OFL-1.0-RFN49
-rw-r--r--meta/files/common-licenses/OFL-1.0-no-RFN49
-rw-r--r--meta/files/common-licenses/OFL-1.1-RFN43
-rw-r--r--meta/files/common-licenses/OFL-1.1-no-RFN43
-rw-r--r--meta/files/common-licenses/OGC-1.017
-rw-r--r--meta/files/common-licenses/OGDL-Taiwan-1.0141
-rw-r--r--meta/files/common-licenses/OGL-Canada-2.051
-rw-r--r--meta/files/common-licenses/OGL-UK-1.069
-rw-r--r--meta/files/common-licenses/OGL-UK-2.072
-rw-r--r--meta/files/common-licenses/OGL-UK-3.069
-rw-r--r--meta/files/common-licenses/OLDAP-1.160
-rw-r--r--meta/files/common-licenses/OLDAP-1.260
-rw-r--r--meta/files/common-licenses/OLDAP-1.362
-rw-r--r--meta/files/common-licenses/OLDAP-1.462
-rw-r--r--meta/files/common-licenses/OLDAP-2.018
-rw-r--r--meta/files/common-licenses/OLDAP-2.0.118
-rw-r--r--meta/files/common-licenses/OLDAP-2.120
-rw-r--r--meta/files/common-licenses/OLDAP-2.222
-rw-r--r--meta/files/common-licenses/OLDAP-2.2.122
-rw-r--r--meta/files/common-licenses/OLDAP-2.2.224
-rw-r--r--meta/files/common-licenses/OLDAP-2.324
-rw-r--r--meta/files/common-licenses/OLDAP-2.422
-rw-r--r--meta/files/common-licenses/OLDAP-2.522
-rw-r--r--meta/files/common-licenses/OLDAP-2.620
-rw-r--r--meta/files/common-licenses/OLDAP-2.720
-rw-r--r--meta/files/common-licenses/OML5
-rw-r--r--meta/files/common-licenses/OPL-1.0136
-rw-r--r--meta/files/common-licenses/OPUBL-1.078
-rw-r--r--meta/files/common-licenses/OSET-PL-2.1161
-rw-r--r--meta/files/common-licenses/OSL-1.147
-rw-r--r--meta/files/common-licenses/OSL-2.147
-rw-r--r--meta/files/common-licenses/PDDL-1.0136
-rw-r--r--meta/files/common-licenses/PHP-3.0127
-rw-r--r--meta/files/common-licenses/Parity-6.0.044
-rw-r--r--meta/files/common-licenses/Parity-7.0.071
-rw-r--r--meta/files/common-licenses/Plexus15
-rw-r--r--meta/files/common-licenses/PolyForm-Noncommercial-1.0.0131
-rw-r--r--meta/files/common-licenses/PolyForm-Small-Business-1.0.0121
-rw-r--r--meta/files/common-licenses/Qhull17
-rw-r--r--meta/files/common-licenses/RPL-1.1177
-rw-r--r--meta/files/common-licenses/RSA-MD9
-rw-r--r--meta/files/common-licenses/Rdisc13
-rw-r--r--meta/files/common-licenses/SCEA60
-rw-r--r--meta/files/common-licenses/SGI-B-1.082
-rw-r--r--meta/files/common-licenses/SGI-B-1.184
-rw-r--r--meta/files/common-licenses/SGI-B-2.012
-rw-r--r--meta/files/common-licenses/SHL-0.564
-rw-r--r--meta/files/common-licenses/SHL-0.5165
-rw-r--r--meta/files/common-licenses/SISSL116
-rw-r--r--meta/files/common-licenses/SISSL-1.2114
-rw-r--r--meta/files/common-licenses/SMLNJ7
-rw-r--r--meta/files/common-licenses/SMPPL29
-rw-r--r--meta/files/common-licenses/SNIA122
-rw-r--r--meta/files/common-licenses/SSH-OpenSSH67
-rw-r--r--meta/files/common-licenses/SSH-short5
-rw-r--r--meta/files/common-licenses/SSPL-1.0557
-rw-r--r--meta/files/common-licenses/SWL7
-rw-r--r--meta/files/common-licenses/Saxpath19
-rw-r--r--meta/files/common-licenses/Sendmail36
-rw-r--r--meta/files/common-licenses/Sendmail-8.2336
-rw-r--r--meta/files/common-licenses/SimPL-2.037
-rw-r--r--meta/files/common-licenses/Spencer-8611
-rw-r--r--meta/files/common-licenses/Spencer-9412
-rw-r--r--meta/files/common-licenses/Spencer-999
-rw-r--r--meta/files/common-licenses/TAPR-OHL-1.0266
-rw-r--r--meta/files/common-licenses/TCL9
-rw-r--r--meta/files/common-licenses/TCP-wrappers7
-rw-r--r--meta/files/common-licenses/TMate21
-rw-r--r--meta/files/common-licenses/TORQUE-1.125
-rw-r--r--meta/files/common-licenses/TOSL9
-rw-r--r--meta/files/common-licenses/TU-Berlin-1.010
-rw-r--r--meta/files/common-licenses/TU-Berlin-2.020
-rw-r--r--meta/files/common-licenses/UCL-1.048
-rw-r--r--meta/files/common-licenses/UPL-1.017
-rw-r--r--meta/files/common-licenses/Unicode-DFS-201519
-rw-r--r--meta/files/common-licenses/Unicode-DFS-201622
-rw-r--r--meta/files/common-licenses/Unicode-TOU51
-rw-r--r--meta/files/common-licenses/VOSTROM27
-rw-r--r--meta/files/common-licenses/Vim30
-rw-r--r--meta/files/common-licenses/W3C-1998072023
-rw-r--r--meta/files/common-licenses/W3C-2015051317
-rw-r--r--meta/files/common-licenses/WTFPL11
-rw-r--r--meta/files/common-licenses/Wsuipa5
-rw-r--r--meta/files/common-licenses/X1113
-rw-r--r--meta/files/common-licenses/XSkat10
-rw-r--r--meta/files/common-licenses/Xerox5
-rw-r--r--meta/files/common-licenses/YPL-1.047
-rw-r--r--meta/files/common-licenses/Zed3
-rw-r--r--meta/files/common-licenses/Zend-2.018
-rw-r--r--meta/files/common-licenses/Zimbra-1.447
-rw-r--r--meta/files/common-licenses/blessing5
-rw-r--r--meta/files/common-licenses/bzip2-1.0.518
-rw-r--r--meta/files/common-licenses/copyleft-next-0.3.0219
-rw-r--r--meta/files/common-licenses/copyleft-next-0.3.1220
-rw-r--r--meta/files/common-licenses/curl10
-rw-r--r--meta/files/common-licenses/diffmark2
-rw-r--r--meta/files/common-licenses/dvipdfm1
-rw-r--r--meta/files/common-licenses/eGenix40
-rw-r--r--meta/files/common-licenses/etalab-2.0179
-rw-r--r--meta/files/common-licenses/gnuplot14
-rw-r--r--meta/files/common-licenses/iMatix39
-rw-r--r--meta/files/common-licenses/libpng-2.033
-rw-r--r--meta/files/common-licenses/libselinux-1.021
-rw-r--r--meta/files/common-licenses/libtiff8
-rw-r--r--meta/files/common-licenses/mpich218
-rw-r--r--meta/files/common-licenses/psfrag5
-rw-r--r--meta/files/common-licenses/psutils29
-rw-r--r--meta/files/common-licenses/tcl40
-rw-r--r--meta/files/common-licenses/vim81
-rw-r--r--meta/files/common-licenses/xinetd25
-rw-r--r--meta/files/common-licenses/xpp21
-rw-r--r--meta/files/common-licenses/zlib-acknowledgement15
-rw-r--r--meta/files/spdx-licenses.json5937
-rw-r--r--meta/files/toolchain-shar-relocate.sh2
-rw-r--r--meta/lib/buildstats.py4
-rw-r--r--meta/lib/crate.py149
-rw-r--r--meta/lib/oe/cve_check.py83
-rw-r--r--meta/lib/oe/elf.py8
-rw-r--r--meta/lib/oe/gpg_sign.py27
-rw-r--r--meta/lib/oe/license.py6
-rw-r--r--meta/lib/oe/maketype.py7
-rw-r--r--meta/lib/oe/overlayfs.py43
-rw-r--r--meta/lib/oe/package_manager/__init__.py2
-rw-r--r--meta/lib/oe/packagedata.py20
-rw-r--r--meta/lib/oe/prservice.py25
-rw-r--r--meta/lib/oe/qa.py34
-rw-r--r--meta/lib/oe/reproducible.py84
-rw-r--r--meta/lib/oe/rootfs.py2
-rw-r--r--meta/lib/oe/sbom.py74
-rw-r--r--meta/lib/oe/spdx.py335
-rw-r--r--meta/lib/oe/sstatesig.py81
-rw-r--r--meta/lib/oe/utils.py35
-rw-r--r--meta/lib/oeqa/core/target/ssh.py4
-rw-r--r--meta/lib/oeqa/manual/eclipse-plugin.json6
-rw-r--r--meta/lib/oeqa/manual/toaster-managed-mode.json4
-rw-r--r--meta/lib/oeqa/runtime/cases/date.py9
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs.py1
-rw-r--r--meta/lib/oeqa/runtime/cases/ptest.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/rpm.py6
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/README2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/build.py30
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/gcc.py29
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/https.py20
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/sanity.py22
-rw-r--r--meta/lib/oeqa/selftest/cases/bblogging.py104
-rw-r--r--meta/lib/oeqa/selftest/cases/bbtests.py7
-rw-r--r--meta/lib/oeqa/selftest/cases/buildoptions.py24
-rw-r--r--meta/lib/oeqa/selftest/cases/containerimage.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/devtool.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/distrodata.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/eSDK.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/fitimage.py29
-rw-r--r--meta/lib/oeqa/selftest/cases/glibc.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/gotoolchain.py6
-rw-r--r--meta/lib/oeqa/selftest/cases/image_typedep.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/oescripts.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/overlayfs.py171
-rw-r--r--meta/lib/oeqa/selftest/cases/recipetool.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/recipeutils.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/reproducible.py7
-rw-r--r--meta/lib/oeqa/selftest/cases/runtime_test.py6
-rw-r--r--meta/lib/oeqa/selftest/cases/signing.py8
-rw-r--r--meta/lib/oeqa/selftest/cases/sstatetests.py36
-rw-r--r--meta/lib/oeqa/selftest/cases/tinfoil.py6
-rw-r--r--meta/lib/oeqa/selftest/cases/wic.py98
-rw-r--r--meta/lib/oeqa/selftest/context.py2
-rw-r--r--meta/lib/oeqa/utils/buildproject.py3
-rw-r--r--meta/lib/oeqa/utils/qemurunner.py11
-rw-r--r--meta/lib/oeqa/utils/targetbuild.py4
-rw-r--r--meta/recipes-bsp/acpid/acpid.inc6
-rw-r--r--meta/recipes-bsp/acpid/acpid_2.0.32.bb7
-rw-r--r--meta/recipes-bsp/acpid/acpid_2.0.33.bb6
-rw-r--r--meta/recipes-bsp/apmd/apmd_3.2.2-15.bb6
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi_3.0.13.bb71
-rw-r--r--meta/recipes-bsp/gnu-efi/gnu-efi_3.0.14.bb70
-rw-r--r--meta/recipes-bsp/grub/grub-bootconf_1.00.bb2
-rw-r--r--meta/recipes-bsp/grub/grub-efi_2.06.bb2
-rw-r--r--meta/recipes-bsp/opensbi/opensbi-payloads.inc10
-rw-r--r--meta/recipes-bsp/u-boot/u-boot.inc49
-rw-r--r--meta/recipes-bsp/usbutils/usbutils_013.bb30
-rw-r--r--meta/recipes-bsp/usbutils/usbutils_014.bb31
-rw-r--r--meta/recipes-connectivity/avahi/avahi_0.8.bb4
-rw-r--r--meta/recipes-connectivity/bind/bind-9.16.21/0001-avoid-start-failure-with-bind-user.patch (renamed from meta/recipes-connectivity/bind/bind-9.16.19/0001-avoid-start-failure-with-bind-user.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.16.21/0001-named-lwresd-V-and-start-log-hide-build-options.patch (renamed from meta/recipes-connectivity/bind/bind-9.16.19/0001-named-lwresd-V-and-start-log-hide-build-options.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.16.21/bind-ensure-searching-for-json-headers-searches-sysr.patch (renamed from meta/recipes-connectivity/bind/bind-9.16.19/bind-ensure-searching-for-json-headers-searches-sysr.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.16.21/bind9 (renamed from meta/recipes-connectivity/bind/bind-9.16.19/bind9)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.16.21/conf.patch (renamed from meta/recipes-connectivity/bind/bind-9.16.19/conf.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.16.21/generate-rndc-key.sh (renamed from meta/recipes-connectivity/bind/bind-9.16.19/generate-rndc-key.sh)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.16.21/init.d-add-support-for-read-only-rootfs.patch (renamed from meta/recipes-connectivity/bind/bind-9.16.19/init.d-add-support-for-read-only-rootfs.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.16.21/make-etc-initd-bind-stop-work.patch (renamed from meta/recipes-connectivity/bind/bind-9.16.19/make-etc-initd-bind-stop-work.patch)0
-rw-r--r--meta/recipes-connectivity/bind/bind-9.16.21/named.service (renamed from meta/recipes-connectivity/bind/bind-9.16.19/named.service)0
-rw-r--r--meta/recipes-connectivity/bind/bind_9.16.19.bb129
-rw-r--r--meta/recipes-connectivity/bind/bind_9.16.21.bb129
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5_5.60.bb70
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5_5.61.bb70
-rw-r--r--meta/recipes-connectivity/connman/connman.inc8
-rw-r--r--meta/recipes-connectivity/dhcpcd/dhcpcd_9.4.0.bb4
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils/inetutils-only-check-pam_appl.h-when-pam-enabled.patch12
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils_2.0.bb212
-rw-r--r--meta/recipes-connectivity/inetutils/inetutils_2.2.bb211
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2_5.13.0.bb11
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2_5.14.0.bb11
-rw-r--r--meta/recipes-connectivity/kea/files/0001-add-missing-headers-in-timer_mgr.cc.patch29
-rw-r--r--meta/recipes-connectivity/kea/kea_1.8.2.bb1
-rw-r--r--meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb4
-rw-r--r--meta/recipes-connectivity/neard/neard_0.16.bb2
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils_2.5.4.bb20
-rw-r--r--meta/recipes-connectivity/ofono/ofono_1.32.bb2
-rw-r--r--meta/recipes-connectivity/openssh/openssh_8.6p1.bb185
-rw-r--r--meta/recipes-connectivity/openssh/openssh_8.7p1.bb185
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0001-Configure-do-not-tweak-mips-cflags.patch36
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch21
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0001-skip-test_symbol_presence.patch46
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0003-Add-support-for-io_pgetevents_time64-syscall.patch62
-rw-r--r--meta/recipes-connectivity/openssl/openssl/0004-Fixup-support-for-io_pgetevents_time64-syscall.patch99
-rw-r--r--meta/recipes-connectivity/openssl/openssl/reproducible.patch32
-rw-r--r--meta/recipes-connectivity/openssl/openssl/run-ptest2
-rw-r--r--meta/recipes-connectivity/openssl/openssl_1.1.1k.bb248
-rw-r--r--meta/recipes-connectivity/openssl/openssl_3.0.0.bb244
-rw-r--r--meta/recipes-connectivity/ppp/ppp_2.4.9.bb10
-rw-r--r--meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.9.bb4
-rw-r--r--meta/recipes-core/busybox/busybox-inittab_1.34.1.bb (renamed from meta/recipes-core/busybox/busybox-inittab_1.33.0.bb)0
-rw-r--r--meta/recipes-core/busybox/busybox.inc24
-rw-r--r--meta/recipes-core/busybox/busybox/0001-gen_build_files-Use-C-locale-when-calling-sed-on-glo.patch28
-rw-r--r--meta/recipes-core/busybox/busybox/0001-testsuite-check-uudecode-before-using-it.patch6
-rw-r--r--meta/recipes-core/busybox/busybox/busybox-udhcpc-no_deconfig.patch102
-rw-r--r--meta/recipes-core/busybox/busybox/defconfig12
-rw-r--r--meta/recipes-core/busybox/busybox_1.33.1.bb55
-rw-r--r--meta/recipes-core/busybox/busybox_1.34.1.bb54
-rw-r--r--meta/recipes-core/coreutils/coreutils/0001-fts-remove-NOSTAT_LEAF_OPTIMIZATION.patch167
-rw-r--r--meta/recipes-core/coreutils/coreutils/0001-ls-restore-8.31-behavior-on-removed-directories.patch99
-rw-r--r--meta/recipes-core/coreutils/coreutils/disable-ls-output-quoting.patch49
-rw-r--r--meta/recipes-core/coreutils/coreutils/e8b56ebd536e82b15542a00c888109471936bfda.patch93
-rw-r--r--meta/recipes-core/coreutils/coreutils/fix-selinux-flask.patch7
-rw-r--r--meta/recipes-core/coreutils/coreutils_8.32.bb212
-rw-r--r--meta/recipes-core/coreutils/coreutils_9.0.bb209
-rw-r--r--meta/recipes-core/dbus/dbus-glib_0.112.bb2
-rw-r--r--meta/recipes-core/dbus/dbus.inc5
-rw-r--r--meta/recipes-core/dbus/dbus/stop_using_selinux_set_mapping.patch148
-rw-r--r--meta/recipes-core/dropbear/dropbear.inc10
-rw-r--r--meta/recipes-core/ell/ell_0.41.bb24
-rw-r--r--meta/recipes-core/ell/ell_0.44.bb24
-rw-r--r--meta/recipes-core/expat/expat_2.4.1.bb6
-rw-r--r--meta/recipes-core/gettext/gettext_0.21.bb1
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/0001-correctly-use-3-parameters-for-close_range.patch29
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/Enable-more-tests-while-cross-compiling.patch26
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/relocate-modules.patch8
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/time-test.patch40
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0_2.68.3.bb56
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0_2.70.0.bb54
-rw-r--r--meta/recipes-core/glib-2.0/glib.inc10
-rw-r--r--meta/recipes-core/glib-networking/glib-networking_2.68.1.bb38
-rw-r--r--meta/recipes-core/glib-networking/glib-networking_2.70.0.bb38
-rw-r--r--meta/recipes-core/glibc/cross-localedef-native_2.33.bb50
-rw-r--r--meta/recipes-core/glibc/cross-localedef-native_2.34.bb49
-rw-r--r--meta/recipes-core/glibc/glibc-common.inc2
-rw-r--r--meta/recipes-core/glibc/glibc-locale_2.34.bb (renamed from meta/recipes-core/glibc/glibc-locale_2.33.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-mtrace_2.34.bb (renamed from meta/recipes-core/glibc/glibc-mtrace_2.33.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-package.inc17
-rw-r--r--meta/recipes-core/glibc/glibc-scripts_2.34.bb (renamed from meta/recipes-core/glibc/glibc-scripts_2.33.bb)0
-rw-r--r--meta/recipes-core/glibc/glibc-tests.inc32
-rw-r--r--meta/recipes-core/glibc/glibc-testsuite_2.33.bb64
-rw-r--r--meta/recipes-core/glibc/glibc-testsuite_2.34.bb35
-rw-r--r--meta/recipes-core/glibc/glibc-version.inc8
-rw-r--r--meta/recipes-core/glibc/glibc.inc2
-rw-r--r--meta/recipes-core/glibc/glibc/0001-CVE-2021-38604.patch43
-rw-r--r--meta/recipes-core/glibc/glibc/0001-fix-create-thread-failed-in-unprivileged-process-BZ-.patch79
-rw-r--r--meta/recipes-core/glibc/glibc/0001-localedef-Add-hardlink-resolver-from-util-linux.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0002-CVE-2021-38604.patch150
-rw-r--r--meta/recipes-core/glibc/glibc/0002-localedef-fix-ups-hardlink-to-make-it-compile.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch8
-rw-r--r--meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch6
-rw-r--r--meta/recipes-core/glibc/glibc/0005-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch22
-rw-r--r--meta/recipes-core/glibc/glibc/0006-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0007-nativesdk-glibc-Make-relocatable-install-for-locales.patch6
-rw-r--r--meta/recipes-core/glibc/glibc/0008-fsl-e500-e5500-e6500-603e-fsqrt-implementation.patch1581
-rw-r--r--meta/recipes-core/glibc/glibc/0008-nativesdk-glibc-Fall-back-to-faccessat-on-faccess2-r.patch32
-rw-r--r--meta/recipes-core/glibc/glibc/0009-fsl-e500-e5500-e6500-603e-fsqrt-implementation.patch1581
-rw-r--r--meta/recipes-core/glibc/glibc/0009-ppc-sqrt-Fix-undefined-reference-to-__sqrt_finite.patch205
-rw-r--r--meta/recipes-core/glibc/glibc/0010-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch384
-rw-r--r--meta/recipes-core/glibc/glibc/0010-ppc-sqrt-Fix-undefined-reference-to-__sqrt_finite.patch205
-rw-r--r--meta/recipes-core/glibc/glibc/0011-Quote-from-bug-1443-which-explains-what-the-patch-do.patch58
-rw-r--r--meta/recipes-core/glibc/glibc/0011-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch384
-rw-r--r--meta/recipes-core/glibc/glibc/0012-Quote-from-bug-1443-which-explains-what-the-patch-do.patch58
-rw-r--r--meta/recipes-core/glibc/glibc/0012-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch33
-rw-r--r--meta/recipes-core/glibc/glibc/0013-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch58
-rw-r--r--meta/recipes-core/glibc/glibc/0013-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch33
-rw-r--r--meta/recipes-core/glibc/glibc/0014-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch58
-rw-r--r--meta/recipes-core/glibc/glibc/0014-sysdeps-gnu-configure.ac-handle-correctly-libc_cv_ro.patch39
-rw-r--r--meta/recipes-core/glibc/glibc/0015-sysdeps-gnu-configure.ac-handle-correctly-libc_cv_ro.patch39
-rw-r--r--meta/recipes-core/glibc/glibc/0015-yes-within-the-path-sets-wrong-config-variables.patch260
-rw-r--r--meta/recipes-core/glibc/glibc/0016-timezone-re-written-tzselect-as-posix-sh.patch42
-rw-r--r--meta/recipes-core/glibc/glibc/0016-yes-within-the-path-sets-wrong-config-variables.patch260
-rw-r--r--meta/recipes-core/glibc/glibc/0017-Remove-bash-dependency-for-nscd-init-script.patch72
-rw-r--r--meta/recipes-core/glibc/glibc/0017-timezone-re-written-tzselect-as-posix-sh.patch34
-rw-r--r--meta/recipes-core/glibc/glibc/0018-Remove-bash-dependency-for-nscd-init-script.patch72
-rw-r--r--meta/recipes-core/glibc/glibc/0018-eglibc-Cross-building-and-testing-instructions.patch616
-rw-r--r--meta/recipes-core/glibc/glibc/0019-eglibc-Cross-building-and-testing-instructions.patch616
-rw-r--r--meta/recipes-core/glibc/glibc/0019-eglibc-Help-bootstrap-cross-toolchain.patch97
-rw-r--r--meta/recipes-core/glibc/glibc/0020-eglibc-Help-bootstrap-cross-toolchain.patch97
-rw-r--r--meta/recipes-core/glibc/glibc/0020-eglibc-Resolve-__fpscr_values-on-SH4.patch53
-rw-r--r--meta/recipes-core/glibc/glibc/0021-eglibc-Forward-port-cross-locale-generation-support.patch560
-rw-r--r--meta/recipes-core/glibc/glibc/0021-eglibc-Resolve-__fpscr_values-on-SH4.patch53
-rw-r--r--meta/recipes-core/glibc/glibc/0022-Define-DUMMY_LOCALE_T-if-not-defined.patch29
-rw-r--r--meta/recipes-core/glibc/glibc/0022-eglibc-Forward-port-cross-locale-generation-support.patch560
-rw-r--r--meta/recipes-core/glibc/glibc/0023-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch80
-rw-r--r--meta/recipes-core/glibc/glibc/0024-elf-dl-deps.c-Make-_dl_build_local_scope-breadth-fir.patch53
-rw-r--r--meta/recipes-core/glibc/glibc/0024-localedef-add-to-archive-uses-a-hard-coded-locale-pa.patch80
-rw-r--r--meta/recipes-core/glibc/glibc/0025-elf-dl-deps.c-Make-_dl_build_local_scope-breadth-fir.patch53
-rw-r--r--meta/recipes-core/glibc/glibc/0025-intl-Emit-no-lines-in-bison-generated-files.patch31
-rw-r--r--meta/recipes-core/glibc/glibc/0026-intl-Emit-no-lines-in-bison-generated-files.patch31
-rw-r--r--meta/recipes-core/glibc/glibc/0027-locale-prevent-maybe-uninitialized-errors-with-Os-BZ.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0028-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/0029-wordsize.h-Unify-the-header-between-arm-and-aarch64.patch10
-rw-r--r--meta/recipes-core/glibc/glibc/0030-powerpc-Do-not-ask-compiler-for-finding-arch.patch2
-rw-r--r--meta/recipes-core/glibc/glibc/CVE-2021-33574.patch61
-rw-r--r--meta/recipes-core/glibc/glibc/faccessat2-perm.patch31
-rw-r--r--meta/recipes-core/glibc/glibc/mte-backports.patch1238
-rw-r--r--meta/recipes-core/glibc/glibc_2.33.bb134
-rw-r--r--meta/recipes-core/glibc/glibc_2.34.bb133
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig.patch2
-rw-r--r--meta/recipes-core/images/build-appliance-image_15.0.0.bb4
-rw-r--r--meta/recipes-core/images/core-image-ptest-all.bb4
-rw-r--r--meta/recipes-core/libcgroup/libcgroup/CVE-2018-14348.patch37
-rw-r--r--meta/recipes-core/libcgroup/libcgroup/module.patch36
-rw-r--r--meta/recipes-core/libcgroup/libcgroup/musl-decls-compat.patch187
-rw-r--r--meta/recipes-core/libcgroup/libcgroup_0.41.bb46
-rw-r--r--meta/recipes-core/libcgroup/libcgroup_2.0.bb35
-rw-r--r--meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.25.bb (renamed from meta/recipes-core/libxcrypt/libxcrypt-compat_4.4.23.bb)0
-rw-r--r--meta/recipes-core/libxcrypt/libxcrypt.inc2
-rw-r--r--meta/recipes-core/libxcrypt/libxcrypt_4.4.25.bb (renamed from meta/recipes-core/libxcrypt/libxcrypt_4.4.23.bb)0
-rw-r--r--meta/recipes-core/libxml/libxml2_2.9.12.bb2
-rw-r--r--meta/recipes-core/meta/buildtools-tarball.bb14
-rw-r--r--meta/recipes-core/meta/meta-environment.bb5
-rw-r--r--meta/recipes-core/meta/meta-ide-support.bb8
-rw-r--r--meta/recipes-core/meta/testexport-tarball.bb1
-rw-r--r--meta/recipes-core/meta/uninative-tarball.bb1
-rw-r--r--meta/recipes-core/musl/musl/0001-riscv-Rename-__NR_fstatat-__NR_newfstatat.patch32
-rw-r--r--meta/recipes-core/musl/musl_git.bb3
-rw-r--r--meta/recipes-core/ovmf/ovmf_git.bb11
-rw-r--r--meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb3
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb1
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-rust-cross-canadian.bb18
-rw-r--r--meta/recipes-core/psplash/psplash_git.bb6
-rw-r--r--meta/recipes-core/readline/readline.inc1
-rw-r--r--meta/recipes-core/systemd/systemd-boot_249.1.bb70
-rw-r--r--meta/recipes-core/systemd/systemd-boot_249.4.bb74
-rw-r--r--meta/recipes-core/systemd/systemd-bootconf_1.00.bb2
-rw-r--r--meta/recipes-core/systemd/systemd-compat-units.bb2
-rw-r--r--meta/recipes-core/systemd/systemd-conf_1.0.bb6
-rw-r--r--meta/recipes-core/systemd/systemd-serialgetty.bb18
-rwxr-xr-xmeta/recipes-core/systemd/systemd-systemctl/systemctl5
-rw-r--r--meta/recipes-core/systemd/systemd.inc2
-rw-r--r--meta/recipes-core/systemd/systemd/0002-don-t-use-glibc-specific-qsort_r.patch104
-rw-r--r--meta/recipes-core/systemd/systemd/0003-missing_type.h-add-__compare_fn_t-and-comparison_fn_.patch4
-rw-r--r--meta/recipes-core/systemd/systemd/0004-add-fallback-parse_printf_format-implementation.patch16
-rw-r--r--meta/recipes-core/systemd/systemd/0005-src-basic-missing.h-check-for-missing-strndupa.patch247
-rw-r--r--meta/recipes-core/systemd/systemd/0006-Include-netinet-if_ether.h.patch247
-rw-r--r--meta/recipes-core/systemd/systemd/0007-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch12
-rw-r--r--meta/recipes-core/systemd/systemd/0008-add-missing-FTW_-macros-for-musl.patch2
-rw-r--r--meta/recipes-core/systemd/systemd/0009-fix-missing-of-__register_atfork-for-non-glibc-build.patch6
-rw-r--r--meta/recipes-core/systemd/systemd/0010-Use-uintmax_t-for-handling-rlim_t.patch12
-rw-r--r--meta/recipes-core/systemd/systemd/0011-test-sizeof.c-Disable-tests-for-missing-typedefs-in-.patch4
-rw-r--r--meta/recipes-core/systemd/systemd/0012-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch14
-rw-r--r--meta/recipes-core/systemd/systemd/0013-Define-glibc-compatible-basename-for-non-glibc-syste.patch4
-rw-r--r--meta/recipes-core/systemd/systemd/0014-Do-not-disable-buffering-when-writing-to-oom_score_a.patch6
-rw-r--r--meta/recipes-core/systemd/systemd/0015-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch6
-rw-r--r--meta/recipes-core/systemd/systemd/0016-Hide-__start_BUS_ERROR_MAP-and-__stop_BUS_ERROR_MAP.patch4
-rw-r--r--meta/recipes-core/systemd/systemd/0017-missing_type.h-add-__compar_d_fn_t-definition.patch2
-rw-r--r--meta/recipes-core/systemd/systemd/0018-avoid-redefinition-of-prctl_mm_map-structure.patch2
-rw-r--r--meta/recipes-core/systemd/systemd/0019-Handle-missing-LOCK_EX.patch2
-rw-r--r--meta/recipes-core/systemd/systemd/0020-Fix-incompatible-pointer-type-struct-sockaddr_un.patch6
-rw-r--r--meta/recipes-core/systemd/systemd/0021-test-json.c-define-M_PIl.patch2
-rw-r--r--meta/recipes-core/systemd/systemd/0022-do-not-disable-buffer-in-writing-files.patch136
-rw-r--r--meta/recipes-core/systemd/systemd/0025-Handle-__cpu_mask-usage.patch4
-rw-r--r--meta/recipes-core/systemd/systemd/0026-Handle-missing-gshadow.patch20
-rw-r--r--meta/recipes-core/systemd/systemd/0028-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch4
-rw-r--r--meta/recipes-core/systemd/systemd_249.1.bb772
-rw-r--r--meta/recipes-core/systemd/systemd_249.4.bb785
-rw-r--r--meta/recipes-core/util-linux/util-linux-libuuid_2.37.2.bb (renamed from meta/recipes-core/util-linux/util-linux-libuuid_2.37.1.bb)0
-rw-r--r--meta/recipes-core/util-linux/util-linux.inc2
-rw-r--r--meta/recipes-core/util-linux/util-linux_2.37.1.bb290
-rw-r--r--meta/recipes-core/util-linux/util-linux_2.37.2.bb320
-rwxr-xr-xmeta/recipes-core/volatile-binds/files/mount-copybind26
-rw-r--r--meta/recipes-core/volatile-binds/volatile-binds.bb6
-rw-r--r--meta/recipes-devtools/binutils/binutils-2.37.inc1
-rw-r--r--meta/recipes-devtools/binutils/binutils.inc2
-rw-r--r--meta/recipes-devtools/binutils/binutils/0017-bfd-Close-the-file-descriptor-if-there-is-no-archive.patch234
-rw-r--r--meta/recipes-devtools/binutils/binutils_2.37.bb2
-rw-r--r--meta/recipes-devtools/bison/bison/0001-Use-mapped-file-name-for-symbols.patch62
-rw-r--r--meta/recipes-devtools/bison/bison_3.7.6.bb53
-rw-r--r--meta/recipes-devtools/bison/bison_3.8.1.bb54
-rw-r--r--meta/recipes-devtools/bootchart2/bootchart2_0.14.9.bb8
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools/0001-fix-error-for-undeclared-macro-on-musl.patch28
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools_5.13.1.bb72
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools_5.13.bb73
-rw-r--r--meta/recipes-devtools/cargo/cargo-cross-canadian.inc74
-rw-r--r--meta/recipes-devtools/cargo/cargo-cross-canadian_1.55.0.bb6
-rw-r--r--meta/recipes-devtools/cargo/cargo.inc56
-rw-r--r--meta/recipes-devtools/cargo/cargo_1.55.0.bb4
-rw-r--r--meta/recipes-devtools/ccache/ccache_4.3.bb25
-rw-r--r--meta/recipes-devtools/ccache/ccache_4.4.bb26
-rw-r--r--meta/recipes-devtools/cmake/cmake-native_3.21.2.bb (renamed from meta/recipes-devtools/cmake/cmake-native_3.21.1.bb)0
-rw-r--r--meta/recipes-devtools/cmake/cmake.inc2
-rw-r--r--meta/recipes-devtools/cmake/cmake_3.21.2.bb (renamed from meta/recipes-devtools/cmake/cmake_3.21.1.bb)0
-rw-r--r--meta/recipes-devtools/createrepo-c/createrepo-c_0.17.4.bb40
-rw-r--r--meta/recipes-devtools/createrepo-c/createrepo-c_0.17.5.bb40
-rw-r--r--meta/recipes-devtools/distcc/distcc_3.4.bb6
-rw-r--r--meta/recipes-devtools/dnf/dnf_4.8.0.bb91
-rw-r--r--meta/recipes-devtools/dnf/dnf_4.9.0.bb91
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs.inc2
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/0001-lib-ext2fs-unix_io.c-do-unlock-on-error.patch24
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/0001-lib-ext2fs-unix_io.c-revert-parts-of-libext2fs-fix-p.patch48
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/0001-tests-u_direct_io-expect-correct-expected-output.patch69
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-fix-missing-check-for-permission-denied.patch4
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/quiet-debugfs.patch2
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs_1.46.2.bb143
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs_1.46.4.bb141
-rw-r--r--meta/recipes-devtools/elfutils/elfutils_0.185.bb12
-rw-r--r--meta/recipes-devtools/elfutils/files/glibc-2.34-fix.patch138
-rw-r--r--meta/recipes-devtools/flex/flex_2.6.4.bb5
-rw-r--r--meta/recipes-devtools/gcc/gcc-11.2.inc1
-rw-r--r--meta/recipes-devtools/gcc/gcc-configure-common.inc2
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross-canadian.inc8
-rw-r--r--meta/recipes-devtools/gcc/gcc-multilib-config.inc32
-rw-r--r--meta/recipes-devtools/gcc/gcc-runtime.inc2
-rw-r--r--meta/recipes-devtools/gcc/gcc/0041-apply-debug-prefix-maps-before-checksumming-DIEs.patch95
-rw-r--r--meta/recipes-devtools/gdb/gdb-common.inc8
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross-canadian.inc1
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross.inc2
-rw-r--r--meta/recipes-devtools/gdb/gdb.inc2
-rw-r--r--meta/recipes-devtools/git/git_2.32.0.bb9
-rw-r--r--meta/recipes-devtools/git/git_2.33.0.bb9
-rw-r--r--meta/recipes-devtools/go/go-1.16.5.inc20
-rw-r--r--meta/recipes-devtools/go/go-1.16.8.inc25
-rw-r--r--meta/recipes-devtools/go/go-binary-native_1.16.5.bb46
-rw-r--r--meta/recipes-devtools/go/go-binary-native_1.16.8.bb46
-rw-r--r--meta/recipes-devtools/go/go-cross-canadian_1.16.8.bb (renamed from meta/recipes-devtools/go/go-cross-canadian_1.16.5.bb)0
-rw-r--r--meta/recipes-devtools/go/go-cross_1.16.8.bb (renamed from meta/recipes-devtools/go/go-cross_1.16.5.bb)0
-rw-r--r--meta/recipes-devtools/go/go-crosssdk_1.16.8.bb (renamed from meta/recipes-devtools/go/go-crosssdk_1.16.5.bb)0
-rw-r--r--meta/recipes-devtools/go/go-native_1.16.8.bb (renamed from meta/recipes-devtools/go/go-native_1.16.5.bb)0
-rw-r--r--meta/recipes-devtools/go/go-runtime_1.16.8.bb (renamed from meta/recipes-devtools/go/go-runtime_1.16.5.bb)0
-rw-r--r--meta/recipes-devtools/go/go_1.16.8.bb (renamed from meta/recipes-devtools/go/go_1.16.5.bb)0
-rw-r--r--meta/recipes-devtools/help2man/help2man_1.48.3.bb24
-rw-r--r--meta/recipes-devtools/help2man/help2man_1.48.5.bb24
-rw-r--r--meta/recipes-devtools/libdnf/libdnf/0001-libdnf-config.h-avoid-the-use-of-non-portable-__WORD.patch34
-rw-r--r--meta/recipes-devtools/libdnf/libdnf/0001-libdnf-dnf-context.cpp-do-not-try-to-access-BDB-data.patch37
-rw-r--r--meta/recipes-devtools/libdnf/libdnf_0.63.1.bb38
-rw-r--r--meta/recipes-devtools/libdnf/libdnf_0.64.0.bb38
-rw-r--r--meta/recipes-devtools/libedit/libedit_20210522-3.1.bb24
-rw-r--r--meta/recipes-devtools/libedit/libedit_20210714-3.1.bb24
-rw-r--r--meta/recipes-devtools/libmodulemd/libmodulemd_git.bb2
-rw-r--r--meta/recipes-devtools/libtool/libtool-2.4.6.inc2
-rw-r--r--meta/recipes-devtools/libtool/libtool/lto-prefix.patch22
-rw-r--r--meta/recipes-devtools/libtool/libtool_2.4.6.bb2
-rw-r--r--meta/recipes-devtools/log4cplus/log4cplus_2.0.6.bb20
-rw-r--r--meta/recipes-devtools/log4cplus/log4cplus_2.0.7.bb19
-rw-r--r--meta/recipes-devtools/lua/lua/lua.pc.in10
-rw-r--r--meta/recipes-devtools/lua/lua/run-ptest19
-rw-r--r--meta/recipes-devtools/lua/lua_5.4.3.bb62
-rw-r--r--meta/recipes-devtools/meson/meson.inc4
-rw-r--r--meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch20
-rw-r--r--meta/recipes-devtools/meson/meson/0003-native_bindir.patch94
-rw-r--r--meta/recipes-devtools/meson/meson/gi-target-dep.patch41
-rw-r--r--meta/recipes-devtools/meson/meson_0.59.1.bb (renamed from meta/recipes-devtools/meson/meson_0.58.1.bb)0
-rw-r--r--meta/recipes-devtools/meson/nativesdk-meson_0.59.1.bb (renamed from meta/recipes-devtools/meson/nativesdk-meson_0.58.1.bb)0
-rw-r--r--meta/recipes-devtools/mtools/mtools/disable-hardcoded-configs.patch2
-rw-r--r--meta/recipes-devtools/mtools/mtools_4.0.34.bb49
-rw-r--r--meta/recipes-devtools/mtools/mtools_4.0.35.bb49
-rw-r--r--meta/recipes-devtools/patchelf/patchelf/6edec83653ce1b5fc201ff6db93b966394766814.patch44
-rw-r--r--meta/recipes-devtools/patchelf/patchelf/alignmentfix.patch44
-rw-r--r--meta/recipes-devtools/patchelf/patchelf/handle-read-only-files.patch16
-rw-r--r--meta/recipes-devtools/patchelf/patchelf_0.12.bb20
-rw-r--r--meta/recipes-devtools/patchelf/patchelf_0.13.bb18
-rw-r--r--meta/recipes-devtools/perl/files/CVE-2021-36770.patch40
-rw-r--r--meta/recipes-devtools/perl/perl_5.34.0.bb12
-rw-r--r--meta/recipes-devtools/pkgconf/pkgconf_1.7.4.bb67
-rw-r--r--meta/recipes-devtools/pkgconf/pkgconf_1.8.0.bb67
-rw-r--r--meta/recipes-devtools/pkgconfig/pkgconfig_git.bb2
-rw-r--r--meta/recipes-devtools/prelink/prelink_git.bb4
-rwxr-xr-xmeta/recipes-devtools/pseudo/files/build-oldlibc20
-rw-r--r--meta/recipes-devtools/pseudo/files/older-glibc-symbols.patch57
-rw-r--r--meta/recipes-devtools/pseudo/pseudo_git.bb9
-rw-r--r--meta/recipes-devtools/python-numpy/python3-numpy_1.21.0.bb58
-rw-r--r--meta/recipes-devtools/python-numpy/python3-numpy_1.21.2.bb58
-rw-r--r--meta/recipes-devtools/python/files/0001-conditionally-do-not-fetch-code-by-easy_install.patch2
-rw-r--r--meta/recipes-devtools/python/python-cython.inc2
-rw-r--r--meta/recipes-devtools/python/python-smmap.inc19
-rw-r--r--meta/recipes-devtools/python/python3-cython_0.29.23.bb36
-rw-r--r--meta/recipes-devtools/python/python3-cython_0.29.24.bb37
-rw-r--r--meta/recipes-devtools/python/python3-dbusmock_0.23.1.bb18
-rw-r--r--meta/recipes-devtools/python/python3-dbusmock_0.24.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3-git_3.1.18.bb32
-rw-r--r--meta/recipes-devtools/python/python3-git_3.1.24.bb32
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis/run-ptest10
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis/test_binary_search.py135
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis/test_rle.py101
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis_6.14.3.bb22
-rw-r--r--meta/recipes-devtools/python/python3-hypothesis_6.23.2.bb38
-rw-r--r--meta/recipes-devtools/python/python3-importlib-metadata_4.6.1.bb20
-rw-r--r--meta/recipes-devtools/python/python3-importlib-metadata_4.8.1.bb20
-rw-r--r--meta/recipes-devtools/python/python3-mako_1.1.4.bb18
-rw-r--r--meta/recipes-devtools/python/python3-mako_1.1.5.bb18
-rw-r--r--meta/recipes-devtools/python/python3-more-itertools_8.10.0.bb27
-rw-r--r--meta/recipes-devtools/python/python3-more-itertools_8.8.0.bb27
-rw-r--r--meta/recipes-devtools/python/python3-packaging_21.0.bb8
-rw-r--r--meta/recipes-devtools/python/python3-pip_21.1.3.bb33
-rw-r--r--meta/recipes-devtools/python/python3-pip_21.2.4.bb33
-rw-r--r--meta/recipes-devtools/python/python3-pluggy_0.13.1.bb27
-rw-r--r--meta/recipes-devtools/python/python3-pluggy_1.0.0.bb26
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodome_3.10.1.bb5
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodome_3.11.0.bb5
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodomex_3.10.1.bb9
-rw-r--r--meta/recipes-devtools/python/python3-pycryptodomex_3.11.0.bb9
-rw-r--r--meta/recipes-devtools/python/python3-pygments_2.10.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-pygments_2.9.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-pytest_6.2.4.bb41
-rw-r--r--meta/recipes-devtools/python/python3-pytest_6.2.5.bb41
-rw-r--r--meta/recipes-devtools/python/python3-scons-native_4.2.0.bb (renamed from meta/recipes-devtools/python/python3-scons-native_4.1.0.bb)0
-rw-r--r--meta/recipes-devtools/python/python3-scons/0001-Fix-man-page-installation.patch23
-rw-r--r--meta/recipes-devtools/python/python3-scons_4.1.0.bb27
-rw-r--r--meta/recipes-devtools/python/python3-scons_4.2.0.bb27
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-scm_6.0.1.bb26
-rw-r--r--meta/recipes-devtools/python/python3-setuptools-scm_6.3.2.bb26
-rw-r--r--meta/recipes-devtools/python/python3-setuptools_57.1.0.bb55
-rw-r--r--meta/recipes-devtools/python/python3-setuptools_58.0.4.bb55
-rw-r--r--meta/recipes-devtools/python/python3-smmap_4.0.0.bb3
-rw-r--r--meta/recipes-devtools/python/python3-smmap_5.0.0.bb19
-rw-r--r--meta/recipes-devtools/python/python3-tomli_1.2.1.bb20
-rw-r--r--meta/recipes-devtools/python/python3-zipp_3.5.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3-zipp_3.6.0.bb18
-rw-r--r--meta/recipes-devtools/python/python3/0001-Don-t-search-system-for-headers-libraries.patch6
-rw-r--r--meta/recipes-devtools/python/python3/0001-Lib-pty.py-handle-stdin-I-O-errors-same-way-as-maste.patch49
-rw-r--r--meta/recipes-devtools/python/python3/0001-Lib-sysconfig.py-use-libdir-values-from-configuratio.patch35
-rw-r--r--meta/recipes-devtools/python/python3/0001-Lib-sysconfig.py-use-prefix-value-from-build-configu.patch34
-rw-r--r--meta/recipes-devtools/python/python3/0001-distutils-sysconfig-append-STAGING_LIBDIR-python-sys.patch24
-rw-r--r--meta/recipes-devtools/python/python3/0001-setup.py-pass-missing-libraries-to-Extension-for-mul.patch15
-rw-r--r--meta/recipes-devtools/python/python3/0001-sysconfig.py-use-platlibdir-also-for-purelib.patch30
-rw-r--r--meta/recipes-devtools/python/python3/12-distutils-prefix-is-inside-staging-area.patch14
-rw-r--r--meta/recipes-devtools/python/python3/makerace.patch25
-rw-r--r--meta/recipes-devtools/python/python3/reformat_sysconfig.py2
-rw-r--r--meta/recipes-devtools/python/python3_3.10.0.bb400
-rw-r--r--meta/recipes-devtools/python/python3_3.9.6.bb386
-rw-r--r--meta/recipes-devtools/qemu/qemu-native_6.0.0.bb9
-rw-r--r--meta/recipes-devtools/qemu/qemu-native_6.1.0.bb9
-rw-r--r--meta/recipes-devtools/qemu/qemu-system-native_6.0.0.bb31
-rw-r--r--meta/recipes-devtools/qemu/qemu-system-native_6.1.0.bb32
-rw-r--r--meta/recipes-devtools/qemu/qemu.inc23
-rw-r--r--meta/recipes-devtools/qemu/qemu/0001-Add-enable-disable-udev.patch29
-rw-r--r--meta/recipes-devtools/qemu/qemu/0001-configure-fix-detection-of-gdbus-codegen.patch50
-rw-r--r--meta/recipes-devtools/qemu/qemu/0001-linux-user-Tag-vsx-with-ieee128-fpbits.patch35
-rw-r--r--meta/recipes-devtools/qemu/qemu/0001-vhost-user-gpu-fix-memory-disclosure-in-virgl_cmd_ge.patch43
-rw-r--r--meta/recipes-devtools/qemu/qemu/0002-vhost-user-gpu-fix-resource-leak-in-vg_resource_crea.patch41
-rw-r--r--meta/recipes-devtools/qemu/qemu/0003-vhost-user-gpu-fix-memory-leak-in-vg_resource_attach.patch48
-rw-r--r--meta/recipes-devtools/qemu/qemu/0004-qemu-disable-Valgrind.patch34
-rw-r--r--meta/recipes-devtools/qemu/qemu/0004-vhost-user-gpu-fix-memory-leak-while-calling-vg_reso.patch50
-rw-r--r--meta/recipes-devtools/qemu/qemu/0005-vhost-user-gpu-fix-memory-leak-in-virgl_cmd_resource.patch58
-rw-r--r--meta/recipes-devtools/qemu/qemu/0006-vhost-user-gpu-fix-memory-leak-in-virgl_resource_att.patch49
-rw-r--r--meta/recipes-devtools/qemu/qemu/0007-vhost-user-gpu-fix-OOB-write-in-virgl_cmd_get_capset.patch49
-rw-r--r--meta/recipes-devtools/qemu/qemu/0010-configure-Add-pkg-config-handling-for-libgcrypt.patch87
-rw-r--r--meta/recipes-devtools/qemu/qemu/CVE-2021-3527-1.patch42
-rw-r--r--meta/recipes-devtools/qemu/qemu/CVE-2021-3527-2.patch59
-rw-r--r--meta/recipes-devtools/qemu/qemu/cross.patch40
-rw-r--r--meta/recipes-devtools/qemu/qemu_6.1.0.bb (renamed from meta/recipes-devtools/qemu/qemu_6.0.0.bb)0
-rw-r--r--meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb1
-rw-r--r--meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch14
-rw-r--r--meta/recipes-devtools/rpm/files/0001-Fix-build-with-musl-C-library.patch22
-rw-r--r--meta/recipes-devtools/rpm/files/0001-build-pack.c-do-not-insert-payloadflags-into-.rpm-me.patch28
-rw-r--r--meta/recipes-devtools/rpm/files/0001-docs-do-not-build-manpages-requires-pandoc.patch26
-rw-r--r--meta/recipes-devtools/rpm/files/0001-rpm-rpmio.c-restrict-virtual-memory-usage-if-limit-s.patch65
-rw-r--r--meta/recipes-devtools/rpm/files/0001-tools-Add-error.h-for-non-glibc-case.patch59
-rw-r--r--meta/recipes-devtools/rpm/files/0011-Do-not-require-that-ELF-binaries-are-executable-to-b.patch33
-rw-r--r--meta/recipes-devtools/rpm/rpm_4.16.1.3.bb196
-rw-r--r--meta/recipes-devtools/rpm/rpm_4.17.0.bb202
-rw-r--r--meta/recipes-devtools/ruby/ruby.inc5
-rw-r--r--meta/recipes-devtools/ruby/ruby/0002-template-Makefile.in-filter-out-f-prefix-map.patch42
-rw-r--r--meta/recipes-devtools/ruby/ruby/0003-rdoc-build-reproducible-documentation.patch35
-rw-r--r--meta/recipes-devtools/ruby/ruby/0004-lib-mkmf.rb-sort-list-of-object-files-in-generated-M.patch28
-rw-r--r--meta/recipes-devtools/ruby/ruby/0005-Mark-Gemspec-reproducible-change-fixing-784225-too.patch28
-rw-r--r--meta/recipes-devtools/ruby/ruby/0006-Make-gemspecs-reproducible.patch67
-rw-r--r--meta/recipes-devtools/ruby/ruby_3.0.2.bb9
-rw-r--r--meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb6
-rw-r--r--meta/recipes-devtools/rust/README-rust.md58
-rw-r--r--meta/recipes-devtools/rust/files/riscv-march.patch73
-rw-r--r--meta/recipes-devtools/rust/files/rv64gc.patch37
-rw-r--r--meta/recipes-devtools/rust/libstd-rs.inc40
-rw-r--r--meta/recipes-devtools/rust/libstd-rs/0005-Add-base-definitions-for-riscv64-musl-libc-0.2.93.patch905
-rw-r--r--meta/recipes-devtools/rust/libstd-rs/0006-FIXUP-linux-musl-mod.rs-add-riscv64-to-b64-set-libc-.patch31
-rw-r--r--meta/recipes-devtools/rust/libstd-rs/0007-FIXUP-Correct-definitions-to-match-musl-libc-0.2.93.patch741
-rw-r--r--meta/recipes-devtools/rust/libstd-rs/0008-Update-checksums-for-modified-files-for-rust-1.54.0-.patch23
-rw-r--r--meta/recipes-devtools/rust/libstd-rs_1.55.0.bb11
-rw-r--r--meta/recipes-devtools/rust/rust-common.inc361
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian-common.inc55
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian.inc78
-rw-r--r--meta/recipes-devtools/rust/rust-cross-canadian_1.55.0.bb6
-rw-r--r--meta/recipes-devtools/rust/rust-cross.inc73
-rw-r--r--meta/recipes-devtools/rust/rust-cross_1.55.0.bb2
-rw-r--r--meta/recipes-devtools/rust/rust-llvm.inc69
-rw-r--r--meta/recipes-devtools/rust/rust-llvm/0002-llvm-allow-env-override-of-exe-path.patch32
-rw-r--r--meta/recipes-devtools/rust/rust-llvm_1.55.0.bb5
-rw-r--r--meta/recipes-devtools/rust/rust-snapshot.inc25
-rw-r--r--meta/recipes-devtools/rust/rust-source.inc7
-rw-r--r--meta/recipes-devtools/rust/rust-target.inc10
-rw-r--r--meta/recipes-devtools/rust/rust-tools-cross-canadian.inc38
-rw-r--r--meta/recipes-devtools/rust/rust-tools-cross-canadian_1.55.0.bb6
-rw-r--r--meta/recipes-devtools/rust/rust.inc202
-rw-r--r--meta/recipes-devtools/rust/rust_1.55.0.bb20
-rw-r--r--meta/recipes-devtools/squashfs-tools/files/0001-squashfs-tools-fix-build-failure-against-gcc-10.patch45
-rw-r--r--meta/recipes-devtools/squashfs-tools/squashfs-tools/0001-Avoid-use-of-INSTALL_DIR-for-symlink-targets.patch34
-rw-r--r--meta/recipes-devtools/squashfs-tools/squashfs-tools_git.bb10
-rw-r--r--meta/recipes-devtools/strace/strace/Makefile-ptest.patch10
-rw-r--r--meta/recipes-devtools/strace/strace_5.12.bb56
-rw-r--r--meta/recipes-devtools/strace/strace_5.14.bb56
-rw-r--r--meta/recipes-devtools/swig/swig.inc2
-rw-r--r--meta/recipes-devtools/syslinux/syslinux_6.04-pre2.bb1
-rw-r--r--meta/recipes-devtools/systemd-bootchart/systemd-bootchart_234.bb2
-rw-r--r--meta/recipes-devtools/tcf-agent/tcf-agent/0001-Fixed-copyright-messages.patch56
-rw-r--r--meta/recipes-devtools/tcf-agent/tcf-agent_git.bb11
-rw-r--r--meta/recipes-devtools/tcltk/tcl_8.6.11.bb4
-rw-r--r--meta/recipes-devtools/vala/vala_0.52.4.bb5
-rw-r--r--meta/recipes-devtools/vala/vala_0.52.5.bb5
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/200b6a5a0ea3e1e154663b0fc575bfe2becf177d.patch34
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/52ed51fc35f8a6148c2940eb46932b02dd3b9b23.patch171
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/6da22a4d246519cd1a638cfc7eff00cdd74413c4.patch43
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/a1364805fc74b5690f763033c0c9b43f27613572.patch1422
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/remove-for-aarch642
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/remove-for-all7
-rw-r--r--meta/recipes-devtools/valgrind/valgrind_3.17.0.bb6
-rw-r--r--meta/recipes-example/rust-hello-world/rust-hello-world/0001-enable-LTO.patch23
-rw-r--r--meta/recipes-example/rust-hello-world/rust-hello-world_git.bb19
-rw-r--r--meta/recipes-example/rustfmt/rustfmt_1.4.2.bb171
-rw-r--r--meta/recipes-extended/acpica/acpica_20210331.bb51
-rw-r--r--meta/recipes-extended/acpica/acpica_20210730.bb51
-rw-r--r--meta/recipes-extended/at/at_3.2.2.bb6
-rw-r--r--meta/recipes-extended/bash/bash.inc5
-rw-r--r--meta/recipes-extended/bzip2/bzip2/Makefile.am2
-rw-r--r--meta/recipes-extended/cpio/cpio-2.13/CVE-2021-38185.patch581
-rw-r--r--meta/recipes-extended/cpio/cpio_2.13.bb1
-rw-r--r--meta/recipes-extended/cronie/cronie_1.5.7.bb6
-rw-r--r--meta/recipes-extended/diffutils/diffutils/0001-c-stack-stop-using-SIGSTKSZ.patch84
-rw-r--r--meta/recipes-extended/diffutils/diffutils_3.7.bb41
-rw-r--r--meta/recipes-extended/diffutils/diffutils_3.8.bb42
-rw-r--r--meta/recipes-extended/ethtool/ethtool/avoid_parallel_tests.patch6
-rw-r--r--meta/recipes-extended/ethtool/ethtool_5.13.bb37
-rw-r--r--meta/recipes-extended/ethtool/ethtool_5.14.bb37
-rw-r--r--meta/recipes-extended/findutils/findutils.inc6
-rw-r--r--meta/recipes-extended/findutils/findutils/run-ptest57
-rw-r--r--meta/recipes-extended/findutils/findutils_4.8.0.bb12
-rw-r--r--meta/recipes-extended/grep/grep_3.6.bb46
-rw-r--r--meta/recipes-extended/grep/grep_3.7.bb46
-rw-r--r--meta/recipes-extended/gzip/gzip-1.11/wrong-path-fix.patch (renamed from meta/recipes-extended/gzip/gzip-1.10/wrong-path-fix.patch)0
-rw-r--r--meta/recipes-extended/gzip/gzip_1.10.bb40
-rw-r--r--meta/recipes-extended/gzip/gzip_1.11.bb39
-rw-r--r--meta/recipes-extended/hdparm/hdparm_9.62.bb7
-rw-r--r--meta/recipes-extended/iputils/iputils/0001-meson-Make-tests-optional.patch220
-rw-r--r--meta/recipes-extended/iputils/iputils_20210722.bb11
-rw-r--r--meta/recipes-extended/libidn/libidn2_2.3.1.bb32
-rw-r--r--meta/recipes-extended/libidn/libidn2_2.3.2.bb31
-rw-r--r--meta/recipes-extended/libnsl/libnsl2_git.bb4
-rw-r--r--meta/recipes-extended/libsolv/libsolv_0.7.19.bb32
-rw-r--r--meta/recipes-extended/libsolv/libsolv_0.7.20.bb32
-rw-r--r--meta/recipes-extended/lighttpd/lighttpd/0001-meson-add-with_zstd-to-meson_options.txt.patch27
-rw-r--r--meta/recipes-extended/lighttpd/lighttpd_1.4.59.bb64
-rw-r--r--meta/recipes-extended/lsof/files/lsof-remove-host-information.patch17
-rw-r--r--meta/recipes-extended/lsof/lsof_4.91.bb63
-rw-r--r--meta/recipes-extended/lsof/lsof_4.94.0.bb46
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-Remove-OOM-tests-from-runtest-mm.patch12
-rw-r--r--meta/recipes-extended/ltp/ltp/0002-lib-fix-MemAvailable-parsing.patch36
-rw-r--r--meta/recipes-extended/ltp/ltp/0003-lapi-rtnetlink.h-Fix-include-guards.patch37
-rw-r--r--meta/recipes-extended/ltp/ltp/0004-lapi-Create-if_addr.h-and-reuse-it-in-rtnetlink.h.patch58
-rw-r--r--meta/recipes-extended/ltp/ltp/0005-lapi-if_addr.h-Define-IFA_FLAGS.patch60
-rw-r--r--meta/recipes-extended/ltp/ltp_20210524.bb138
-rw-r--r--meta/recipes-extended/ltp/ltp_20210927.bb136
-rw-r--r--meta/recipes-extended/man-db/man-db_2.9.4.bb13
-rw-r--r--meta/recipes-extended/man-pages/man-pages_5.12.bb36
-rw-r--r--meta/recipes-extended/man-pages/man-pages_5.13.bb36
-rw-r--r--meta/recipes-extended/mc/files/nomandate.patch24
-rw-r--r--meta/recipes-extended/mc/mc_4.8.26.bb55
-rw-r--r--meta/recipes-extended/mc/mc_4.8.27.bb60
-rw-r--r--meta/recipes-extended/mdadm/mdadm_4.1.bb2
-rw-r--r--meta/recipes-extended/newt/libnewt_0.52.21.bb2
-rw-r--r--meta/recipes-extended/pam/libpam/0001-Makefile.am-support-usrmage.patch28
-rw-r--r--meta/recipes-extended/pam/libpam/0001-modules-pam_namespace-Makefile.am-correctly-install-.patch28
-rw-r--r--meta/recipes-extended/pam/libpam_1.5.1.bb181
-rw-r--r--meta/recipes-extended/pam/libpam_1.5.2.bb184
-rw-r--r--meta/recipes-extended/parted/files/0002-libparted_fs_resize-link-against-libuuid-explicitly-.patch34
-rw-r--r--meta/recipes-extended/parted/files/check-vfat.patch2
-rw-r--r--meta/recipes-extended/parted/parted_3.4.bb1
-rw-r--r--meta/recipes-extended/perl/libconvert-asn1-perl_0.27.bb22
-rw-r--r--meta/recipes-extended/perl/libconvert-asn1-perl_0.33.bb21
-rw-r--r--meta/recipes-extended/quota/quota_4.06.bb2
-rw-r--r--meta/recipes-extended/rpcbind/rpcbind_1.2.6.bb2
-rw-r--r--meta/recipes-extended/shadow/files/0001-libmisc-fix-default-value-in-SHA_get_salt_rounds.patch64
-rw-r--r--meta/recipes-extended/shadow/files/useradd8
-rw-r--r--meta/recipes-extended/shadow/shadow-sysroot_4.6.bb2
-rw-r--r--meta/recipes-extended/shadow/shadow.inc12
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng/0001-Detemine-minimal-stack-size-via-sysconf-then-PTHREAD.patch103
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng_0.12.12.bb26
-rw-r--r--meta/recipes-extended/stress-ng/stress-ng_0.13.00.bb25
-rw-r--r--meta/recipes-extended/sudo/files/0001-lib-util-mksigname.c-correctly-include-header-for-ou.patch25
-rw-r--r--meta/recipes-extended/sudo/sudo.inc18
-rw-r--r--meta/recipes-extended/sudo/sudo_1.9.7p1.bb59
-rw-r--r--meta/recipes-extended/sudo/sudo_1.9.8p1.bb60
-rw-r--r--meta/recipes-extended/sysstat/sysstat.inc6
-rw-r--r--meta/recipes-extended/tar/tar_1.34.bb5
-rw-r--r--meta/recipes-extended/timezone/timezone.inc2
-rw-r--r--meta/recipes-extended/wget/wget.inc2
-rw-r--r--meta/recipes-extended/wget/wget_1.21.1.bb7
-rw-r--r--meta/recipes-extended/wget/wget_1.21.2.bb7
-rw-r--r--meta/recipes-extended/xdg-utils/xdg-utils/1f199813e0eb0246f63b54e9e154970e609575af.patch58
-rw-r--r--meta/recipes-extended/xdg-utils/xdg-utils_1.1.3.bb1
-rw-r--r--meta/recipes-extended/xinetd/xinetd_2.3.15.4.bb10
-rw-r--r--meta/recipes-gnome/epiphany/epiphany_40.2.bb29
-rw-r--r--meta/recipes-gnome/epiphany/epiphany_41.0.bb39
-rw-r--r--meta/recipes-gnome/gi-docgen/gi-docgen_git.bb4
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Don-t-use-AC_CANONICAL_HOST-3.34.3.patch27
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Don-t-use-AC_CANONICAL_HOST.patch6
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme/0001-Run-installation-commands-as-shell-jobs.patch24
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme_3.34.3.bb43
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme_3.38.0.bb44
-rw-r--r--meta/recipes-gnome/gnome/adwaita-icon-theme_41.0.bb43
-rw-r--r--meta/recipes-gnome/gobject-introspection/gobject-introspection_1.68.0.bb2
-rw-r--r--meta/recipes-gnome/json-glib/json-glib_1.6.2.bb33
-rw-r--r--meta/recipes-gnome/json-glib/json-glib_1.6.4.bb32
-rw-r--r--meta/recipes-gnome/libgudev/files/0001-gudevenumtypes-make-deterministic.patch44
-rw-r--r--meta/recipes-gnome/libgudev/libgudev_236.bb33
-rw-r--r--meta/recipes-gnome/libgudev/libgudev_237.bb31
-rw-r--r--meta/recipes-gnome/libhandy/libhandy_1.2.3.bb27
-rw-r--r--meta/recipes-gnome/libhandy/libhandy_1.4.0.bb27
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0001-Auto-detect-Bsymbolic-fixes-configure-on-macOS.patch35
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0001-GdkPixbufRGBA-ToGdkPixbufRGBA-start-naming-types-and.patch81
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0001-Makefile.am-pass-rust-target-to-cargo-also-when-not-.patch47
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0001-Remove-non-reproducible-SRCDIR.patch30
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0001-crossbeam-utils-check-only-the-architecture-not-the-.patch148
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0001-system-deps-src-lib.rs-do-not-probe-into-harcoded-li.patch53
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0001-vendor-system-deps-sort-dependencies-before-using-th.patch53
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0002-New-ToPixel-trait.patch100
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0003-New-ToCairoARGB-trait.patch81
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/0004-impl-ToPixel-for-CairoARGB.patch49
-rw-r--r--meta/recipes-gnome/librsvg/librsvg/gtk-option.patch60
-rw-r--r--meta/recipes-gnome/librsvg/librsvg_2.40.21.bb52
-rw-r--r--meta/recipes-gnome/librsvg/librsvg_2.52.0.bb76
-rw-r--r--meta/recipes-graphics/glew/glew/notempdir.patch19
-rw-r--r--meta/recipes-graphics/glew/glew_2.2.0.bb1
-rw-r--r--meta/recipes-graphics/glslang/glslang_11.5.0.bb31
-rw-r--r--meta/recipes-graphics/glslang/glslang_11.6.0.bb31
-rw-r--r--meta/recipes-graphics/harfbuzz/harfbuzz_2.8.2.bb48
-rw-r--r--meta/recipes-graphics/harfbuzz/harfbuzz_2.9.1.bb48
-rw-r--r--meta/recipes-graphics/igt-gpu-tools/igt-gpu-tools_git.bb2
-rw-r--r--meta/recipes-graphics/jpeg/libjpeg-turbo_2.1.0.bb61
-rw-r--r--meta/recipes-graphics/jpeg/libjpeg-turbo_2.1.1.bb62
-rw-r--r--meta/recipes-graphics/libepoxy/libepoxy_1.5.8.bb32
-rw-r--r--meta/recipes-graphics/libepoxy/libepoxy_1.5.9.bb32
-rw-r--r--meta/recipes-graphics/libsdl2/libsdl2_2.0.14.bb79
-rw-r--r--meta/recipes-graphics/libsdl2/libsdl2_2.0.16.bb79
-rw-r--r--meta/recipes-graphics/mesa/files/0001-v3d-vc4-Fix-dmabuf-import-for-non-scanout-buffers.patch72
-rw-r--r--meta/recipes-graphics/mesa/files/0002-meson.build-make-TLS-ELF-optional.patch12
-rw-r--r--meta/recipes-graphics/mesa/files/without-neon.patch53
-rw-r--r--meta/recipes-graphics/mesa/mesa-gl_21.2.1.bb (renamed from meta/recipes-graphics/mesa/mesa-gl_21.1.5.bb)0
-rw-r--r--meta/recipes-graphics/mesa/mesa.inc15
-rw-r--r--meta/recipes-graphics/mesa/mesa_21.1.5.bb4
-rw-r--r--meta/recipes-graphics/mesa/mesa_21.2.1.bb5
-rw-r--r--meta/recipes-graphics/pango/pango_1.48.7.bb54
-rw-r--r--meta/recipes-graphics/pango/pango_1.48.9.bb54
-rw-r--r--meta/recipes-graphics/piglit/piglit_git.bb5
-rw-r--r--meta/recipes-graphics/shaderc/shaderc_2021.1.bb28
-rw-r--r--meta/recipes-graphics/shaderc/shaderc_2021.2.bb28
-rw-r--r--meta/recipes-graphics/spir/files/0001-fix-strncpy-bound-error.patch30
-rw-r--r--meta/recipes-graphics/spir/spirv-headers_1.5.4.bb2
-rw-r--r--meta/recipes-graphics/spir/spirv-tools_2021.2.bb42
-rw-r--r--meta/recipes-graphics/spir/spirv-tools_2021.3.bb40
-rw-r--r--meta/recipes-graphics/vulkan/assimp/0001-Use-ASSIMP_LIB_INSTALL_DIR-to-search-library.patch68
-rw-r--r--meta/recipes-graphics/vulkan/assimp/0001-closes-https-github.com-assimp-assimp-issues-2733-up.patch1664
-rw-r--r--meta/recipes-graphics/vulkan/assimp_5.0.1.bb25
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-headers_1.2.182.0.bb22
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-headers_1.2.191.0.bb22
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-loader_1.2.182.0.bb41
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-loader_1.2.191.0.bb41
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-samples_git.bb2
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-tools_1.2.182.0.bb32
-rw-r--r--meta/recipes-graphics/vulkan/vulkan-tools_1.2.191.0.bb32
-rw-r--r--meta/recipes-graphics/waffle/waffle_1.6.1.bb2
-rw-r--r--meta/recipes-graphics/wayland/libinput_1.18.0.bb49
-rw-r--r--meta/recipes-graphics/wayland/libinput_1.19.0.bb49
-rw-r--r--meta/recipes-graphics/wayland/wayland-protocols_1.21.bb21
-rw-r--r--meta/recipes-graphics/wayland/wayland-protocols_1.23.bb21
-rw-r--r--meta/recipes-graphics/wayland/weston-init.bb2
-rwxr-xr-xmeta/recipes-graphics/wayland/weston-init/weston-start11
-rw-r--r--meta/recipes-graphics/wayland/weston/0001-libweston-backend-drm-Re-order-gbm-destruction-at-DR.patch50
-rw-r--r--meta/recipes-graphics/wayland/weston/systemd-notify.weston-start9
-rw-r--r--meta/recipes-graphics/wayland/weston_9.0.0.bb10
-rw-r--r--meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb6
-rw-r--r--meta/recipes-graphics/xorg-app/xeyes_1.1.2.bb14
-rw-r--r--meta/recipes-graphics/xorg-app/xeyes_1.2.0.bb13
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.1.0.bb11
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-libinput_1.2.0.bb11
-rw-r--r--meta/recipes-graphics/xorg-font/font-util_1.3.2.bb2
-rw-r--r--meta/recipes-graphics/xorg-lib/libx11-compose-data_1.6.8.bb2
-rw-r--r--meta/recipes-graphics/xorg-lib/libx11_1.7.2.bb2
-rw-r--r--meta/recipes-graphics/xorg-lib/libxfont2_2.0.4.bb24
-rw-r--r--meta/recipes-graphics/xorg-lib/libxfont2_2.0.5.bb23
-rw-r--r--meta/recipes-graphics/xorg-lib/libxfont_1.5.4.bb2
-rw-r--r--meta/recipes-graphics/xorg-lib/libxft_2.3.3.bb33
-rw-r--r--meta/recipes-graphics/xorg-lib/libxft_2.3.4.bb32
-rw-r--r--meta/recipes-graphics/xorg-lib/libxi_1.7.99.2.bb22
-rw-r--r--meta/recipes-graphics/xorg-lib/libxi_1.8.bb22
-rw-r--r--meta/recipes-graphics/xorg-lib/libxkbcommon_1.3.0.bb26
-rw-r--r--meta/recipes-graphics/xorg-lib/libxkbcommon_1.3.1.bb26
-rw-r--r--meta/recipes-graphics/xorg-lib/pixman_0.40.0.bb6
-rw-r--r--meta/recipes-graphics/xorg-lib/xkeyboard-config_2.33.bb30
-rw-r--r--meta/recipes-graphics/xorg-lib/xkeyboard-config_2.34.bb30
-rw-r--r--meta/recipes-graphics/xorg-proto/xorgproto_2021.4.99.2.bb25
-rw-r--r--meta/recipes-graphics/xorg-proto/xorgproto_2021.5.bb25
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg_1.20.12.bb33
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg_1.20.13.bb33
-rw-r--r--meta/recipes-graphics/xwayland/xwayland_21.1.2.bb2
-rw-r--r--meta/recipes-kernel/cryptodev/cryptodev-tests_1.12.bb1
-rw-r--r--meta/recipes-kernel/cryptodev/files/0001-tests-Makefile-do-not-use-Werror.patch24
-rw-r--r--meta/recipes-kernel/dtc/dtc.inc2
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools_2.0.22.bb6
-rw-r--r--meta/recipes-kernel/kmod/kmod_git.bb8
-rw-r--r--meta/recipes-kernel/linux-firmware/linux-firmware_20210511.bb1052
-rw-r--r--meta/recipes-kernel/linux-firmware/linux-firmware_20210818.bb1067
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers_5.13.bb18
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers_5.14.bb20
-rw-r--r--meta/recipes-kernel/linux/kernel-devsrc.bb10
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-dev.bb3
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_5.10.bb6
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_5.13.bb45
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_5.14.bb45
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_5.4.bb45
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_5.10.bb8
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_5.13.bb32
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_5.14.bb32
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_5.4.bb32
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_5.10.bb24
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_5.13.bb68
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_5.14.bb68
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_5.4.bb55
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-fix-cpu-hotplug-Remove-deprecated-CPU-hotplug-functi.patch394
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0001-src-Kbuild-change-missing-CONFIG_TRACEPOINTS-to-warn.patch25
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/0002-fix-Revert-Makefile-Enable-Wimplicit-fallthrough-for.patch829
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/Makefile-Do-not-fail-if-CONFIG_TRACEPOINTS-is-not-en.patch49
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules_2.12.6.bb42
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules_2.13.0.bb47
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools/0001-Fix-Tests-race-condition-in-test_event_tracker.patch221
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools/0001-src-common-correct-header-location.patch41
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools/0001-tests-regression-disable-the-tools-live-tests.patch55
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools/0002-Fix-Tests-race-condition-in-test_ns_contexts_change.patch46
-rwxr-xr-xmeta/recipes-kernel/lttng/lttng-tools/run-ptest1
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools_2.12.4.bb177
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools_2.13.0.bb190
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust/0001-Makefile.am-update-rpath-link.patch35
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust/0001-lttng-ust-common-link-with-liburcu-explicitly.patch25
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust/0001-python-lttngust-Makefile.am-Add-install-lib-to-setup.patch22
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust_2.12.2.bb52
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust_2.13.0.bb53
-rw-r--r--meta/recipes-kernel/perf/perf.bb2
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/ef5a8b9eda402e4e96c4e3ce01e7ff95d3e10470.patch26
-rw-r--r--meta/recipes-kernel/systemtap/systemtap_git.bb2
-rw-r--r--meta/recipes-kernel/systemtap/systemtap_git.inc3
-rw-r--r--meta/recipes-kernel/wireless-regdb/wireless-regdb_2021.04.21.bb43
-rw-r--r--meta/recipes-kernel/wireless-regdb/wireless-regdb_2021.07.14.bb43
-rw-r--r--meta/recipes-multimedia/ffmpeg/ffmpeg/fix-CVE-2021-38171.patch42
-rw-r--r--meta/recipes-multimedia/ffmpeg/ffmpeg_4.4.bb5
-rw-r--r--meta/recipes-multimedia/flac/flac_1.3.3.bb2
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-devtools_1.18.4.bb49
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-devtools_1.18.5.bb49
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-examples_1.18.4.bb35
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-examples_1.18.5.bb35
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.18.4.bb24
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.18.5.bb24
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.18.4.bb47
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.18.5.bb47
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.18.4.bb154
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.18.5.bb155
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/4ef5c91697a141fea7317aff7f0f28e5a861db99.patch50
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.18.4.bb95
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.18.5.bb94
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0002-rtpjitterbuffer-Fix-parsing-of-the-mediaclk-direct-f.patch33
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0003-Remove-volatile-from-static-vars-to-fix-build-with-g.patch100
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.18.4.bb78
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.18.5.bb76
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.18.4.bb43
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.18.5.bb43
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.18.4.bb26
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.18.5.bb26
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.18.4.bb31
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.18.5.bb31
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.18.4.bb53
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.18.5.bb53
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-Remove-unused-valgrind-detection.patch14
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0_1.18.4.bb75
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0_1.18.5.bb75
-rw-r--r--meta/recipes-multimedia/libsamplerate/libsamplerate0/shared_version_info.patch13
-rw-r--r--meta/recipes-multimedia/libsamplerate/libsamplerate0_0.1.9.bb1
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio.inc2
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio/0001-build-sys-Add-an-option-for-enabling-disabling-Valgr.patch67
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio/0001-build-sys-meson-check-if-NEON-code-can-be-compiled-o.patch71
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio_14.2.bb14
-rw-r--r--meta/recipes-multimedia/pulseaudio/pulseaudio_15.0.bb11
-rw-r--r--meta/recipes-multimedia/webp/libwebp_1.2.0.bb55
-rw-r--r--meta/recipes-multimedia/webp/libwebp_1.2.1.bb55
-rw-r--r--meta/recipes-rt/rt-tests/files/0001-Makefile-Allow-for-CC-and-AR-to-be-overridden.patch6
-rw-r--r--meta/recipes-rt/rt-tests/rt-tests.inc4
-rw-r--r--meta/recipes-sato/images/core-image-sato.bb1
-rw-r--r--meta/recipes-sato/webkit/libwpe_1.10.1.bb7
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/0001-ANGLE-do-not-enable-SSE-on-x86.patch26
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/0001-Enable-THREADS_PREFER_PTHREAD_FLAG.patch10
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/0001-MiniBrowser-Fix-reproduciblity.patch31
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/0001-Properly-use-CompletionHandler-when-USE_OPENGL_OR_ES.patch37
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/include_xutil.patch8
-rw-r--r--meta/recipes-sato/webkit/webkitgtk/musl-lower-stack-usage.patch14
-rw-r--r--meta/recipes-sato/webkit/webkitgtk_2.32.2.bb151
-rw-r--r--meta/recipes-sato/webkit/webkitgtk_2.34.0.bb161
-rw-r--r--meta/recipes-sato/webkit/wpebackend-fdo_1.10.0.bb4
-rw-r--r--meta/recipes-support/apr/apr/CVE-2021-35940.patch58
-rw-r--r--meta/recipes-support/apr/apr_1.7.0.bb1
-rw-r--r--meta/recipes-support/aspell/aspell_0.60.8.bb4
-rw-r--r--meta/recipes-support/aspell/files/CVE-2019-25051.patch101
-rw-r--r--meta/recipes-support/atk/at-spi2-core/0001-Ensure-x11_dep-is-defined.patch33
-rw-r--r--meta/recipes-support/atk/at-spi2-core_2.40.3.bb39
-rw-r--r--meta/recipes-support/atk/at-spi2-core_2.42.0.bb41
-rw-r--r--meta/recipes-support/boost/boost-1.76.0.inc20
-rw-r--r--meta/recipes-support/boost/boost-1.77.0.inc20
-rw-r--r--meta/recipes-support/boost/boost/0001-Fixes-wrong-type-for-mutex-in-regex-v5.patch54
-rw-r--r--meta/recipes-support/boost/boost_1.76.0.bb11
-rw-r--r--meta/recipes-support/boost/boost_1.77.0.bb9
-rw-r--r--meta/recipes-support/consolekit/consolekit_0.4.6.bb2
-rw-r--r--meta/recipes-support/curl/curl/cve-2021-22945.patch34
-rw-r--r--meta/recipes-support/curl/curl/cve-2021-22946.patch332
-rw-r--r--meta/recipes-support/curl/curl/cve-2021-22947.patch355
-rw-r--r--meta/recipes-support/curl/curl_7.78.0.bb4
-rw-r--r--meta/recipes-support/diffoscope/diffoscope_178.bb30
-rw-r--r--meta/recipes-support/diffoscope/diffoscope_182.bb30
-rw-r--r--meta/recipes-support/enchant/enchant2_2.3.0.bb31
-rw-r--r--meta/recipes-support/enchant/enchant2_2.3.1.bb31
-rw-r--r--meta/recipes-support/gnupg/gnupg/0001-configure.ac-use-a-custom-value-for-the-location-of-.patch6
-rw-r--r--meta/recipes-support/gnupg/gnupg/relocate.patch39
-rw-r--r--meta/recipes-support/gnupg/gnupg_2.3.1.bb84
-rw-r--r--meta/recipes-support/gnupg/gnupg_2.3.2.bb85
-rw-r--r--meta/recipes-support/itstool/itstool/0001-Native-Don-t-use-build-time-hardcoded-python-binary-.patch8
-rw-r--r--meta/recipes-support/itstool/itstool_2.0.6.bb24
-rw-r--r--meta/recipes-support/itstool/itstool_2.0.7.bb24
-rw-r--r--meta/recipes-support/libcap/files/0001-nativesdk-libcap-Raise-the-size-of-arrays-containing.patch36
-rw-r--r--meta/recipes-support/libcap/files/0001-tests-do-not-statically-link-a-test.patch52
-rw-r--r--meta/recipes-support/libcap/files/0002-tests-do-not-run-target-executables.patch10
-rw-r--r--meta/recipes-support/libcap/libcap_2.51.bb73
-rw-r--r--meta/recipes-support/libcap/libcap_2.54.bb76
-rw-r--r--meta/recipes-support/libevent/libevent/0003-test-mark-util-monotonic_prc_fallback-as-retriable.patch28
-rw-r--r--meta/recipes-support/libevent/libevent/0004-test-retriable-tests-are-marked-failed-only-when-all-a.patch81
-rw-r--r--meta/recipes-support/libevent/libevent/run-ptest10
-rw-r--r--meta/recipes-support/libevent/libevent_2.1.12.bb6
-rw-r--r--meta/recipes-support/libexif/files/CVE-2020-0198.patch66
-rw-r--r--meta/recipes-support/libexif/files/CVE-2020-0452.patch39
-rw-r--r--meta/recipes-support/libexif/libexif_0.6.22.bb26
-rw-r--r--meta/recipes-support/libexif/libexif_0.6.23.bb24
-rw-r--r--meta/recipes-support/libgcrypt/libgcrypt_1.9.3.bb61
-rw-r--r--meta/recipes-support/libgcrypt/libgcrypt_1.9.4.bb61
-rw-r--r--meta/recipes-support/libgit2/libgit2_1.1.1.bb22
-rw-r--r--meta/recipes-support/libgit2/libgit2_1.2.0.bb22
-rw-r--r--meta/recipes-support/libical/libical_3.0.10.bb46
-rw-r--r--meta/recipes-support/libical/libical_3.0.11.bb46
-rw-r--r--meta/recipes-support/libjitterentropy/libjitterentropy/0001-Makefile-restore-build-reproducibility.patch27
-rw-r--r--meta/recipes-support/libjitterentropy/libjitterentropy_3.0.2.bb27
-rw-r--r--meta/recipes-support/libjitterentropy/libjitterentropy_3.1.0.bb33
-rw-r--r--meta/recipes-support/libseccomp/files/0001-arch-Add-riscv32-architecture-support.patch162
-rw-r--r--meta/recipes-support/libseccomp/files/0002-Regenerate-syscall-cvs-file-from-5.13-rc5-kernel.patch996
-rw-r--r--meta/recipes-support/libseccomp/files/0002-man-Add-RISCV64-to-arch-list.patch28
-rw-r--r--meta/recipes-support/libseccomp/files/0003-syscalls-update-the-syscall-defs-for-Linux-v5.15.0-r.patch980
-rw-r--r--meta/recipes-support/libseccomp/files/0004-syscalls-Add-quotactl_path.patch40
-rw-r--r--meta/recipes-support/libseccomp/libseccomp_2.5.1.bb56
-rw-r--r--meta/recipes-support/libseccomp/libseccomp_2.5.2.bb58
-rw-r--r--meta/recipes-support/libsoup/libsoup-2.4_2.72.0.bb4
-rw-r--r--meta/recipes-support/libsoup/libsoup_3.0.1.bb44
-rw-r--r--meta/recipes-support/libssh2/files/0001-Don-t-let-host-enviroment-to-decide-if-a-test-is-bui.patch6
-rw-r--r--meta/recipes-support/libssh2/files/0001-configure-Conditionally-undefine-backend-m4-macro.patch30
-rw-r--r--meta/recipes-support/libssh2/files/0001-kex.c-move-EC-macro-outside-of-if-check-549-550.patch112
-rw-r--r--meta/recipes-support/libssh2/files/CVE-2019-17498.patch131
-rw-r--r--meta/recipes-support/libssh2/libssh2_1.10.0.bb50
-rw-r--r--meta/recipes-support/libssh2/libssh2_1.9.0.bb54
-rw-r--r--meta/recipes-support/lz4/files/CVE-2021-3520.patch27
-rw-r--r--meta/recipes-support/lz4/files/run-ptest43
-rw-r--r--meta/recipes-support/lz4/lz4_1.9.3.bb4
-rw-r--r--meta/recipes-support/lzo/lzo_2.10.bb2
-rw-r--r--meta/recipes-support/nghttp2/nghttp2/0001-fetch-ocsp-response-use-python3.patch27
-rw-r--r--meta/recipes-support/nghttp2/nghttp2_1.45.1.bb35
-rw-r--r--meta/recipes-support/pinentry/pinentry-1.1.1/gpg-error_pkconf.patch177
-rw-r--r--meta/recipes-support/pinentry/pinentry-1.2.0/gpg-error_pkconf.patch195
-rw-r--r--meta/recipes-support/pinentry/pinentry-1.2.0/libassuan_pkgconf.patch (renamed from meta/recipes-support/pinentry/pinentry-1.1.1/libassuan_pkgconf.patch)0
-rw-r--r--meta/recipes-support/pinentry/pinentry_1.1.1.bb36
-rw-r--r--meta/recipes-support/pinentry/pinentry_1.2.0.bb36
-rw-r--r--meta/recipes-support/ptest-runner/ptest-runner_2.4.1.bb30
-rw-r--r--meta/recipes-support/ptest-runner/ptest-runner_2.4.2.bb34
-rw-r--r--meta/recipes-support/re2c/re2c_2.1.1.bb16
-rw-r--r--meta/recipes-support/re2c/re2c_2.2.bb16
-rw-r--r--meta/recipes-support/rng-tools/rng-tools/0001-Adding-ability-to-detect-non-posix-extensions-for-pt.patch41
-rw-r--r--meta/recipes-support/rng-tools/rng-tools/0002-Allow-for-use-of-either-pthread-affinity-set-methods.patch47
-rw-r--r--meta/recipes-support/rng-tools/rng-tools/rngd.service1
-rw-r--r--meta/recipes-support/rng-tools/rng-tools_6.13.bb60
-rw-r--r--meta/recipes-support/rng-tools/rng-tools_6.14.bb61
-rw-r--r--meta/recipes-support/serf/serf/0001-buckets-ssl_buckets.c-do-not-use-ERR_GET_FUNC.patch28
-rw-r--r--meta/recipes-support/serf/serf_1.3.9.bb1
-rw-r--r--meta/recipes-support/shared-mime-info/shared-mime-info_git.bb2
-rw-r--r--meta/recipes-support/sqlite/sqlite3_3.36.0.bb2
-rw-r--r--meta/recipes-support/vim/files/CVE-2021-3778.patch46
-rw-r--r--meta/recipes-support/vim/files/b7081e135a16091c93f6f5f7525a5c58fb7ca9f9.patch207
-rw-r--r--meta/recipes-support/vim/vim.inc8
-rw-r--r--meta/site/arm-3214
-rw-r--r--meta/site/arm-6414
-rw-r--r--meta/site/arm-common3
-rw-r--r--meta/site/ix86-common17
-rw-r--r--meta/site/mips-common14
-rw-r--r--meta/site/mips-linux3
-rw-r--r--meta/site/mips64-linux3
-rw-r--r--meta/site/mips64el-linux3
-rw-r--r--meta/site/mipsel-linux3
-rw-r--r--meta/site/mipsisa32r6-linux3
-rw-r--r--meta/site/mipsisa32r6el-linux3
-rw-r--r--meta/site/mipsisa64r6-linux3
-rw-r--r--meta/site/mipsisa64r6el-linux3
-rw-r--r--meta/site/nios2-linux18
-rw-r--r--meta/site/powerpc32-linux17
-rw-r--r--meta/site/sh-common17
-rw-r--r--meta/site/x86_64-linux17
-rwxr-xr-xscripts/autobuilder-worker-prereq-tests5
-rwxr-xr-xscripts/buildhistory-collect-srcrevs4
-rwxr-xr-xscripts/contrib/convert-overrides.py13
-rwxr-xr-xscripts/gen-lockedsig-cache3
-rw-r--r--scripts/lib/build_perf/report.py3
-rw-r--r--scripts/lib/devtool/search.py5
-rw-r--r--scripts/lib/devtool/standard.py4
-rw-r--r--scripts/lib/recipetool/append.py8
-rw-r--r--scripts/lib/recipetool/create.py197
-rw-r--r--scripts/lib/recipetool/create_buildsys_python.py4
-rw-r--r--scripts/lib/recipetool/create_npm.py60
-rw-r--r--scripts/lib/recipetool/licenses.csv37
-rw-r--r--scripts/lib/scriptutils.py3
-rw-r--r--scripts/lib/wic/canned-wks/common.wks.inc2
-rw-r--r--scripts/lib/wic/canned-wks/directdisk-gpt.wks2
-rw-r--r--scripts/lib/wic/canned-wks/mkefidisk.wks2
-rw-r--r--scripts/lib/wic/help.py5
-rw-r--r--scripts/lib/wic/ksparser.py1
-rw-r--r--scripts/lib/wic/partition.py7
-rw-r--r--scripts/lib/wic/plugins/imager/direct.py9
-rw-r--r--scripts/lib/wic/plugins/source/bootimg-efi.py74
-rw-r--r--scripts/lib/wic/plugins/source/rootfs.py2
-rwxr-xr-xscripts/oe-pkgdata-util41
-rwxr-xr-xscripts/oe-publish-sdk4
-rwxr-xr-xscripts/oepydevshell-internal.py2
-rw-r--r--scripts/pybootchartgui/pybootchartgui/draw.py5
-rwxr-xr-xscripts/runqemu14
-rwxr-xr-xscripts/sstate-cache-management.sh40
-rwxr-xr-xscripts/wic2
1469 files changed, 60282 insertions, 25919 deletions
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 604d800f63..36a9bde90c 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -38,6 +38,9 @@ Component/Subsystem Maintainers
* Toolchain: Khem Raj
* ptest-runner: Aníbal Limón
* opkg: Alex Stewart
+* devtool: Saul Wold
+* eSDK: Saul Wold
+* overlayfs: Vyacheslav Yurkov
Maintainers needed
------------------
@@ -53,8 +56,6 @@ Maintainers needed
* Prelink-cross
* Matchbox
* Sato
-* devtool
-* eSDK
* Autobuilder
Layer Maintainers needed
diff --git a/meta-selftest/recipes-devtools/python/python-async-test.inc b/meta-selftest/recipes-devtools/python/python-async-test.inc
index 439a0b9276..6d7c7458b0 100644
--- a/meta-selftest/recipes-devtools/python/python-async-test.inc
+++ b/meta-selftest/recipes-devtools/python/python-async-test.inc
@@ -1,7 +1,7 @@
SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
HOMEPAGE = "http://github.com/gitpython-developers/async"
SECTION = "devel/python"
-LICENSE = "BSD"
+LICENSE = "BSD-3-Clause"
LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
inherit pypi
diff --git a/meta-selftest/recipes-test/logging-test/logging-test.bb b/meta-selftest/recipes-test/logging-test/logging-test.bb
new file mode 100644
index 0000000000..a6100123f9
--- /dev/null
+++ b/meta-selftest/recipes-test/logging-test/logging-test.bb
@@ -0,0 +1,24 @@
+SUMMARY = "Destined to fail"
+LICENSE = "CLOSED"
+
+deltask do_patch
+INHIBIT_DEFAULT_DEPS = "1"
+
+do_shelltest() {
+ echo "This is shell stdout"
+ echo "This is shell stderr" >&2
+ exit 1
+}
+addtask do_shelltest
+
+python do_pythontest_exit () {
+ print("This is python stdout")
+ sys.exit(1)
+}
+addtask do_pythontest_exit
+
+python do_pythontest_fatal () {
+ print("This is python fatal test stdout")
+ bb.fatal("This is a fatal error")
+}
+addtask do_pythontest_fatal
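
The failing tasks in this selftest recipe can also be triggered by hand; a minimal invocation sketch, using only the task names defined above (the oe-selftest wrapper that normally drives them is not shown here):

    $ bitbake logging-test -c shelltest
    $ bitbake logging-test -c pythontest_exit
    $ bitbake logging-test -c pythontest_fatal

Each invocation is expected to fail; the logging tests assert on the resulting output.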
diff --git a/meta-selftest/recipes-test/overlayfs-user/overlayfs-user.bb b/meta-selftest/recipes-test/overlayfs-user/overlayfs-user.bb
new file mode 100644
index 0000000000..60405067de
--- /dev/null
+++ b/meta-selftest/recipes-test/overlayfs-user/overlayfs-user.bb
@@ -0,0 +1,17 @@
+SUMMARY = "Overlayfs class unit test"
+DESCRIPTION = "Contains an overlayfs configuration"
+LICENSE = "MIT"
+
+INHIBIT_DEFAULT_DEPS = "1"
+EXCLUDE_FROM_WORLD = "1"
+
+inherit ${@bb.utils.contains("DISTRO_FEATURES", "overlayfs", "overlayfs", "", d)}
+include test_recipe.inc
+
+OVERLAYFS_WRITABLE_PATHS[mnt-overlay] = "/usr/share/my-application"
+
+do_install() {
+ install -d ${D}/usr/share/my-application
+}
+
+FILES:${PN} += "/usr"
diff --git a/meta-selftest/recipes-test/systemd-machine-units/systemd-machine-units_%.bbappend b/meta-selftest/recipes-test/systemd-machine-units/systemd-machine-units_%.bbappend
new file mode 100644
index 0000000000..205720982c
--- /dev/null
+++ b/meta-selftest/recipes-test/systemd-machine-units/systemd-machine-units_%.bbappend
@@ -0,0 +1,2 @@
+# This bbappend is used to alter the recipe using the test_recipe.inc file created by tests.
+include test_recipe.inc
diff --git a/meta-selftest/wic/test_efi_plugin.wks b/meta-selftest/wic/test_efi_plugin.wks
new file mode 100644
index 0000000000..1603d6c4bb
--- /dev/null
+++ b/meta-selftest/wic/test_efi_plugin.wks
@@ -0,0 +1,6 @@
+# short-description: This file is used in oe-selftest wic module to test efi plugin
+
+part /boot --source bootimg-efi --sourceparams="loader=systemd-boot,create-unified-kernel-image=true,initrd=${INITRAMFS_IMAGE}-${MACHINE}.${INITRAMFS_FSTYPES}" --active --align 1024 --use-uuid
+part / --source rootfs --fstype=ext4 --align 1024 --use-uuid
+
+bootloader --timeout=0 --append="console=ttyS0,115200n8"
diff --git a/meta-skeleton/recipes-baremetal/baremetal-examples/baremetal-helloworld_git.bb b/meta-skeleton/recipes-baremetal/baremetal-examples/baremetal-helloworld_git.bb
index 37193f5a33..d11e2e530e 100644
--- a/meta-skeleton/recipes-baremetal/baremetal-examples/baremetal-helloworld_git.bb
+++ b/meta-skeleton/recipes-baremetal/baremetal-examples/baremetal-helloworld_git.bb
@@ -4,7 +4,7 @@ DESCRIPTION = "These are introductory examples to showcase the use of QEMU to ru
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://LICENSE;md5=39346640a23c701e4f459e05f56f4449"
-SRCREV = "0bf9ea216e6f76be50726a3a74e527b7bbb0ad93"
+SRCREV = "31b4e5a337018b4a00a7426b0e5ed83b81df30c7"
PV = "0.1+git${SRCPV}"
SRC_URI = "git://github.com/aehs29/baremetal-helloqemu.git;protocol=https;branch=master"
@@ -28,13 +28,14 @@ inherit baremetal-image
# machine that QEMU uses on OE, e.g. -machine virt -cpu cortex-a57
# but the examples can also be run on other architectures/machines
# such as vexpress-a15 by overriding the setting on the machine.conf
-COMPATIBLE_MACHINE = "qemuarmv5|qemuarm|qemuarm64|qemuriscv64"
+COMPATIBLE_MACHINE = "qemuarmv5|qemuarm|qemuarm64|qemuriscv64|qemuriscv32"
BAREMETAL_QEMUARCH ?= ""
BAREMETAL_QEMUARCH:qemuarmv5 = "versatile"
BAREMETAL_QEMUARCH:qemuarm = "arm"
BAREMETAL_QEMUARCH:qemuarm64 = "aarch64"
BAREMETAL_QEMUARCH:qemuriscv64 = "riscv64"
+BAREMETAL_QEMUARCH:qemuriscv32 = "riscv32"
EXTRA_OEMAKE:append = " QEMUARCH=${BAREMETAL_QEMUARCH} V=1"
diff --git a/meta-skeleton/recipes-kernel/hello-mod/files/hello.c b/meta-skeleton/recipes-kernel/hello-mod/files/hello.c
index f3c0d372eb..6b73a79524 100644
--- a/meta-skeleton/recipes-kernel/hello-mod/files/hello.c
+++ b/meta-skeleton/recipes-kernel/hello-mod/files/hello.c
@@ -19,15 +19,17 @@
#include <linux/module.h>
-int init_module(void)
+static int __init hello_init(void)
{
- printk("Hello World!\n");
+ pr_info("Hello World!\n");
return 0;
}
-void cleanup_module(void)
+static void __exit hello_exit(void)
{
- printk("Goodbye Cruel World!\n");
+ pr_info("Goodbye Cruel World!\n");
}
+module_init(hello_init);
+module_exit(hello_exit);
MODULE_LICENSE("GPL");
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index 2c7968e659..4ab2460990 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -145,9 +145,12 @@ ACLOCALEXTRAPATH:class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/"
ACLOCALEXTRAPATH:class-nativesdk = " -I ${STAGING_DATADIR_NATIVE}/aclocal/"
python autotools_aclocals () {
- d.setVar("CONFIG_SITE", siteinfo_get_files(d, sysrootcache=True))
+ sitefiles, searched = siteinfo_get_files(d, sysrootcache=True)
+ d.setVar("CONFIG_SITE", " ".join(sitefiles))
}
+do_configure[file-checksums] += "${@' '.join(siteinfo_get_files(d, sysrootcache=False)[1])}"
+
CONFIGURE_FILES = "${S}/configure.in ${S}/configure.ac ${S}/config.h.in ${S}/acinclude.m4 Makefile.am"
autotools_do_configure() {
diff --git a/meta/classes/baremetal-image.bbclass b/meta/classes/baremetal-image.bbclass
index 9ec3f1460b..81f5e5e93d 100644
--- a/meta/classes/baremetal-image.bbclass
+++ b/meta/classes/baremetal-image.bbclass
@@ -82,12 +82,15 @@ QB_OPT_APPEND:append = " -nographic"
# RISC-V tunes set the BIOS, unset, and instruct QEMU to
# ignore the BIOS and boot from -kernel
QB_DEFAULT_BIOS:qemuriscv64 = ""
+QB_DEFAULT_BIOS:qemuriscv32 = ""
QB_OPT_APPEND:append:qemuriscv64 = " -bios none"
+QB_OPT_APPEND:append:qemuriscv32 = " -bios none"
# Use the medium-any code model for the RISC-V 64 bit implementation,
# since medlow can only access addresses below 0x80000000 and RAM
# starts at 0x80000000 on RISC-V 64
+# Keep RISC-V 32 using -mcmodel=medlow (symbols lie between -2GB:2GB)
CFLAGS:append:qemuriscv64 = " -mcmodel=medany"
@@ -102,13 +105,17 @@ inherit qemuboot
python(){
# do_addto_recipe_sysroot doesnt exist for all recipes, but we need it to have
# /usr/bin on recipe-sysroot (qemu) populated
+ # The do_addto_recipe_sysroot dependency now comes from EXTRA_IMAGEDEPENDS,
+ # so we just need the logic here to add its dependency to do_image.
def extraimage_getdepends(task):
deps = ""
for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split():
# Make sure we only add it for qemu
if 'qemu' in dep:
- deps += " %s:%s" % (dep, task)
+ if ":" in dep:
+ deps += " %s " % (dep)
+ else:
+ deps += " %s:%s" % (dep, task)
return deps
- d.appendVarFlag('do_image', 'depends', extraimage_getdepends('do_addto_recipe_sysroot'))
- d.appendVarFlag('do_image', 'depends', extraimage_getdepends('do_populate_sysroot'))
+ d.appendVarFlag('do_image', 'depends', extraimage_getdepends('do_populate_sysroot'))
}
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 11b65171d9..a65fcc6c1d 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -12,7 +12,7 @@ inherit logging
OE_EXTRA_IMPORTS ?= ""
-OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license ${OE_EXTRA_IMPORTS}"
+OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license oe.qa oe.reproducible ${OE_EXTRA_IMPORTS}"
OE_IMPORTS[type] = "list"
PACKAGECONFIG_CONFARGS ??= ""
@@ -153,14 +153,14 @@ do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {
src_uri = (d.getVar('SRC_URI') or "").split()
- if len(src_uri) == 0:
+ if not src_uri:
return
try:
fetcher = bb.fetch2.Fetch(src_uri, d)
fetcher.download()
except bb.fetch2.BBFetchException as e:
- bb.fatal(str(e))
+ bb.fatal("Bitbake Fetcher Error: " + repr(e))
}
addtask unpack after do_fetch
@@ -170,15 +170,53 @@ do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != o
python base_do_unpack() {
src_uri = (d.getVar('SRC_URI') or "").split()
- if len(src_uri) == 0:
+ if not src_uri:
return
try:
fetcher = bb.fetch2.Fetch(src_uri, d)
fetcher.unpack(d.getVar('WORKDIR'))
except bb.fetch2.BBFetchException as e:
- bb.fatal(str(e))
+ bb.fatal("Bitbake Fetcher Error: " + repr(e))
+}
+
+SSTATETASKS += "do_deploy_source_date_epoch"
+
+do_deploy_source_date_epoch () {
+ mkdir -p ${SDE_DEPLOYDIR}
+ if [ -e ${SDE_FILE} ]; then
+ echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}."
+ cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
+ else
+ echo "${SDE_FILE} not found!"
+ fi
+}
+
+python do_deploy_source_date_epoch_setscene () {
+ sstate_setscene(d)
+ bb.utils.mkdirhier(d.getVar('SDE_DIR'))
+ sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt')
+ if os.path.exists(sde_file):
+ target = d.getVar('SDE_FILE')
+ bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target))
+ bb.utils.rename(sde_file, target)
+ else:
+ bb.debug(1, "%s not found!" % sde_file)
+}
+
+do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}"
+do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}"
+addtask do_deploy_source_date_epoch_setscene
+addtask do_deploy_source_date_epoch before do_configure after do_patch
+
+python create_source_date_epoch_stamp() {
+ source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
+ oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d)
}
+do_unpack[postfuncs] += "create_source_date_epoch_stamp"
+
+def get_source_date_epoch_value(d):
+ return oe.reproducible.epochfile_read(d.getVar('SDE_FILE'), d)
def get_layers_branch_rev(d):
layers = (d.getVar("BBLAYERS") or "").split()
@@ -631,6 +669,10 @@ python () {
if path.endswith('.lz4'):
d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')
+ # *.zst should DEPEND on zstd-native for unpacking
+ elif path.endswith('.zst'):
+ d.appendVarFlag('do_unpack', 'depends', ' zstd-native:do_populate_sysroot')
+
# *.lz should DEPEND on lzip-native for unpacking
elif path.endswith('.lz'):
d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')
@@ -689,7 +731,7 @@ python () {
if os.path.basename(p) == machine and os.path.isdir(p):
paths.append(p)
- if len(paths) != 0:
+ if paths:
for s in srcuri.split():
if not s.startswith("file://"):
continue
@@ -722,7 +764,7 @@ do_cleansstate[nostamp] = "1"
python do_cleanall() {
src_uri = (d.getVar('SRC_URI') or "").split()
- if len(src_uri) == 0:
+ if not src_uri:
return
try:
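
To illustrate how the source-date-epoch plumbing added above is typically consumed, here is a minimal sketch of a consumer class; this is an assumption for illustration only, the exporting class is not part of this hunk and its exact wording may differ in the tree:

    # Sketch only: expose the recorded epoch to the build environment so that
    # compilers and archivers emit deterministic timestamps. Relies on the
    # get_source_date_epoch_value() helper and SDE_FILE defined in base.bbclass above.
    export SOURCE_DATE_EPOCH = "${@get_source_date_epoch_value(d)}"

The value is read from the __source_date_epoch.txt file deployed by do_deploy_source_date_epoch, so it is preserved across sstate reuse.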
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index 5099e70fb7..62d0d781a1 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -287,7 +287,7 @@ python buildhistory_emit_pkghistory() {
r = bb.utils.vercmp((pkge, pkgv, pkgr), (last_pkge, last_pkgv, last_pkgr))
if r < 0:
msg = "Package version for package %s went backwards which would break package feeds (from %s:%s-%s to %s:%s-%s)" % (pkg, last_pkge, last_pkgv, last_pkgr, pkge, pkgv, pkgr)
- package_qa_handle_error("version-going-backwards", msg, d)
+ oe.qa.handle_error("version-going-backwards", msg, d)
pkginfo = PackageInfo(pkg)
# Apparently the version can be different on a per-package basis (see Python)
@@ -321,6 +321,7 @@ python buildhistory_emit_pkghistory() {
# Create files-in-<package-name>.txt files containing a list of files of each recipe's package
bb.build.exec_func("buildhistory_list_pkg_files", d)
+ oe.qa.exit_if_errors(d)
}
python buildhistory_emit_outputsigs() {
@@ -442,11 +443,16 @@ def buildhistory_list_installed(d, rootfs_type="image"):
else:
pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target")
+ if rootfs_type == "sdk_host":
+ pkgdata_dir = d.getVar('PKGDATA_DIR_SDK')
+ else:
+ pkgdata_dir = d.getVar('PKGDATA_DIR')
+
for output_type, output_file in process_list:
output_file_full = os.path.join(d.getVar('WORKDIR'), output_file)
with open(output_file_full, 'w') as output:
- output.write(format_pkg_list(pkgs, output_type))
+ output.write(format_pkg_list(pkgs, output_type, pkgdata_dir))
python buildhistory_list_installed_image() {
buildhistory_list_installed(d)
@@ -487,6 +493,8 @@ buildhistory_get_installed() {
-e 's:|: -> :' \
-e 's:"\[REC\]":[style=dotted]:' \
-e 's:"\([<>=]\+\)" "\([^"]*\)":[label="\1 \2"]:' \
+ -e 's:"\([*]\+\)" "\([^"]*\)":[label="\2"]:' \
+ -e 's:"\[RPROVIDES\]":[style=dashed]:' \
$1/depends.tmp
# Add header, sorted and de-duped contents and footer and then delete the temp file
printf "digraph depends {\n node [shape=plaintext]\n" > $1/depends.dot
@@ -494,11 +502,22 @@ buildhistory_get_installed() {
echo "}" >> $1/depends.dot
rm $1/depends.tmp
+ # Set correct pkgdatadir
+ pkgdatadir=${PKGDATA_DIR}
+ if [ "$2" == "sdk" ] && [ "$3" == "host" ]; then
+ pkgdatadir="${PKGDATA_DIR_SDK}"
+ fi
+
# Produce installed package sizes list
- oe-pkgdata-util -p ${PKGDATA_DIR} read-value "PKGSIZE" -n -f $pkgcache > $1/installed-package-sizes.tmp
+ oe-pkgdata-util -p $pkgdatadir read-value "PKGSIZE" -n -f $pkgcache > $1/installed-package-sizes.tmp
cat $1/installed-package-sizes.tmp | awk '{print $2 "\tKiB\t" $1}' | sort -n -r > $1/installed-package-sizes.txt
rm $1/installed-package-sizes.tmp
+ # Produce package info: runtime_name, buildtime_name, recipe, version, size
+ oe-pkgdata-util -p $pkgdatadir read-value "PACKAGE,PN,PV,PKGSIZE" -n -f $pkgcache > $1/installed-package-info.tmp
+ cat $1/installed-package-info.tmp | sort -n -r -k 5 > $1/installed-package-info.txt
+ rm $1/installed-package-info.tmp
+
# We're now done with the cache, delete it
rm $pkgcache
@@ -535,7 +554,7 @@ buildhistory_get_sdk_installed() {
return
fi
- buildhistory_get_installed ${BUILDHISTORY_DIR_SDK}/$1 sdk
+ buildhistory_get_installed ${BUILDHISTORY_DIR_SDK}/$1 sdk $1
}
buildhistory_get_sdk_installed_host() {
@@ -766,7 +785,7 @@ def buildhistory_get_imagevars(d):
def buildhistory_get_sdkvars(d):
if d.getVar('BB_WORKERCONTEXT') != '1':
return ""
- sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
+ sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES TOOLCHAIN_HOST_TASK TOOLCHAIN_TARGET_TASK BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext':
# Extensible SDK uses some additional variables
sdkvars += " SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN"
diff --git a/meta/classes/cargo.bbclass b/meta/classes/cargo.bbclass
new file mode 100644
index 0000000000..0ca38143c0
--- /dev/null
+++ b/meta/classes/cargo.bbclass
@@ -0,0 +1,89 @@
+##
+## Purpose:
+## This class is used by any recipes that are built using
+## Cargo.
+
+inherit cargo_common
+
+# the binary we will use
+CARGO = "cargo"
+
+# We need cargo to compile for the target
+BASEDEPENDS:append = " cargo-native"
+
+# Ensure we get the right rust variant
+DEPENDS:append:class-target = " virtual/${TARGET_PREFIX}rust ${RUSTLIB_DEP}"
+DEPENDS:append:class-native = " rust-native"
+
+# Enable build separation
+B = "${WORKDIR}/build"
+
+# In case something fails in the build process, give a bit more feedback on
+# where the issue occurred
+export RUST_BACKTRACE = "1"
+
+# The directory containing the Cargo.toml, relative to the root directory. By
+# default, assume the Cargo.toml sits directly in the root directory.
+CARGO_SRC_DIR ??= ""
+
+# The actual path to the Cargo.toml
+MANIFEST_PATH ??= "${S}/${CARGO_SRC_DIR}/Cargo.toml"
+
+RUSTFLAGS ??= ""
+BUILD_MODE = "${@['--release', ''][d.getVar('DEBUG_BUILD') == '1']}"
+CARGO_BUILD_FLAGS = "-v --target ${HOST_SYS} ${BUILD_MODE} --manifest-path=${MANIFEST_PATH}"
+
+# This is based on the content of CARGO_BUILD_FLAGS and generally will need to
+# change if CARGO_BUILD_FLAGS changes.
+BUILD_DIR = "${@['release', 'debug'][d.getVar('DEBUG_BUILD') == '1']}"
+CARGO_TARGET_SUBDIR="${HOST_SYS}/${BUILD_DIR}"
+oe_cargo_build () {
+ export RUSTFLAGS="${RUSTFLAGS}"
+ export RUST_TARGET_PATH="${RUST_TARGET_PATH}"
+ bbnote "cargo = $(which ${CARGO})"
+ bbnote "rustc = $(which ${RUSTC})"
+ bbnote "${CARGO} build ${CARGO_BUILD_FLAGS} $@"
+ "${CARGO}" build ${CARGO_BUILD_FLAGS} "$@"
+}
+
+do_compile[progress] = "outof:\s+(\d+)/(\d+)"
+cargo_do_compile () {
+ oe_cargo_fix_env
+ oe_cargo_build
+}
+
+cargo_do_install () {
+ local have_installed=false
+ for tgt in "${B}/target/${CARGO_TARGET_SUBDIR}/"*; do
+ case $tgt in
+ *.so|*.rlib)
+ install -d "${D}${rustlibdir}"
+ install -m755 "$tgt" "${D}${rustlibdir}"
+ have_installed=true
+ ;;
+ *examples)
+ if [ -d "$tgt" ]; then
+ for example in "$tgt/"*; do
+ if [ -f "$example" ] && [ -x "$example" ]; then
+ install -d "${D}${bindir}"
+ install -m755 "$example" "${D}${bindir}"
+ have_installed=true
+ fi
+ done
+ fi
+ ;;
+ *)
+ if [ -f "$tgt" ] && [ -x "$tgt" ]; then
+ install -d "${D}${bindir}"
+ install -m755 "$tgt" "${D}${bindir}"
+ have_installed=true
+ fi
+ ;;
+ esac
+ done
+ if ! $have_installed; then
+ die "Did not find anything to install"
+ fi
+}
+
+EXPORT_FUNCTIONS do_compile do_install
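
As a usage sketch only, a recipe built with this class would typically look something like the fragment below. The recipe name, fetch details and subdirectory are hypothetical; the cargo-specific variables are the ones defined above.

    SUMMARY = "Example application built with cargo"
    HOMEPAGE = "https://example.com/hello-rust"
    LICENSE = "MIT"
    LIC_FILES_CHKSUM = "file://LICENSE;md5=<hypothetical-md5>"

    SRC_URI = "git://example.com/hello-rust.git;protocol=https;branch=main"
    SRCREV = "<hypothetical-revision>"
    S = "${WORKDIR}/git"

    # Cargo.toml is not at the repository root in this (hypothetical) project
    CARGO_SRC_DIR = "cli"

    inherit cargo

With that in place, cargo_do_compile runs oe_cargo_build against ${MANIFEST_PATH}, and cargo_do_install picks up executables, *.so/*.rlib files and examples from ${B}/target/${CARGO_TARGET_SUBDIR}.
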
diff --git a/meta/classes/cargo_common.bbclass b/meta/classes/cargo_common.bbclass
new file mode 100644
index 0000000000..23d82aa6ab
--- /dev/null
+++ b/meta/classes/cargo_common.bbclass
@@ -0,0 +1,125 @@
+##
+## Purpose:
+## This class supports building with cargo. It has to be
+## separate from cargo.bbclass because Rust itself is now
+## built with Cargo but cannot use cargo.bbclass, which
+## depends on and assumes that Rust and Cargo are already
+## installed. It is therefore used by both cargo.bbclass
+## and the Rust recipes.
+##
+
+# add crate fetch support
+inherit crate-fetch
+inherit rust-common
+
+# Where we download our registry and dependencies to
+export CARGO_HOME = "${WORKDIR}/cargo_home"
+
+# The pkg-config-rs library used by cargo build scripts disables itself when
+# cross compiling unless this is defined. We set up pkg-config appropriately
+# for cross compilation, so tell it we know better than it.
+export PKG_CONFIG_ALLOW_CROSS = "1"
+
+# Set this to 1 so that cargo is not told to use the crates downloaded by
+# bitbake. Some Rust packages, for example the rust compiler itself, come with
+# their own vendored sources; specifying two [source.crates-io] sections will
+# not work.
+CARGO_DISABLE_BITBAKE_VENDORING ?= "0"
+
+# Used by libstd-rs to point to the vendor dir included in rustc src
+CARGO_VENDORING_DIRECTORY ?= "${CARGO_HOME}/bitbake"
+
+CARGO_RUST_TARGET_CCLD ?= "${RUST_TARGET_CCLD}"
+cargo_common_do_configure () {
+ mkdir -p ${CARGO_HOME}/bitbake
+
+ cat <<- EOF > ${CARGO_HOME}/config
+ # EXTRA_OECARGO_PATHS
+ paths = [
+ $(for p in ${EXTRA_OECARGO_PATHS}; do echo \"$p\",; done)
+ ]
+ EOF
+
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ # Local mirror vendored by bitbake
+ [source.bitbake]
+ directory = "${CARGO_VENDORING_DIRECTORY}"
+ EOF
+
+ if [ -z "${EXTERNALSRC}" ] && [ ${CARGO_DISABLE_BITBAKE_VENDORING} = "0" ]; then
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ [source.crates-io]
+ replace-with = "bitbake"
+ local-registry = "/nonexistant"
+ EOF
+ fi
+
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ [http]
+ # Multiplexing can't be enabled because http2 can't be enabled
+ # in curl-native without dependency loops
+ multiplexing = false
+
+ # Ignore the hard coded and incorrect path to certificates
+ cainfo = "${STAGING_ETCDIR_NATIVE}/ssl/certs/ca-certificates.crt"
+
+ EOF
+
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ # HOST_SYS
+ [target.${HOST_SYS}]
+ linker = "${CARGO_RUST_TARGET_CCLD}"
+ EOF
+
+ if [ "${HOST_SYS}" != "${BUILD_SYS}" ]; then
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ # BUILD_SYS
+ [target.${BUILD_SYS}]
+ linker = "${RUST_BUILD_CCLD}"
+ EOF
+ fi
+
+ # Put build output in build directory preferred by bitbake instead of
+ # inside source directory unless they are the same
+ if [ "${B}" != "${S}" ]; then
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ [build]
+	# Use an out-of-tree build destination to avoid polluting the source tree
+ target-dir = "${B}/target"
+ EOF
+ fi
+
+ cat <<- EOF >> ${CARGO_HOME}/config
+
+ [term]
+ progress.when = 'always'
+ progress.width = 80
+ EOF
+}
+
+oe_cargo_fix_env () {
+ export CC="${RUST_TARGET_CC}"
+ export CXX="${RUST_TARGET_CXX}"
+ export CFLAGS="${CFLAGS}"
+ export CXXFLAGS="${CXXFLAGS}"
+ export AR="${AR}"
+ export TARGET_CC="${RUST_TARGET_CC}"
+ export TARGET_CXX="${RUST_TARGET_CXX}"
+ export TARGET_CFLAGS="${CFLAGS}"
+ export TARGET_CXXFLAGS="${CXXFLAGS}"
+ export TARGET_AR="${AR}"
+ export HOST_CC="${RUST_BUILD_CC}"
+ export HOST_CXX="${RUST_BUILD_CXX}"
+ export HOST_CFLAGS="${BUILD_CFLAGS}"
+ export HOST_CXXFLAGS="${BUILD_CXXFLAGS}"
+ export HOST_AR="${BUILD_AR}"
+}
+
+EXTRA_OECARGO_PATHS ??= ""
+
+EXPORT_FUNCTIONS do_configure
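
As a hedged illustration of how a recipe interacts with this class, the two knobs most likely to be set per recipe are EXTRA_OECARGO_PATHS and CARGO_DISABLE_BITBAKE_VENDORING; the values below are made up.

    # Entries here are written into the "paths = [ ... ]" override list of the
    # generated ${CARGO_HOME}/config
    EXTRA_OECARGO_PATHS += "${S}/local-crates"

    # This (hypothetical) recipe ships its own vendored sources, as the rust
    # compiler does, so do not point cargo at the crates fetched by bitbake
    CARGO_DISABLE_BITBAKE_VENDORING = "1"
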
diff --git a/meta/classes/cpan-base.bbclass b/meta/classes/cpan-base.bbclass
index 9758065bfc..93d11e1bee 100644
--- a/meta/classes/cpan-base.bbclass
+++ b/meta/classes/cpan-base.bbclass
@@ -20,7 +20,7 @@ PERLLIBDIRS:class-native = "${libdir}/perl5"
def cpan_upstream_check_pattern(d):
for x in (d.getVar('SRC_URI') or '').split(' '):
if x.startswith("https://cpan.metacpan.org"):
- _pattern = x.split('/')[-1].replace(d.getVar('PV'), '(?P<pver>\d+.\d+)')
+ _pattern = x.split('/')[-1].replace(d.getVar('PV'), r'(?P<pver>\d+.\d+)')
return _pattern
return ''
diff --git a/meta/classes/crate-fetch.bbclass b/meta/classes/crate-fetch.bbclass
new file mode 100644
index 0000000000..c0ed434a96
--- /dev/null
+++ b/meta/classes/crate-fetch.bbclass
@@ -0,0 +1,13 @@
+#
+# crate-fetch class
+#
+# Registers 'crate' method for Bitbake fetch2.
+#
+# Adds support for the following format in recipe SRC_URI:
+# crate://<packagename>/<version>
+#
+
+python () {
+ import crate
+ bb.fetch2.methods.append( crate.Crate() )
+}
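
Following the format described in the comment above, a recipe pulling in individual crates would list them in SRC_URI roughly like this (crate names and versions are purely illustrative):

    SRC_URI += " \
        crate://rand/0.8.4 \
        crate://serde/1.0.130 \
    "
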
diff --git a/meta/classes/create-spdx.bbclass b/meta/classes/create-spdx.bbclass
new file mode 100644
index 0000000000..739b46e9b3
--- /dev/null
+++ b/meta/classes/create-spdx.bbclass
@@ -0,0 +1,948 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+DEPLOY_DIR_SPDX ??= "${DEPLOY_DIR}/spdx/${MACHINE}"
+
+# The product name that the CVE database uses. Defaults to BPN, but may need to
+# be overridden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
+CVE_PRODUCT ??= "${BPN}"
+CVE_VERSION ??= "${PV}"
+
+SPDXDIR ??= "${WORKDIR}/spdx"
+SPDXDEPLOY = "${SPDXDIR}/deploy"
+SPDXWORK = "${SPDXDIR}/work"
+
+SPDX_TOOL_NAME ??= "oe-spdx-creator"
+SPDX_TOOL_VERSION ??= "1.0"
+
+SPDXRUNTIMEDEPLOY = "${SPDXDIR}/runtime-deploy"
+
+SPDX_INCLUDE_SOURCES ??= "0"
+SPDX_INCLUDE_PACKAGED ??= "0"
+SPDX_ARCHIVE_SOURCES ??= "0"
+SPDX_ARCHIVE_PACKAGED ??= "0"
+
+SPDX_UUID_NAMESPACE ??= "sbom.openembedded.org"
+SPDX_NAMESPACE_PREFIX ??= "http://spdx.org/spdxdoc"
+
+SPDX_LICENSES ??= "${COREBASE}/meta/files/spdx-licenses.json"
+
+do_image_complete[depends] = "virtual/kernel:do_create_spdx"
+
+def get_doc_namespace(d, doc):
+ import uuid
+ namespace_uuid = uuid.uuid5(uuid.NAMESPACE_DNS, d.getVar("SPDX_UUID_NAMESPACE"))
+ return "%s/%s-%s" % (d.getVar("SPDX_NAMESPACE_PREFIX"), doc.name, str(uuid.uuid5(namespace_uuid, doc.name)))
+
+def recipe_spdx_is_native(d, recipe):
+ return any(a.annotationType == "OTHER" and
+ a.annotator == "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION")) and
+ a.comment == "isNative" for a in recipe.annotations)
+
+def is_work_shared(d):
+ pn = d.getVar('PN')
+ return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')
+
+
+python() {
+ import json
+ if d.getVar("SPDX_LICENSE_DATA"):
+ return
+
+ with open(d.getVar("SPDX_LICENSES"), "r") as f:
+ data = json.load(f)
+ # Transform the license array to a dictionary
+ data["licenses"] = {l["licenseId"]: l for l in data["licenses"]}
+ d.setVar("SPDX_LICENSE_DATA", data)
+}
+
+def convert_license_to_spdx(lic, document, d, existing={}):
+ from pathlib import Path
+ import oe.spdx
+
+ available_licenses = d.getVar("AVAILABLE_LICENSES").split()
+ license_data = d.getVar("SPDX_LICENSE_DATA")
+ extracted = {}
+
+ def add_extracted_license(ident, name):
+ nonlocal document
+
+ if name in extracted:
+ return
+
+ extracted_info = oe.spdx.SPDXExtractedLicensingInfo()
+ extracted_info.name = name
+ extracted_info.licenseId = ident
+ extracted_info.extractedText = None
+
+ if name == "PD":
+ # Special-case this.
+ extracted_info.extractedText = "Software released to the public domain"
+ elif name in available_licenses:
+ # This license can be found in COMMON_LICENSE_DIR or LICENSE_PATH
+ for directory in [d.getVar('COMMON_LICENSE_DIR')] + d.getVar('LICENSE_PATH').split():
+ try:
+ with (Path(directory) / name).open(errors="replace") as f:
+ extracted_info.extractedText = f.read()
+ break
+ except FileNotFoundError:
+ pass
+ if extracted_info.extractedText is None:
+ # Error out, as the license was in available_licenses so should
+ # be on disk somewhere.
+ bb.error("Cannot find text for license %s" % name)
+ else:
+ # If it's not SPDX, or PD, or in available licenses, then NO_GENERIC_LICENSE must be set
+ filename = d.getVarFlag('NO_GENERIC_LICENSE', name)
+ if filename:
+ filename = d.expand("${S}/" + filename)
+ with open(filename, errors="replace") as f:
+ extracted_info.extractedText = f.read()
+ else:
+ bb.error("Cannot find any text for license %s" % name)
+
+ extracted[name] = extracted_info
+ document.hasExtractedLicensingInfos.append(extracted_info)
+
+ def convert(l):
+ if l == "(" or l == ")":
+ return l
+
+ if l == "&":
+ return "AND"
+
+ if l == "|":
+ return "OR"
+
+ if l == "CLOSED":
+ return "NONE"
+
+ spdx_license = d.getVarFlag("SPDXLICENSEMAP", l) or l
+ if spdx_license in license_data["licenses"]:
+ return spdx_license
+
+ try:
+ spdx_license = existing[l]
+ except KeyError:
+ spdx_license = "LicenseRef-" + l
+ add_extracted_license(spdx_license, l)
+
+ return spdx_license
+
+ lic_split = lic.replace("(", " ( ").replace(")", " ) ").split()
+
+ return ' '.join(convert(l) for l in lic_split)
+
+
+def process_sources(d):
+ pn = d.getVar('PN')
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+
+ # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+ # so avoid archiving source here.
+ if pn.startswith('glibc-locale'):
+ return False
+ if d.getVar('PN') == "libtool-cross":
+ return False
+ if d.getVar('PN') == "libgcc-initial":
+ return False
+ if d.getVar('PN') == "shadow-sysroot":
+ return False
+
+ # We just archive gcc-source for all the gcc related recipes
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+        bb.debug(1, 'spdx: There is a bug in the scan of %s, do nothing' % pn)
+ return False
+
+ return True
+
+
+def add_package_files(d, doc, spdx_pkg, topdir, get_spdxid, get_types, *, archive=None, ignore_dirs=[], ignore_top_level_dirs=[]):
+ from pathlib import Path
+ import oe.spdx
+ import hashlib
+
+ source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
+ if source_date_epoch:
+ source_date_epoch = int(source_date_epoch)
+
+ sha1s = []
+ spdx_files = []
+
+ file_counter = 1
+ for subdir, dirs, files in os.walk(topdir):
+ dirs[:] = [d for d in dirs if d not in ignore_dirs]
+ if subdir == str(topdir):
+ dirs[:] = [d for d in dirs if d not in ignore_top_level_dirs]
+
+ for file in files:
+ filepath = Path(subdir) / file
+ filename = str(filepath.relative_to(topdir))
+
+ if filepath.is_file() and not filepath.is_symlink():
+ spdx_file = oe.spdx.SPDXFile()
+ spdx_file.SPDXID = get_spdxid(file_counter)
+ for t in get_types(filepath):
+ spdx_file.fileTypes.append(t)
+ spdx_file.fileName = filename
+
+ if archive is not None:
+ with filepath.open("rb") as f:
+ info = archive.gettarinfo(fileobj=f)
+ info.name = filename
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ if source_date_epoch is not None and info.mtime > source_date_epoch:
+ info.mtime = source_date_epoch
+
+ archive.addfile(info, f)
+
+ sha1 = bb.utils.sha1_file(filepath)
+ sha1s.append(sha1)
+ spdx_file.checksums.append(oe.spdx.SPDXChecksum(
+ algorithm="SHA1",
+ checksumValue=sha1,
+ ))
+ spdx_file.checksums.append(oe.spdx.SPDXChecksum(
+ algorithm="SHA256",
+ checksumValue=bb.utils.sha256_file(filepath),
+ ))
+
+ doc.files.append(spdx_file)
+ doc.add_relationship(spdx_pkg, "CONTAINS", spdx_file)
+ spdx_pkg.hasFiles.append(spdx_file.SPDXID)
+
+ spdx_files.append(spdx_file)
+
+ file_counter += 1
+
+ sha1s.sort()
+ verifier = hashlib.sha1()
+ for v in sha1s:
+ verifier.update(v.encode("utf-8"))
+ spdx_pkg.packageVerificationCode.packageVerificationCodeValue = verifier.hexdigest()
+
+ return spdx_files
+
+
+def add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources):
+ from pathlib import Path
+ import hashlib
+ import oe.packagedata
+ import oe.spdx
+
+ debug_search_paths = [
+ Path(d.getVar('PKGD')),
+ Path(d.getVar('STAGING_DIR_TARGET')),
+ Path(d.getVar('STAGING_DIR_NATIVE')),
+ ]
+
+ pkg_data = oe.packagedata.read_subpkgdata_extended(package, d)
+
+ if pkg_data is None:
+ return
+
+ for file_path, file_data in pkg_data["files_info"].items():
+ if not "debugsrc" in file_data:
+ continue
+
+ for pkg_file in package_files:
+ if file_path.lstrip("/") == pkg_file.fileName.lstrip("/"):
+ break
+ else:
+ bb.fatal("No package file found for %s" % str(file_path))
+ continue
+
+ for debugsrc in file_data["debugsrc"]:
+ ref_id = "NOASSERTION"
+ for search in debug_search_paths:
+ debugsrc_path = search / debugsrc.lstrip("/")
+ if not debugsrc_path.exists():
+ continue
+
+ file_sha256 = bb.utils.sha256_file(debugsrc_path)
+
+ if file_sha256 in sources:
+ source_file = sources[file_sha256]
+
+ doc_ref = package_doc.find_external_document_ref(source_file.doc.documentNamespace)
+ if doc_ref is None:
+ doc_ref = oe.spdx.SPDXExternalDocumentRef()
+ doc_ref.externalDocumentId = "DocumentRef-dependency-" + source_file.doc.name
+ doc_ref.spdxDocument = source_file.doc.documentNamespace
+ doc_ref.checksum.algorithm = "SHA1"
+ doc_ref.checksum.checksumValue = source_file.doc_sha1
+ package_doc.externalDocumentRefs.append(doc_ref)
+
+ ref_id = "%s:%s" % (doc_ref.externalDocumentId, source_file.file.SPDXID)
+ else:
+ bb.debug(1, "Debug source %s with SHA256 %s not found in any dependency" % (str(debugsrc_path), file_sha256))
+ break
+ else:
+ bb.debug(1, "Debug source %s not found" % debugsrc)
+
+ package_doc.add_relationship(pkg_file, "GENERATED_FROM", ref_id, comment=debugsrc)
+
+def collect_dep_recipes(d, doc, spdx_recipe):
+ from pathlib import Path
+ import oe.sbom
+ import oe.spdx
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+ dep_recipes = []
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ deps = sorted(set(
+ dep[0] for dep in taskdepdata.values() if
+ dep[1] == "do_create_spdx" and dep[0] != d.getVar("PN")
+ ))
+ for dep_pn in deps:
+ dep_recipe_path = deploy_dir_spdx / "recipes" / ("recipe-%s.spdx.json" % dep_pn)
+
+ spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_recipe_path)
+
+ for pkg in spdx_dep_doc.packages:
+ if pkg.name == dep_pn:
+ spdx_dep_recipe = pkg
+ break
+ else:
+ continue
+
+ dep_recipes.append(oe.sbom.DepRecipe(spdx_dep_doc, spdx_dep_sha1, spdx_dep_recipe))
+
+ dep_recipe_ref = oe.spdx.SPDXExternalDocumentRef()
+ dep_recipe_ref.externalDocumentId = "DocumentRef-dependency-" + spdx_dep_doc.name
+ dep_recipe_ref.spdxDocument = spdx_dep_doc.documentNamespace
+ dep_recipe_ref.checksum.algorithm = "SHA1"
+ dep_recipe_ref.checksum.checksumValue = spdx_dep_sha1
+
+ doc.externalDocumentRefs.append(dep_recipe_ref)
+
+ doc.add_relationship(
+ "%s:%s" % (dep_recipe_ref.externalDocumentId, spdx_dep_recipe.SPDXID),
+ "BUILD_DEPENDENCY_OF",
+ spdx_recipe
+ )
+
+ return dep_recipes
+
+collect_dep_recipes[vardepsexclude] += "BB_TASKDEPDATA"
+
+
+def collect_dep_sources(d, dep_recipes):
+ import oe.sbom
+
+ sources = {}
+ for dep in dep_recipes:
+ # Don't collect sources from native recipes as they
+ # match non-native sources also.
+ if recipe_spdx_is_native(d, dep.recipe):
+ continue
+ recipe_files = set(dep.recipe.hasFiles)
+
+ for spdx_file in dep.doc.files:
+ if spdx_file.SPDXID not in recipe_files:
+ continue
+
+ if "SOURCE" in spdx_file.fileTypes:
+ for checksum in spdx_file.checksums:
+ if checksum.algorithm == "SHA256":
+ sources[checksum.checksumValue] = oe.sbom.DepSource(dep.doc, dep.doc_sha1, dep.recipe, spdx_file)
+ break
+
+ return sources
+
+
+python do_create_spdx() {
+ from datetime import datetime, timezone
+ import oe.sbom
+ import oe.spdx
+ import uuid
+ from pathlib import Path
+ from contextlib import contextmanager
+ import oe.cve_check
+
+ @contextmanager
+ def optional_tarfile(name, guard, mode="w"):
+ import tarfile
+ import bb.compress.zstd
+
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+
+ if guard:
+ name.parent.mkdir(parents=True, exist_ok=True)
+ with bb.compress.zstd.open(name, mode=mode + "b", num_threads=num_threads) as f:
+ with tarfile.open(fileobj=f, mode=mode + "|") as tf:
+ yield tf
+ else:
+ yield None
+
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ spdx_workdir = Path(d.getVar("SPDXWORK"))
+ include_packaged = d.getVar("SPDX_INCLUDE_PACKAGED") == "1"
+ include_sources = d.getVar("SPDX_INCLUDE_SOURCES") == "1"
+ archive_sources = d.getVar("SPDX_ARCHIVE_SOURCES") == "1"
+ archive_packaged = d.getVar("SPDX_ARCHIVE_PACKAGED") == "1"
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+ doc = oe.spdx.SPDXDocument()
+
+ doc.name = "recipe-" + d.getVar("PN")
+ doc.documentNamespace = get_doc_namespace(d, doc)
+ doc.creationInfo.created = creation_time
+ doc.creationInfo.comment = "This document was created by analyzing recipe files during the build."
+ doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ doc.creationInfo.creators.append("Organization: OpenEmbedded ()")
+ doc.creationInfo.creators.append("Person: N/A ()")
+
+ recipe = oe.spdx.SPDXPackage()
+ recipe.name = d.getVar("PN")
+ recipe.versionInfo = d.getVar("PV")
+ recipe.SPDXID = oe.sbom.get_recipe_spdxid(d)
+ if bb.data.inherits_class("native", d):
+ annotation = oe.spdx.SPDXAnnotation()
+ annotation.annotationDate = creation_time
+ annotation.annotationType = "OTHER"
+ annotation.annotator = "Tool: %s - %s" % (d.getVar("SPDX_TOOL_NAME"), d.getVar("SPDX_TOOL_VERSION"))
+ annotation.comment = "isNative"
+ recipe.annotations.append(annotation)
+
+ for s in d.getVar('SRC_URI').split():
+ if not s.startswith("file://"):
+ recipe.downloadLocation = s
+ break
+ else:
+ recipe.downloadLocation = "NOASSERTION"
+
+ homepage = d.getVar("HOMEPAGE")
+ if homepage:
+ recipe.homepage = homepage
+
+ license = d.getVar("LICENSE")
+ if license:
+ recipe.licenseDeclared = convert_license_to_spdx(license, doc, d)
+
+ summary = d.getVar("SUMMARY")
+ if summary:
+ recipe.summary = summary
+
+ description = d.getVar("DESCRIPTION")
+ if description:
+ recipe.description = description
+
+ # Some CVEs may be patched during the build process without incrementing the version number,
+ # so querying for CVEs based on the CPE id can lead to false positives. To account for this,
+    # save the CVEs fixed by patches to the source information field in the SPDX.
+ patched_cves = oe.cve_check.get_patched_cves(d)
+ patched_cves = list(patched_cves)
+ patched_cves = ' '.join(patched_cves)
+ if patched_cves:
+ recipe.sourceInfo = "CVEs fixed: " + patched_cves
+
+ cpe_ids = oe.cve_check.get_cpe_ids(d.getVar("CVE_PRODUCT"), d.getVar("CVE_VERSION"))
+ if cpe_ids:
+ for cpe_id in cpe_ids:
+ cpe = oe.spdx.SPDXExternalReference()
+ cpe.referenceCategory = "SECURITY"
+ cpe.referenceType = "http://spdx.org/rdf/references/cpe23Type"
+ cpe.referenceLocator = cpe_id
+ recipe.externalRefs.append(cpe)
+
+ doc.packages.append(recipe)
+ doc.add_relationship(doc, "DESCRIBES", recipe)
+
+ if process_sources(d) and include_sources:
+ recipe_archive = deploy_dir_spdx / "recipes" / (doc.name + ".tar.zst")
+ with optional_tarfile(recipe_archive, archive_sources) as archive:
+ spdx_get_src(d)
+
+ add_package_files(
+ d,
+ doc,
+ recipe,
+ spdx_workdir,
+ lambda file_counter: "SPDXRef-SourceFile-%s-%d" % (d.getVar("PN"), file_counter),
+ lambda filepath: ["SOURCE"],
+ ignore_dirs=[".git"],
+ ignore_top_level_dirs=["temp"],
+ archive=archive,
+ )
+
+ if archive is not None:
+ recipe.packageFileName = str(recipe_archive.name)
+
+ dep_recipes = collect_dep_recipes(d, doc, recipe)
+
+ doc_sha1 = oe.sbom.write_doc(d, doc, "recipes")
+ dep_recipes.append(oe.sbom.DepRecipe(doc, doc_sha1, recipe))
+
+ recipe_ref = oe.spdx.SPDXExternalDocumentRef()
+ recipe_ref.externalDocumentId = "DocumentRef-recipe-" + recipe.name
+ recipe_ref.spdxDocument = doc.documentNamespace
+ recipe_ref.checksum.algorithm = "SHA1"
+ recipe_ref.checksum.checksumValue = doc_sha1
+
+ sources = collect_dep_sources(d, dep_recipes)
+ found_licenses = {license.name:recipe_ref.externalDocumentId + ":" + license.licenseId for license in doc.hasExtractedLicensingInfos}
+
+ if not recipe_spdx_is_native(d, recipe):
+ bb.build.exec_func("read_subpackage_metadata", d)
+
+ pkgdest = Path(d.getVar("PKGDEST"))
+ for package in d.getVar("PACKAGES").split():
+ if not oe.packagedata.packaged(package, d):
+ continue
+
+ package_doc = oe.spdx.SPDXDocument()
+ pkg_name = d.getVar("PKG:%s" % package) or package
+ package_doc.name = pkg_name
+ package_doc.documentNamespace = get_doc_namespace(d, package_doc)
+ package_doc.creationInfo.created = creation_time
+ package_doc.creationInfo.comment = "This document was created by analyzing packages created during the build."
+ package_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ package_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ package_doc.creationInfo.creators.append("Organization: OpenEmbedded ()")
+ package_doc.creationInfo.creators.append("Person: N/A ()")
+ package_doc.externalDocumentRefs.append(recipe_ref)
+
+ package_license = d.getVar("LICENSE:%s" % package) or d.getVar("LICENSE")
+
+ spdx_package = oe.spdx.SPDXPackage()
+
+ spdx_package.SPDXID = oe.sbom.get_package_spdxid(pkg_name)
+ spdx_package.name = pkg_name
+ spdx_package.versionInfo = d.getVar("PV")
+ spdx_package.licenseDeclared = convert_license_to_spdx(package_license, package_doc, d, found_licenses)
+
+ package_doc.packages.append(spdx_package)
+
+ package_doc.add_relationship(spdx_package, "GENERATED_FROM", "%s:%s" % (recipe_ref.externalDocumentId, recipe.SPDXID))
+ package_doc.add_relationship(package_doc, "DESCRIBES", spdx_package)
+
+ package_archive = deploy_dir_spdx / "packages" / (package_doc.name + ".tar.zst")
+ with optional_tarfile(package_archive, archive_packaged) as archive:
+ package_files = add_package_files(
+ d,
+ package_doc,
+ spdx_package,
+ pkgdest / package,
+ lambda file_counter: oe.sbom.get_packaged_file_spdxid(pkg_name, file_counter),
+ lambda filepath: ["BINARY"],
+ archive=archive,
+ )
+
+ if archive is not None:
+ spdx_package.packageFileName = str(package_archive.name)
+
+ add_package_sources_from_debug(d, package_doc, spdx_package, package, package_files, sources)
+
+ oe.sbom.write_doc(d, package_doc, "packages")
+}
+# NOTE: depending on do_unpack is a hack that is necessary to pull in its dependencies in order to archive the source
+addtask do_create_spdx after do_package do_packagedata do_unpack before do_build do_rm_work
+
+SSTATETASKS += "do_create_spdx"
+do_create_spdx[sstate-inputdirs] = "${SPDXDEPLOY}"
+do_create_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_spdx_setscene () {
+ sstate_setscene(d)
+}
+addtask do_create_spdx_setscene
+
+do_create_spdx[dirs] = "${SPDXDEPLOY} ${SPDXWORK}"
+do_create_spdx[cleandirs] = "${SPDXDEPLOY} ${SPDXWORK}"
+do_create_spdx[depends] += "${PATCHDEPENDENCY}"
+do_create_spdx[deptask] = "do_create_spdx"
+
+def collect_package_providers(d):
+ from pathlib import Path
+ import oe.sbom
+ import oe.spdx
+ import json
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+
+ providers = {}
+
+ taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+ deps = sorted(set(
+ dep[0] for dep in taskdepdata.values() if dep[0] != d.getVar("PN")
+ ))
+ deps.append(d.getVar("PN"))
+
+ for dep_pn in deps:
+ recipe_data = oe.packagedata.read_pkgdata(dep_pn, d)
+
+ for pkg in recipe_data.get("PACKAGES", "").split():
+
+ pkg_data = oe.packagedata.read_subpkgdata_dict(pkg, d)
+ rprovides = set(n for n, _ in bb.utils.explode_dep_versions2(pkg_data.get("RPROVIDES", "")).items())
+ rprovides.add(pkg)
+
+ for r in rprovides:
+ providers[r] = pkg
+
+ return providers
+
+collect_package_providers[vardepsexclude] += "BB_TASKDEPDATA"
+
+python do_create_runtime_spdx() {
+ from datetime import datetime, timezone
+ import oe.sbom
+ import oe.spdx
+ import oe.packagedata
+ from pathlib import Path
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ spdx_deploy = Path(d.getVar("SPDXRUNTIMEDEPLOY"))
+ is_native = bb.data.inherits_class("native", d)
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+
+ providers = collect_package_providers(d)
+
+ if not is_native:
+ bb.build.exec_func("read_subpackage_metadata", d)
+
+ dep_package_cache = {}
+
+ pkgdest = Path(d.getVar("PKGDEST"))
+ for package in d.getVar("PACKAGES").split():
+ localdata = bb.data.createCopy(d)
+ pkg_name = d.getVar("PKG:%s" % package) or package
+ localdata.setVar("PKG", pkg_name)
+ localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + package)
+
+ if not oe.packagedata.packaged(package, localdata):
+ continue
+
+ pkg_spdx_path = deploy_dir_spdx / "packages" / (pkg_name + ".spdx.json")
+
+ package_doc, package_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
+
+ for p in package_doc.packages:
+ if p.name == pkg_name:
+ spdx_package = p
+ break
+ else:
+ bb.fatal("Package '%s' not found in %s" % (pkg_name, pkg_spdx_path))
+
+ runtime_doc = oe.spdx.SPDXDocument()
+ runtime_doc.name = "runtime-" + pkg_name
+ runtime_doc.documentNamespace = get_doc_namespace(localdata, runtime_doc)
+ runtime_doc.creationInfo.created = creation_time
+ runtime_doc.creationInfo.comment = "This document was created by analyzing package runtime dependencies."
+ runtime_doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ runtime_doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ runtime_doc.creationInfo.creators.append("Organization: OpenEmbedded ()")
+ runtime_doc.creationInfo.creators.append("Person: N/A ()")
+
+ package_ref = oe.spdx.SPDXExternalDocumentRef()
+ package_ref.externalDocumentId = "DocumentRef-package-" + package
+ package_ref.spdxDocument = package_doc.documentNamespace
+ package_ref.checksum.algorithm = "SHA1"
+ package_ref.checksum.checksumValue = package_doc_sha1
+
+ runtime_doc.externalDocumentRefs.append(package_ref)
+
+ runtime_doc.add_relationship(
+ runtime_doc.SPDXID,
+ "AMENDS",
+ "%s:%s" % (package_ref.externalDocumentId, package_doc.SPDXID)
+ )
+
+ deps = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "")
+ seen_deps = set()
+ for dep, _ in deps.items():
+ if dep in seen_deps:
+ continue
+
+ dep = providers[dep]
+
+ if not oe.packagedata.packaged(dep, localdata):
+ continue
+
+ dep_pkg_data = oe.packagedata.read_subpkgdata_dict(dep, d)
+ dep_pkg = dep_pkg_data["PKG"]
+
+ if dep in dep_package_cache:
+ (dep_spdx_package, dep_package_ref) = dep_package_cache[dep]
+ else:
+ dep_path = deploy_dir_spdx / "packages" / ("%s.spdx.json" % dep_pkg)
+
+ spdx_dep_doc, spdx_dep_sha1 = oe.sbom.read_doc(dep_path)
+
+ for pkg in spdx_dep_doc.packages:
+ if pkg.name == dep_pkg:
+ dep_spdx_package = pkg
+ break
+ else:
+ bb.fatal("Package '%s' not found in %s" % (dep_pkg, dep_path))
+
+ dep_package_ref = oe.spdx.SPDXExternalDocumentRef()
+ dep_package_ref.externalDocumentId = "DocumentRef-runtime-dependency-" + spdx_dep_doc.name
+ dep_package_ref.spdxDocument = spdx_dep_doc.documentNamespace
+ dep_package_ref.checksum.algorithm = "SHA1"
+ dep_package_ref.checksum.checksumValue = spdx_dep_sha1
+
+ dep_package_cache[dep] = (dep_spdx_package, dep_package_ref)
+
+ runtime_doc.externalDocumentRefs.append(dep_package_ref)
+
+ runtime_doc.add_relationship(
+ "%s:%s" % (dep_package_ref.externalDocumentId, dep_spdx_package.SPDXID),
+ "RUNTIME_DEPENDENCY_OF",
+ "%s:%s" % (package_ref.externalDocumentId, spdx_package.SPDXID)
+ )
+ seen_deps.add(dep)
+
+ oe.sbom.write_doc(d, runtime_doc, "runtime", spdx_deploy)
+}
+
+addtask do_create_runtime_spdx after do_create_spdx before do_build do_rm_work
+SSTATETASKS += "do_create_runtime_spdx"
+do_create_runtime_spdx[sstate-inputdirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[sstate-outputdirs] = "${DEPLOY_DIR_SPDX}"
+
+python do_create_runtime_spdx_setscene () {
+ sstate_setscene(d)
+}
+addtask do_create_runtime_spdx_setscene
+
+do_create_runtime_spdx[dirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[cleandirs] = "${SPDXRUNTIMEDEPLOY}"
+do_create_runtime_spdx[rdeptask] = "do_create_spdx"
+
+def spdx_get_src(d):
+ """
+    Save the patched source of the recipe in SPDXWORK.
+ """
+ import shutil
+ spdx_workdir = d.getVar('SPDXWORK')
+ spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
+ pn = d.getVar('PN')
+
+ workdir = d.getVar("WORKDIR")
+
+ try:
+        # The kernel class functions require it to be on work-shared, so we don't change WORKDIR
+ if not is_work_shared(d):
+ # Change the WORKDIR to make do_unpack do_patch run in another dir.
+ d.setVar('WORKDIR', spdx_workdir)
+ # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
+ d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+
+            # Changing 'WORKDIR' also changes 'B', so create the 'B' directory
+            # in case one of the following tasks needs it (for example, some
+            # recipes' do_patch requires 'B' to exist).
+ bb.utils.mkdirhier(d.getVar('B'))
+
+ bb.build.exec_func('do_unpack', d)
+ # Copy source of kernel to spdx_workdir
+ if is_work_shared(d):
+ d.setVar('WORKDIR', spdx_workdir)
+ d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+ src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
+ bb.utils.mkdirhier(src_dir)
+ if bb.data.inherits_class('kernel',d):
+ share_src = d.getVar('STAGING_KERNEL_DIR')
+ cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+ cmd_copy_kernel_result = os.popen(cmd_copy_share).read()
+ bb.note("cmd_copy_kernel_result = " + cmd_copy_kernel_result)
+
+ git_path = src_dir + "/.git"
+ if os.path.exists(git_path):
+                shutil.rmtree(git_path)
+
+ # Make sure gcc and kernel sources are patched only once
+ if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
+ bb.build.exec_func('do_patch', d)
+
+        # Some userland recipes have no source.
+ if not os.path.exists( spdx_workdir ):
+ bb.utils.mkdirhier(spdx_workdir)
+ finally:
+ d.setVar("WORKDIR", workdir)
+
+do_rootfs[recrdeptask] += "do_create_spdx do_create_runtime_spdx"
+
+ROOTFS_POSTUNINSTALL_COMMAND =+ "image_combine_spdx ; "
+python image_combine_spdx() {
+ import os
+ import oe.spdx
+ import oe.sbom
+ import io
+ import json
+ from oe.rootfs import image_list_installed_packages
+ from datetime import timezone, datetime
+ from pathlib import Path
+ import tarfile
+ import bb.compress.zstd
+
+ creation_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+ image_name = d.getVar("IMAGE_NAME")
+ image_link_name = d.getVar("IMAGE_LINK_NAME")
+
+ deploy_dir_spdx = Path(d.getVar("DEPLOY_DIR_SPDX"))
+ imgdeploydir = Path(d.getVar("IMGDEPLOYDIR"))
+ source_date_epoch = d.getVar("SOURCE_DATE_EPOCH")
+
+ doc = oe.spdx.SPDXDocument()
+ doc.name = image_name
+ doc.documentNamespace = get_doc_namespace(d, doc)
+ doc.creationInfo.created = creation_time
+ doc.creationInfo.comment = "This document was created by analyzing the source of the Yocto recipe during the build."
+ doc.creationInfo.licenseListVersion = d.getVar("SPDX_LICENSE_DATA")["licenseListVersion"]
+ doc.creationInfo.creators.append("Tool: OpenEmbedded Core create-spdx.bbclass")
+ doc.creationInfo.creators.append("Organization: OpenEmbedded ()")
+ doc.creationInfo.creators.append("Person: N/A ()")
+
+ image = oe.spdx.SPDXPackage()
+ image.name = d.getVar("PN")
+ image.versionInfo = d.getVar("PV")
+ image.SPDXID = oe.sbom.get_image_spdxid(image_name)
+
+ doc.packages.append(image)
+
+ spdx_package = oe.spdx.SPDXPackage()
+
+ packages = image_list_installed_packages(d)
+
+ for name in sorted(packages.keys()):
+ pkg_spdx_path = deploy_dir_spdx / "packages" / (name + ".spdx.json")
+ pkg_doc, pkg_doc_sha1 = oe.sbom.read_doc(pkg_spdx_path)
+
+ for p in pkg_doc.packages:
+ if p.name == name:
+ pkg_ref = oe.spdx.SPDXExternalDocumentRef()
+ pkg_ref.externalDocumentId = "DocumentRef-%s" % pkg_doc.name
+ pkg_ref.spdxDocument = pkg_doc.documentNamespace
+ pkg_ref.checksum.algorithm = "SHA1"
+ pkg_ref.checksum.checksumValue = pkg_doc_sha1
+
+ doc.externalDocumentRefs.append(pkg_ref)
+ doc.add_relationship(image, "CONTAINS", "%s:%s" % (pkg_ref.externalDocumentId, p.SPDXID))
+ break
+ else:
+ bb.fatal("Unable to find package with name '%s' in SPDX file %s" % (name, pkg_spdx_path))
+
+ runtime_spdx_path = deploy_dir_spdx / "runtime" / ("runtime-" + name + ".spdx.json")
+ runtime_doc, runtime_doc_sha1 = oe.sbom.read_doc(runtime_spdx_path)
+
+ runtime_ref = oe.spdx.SPDXExternalDocumentRef()
+ runtime_ref.externalDocumentId = "DocumentRef-%s" % runtime_doc.name
+ runtime_ref.spdxDocument = runtime_doc.documentNamespace
+ runtime_ref.checksum.algorithm = "SHA1"
+ runtime_ref.checksum.checksumValue = runtime_doc_sha1
+
+ # "OTHER" isn't ideal here, but I can't find a relationship that makes sense
+ doc.externalDocumentRefs.append(runtime_ref)
+ doc.add_relationship(
+ image,
+ "OTHER",
+ "%s:%s" % (runtime_ref.externalDocumentId, runtime_doc.SPDXID),
+ comment="Runtime dependencies for %s" % name
+ )
+
+ image_spdx_path = imgdeploydir / (image_name + ".spdx.json")
+
+ with image_spdx_path.open("wb") as f:
+ doc.to_json(f, sort_keys=True)
+
+ image_spdx_link = imgdeploydir / (image_link_name + ".spdx.json")
+ image_spdx_link.symlink_to(os.path.relpath(image_spdx_path, image_spdx_link.parent))
+
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+
+ visited_docs = set()
+
+ index = {"documents": []}
+
+ spdx_tar_path = imgdeploydir / (image_name + ".spdx.tar.zst")
+ with bb.compress.zstd.open(spdx_tar_path, "w", num_threads=num_threads) as f:
+ with tarfile.open(fileobj=f, mode="w|") as tar:
+ def collect_spdx_document(path):
+ nonlocal tar
+ nonlocal deploy_dir_spdx
+ nonlocal source_date_epoch
+ nonlocal index
+
+ if path in visited_docs:
+ return
+
+ visited_docs.add(path)
+
+ with path.open("rb") as f:
+ doc, sha1 = oe.sbom.read_doc(f)
+ f.seek(0)
+
+ if doc.documentNamespace in visited_docs:
+ return
+
+ bb.note("Adding SPDX document %s" % path)
+ visited_docs.add(doc.documentNamespace)
+ info = tar.gettarinfo(fileobj=f)
+
+ info.name = doc.name + ".spdx.json"
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ if source_date_epoch is not None and info.mtime > int(source_date_epoch):
+ info.mtime = int(source_date_epoch)
+
+ tar.addfile(info, f)
+
+ index["documents"].append({
+ "filename": info.name,
+ "documentNamespace": doc.documentNamespace,
+ "sha1": sha1,
+ })
+
+ for ref in doc.externalDocumentRefs:
+ ref_path = deploy_dir_spdx / "by-namespace" / ref.spdxDocument.replace("/", "_")
+ collect_spdx_document(ref_path)
+
+ collect_spdx_document(image_spdx_path)
+
+ index["documents"].sort(key=lambda x: x["filename"])
+
+ index_str = io.BytesIO(json.dumps(index, sort_keys=True).encode("utf-8"))
+
+ info = tarfile.TarInfo()
+ info.name = "index.json"
+ info.size = len(index_str.getvalue())
+ info.uid = 0
+ info.gid = 0
+ info.uname = "root"
+ info.gname = "root"
+
+ tar.addfile(info, fileobj=index_str)
+
+ def make_image_link(target_path, suffix):
+ link = imgdeploydir / (image_link_name + suffix)
+ link.symlink_to(os.path.relpath(target_path, link.parent))
+
+ make_image_link(spdx_tar_path, ".spdx.tar.zst")
+
+ spdx_index_path = imgdeploydir / (image_name + ".spdx.index.json")
+ with spdx_index_path.open("w") as f:
+ json.dump(index, f, sort_keys=True)
+
+ make_image_link(spdx_index_path, ".spdx.index.json")
+}
+
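
As a rough usage sketch, the class would normally be enabled globally from local.conf or a distro configuration; only the SPDX_* variables shown here are defined by the class above, and they all default to "0".

    INHERIT += "create-spdx"

    # Optionally describe source files and archive sources/packaged files
    # alongside the SPDX documents
    SPDX_INCLUDE_SOURCES = "1"
    SPDX_ARCHIVE_SOURCES = "1"
    SPDX_ARCHIVE_PACKAGED = "1"

An image build then deploys <image>.spdx.json, <image>.spdx.index.json and <image>.spdx.tar.zst next to the image, as assembled by image_combine_spdx() above.
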
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 447a2bb158..ac82e86356 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -40,6 +40,8 @@ python () {
extralibcs = [""]
if "musl" in d.getVar("BASECANADIANEXTRAOS"):
extralibcs.append("musl")
+ if "android" in tos:
+ extralibcs.append("android")
for variant in ["", "spe", "x32", "eabi", "n32", "_ilp32"]:
for libc in extralibcs:
entry = "linux"
@@ -167,7 +169,7 @@ USE_NLS = "${SDKUSE_NLS}"
# and not any particular tune that is enabled.
TARGET_ARCH[vardepsexclude] = "TUNE_ARCH"
-PKGDATA_DIR = "${TMPDIR}/pkgdata/${SDK_SYS}"
+PKGDATA_DIR = "${PKGDATA_DIR_SDK}"
# If MLPREFIX is set by multilib code, shlibs
# points to the wrong place so force it
SHLIBSDIRS = "${PKGDATA_DIR}/nativesdk-shlibs2"
diff --git a/meta/classes/cross.bbclass b/meta/classes/cross.bbclass
index 00e0de84f3..3e6a2f60b9 100644
--- a/meta/classes/cross.bbclass
+++ b/meta/classes/cross.bbclass
@@ -72,10 +72,6 @@ libexecdir = "${exec_prefix}/libexec/${CROSS_TARGET_SYS_DIR}"
do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}/"
do_packagedata[stamp-extra-info] = ""
-do_install () {
- oe_runmake 'DESTDIR=${D}' install
-}
-
USE_NLS = "no"
export CC = "${BUILD_CC}"
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index a3fc9c2623..70d1988a70 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -20,7 +20,7 @@
# the only method to check against CVEs. Running this tool
# doesn't guarantee your packages are free of CVEs.
-# The product name that the CVE database uses. Defaults to BPN, but may need to
+# The product name that the CVE database uses defaults to BPN, but may need to
# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"
@@ -56,11 +56,11 @@ CVE_CHECK_WHITELIST ?= ""
# Layers to be excluded
CVE_CHECK_LAYER_EXCLUDELIST ??= ""
-# Layers to be included
+# Layers to be included
CVE_CHECK_LAYER_INCLUDELIST ??= ""
-# set to "alphabetical" for version using single alphabetical character as increament release
+# set to "alphabetical" for versions that use a single alphabetical character as the release increment
CVE_VERSION_SUFFIX ??= ""
python cve_save_summary_handler () {
@@ -94,10 +94,11 @@ python do_cve_check () {
"""
Check recipe for patched and unpatched CVEs
"""
+ from oe.cve_check import get_patched_cves
if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")):
try:
- patched_cves = get_patches_cves(d)
+ patched_cves = get_patched_cves(d)
except FileNotFoundError:
bb.fatal("Failure in searching patches")
whitelisted, patched, unpatched = check_cves(d, patched_cves)
@@ -156,62 +157,6 @@ python cve_check_write_rootfs_manifest () {
ROOTFS_POSTPROCESS_COMMAND:prepend = "${@'cve_check_write_rootfs_manifest; ' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
do_rootfs[recrdeptask] += "${@'do_cve_check' if d.getVar('CVE_CHECK_CREATE_MANIFEST') == '1' else ''}"
-def get_patches_cves(d):
- """
- Get patches that solve CVEs using the "CVE: " tag.
- """
-
- import re
-
- pn = d.getVar("PN")
- cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
-
- # Matches last CVE-1234-211432 in the file name, also if written
- # with small letters. Not supporting multiple CVE id's in a single
- # file name.
- cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)")
-
- patched_cves = set()
- bb.debug(2, "Looking for patches that solves CVEs for %s" % pn)
- for url in src_patches(d):
- patch_file = bb.fetch.decodeurl(url)[2]
-
- if not os.path.isfile(patch_file):
- bb.error("File Not found: %s" % patch_file)
- raise FileNotFoundError
-
- # Check patch file name for CVE ID
- fname_match = cve_file_name_match.search(patch_file)
- if fname_match:
- cve = fname_match.group(1).upper()
- patched_cves.add(cve)
- bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file))
-
- with open(patch_file, "r", encoding="utf-8") as f:
- try:
- patch_text = f.read()
- except UnicodeDecodeError:
- bb.debug(1, "Failed to read patch %s using UTF-8 encoding"
- " trying with iso8859-1" % patch_file)
- f.close()
- with open(patch_file, "r", encoding="iso8859-1") as f:
- patch_text = f.read()
-
- # Search for one or more "CVE: " lines
- text_match = False
- for match in cve_match.finditer(patch_text):
- # Get only the CVEs without the "CVE: " tag
- cves = patch_text[match.start()+5:match.end()]
- for cve in cves.split():
- bb.debug(2, "Patch %s solves %s" % (patch_file, cve))
- patched_cves.add(cve)
- text_match = True
-
- if not fname_match and not text_match:
- bb.debug(2, "Patch %s doesn't solve CVEs" % patch_file)
-
- return patched_cves
-
def check_cves(d, patched_cves):
"""
Connect to the NVD database and find unpatched cves.
@@ -230,14 +175,11 @@ def check_cves(d, patched_cves):
return ([], [], [])
pv = d.getVar("CVE_VERSION").split("+git")[0]
- # If the recipe has been whitlisted we return empty lists
+ # If the recipe has been whitelisted we return empty lists
if pn in d.getVar("CVE_CHECK_PN_WHITELIST").split():
bb.note("Recipe has been whitelisted, skipping check")
return ([], [], [])
- old_cve_whitelist = d.getVar("CVE_CHECK_CVE_WHITELIST")
- if old_cve_whitelist:
- bb.warn("CVE_CHECK_CVE_WHITELIST is deprecated, please use CVE_CHECK_WHITELIST.")
cve_whitelist = d.getVar("CVE_CHECK_WHITELIST").split()
import sqlite3
@@ -355,7 +297,7 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
if include_layers and layer not in include_layers:
return
- nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId="
+ nvd_link = "https://nvd.nist.gov/vuln/detail/"
write_string = ""
unpatched_cves = []
bb.utils.mkdirhier(os.path.dirname(cve_file))
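
The detection logic removed here lives on in oe.cve_check.get_patched_cves and still works the same way: a backported fix is recognised from a CVE id in the patch file name and/or a "CVE:" tag in the patch itself. A hypothetical patch named CVE-2021-12345.patch could carry a header like:

    Subject: [PATCH] foo: fix buffer overflow in the parser

    CVE: CVE-2021-12345
    Upstream-Status: Backport
    Signed-off-by: A. Developer <dev@example.com>
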
diff --git a/meta/classes/devshell.bbclass b/meta/classes/devshell.bbclass
index 76dd0b42ee..62dc958d9a 100644
--- a/meta/classes/devshell.bbclass
+++ b/meta/classes/devshell.bbclass
@@ -34,7 +34,7 @@ python () {
d.delVarFlag("do_devshell", "fakeroot")
}
-def devpyshell(d):
+def pydevshell(d):
import code
import select
@@ -140,17 +140,17 @@ def devpyshell(d):
os.kill(child, signal.SIGTERM)
break
-python do_devpyshell() {
+python do_pydevshell() {
import signal
try:
- devpyshell(d)
+ pydevshell(d)
except SystemExit:
# Stop the SIGTERM above causing an error exit code
return
finally:
return
}
-addtask devpyshell after do_patch
+addtask pydevshell after do_patch
-do_devpyshell[nostamp] = "1"
+do_pydevshell[nostamp] = "1"
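
With the rename, the interactive Python development shell for a recipe is reached via the new task name, for example (recipe name chosen arbitrarily):

    bitbake busybox -c pydevshell

The old devpyshell task no longer exists, so any wrappers or documentation invoking -c devpyshell need updating.
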
diff --git a/meta/classes/devupstream.bbclass b/meta/classes/devupstream.bbclass
index 1230fa12ea..dc9a9472b1 100644
--- a/meta/classes/devupstream.bbclass
+++ b/meta/classes/devupstream.bbclass
@@ -16,8 +16,6 @@
# - If the fetcher requires native tools (such as subversion-native) then
# bitbake won't be able to add them automatically.
-CLASSOVERRIDE .= ":class-devupstream"
-
python devupstream_virtclass_handler () {
# Do nothing if this is inherited, as it's for BBCLASSEXTEND
if "devupstream" not in (d.getVar('BBCLASSEXTEND') or ""):
@@ -25,8 +23,8 @@ python devupstream_virtclass_handler () {
return
variant = d.getVar("BBEXTENDVARIANT")
- if variant not in ("target"):
- bb.error("Pass the variant when using devupstream, for example devupstream:target")
+ if variant not in ("target", "native"):
+ bb.error("Unsupported variant %s. Pass the variant when using devupstream, for example devupstream:target" % variant)
return
# Develpment releases are never preferred by default
@@ -34,14 +32,22 @@ python devupstream_virtclass_handler () {
uri = bb.fetch2.URI(d.getVar("SRC_URI").split()[0])
- if uri.scheme == "git":
- d.setVar("S", "${WORKDIR}/git")
+ if uri.scheme == "git" and not d.getVar("S:class-devupstream"):
+ d.setVar("S:class-devupstream", "${WORKDIR}/git")
# Modify the PV if the recipe hasn't already overridden it
pv = d.getVar("PV")
proto_marker = "+" + uri.scheme
- if proto_marker not in pv:
+ if proto_marker not in pv and not d.getVar("PV:class-devupstream"):
d.setVar("PV", pv + proto_marker + "${SRCPV}")
+
+ if variant == "native":
+ pn = d.getVar("PN")
+ d.setVar("PN", "%s-native" % (pn))
+ fn = d.getVar("FILE")
+ bb.parse.BBHandler.inherit("native", fn, 0, d)
+
+ d.appendVar("CLASSOVERRIDE", ":class-devupstream")
}
addhandler devupstream_virtclass_handler
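
A sketch of a recipe using the extended class, now that the native variant is accepted alongside target; the URL, branch and revision are placeholders:

    BBCLASSEXTEND = "devupstream:target devupstream:native"

    SRC_URI:class-devupstream = "git://example.com/project.git;branch=master;protocol=https"
    SRCREV:class-devupstream = "<hypothetical-revision>"

    # With this change these overrides are honoured rather than being
    # replaced by the class defaults
    S:class-devupstream = "${WORKDIR}/git/src"
    PV:class-devupstream = "2.0+git${SRCPV}"
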
diff --git a/meta/classes/distrooverrides.bbclass b/meta/classes/distrooverrides.bbclass
index c172a348d8..bf3a2b2090 100644
--- a/meta/classes/distrooverrides.bbclass
+++ b/meta/classes/distrooverrides.bbclass
@@ -6,7 +6,7 @@
# This makes it simpler to write .bbappends that only change the
# task signatures of the recipe if the change is really enabled,
# for example with:
-# do_install:append_df-my-feature () { ... }
+# do_install:append:df-my-feature () { ... }
# where "my-feature" is a DISTRO_FEATURE.
#
# The class is meant to be used in a layer.conf or distro
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index 54b08adf62..abfe24bace 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -109,6 +109,15 @@ python () {
if local_srcuri and task in fetch_tasks:
continue
bb.build.deltask(task, d)
+ if task == 'do_unpack':
+ # The reproducible build create_source_date_epoch_stamp function must
+ # be run after the source is available and before the
+ # do_deploy_source_date_epoch task. In the normal case, it's attached
+            # to do_unpack as a postfunc, but since we removed do_unpack (above)
+ # we need to move the function elsewhere. The easiest thing to do is
+ # move it into the prefuncs of the do_deploy_source_date_epoch task.
+ # This is safe, as externalsrc runs with the source already unpacked.
+ d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')
d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")
diff --git a/meta/classes/features_check.bbclass b/meta/classes/features_check.bbclass
index b3c8047861..205e1b9cd3 100644
--- a/meta/classes/features_check.bbclass
+++ b/meta/classes/features_check.bbclass
@@ -1,6 +1,6 @@
# Allow checking of required and conflicting features
#
-# xxx = [DISTRO,MACHINE,COMBINED]
+# xxx = [DISTRO,MACHINE,COMBINED,IMAGE]
#
# ANY_OF_xxx_FEATURES: ensure at least one item on this list is included
# in xxx_FEATURES.
@@ -18,7 +18,7 @@ python () {
unused = True
- for kind in ['DISTRO', 'MACHINE', 'COMBINED']:
+ for kind in ['DISTRO', 'MACHINE', 'COMBINED', 'IMAGE']:
if d.getVar('ANY_OF_' + kind + '_FEATURES') is None and \
d.overridedata.get('ANY_OF_' + kind + '_FEATURES') is None and \
d.getVar('REQUIRED_' + kind + '_FEATURES') is None and \
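
With IMAGE added to the list of checked kinds, an image recipe can now be guarded on IMAGE_FEATURES in the same way recipes are already guarded on DISTRO, MACHINE and COMBINED features. A hypothetical fragment:

    inherit features_check

    # Existing behaviour: skip the recipe unless the distro enables systemd
    REQUIRED_DISTRO_FEATURES = "systemd"

    # New with this change: the same kind of check against IMAGE_FEATURES
    REQUIRED_IMAGE_FEATURES = "read-only-rootfs"
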
diff --git a/meta/classes/goarch.bbclass b/meta/classes/goarch.bbclass
index 48dc48dabf..92fec16b82 100644
--- a/meta/classes/goarch.bbclass
+++ b/meta/classes/goarch.bbclass
@@ -32,13 +32,13 @@ BASE_GOARM:armv5 = '5'
# Go supports dynamic linking on a limited set of architectures.
# See the supportsDynlink function in go/src/cmd/compile/internal/gc/main.go
GO_DYNLINK = ""
-GO_DYNLINK:arm = "1"
-GO_DYNLINK:aarch64 = "1"
-GO_DYNLINK:x86 = "1"
-GO_DYNLINK:x86-64 = "1"
-GO_DYNLINK:powerpc64 = "1"
-GO_DYNLINK:powerpc64le = "1"
-GO_DYNLINK:class-native = ""
+GO_DYNLINK:arm ?= "1"
+GO_DYNLINK:aarch64 ?= "1"
+GO_DYNLINK:x86 ?= "1"
+GO_DYNLINK:x86-64 ?= "1"
+GO_DYNLINK:powerpc64 ?= "1"
+GO_DYNLINK:powerpc64le ?= "1"
+GO_DYNLINK:class-native ?= ""
GO_DYNLINK:class-nativesdk = ""
# define here because everybody inherits this class
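
Since these are now weak defaults, a BSP, distro or recipe can override them; for instance, to disable Go dynamic linking on arm (an illustrative choice):

    GO_DYNLINK:arm = ""
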
diff --git a/meta/classes/image-artifact-names.bbclass b/meta/classes/image-artifact-names.bbclass
index 3ac8dd731a..f5769e520f 100644
--- a/meta/classes/image-artifact-names.bbclass
+++ b/meta/classes/image-artifact-names.bbclass
@@ -4,7 +4,7 @@
IMAGE_BASENAME ?= "${PN}"
IMAGE_VERSION_SUFFIX ?= "-${DATETIME}"
-IMAGE_VERSION_SUFFIX[vardepsexclude] += "DATETIME"
+IMAGE_VERSION_SUFFIX[vardepsexclude] += "DATETIME SOURCE_DATE_EPOCH"
IMAGE_NAME ?= "${IMAGE_BASENAME}-${MACHINE}${IMAGE_VERSION_SUFFIX}"
IMAGE_LINK_NAME ?= "${IMAGE_BASENAME}-${MACHINE}"
@@ -13,3 +13,10 @@ IMAGE_LINK_NAME ?= "${IMAGE_BASENAME}-${MACHINE}"
# by default) followed by additional suffices which describe the format (.ext4,
# .ext4.xz, etc.).
IMAGE_NAME_SUFFIX ??= ".rootfs"
+
+python () {
+ if bb.data.inherits_class('deploy', d) and d.getVar("IMAGE_VERSION_SUFFIX") == "-${DATETIME}":
+ import datetime
+ d.setVar("IMAGE_VERSION_SUFFIX", "-" + datetime.datetime.fromtimestamp(int(d.getVar("SOURCE_DATE_EPOCH")), datetime.timezone.utc).strftime('%Y%m%d%H%M%S'))
+ d.setVarFlag("IMAGE_VERSION_SUFFIX", "vardepvalue", "")
+}
diff --git a/meta/classes/image-container.bbclass b/meta/classes/image-container.bbclass
index f002858bd2..3d1993576a 100644
--- a/meta/classes/image-container.bbclass
+++ b/meta/classes/image-container.bbclass
@@ -1,6 +1,6 @@
ROOTFS_BOOTSTRAP_INSTALL = ""
IMAGE_TYPES_MASKED += "container"
-IMAGE_TYPEDEP_container = "tar.bz2"
+IMAGE_TYPEDEP:container = "tar.bz2"
python __anonymous() {
if "container" in d.getVar("IMAGE_FSTYPES") and \
diff --git a/meta/classes/image-live.bbclass b/meta/classes/image-live.bbclass
index fd876ed8e1..2c948190cf 100644
--- a/meta/classes/image-live.bbclass
+++ b/meta/classes/image-live.bbclass
@@ -42,9 +42,9 @@ INITRD_LIVE ?= "${DEPLOY_DIR_IMAGE}/${INITRD_IMAGE_LIVE}-${MACHINE}.${INITRAMFS_
LIVE_ROOTFS_TYPE ?= "ext4"
ROOTFS ?= "${IMGDEPLOYDIR}/${IMAGE_LINK_NAME}.${LIVE_ROOTFS_TYPE}"
-IMAGE_TYPEDEP_live = "${LIVE_ROOTFS_TYPE}"
-IMAGE_TYPEDEP_iso = "${LIVE_ROOTFS_TYPE}"
-IMAGE_TYPEDEP_hddimg = "${LIVE_ROOTFS_TYPE}"
+IMAGE_TYPEDEP:live = "${LIVE_ROOTFS_TYPE}"
+IMAGE_TYPEDEP:iso = "${LIVE_ROOTFS_TYPE}"
+IMAGE_TYPEDEP:hddimg = "${LIVE_ROOTFS_TYPE}"
IMAGE_TYPES_MASKED += "live hddimg iso"
python() {
diff --git a/meta/classes/image-prelink.bbclass b/meta/classes/image-prelink.bbclass
index 0da094a551..8158eeaf4c 100644
--- a/meta/classes/image-prelink.bbclass
+++ b/meta/classes/image-prelink.bbclass
@@ -46,17 +46,12 @@ prelink_image () {
dynamic_loader=${@get_linuxloader(d)}
# prelink!
- if [ "${BUILD_REPRODUCIBLE_BINARIES}" = "1" ]; then
- bbnote " prelink: BUILD_REPRODUCIBLE_BINARIES..."
- if [ "$REPRODUCIBLE_TIMESTAMP_ROOTFS" = "" ]; then
- export PRELINK_TIMESTAMP=`git log -1 --pretty=%ct `
- else
- export PRELINK_TIMESTAMP=$REPRODUCIBLE_TIMESTAMP_ROOTFS
- fi
- ${STAGING_SBINDIR_NATIVE}/prelink --root ${IMAGE_ROOTFS} -am -N -c ${sysconfdir}/prelink.conf --dynamic-linker $dynamic_loader
+ if [ "$REPRODUCIBLE_TIMESTAMP_ROOTFS" = "" ]; then
+ export PRELINK_TIMESTAMP=`git log -1 --pretty=%ct `
else
- ${STAGING_SBINDIR_NATIVE}/prelink --root ${IMAGE_ROOTFS} -amR -N -c ${sysconfdir}/prelink.conf --dynamic-linker $dynamic_loader
+ export PRELINK_TIMESTAMP=$REPRODUCIBLE_TIMESTAMP_ROOTFS
fi
+ ${STAGING_SBINDIR_NATIVE}/prelink --root ${IMAGE_ROOTFS} -am -N -c ${sysconfdir}/prelink.conf --dynamic-linker $dynamic_loader
# Remove the prelink.conf if we had to add it.
if [ "$dummy_prelink_conf" = "true" ]; then
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index 45dd56b544..2fa69a40d1 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -138,7 +138,10 @@ python () {
def extraimage_getdepends(task):
deps = ""
for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split():
- deps += " %s:%s" % (dep, task)
+ if ":" in dep:
+ deps += " %s " % (dep)
+ else:
+ deps += " %s:%s" % (dep, task)
return deps
d.appendVarFlag('do_image_complete', 'depends', extraimage_getdepends('do_populate_sysroot'))
@@ -382,8 +385,8 @@ python () {
if t.startswith("debugfs_"):
t = t[8:]
debug = "debugfs_"
- deps = (d.getVar('IMAGE_TYPEDEP_' + t) or "").split()
- vardeps.add('IMAGE_TYPEDEP_' + t)
+ deps = (d.getVar('IMAGE_TYPEDEP:' + t) or "").split()
+ vardeps.add('IMAGE_TYPEDEP:' + t)
if baset not in typedeps:
typedeps[baset] = set()
deps = [debug + dep for dep in deps]
@@ -431,6 +434,7 @@ python () {
localdata.delVar('DATETIME')
localdata.delVar('DATE')
localdata.delVar('TMPDIR')
+ localdata.delVar('IMAGE_VERSION_SUFFIX')
vardepsexclude = (d.getVarFlag('IMAGE_CMD:' + realt, 'vardepsexclude', True) or '').split()
for dep in vardepsexclude:
localdata.delVar(dep)
@@ -457,11 +461,10 @@ python () {
# Create input image first.
gen_conversion_cmds(type)
localdata.setVar('type', type)
- cmd = "\t" + (localdata.getVar("CONVERSION_CMD_" + ctype) or localdata.getVar("COMPRESS_CMD_" + ctype))
+ cmd = "\t" + localdata.getVar("CONVERSION_CMD:" + ctype)
if cmd not in cmds:
cmds.append(cmd)
- vardeps.add('CONVERSION_CMD_' + ctype)
- vardeps.add('COMPRESS_CMD_' + ctype)
+ vardeps.add('CONVERSION_CMD:' + ctype)
subimage = type + "." + ctype
if subimage not in subimages:
subimages.append(subimage)
@@ -648,17 +651,15 @@ ROOTFS_PREPROCESS_COMMAND += "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge'
POPULATE_SDK_PRE_TARGET_COMMAND += "${@bb.utils.contains('DISTRO_FEATURES', 'usrmerge', 'create_merged_usr_symlinks_sdk; ', '',d)}"
reproducible_final_image_task () {
- if [ "${BUILD_REPRODUCIBLE_BINARIES}" = "1" ]; then
+ if [ "$REPRODUCIBLE_TIMESTAMP_ROOTFS" = "" ]; then
+ REPRODUCIBLE_TIMESTAMP_ROOTFS=`git -C "${COREBASE}" log -1 --pretty=%ct 2>/dev/null` || true
if [ "$REPRODUCIBLE_TIMESTAMP_ROOTFS" = "" ]; then
- REPRODUCIBLE_TIMESTAMP_ROOTFS=`git -C "${COREBASE}" log -1 --pretty=%ct 2>/dev/null` || true
- if [ "$REPRODUCIBLE_TIMESTAMP_ROOTFS" = "" ]; then
- REPRODUCIBLE_TIMESTAMP_ROOTFS=`stat -c%Y ${@bb.utils.which(d.getVar("BBPATH"), "conf/bitbake.conf")}`
- fi
+ REPRODUCIBLE_TIMESTAMP_ROOTFS=`stat -c%Y ${@bb.utils.which(d.getVar("BBPATH"), "conf/bitbake.conf")}`
fi
- # Set mtime of all files to a reproducible value
- bbnote "reproducible_final_image_task: mtime set to $REPRODUCIBLE_TIMESTAMP_ROOTFS"
- find ${IMAGE_ROOTFS} -print0 | xargs -0 touch -h --date=@$REPRODUCIBLE_TIMESTAMP_ROOTFS
fi
+ # Set mtime of all files to a reproducible value
+ bbnote "reproducible_final_image_task: mtime set to $REPRODUCIBLE_TIMESTAMP_ROOTFS"
+ find ${IMAGE_ROOTFS} -print0 | xargs -0 touch -h --date=@$REPRODUCIBLE_TIMESTAMP_ROOTFS
}
systemd_preset_all () {
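
The extraimage_getdepends() change means an EXTRA_IMAGEDEPENDS entry may now carry an explicit task; entries without one still get the default do_populate_sysroot dependency. A hypothetical configuration:

    # Unchanged form: depends on some-firmware:do_populate_sysroot
    EXTRA_IMAGEDEPENDS += "some-firmware"

    # New form: depend on a specific task of the named recipe
    EXTRA_IMAGEDEPENDS += "virtual/bootloader:do_deploy"
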
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass
index 3620931224..32d4cd4c76 100644
--- a/meta/classes/image_types.bbclass
+++ b/meta/classes/image_types.bbclass
@@ -29,7 +29,7 @@ def imagetypes_getdepends(d):
if d.getVar(var) is not None:
deprecated.add(var)
- for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype) or "").split():
+ for typedepends in (d.getVar("IMAGE_TYPEDEP:%s" % basetype) or "").split():
base, rest = split_types(typedepends)
resttypes += rest
@@ -107,6 +107,7 @@ IMAGE_CMD:squashfs = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${
IMAGE_CMD:squashfs-xz = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-xz ${EXTRA_IMAGECMD} -noappend -comp xz"
IMAGE_CMD:squashfs-lzo = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-lzo ${EXTRA_IMAGECMD} -noappend -comp lzo"
IMAGE_CMD:squashfs-lz4 = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-lz4 ${EXTRA_IMAGECMD} -noappend -comp lz4"
+IMAGE_CMD:squashfs-zst = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs-zst ${EXTRA_IMAGECMD} -noappend -comp zstd"
IMAGE_CMD:erofs = "mkfs.erofs ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs ${IMAGE_ROOTFS}"
IMAGE_CMD:erofs-lz4 = "mkfs.erofs -zlz4 ${EXTRA_IMAGECMD} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.erofs-lz4 ${IMAGE_ROOTFS}"
@@ -197,7 +198,7 @@ IMAGE_CMD:multiubi () {
IMAGE_CMD:ubi () {
multiubi_mkfs "${MKUBIFS_ARGS}" "${UBINIZE_ARGS}"
}
-IMAGE_TYPEDEP_ubi = "ubifs"
+IMAGE_TYPEDEP:ubi = "ubifs"
IMAGE_CMD:ubifs = "mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.ubifs ${MKUBIFS_ARGS}"
@@ -244,6 +245,7 @@ do_image_squashfs[depends] += "squashfs-tools-native:do_populate_sysroot"
do_image_squashfs_xz[depends] += "squashfs-tools-native:do_populate_sysroot"
do_image_squashfs_lzo[depends] += "squashfs-tools-native:do_populate_sysroot"
do_image_squashfs_lz4[depends] += "squashfs-tools-native:do_populate_sysroot"
+do_image_squashfs_zst[depends] += "squashfs-tools-native:do_populate_sysroot"
do_image_ubi[depends] += "mtd-utils-native:do_populate_sysroot"
do_image_ubifs[depends] += "mtd-utils-native:do_populate_sysroot"
do_image_multiubi[depends] += "mtd-utils-native:do_populate_sysroot"
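To generate the zstd-compressed artifacts registered here (and cpio.zst further below), an image recipe or local.conf would request them through IMAGE_FSTYPES; a minimal sketch:

    IMAGE_FSTYPES += "squashfs-zst cpio.zst"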
@@ -262,10 +264,10 @@ IMAGE_TYPES = " \
btrfs \
iso \
hddimg \
- squashfs squashfs-xz squashfs-lzo squashfs-lz4 \
+ squashfs squashfs-xz squashfs-lzo squashfs-lz4 squashfs-zst \
ubi ubifs multiubi \
tar tar.gz tar.bz2 tar.xz tar.lz4 tar.zst \
- cpio cpio.gz cpio.xz cpio.lzma cpio.lz4 \
+ cpio cpio.gz cpio.xz cpio.lzma cpio.lz4 cpio.zst \
wic wic.gz wic.bz2 wic.lzma wic.zst \
container \
f2fs \
@@ -279,31 +281,31 @@ IMAGE_TYPES = " \
COMPRESSIONTYPES ?= ""
CONVERSIONTYPES = "gz bz2 lzma xz lz4 lzo zip zst sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap u-boot vmdk vhd vhdx vdi qcow2 base64 gzsync zsync ${COMPRESSIONTYPES}"
-CONVERSION_CMD_lzma = "lzma -k -f -7 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD_gz = "gzip -f -9 -n -c --rsyncable ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.gz"
-CONVERSION_CMD_bz2 = "pbzip2 -f -k ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD_xz = "xz -f -k -c ${XZ_COMPRESSION_LEVEL} ${XZ_DEFAULTS} --check=${XZ_INTEGRITY_CHECK} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.xz"
-CONVERSION_CMD_lz4 = "lz4 -9 -z -l ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.lz4"
-CONVERSION_CMD_lzo = "lzop -9 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD_zip = "zip ${ZIP_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zip ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD_zst = "zstd -f -k -T0 -c ${ZSTD_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zst"
-CONVERSION_CMD_sum = "sumtool -i ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} -o ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sum ${JFFS2_SUM_EXTRA_ARGS}"
-CONVERSION_CMD_md5sum = "md5sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.md5sum"
-CONVERSION_CMD_sha1sum = "sha1sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha1sum"
-CONVERSION_CMD_sha224sum = "sha224sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha224sum"
-CONVERSION_CMD_sha256sum = "sha256sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha256sum"
-CONVERSION_CMD_sha384sum = "sha384sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha384sum"
-CONVERSION_CMD_sha512sum = "sha512sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha512sum"
-CONVERSION_CMD_bmap = "bmaptool create ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} -o ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.bmap"
-CONVERSION_CMD_u-boot = "mkimage -A ${UBOOT_ARCH} -O linux -T ramdisk -C none -n ${IMAGE_NAME} -d ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.u-boot"
-CONVERSION_CMD_vmdk = "qemu-img convert -O vmdk ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vmdk"
-CONVERSION_CMD_vhdx = "qemu-img convert -O vhdx -o subformat=dynamic ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vhdx"
-CONVERSION_CMD_vhd = "qemu-img convert -O vpc -o subformat=fixed ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vhd"
-CONVERSION_CMD_vdi = "qemu-img convert -O vdi ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vdi"
-CONVERSION_CMD_qcow2 = "qemu-img convert -O qcow2 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.qcow2"
-CONVERSION_CMD_base64 = "base64 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.base64"
-CONVERSION_CMD_zsync = "zsyncmake_curl ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
-CONVERSION_CMD_gzsync = "zsyncmake_curl -z ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
+CONVERSION_CMD:lzma = "lzma -k -f -7 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
+CONVERSION_CMD:gz = "gzip -f -9 -n -c --rsyncable ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.gz"
+CONVERSION_CMD:bz2 = "pbzip2 -f -k ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
+CONVERSION_CMD:xz = "xz -f -k -c ${XZ_COMPRESSION_LEVEL} ${XZ_DEFAULTS} --check=${XZ_INTEGRITY_CHECK} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.xz"
+CONVERSION_CMD:lz4 = "lz4 -9 -z -l ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.lz4"
+CONVERSION_CMD:lzo = "lzop -9 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
+CONVERSION_CMD:zip = "zip ${ZIP_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zip ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
+CONVERSION_CMD:zst = "zstd -f -k -T0 -c ${ZSTD_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zst"
+CONVERSION_CMD:sum = "sumtool -i ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} -o ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sum ${JFFS2_SUM_EXTRA_ARGS}"
+CONVERSION_CMD:md5sum = "md5sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.md5sum"
+CONVERSION_CMD:sha1sum = "sha1sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha1sum"
+CONVERSION_CMD:sha224sum = "sha224sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha224sum"
+CONVERSION_CMD:sha256sum = "sha256sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha256sum"
+CONVERSION_CMD:sha384sum = "sha384sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha384sum"
+CONVERSION_CMD:sha512sum = "sha512sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha512sum"
+CONVERSION_CMD:bmap = "bmaptool create ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} -o ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.bmap"
+CONVERSION_CMD:u-boot = "mkimage -A ${UBOOT_ARCH} -O linux -T ramdisk -C none -n ${IMAGE_NAME} -d ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.u-boot"
+CONVERSION_CMD:vmdk = "qemu-img convert -O vmdk ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vmdk"
+CONVERSION_CMD:vhdx = "qemu-img convert -O vhdx -o subformat=dynamic ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vhdx"
+CONVERSION_CMD:vhd = "qemu-img convert -O vpc -o subformat=fixed ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vhd"
+CONVERSION_CMD:vdi = "qemu-img convert -O vdi ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.vdi"
+CONVERSION_CMD:qcow2 = "qemu-img convert -O qcow2 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.qcow2"
+CONVERSION_CMD:base64 = "base64 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.base64"
+CONVERSION_CMD:zsync = "zsyncmake_curl ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
+CONVERSION_CMD:gzsync = "zsyncmake_curl -z ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}"
CONVERSION_DEPENDS_lzma = "xz-native"
CONVERSION_DEPENDS_gz = "pigz-native"
CONVERSION_DEPENDS_bz2 = "pbzip2-native"
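Under the renamed override syntax, adding a custom conversion type from a layer follows the same pattern as the entries above; a sketch with an invented type name ("gz9" is illustrative, and CONVERSION_DEPENDS keeps its underscore form as in the lines above):

    CONVERSIONTYPES += "gz9"
    CONVERSION_CMD:gz9 = "gzip -9 -n -c ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.gz9"
    CONVERSION_DEPENDS_gz9 = "pigz-native"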
diff --git a/meta/classes/image_types_wic.bbclass b/meta/classes/image_types_wic.bbclass
index 2f1a0b709c..e3863c88a9 100644
--- a/meta/classes/image_types_wic.bbclass
+++ b/meta/classes/image_types_wic.bbclass
@@ -1,11 +1,36 @@
# The WICVARS variable is used to define the list of bitbake variables used in wic code;
# variables from this list are written to the <image>.env file
WICVARS ?= "\
- BBLAYERS IMGDEPLOYDIR DEPLOY_DIR_IMAGE FAKEROOTCMD IMAGE_BASENAME IMAGE_EFI_BOOT_FILES IMAGE_BOOT_FILES \
- IMAGE_LINK_NAME IMAGE_ROOTFS INITRAMFS_FSTYPES INITRD INITRD_LIVE ISODIR RECIPE_SYSROOT_NATIVE \
- ROOTFS_SIZE STAGING_DATADIR STAGING_DIR STAGING_LIBDIR TARGET_SYS HOSTTOOLS_DIR \
- KERNEL_IMAGETYPE MACHINE INITRAMFS_IMAGE INITRAMFS_IMAGE_BUNDLE INITRAMFS_LINK_NAME APPEND \
- ASSUME_PROVIDED PSEUDO_IGNORE_PATHS"
+ APPEND \
+ ASSUME_PROVIDED \
+ BBLAYERS \
+ DEPLOY_DIR_IMAGE \
+ FAKEROOTCMD \
+ HOSTTOOLS_DIR \
+ IMAGE_BASENAME \
+ IMAGE_BOOT_FILES \
+ IMAGE_EFI_BOOT_FILES \
+ IMAGE_LINK_NAME \
+ IMAGE_ROOTFS \
+ IMGDEPLOYDIR \
+ INITRAMFS_FSTYPES \
+ INITRAMFS_IMAGE \
+ INITRAMFS_IMAGE_BUNDLE \
+ INITRAMFS_LINK_NAME \
+ INITRD \
+ INITRD_LIVE \
+ ISODIR \
+ KERNEL_IMAGETYPE \
+ MACHINE \
+ PSEUDO_IGNORE_PATHS \
+ RECIPE_SYSROOT_NATIVE \
+ ROOTFS_SIZE \
+ STAGING_DATADIR \
+ STAGING_DIR \
+ STAGING_DIR_HOST \
+ STAGING_LIBDIR \
+ TARGET_SYS \
+"
inherit ${@bb.utils.contains('INITRAMFS_IMAGE_BUNDLE', '1', 'kernel-artifact-names', '', d)}
@@ -60,8 +85,8 @@ do_image_wic[deptask] += "do_image_complete"
WKS_FILE_DEPENDS_DEFAULT = '${@bb.utils.contains_any("BUILD_ARCH", [ 'x86_64', 'i686' ], "syslinux-native", "",d)}'
WKS_FILE_DEPENDS_DEFAULT += "bmap-tools-native cdrtools-native btrfs-tools-native squashfs-tools-native e2fsprogs-native"
WKS_FILE_DEPENDS_BOOTLOADERS = ""
-WKS_FILE_DEPENDS_BOOTLOADERS:x86 = "syslinux grub-efi systemd-boot"
-WKS_FILE_DEPENDS_BOOTLOADERS:x86-64 = "syslinux grub-efi systemd-boot"
+WKS_FILE_DEPENDS_BOOTLOADERS:x86 = "syslinux grub-efi systemd-boot os-release"
+WKS_FILE_DEPENDS_BOOTLOADERS:x86-64 = "syslinux grub-efi systemd-boot os-release"
WKS_FILE_DEPENDS_BOOTLOADERS:x86-x32 = "syslinux grub-efi"
WKS_FILE_DEPENDS ??= "${WKS_FILE_DEPENDS_DEFAULT} ${WKS_FILE_DEPENDS_BOOTLOADERS}"
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index ee858f84b7..1e2f1b768a 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -18,8 +18,6 @@
# files under exec_prefix
# -Check if the package name is upper case
-QA_SANE = "True"
-
# Select whether a given type of error is a warning or an error; they may
# have been set by other files.
WARN_QA ?= " libdir xorg-driver-abi \
@@ -27,7 +25,9 @@ WARN_QA ?= " libdir xorg-driver-abi \
infodir build-deps src-uri-bad symlink-to-sysroot multilib \
invalid-packageconfig host-user-contaminated uppercase-pn patch-fuzz \
mime mime-xdg unlisted-pkg-lics unhandled-features-check \
- missing-update-alternatives native-last \
+ missing-update-alternatives native-last missing-ptest \
+ license-exists license-no-generic license-syntax license-format \
+ license-incompatible license-file-missing \
"
ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
@@ -59,32 +59,6 @@ def package_qa_clean_path(path, d, pkg=None):
path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/")
return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/")
-def package_qa_write_error(type, error, d):
- logfile = d.getVar('QA_LOGFILE')
- if logfile:
- p = d.getVar('P')
- with open(logfile, "a+") as f:
- f.write("%s: %s [%s]\n" % (p, error, type))
-
-def package_qa_handle_error(error_class, error_msg, d):
- if error_class in (d.getVar("ERROR_QA") or "").split():
- package_qa_write_error(error_class, error_msg, d)
- bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
- d.setVar("QA_SANE", False)
- return False
- elif error_class in (d.getVar("WARN_QA") or "").split():
- package_qa_write_error(error_class, error_msg, d)
- bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
- else:
- bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
- return True
-
-def package_qa_add_message(messages, section, new_msg):
- if section not in messages:
- messages[section] = new_msg
- else:
- messages[section] = messages[section] + "\n" + new_msg
-
QAPATHTEST[shebang-size] = "package_qa_check_shebang_size"
def package_qa_check_shebang_size(path, name, d, elf, messages):
import stat
@@ -106,7 +80,7 @@ def package_qa_check_shebang_size(path, name, d, elf, messages):
return
if len(stanza) > 129:
- package_qa_add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d)))
+ oe.qa.add_message(messages, "shebang-size", "%s: %s maximum shebang size exceeded, the maximum size is 128." % (name, package_qa_clean_path(path, d)))
return
QAPATHTEST[libexec] = "package_qa_check_libexec"
@@ -118,7 +92,7 @@ def package_qa_check_libexec(path,name, d, elf, messages):
return True
if 'libexec' in path.split(os.path.sep):
- package_qa_add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec))
+ oe.qa.add_message(messages, "libexec", "%s: %s is using libexec please relocate to %s" % (name, package_qa_clean_path(path, d), libexec))
return False
return True
@@ -146,7 +120,7 @@ def package_qa_check_rpath(file,name, d, elf, messages):
rpath = m.group(1)
for dir in bad_dirs:
if dir in rpath:
- package_qa_add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file))
+ oe.qa.add_message(messages, "rpaths", "package %s contains bad RPATH %s in file %s" % (name, rpath, file))
QAPATHTEST[useless-rpaths] = "package_qa_check_useless_rpaths"
def package_qa_check_useless_rpaths(file, name, d, elf, messages):
@@ -176,7 +150,7 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages):
if rpath_eq(rpath, libdir) or rpath_eq(rpath, base_libdir):
# The dynamic linker searches both these places anyway. There is no point in
# looking there again.
- package_qa_add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath))
+ oe.qa.add_message(messages, "useless-rpaths", "%s: %s contains probably-redundant RPATH %s" % (name, package_qa_clean_path(file, d, name), rpath))
QAPATHTEST[dev-so] = "package_qa_check_dev"
def package_qa_check_dev(path, name, d, elf, messages):
@@ -185,7 +159,7 @@ def package_qa_check_dev(path, name, d, elf, messages):
"""
if not name.endswith("-dev") and not name.endswith("-dbg") and not name.endswith("-ptest") and not name.startswith("nativesdk-") and path.endswith(".so") and os.path.islink(path):
- package_qa_add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
+ oe.qa.add_message(messages, "dev-so", "non -dev/-dbg/nativesdk- package %s contains symlink .so '%s'" % \
(name, package_qa_clean_path(path, d, name)))
QAPATHTEST[dev-elf] = "package_qa_check_dev_elf"
@@ -196,7 +170,7 @@ def package_qa_check_dev_elf(path, name, d, elf, messages):
install link-time .so files that are linker scripts.
"""
if name.endswith("-dev") and path.endswith(".so") and not os.path.islink(path) and elf:
- package_qa_add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
+ oe.qa.add_message(messages, "dev-elf", "-dev package %s contains non-symlink .so '%s'" % \
(name, package_qa_clean_path(path, d, name)))
QAPATHTEST[staticdev] = "package_qa_check_staticdev"
@@ -209,7 +183,7 @@ def package_qa_check_staticdev(path, name, d, elf, messages):
"""
if not name.endswith("-pic") and not name.endswith("-staticdev") and not name.endswith("-ptest") and path.endswith(".a") and not path.endswith("_nonshared.a") and not '/usr/lib/debug-static/' in path and not '/.debug-static/' in path:
- package_qa_add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
+ oe.qa.add_message(messages, "staticdev", "non -staticdev package contains static .a library: %s path '%s'" % \
(name, package_qa_clean_path(path,d, name)))
QAPATHTEST[mime] = "package_qa_check_mime"
@@ -220,7 +194,7 @@ def package_qa_check_mime(path, name, d, elf, messages):
"""
if d.getVar("datadir") + "/mime/packages" in path and path.endswith('.xml') and not bb.data.inherits_class("mime", d):
- package_qa_add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
+ oe.qa.add_message(messages, "mime", "package contains mime types but does not inherit mime: %s path '%s'" % \
(name, package_qa_clean_path(path,d)))
QAPATHTEST[mime-xdg] = "package_qa_check_mime_xdg"
@@ -247,9 +221,9 @@ def package_qa_check_mime_xdg(path, name, d, elf, messages):
if name == d.getVar('PN'):
pkgname = '${PN}'
wstr += "If yes: add \'inhert mime-xdg\' and \'MIME_XDG_PACKAGES += \"%s\"\' / if no add \'INSANE_SKIP:%s += \"mime-xdg\"\' to recipe." % (pkgname, pkgname)
- package_qa_add_message(messages, "mime-xdg", wstr)
+ oe.qa.add_message(messages, "mime-xdg", wstr)
if mime_type_found:
- package_qa_add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inhert mime-xdg: %s path '%s'" % \
+ oe.qa.add_message(messages, "mime-xdg", "package contains desktop file with key 'MimeType' but does not inherit mime-xdg: %s path '%s'" % \
(name, package_qa_clean_path(path,d)))
def package_qa_check_libdir(d):
@@ -313,7 +287,7 @@ def package_qa_check_libdir(d):
pass
if messages:
- package_qa_handle_error("libdir", "\n".join(messages), d)
+ oe.qa.handle_error("libdir", "\n".join(messages), d)
QAPATHTEST[debug-files] = "package_qa_check_dbg"
def package_qa_check_dbg(path, name, d, elf, messages):
@@ -323,7 +297,7 @@ def package_qa_check_dbg(path, name, d, elf, messages):
if not "-dbg" in name and not "-ptest" in name:
if '.debug' in path.split(os.path.sep):
- package_qa_add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \
+ oe.qa.add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \
(name, package_qa_clean_path(path,d)))
QAPATHTEST[arch] = "package_qa_check_arch"
@@ -343,7 +317,7 @@ def package_qa_check_arch(path,name,d, elf, messages):
if target_arch == "allarch":
pn = d.getVar('PN')
- package_qa_add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
+ oe.qa.add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries")
return
# FIXME: Cross package confuse this check, so just skip them
@@ -366,13 +340,13 @@ def package_qa_check_arch(path,name,d, elf, messages):
target_os == "linux-gnu_ilp32" or re.match(r'mips64.*32', d.getVar('DEFAULTTUNE')))
is_bpf = (oe.qa.elf_machine_to_string(elf.machine()) == "BPF")
if not ((machine == elf.machine()) or is_32 or is_bpf):
- package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \
+ oe.qa.add_message(messages, "arch", "Architecture did not match (%s, expected %s) in %s" % \
(oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path, d, name)))
elif not ((bits == elf.abiSize()) or is_32 or is_bpf):
- package_qa_add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \
+ oe.qa.add_message(messages, "arch", "Bit size did not match (%d, expected %d) in %s" % \
(elf.abiSize(), bits, package_qa_clean_path(path, d, name)))
elif not ((littleendian == elf.isLittleEndian()) or is_bpf):
- package_qa_add_message(messages, "arch", "Endiannes did not match (%d, expected %d) in %s" % \
+ oe.qa.add_message(messages, "arch", "Endianness did not match (%d, expected %d) in %s" % \
(elf.isLittleEndian(), littleendian, package_qa_clean_path(path,d, name)))
QAPATHTEST[desktop] = "package_qa_check_desktop"
@@ -385,7 +359,7 @@ def package_qa_check_desktop(path, name, d, elf, messages):
output = os.popen("%s %s" % (desktop_file_validate, path))
# This only produces output on errors
for l in output:
- package_qa_add_message(messages, "desktop", "Desktop file issue: " + l.strip())
+ oe.qa.add_message(messages, "desktop", "Desktop file issue: " + l.strip())
QAPATHTEST[textrel] = "package_qa_textrel"
def package_qa_textrel(path, name, d, elf, messages):
@@ -411,7 +385,7 @@ def package_qa_textrel(path, name, d, elf, messages):
if not sane:
path = package_qa_clean_path(path, d, name)
- package_qa_add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path))
+ oe.qa.add_message(messages, "textrel", "%s: ELF binary %s has relocations in .text" % (name, path))
QAPATHTEST[ldflags] = "package_qa_hash_style"
def package_qa_hash_style(path, name, d, elf, messages):
@@ -446,7 +420,7 @@ def package_qa_hash_style(path, name, d, elf, messages):
sane = True
if has_syms and not sane:
path = package_qa_clean_path(path, d, name)
- package_qa_add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name))
+ oe.qa.add_message(messages, "ldflags", "File %s in package %s doesn't have GNU_HASH (didn't pass LDFLAGS?)" % (path, name))
QAPATHTEST[buildpaths] = "package_qa_check_buildpaths"
@@ -467,7 +441,7 @@ def package_qa_check_buildpaths(path, name, d, elf, messages):
file_content = f.read()
if tmpdir in file_content:
trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
- package_qa_add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name))
+ oe.qa.add_message(messages, "buildpaths", "File %s in package %s contains reference to TMPDIR" % (trimmed, name))
QAPATHTEST[xorg-driver-abi] = "package_qa_check_xorg_driver_abi"
@@ -486,7 +460,7 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages):
for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS:' + name) or ""):
if rdep.startswith("%sxorg-abi-" % mlprefix):
return
- package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
+ oe.qa.add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path)))
QAPATHTEST[infodir] = "package_qa_check_infodir"
def package_qa_check_infodir(path, name, d, elf, messages):
@@ -496,7 +470,7 @@ def package_qa_check_infodir(path, name, d, elf, messages):
infodir = d.expand("${infodir}/dir")
if infodir in path:
- package_qa_add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.")
+ oe.qa.add_message(messages, "infodir", "The /usr/share/info/dir file is not meant to be shipped in a particular package.")
QAPATHTEST[symlink-to-sysroot] = "package_qa_check_symlink_to_sysroot"
def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
@@ -509,7 +483,7 @@ def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages):
tmpdir = d.getVar('TMPDIR')
if target.startswith(tmpdir):
trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "")
- package_qa_add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
+ oe.qa.add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name))
# Check license variables
do_populate_lic[postfuncs] += "populate_lic_qa_checksum"
@@ -517,7 +491,6 @@ python populate_lic_qa_checksum() {
"""
Check for changes in the license files.
"""
- sane = True
lic_files = d.getVar('LIC_FILES_CHKSUM') or ''
lic = d.getVar('LICENSE')
@@ -527,7 +500,7 @@ python populate_lic_qa_checksum() {
return
if not lic_files and d.getVar('SRC_URI'):
- sane &= package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)
+ oe.qa.handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d)
srcdir = d.getVar('S')
corebase_licensefile = d.getVar('COREBASE') + "/LICENSE"
@@ -535,11 +508,11 @@ python populate_lic_qa_checksum() {
try:
(type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
except bb.fetch.MalformedUrl:
- sane &= package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d)
+ oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM contains an invalid URL: " + url, d)
continue
srclicfile = os.path.join(srcdir, path)
if not os.path.isfile(srclicfile):
- sane &= package_qa_handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d)
+ oe.qa.handle_error("license-checksum", pn + ": LIC_FILES_CHKSUM points to an invalid file: " + srclicfile, d)
continue
if (srclicfile == corebase_licensefile):
@@ -607,10 +580,9 @@ python populate_lic_qa_checksum() {
else:
msg = pn + ": LIC_FILES_CHKSUM is not specified for " + url
msg = msg + "\n" + pn + ": The md5 checksum is " + md5chksum
- sane &= package_qa_handle_error("license-checksum", msg, d)
+ oe.qa.handle_error("license-checksum", msg, d)
- if not sane:
- bb.fatal("Fatal QA errors found, failing task.")
+ oe.qa.exit_if_errors(d)
}
def qa_check_staged(path,d):
@@ -622,7 +594,6 @@ def qa_check_staged(path,d):
responsible for the errors easily even if we look at every .pc and .la file.
"""
- sane = True
tmpdir = d.getVar('TMPDIR')
workdir = os.path.join(tmpdir, "work")
recipesysroot = d.getVar("RECIPE_SYSROOT")
@@ -655,16 +626,14 @@ def qa_check_staged(path,d):
file_content = file_content.replace(recipesysroot, "")
if workdir in file_content:
error_msg = "%s failed sanity test (workdir) in path %s" % (file,root)
- sane &= package_qa_handle_error("la", error_msg, d)
+ oe.qa.handle_error("la", error_msg, d)
elif file.endswith(".pc") and not skip_pkgconfig:
with open(path) as f:
file_content = f.read()
file_content = file_content.replace(recipesysroot, "")
if pkgconfigcheck in file_content:
error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root)
- sane &= package_qa_handle_error("pkgconfig", error_msg, d)
-
- return sane
+ oe.qa.handle_error("pkgconfig", error_msg, d)
# Run all package-wide warnfuncs and errorfuncs
def package_qa_package(warnfuncs, errorfuncs, package, d):
@@ -677,9 +646,9 @@ def package_qa_package(warnfuncs, errorfuncs, package, d):
func(package, d, errors)
for w in warnings:
- package_qa_handle_error(w, warnings[w], d)
+ oe.qa.handle_error(w, warnings[w], d)
for e in errors:
- package_qa_handle_error(e, errors[e], d)
+ oe.qa.handle_error(e, errors[e], d)
return len(errors) == 0
@@ -694,16 +663,14 @@ def package_qa_recipe(warnfuncs, errorfuncs, pn, d):
func(pn, d, errors)
for w in warnings:
- package_qa_handle_error(w, warnings[w], d)
+ oe.qa.handle_error(w, warnings[w], d)
for e in errors:
- package_qa_handle_error(e, errors[e], d)
+ oe.qa.handle_error(e, errors[e], d)
return len(errors) == 0
# Walk over all files in a directory and call func
def package_qa_walk(warnfuncs, errorfuncs, package, d):
- import oe.qa
-
#if this will throw an exception, then fix the dict above
target_os = d.getVar('TARGET_OS')
target_arch = d.getVar('TARGET_ARCH')
@@ -724,9 +691,9 @@ def package_qa_walk(warnfuncs, errorfuncs, package, d):
func(path, package, d, elf, errors)
for w in warnings:
- package_qa_handle_error(w, warnings[w], d)
+ oe.qa.handle_error(w, warnings[w], d)
for e in errors:
- package_qa_handle_error(e, errors[e], d)
+ oe.qa.handle_error(e, errors[e], d)
def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
# Don't do this check for kernel/module recipes, there aren't too many debug/development
@@ -746,10 +713,10 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
for rdepend in rdepends:
if "-dbg" in rdepend and "debug-deps" not in skip:
error_msg = "%s rdepends on %s" % (pkg,rdepend)
- package_qa_handle_error("debug-deps", error_msg, d)
+ oe.qa.handle_error("debug-deps", error_msg, d)
if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip:
error_msg = "%s rdepends on %s" % (pkg, rdepend)
- package_qa_handle_error("dev-deps", error_msg, d)
+ oe.qa.handle_error("dev-deps", error_msg, d)
if rdepend not in packages:
rdep_data = oe.packagedata.read_subpkgdata(rdepend, d)
if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
@@ -770,7 +737,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
error_msg = "%s rdepends on %s, but it isn't a build dependency, missing %s in DEPENDS or PACKAGECONFIG?" % (pkg, rdepend, rdep_data['PN'])
else:
error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend)
- package_qa_handle_error("build-deps", error_msg, d)
+ oe.qa.handle_error("build-deps", error_msg, d)
if "file-rdeps" not in skip:
ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)'])
@@ -780,7 +747,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
filerdepends = {}
rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
for key in rdep_data:
- if key.startswith("FILERDEPENDS_"):
+ if key.startswith("FILERDEPENDS:"):
for subkey in bb.utils.explode_deps(rdep_data[key]):
if subkey not in ignored_file_rdeps and \
not subkey.startswith('perl('):
@@ -808,11 +775,11 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
# For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO
rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
for key in rdep_data:
- if key.startswith("FILERPROVIDES_") or key.startswith("RPROVIDES:"):
+ if key.startswith("FILERPROVIDES:") or key.startswith("RPROVIDES:"):
for subkey in bb.utils.explode_deps(rdep_data[key]):
filerdepends.pop(subkey,None)
# Add the files list to the rprovides
- if key == "FILES_INFO":
+ if key.startswith("FILES_INFO:"):
# Use eval() to make it as a dict
for subkey in eval(rdep_data[key]):
filerdepends.pop(subkey,None)
@@ -822,8 +789,8 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
if filerdepends:
for key in filerdepends:
error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS:%s?" % \
- (filerdepends[key].replace("_%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg)
- package_qa_handle_error("file-rdeps", error_msg, d)
+ (filerdepends[key].replace(":%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg)
+ oe.qa.handle_error("file-rdeps", error_msg, d)
package_qa_check_rdepends[vardepsexclude] = "OVERRIDES"
def package_qa_check_deps(pkg, pkgdest, d):
@@ -835,12 +802,12 @@ def package_qa_check_deps(pkg, pkgdest, d):
try:
rvar = bb.utils.explode_dep_versions2(localdata.getVar(var) or "")
except ValueError as e:
- bb.fatal("%s_%s: %s" % (var, pkg, e))
+ bb.fatal("%s:%s: %s" % (var, pkg, e))
for dep in rvar:
for v in rvar[dep]:
if v and not v.startswith(('< ', '= ', '> ', '<= ', '>=')):
- error_msg = "%s_%s is invalid: %s (%s) only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v)
- package_qa_handle_error("dep-cmp", error_msg, d)
+ error_msg = "%s:%s is invalid: %s (%s) only comparisons <, =, >, <=, and >= are allowed" % (var, pkg, dep, v)
+ oe.qa.handle_error("dep-cmp", error_msg, d)
check_valid_deps('RDEPENDS')
check_valid_deps('RRECOMMENDS')
@@ -851,13 +818,14 @@ def package_qa_check_deps(pkg, pkgdest, d):
QAPKGTEST[usrmerge] = "package_qa_check_usrmerge"
def package_qa_check_usrmerge(pkg, d, messages):
+
pkgdest = d.getVar('PKGDEST')
pkg_dir = pkgdest + os.sep + pkg + os.sep
merged_dirs = ['bin', 'sbin', 'lib'] + d.getVar('MULTILIB_VARIANTS').split()
for f in merged_dirs:
if os.path.exists(pkg_dir + f) and not os.path.islink(pkg_dir + f):
msg = "%s package is not obeying usrmerge distro feature. /%s should be relocated to /usr." % (pkg, f)
- package_qa_add_message(messages, "usrmerge", msg)
+ oe.qa.add_message(messages, "usrmerge", msg)
return False
return True
@@ -876,7 +844,7 @@ def package_qa_check_perllocalpod(pkg, d, messages):
if matches:
matches = [package_qa_clean_path(path, d, pkg) for path in matches]
msg = "%s contains perllocal.pod (%s), should not be installed" % (pkg, " ".join(matches))
- package_qa_add_message(messages, "perllocalpod", msg)
+ oe.qa.add_message(messages, "perllocalpod", msg)
QAPKGTEST[expanded-d] = "package_qa_check_expanded_d"
def package_qa_check_expanded_d(package, d, messages):
@@ -888,13 +856,13 @@ def package_qa_check_expanded_d(package, d, messages):
expanded_d = d.getVar('D')
for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm':
- bbvar = d.getVar(var + "_" + package) or ""
+ bbvar = d.getVar(var + ":" + package) or ""
if expanded_d in bbvar:
if var == 'FILES':
- package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
+ oe.qa.add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package)
sane = False
else:
- package_qa_add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
+ oe.qa.add_message(messages, "expanded-d", "%s in %s recipe contains ${D}, it should be replaced by $D instead" % (var, package))
sane = False
return sane
@@ -912,7 +880,7 @@ def package_qa_check_unlisted_pkg_lics(package, d, messages):
if not unlisted:
return True
- package_qa_add_message(messages, "unlisted-pkg-lics",
+ oe.qa.add_message(messages, "unlisted-pkg-lics",
"LICENSE:%s includes licenses (%s) that are not "
"listed in LICENSE" % (package, ' '.join(unlisted)))
return False
@@ -927,7 +895,7 @@ def package_qa_check_encoding(keys, encode, d):
except UnicodeDecodeError as e:
error_msg = "%s has non %s characters" % (key,enc)
sane = False
- package_qa_handle_error("invalid-chars", error_msg, d)
+ oe.qa.handle_error("invalid-chars", error_msg, d)
return sane
for key in keys:
@@ -960,12 +928,12 @@ def package_qa_check_host_user(path, name, d, elf, messages):
else:
check_uid = int(d.getVar('HOST_USER_UID'))
if stat.st_uid == check_uid:
- package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid))
+ oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_uid))
return False
check_gid = int(d.getVar('HOST_USER_GID'))
if stat.st_gid == check_gid:
- package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid))
+ oe.qa.add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, package_qa_clean_path(path, d, name), check_gid))
return False
return True
@@ -974,11 +942,11 @@ def package_qa_check_src_uri(pn, d, messages):
import re
if "${PN}" in d.getVar("SRC_URI", False):
- package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
+ oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses PN not BPN" % pn, d)
for url in d.getVar("SRC_URI").split():
if re.search(r"git(hu|la)b\.com/.+/.+/archive/.+", url):
- package_qa_handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
+ oe.qa.handle_error("src-uri-bad", "%s: SRC_URI uses unstable GitHub/GitLab archives, convert recipe to use git protocol" % pn, d)
QARECIPETEST[unhandled-features-check] = "package_qa_check_unhandled_features_check"
def package_qa_check_unhandled_features_check(pn, d, messages):
@@ -989,7 +957,7 @@ def package_qa_check_unhandled_features_check(pn, d, messages):
if d.getVar(var) is not None or d.overridedata.get(var) is not None:
var_set = True
if var_set:
- package_qa_handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d)
+ oe.qa.handle_error("unhandled-features-check", "%s: recipe doesn't inherit features_check" % pn, d)
QARECIPETEST[missing-update-alternatives] = "package_qa_check_missing_update_alternatives"
def package_qa_check_missing_update_alternatives(pn, d, messages):
@@ -997,7 +965,7 @@ def package_qa_check_missing_update_alternatives(pn, d, messages):
# without inheriting update-alternatives class
for pkg in (d.getVar('PACKAGES') or '').split():
if d.getVar('ALTERNATIVE:%s' % pkg) and not bb.data.inherits_class('update-alternatives', d):
- package_qa_handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d)
+ oe.qa.handle_error("missing-update-alternatives", "%s: recipe defines ALTERNATIVE:%s but doesn't inherit update-alternatives. This might fail during do_rootfs later!" % (pn, pkg), d)
# The PACKAGE FUNC to scan each package
python do_package_qa () {
@@ -1073,7 +1041,7 @@ python do_package_qa () {
bb.note("Checking Package: %s" % package)
# Check package name
if not pkgname_pattern.match(package):
- package_qa_handle_error("pkgname",
+ oe.qa.handle_error("pkgname",
"%s doesn't match the [a-z0-9.+-]+ regex" % package, d)
warn_checks, error_checks = parse_test_matrix("QAPATHTEST")
@@ -1091,10 +1059,7 @@ python do_package_qa () {
if 'libdir' in d.getVar("ALL_QA").split():
package_qa_check_libdir(d)
- qa_sane = d.getVar("QA_SANE")
- if not qa_sane:
- bb.fatal("QA run found fatal errors. Please consider fixing them.")
- bb.note("DONE with PACKAGE QA")
+ oe.qa.exit_if_errors(d)
}
# binutils is used for most checks, so need to set as dependency
@@ -1121,8 +1086,8 @@ addtask do_package_qa_setscene
python do_qa_staging() {
bb.note("QA checking staging")
- if not qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d):
- bb.fatal("QA staging was broken by the package built above")
+ qa_check_staged(d.expand('${SYSROOT_DESTDIR}${libdir}'), d)
+ oe.qa.exit_with_message_if_errors("QA staging was broken by the package built above", d)
}
python do_qa_patch() {
@@ -1170,7 +1135,7 @@ python do_qa_patch() {
elif 'patch-fuzz' in d.getVar('WARN_QA'):
bb.warn(msg)
msg = "Patch log indicates that patches do not apply cleanly."
- package_qa_handle_error("patch-fuzz", msg, d)
+ oe.qa.handle_error("patch-fuzz", msg, d)
}
python do_qa_configure() {
@@ -1198,7 +1163,7 @@ python do_qa_configure() {
if subprocess.call(statement, shell=True) == 0:
error_msg = """This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities.
Rerun configure task after fixing this."""
- package_qa_handle_error("configure-unsafe", error_msg, d)
+ oe.qa.handle_error("configure-unsafe", error_msg, d)
if "configure.ac" in files:
configs.append(os.path.join(root,"configure.ac"))
@@ -1228,7 +1193,7 @@ Rerun configure task after fixing this."""
gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config
if subprocess.call(gnu, shell=True) == 0:
error_msg = "AM_GNU_GETTEXT used but no inherit gettext"
- package_qa_handle_error("configure-gettext", error_msg, d)
+ oe.qa.handle_error("configure-gettext", error_msg, d)
###########################################################################
# Check unrecognised configure options (with a white list)
@@ -1251,7 +1216,7 @@ Rerun configure task after fixing this."""
if options:
pn = d.getVar('PN')
error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options)
- package_qa_handle_error("unknown-configure-option", error_msg, d)
+ oe.qa.handle_error("unknown-configure-option", error_msg, d)
except subprocess.CalledProcessError:
pass
@@ -1263,11 +1228,9 @@ Rerun configure task after fixing this."""
if pconfig not in pkgconfigflags:
pn = d.getVar('PN')
error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig)
- package_qa_handle_error("invalid-packageconfig", error_msg, d)
+ oe.qa.handle_error("invalid-packageconfig", error_msg, d)
- qa_sane = d.getVar("QA_SANE")
- if not qa_sane:
- bb.fatal("Fatal QA errors found, failing task.")
+ oe.qa.exit_if_errors(d)
}
python do_qa_unpack() {
@@ -1320,15 +1283,15 @@ python () {
pn = d.getVar('PN')
if pn in overrides:
msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE"), pn)
- package_qa_handle_error("pn-overrides", msg, d)
+ oe.qa.handle_error("pn-overrides", msg, d)
prog = re.compile(r'[A-Z]')
if prog.search(pn):
- package_qa_handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d)
+ oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d)
- # Some people mistakenly use DEPENDS_${PN} instead of DEPENDS and wonder
+ # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder
# why it doesn't work.
- if (d.getVar(d.expand('DEPENDS_${PN}'))):
- package_qa_handle_error("pkgvarcheck", "recipe uses DEPENDS_${PN}, should use DEPENDS", d)
+ if (d.getVar(d.expand('DEPENDS:${PN}'))):
+ oe.qa.handle_error("pkgvarcheck", "recipe uses DEPENDS:${PN}, should use DEPENDS", d)
issues = []
if (d.getVar('PACKAGES') or "").split():
@@ -1345,7 +1308,7 @@ python () {
else:
d.setVarFlag('do_package_qa', 'rdeptask', '')
for i in issues:
- package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)
+ oe.qa.handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d)
if 'native-last' not in (d.getVar('INSANE_SKIP') or "").split():
for native_class in ['native', 'nativesdk']:
@@ -1373,11 +1336,8 @@ python () {
else:
break
if broken_order:
- package_qa_handle_error("native-last", "%s: native/nativesdk class is not inherited last, this can result in unexpected behaviour. "
+ oe.qa.handle_error("native-last", "%s: native/nativesdk class is not inherited last, this can result in unexpected behaviour. "
"Classes inherited after native/nativesdk: %s" % (pn, " ".join(broken_order)), d)
-
- qa_sane = d.getVar("QA_SANE")
- if not qa_sane:
- bb.fatal("Fatal QA errors found, failing task.")
+ oe.qa.exit_if_errors(d)
}
diff --git a/meta/classes/kernel-fitimage.bbclass b/meta/classes/kernel-fitimage.bbclass
index a9d1002200..8718ce7e16 100644
--- a/meta/classes/kernel-fitimage.bbclass
+++ b/meta/classes/kernel-fitimage.bbclass
@@ -60,12 +60,20 @@ FIT_DESC ?= "Kernel fitImage for ${DISTRO_NAME}/${PV}/${MACHINE}"
# Sign individual images as well
FIT_SIGN_INDIVIDUAL ?= "0"
+# Keys used to sign image nodes individually.
+# The keys used to sign image nodes must be different from those used to sign
+# configuration nodes, otherwise the "required" property, from
+# UBOOT_DTB_BINARY, will be set to "conf", because "conf" takes precedence over
+# "image". Image signature checking would then no longer be mandatory, and no
+# error would be raised if it fails.
+# UBOOT_SIGN_IMG_KEYNAME = "dev2" # key name in keydir (e.g. "dev2.crt", "dev2.key")
+
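A signing setup that exercises this keeps the two key names distinct; a minimal local.conf sketch (the key directory and key names are assumptions):

    UBOOT_SIGN_ENABLE = "1"
    UBOOT_SIGN_KEYDIR = "/path/to/keys"
    UBOOT_SIGN_KEYNAME = "dev"
    UBOOT_SIGN_IMG_KEYNAME = "dev2"
    FIT_SIGN_INDIVIDUAL = "1"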
#
# Emit the fitImage ITS header
#
# $1 ... .its filename
fitimage_emit_fit_header() {
- cat << EOF >> ${1}
+ cat << EOF >> $1
/dts-v1/;
/ {
@@ -86,24 +94,24 @@ EOF
fitimage_emit_section_maint() {
case $2 in
imagestart)
- cat << EOF >> ${1}
+ cat << EOF >> $1
images {
EOF
;;
confstart)
- cat << EOF >> ${1}
+ cat << EOF >> $1
configurations {
EOF
;;
sectend)
- cat << EOF >> ${1}
+ cat << EOF >> $1
};
EOF
;;
fitend)
- cat << EOF >> ${1}
+ cat << EOF >> $1
};
EOF
;;
@@ -121,7 +129,7 @@ fitimage_emit_section_kernel() {
kernel_csum="${FIT_HASH_ALG}"
kernel_sign_algo="${FIT_SIGN_ALG}"
- kernel_sign_keyname="${UBOOT_SIGN_KEYNAME}"
+ kernel_sign_keyname="${UBOOT_SIGN_IMG_KEYNAME}"
ENTRYPOINT="${UBOOT_ENTRYPOINT}"
if [ -n "${UBOOT_ENTRYSYMBOL}" ]; then
@@ -129,28 +137,28 @@ fitimage_emit_section_kernel() {
awk '$3=="${UBOOT_ENTRYSYMBOL}" {print "0x"$1;exit}'`
fi
- cat << EOF >> ${1}
- kernel-${2} {
+ cat << EOF >> $1
+ kernel-$2 {
description = "Linux kernel";
- data = /incbin/("${3}");
+ data = /incbin/("$3");
type = "kernel";
arch = "${UBOOT_ARCH}";
os = "linux";
- compression = "${4}";
+ compression = "$4";
load = <${UBOOT_LOADADDRESS}>;
- entry = <${ENTRYPOINT}>;
+ entry = <$ENTRYPOINT>;
hash-1 {
- algo = "${kernel_csum}";
+ algo = "$kernel_csum";
};
};
EOF
- if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${FIT_SIGN_INDIVIDUAL}" = "1" -a -n "${kernel_sign_keyname}" ] ; then
- sed -i '$ d' ${1}
- cat << EOF >> ${1}
+ if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${FIT_SIGN_INDIVIDUAL}" = "1" -a -n "$kernel_sign_keyname" ] ; then
+ sed -i '$ d' $1
+ cat << EOF >> $1
signature-1 {
- algo = "${kernel_csum},${kernel_sign_algo}";
- key-name-hint = "${kernel_sign_keyname}";
+ algo = "$kernel_csum,$kernel_sign_algo";
+ key-name-hint = "$kernel_sign_keyname";
};
};
EOF
@@ -167,7 +175,7 @@ fitimage_emit_section_dtb() {
dtb_csum="${FIT_HASH_ALG}"
dtb_sign_algo="${FIT_SIGN_ALG}"
- dtb_sign_keyname="${UBOOT_SIGN_KEYNAME}"
+ dtb_sign_keyname="${UBOOT_SIGN_IMG_KEYNAME}"
dtb_loadline=""
dtb_ext=${DTB##*.}
@@ -178,26 +186,26 @@ fitimage_emit_section_dtb() {
elif [ -n "${UBOOT_DTB_LOADADDRESS}" ]; then
dtb_loadline="load = <${UBOOT_DTB_LOADADDRESS}>;"
fi
- cat << EOF >> ${1}
- fdt-${2} {
+ cat << EOF >> $1
+ fdt-$2 {
description = "Flattened Device Tree blob";
- data = /incbin/("${3}");
+ data = /incbin/("$3");
type = "flat_dt";
arch = "${UBOOT_ARCH}";
compression = "none";
- ${dtb_loadline}
+ $dtb_loadline
hash-1 {
- algo = "${dtb_csum}";
+ algo = "$dtb_csum";
};
};
EOF
- if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${FIT_SIGN_INDIVIDUAL}" = "1" -a -n "${dtb_sign_keyname}" ] ; then
- sed -i '$ d' ${1}
- cat << EOF >> ${1}
+ if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${FIT_SIGN_INDIVIDUAL}" = "1" -a -n "$dtb_sign_keyname" ] ; then
+ sed -i '$ d' $1
+ cat << EOF >> $1
signature-1 {
- algo = "${dtb_csum},${dtb_sign_algo}";
- key-name-hint = "${dtb_sign_keyname}";
+ algo = "$dtb_csum,$dtb_sign_algo";
+ key-name-hint = "$dtb_sign_keyname";
};
};
EOF
@@ -212,29 +220,29 @@ EOF
# $3 ... Path to boot script image
fitimage_emit_section_boot_script() {
- bootscr_csum="${FIT_HASH_ALG}"
+ bootscr_csum="${FIT_HASH_ALG}"
bootscr_sign_algo="${FIT_SIGN_ALG}"
- bootscr_sign_keyname="${UBOOT_SIGN_KEYNAME}"
+ bootscr_sign_keyname="${UBOOT_SIGN_IMG_KEYNAME}"
- cat << EOF >> ${1}
- bootscr-${2} {
+ cat << EOF >> $1
+ bootscr-$2 {
description = "U-boot script";
- data = /incbin/("${3}");
+ data = /incbin/("$3");
type = "script";
arch = "${UBOOT_ARCH}";
compression = "none";
hash-1 {
- algo = "${bootscr_csum}";
+ algo = "$bootscr_csum";
};
};
EOF
- if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${FIT_SIGN_INDIVIDUAL}" = "1" -a -n "${bootscr_sign_keyname}" ] ; then
- sed -i '$ d' ${1}
- cat << EOF >> ${1}
+ if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${FIT_SIGN_INDIVIDUAL}" = "1" -a -n "$bootscr_sign_keyname" ] ; then
+ sed -i '$ d' $1
+ cat << EOF >> $1
signature-1 {
- algo = "${bootscr_csum},${bootscr_sign_algo}";
- key-name-hint = "${bootscr_sign_keyname}";
+ algo = "$bootscr_csum,$bootscr_sign_algo";
+ key-name-hint = "$bootscr_sign_keyname";
};
};
EOF
@@ -251,10 +259,10 @@ fitimage_emit_section_setup() {
setup_csum="${FIT_HASH_ALG}"
- cat << EOF >> ${1}
- setup-${2} {
+ cat << EOF >> $1
+ setup-$2 {
description = "Linux setup.bin";
- data = /incbin/("${3}");
+ data = /incbin/("$3");
type = "x86_setup";
arch = "${UBOOT_ARCH}";
os = "linux";
@@ -262,7 +270,7 @@ fitimage_emit_section_setup() {
load = <0x00090000>;
entry = <0x00090000>;
hash-1 {
- algo = "${setup_csum}";
+ algo = "$setup_csum";
};
};
EOF
@@ -278,7 +286,7 @@ fitimage_emit_section_ramdisk() {
ramdisk_csum="${FIT_HASH_ALG}"
ramdisk_sign_algo="${FIT_SIGN_ALG}"
- ramdisk_sign_keyname="${UBOOT_SIGN_KEYNAME}"
+ ramdisk_sign_keyname="${UBOOT_SIGN_IMG_KEYNAME}"
ramdisk_loadline=""
ramdisk_entryline=""
@@ -289,28 +297,28 @@ fitimage_emit_section_ramdisk() {
ramdisk_entryline="entry = <${UBOOT_RD_ENTRYPOINT}>;"
fi
- cat << EOF >> ${1}
- ramdisk-${2} {
+ cat << EOF >> $1
+ ramdisk-$2 {
description = "${INITRAMFS_IMAGE}";
- data = /incbin/("${3}");
+ data = /incbin/("$3");
type = "ramdisk";
arch = "${UBOOT_ARCH}";
os = "linux";
compression = "none";
- ${ramdisk_loadline}
- ${ramdisk_entryline}
+ $ramdisk_loadline
+ $ramdisk_entryline
hash-1 {
- algo = "${ramdisk_csum}";
+ algo = "$ramdisk_csum";
};
};
EOF
- if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${FIT_SIGN_INDIVIDUAL}" = "1" -a -n "${ramdisk_sign_keyname}" ] ; then
- sed -i '$ d' ${1}
- cat << EOF >> ${1}
+ if [ "${UBOOT_SIGN_ENABLE}" = "1" -a "${FIT_SIGN_INDIVIDUAL}" = "1" -a -n "$ramdisk_sign_keyname" ] ; then
+ sed -i '$ d' $1
+ cat << EOF >> $1
signature-1 {
- algo = "${ramdisk_csum},${ramdisk_sign_algo}";
- key-name-hint = "${ramdisk_sign_keyname}";
+ algo = "$ramdisk_csum,$ramdisk_sign_algo";
+ key-name-hint = "$ramdisk_sign_keyname";
};
};
EOF
@@ -335,13 +343,13 @@ fitimage_emit_section_config() {
conf_sign_keyname="${UBOOT_SIGN_KEYNAME}"
fi
- its_file="${1}"
- kernel_id="${2}"
- dtb_image="${3}"
- ramdisk_id="${4}"
- bootscr_id="${5}"
- config_id="${6}"
- default_flag="${7}"
+ its_file="$1"
+ kernel_id="$2"
+ dtb_image="$3"
+ ramdisk_id="$4"
+ bootscr_id="$5"
+ config_id="$6"
+ default_flag="$7"
# Test if we have any DTBs at all
sep=""
@@ -356,106 +364,106 @@ fitimage_emit_section_config() {
# conf node name is selected based on dtb ID if it is present,
# otherwise it's selected based on kernel ID
- if [ -n "${dtb_image}" ]; then
- conf_node=$conf_node${dtb_image}
+ if [ -n "$dtb_image" ]; then
+ conf_node=$conf_node$dtb_image
else
- conf_node=$conf_node${kernel_id}
+ conf_node=$conf_node$kernel_id
fi
- if [ -n "${kernel_id}" ]; then
+ if [ -n "$kernel_id" ]; then
conf_desc="Linux kernel"
sep=", "
- kernel_line="kernel = \"kernel-${kernel_id}\";"
+ kernel_line="kernel = \"kernel-$kernel_id\";"
fi
- if [ -n "${dtb_image}" ]; then
- conf_desc="${conf_desc}${sep}FDT blob"
+ if [ -n "$dtb_image" ]; then
+ conf_desc="$conf_desc${sep}FDT blob"
sep=", "
- fdt_line="fdt = \"fdt-${dtb_image}\";"
+ fdt_line="fdt = \"fdt-$dtb_image\";"
fi
- if [ -n "${ramdisk_id}" ]; then
- conf_desc="${conf_desc}${sep}ramdisk"
+ if [ -n "$ramdisk_id" ]; then
+ conf_desc="$conf_desc${sep}ramdisk"
sep=", "
- ramdisk_line="ramdisk = \"ramdisk-${ramdisk_id}\";"
+ ramdisk_line="ramdisk = \"ramdisk-$ramdisk_id\";"
fi
- if [ -n "${bootscr_id}" ]; then
- conf_desc="${conf_desc}${sep}u-boot script"
+ if [ -n "$bootscr_id" ]; then
+ conf_desc="$conf_desc${sep}u-boot script"
sep=", "
- bootscr_line="bootscr = \"bootscr-${bootscr_id}\";"
+ bootscr_line="bootscr = \"bootscr-$bootscr_id\";"
fi
- if [ -n "${config_id}" ]; then
- conf_desc="${conf_desc}${sep}setup"
- setup_line="setup = \"setup-${config_id}\";"
+ if [ -n "$config_id" ]; then
+ conf_desc="$conf_desc${sep}setup"
+ setup_line="setup = \"setup-$config_id\";"
fi
- if [ "${default_flag}" = "1" ]; then
+ if [ "$default_flag" = "1" ]; then
# default node is selected based on dtb ID if it is present,
# otherwise it's selected based on kernel ID
- if [ -n "${dtb_image}" ]; then
- default_line="default = \"conf-${dtb_image}\";"
+ if [ -n "$dtb_image" ]; then
+ default_line="default = \"conf-$dtb_image\";"
else
- default_line="default = \"conf-${kernel_id}\";"
+ default_line="default = \"conf-$kernel_id\";"
fi
fi
- cat << EOF >> ${its_file}
- ${default_line}
+ cat << EOF >> $its_file
+ $default_line
$conf_node {
- description = "${default_flag} ${conf_desc}";
- ${kernel_line}
- ${fdt_line}
- ${ramdisk_line}
- ${bootscr_line}
- ${setup_line}
+ description = "$default_flag $conf_desc";
+ $kernel_line
+ $fdt_line
+ $ramdisk_line
+ $bootscr_line
+ $setup_line
hash-1 {
- algo = "${conf_csum}";
+ algo = "$conf_csum";
};
EOF
- if [ ! -z "${conf_sign_keyname}" ] ; then
+ if [ -n "$conf_sign_keyname" ] ; then
sign_line="sign-images = "
sep=""
- if [ -n "${kernel_id}" ]; then
- sign_line="${sign_line}${sep}\"kernel\""
+ if [ -n "$kernel_id" ]; then
+ sign_line="$sign_line${sep}\"kernel\""
sep=", "
fi
- if [ -n "${dtb_image}" ]; then
- sign_line="${sign_line}${sep}\"fdt\""
+ if [ -n "$dtb_image" ]; then
+ sign_line="$sign_line${sep}\"fdt\""
sep=", "
fi
- if [ -n "${ramdisk_id}" ]; then
- sign_line="${sign_line}${sep}\"ramdisk\""
+ if [ -n "$ramdisk_id" ]; then
+ sign_line="$sign_line${sep}\"ramdisk\""
sep=", "
fi
- if [ -n "${bootscr_id}" ]; then
- sign_line="${sign_line}${sep}\"bootscr\""
+ if [ -n "$bootscr_id" ]; then
+ sign_line="$sign_line${sep}\"bootscr\""
sep=", "
fi
- if [ -n "${config_id}" ]; then
- sign_line="${sign_line}${sep}\"setup\""
+ if [ -n "$config_id" ]; then
+ sign_line="$sign_line${sep}\"setup\""
fi
- sign_line="${sign_line};"
+ sign_line="$sign_line;"
- cat << EOF >> ${its_file}
+ cat << EOF >> $its_file
signature-1 {
- algo = "${conf_csum},${conf_sign_algo}";
- key-name-hint = "${conf_sign_keyname}";
- ${sign_line}
+ algo = "$conf_csum,$conf_sign_algo";
+ key-name-hint = "$conf_sign_keyname";
+ $sign_line
};
EOF
fi
- cat << EOF >> ${its_file}
+ cat << EOF >> $its_file
};
EOF
}
@@ -470,35 +478,24 @@ fitimage_assemble() {
kernelcount=1
dtbcount=""
DTBS=""
- ramdiskcount=${3}
+ ramdiskcount=$3
setupcount=""
bootscr_id=""
- rm -f ${1} arch/${ARCH}/boot/${2}
+ rm -f $1 arch/${ARCH}/boot/$2
+
+ if [ -n "${UBOOT_SIGN_IMG_KEYNAME}" -a "${UBOOT_SIGN_KEYNAME}" = "${UBOOT_SIGN_IMG_KEYNAME}" ]; then
+ bbfatal "Keys used to sign images and configuration nodes must be different."
+ fi
- fitimage_emit_fit_header ${1}
+ fitimage_emit_fit_header $1
#
# Step 1: Prepare a kernel image section.
#
- fitimage_emit_section_maint ${1} imagestart
+ fitimage_emit_section_maint $1 imagestart
uboot_prep_kimage
-
- if [ "${INITRAMFS_IMAGE_BUNDLE}" = "1" ]; then
- initramfs_bundle_path="arch/"${UBOOT_ARCH}"/boot/"${KERNEL_IMAGETYPE_REPLACEMENT}".initramfs"
- if [ -e "${initramfs_bundle_path}" ]; then
-
- #
- # Include the kernel/rootfs bundle.
- #
-
- fitimage_emit_section_kernel ${1} "${kernelcount}" "${initramfs_bundle_path}" "${linux_comp}"
- else
- bbwarn "${initramfs_bundle_path} not found."
- fi
- else
- fitimage_emit_section_kernel ${1} "${kernelcount}" linux.bin "${linux_comp}"
- fi
+ fitimage_emit_section_kernel $1 $kernelcount linux.bin "$linux_comp"
#
# Step 2: Prepare a DTB image section
@@ -507,9 +504,9 @@ fitimage_assemble() {
if [ -n "${KERNEL_DEVICETREE}" ]; then
dtbcount=1
for DTB in ${KERNEL_DEVICETREE}; do
- if echo ${DTB} | grep -q '/dts/'; then
- bbwarn "${DTB} contains the full path to the the dts file, but only the dtb name should be used."
- DTB=`basename ${DTB} | sed 's,\.dts$,.dtb,g'`
+ if echo $DTB | grep -q '/dts/'; then
+ bbwarn "$DTB contains the full path to the dts file, but only the dtb name should be used."
+ DTB=`basename $DTB | sed 's,\.dts$,.dtb,g'`
fi
# Skip ${DTB} if it's also provided in ${EXTERNAL_KERNEL_DEVICETREE}
@@ -517,23 +514,23 @@ fitimage_assemble() {
continue
fi
- DTB_PATH="arch/${ARCH}/boot/dts/${DTB}"
- if [ ! -e "${DTB_PATH}" ]; then
- DTB_PATH="arch/${ARCH}/boot/${DTB}"
+ DTB_PATH="arch/${ARCH}/boot/dts/$DTB"
+ if [ ! -e "$DTB_PATH" ]; then
+ DTB_PATH="arch/${ARCH}/boot/$DTB"
fi
- DTB=$(echo "${DTB}" | tr '/' '_')
- DTBS="${DTBS} ${DTB}"
- fitimage_emit_section_dtb ${1} ${DTB} ${DTB_PATH}
+ DTB=$(echo "$DTB" | tr '/' '_')
+ DTBS="$DTBS $DTB"
+ fitimage_emit_section_dtb $1 $DTB $DTB_PATH
done
fi
if [ -n "${EXTERNAL_KERNEL_DEVICETREE}" ]; then
dtbcount=1
for DTB in $(find "${EXTERNAL_KERNEL_DEVICETREE}" \( -name '*.dtb' -o -name '*.dtbo' \) -printf '%P\n' | sort); do
- DTB=$(echo "${DTB}" | tr '/' '_')
- DTBS="${DTBS} ${DTB}"
- fitimage_emit_section_dtb ${1} ${DTB} "${EXTERNAL_KERNEL_DEVICETREE}/${DTB}"
+ DTB=$(echo "$DTB" | tr '/' '_')
+ DTBS="$DTBS $DTB"
+ fitimage_emit_section_dtb $1 $DTB "${EXTERNAL_KERNEL_DEVICETREE}/$DTB"
done
fi
@@ -545,7 +542,7 @@ fitimage_assemble() {
if [ -e "${STAGING_DIR_HOST}/boot/${UBOOT_ENV_BINARY}" ]; then
cp ${STAGING_DIR_HOST}/boot/${UBOOT_ENV_BINARY} ${B}
bootscr_id="${UBOOT_ENV_BINARY}"
- fitimage_emit_section_boot_script ${1} "${bootscr_id}" ${UBOOT_ENV_BINARY}
+ fitimage_emit_section_boot_script $1 "$bootscr_id" ${UBOOT_ENV_BINARY}
else
bbwarn "${STAGING_DIR_HOST}/boot/${UBOOT_ENV_BINARY} not found."
fi
@@ -556,7 +553,7 @@ fitimage_assemble() {
#
if [ -e arch/${ARCH}/boot/setup.bin ]; then
setupcount=1
- fitimage_emit_section_setup ${1} "${setupcount}" arch/${ARCH}/boot/setup.bin
+ fitimage_emit_section_setup $1 $setupcount arch/${ARCH}/boot/setup.bin
fi
#
@@ -564,28 +561,31 @@ fitimage_assemble() {
#
if [ "x${ramdiskcount}" = "x1" ] && [ "${INITRAMFS_IMAGE_BUNDLE}" != "1" ]; then
# Find and use the first initramfs image archive type we find
- for img in cpio.lz4 cpio.lzo cpio.lzma cpio.xz cpio.gz ext2.gz cpio; do
- initramfs_path="${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE_NAME}.${img}"
- echo "Using $initramfs_path"
- if [ -e "${initramfs_path}" ]; then
- fitimage_emit_section_ramdisk ${1} "${ramdiskcount}" "${initramfs_path}"
+ for img in cpio.lz4 cpio.lzo cpio.lzma cpio.xz cpio.zst cpio.gz ext2.gz cpio; do
+ initramfs_path="${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE_NAME}.$img"
+ echo -n "Searching for $initramfs_path..."
+ if [ -e "$initramfs_path" ]; then
+ echo "found"
+ fitimage_emit_section_ramdisk $1 "$ramdiskcount" "$initramfs_path"
break
+ else
+ echo "not found"
fi
done
fi
- fitimage_emit_section_maint ${1} sectend
+ fitimage_emit_section_maint $1 sectend
# Force the first Kernel and DTB in the default config
kernelcount=1
- if [ -n "${dtbcount}" ]; then
+ if [ -n "$dtbcount" ]; then
dtbcount=1
fi
#
# Step 6: Prepare a configurations section
#
- fitimage_emit_section_maint ${1} confstart
+ fitimage_emit_section_maint $1 confstart
# kernel-fitimage.bbclass currently only supports a single kernel (no less or
# more) to be added to the FIT image along with 0 or more device trees and
@@ -596,33 +596,33 @@ fitimage_assemble() {
# the default configuration to be used is based on the dtbcount. If there is
# no dtb present then select the default configuration to be based on
# the kernelcount.
- if [ -n "${DTBS}" ]; then
+ if [ -n "$DTBS" ]; then
i=1
for DTB in ${DTBS}; do
dtb_ext=${DTB##*.}
- if [ "${dtb_ext}" = "dtbo" ]; then
- fitimage_emit_section_config ${1} "" "${DTB}" "" "${bootscr_id}" "" "`expr ${i} = ${dtbcount}`"
+ if [ "$dtb_ext" = "dtbo" ]; then
+ fitimage_emit_section_config $1 "" "$DTB" "" "$bootscr_id" "" "`expr $i = $dtbcount`"
else
- fitimage_emit_section_config ${1} "${kernelcount}" "${DTB}" "${ramdiskcount}" "${bootscr_id}" "${setupcount}" "`expr ${i} = ${dtbcount}`"
+ fitimage_emit_section_config $1 $kernelcount "$DTB" "$ramdiskcount" "$bootscr_id" "$setupcount" "`expr $i = $dtbcount`"
fi
- i=`expr ${i} + 1`
+ i=`expr $i + 1`
done
else
defaultconfigcount=1
- fitimage_emit_section_config ${1} "${kernelcount}" "" "${ramdiskcount}" "${bootscr_id}" "${setupcount}" "${defaultconfigcount}"
+ fitimage_emit_section_config $1 $kernelcount "" "$ramdiskcount" "$bootscr_id" "$setupcount" $defaultconfigcount
fi
- fitimage_emit_section_maint ${1} sectend
+ fitimage_emit_section_maint $1 sectend
- fitimage_emit_section_maint ${1} fitend
+ fitimage_emit_section_maint $1 fitend
#
# Step 7: Assemble the image
#
${UBOOT_MKIMAGE} \
${@'-D "${UBOOT_MKIMAGE_DTCOPTS}"' if len('${UBOOT_MKIMAGE_DTCOPTS}') else ''} \
- -f ${1} \
- arch/${ARCH}/boot/${2}
+ -f $1 \
+ arch/${ARCH}/boot/$2
#
# Step 8: Sign the image and add public key to U-Boot dtb
@@ -639,7 +639,7 @@ fitimage_assemble() {
${@'-D "${UBOOT_MKIMAGE_DTCOPTS}"' if len('${UBOOT_MKIMAGE_DTCOPTS}') else ''} \
-F -k "${UBOOT_SIGN_KEYDIR}" \
$add_key_to_u_boot \
- -r arch/${ARCH}/boot/${2} \
+ -r arch/${ARCH}/boot/$2 \
${UBOOT_MKIMAGE_SIGN_ARGS}
fi
}
@@ -647,7 +647,7 @@ fitimage_assemble() {
do_assemble_fitimage() {
if echo ${KERNEL_IMAGETYPES} | grep -wq "fitImage"; then
cd ${B}
- fitimage_assemble fit-image.its fitImage
+ fitimage_assemble fit-image.its fitImage ""
fi
}
@@ -674,7 +674,7 @@ do_kernel_generate_rsa_keys() {
if [ "${UBOOT_SIGN_ENABLE}" = "1" ] && [ "${FIT_GENERATE_KEYS}" = "1" ]; then
- # Generate keys only if they don't already exist
+ # Generate keys to sign configuration nodes, only if they don't already exist
if [ ! -f "${UBOOT_SIGN_KEYDIR}/${UBOOT_SIGN_KEYNAME}".key ] || \
[ ! -f "${UBOOT_SIGN_KEYDIR}/${UBOOT_SIGN_KEYNAME}".crt ]; then
@@ -691,6 +691,24 @@ do_kernel_generate_rsa_keys() {
-key "${UBOOT_SIGN_KEYDIR}/${UBOOT_SIGN_KEYNAME}".key \
-out "${UBOOT_SIGN_KEYDIR}/${UBOOT_SIGN_KEYNAME}".crt
fi
+
+ # Generate keys to sign image nodes, only if they don't already exist
+ if [ ! -f "${UBOOT_SIGN_KEYDIR}/${UBOOT_SIGN_IMG_KEYNAME}".key ] || \
+ [ ! -f "${UBOOT_SIGN_KEYDIR}/${UBOOT_SIGN_IMG_KEYNAME}".crt ]; then
+
+ # make directory if it does not already exist
+ mkdir -p "${UBOOT_SIGN_KEYDIR}"
+
+ echo "Generating RSA private key for signing fitImage"
+ openssl genrsa ${FIT_KEY_GENRSA_ARGS} -out \
+ "${UBOOT_SIGN_KEYDIR}/${UBOOT_SIGN_IMG_KEYNAME}".key \
+ "${FIT_SIGN_NUMBITS}"
+
+ echo "Generating certificate for signing fitImage"
+ openssl req ${FIT_KEY_REQ_ARGS} "${FIT_KEY_SIGN_PKCS}" \
+ -key "${UBOOT_SIGN_KEYDIR}/${UBOOT_SIGN_IMG_KEYNAME}".key \
+ -out "${UBOOT_SIGN_KEYDIR}/${UBOOT_SIGN_IMG_KEYNAME}".crt
+ fi
fi
}
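
do_kernel_generate_rsa_keys() now creates a second key pair, named by UBOOT_SIGN_IMG_KEYNAME, for signing image nodes, while the existing UBOOT_SIGN_KEYNAME pair keeps signing configuration nodes; fitimage_assemble() refuses to run when the two names are identical. A rough Python equivalent of the generate-only-if-missing logic (the openssl arguments are example values, not the class defaults):

    import os
    import subprocess

    def ensure_keypair(keydir, keyname, numbits="2048",
                       req_args=("-batch", "-new"), sign_pkcs="-x509"):
        key = os.path.join(keydir, keyname + ".key")
        crt = os.path.join(keydir, keyname + ".crt")
        if os.path.isfile(key) and os.path.isfile(crt):
            return  # both artifacts already exist, nothing to do
        os.makedirs(keydir, exist_ok=True)
        subprocess.check_call(["openssl", "genrsa", "-out", key, numbits])
        subprocess.check_call(["openssl", "req", *req_args, sign_pkcs,
                               "-key", key, "-out", crt])

    # One key pair for configuration nodes, a different one for image nodes,
    # matching the check added to fitimage_assemble().
    ensure_keypair("keys", "dev-config")
    ensure_keypair("keys", "dev-image")
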
diff --git a/meta/classes/kernel-module-split.bbclass b/meta/classes/kernel-module-split.bbclass
index b56dd4a9c7..a29c294810 100644
--- a/meta/classes/kernel-module-split.bbclass
+++ b/meta/classes/kernel-module-split.bbclass
@@ -28,6 +28,7 @@ do_install:append() {
install -d ${D}${sysconfdir}/modules-load.d/ ${D}${sysconfdir}/modprobe.d/
}
+KERNEL_SPLIT_MODULES ?= "1"
PACKAGESPLITFUNCS:prepend = "split_kernel_module_packages "
KERNEL_MODULES_META_PACKAGE ?= "${@ d.getVar("KERNEL_PACKAGE_NAME") or "kernel" }-modules"
@@ -44,17 +45,20 @@ python split_kernel_module_packages () {
def extract_modinfo(file):
import tempfile, subprocess
tempfile.tempdir = d.getVar("WORKDIR")
- compressed = re.match( r'.*\.([xg])z$', file)
+ compressed = re.match( r'.*\.(gz|xz|zst)$', file)
tf = tempfile.mkstemp()
tmpfile = tf[1]
if compressed:
tmpkofile = tmpfile + ".ko"
- if compressed.group(1) == 'g':
+ if compressed.group(1) == 'gz':
cmd = "gunzip -dc %s > %s" % (file, tmpkofile)
subprocess.check_call(cmd, shell=True)
- elif compressed.group(1) == 'x':
+ elif compressed.group(1) == 'xz':
cmd = "xz -dc %s > %s" % (file, tmpkofile)
subprocess.check_call(cmd, shell=True)
+ elif compressed.group(1) == 'zst':
+ cmd = "zstd -dc %s > %s" % (file, tmpkofile)
+ subprocess.check_call(cmd, shell=True)
else:
msg = "Cannot decompress '%s'" % file
raise ValueError(msg)
@@ -153,18 +157,26 @@ python split_kernel_module_packages () {
kernel_package_name = d.getVar("KERNEL_PACKAGE_NAME") or "kernel"
kernel_version = d.getVar("KERNEL_VERSION")
- module_regex = r'^(.*)\.k?o(?:\.[xg]z)?$'
+ metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE')
+ splitmods = d.getVar('KERNEL_SPLIT_MODULES')
+ postinst = d.getVar('pkg_postinst:modules')
+ postrm = d.getVar('pkg_postrm:modules')
+
+ if splitmods != '1':
+ etcdir = d.getVar('sysconfdir')
+ d.appendVar('FILES:' + metapkg, '%s/modules-load.d/ %s/modprobe.d/ %s/modules/' % (etcdir, etcdir, d.getVar("nonarch_base_libdir")))
+ d.appendVar('pkg_postinst:%s' % metapkg, postinst)
+ d.prependVar('pkg_postrm:%s' % metapkg, postrm)
+ return
+
+ module_regex = r'^(.*)\.k?o(?:\.(gz|xz|zst))?$'
module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX')
module_pattern_suffix = d.getVar('KERNEL_MODULE_PACKAGE_SUFFIX')
module_pattern = module_pattern_prefix + kernel_package_name + '-module-%s' + module_pattern_suffix
- postinst = d.getVar('pkg_postinst:modules')
- postrm = d.getVar('pkg_postrm:modules')
-
modules = do_split_packages(d, root='${nonarch_base_libdir}/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='%s-%s' % (kernel_package_name, kernel_version))
if modules:
- metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE')
d.appendVar('RDEPENDS:' + metapkg, ' '+' '.join(modules))
# If modules-load.d and modprobe.d are empty at this point, remove them to
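
extract_modinfo() and the module split regex above now recognise .gz, .xz and .zst compressed modules and pick the matching decompressor. A small standalone Python check of the new regular expression (example module names only):

    import re

    module_regex = r'^(.*)\.k?o(?:\.(gz|xz|zst))?$'
    decompressor = {"gz": "gunzip -dc", "xz": "xz -dc", "zst": "zstd -dc"}

    for name in ("nls_cp437.ko", "crc32.ko.xz", "8021q.ko.zst"):
        m = re.match(module_regex, name)
        suffix = m.group(2)                      # None for an uncompressed .ko
        cmd = decompressor.get(suffix, "cat")    # "cat" stands in for "no decompression"
        print(name, "->", m.group(1), cmd)
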
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass
index 549dfd97a4..1d5a8cdf29 100644
--- a/meta/classes/kernel-yocto.bbclass
+++ b/meta/classes/kernel-yocto.bbclass
@@ -36,7 +36,10 @@ def find_patches(d,subdir):
if subdir == patchdir:
patch_list.append(local)
else:
- patch_list.append(local)
+ # skip the patch if a patchdir was supplied; it won't be handled
+ # properly
+ if not patchdir:
+ patch_list.append(local)
return patch_list
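
The change above makes find_patches() drop any patch that carries its own patchdir parameter when no specific subdir is requested, since such patches would not be applied correctly. A self-contained Python sketch of the same filtering rule, using (path, patchdir) pairs in place of the fetcher data:

    def find_patches(sources, subdir=""):
        patch_list = []
        for local, patchdir in sources:   # patchdir is "" when the SRC_URI entry has none
            if subdir:
                if subdir == patchdir:
                    patch_list.append(local)
            elif not patchdir:
                # a patch with its own patchdir is skipped here; it won't be handled properly
                patch_list.append(local)
        return patch_list

    print(find_patches([("a.patch", ""), ("b.patch", "kernel-meta")]))                 # ['a.patch']
    print(find_patches([("a.patch", ""), ("b.patch", "kernel-meta")], "kernel-meta"))  # ['b.patch']
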
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index 027e66eec7..7b3c6bee96 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -9,6 +9,7 @@ PROVIDES += "${@ "virtual/kernel" if (d.getVar("KERNEL_PACKAGE_NAME") == "kernel
DEPENDS += "virtual/${TARGET_PREFIX}binutils virtual/${TARGET_PREFIX}gcc kmod-native bc-native bison-native"
DEPENDS += "${@bb.utils.contains("INITRAMFS_FSTYPES", "cpio.lzo", "lzop-native", "", d)}"
DEPENDS += "${@bb.utils.contains("INITRAMFS_FSTYPES", "cpio.lz4", "lz4-native", "", d)}"
+DEPENDS += "${@bb.utils.contains("INITRAMFS_FSTYPES", "cpio.zst", "zstd-native", "", d)}"
PACKAGE_WRITE_DEPS += "depmodwrapper-cross"
do_deploy[depends] += "depmodwrapper-cross:do_populate_sysroot gzip-native:do_populate_sysroot"
@@ -97,7 +98,14 @@ python __anonymous () {
typelower = type.lower()
d.appendVar('PACKAGES', ' %s-image-%s' % (kname, typelower))
d.setVar('FILES:' + kname + '-image-' + typelower, '/' + imagedest + '/' + type + '-${KERNEL_VERSION_NAME}' + ' /' + imagedest + '/' + type)
- d.appendVar('RDEPENDS:%s-image' % kname, ' %s-image-%s' % (kname, typelower))
+ d.appendVar('RDEPENDS:%s-image' % kname, ' %s-image-%s (= ${EXTENDPKGV})' % (kname, typelower))
+ splitmods = d.getVar("KERNEL_SPLIT_MODULES")
+ if splitmods != '1':
+ d.appendVar('RDEPENDS:%s-image' % kname, ' %s-modules (= ${EXTENDPKGV})' % kname)
+ d.appendVar('RDEPENDS:%s-image-%s' % (kname, typelower), ' %s-modules-${KERNEL_VERSION_PKG_NAME} (= ${EXTENDPKGV})' % kname)
+ d.setVar('PKG:%s-modules' % kname, '%s-modules-${KERNEL_VERSION_PKG_NAME}' % kname)
+ d.appendVar('RPROVIDES:%s-modules' % kname, '%s-modules-${KERNEL_VERSION_PKG_NAME}' % kname)
+
d.setVar('PKG:%s-image-%s' % (kname,typelower), '%s-image-%s-${KERNEL_VERSION_PKG_NAME}' % (kname, typelower))
d.setVar('ALLOW_EMPTY:%s-image-%s' % (kname, typelower), '1')
d.setVar('pkg_postinst:%s-image-%s' % (kname,typelower), """set +e
@@ -218,8 +226,8 @@ UBOOT_LOADADDRESS ?= "${UBOOT_ENTRYPOINT}"
# Some Linux kernel configurations need additional parameters on the command line
KERNEL_EXTRA_ARGS ?= ""
-EXTRA_OEMAKE = " HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" HOSTCPP="${BUILD_CPP}""
-EXTRA_OEMAKE += " HOSTCXX="${BUILD_CXX} ${BUILD_CXXFLAGS} ${BUILD_LDFLAGS}""
+EXTRA_OEMAKE = " HOSTCC="${BUILD_CC}" HOSTCFLAGS="${BUILD_CFLAGS}" HOSTLDFLAGS="${BUILD_LDFLAGS}" HOSTCPP="${BUILD_CPP}""
+EXTRA_OEMAKE += " HOSTCXX="${BUILD_CXX}" HOSTCXXFLAGS="${BUILD_CXXFLAGS}""
KERNEL_ALT_IMAGETYPE ??= ""
@@ -229,7 +237,7 @@ copy_initramfs() {
mkdir -p ${B}/usr
# Find and use the first initramfs image archive type we find
rm -f ${B}/usr/${INITRAMFS_IMAGE_NAME}.cpio
- for img in cpio cpio.gz cpio.lz4 cpio.lzo cpio.lzma cpio.xz; do
+ for img in cpio cpio.gz cpio.lz4 cpio.lzo cpio.lzma cpio.xz cpio.zst; do
if [ -e "${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE_NAME}.$img" ]; then
cp ${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE_NAME}.$img ${B}/usr/.
case $img in
@@ -258,12 +266,17 @@ copy_initramfs() {
xz -df ${B}/usr/${INITRAMFS_IMAGE_NAME}.$img
break
;;
+ *zst)
+ echo "zst decompressing image"
+ zstd -df ${B}/usr/${INITRAMFS_IMAGE_NAME}.$img
+ break
+ ;;
esac
break
fi
done
# Verify that the above loop found an initramfs, fail otherwise
- [ -f ${B}/usr/${INITRAMFS_IMAGE_NAME}.cpio ] && echo "Finished copy of initramfs into ./usr" || die "Could not find any ${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE_NAME}.cpio{.gz|.lz4|.lzo|.lzma|.xz) for bundling; INITRAMFS_IMAGE_NAME might be wrong."
+ [ -f ${B}/usr/${INITRAMFS_IMAGE_NAME}.cpio ] && echo "Finished copy of initramfs into ./usr" || die "Could not find any ${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE_NAME}.cpio{.gz|.lz4|.lzo|.lzma|.xz|.zst} for bundling; INITRAMFS_IMAGE_NAME might be wrong."
}
do_bundle_initramfs () {
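
copy_initramfs() above now also looks for a cpio.zst archive and decompresses it with zstd, mirroring the new cpio.zst entry in the fitImage ramdisk search. A Python sketch of the lookup order and the tool each suffix maps to (paths and image name are example values):

    import os

    SEARCH_ORDER = ["cpio", "cpio.gz", "cpio.lz4", "cpio.lzo",
                    "cpio.lzma", "cpio.xz", "cpio.zst"]
    DECOMPRESSOR = {"gz": "gzip", "lz4": "lz4", "lzo": "lzop",
                    "lzma": "lzma", "xz": "xz", "zst": "zstd"}

    def find_initramfs(deploy_dir, image_name):
        # Return the first archive that exists plus the tool needed to unpack it.
        for ext in SEARCH_ORDER:
            path = os.path.join(deploy_dir, "%s.%s" % (image_name, ext))
            if os.path.exists(path):
                return path, DECOMPRESSOR.get(ext.rpartition(".")[2])  # None: plain cpio
        return None, None

    print(find_initramfs("/tmp/deploy/images/qemux86-64", "core-image-minimal-initramfs"))
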
@@ -309,18 +322,11 @@ python do_devshell:prepend () {
addtask bundle_initramfs after do_install before do_deploy
-get_cc_option () {
- # Check if KERNEL_CC supports the option "file-prefix-map".
- # This option allows us to build images with __FILE__ values that do not
- # contain the host build path.
- if ${KERNEL_CC} -Q --help=joined | grep -q "\-ffile-prefix-map=<old=new>"; then
- echo "-ffile-prefix-map=${S}=/kernel-source/"
- fi
-}
+KERNEL_DEBUG_TIMESTAMPS ??= "0"
kernel_do_compile() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
- if [ "${BUILD_REPRODUCIBLE_BINARIES}" = "1" ]; then
+ if [ "${KERNEL_DEBUG_TIMESTAMPS}" != "1" ]; then
# kernel sources do not use do_unpack, so SOURCE_DATE_EPOCH may not
# be set....
if [ "${SOURCE_DATE_EPOCH}" = "" -o "${SOURCE_DATE_EPOCH}" = "0" ]; then
@@ -348,9 +354,8 @@ kernel_do_compile() {
copy_initramfs
use_alternate_initrd=CONFIG_INITRAMFS_SOURCE=${B}/usr/${INITRAMFS_IMAGE_NAME}.cpio
fi
- cc_extra=$(get_cc_option)
for typeformake in ${KERNEL_IMAGETYPE_FOR_MAKE} ; do
- oe_runmake ${typeformake} CC="${KERNEL_CC} $cc_extra " LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS} $use_alternate_initrd
+ oe_runmake ${typeformake} CC="${KERNEL_CC}" LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS} $use_alternate_initrd
done
# vmlinux.gz is not built by kernel
if (echo "${KERNEL_IMAGETYPES}" | grep -wq "vmlinux\.gz"); then
@@ -361,7 +366,7 @@ kernel_do_compile() {
do_compile_kernelmodules() {
unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE
- if [ "${BUILD_REPRODUCIBLE_BINARIES}" = "1" ]; then
+ if [ "${KERNEL_DEBUG_TIMESTAMPS}" != "1" ]; then
# kernel sources do not use do_unpack, so SOURCE_DATE_EPOCH may not
# be set....
if [ "${SOURCE_DATE_EPOCH}" = "" -o "${SOURCE_DATE_EPOCH}" = "0" ]; then
@@ -377,8 +382,7 @@ do_compile_kernelmodules() {
bbnote "KBUILD_BUILD_TIMESTAMP: $ts"
fi
if (grep -q -i -e '^CONFIG_MODULES=y$' ${B}/.config); then
- cc_extra=$(get_cc_option)
- oe_runmake -C ${B} ${PARALLEL_MAKE} modules CC="${KERNEL_CC} $cc_extra " LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS}
+ oe_runmake -C ${B} ${PARALLEL_MAKE} modules CC="${KERNEL_CC}" LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS}
# Module.symvers gets updated during the
# building of the kernel modules. We need to
@@ -621,12 +625,12 @@ FILES:${KERNEL_PACKAGE_NAME}-image = ""
FILES:${KERNEL_PACKAGE_NAME}-dev = "/boot/System.map* /boot/Module.symvers* /boot/config* ${KERNEL_SRC_PATH} ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/build"
FILES:${KERNEL_PACKAGE_NAME}-vmlinux = "/boot/vmlinux-${KERNEL_VERSION_NAME}"
FILES:${KERNEL_PACKAGE_NAME}-modules = ""
-RDEPENDS:${KERNEL_PACKAGE_NAME} = "${KERNEL_PACKAGE_NAME}-base"
+RDEPENDS:${KERNEL_PACKAGE_NAME} = "${KERNEL_PACKAGE_NAME}-base (= ${EXTENDPKGV})"
# Allow machines to override this dependency if kernel image files are
# not wanted in images as standard
-RDEPENDS:${KERNEL_PACKAGE_NAME}-base ?= "${KERNEL_PACKAGE_NAME}-image"
+RDEPENDS:${KERNEL_PACKAGE_NAME}-base ?= "${KERNEL_PACKAGE_NAME}-image (= ${EXTENDPKGV})"
PKG:${KERNEL_PACKAGE_NAME}-image = "${KERNEL_PACKAGE_NAME}-image-${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}"
-RDEPENDS:${KERNEL_PACKAGE_NAME}-image += "${@oe.utils.conditional('KERNEL_IMAGETYPE', 'vmlinux', '${KERNEL_PACKAGE_NAME}-vmlinux', '', d)}"
+RDEPENDS:${KERNEL_PACKAGE_NAME}-image += "${@oe.utils.conditional('KERNEL_IMAGETYPE', 'vmlinux', '${KERNEL_PACKAGE_NAME}-vmlinux (= ${EXTENDPKGV})', '', d)}"
PKG:${KERNEL_PACKAGE_NAME}-base = "${KERNEL_PACKAGE_NAME}-${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}"
RPROVIDES:${KERNEL_PACKAGE_NAME}-base += "${KERNEL_PACKAGE_NAME}-${KERNEL_VERSION}"
ALLOW_EMPTY:${KERNEL_PACKAGE_NAME} = "1"
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass
index 7a661d44bd..13ef8cdc0d 100644
--- a/meta/classes/libc-package.bbclass
+++ b/meta/classes/libc-package.bbclass
@@ -355,7 +355,7 @@ python package_do_split_gconvs () {
m.write("\t@echo 'Progress %d/%d'\n" % (i, total))
m.write("\t" + makerecipe + "\n\n")
d.setVar("EXTRA_OEMAKE", "-C %s ${PARALLEL_MAKE}" % (os.path.dirname(makefile)))
- d.setVarFlag("oe_runmake", "progress", "outof:Progress\s(\d+)/(\d+)")
+ d.setVarFlag("oe_runmake", "progress", r"outof:Progress\s(\d+)/(\d+)")
bb.note("Executing binary locale generation makefile")
bb.build.exec_func("oe_runmake", d)
bb.note("collecting binary locales from locale tree")
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass
index fecbc49497..d5480d87e2 100644
--- a/meta/classes/license.bbclass
+++ b/meta/classes/license.bbclass
@@ -29,9 +29,10 @@ python do_populate_lic() {
with open(os.path.join(destdir, "recipeinfo"), "w") as f:
for key in sorted(info.keys()):
f.write("%s: %s\n" % (key, info[key]))
+ oe.qa.exit_if_errors(d)
}
-PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '')).split())}"
+PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '') + ' ' + d.getVar('COREBASE') + '/meta/COPYING').split())}"
# it would be better to copy them in do_install:append, but find_license_files is python
python perform_packagecopy:prepend () {
enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d)
@@ -145,6 +146,10 @@ def find_license_files(d):
find_license(node.s.replace("+", "").replace("*", ""))
self.generic_visit(node)
+ def visit_Constant(self, node):
+ find_license(node.value.replace("+", "").replace("*", ""))
+ self.generic_visit(node)
+
def find_license(license_type):
try:
bb.utils.mkdirhier(gen_lic_dest)
@@ -178,7 +183,8 @@ def find_license_files(d):
# The user may attempt to use NO_GENERIC_LICENSE for a generic license which doesn't make sense
# and should not be allowed, warn the user in this case.
if d.getVarFlag('NO_GENERIC_LICENSE', license_type):
- bb.warn("%s: %s is a generic license, please don't use NO_GENERIC_LICENSE for it." % (pn, license_type))
+ oe.qa.handle_error("license-no-generic",
+ "%s: %s is a generic license, please don't use NO_GENERIC_LICENSE for it." % (pn, license_type), d)
elif non_generic_lic and non_generic_lic in lic_chksums:
# if NO_GENERIC_LICENSE is set, we copy the license files from the fetched source
@@ -190,7 +196,8 @@ def find_license_files(d):
# Explicitly avoid the CLOSED license because it isn't generic
if license_type != 'CLOSED':
# And here is where we warn people that their licenses are lousy
- bb.warn("%s: No generic license file exists for: %s in any provider" % (pn, license_type))
+ oe.qa.handle_error("license-exists",
+ "%s: No generic license file exists for: %s in any provider" % (pn, license_type), d)
pass
if not generic_directory:
@@ -215,7 +222,8 @@ def find_license_files(d):
except oe.license.InvalidLicense as exc:
bb.fatal('%s: %s' % (d.getVar('PF'), exc))
except SyntaxError:
- bb.warn("%s: Failed to parse it's LICENSE field." % (d.getVar('PF')))
+ oe.qa.handle_error("license-syntax",
+ "%s: Failed to parse it's LICENSE field." % (d.getVar('PF')), d)
# Add files from LIC_FILES_CHKSUM to list of license files
lic_chksum_paths = defaultdict(OrderedDict)
for path, data in sorted(lic_chksums.items()):
@@ -406,14 +414,16 @@ def check_license_format(d):
for pos, element in enumerate(elements):
if license_pattern.match(element):
if pos > 0 and license_pattern.match(elements[pos - 1]):
- bb.warn('%s: LICENSE value "%s" has an invalid format - license names ' \
+ oe.qa.handle_error('license-format',
+ '%s: LICENSE value "%s" has an invalid format - license names ' \
'must be separated by the following characters to indicate ' \
'the license selection: %s' %
- (pn, licenses, license_operator_chars))
+ (pn, licenses, license_operator_chars), d)
elif not license_operator.match(element):
- bb.warn('%s: LICENSE value "%s" has an invalid separator "%s" that is not ' \
+ oe.qa.handle_error('license-format',
+ '%s: LICENSE value "%s" has an invalid separator "%s" that is not ' \
'in the valid list of separators (%s)' %
- (pn, licenses, element, license_operator_chars))
+ (pn, licenses, element, license_operator_chars), d)
SSTATETASKS += "do_populate_lic"
do_populate_lic[sstate-inputdirs] = "${LICSSTATEDIR}"
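
Throughout license.bbclass the plain bb.warn() calls become oe.qa.handle_error() calls with a named error class ("license-format", "license-exists", ...), and do_populate_lic finishes with oe.qa.exit_if_errors(), so the messages can be promoted from warnings to fatal errors through the usual QA configuration. A hedged sketch of that collect-then-fail shape; the real helpers live in meta/lib/oe/qa.py and consult WARN_QA/ERROR_QA, this only imitates the behaviour:

    class QAErrors:
        def __init__(self, error_classes):
            self.error_classes = set(error_classes)
            self.sane = True

        def handle_error(self, error_class, msg):
            if error_class in self.error_classes:
                print("ERROR: %s [%s]" % (msg, error_class))
                self.sane = False
            else:
                print("WARNING: %s [%s]" % (msg, error_class))

        def exit_if_errors(self):
            if not self.sane:
                raise SystemExit("Fatal QA errors were found, failing task.")

    qa = QAErrors(error_classes=["license-format"])
    qa.handle_error("license-exists", "foo: No generic license file exists for: Bar-1.0 in any provider")
    qa.handle_error("license-format", "foo: LICENSE value has an invalid format")
    qa.exit_if_errors()   # raises SystemExit because license-format is treated as an error
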
diff --git a/meta/classes/license_image.bbclass b/meta/classes/license_image.bbclass
index 5cec7a07fa..bf70bee99b 100644
--- a/meta/classes/license_image.bbclass
+++ b/meta/classes/license_image.bbclass
@@ -75,7 +75,7 @@ def write_license_files(d, license_manifest, pkg_dic, rootfs=True):
pkg_dic[pkg]["LICENSES"] = re.sub(r' *', ' ', pkg_dic[pkg]["LICENSES"])
pkg_dic[pkg]["LICENSES"] = pkg_dic[pkg]["LICENSES"].split()
if pkg in whitelist:
- bb.warn("Including %s with an incompatible license %s into the image, because it has been whitelisted." %(pkg, pkg_dic[pkg]["LICENSE"]))
+ oe.qa.handle_error('license-incompatible', "Including %s with an incompatible license %s into the image, because it has been whitelisted." %(pkg, pkg_dic[pkg]["LICENSE"]), d)
if not "IMAGE_MANIFEST" in pkg_dic[pkg]:
# Rootfs manifest
@@ -87,7 +87,7 @@ def write_license_files(d, license_manifest, pkg_dic, rootfs=True):
# If the package doesn't contain any file, that is, its size is 0, the license
# isn't relevant as far as the final image is concerned. So doing license check
# doesn't make much sense, skip it.
- if pkg_dic[pkg]["PKGSIZE_%s" % pkg] == "0":
+ if pkg_dic[pkg]["PKGSIZE:%s" % pkg] == "0":
continue
else:
# Image manifest
@@ -105,10 +105,10 @@ def write_license_files(d, license_manifest, pkg_dic, rootfs=True):
continue
if not os.path.exists(lic_file):
- bb.warn("The license listed %s was not in the "\
- "licenses collected for recipe %s"
- % (lic, pkg_dic[pkg]["PN"]))
-
+ oe.qa.handle_error('license-file-missing',
+ "The license listed %s was not in the "\
+ "licenses collected for recipe %s"
+ % (lic, pkg_dic[pkg]["PN"]), d)
# Two options here:
# - Just copy the manifest
# - Copy the manifest and the license directories
@@ -274,6 +274,7 @@ do_rootfs[recrdeptask] += "do_populate_lic"
python do_populate_lic_deploy() {
license_deployed_manifest(d)
+ oe.qa.exit_if_errors(d)
}
addtask populate_lic_deploy before do_build after do_image_complete
diff --git a/meta/classes/manpages.bbclass b/meta/classes/manpages.bbclass
index 3a96659460..64b7d8c422 100644
--- a/meta/classes/manpages.bbclass
+++ b/meta/classes/manpages.bbclass
@@ -12,7 +12,7 @@ MAN_PKG ?= "${PN}-doc"
# only add man-db to RDEPENDS when manual files are built and installed
RDEPENDS:${MAN_PKG} += "${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'man-db', '', d)}"
-pkg_postinst:append_${MAN_PKG} () {
+pkg_postinst:append:${MAN_PKG} () {
# only update manual page index caches when manual files are built and installed
if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then
if test -n "$D"; then
@@ -36,7 +36,7 @@ pkg_postinst:append_${MAN_PKG} () {
fi
}
-pkg_postrm:append_${MAN_PKG} () {
+pkg_postrm:append:${MAN_PKG} () {
# only update manual page index caches when manual files are built and installed
if ${@bb.utils.contains('PACKAGECONFIG', 'manpages', 'true', 'false', d)}; then
mandb -q
diff --git a/meta/classes/meson.bbclass b/meta/classes/meson.bbclass
index 2beed89d11..e124d18144 100644
--- a/meta/classes/meson.bbclass
+++ b/meta/classes/meson.bbclass
@@ -12,7 +12,8 @@ MESON_SOURCEPATH = "${S}"
def noprefix(var, d):
return d.getVar(var).replace(d.getVar('prefix') + '/', '', 1)
-MESON_BUILDTYPE ?= "plain"
+MESON_BUILDTYPE ?= "${@oe.utils.vartrue('DEBUG_BUILD', 'debug', 'plain', d)}"
+MESON_BUILDTYPE[vardeps] += "DEBUG_BUILD"
MESONOPTS = " --prefix ${prefix} \
--buildtype ${MESON_BUILDTYPE} \
--bindir ${@noprefix('bindir', d)} \
diff --git a/meta/classes/meta.bbclass b/meta/classes/meta.bbclass
deleted file mode 100644
index 5e6890238b..0000000000
--- a/meta/classes/meta.bbclass
+++ /dev/null
@@ -1,4 +0,0 @@
-
-PACKAGES = ""
-
-do_build[recrdeptask] = "do_build"
diff --git a/meta/classes/mirrors.bbclass b/meta/classes/mirrors.bbclass
index 87bba41472..1afce92147 100644
--- a/meta/classes/mirrors.bbclass
+++ b/meta/classes/mirrors.bbclass
@@ -29,7 +29,6 @@ ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN \n \
ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/ \n \
ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/ \n \
ftp://ftp.gnutls.org/gcrypt/gnutls ${GNUPG_MIRROR}/gnutls \n \
-http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/ \n \
http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/ \n \
http://www.mirrorservice.org/sites/lsof.itap.purdue.edu/pub/tools/unix/lsof/ http://www.mirrorservice.org/sites/lsof.itap.purdue.edu/pub/tools/unix/lsof/OLD/ \n \
${APACHE_MIRROR} http://www.us.apache.org/dist \n \
@@ -70,7 +69,6 @@ ${CPAN_MIRROR} http://search.cpan.org/CPAN/ \n \
MIRRORS += "\
git://salsa.debian.org/.* git://salsa.debian.org/PATH;protocol=https \n \
git://git.gnome.org/.* git://gitlab.gnome.org/GNOME/PATH;protocol=https \n \
-git://git.savannah.gnu.org/.* git://git.savannah.gnu.org/git/PATH;protocol=https \n \
-git://git.yoctoproject.org/.* git://git.yoctoproject.org/git/PATH;protocol=https \n \
git://.*/.* git://HOST/PATH;protocol=https \n \
+git://.*/.* git://HOST/git/PATH;protocol=https \n \
"
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index c3be89767a..73ad2ab7b3 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -35,7 +35,7 @@ python multilib_virtclass_handler () {
e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT'))
override = ":virtclass-multilib-" + variant
e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override)
- target_vendor = e.data.getVar("TARGET_VENDOR_" + "virtclass-multilib-" + variant, False)
+ target_vendor = e.data.getVar("TARGET_VENDOR:" + "virtclass-multilib-" + variant, False)
if target_vendor:
e.data.setVar("TARGET_VENDOR", target_vendor)
return
@@ -82,7 +82,7 @@ python multilib_virtclass_handler () {
e.data.setVar("WHITELIST_GPL-3.0", pkgs)
# DEFAULTTUNE can change TARGET_ARCH override so expand this now before update_data
- newtune = e.data.getVar("DEFAULTTUNE_" + "virtclass-multilib-" + variant, False)
+ newtune = e.data.getVar("DEFAULTTUNE:" + "virtclass-multilib-" + variant, False)
if newtune:
e.data.setVar("DEFAULTTUNE", newtune)
}
@@ -196,7 +196,7 @@ PACKAGEFUNCS:append = " do_package_qa_multilib"
python do_package_qa_multilib() {
def check_mlprefix(pkg, var, mlprefix):
- values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg)) or d.getVar(var) or "")
+ values = bb.utils.explode_deps(d.getVar('%s:%s' % (var, pkg)) or d.getVar(var) or "")
candidates = []
for i in values:
if i.startswith('virtual/'):
@@ -210,7 +210,7 @@ python do_package_qa_multilib() {
if len(candidates) > 0:
msg = "%s package %s - suspicious values '%s' in %s" \
% (d.getVar('PN'), pkg, ' '.join(candidates), var)
- package_qa_handle_error("multilib", msg, d)
+ oe.qa.handle_error("multilib", msg, d)
ml = d.getVar('MLPREFIX')
if not ml:
@@ -228,4 +228,5 @@ python do_package_qa_multilib() {
check_mlprefix(pkg, 'RSUGGESTS', ml)
check_mlprefix(pkg, 'RREPLACES', ml)
check_mlprefix(pkg, 'RCONFLICTS', ml)
+ oe.qa.exit_if_errors(d)
}
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass
index c66de8c787..14e210562f 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes/nativesdk.bbclass
@@ -31,7 +31,7 @@ PACKAGE_ARCHS = "${SDK_PACKAGE_ARCHS}"
DEPENDS:append = " chrpath-replacement-native"
EXTRANATIVEPATH += "chrpath-native"
-PKGDATA_DIR = "${TMPDIR}/pkgdata/${SDK_SYS}"
+PKGDATA_DIR = "${PKGDATA_DIR_SDK}"
HOST_ARCH = "${SDK_ARCH}"
HOST_VENDOR = "${SDK_VENDOR}"
diff --git a/meta/classes/npm.bbclass b/meta/classes/npm.bbclass
index 91a2321116..547c531883 100644
--- a/meta/classes/npm.bbclass
+++ b/meta/classes/npm.bbclass
@@ -22,8 +22,12 @@ inherit python3native
DEPENDS:prepend = "nodejs-native "
RDEPENDS:${PN}:append:class-target = " nodejs"
+EXTRA_OENPM = ""
+
NPM_INSTALL_DEV ?= "0"
+NPM_NODEDIR ?= "${RECIPE_SYSROOT_NATIVE}${prefix_native}"
+
def npm_target_arch_map(target_arch):
"""Maps arch names to npm arch names"""
import re
@@ -57,8 +61,8 @@ def npm_pack(env, srcdir, workdir):
"""Run 'npm pack' on a specified directory"""
import shlex
cmd = "npm pack %s" % shlex.quote(srcdir)
- configs = [("ignore-scripts", "true")]
- tarball = env.run(cmd, configs=configs, workdir=workdir).strip("\n")
+ args = [("ignore-scripts", "true")]
+ tarball = env.run(cmd, args=args, workdir=workdir).strip("\n")
return os.path.join(workdir, tarball)
python npm_do_configure() {
@@ -224,15 +228,11 @@ python npm_do_compile() {
bb.utils.remove(d.getVar("NPM_BUILD"), recurse=True)
- env = NpmEnvironment(d, configs=npm_global_configs(d))
-
- dev = bb.utils.to_boolean(d.getVar("NPM_INSTALL_DEV"), False)
-
with tempfile.TemporaryDirectory() as tmpdir:
args = []
- configs = []
+ configs = npm_global_configs(d)
- if dev:
+ if bb.utils.to_boolean(d.getVar("NPM_INSTALL_DEV"), False):
configs.append(("also", "development"))
else:
configs.append(("only", "production"))
@@ -247,20 +247,19 @@ python npm_do_compile() {
# Add node-gyp configuration
configs.append(("arch", d.getVar("NPM_ARCH")))
configs.append(("release", "true"))
- nodedir = d.getVar("NPM_NODEDIR")
- if not nodedir:
- sysroot = d.getVar("RECIPE_SYSROOT_NATIVE")
- nodedir = os.path.join(sysroot, d.getVar("prefix_native").strip("/"))
- configs.append(("nodedir", nodedir))
+ configs.append(("nodedir", d.getVar("NPM_NODEDIR")))
configs.append(("python", d.getVar("PYTHON")))
+ env = NpmEnvironment(d, configs)
+
# Add node-pre-gyp configuration
args.append(("target_arch", d.getVar("NPM_ARCH")))
args.append(("build-from-source", "true"))
# Pack and install the main package
tarball = npm_pack(env, d.getVar("NPM_PACKAGE"), tmpdir)
- env.run("npm install %s" % shlex.quote(tarball), args=args, configs=configs)
+ cmd = "npm install %s %s" % (shlex.quote(tarball), d.getVar("EXTRA_OENPM"))
+ env.run(cmd, args=args, configs=configs)
}
npm_do_install() {
diff --git a/meta/classes/overlayfs.bbclass b/meta/classes/overlayfs.bbclass
new file mode 100644
index 0000000000..8d9b59c9bf
--- /dev/null
+++ b/meta/classes/overlayfs.bbclass
@@ -0,0 +1,111 @@
+# Class for generation of overlayfs mount units
+#
+# It is often desirable in embedded system design to have a read-only rootfs.
+# However, many applications still need read-write access to parts of the
+# filesystem. This is especially useful when your update mechanism overwrites
+# the whole rootfs but you want your application data to be preserved between
+# updates. This class provides a way to achieve that by means of overlayfs,
+# while keeping the base rootfs read-only.
+#
+# Usage example.
+#
+# In your machine configuration, set a mount point for the partition that
+# overlayfs will use as its upper layer. The underlying file system can be
+# anything supported by overlayfs. This has to be done in the machine
+# configuration: the QA check cannot verify file existence if you redefine this
+# variable in a recipe!
+#
+# OVERLAYFS_MOUNT_POINT[data] ?= "/data"
+#
+# The class assumes you have a data.mount systemd unit defined in your
+# systemd-machine-units recipe and installed to the image.
+#
+# Then you can specify writable directories on a per-recipe basis
+#
+# OVERLAYFS_WRITABLE_PATHS[data] = "/usr/share/my-custom-application"
+#
+# To support several mount points you can use a different variable flag. Assume
+# we want a writable location on the file system but do not care whether the
+# data survives a reboot. Then we could have a mnt-overlay.mount unit for a
+# tmpfs file system:
+#
+# OVERLAYFS_MOUNT_POINT[mnt-overlay] = "/mnt/overlay"
+# OVERLAYFS_WRITABLE_PATHS[mnt-overlay] = "/usr/share/another-application"
+#
+# Note: the class does not support the /etc directory itself, because systemd depends on it
+
+REQUIRED_DISTRO_FEATURES += "systemd overlayfs"
+
+inherit systemd features_check
+
+python do_create_overlayfs_units() {
+ CreateDirsUnitTemplate = """[Unit]
+Description=Overlayfs directories setup
+Requires={DATA_MOUNT_UNIT}
+After={DATA_MOUNT_UNIT}
+DefaultDependencies=no
+
+[Service]
+Type=oneshot
+ExecStart=mkdir -p {DATA_MOUNT_POINT}/workdir{LOWERDIR} && mkdir -p {DATA_MOUNT_POINT}/upper{LOWERDIR}
+RemainAfterExit=true
+StandardOutput=journal
+
+[Install]
+WantedBy=multi-user.target
+"""
+ MountUnitTemplate = """[Unit]
+Description=Overlayfs mount unit
+Requires={CREATE_DIRS_SERVICE}
+After={CREATE_DIRS_SERVICE}
+
+[Mount]
+What=overlay
+Where={LOWERDIR}
+Type=overlay
+Options=lowerdir={LOWERDIR},upperdir={DATA_MOUNT_POINT}/upper{LOWERDIR},workdir={DATA_MOUNT_POINT}/workdir{LOWERDIR}
+
+[Install]
+WantedBy=multi-user.target
+"""
+
+ def prepareUnits(data, lower):
+ from oe.overlayfs import mountUnitName, helperUnitName
+
+ args = {
+ 'DATA_MOUNT_POINT': data,
+ 'DATA_MOUNT_UNIT': mountUnitName(data),
+ 'CREATE_DIRS_SERVICE': helperUnitName(lower),
+ 'LOWERDIR': lower,
+ }
+
+ with open(os.path.join(d.getVar('WORKDIR'), mountUnitName(lower)), 'w') as f:
+ f.write(MountUnitTemplate.format(**args))
+
+ with open(os.path.join(d.getVar('WORKDIR'), helperUnitName(lower)), 'w') as f:
+ f.write(CreateDirsUnitTemplate.format(**args))
+
+ overlayMountPoints = d.getVarFlags("OVERLAYFS_MOUNT_POINT")
+ for mountPoint in overlayMountPoints:
+ for lower in d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint).split():
+ prepareUnits(d.getVarFlag('OVERLAYFS_MOUNT_POINT', mountPoint), lower)
+}
+
+# we need to generate file names early during parsing stage
+python () {
+ from oe.overlayfs import strForBash, unitFileList
+
+ unitList = unitFileList(d)
+ for unit in unitList:
+ d.appendVar('SYSTEMD_SERVICE:' + d.getVar('PN'), ' ' + unit)
+ d.appendVar('FILES:' + d.getVar('PN'), ' ' + strForBash(unit))
+
+ d.setVar('OVERLAYFS_UNIT_LIST', ' '.join([strForBash(s) for s in unitList]))
+}
+
+do_install:append() {
+ install -d ${D}${systemd_system_unitdir}
+ for unit in ${OVERLAYFS_UNIT_LIST}; do
+ install -m 0444 ${WORKDIR}/${unit} ${D}${systemd_system_unitdir}
+ done
+}
+
+addtask create_overlayfs_units before do_install
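
The new overlayfs class renders one .mount unit plus one directory-creation service per writable path, using the templates in do_create_overlayfs_units(). A simplified Python rendering of a single mount unit for the example path from the header comment; the real unit-name mangling lives in meta/lib/oe/overlayfs.py and follows systemd's escaping rules, and the helper-service suffix used here is only a placeholder:

    MOUNT_UNIT_TEMPLATE = """[Unit]
    Description=Overlayfs mount unit
    Requires={CREATE_DIRS_SERVICE}
    After={CREATE_DIRS_SERVICE}

    [Mount]
    What=overlay
    Where={LOWERDIR}
    Type=overlay
    Options=lowerdir={LOWERDIR},upperdir={DATA_MOUNT_POINT}/upper{LOWERDIR},workdir={DATA_MOUNT_POINT}/workdir{LOWERDIR}

    [Install]
    WantedBy=multi-user.target
    """

    def unit_name(path, suffix):
        # Simplified: only converts "/" to "-"; systemd escaping also handles "-", ".", etc.
        return path.strip("/").replace("/", "-") + suffix

    data_mount_point = "/data"
    lower = "/usr/share/my-custom-application"

    print(unit_name(lower, ".mount"))   # usr-share-my-custom-application.mount
    print(MOUNT_UNIT_TEMPLATE.format(
        CREATE_DIRS_SERVICE=unit_name(lower, "-create-upper-dir.service"),
        DATA_MOUNT_POINT=data_mount_point,
        LOWERDIR=lower))
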
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index a659a1ef5c..92eba98892 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -41,8 +41,6 @@
inherit packagedata
inherit chrpath
inherit package_pkgdata
-
-# Need the package_qa_handle_error() in insane.bbclass
inherit insane
PKGD = "${WORKDIR}/package"
@@ -714,9 +712,7 @@ python package_get_auto_pr() {
return
try:
- conn = d.getVar("__PRSERV_CONN")
- if conn is None:
- conn = oe.prservice.prserv_make_conn(d)
+ conn = oe.prservice.prserv_make_conn(d)
if conn is not None:
if "AUTOINC" in pkgv:
srcpv = bb.fetch2.get_srcrev(d)
@@ -725,6 +721,7 @@ python package_get_auto_pr() {
d.setVar("PRSERV_PV_AUTOINC", str(value))
auto_pr = conn.getPR(version, pkgarch, checksum)
+ conn.close()
except Exception as e:
bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
if auto_pr is None:
@@ -866,7 +863,7 @@ python fixup_perms () {
self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
else:
msg = "Fixup Perms: invalid config line %s" % line
- package_qa_handle_error("perm-config", msg, d)
+ oe.qa.handle_error("perm-config", msg, d)
self.path = None
self.link = None
@@ -1006,7 +1003,7 @@ python fixup_perms () {
continue
if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
msg = "Fixup perms: %s invalid line: %s" % (conf, line)
- package_qa_handle_error("perm-line", msg, d)
+ oe.qa.handle_error("perm-line", msg, d)
continue
entry = fs_perms_entry(d.expand(line))
if entry and entry.path:
@@ -1043,7 +1040,7 @@ python fixup_perms () {
ptarget = os.path.join(os.path.dirname(dir), link)
if os.path.exists(target):
msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
- package_qa_handle_error("perm-link", msg, d)
+ oe.qa.handle_error("perm-link", msg, d)
continue
# Create path to move directory to, move it, and then setup the symlink
@@ -1203,7 +1200,7 @@ python split_and_strip_files () {
bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
else:
msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
- package_qa_handle_error("already-stripped", msg, d)
+ oe.qa.handle_error("already-stripped", msg, d)
continue
# At this point we have an unstripped elf file. We need to:
@@ -1225,6 +1222,14 @@ python split_and_strip_files () {
# Modified the file so clear the cache
cpath.updatecache(file)
+ def strip_pkgd_prefix(f):
+ nonlocal dvar
+
+ if f.startswith(dvar):
+ return f[len(dvar):]
+
+ return f
+
#
# First lets process debug splitting
#
@@ -1238,6 +1243,8 @@ python split_and_strip_files () {
for file in staticlibs:
results.append( (file,source_info(file, d)) )
+ d.setVar("PKGDEBUGSOURCES", {strip_pkgd_prefix(f): sorted(s) for f, s in results})
+
sources = set()
for r in results:
sources.update(r[1])
@@ -1353,7 +1360,7 @@ python populate_packages () {
for i, pkg in enumerate(packages):
if pkg in package_dict:
msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
- package_qa_handle_error("packages-list", msg, d)
+ oe.qa.handle_error("packages-list", msg, d)
# Ensure the source package gets the chance to pick up the source files
# before the debug package by ordering it first in PACKAGES. Whether it
# actually picks up any source files is controlled by
@@ -1390,7 +1397,7 @@ python populate_packages () {
filesvar = d.getVar('FILES:%s' % pkg) or ""
if "//" in filesvar:
msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
- package_qa_handle_error("files-invalid", msg, d)
+ oe.qa.handle_error("files-invalid", msg, d)
filesvar.replace("//", "/")
origfiles = filesvar.split()
@@ -1459,7 +1466,7 @@ python populate_packages () {
licenses = d.getVar('LICENSE_EXCLUSION-' + pkg)
if licenses:
msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
- package_qa_handle_error("incompatible-license", msg, d)
+ oe.qa.handle_error("incompatible-license", msg, d)
else:
package_list.append(pkg)
d.setVar('PACKAGES', ' '.join(package_list))
@@ -1483,7 +1490,7 @@ python populate_packages () {
msg = msg + "\n " + f
msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
- package_qa_handle_error("installed-vs-shipped", msg, d)
+ oe.qa.handle_error("installed-vs-shipped", msg, d)
}
populate_packages[dirs] = "${D}"
@@ -1550,6 +1557,7 @@ PKGDATA_VARS = "PN PE PV PR PKGE PKGV PKGR LICENSE DESCRIPTION SUMMARY RDEPENDS
python emit_pkgdata() {
from glob import glob
import json
+ import bb.compress.zstd
def process_postinst_on_target(pkg, mlprefix):
pkgval = d.getVar('PKG:%s' % pkg)
@@ -1622,6 +1630,8 @@ fi
with open(data_file, 'w') as fd:
fd.write("PACKAGES: %s\n" % packages)
+ pkgdebugsource = d.getVar("PKGDEBUGSOURCES") or []
+
pn = d.getVar('PN')
global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
variants = (d.getVar('MULTILIB_VARIANTS') or "").split()
@@ -1641,18 +1651,33 @@ fi
pkgval = pkg
d.setVar('PKG:%s' % pkg, pkg)
+ extended_data = {
+ "files_info": {}
+ }
+
pkgdestpkg = os.path.join(pkgdest, pkg)
files = {}
+ files_extra = {}
total_size = 0
seen = set()
for f in pkgfiles[pkg]:
- relpth = os.path.relpath(f, pkgdestpkg)
+ fpath = os.sep + os.path.relpath(f, pkgdestpkg)
+
fstat = os.lstat(f)
- files[os.sep + relpth] = fstat.st_size
+ files[fpath] = fstat.st_size
+
+ extended_data["files_info"].setdefault(fpath, {})
+ extended_data["files_info"][fpath]['size'] = fstat.st_size
+
if fstat.st_ino not in seen:
seen.add(fstat.st_ino)
total_size += fstat.st_size
- d.setVar('FILES_INFO', json.dumps(files, sort_keys=True))
+
+ if fpath in pkgdebugsource:
+ extended_data["files_info"][fpath]['debugsrc'] = pkgdebugsource[fpath]
+ del pkgdebugsource[fpath]
+
+ d.setVar('FILES_INFO:' + pkg , json.dumps(files, sort_keys=True))
process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
add_set_e_to_scriptlets(pkg)
@@ -1663,14 +1688,19 @@ fi
val = write_if_exists(sf, pkg, var)
write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
- for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg) or "").split():
- write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)
+ for dfile in sorted((d.getVar('FILERPROVIDESFLIST:' + pkg) or "").split()):
+ write_if_exists(sf, pkg, 'FILERPROVIDES:' + dfile)
write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
- for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg) or "").split():
- write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)
+ for dfile in sorted((d.getVar('FILERDEPENDSFLIST:' + pkg) or "").split()):
+ write_if_exists(sf, pkg, 'FILERDEPENDS:' + dfile)
+
+ sf.write('%s:%s: %d\n' % ('PKGSIZE', pkg, total_size))
- sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
+ subdata_extended_file = pkgdatadir + "/extended/%s.json.zstd" % pkg
+ num_threads = int(d.getVar("BB_NUMBER_THREADS"))
+ with bb.compress.zstd.open(subdata_extended_file, "wt", encoding="utf-8", num_threads=num_threads) as f:
+ json.dump(extended_data, f, sort_keys=True, separators=(",", ":"))
# Symlinks needed for rprovides lookup
rprov = d.getVar('RPROVIDES:%s' % pkg) or d.getVar('RPROVIDES')
@@ -1702,7 +1732,8 @@ fi
write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)
}
-emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides"
+emit_pkgdata[dirs] = "${PKGDESTWORK}/runtime ${PKGDESTWORK}/runtime-reverse ${PKGDESTWORK}/runtime-rprovides ${PKGDESTWORK}/extended"
+emit_pkgdata[vardepsexclude] = "BB_NUMBER_THREADS"
ldconfig_postinst_fragment() {
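
Beyond renaming FILES_INFO to the per-package FILES_INFO:<pkg>, emit_pkgdata() now writes an extended, zstd-compressed JSON file per package containing each file's size and, where available, its debug source files collected via PKGDEBUGSOURCES. A standard-library-only sketch of the per-file record (compression with bb.compress.zstd is left out, and the paths are example values):

    import json
    import os

    def build_files_info(pkgdest_pkg, debug_sources=None):
        debug_sources = debug_sources or {}
        files_info = {}
        for root, _, files in os.walk(pkgdest_pkg):
            for name in files:
                f = os.path.join(root, name)
                fpath = os.sep + os.path.relpath(f, pkgdest_pkg)
                files_info[fpath] = {"size": os.lstat(f).st_size}
                if fpath in debug_sources:
                    files_info[fpath]["debugsrc"] = sorted(debug_sources[fpath])
        return files_info

    info = build_files_info("packages-split/example")   # path is an example value
    print(json.dumps({"files_info": info}, sort_keys=True, separators=(",", ":")))
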
if [ x"$D" = "x" ]; then
@@ -1714,11 +1745,11 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps --alldeps --define '__font_provi
# Collect perfile run-time dependency metadata
# Output:
-# FILERPROVIDESFLIST_pkg - list of all files w/ deps
-# FILERPROVIDES_filepath_pkg - per file dep
+# FILERPROVIDESFLIST:pkg - list of all files w/ deps
+# FILERPROVIDES:filepath:pkg - per file dep
#
-# FILERDEPENDSFLIST_pkg - list of all files w/ deps
-# FILERDEPENDS_filepath_pkg - per file dep
+# FILERDEPENDSFLIST:pkg - list of all files w/ deps
+# FILERDEPENDS:filepath:pkg - per file dep
python package_do_filedeps() {
if d.getVar('SKIP_FILEDEPS') == '1':
@@ -1755,18 +1786,18 @@ python package_do_filedeps() {
for file in sorted(provides):
provides_files[pkg].append(file)
- key = "FILERPROVIDES_" + file + "_" + pkg
+ key = "FILERPROVIDES:" + file + ":" + pkg
d.appendVar(key, " " + " ".join(provides[file]))
for file in sorted(requires):
requires_files[pkg].append(file)
- key = "FILERDEPENDS_" + file + "_" + pkg
+ key = "FILERDEPENDS:" + file + ":" + pkg
d.appendVar(key, " " + " ".join(requires[file]))
for pkg in requires_files:
- d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
+ d.setVar("FILERDEPENDSFLIST:" + pkg, " ".join(sorted(requires_files[pkg])))
for pkg in provides_files:
- d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
+ d.setVar("FILERPROVIDESFLIST:" + pkg, " ".join(sorted(provides_files[pkg])))
}
SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
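
package_do_filedeps() now emits per-file keys with colon separators (FILERDEPENDS:<file>:<pkg>) and sorts the file lists, which keeps the generated pkgdata stable between builds. A small Python sketch of that key layout with made-up dependency data:

    def filedeps_vars(pkg, requires):
        out = {}
        for path in requires:
            out["FILERDEPENDS:%s:%s" % (path, pkg)] = " ".join(requires[path])
        out["FILERDEPENDSFLIST:%s" % pkg] = " ".join(sorted(requires))
        return out

    deps = {"/usr/bin/foo": ["libc.so.6()(64bit)"], "/usr/bin/bar": ["libz.so.1()(64bit)"]}
    for key, value in sorted(filedeps_vars("example", deps).items()):
        print(key, "=", value)
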
@@ -1805,7 +1836,7 @@ python package_do_shlibs() {
ver = d.getVar('PKGV')
if not ver:
msg = "PKGV not defined"
- package_qa_handle_error("pkgv-undefined", msg, d)
+ oe.qa.handle_error("pkgv-undefined", msg, d)
return
pkgdest = d.getVar('PKGDEST')
@@ -2079,12 +2110,12 @@ python package_do_pkgconfig () {
for pkg in packages.split():
pkgconfig_provided[pkg] = []
pkgconfig_needed[pkg] = []
- for file in pkgfiles[pkg]:
+ for file in sorted(pkgfiles[pkg]):
m = pc_re.match(file)
if m:
pd = bb.data.init()
name = m.group(1)
- pkgconfig_provided[pkg].append(name)
+ pkgconfig_provided[pkg].append(os.path.basename(name))
if not os.access(file, os.R_OK):
continue
with open(file, 'r') as f:
@@ -2107,7 +2138,7 @@ python package_do_pkgconfig () {
pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
if pkgconfig_provided[pkg] != []:
with open(pkgs_file, 'w') as f:
- for p in pkgconfig_provided[pkg]:
+ for p in sorted(pkgconfig_provided[pkg]):
f.write('%s\n' % p)
# Go from least to most specific since the last one found wins
@@ -2369,7 +2400,7 @@ python do_package () {
if not workdir or not outdir or not dest or not dvar or not pn:
msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
- package_qa_handle_error("var-undefined", msg, d)
+ oe.qa.handle_error("var-undefined", msg, d)
return
bb.build.exec_func("package_convert_pr_autoinc", d)
@@ -2422,9 +2453,7 @@ python do_package () {
for f in (d.getVar('PACKAGEFUNCS') or '').split():
bb.build.exec_func(f, d)
- qa_sane = d.getVar("QA_SANE")
- if not qa_sane:
- bb.fatal("Fatal QA errors found, failing task.")
+ oe.qa.exit_if_errors(d)
}
do_package[dirs] = "${SHLIBSWORKDIR} ${PKGDESTWORK} ${D}"
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index eca43e1787..1ae6393d37 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -315,10 +315,7 @@ python do_package_write_deb () {
do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}"
do_package_write_deb[cleandirs] = "${PKGWRITEDIRDEB}"
do_package_write_deb[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}"
-addtask package_write_deb after do_packagedata do_package
-
+addtask package_write_deb after do_packagedata do_package do_deploy_source_date_epoch before do_build
PACKAGEINDEXDEPS += "dpkg-native:do_populate_sysroot"
PACKAGEINDEXDEPS += "apt-native:do_populate_sysroot"
-
-do_build[recrdeptask] += "do_package_write_deb"
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index c3b53854e8..902b7f94c8 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -230,8 +230,8 @@ def ipk_write_pkg(pkg, d):
shell=True)
if d.getVar('IPK_SIGN_PACKAGES') == '1':
- ipkver = "%s-%s" % (d.getVar('PKGV'), d.getVar('PKGR'))
- ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH'))
+ ipkver = "%s-%s" % (localdata.getVar('PKGV'), localdata.getVar('PKGR'))
+ ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, localdata.getVar('PACKAGE_ARCH'))
sign_ipk(d, ipk_to_sign)
finally:
@@ -274,9 +274,7 @@ python do_package_write_ipk () {
do_package_write_ipk[dirs] = "${PKGWRITEDIRIPK}"
do_package_write_ipk[cleandirs] = "${PKGWRITEDIRIPK}"
do_package_write_ipk[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}"
-addtask package_write_ipk after do_packagedata do_package
+addtask package_write_ipk after do_packagedata do_package do_deploy_source_date_epoch before do_build
PACKAGEINDEXDEPS += "opkg-utils-native:do_populate_sysroot"
PACKAGEINDEXDEPS += "opkg-native:do_populate_sysroot"
-
-do_build[recrdeptask] += "do_package_write_ipk"
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index aebeaf6932..b0754421a3 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -40,10 +40,10 @@ def write_rpm_perfiledata(srcname, d):
outfile.write("# Dependency table\n")
outfile.write('deps = {\n')
for pkg in packages.split():
- dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
+ dependsflist_key = 'FILE' + varname + 'FLIST' + ":" + pkg
dependsflist = (d.getVar(dependsflist_key) or "")
for dfile in dependsflist.split():
- key = "FILE" + varname + "_" + dfile + "_" + pkg
+ key = "FILE" + varname + ":" + dfile + ":" + pkg
deps = filter_nativesdk_deps(srcname, d.getVar(key) or "")
depends_dict = bb.utils.explode_dep_versions(deps)
file = dfile.replace("@underscore@", "_")
@@ -249,10 +249,10 @@ python write_specfile () {
def get_perfile(varname, pkg, d):
deps = []
- dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg
+ dependsflist_key = 'FILE' + varname + 'FLIST' + ":" + pkg
dependsflist = (d.getVar(dependsflist_key) or "")
for dfile in dependsflist.split():
- key = "FILE" + varname + "_" + dfile + "_" + pkg
+ key = "FILE" + varname + ":" + dfile + ":" + pkg
depends = d.getVar(key)
if depends:
deps.append(depends)
@@ -684,8 +684,8 @@ python do_package_rpm () {
cmd = cmd + " --define '_use_internal_dependency_generator 0'"
cmd = cmd + " --define '_binaries_in_noarch_packages_terminate_build 0'"
cmd = cmd + " --define '_build_id_links none'"
- cmd = cmd + " --define '_binary_payload w6T%d.xzdio'" % int(d.getVar("XZ_THREADS"))
- cmd = cmd + " --define '_source_payload w6T%d.xzdio'" % int(d.getVar("XZ_THREADS"))
+ cmd = cmd + " --define '_binary_payload w19T%d.zstdio'" % int(d.getVar("ZSTD_THREADS"))
+ cmd = cmd + " --define '_source_payload w19T%d.zstdio'" % int(d.getVar("ZSTD_THREADS"))
cmd = cmd + " --define 'clamp_mtime_to_source_date_epoch 1'"
cmd = cmd + " --define 'use_source_date_epoch_as_buildtime 1'"
cmd = cmd + " --define '_buildhost reproducible'"
@@ -748,9 +748,7 @@ python do_package_write_rpm () {
do_package_write_rpm[dirs] = "${PKGWRITEDIRRPM}"
do_package_write_rpm[cleandirs] = "${PKGWRITEDIRRPM}"
do_package_write_rpm[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}"
-addtask package_write_rpm after do_packagedata do_package
+addtask package_write_rpm after do_packagedata do_package do_deploy_source_date_epoch before do_build
PACKAGEINDEXDEPS += "rpm-native:do_populate_sysroot"
PACKAGEINDEXDEPS += "createrepo-c-native:do_populate_sysroot"
-
-do_build[recrdeptask] += "do_package_write_rpm"
diff --git a/meta/classes/packagedata.bbclass b/meta/classes/packagedata.bbclass
index 3b962f29b5..c2760e2bf0 100644
--- a/meta/classes/packagedata.bbclass
+++ b/meta/classes/packagedata.bbclass
@@ -27,7 +27,7 @@ python read_subpackage_metadata () {
# of that variable, e.g. DESCRIPTION could clobber DESCRIPTION:<pkgname>
# We therefore don't clobber for the unsuffixed variable versions
#
- if key.endswith("_" + pkg):
+ if key.endswith(":" + pkg):
d.setVar(key, sdata[key])
else:
d.setVar(key, sdata[key], parsing=True)
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index 388773a237..fdf3c633bc 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -131,6 +131,9 @@ python patch_do_patch() {
patchdir = parm["patchdir"]
if not os.path.isabs(patchdir):
patchdir = os.path.join(s, patchdir)
+ if not os.path.isdir(patchdir):
+ bb.fatal("Target directory '%s' not found, patchdir '%s' is incorrect in patch file '%s'" %
+ (patchdir, parm["patchdir"], parm['patchname']))
else:
patchdir = s
@@ -147,12 +150,12 @@ python patch_do_patch() {
patchset.Import({"file":local, "strippath": parm['striplevel']}, True)
except Exception as exc:
bb.utils.remove(process_tmpdir, True)
- bb.fatal(str(exc))
+ bb.fatal("Importing patch '%s' with striplevel '%s'\n%s" % (parm['patchname'], parm['striplevel'], repr(exc)))
try:
resolver.Resolve()
except bb.BBHandledException as e:
bb.utils.remove(process_tmpdir, True)
- bb.fatal(str(e))
+ bb.fatal("Applying patch '%s' on target directory '%s'\n%s" % (parm['patchname'], patchdir, repr(e)))
bb.utils.remove(process_tmpdir, True)
del os.environ['TMPDIR']
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes/populate_sdk_base.bbclass
index ccfe223289..49e166e697 100644
--- a/meta/classes/populate_sdk_base.bbclass
+++ b/meta/classes/populate_sdk_base.bbclass
@@ -1,4 +1,6 @@
-inherit meta image-postinst-intercepts image-artifact-names
+PACKAGES = ""
+
+inherit image-postinst-intercepts image-artifact-names
# Wildcards specifying complementary packages to install for every package that has been explicitly
# installed into the rootfs
diff --git a/meta/classes/populate_sdk_ext.bbclass b/meta/classes/populate_sdk_ext.bbclass
index 35679208bc..9187f53f13 100644
--- a/meta/classes/populate_sdk_ext.bbclass
+++ b/meta/classes/populate_sdk_ext.bbclass
@@ -550,7 +550,7 @@ python copy_buildsystem () {
# We don't need sstate do_package files
for root, dirs, files in os.walk(sstate_out):
for name in files:
- if name.endswith("_package.tgz"):
+ if name.endswith("_package.tar.zst"):
f = os.path.join(root, name)
os.remove(f)
diff --git a/meta/classes/ptest.bbclass b/meta/classes/ptest.bbclass
index 200446e52b..1ec23c0923 100644
--- a/meta/classes/ptest.bbclass
+++ b/meta/classes/ptest.bbclass
@@ -118,13 +118,15 @@ python () {
if not(d.getVar('PTEST_ENABLED') == "1"):
for i in ['do_configure_ptest_base', 'do_compile_ptest_base', 'do_install_ptest_base']:
bb.build.deltask(i, d)
+}
+QARECIPETEST[missing-ptest] = "package_qa_check_missing_ptest"
+def package_qa_check_missing_ptest(pn, d, messages):
# This checks that ptest package is actually included
# in standard oe-core ptest images - only for oe-core recipes
if not 'meta/recipes' in d.getVar('FILE') or not(d.getVar('PTEST_ENABLED') == "1"):
return
- enabled_ptests = " ".join([d.getVar('PTESTS_FAST'),d.getVar('PTESTS_SLOW'), d.getVar('PTESTS_PROBLEMS')]).split()
- if (d.getVar('PN') + "-ptest").replace(d.getVar('MLPREFIX'), '') not in enabled_ptests:
- bb.error("Recipe %s supports ptests but is not included in oe-core's conf/distro/include/ptest-packagelists.inc" % d.getVar("PN"))
-}
+ enabled_ptests = " ".join([d.getVar('PTESTS_FAST'), d.getVar('PTESTS_SLOW'), d.getVar('PTESTS_PROBLEMS')]).split()
+ if (pn + "-ptest").replace(d.getVar('MLPREFIX'), '') not in enabled_ptests:
+ oe.qa.handle_error("missing-ptest", "supports ptests but is not included in oe-core's ptest-packagelists.inc", d)
diff --git a/meta/classes/pypi.bbclass b/meta/classes/pypi.bbclass
index 272c220bca..9405d58601 100644
--- a/meta/classes/pypi.bbclass
+++ b/meta/classes/pypi.bbclass
@@ -8,12 +8,12 @@ def pypi_package(d):
PYPI_PACKAGE ?= "${@pypi_package(d)}"
PYPI_PACKAGE_EXT ?= "tar.gz"
+PYPI_ARCHIVE_NAME ?= "${PYPI_PACKAGE}-${PV}.${PYPI_PACKAGE_EXT}"
def pypi_src_uri(d):
package = d.getVar('PYPI_PACKAGE')
- package_ext = d.getVar('PYPI_PACKAGE_EXT')
- pv = d.getVar('PV')
- return 'https://files.pythonhosted.org/packages/source/%s/%s/%s-%s.%s' % (package[0], package, package, pv, package_ext)
+ archive_name = d.getVar('PYPI_ARCHIVE_NAME')
+ return 'https://files.pythonhosted.org/packages/source/%s/%s/%s' % (package[0], package, archive_name)
PYPI_SRC_URI ?= "${@pypi_src_uri(d)}"
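
The pypi change introduces PYPI_ARCHIVE_NAME, defaulting to <package>-<version>.<ext>, so recipes can override the archive filename without rewriting the whole URL. The construction in pypi_src_uri() boils down to the following sketch (Jinja2 is just an example package):

    def pypi_src_uri(package, version, ext="tar.gz", archive_name=None):
        # First path component is the first letter of the package name.
        archive_name = archive_name or "%s-%s.%s" % (package, version, ext)
        return "https://files.pythonhosted.org/packages/source/%s/%s/%s" % (
            package[0], package, archive_name)

    print(pypi_src_uri("Jinja2", "3.0.3"))
    # https://files.pythonhosted.org/packages/source/J/Jinja2/Jinja2-3.0.3.tar.gz
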
diff --git a/meta/classes/python3-dir.bbclass b/meta/classes/python3-dir.bbclass
index f51f971fc5..ff03e584d4 100644
--- a/meta/classes/python3-dir.bbclass
+++ b/meta/classes/python3-dir.bbclass
@@ -1,4 +1,4 @@
-PYTHON_BASEVERSION = "3.9"
+PYTHON_BASEVERSION = "3.10"
PYTHON_ABI = ""
PYTHON_DIR = "python${PYTHON_BASEVERSION}"
PYTHON_PN = "python3"
diff --git a/meta/classes/reproducible_build.bbclass b/meta/classes/reproducible_build.bbclass
deleted file mode 100644
index 378121903d..0000000000
--- a/meta/classes/reproducible_build.bbclass
+++ /dev/null
@@ -1,127 +0,0 @@
-# reproducible_build.bbclass
-#
-# Sets SOURCE_DATE_EPOCH in each component's build environment.
-# Upstream components (generally) respect this environment variable,
-# using it in place of the "current" date and time.
-# See https://reproducible-builds.org/specs/source-date-epoch/
-#
-# After sources are unpacked but before they are patched, we set a reproducible value for SOURCE_DATE_EPOCH.
-# This value should be reproducible for anyone who builds the same revision from the same sources.
-#
-# There are 4 ways we determine SOURCE_DATE_EPOCH:
-#
-# 1. Use the value from __source_date_epoch.txt file if this file exists.
-# This file was most likely created in the previous build by one of the following methods 2,3,4.
-# Alternatively, it can be provided by a recipe via SRC_URI.
-#
-# If the file does not exist:
-#
-# 2. If there is a git checkout, use the last git commit timestamp.
-# Git does not preserve file timestamps on checkout.
-#
-# 3. Use the mtime of "known" files such as NEWS, CHANGELOG, ...
-# This works for well-kept repositories distributed via tarball.
-#
-# 4. Use the modification time of the youngest file in the source tree, if there is one.
-# This will be the newest file from the distribution tarball, if any.
-#
-# 5. Fall back to a fixed timestamp.
-#
-# Once the value of SOURCE_DATE_EPOCH is determined, it is stored in the recipe's SDE_FILE.
-# If none of these mechanisms are suitable, replace the do_deploy_source_date_epoch task
-# with recipe-specific functionality to write the appropriate SOURCE_DATE_EPOCH into the SDE_FILE.
-#
-# If this file is found by other tasks, the value is exported in the SOURCE_DATE_EPOCH variable.
-# SOURCE_DATE_EPOCH is set for all tasks that might use it (do_configure, do_compile, do_package, ...)
-
-BUILD_REPRODUCIBLE_BINARIES ??= '1'
-inherit ${@oe.utils.ifelse(d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1', 'reproducible_build_simple', '')}
-
-SDE_DIR = "${WORKDIR}/source-date-epoch"
-SDE_FILE = "${SDE_DIR}/__source_date_epoch.txt"
-SDE_DEPLOYDIR = "${WORKDIR}/deploy-source-date-epoch"
-
-# Enable compiler warning when the __TIME__, __DATE__ and __TIMESTAMP__ macros are used.
-TARGET_CC_ARCH:append:class-target = " -Wdate-time"
-
-# A SOURCE_DATE_EPOCH of '0' might be misinterpreted as no SDE
-export SOURCE_DATE_EPOCH_FALLBACK ??= "1302044400"
-
-SSTATETASKS += "do_deploy_source_date_epoch"
-
-do_deploy_source_date_epoch () {
- mkdir -p ${SDE_DEPLOYDIR}
- if [ -e ${SDE_FILE} ]; then
- echo "Deploying SDE from ${SDE_FILE} -> ${SDE_DEPLOYDIR}."
- cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
- else
- echo "${SDE_FILE} not found!"
- fi
-}
-
-python do_deploy_source_date_epoch_setscene () {
- sstate_setscene(d)
- bb.utils.mkdirhier(d.getVar('SDE_DIR'))
- sde_file = os.path.join(d.getVar('SDE_DEPLOYDIR'), '__source_date_epoch.txt')
- if os.path.exists(sde_file):
- target = d.getVar('SDE_FILE')
- bb.debug(1, "Moving setscene SDE file %s -> %s" % (sde_file, target))
- bb.utils.rename(sde_file, target)
- else:
- bb.debug(1, "%s not found!" % sde_file)
-}
-
-do_deploy_source_date_epoch[dirs] = "${SDE_DEPLOYDIR}"
-do_deploy_source_date_epoch[sstate-plaindirs] = "${SDE_DEPLOYDIR}"
-addtask do_deploy_source_date_epoch_setscene
-addtask do_deploy_source_date_epoch before do_configure after do_patch
-
-python create_source_date_epoch_stamp() {
- import oe.reproducible
-
- epochfile = d.getVar('SDE_FILE')
- tmp_file = "%s.new" % epochfile
-
- source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S'))
-
- bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
- bb.utils.mkdirhier(d.getVar('SDE_DIR'))
- with open(tmp_file, 'w') as f:
- f.write(str(source_date_epoch))
-
- os.rename(tmp_file, epochfile)
-}
-
-def get_source_date_epoch_value(d):
- cached = d.getVar('__CACHED_SOURCE_DATE_EPOCH')
- if cached:
- return cached
-
- epochfile = d.getVar('SDE_FILE')
- source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
- try:
- with open(epochfile, 'r') as f:
- s = f.read()
- try:
- source_date_epoch = int(s)
- # workaround for old sstate with SDE_FILE content being 0 - use SOURCE_DATE_EPOCH_FALLBACK
- if source_date_epoch == 0 :
- source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
- bb.warn("SOURCE_DATE_EPOCH value from sstate '%s' is deprecated/invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK '%s'" % (s, source_date_epoch))
- except ValueError:
- bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK" % s)
- source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
- bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
- except FileNotFoundError:
- bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, source_date_epoch))
-
- d.setVar('__CACHED_SOURCE_DATE_EPOCH', str(source_date_epoch))
- return str(source_date_epoch)
-
-export SOURCE_DATE_EPOCH ?= "${@get_source_date_epoch_value(d)}"
-BB_HASHBASE_WHITELIST += "SOURCE_DATE_EPOCH"
-
-python () {
- if d.getVar('BUILD_REPRODUCIBLE_BINARIES') == '1':
- d.appendVarFlag("do_unpack", "postfuncs", " create_source_date_epoch_stamp")
-}
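
The comments in this removed class note that a recipe can replace do_deploy_source_date_epoch with its own logic that writes the desired value into SDE_FILE. A minimal sketch of such an override, reusing the class's own SDE_DIR/SDE_FILE/SDE_DEPLOYDIR variables (the fixed timestamp is purely illustrative):

    # Hypothetical recipe-side override: pin SOURCE_DATE_EPOCH to a known value.
    do_deploy_source_date_epoch () {
        mkdir -p ${SDE_DIR} ${SDE_DEPLOYDIR}
        echo "1612144800" > ${SDE_FILE}
        cp -p ${SDE_FILE} ${SDE_DEPLOYDIR}/__source_date_epoch.txt
    }
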
diff --git a/meta/classes/reproducible_build_simple.bbclass b/meta/classes/reproducible_build_simple.bbclass
deleted file mode 100644
index 393372993d..0000000000
--- a/meta/classes/reproducible_build_simple.bbclass
+++ /dev/null
@@ -1,9 +0,0 @@
-# Setup default environment for reproducible builds.
-
-BUILD_REPRODUCIBLE_BINARIES = "1"
-
-export PYTHONHASHSEED = "0"
-export PERL_HASH_SEED = "0"
-export SOURCE_DATE_EPOCH ??= "1520598896"
-
-REPRODUCIBLE_TIMESTAMP_ROOTFS ??= "1520598896"
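
Both removed classes key off BUILD_REPRODUCIBLE_BINARIES (note the ??= "1" default and the conditional inherit in reproducible_build.bbclass above). Because the class only sets a weak default, a plain assignment in configuration wins; a one-line sketch of opting out:

    # Hypothetical local.conf fragment: skip the SOURCE_DATE_EPOCH and
    # hash-seed setup these classes provide.
    BUILD_REPRODUCIBLE_BINARIES = "0"
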
diff --git a/meta/classes/rm_work.bbclass b/meta/classes/rm_work.bbclass
index 07901d7597..5f12d5aaeb 100644
--- a/meta/classes/rm_work.bbclass
+++ b/meta/classes/rm_work.bbclass
@@ -73,7 +73,7 @@ do_rm_work () {
# sstate version since otherwise we'd need to leave 'plaindirs' around
# such as 'packages' and 'packages-split' and these can be large. No end
# of chain tasks depend directly on do_package anymore.
- rm -f $i;
+ rm -f -- $i;
;;
*_setscene*)
# Skip stamps which are already setscene versions
@@ -90,7 +90,7 @@ do_rm_work () {
;;
esac
done
- rm -f $i
+ rm -f -- $i
esac
done
@@ -100,9 +100,9 @@ do_rm_work () {
# Retain only logs and other files in temp; safely ignore
# failures when removing pseudo folders on an NFS 2/3 server.
if [ $dir = 'pseudo' ]; then
- rm -rf $dir 2> /dev/null || true
+ rm -rf -- $dir 2> /dev/null || true
elif ! echo "$excludes" | grep -q -w "$dir"; then
- rm -rf $dir
+ rm -rf -- $dir
fi
done
}
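
The added "--" ends option parsing, so a stamp or directory name that happens to start with a dash cannot be mistaken for an rm option. A quick shell illustration (the filename is made up):

    # Without the separator GNU rm typically rejects the name as an option;
    # with it, the file is removed as an ordinary argument.
    touch ./-odd-stamp
    rm -f -- -odd-stamp
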
diff --git a/meta/classes/rootfs-postcommands.bbclass b/meta/classes/rootfs-postcommands.bbclass
index fbfa63fcb3..7fe9e3d8c8 100644
--- a/meta/classes/rootfs-postcommands.bbclass
+++ b/meta/classes/rootfs-postcommands.bbclass
@@ -39,6 +39,8 @@ ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("DISTRO_FEATURES", "systemd"
ROOTFS_POSTPROCESS_COMMAND += 'empty_var_volatile;'
+ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("DISTRO_FEATURES", "overlayfs", "overlayfs_qa_check;", "", d)}'
+
inherit image-artifact-names
# Sort the user and group entries in /etc by ID in order to make the content
@@ -216,8 +218,8 @@ postinst_enable_logging () {
# Modify systemd default target
#
set_systemd_default_target () {
- if [ -d ${IMAGE_ROOTFS}${sysconfdir}/systemd/system -a -e ${IMAGE_ROOTFS}${systemd_unitdir}/system/${SYSTEMD_DEFAULT_TARGET} ]; then
- ln -sf ${systemd_unitdir}/system/${SYSTEMD_DEFAULT_TARGET} ${IMAGE_ROOTFS}${sysconfdir}/systemd/system/default.target
+ if [ -d ${IMAGE_ROOTFS}${sysconfdir}/systemd/system -a -e ${IMAGE_ROOTFS}${systemd_system_unitdir}/${SYSTEMD_DEFAULT_TARGET} ]; then
+ ln -sf ${systemd_system_unitdir}/${SYSTEMD_DEFAULT_TARGET} ${IMAGE_ROOTFS}${sysconfdir}/systemd/system/default.target
fi
}
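
For reference, the two variables are defined in bitbake.conf roughly as below (quoted from memory, so treat the exact wording as an assumption); the rewritten test therefore points at the same directory while naming it with the dedicated system-unit variable:

    systemd_unitdir        = "${nonarch_libdir}/systemd"
    systemd_system_unitdir = "${nonarch_libdir}/systemd/system"
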
@@ -373,3 +375,26 @@ rootfs_reproducible () {
fi
fi
}
+
+python overlayfs_qa_check() {
+ from oe.overlayfs import mountUnitName
+
+ # This only checks that the mount unit file exists; it does not validate it
+ overlayMountPoints = d.getVarFlags("OVERLAYFS_MOUNT_POINT")
+ imagepath = d.getVar("IMAGE_ROOTFS")
+ searchpaths = [oe.path.join(imagepath, d.getVar("sysconfdir"), "systemd", "system"),
+ oe.path.join(imagepath, d.getVar("systemd_system_unitdir"))]
+
+ allUnitExist = True
+ for mountPoint in overlayMountPoints:
+ path = d.getVarFlag('OVERLAYFS_MOUNT_POINT', mountPoint)
+ unit = mountUnitName(path)
+
+ if not any(os.path.isfile(oe.path.join(dirpath, unit))
+ for dirpath in searchpaths):
+ bb.warn('Unit name %s not found in systemd unit directories' % unit)
+ allUnitExist = False
+
+ if not allUnitExist: