Diffstat (limited to 'meta-oe/dynamic-layers')
-rw-r--r-- meta-oe/dynamic-layers/gnome-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend | 2
-rw-r--r-- meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/7_6.diff | 2
-rw-r--r-- meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/fix-menu-generation.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/port-gnome-menus3.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu_0.3.bb | 7
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-benchmark/speedtest-cli/speedtest-cli_2.1.3.bb | 14
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-bsp/rwmem/rwmem/0001-include-missing-cstdint.patch | 32
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-bsp/rwmem/rwmem_1.2.bb | 30
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Fix-build-on-32bit-arches-with-64bit-time_t.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Makefile.am-do-not-clobber-PYTHONPATH-from-build-env.patch | 57
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Unbolt-ubuntu-hack.patch | 26
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-mplay-Fix-build-with-musl.patch | 44
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/fix_build_errors.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc_0.10.1.bb | 110
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc_0.10.2.bb | 115
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-don-t-fail-if-GLOB_BRACE-is-not-defined.patch | 32
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-meson.build-drop-unnecessary-build-dependencies.patch | 58
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-networkd.c-define-scope-specific-to-case-statement.patch | 47
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0002-meson.build-do-not-use-Werror.patch | 29
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan_1.0.bb | 52
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-connectivity/thingsboard-gateway/thingsboard-gateway_3.4.6.bb (renamed from meta-oe/dynamic-layers/meta-python/recipes-connectivity/thingsboard-gateway/thingsboard-gateway_2.5.2.bb) | 13
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-core/packagegroups/packagegroup-meta-oe.bbappend | 14
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Do-not-use-MINSIGSTKSZ.patch | 34
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Fix-build-on-32bit.patch | 38
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Fix-type-mismatch-on-32bit-arches.patch | 33
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-IntelRDFPMathLib20U1-Check-for-__DEFINED_wchar_t.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Tell-scons-to-use-build-settings-from-environment-va.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-The-std-lib-unary-binary_function-base-classes-are-d.patch | 40
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-__GLIBC__-to-control-use-of-gnu_get_libc_version.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-explicit-typecast-to-size_t.patch | 35
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-long-long-instead-of-int64_t.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-add-explict-static_cast-size_t-to-maxMemoryUsageByte.patch | 38
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-apply-msvc-workaround-for-clang-16.patch | 32
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-asio-Dont-use-experimental-with-clang.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-free_mon-Include-missing-cstdint.patch | 28
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-server-Adjust-the-cache-alignment-assumptions.patch | 37
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-wiredtiger-Avoid-using-off64_t.patch | 30
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0002-Add-a-definition-for-the-macro-__ELF_NATIVE_CLASS.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0003-Fix-unknown-prefix-env.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0004-wiredtiger-Disable-strtouq-on-musl.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/1296.patch | 3
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/arm64-support.patch | 2
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb_git.bb | 120
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb-generator_0.4.8.bb | 14
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb-runtime_0.4.8.bb | 17
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb.bb | 16
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb.inc | 13
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb_0.4.5.bb | 29
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0001-Fix-compilation-with-GCC-10.x.patch | 46
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0001-Fix-parallel-build-fix-port-internal-make-dependenci.patch | 33
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0002-Include-limits.h-for-PATH_MAX-definition.patch | 41
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0003-Fix-non-x86-platforms-on-musl.patch | 35
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc_git.bb | 49
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0001-Port-build-to-python3.patch | 6897
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0002-js.pc.in-do-not-include-RequiredDefines.h-for-depend.patch | 33
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0003-fix-cross-compilation-on-i586-targets.patch | 38
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0004-do-not-create-python-environment.patch | 64
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0005-fix-cannot-find-link.patch | 34
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0006-workaround-autoconf-2.13-detection-failed.patch | 28
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0007-fix-do_compile-failed-on-mips.patch | 33
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0008-add-riscv-support.patch | 50
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0009-mozjs-fix-coredump-caused-by-getenv.patch | 27
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0010-format-overflow.patch | 21
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0011-To-fix-build-error-on-arm32BE.patch | 28
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0012-JS_PUBLIC_API.patch | 55
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0013-riscv-Disable-atomic-operations.patch | 38
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0014-remove-JS_VOLATIME_ARM.patch | 111
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/mipsarchn32/0001-fix-compiling-failure-on-mips64-n32-bsp.patch | 80
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/musl/0001-support-musl.patch | 98
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/musl/0002-js-Fix-build-with-musl.patch | 31
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs_60.9.0.bb | 147
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-multimedia/kmsxx/kmsxx_git.bb | 35
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-support/nvmetcli/nvmetcli_0.7.bb | 39
-rw-r--r-- meta-oe/dynamic-layers/meta-python/recipes-support/smem/smem_1.5.bb | 8
-rw-r--r-- meta-oe/dynamic-layers/networking-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend | 8
-rw-r--r-- meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.4.bb | 36
-rw-r--r-- meta-oe/dynamic-layers/perl-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend | 2
-rw-r--r-- meta-oe/dynamic-layers/perl-layer/recipes-support/debsums/debsums_2.2.2.bb | 56
-rw-r--r-- meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/files/0001-Fix-system-header-includes.patch | 44
-rw-r--r-- meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/rasdaemon_0.8.0.bb (renamed from meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/rasdaemon_0.6.7.bb) | 28
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/10-adbd-configfs.conf | 4
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-cleanup | 24
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-setup | 35
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-start | 7
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs_1.0.bb | 35
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf/android-gadget-setup | 37
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf_1.0.bb | 18
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0001-Fixes-for-yocto-build.patch | 164
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0002-android-tools-modifications-to-make-it-build-in-yoct.patch | 375
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0003-Update-usage-of-usbdevfs_urb-to-match-new-kernel-UAP.patch | 128
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0004-adb-Fix-build-on-big-endian-systems.patch | 50
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0005-adb-Allow-adbd-to-be-run-as-root.patch | 25
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/adbd.mk | 87
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/android-tools-adbd.service | 13
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Add-riscv64-support.patch | 653
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Added-missing-headers.patch | 46
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Nonnull.patch | 22
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Vector-cast.patch | 16
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/add-missing-headers.patch | 43
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/20150704-CVE-2015-3239_dwarf_i.h.patch | 15
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/legacy_built-in_sync_functions.patch | 28
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/user_pt_regs.patch | 26
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-attribute-issue-with-gcc.patch | 19
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-build-on-non-x86.patch | 26
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-gettid-exception-declaration.patch | 26
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-standard-namespace-errors.patch | 24
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/hard-code-build-number.patch | 46
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/libusb-header-path.patch | 18
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/move-log-file-to-proper-dir.patch | 20
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/simg_dump-python3.patch | 64
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/stdatomic.patch | 66
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/stub-out-fastdeploy.patch | 95
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Add-riscv64-support.patch | 653
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Added-missing-headers.patch | 46
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Nonnull.patch | 22
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Vector-cast.patch | 16
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/add-missing-headers.patch | 43
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-attribute-issue-with-gcc.patch | 19
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-build-on-non-x86.patch | 26
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-gettid-exception-declaration.patch | 26
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-standard-namespace-errors.patch | 24
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/hard-code-build-number.patch | 46
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/libusb-header-path.patch | 18
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/move-log-file-to-proper-dir.patch | 20
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/simg_dump-python3.patch | 64
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/stdatomic.patch | 66
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/stub-out-fastdeploy.patch | 95
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/throw-exception-on-unknown-os.patch | 18
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/throw-exception-on-unknown-os.patch | 18
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/remount | 2
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/rules_yocto.mk | 1
-rw-r--r-- meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools_29.0.6.r14.bb | 194
132 files changed, 4747 insertions, 8465 deletions
diff --git a/meta-oe/dynamic-layers/gnome-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend b/meta-oe/dynamic-layers/gnome-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
index dcd88841f9..f15b36184d 100644
--- a/meta-oe/dynamic-layers/gnome-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
+++ b/meta-oe/dynamic-layers/gnome-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
@@ -1,3 +1,3 @@
-RDEPENDS_packagegroup-meta-oe-graphics +="\
+RDEPENDS:packagegroup-meta-oe-graphics +="\
openbox-xdgmenu \
"
diff --git a/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/7_6.diff b/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/7_6.diff
index 8ab5386dc1..cdb29f24dc 100644
--- a/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/7_6.diff
+++ b/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/7_6.diff
@@ -1,3 +1,5 @@
+Upstream-Status: Pending
+
=== modified file 'openbox-xdgmenu.c'
--- openbox-xdgmenu.c 2011-09-03 20:13:39 +0000
+++ openbox-xdgmenu.c 2013-12-28 17:41:04 +0000
diff --git a/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/fix-menu-generation.patch b/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/fix-menu-generation.patch
index 03b357263f..3def090eb1 100644
--- a/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/fix-menu-generation.patch
+++ b/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/fix-menu-generation.patch
@@ -1,3 +1,5 @@
+Upstream-Status: Pending
+
Index: openbox-xdgmenu-0.3/openbox-xdgmenu.c
===================================================================
--- openbox-xdgmenu-0.3.orig/openbox-xdgmenu.c
diff --git a/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/port-gnome-menus3.patch b/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/port-gnome-menus3.patch
index 3fcc319164..6d123482e8 100644
--- a/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/port-gnome-menus3.patch
+++ b/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu/port-gnome-menus3.patch
@@ -1,3 +1,5 @@
+Upstream-Status: Pending
+
Index: openbox-xdgmenu-0.3/openbox-xdgmenu.c
===================================================================
--- openbox-xdgmenu-0.3.orig/openbox-xdgmenu.c
diff --git a/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu_0.3.bb b/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu_0.3.bb
index 237154e295..ac197a631e 100644
--- a/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu_0.3.bb
+++ b/meta-oe/dynamic-layers/gnome-layer/recipes-graphics/openbox/openbox-xdgmenu_0.3.bb
@@ -1,11 +1,10 @@
SUMMARY = "Openbox configuration tool"
-AUTHOR = "Siegfried Gevatter"
HOMEPAGE = "https://launchpad.net/openbox-xdgmenu/"
SECTION = "x11/wm"
-LICENSE = "GPLv3+"
+LICENSE = "GPL-3.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=f27defe1e96c2e1ecd4e0c9be8967949"
DEPENDS = " \
- gnome-menus3 \
+ gnome-menus \
glib-2.0 \
"
PV = "0.3"
@@ -33,4 +32,4 @@ do_install() {
install -m 0755 openbox-xdgmenu ${D}${bindir}
}
-RDEPENDS_${PN} += "virtual/x-terminal-emulator"
+RDEPENDS:${PN} += "virtual-x-terminal-emulator"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-benchmark/speedtest-cli/speedtest-cli_2.1.3.bb b/meta-oe/dynamic-layers/meta-python/recipes-benchmark/speedtest-cli/speedtest-cli_2.1.3.bb
deleted file mode 100644
index 949b45ba8e..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-benchmark/speedtest-cli/speedtest-cli_2.1.3.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-SUMMARY = "Command line interface for testing internet bandwidth using speedtest.net"
-AUTHOR = "Matt Martz"
-
-LICENSE="Apache-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"
-
-inherit setuptools3
-
-SRC_URI = "git://github.com/sivel/speedtest-cli.git"
-SRCREV = "42e96b13dda2afabbcec2622612d13495a415caa"
-
-S = "${WORKDIR}/git"
-
-RDEPENDS_${PN} = "python3 python3-setuptools-scm"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-bsp/rwmem/rwmem/0001-include-missing-cstdint.patch b/meta-oe/dynamic-layers/meta-python/recipes-bsp/rwmem/rwmem/0001-include-missing-cstdint.patch
new file mode 100644
index 0000000000..0560daa4c2
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-bsp/rwmem/rwmem/0001-include-missing-cstdint.patch
@@ -0,0 +1,32 @@
+From 84e884f99e581515b49d8973538bb17e1e6c0dc0 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 26 Jan 2023 20:45:57 -0800
+Subject: [PATCH] include missing <cstdint>
+
+gcc 13 moved some includes around and as a result <cstdint> is no
+longer transitively included [1]. Explicitly include it for
+uint{32,64}_t.
+
+[1] https://gcc.gnu.org/gcc-13/porting_to.html#header-dep-changes
+
+Upstream-Status: Submitted [https://github.com/tomba/rwmem/pull/7]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ librwmem/helpers.h | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/librwmem/helpers.h b/librwmem/helpers.h
+index a0a738b..8d02c9c 100644
+--- a/librwmem/helpers.h
++++ b/librwmem/helpers.h
+@@ -1,6 +1,7 @@
+ #pragma once
+
+ #include <cerrno>
++#include <cstdint>
+ #include <string>
+ #include <vector>
+ #include <string.h>
+--
+2.39.1
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-bsp/rwmem/rwmem_1.2.bb b/meta-oe/dynamic-layers/meta-python/recipes-bsp/rwmem/rwmem_1.2.bb
index 065243ccfe..90c9a7147c 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-bsp/rwmem/rwmem_1.2.bb
+++ b/meta-oe/dynamic-layers/meta-python/recipes-bsp/rwmem/rwmem_1.2.bb
@@ -8,28 +8,28 @@ can thus be used to access devices which have memory mapped registers. \
\
In i2c mode rwmem accesses an i2c peripheral by sending i2c messages to it."
-LICENSE = "GPLv2+"
+LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://LICENSE;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-DEPENDS += "python3 python3-pybind11"
+DEPENDS += "fmt libinih"
-PV .= "+git${SRCPV}"
+PV .= "+git"
-SRCREV_rwmem = "3ec3e421211b58e766651c2e3a3a21acf14a1906"
-SRCREV_inih = "4b10c654051a86556dfdb634c891b6c3224c4109"
+SRCREV = "8416326777b2aada0706539b8f9f6acefa476b16"
-SRCREV_FORMAT = "rwmem_inih"
-
-SRC_URI = " \
- git://github.com/tomba/rwmem.git;protocol=https;name=rwmem \
- git://github.com/benhoyt/inih.git;protocol=https;name=inih;nobranch=1;destsuffix=git/ext/inih \
-"
+SRC_URI = "git://github.com/tomba/rwmem.git;protocol=https;name=rwmem;branch=master \
+ file://0001-include-missing-cstdint.patch"
S = "${WORKDIR}/git"
-inherit cmake pkgconfig
+inherit meson pkgconfig python3native
+
+PACKAGECONFIG ?= "python static"
+PACKAGECONFIG[python] = "-Dpyrwmem=enabled,-Dpyrwmem=disabled,cmake-native python3 python3-pybind11"
+PACKAGECONFIG[static] = "-Dstatic-libc=true,-Dstatic-libc=false,"
-do_install() {
- install -D -m 0755 ${B}/bin/rwmem ${D}${bindir}/rwmem
- install -D -m 0644 ${B}/lib/librwmem.a ${D}${libdir}/librwmem.a
+do_install:append() {
+ install -D -m 0644 ${B}/librwmem/librwmem.a ${D}${libdir}/librwmem.a
}
+
+FILES:${PN} += "${PYTHON_SITEPACKAGES_DIR}/pyrwmem"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Fix-build-on-32bit-arches-with-64bit-time_t.patch b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Fix-build-on-32bit-arches-with-64bit-time_t.patch
index 0e38f7d8ec..d4c8a3d7fc 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Fix-build-on-32bit-arches-with-64bit-time_t.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Fix-build-on-32bit-arches-with-64bit-time_t.patch
@@ -10,6 +10,8 @@ input.h [1]
Signed-off-by: Khem Raj <raj.khem@gmail.com>
---
+Upstream-Status: Pending
+
plugins/devinput.c | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Makefile.am-do-not-clobber-PYTHONPATH-from-build-env.patch b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Makefile.am-do-not-clobber-PYTHONPATH-from-build-env.patch
new file mode 100644
index 0000000000..11f6485b27
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Makefile.am-do-not-clobber-PYTHONPATH-from-build-env.patch
@@ -0,0 +1,57 @@
+From 5e3b74927b4fef03d91518d235e9e3ba8cd7ab2e Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Wed, 9 Nov 2022 20:49:41 +0100
+Subject: [PATCH] Makefile.am: do not clobber PYTHONPATH from build environment
+
+This environment variable has special significance for python,
+and so lirc's variable has to be named something else.
+
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+---
+Upstream-Status: Pending
+
+ Makefile.am | 2 +-
+ pylint.mak | 2 +-
+ tools/Makefile.am | 2 +-
+ 3 files changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/Makefile.am b/Makefile.am
+index 6718af1..fae423e 100644
+--- a/Makefile.am
++++ b/Makefile.am
+@@ -128,7 +128,7 @@ endif
+
+ pylint: .phony
+ $(MAKE) -C tools pylint
+- -PYTHONPATH=$(PYTHONPATH) $(PYLINT) --rcfile=pylint.conf \
++ -PYTHONPATH=$(LIRCPYTHONPATH) $(PYLINT) --rcfile=pylint.conf \
+ --msg-template='$(pylint_template)' $(py_PYTHON)
+
+ pep8: $(py_PYTHON)
+diff --git a/pylint.mak b/pylint.mak
+index bf427ab..2692951 100644
+--- a/pylint.mak
++++ b/pylint.mak
+@@ -1,5 +1,5 @@
+ PYTHONPATH1 = $(abs_top_srcdir)/python-pkg/lirc:
+ PYTHONPATH2 = $(abs_top_srcdir)/python-pkg/lirc/lib/.libs
+-PYTHONPATH = $(PYTHONPATH1):$(PYTHONPATH2)
++LIRCPYTHONPATH = $(PYTHONPATH1):$(PYTHONPATH2)
+ PYLINT = python3-pylint
+ pylint_template = {path}:{line}: [{msg_id}({symbol}), {obj}] {msg}
+diff --git a/tools/Makefile.am b/tools/Makefile.am
+index 85d1fd0..96b17f8 100644
+--- a/tools/Makefile.am
++++ b/tools/Makefile.am
+@@ -142,7 +142,7 @@ force-pylint: .phony
+
+ pylint: .pylint-stamp
+ .pylint-stamp: $(py_sources)
+- -PYTHONPATH=$(PYTHONPATH) $(PYLINT) --rcfile=../pylint.conf \
++ -PYTHONPATH=$(LIRCPYTHONPATH) $(PYLINT) --rcfile=../pylint.conf \
+ --msg-template='$(pylint_template)' $? && touch $@
+
+ .phony:
+--
+2.30.2
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Unbolt-ubuntu-hack.patch b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Unbolt-ubuntu-hack.patch
new file mode 100644
index 0000000000..73bacc9139
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-Unbolt-ubuntu-hack.patch
@@ -0,0 +1,26 @@
+From ca126a2832aaff0deef3ba7eaf411dd0dc43b068 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 16 Mar 2023 11:31:14 -0700
+Subject: [PATCH] Unbolt ubuntu hack
+
+This bites during cross compiling where the target is different than
+build host and build host might be ubuntu but that does not matter in
+cross compilation case. This fails builds when usrmerge feature is used
+
+Upstream-Status: Inappropriate [ Cross-compile specific ]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ configure.ac | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+--- a/configure.ac
++++ b/configure.ac
+@@ -429,7 +429,7 @@ AC_CHECK_LIB([udev], [udev_device_new_fr
+ ])
+
+ dnl Ubuntu's systemd pkg-config seems broken beyond repair. So:
+-kernelversion=`cat /proc/version || echo "non-linux"`
++kernelversion="cross-compiled"
+ AS_CASE([$kernelversion],
+ [*Ubuntu*],[
+ AC_MSG_NOTICE([Hardwiring Ubuntu systemd setup])
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-mplay-Fix-build-with-musl.patch b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-mplay-Fix-build-with-musl.patch
new file mode 100644
index 0000000000..48cf7a355c
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/0001-mplay-Fix-build-with-musl.patch
@@ -0,0 +1,44 @@
+From e9e9027d7a324e1ce5e0cb06d4eb51847262a09d Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 28 Aug 2022 12:26:52 -0700
+Subject: [PATCH] mplay: Fix build with musl
+
+pthread_t is an opaque type, therefore typecast it to avoid warnings on
+musl
+
+Fixes
+mplay.c:200:12: error: incompatible integer to pointer conversion initializing 'pthread_t' (aka 'struct __pthread *') with an expression of type 'int' [-Wint-conversion]
+| .tid = -1
+| ^~
+
+Upstream-Status: Submitted [https://sourceforge.net/p/lirc/git/merge-requests/47/]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ plugins/mplay.c | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/plugins/mplay.c b/plugins/mplay.c
+index d6d9619..5b9eb4b 100644
+--- a/plugins/mplay.c
++++ b/plugins/mplay.c
+@@ -197,7 +197,7 @@ static struct {
+ .latest_button = MPLAY_CODE_ERROR,
+ .fd = -1,
+ .pipefd = { -1, -1 },
+- .tid = -1
++ .tid = (pthread_t)-1
+ };
+
+ /**
+@@ -788,7 +788,7 @@ int mplayfamily_deinit(void)
+ return 0;
+ }
+ pthread_join(mplayfamily_local_data.tid, NULL);
+- mplayfamily_local_data.tid = -1;
++ mplayfamily_local_data.tid = (pthread_t)-1;
+ }
+ if (mplayfamily_local_data.pipefd[0] != -1) {
+ close(mplayfamily_local_data.pipefd[0]);
+--
+2.37.2
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/fix_build_errors.patch b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/fix_build_errors.patch
index 41353dbbdc..ed840cd098 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/fix_build_errors.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc/fix_build_errors.patch
@@ -1,3 +1,5 @@
+Upstream-Status: Pending
+
diff --git a/configure.ac b/configure.ac
index 58347d8..8c7fca2 100644
--- a/configure.ac
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc_0.10.1.bb b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc_0.10.1.bb
deleted file mode 100644
index b68303241e..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc_0.10.1.bb
+++ /dev/null
@@ -1,110 +0,0 @@
-DESCRIPTION = "LIRC is a package that allows you to decode and send infra-red signals of many commonly used remote controls."
-DESCRIPTION_append_lirc = " This package contains the lirc daemon, libraries and tools."
-DESCRIPTION_append_lirc-exec = " This package contains a daemon that runs programs on IR signals."
-DESCRIPTION_append_lirc-remotes = " This package contains some config files for remotes."
-DESCRIPTION_append_lirc-nslu2example = " This package contains a working config for RC5 remotes and a modified NSLU2."
-HOMEPAGE = "http://www.lirc.org"
-SECTION = "console/network"
-LICENSE = "GPLv2"
-DEPENDS = "libxslt-native alsa-lib libftdi libusb1 libusb-compat jack portaudio-v19 python3-pyyaml python3-setuptools-native"
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-
-SRC_URI = "http://prdownloads.sourceforge.net/lirc/lirc-${PV}.tar.bz2 \
- file://0001-Fix-build-on-32bit-arches-with-64bit-time_t.patch \
- file://fix_build_errors.patch \
- file://lircd.service \
- file://lircd.init \
- file://lircexec.init \
- file://lircd.conf \
- file://lirc_options.conf \
- file://lirc.tmpfiles \
-"
-SRC_URI[md5sum] = "86c3f8e4efaba10571addb8313d1e040"
-SRC_URI[sha256sum] = "8b753c60df2a7f5dcda2db72c38e448ca300c3b4f6000c1501fcb0bd5df414f2"
-
-SYSTEMD_PACKAGES = "lirc lirc-exec"
-SYSTEMD_SERVICE_${PN} = "lircd.service lircmd.service lircd-setup.service lircd-uinput.service"
-SYSTEMD_SERVICE_${PN}-exec = "irexec.service"
-SYSTEMD_AUTO_ENABLE_lirc = "enable"
-SYSTEMD_AUTO_ENABLE_lirc-exec = "enable"
-
-inherit autotools pkgconfig systemd python3native distutils-common-base
-
-PACKAGECONFIG[systemd] = "--with-systemdsystemunitdir=${systemd_unitdir}/system/,--without-systemdsystemunitdir,systemd"
-PACKAGECONFIG[x11] = "--with-x,--with-x=no,libx11,"
-
-PACKAGECONFIG ?= " \
- ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', ' systemd', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'x11', ' x11', '', d)} \
-"
-CACHED_CONFIGUREVARS = "HAVE_WORKING_POLL=yes"
-
-#EXTRA_OEMAKE = 'SUBDIRS="lib daemons tools"'
-
-# Ensure python-pkg/VERSION exists
-do_configure_append() {
- cp ${S}/VERSION ${S}/python-pkg/
-}
-
-# Create PYTHON_TARBALL which LIRC needs for install-nodist_pkgdataDATA
-do_install_prepend() {
- rm -rf ${WORKDIR}/${PN}-${PV}/python-pkg/dist/
- mkdir ${WORKDIR}/${PN}-${PV}/python-pkg/dist/
- tar --exclude='${WORKDIR}/${PN}-${PV}/python-pkg/*' -czf ${WORKDIR}/${PN}-${PV}/python-pkg/dist/${PN}-${PV}.tar.gz ${S}
-}
-
-# In code, path to python is a variable that is replaced with path to native version of it
-# during the configure stage, e.g ../recipe-sysroot-native/usr/bin/python3-native/python3.
-# Replace it with #!/usr/bin/env python3
-do_install_append() {
- sed -i '1c#!/usr/bin/env python3' ${D}${bindir}/lirc-setup \
- ${D}${PYTHON_SITEPACKAGES_DIR}/lirc-setup/lirc-setup \
- ${D}${bindir}/irtext2udp \
- ${D}${bindir}/lirc-init-db \
- ${D}${bindir}/irdb-get \
- ${D}${bindir}/pronto2lirc \
- ${D}${sbindir}/lircd-setup
-
- install -m 0755 -d ${D}${sysconfdir}
- install -m 0755 -d ${D}${sysconfdir}/lirc
- install -m 0755 -d ${D}${systemd_unitdir}/system
- install -m 0755 -d ${D}${libdir}/tmpfiles.d
- install -m 0644 ${WORKDIR}/lircd.conf ${D}${sysconfdir}/lirc/
- install -m 0644 ${WORKDIR}/lirc_options.conf ${D}${sysconfdir}/lirc/
- install -m 0644 ${WORKDIR}/lircd.service ${D}${systemd_unitdir}/system/
- install -m 0755 ${WORKDIR}/lircexec.init ${D}${systemd_unitdir}/system/
- install -m 0644 ${WORKDIR}/lirc.tmpfiles ${D}${libdir}/tmpfiles.d/lirc.conf
- rm -rf ${D}${libdir}/lirc/plugins/*.la
- rmdir ${D}/var/run/lirc ${D}/var/run
- chown -R root:root ${D}${datadir}/lirc/contrib
-}
-
-PACKAGES =+ "${PN}-contrib ${PN}-exec ${PN}-plugins ${PN}-python"
-
-RDEPENDS_${PN} = "bash python3"
-RDEPENDS_${PN}-exec = "${PN}"
-RDEPENDS_${PN}-python = "python3-shell python3-pyyaml python3-datetime python3-netclient python3-stringold"
-
-RRECOMMENDS_${PN} = "${PN}-exec ${PN}-plugins"
-
-FILES_${PN}-plugins = "${libdir}/lirc/plugins/*.so ${datadir}/lirc/configs"
-FILES_${PN}-contrib = "${datadir}/lirc/contrib"
-FILES_${PN}-exec = "${bindir}/irexec ${sysconfdir}/lircexec ${systemd_unitdir}/system/irexec.service"
-FILES_${PN} += "${systemd_unitdir}/system/lircexec.init"
-FILES_${PN} += "${systemd_unitdir}/system/lircd.service"
-FILES_${PN} += "${systemd_unitdir}/system/lircd.socket"
-FILES_${PN} += "${libdir}/tmpfiles.d/lirc.conf"
-FILES_${PN}-dbg += "${libdir}/lirc/plugins/.debug"
-FILES_${PN}-python += "${bindir}/irdb-get ${bindir}/irtext2udp ${bindir}/lircd-setup ${bindir}/pronto2lirc ${libdir}/python*/site-packages"
-
-INITSCRIPT_PACKAGES = "lirc lirc-exec"
-INITSCRIPT_NAME_lirc-exec = "lircexec"
-INITSCRIPT_PARAMS_lirc-exec = "defaults 21"
-
-# this is for distributions that don't use udev
-pkg_postinst_${PN}_append() {
- if [ ! -c $D/dev/lirc -a ! -f /sbin/udevd ]; then mknod $D/dev/lirc c 61 0; fi
-}
-
-SECURITY_CFLAGS = "${SECURITY_NO_PIE_CFLAGS}"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc_0.10.2.bb b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc_0.10.2.bb
new file mode 100644
index 0000000000..d348ef8be9
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/lirc/lirc_0.10.2.bb
@@ -0,0 +1,115 @@
+DESCRIPTION = "LIRC is a package that allows you to decode and send infra-red signals of many commonly used remote controls."
+DESCRIPTION:append:lirc = " This package contains the lirc daemon, libraries and tools."
+DESCRIPTION:append:lirc-exec = " This package contains a daemon that runs programs on IR signals."
+DESCRIPTION:append:lirc-remotes = " This package contains some config files for remotes."
+DESCRIPTION:append:lirc-nslu2example = " This package contains a working config for RC5 remotes and a modified NSLU2."
+HOMEPAGE = "http://www.lirc.org"
+SECTION = "console/network"
+LICENSE = "GPL-2.0-only"
+DEPENDS = "libxslt-native alsa-lib libftdi libusb1 libusb-compat jack portaudio-v19 python3-pyyaml python3-setuptools-native"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
+
+SRC_URI = "http://prdownloads.sourceforge.net/lirc/lirc-${PV}.tar.bz2 \
+ file://0001-Fix-build-on-32bit-arches-with-64bit-time_t.patch \
+ file://fix_build_errors.patch \
+ file://0001-mplay-Fix-build-with-musl.patch \
+ file://lircd.service \
+ file://lircd.init \
+ file://lircexec.init \
+ file://lircd.conf \
+ file://lirc_options.conf \
+ file://lirc.tmpfiles \
+ file://0001-Makefile.am-do-not-clobber-PYTHONPATH-from-build-env.patch \
+ file://0001-Unbolt-ubuntu-hack.patch \
+ "
+SRC_URI[sha256sum] = "3d44ec8274881cf262f160805641f0827ffcc20ade0d85e7e6f3b90e0d3d222a"
+
+SYSTEMD_PACKAGES = "lirc lirc-exec"
+SYSTEMD_SERVICE:${PN} = "lircd.service lircmd.service lircd-setup.service lircd-uinput.service"
+SYSTEMD_SERVICE:${PN}-exec = "irexec.service"
+SYSTEMD_AUTO_ENABLE:lirc = "enable"
+SYSTEMD_AUTO_ENABLE:lirc-exec = "enable"
+
+inherit autotools pkgconfig systemd python3native setuptools3-base
+
+PACKAGECONFIG[systemd] = "--with-systemdsystemunitdir=${systemd_unitdir}/system/,--without-systemdsystemunitdir,systemd"
+PACKAGECONFIG[x11] = "--with-x,--with-x=no,libx11,"
+
+PACKAGECONFIG ?= " \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', ' systemd', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'x11', ' x11', '', d)} \
+"
+CACHED_CONFIGUREVARS = "HAVE_WORKING_POLL=yes SH_PATH=/bin/sh"
+
+#EXTRA_OEMAKE = 'SUBDIRS="lib daemons tools"'
+
+# Ensure python-pkg/VERSION exists
+do_configure:append() {
+ cp ${S}/VERSION ${S}/python-pkg/
+}
+
+# Create PYTHON_TARBALL which LIRC needs for install-nodist_pkgdataDATA
+do_install:prepend() {
+ rm -rf ${S}/python-pkg/dist/
+ mkdir ${S}/python-pkg/dist/
+ tar --exclude='${S}/python-pkg/*' -czf ${S}/python-pkg/dist/${BP}.tar.gz ${S}
+}
+
+# In code, path to python is a variable that is replaced with path to native version of it
+# during the configure stage, e.g ../recipe-sysroot-native/usr/bin/python3-native/python3.
+# Replace it with #!/usr/bin/env python3
+do_install:append() {
+ sed -i '1c#!/usr/bin/env python3' ${D}${bindir}/lirc-setup \
+ ${D}${PYTHON_SITEPACKAGES_DIR}/lirc-setup/lirc-setup \
+ ${D}${bindir}/irtext2udp \
+ ${D}${bindir}/lirc-init-db \
+ ${D}${bindir}/irdb-get \
+ ${D}${bindir}/pronto2lirc \
+ ${D}${sbindir}/lircd-setup
+
+ install -m 0755 -d ${D}${sysconfdir}
+ install -m 0755 -d ${D}${sysconfdir}/lirc
+ install -m 0644 ${WORKDIR}/lircd.conf ${D}${sysconfdir}/lirc/
+ install -m 0644 ${WORKDIR}/lirc_options.conf ${D}${sysconfdir}/lirc/
+ if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
+ install -m 0755 -d ${D}${systemd_unitdir}/system ${D}${libdir}/tmpfiles.d
+ install -m 0644 ${WORKDIR}/lircd.service ${D}${systemd_unitdir}/system/
+ install -m 0755 ${WORKDIR}/lircexec.init ${D}${systemd_unitdir}/system/
+ install -m 0644 ${WORKDIR}/lirc.tmpfiles ${D}${libdir}/tmpfiles.d/lirc.conf
+ else
+ rm -rf ${D}/lib
+ fi
+ rm -rf ${D}${libdir}/lirc/plugins/*.la
+ rmdir ${D}/var/run/lirc ${D}/var/run
+ chown -R root:root ${D}${datadir}/lirc/contrib
+}
+
+PACKAGES =+ "${PN}-contrib ${PN}-exec ${PN}-plugins ${PN}-python"
+
+RDEPENDS:${PN} = "bash python3"
+RDEPENDS:${PN}-exec = "${PN}"
+RDEPENDS:${PN}-python = "python3-shell python3-pyyaml python3-datetime python3-netclient python3-stringold"
+
+RRECOMMENDS:${PN} = "${PN}-exec ${PN}-plugins"
+
+FILES:${PN}-plugins = "${libdir}/lirc/plugins/*.so ${datadir}/lirc/configs"
+FILES:${PN}-contrib = "${datadir}/lirc/contrib"
+FILES:${PN}-exec = "${bindir}/irexec ${sysconfdir}/lircexec ${systemd_unitdir}/system/irexec.service"
+FILES:${PN} += "${systemd_unitdir}/system/lircexec.init"
+FILES:${PN} += "${systemd_unitdir}/system/lircd.service"
+FILES:${PN} += "${systemd_unitdir}/system/lircd.socket"
+FILES:${PN} += "${libdir}/tmpfiles.d/lirc.conf"
+FILES:${PN}-dbg += "${libdir}/lirc/plugins/.debug"
+FILES:${PN}-python += "${bindir}/irdb-get ${bindir}/irtext2udp ${bindir}/lircd-setup ${bindir}/pronto2lirc ${PYTHON_SITEPACKAGES_DIR}"
+
+INITSCRIPT_PACKAGES = "lirc lirc-exec"
+INITSCRIPT_NAME:lirc-exec = "lircexec"
+INITSCRIPT_PARAMS:lirc-exec = "defaults 21"
+
+# this is for distributions that don't use udev
+pkg_postinst:${PN}:append() {
+ if [ ! -c $D/dev/lirc -a ! -f /sbin/udevd ]; then mknod $D/dev/lirc c 61 0; fi
+}
+
+SECURITY_CFLAGS = "${SECURITY_NO_PIE_CFLAGS}"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-don-t-fail-if-GLOB_BRACE-is-not-defined.patch b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-don-t-fail-if-GLOB_BRACE-is-not-defined.patch
new file mode 100644
index 0000000000..cabceae84d
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-don-t-fail-if-GLOB_BRACE-is-not-defined.patch
@@ -0,0 +1,32 @@
+From 0ea11f520a8b4453e60eaf0679b9feb757024422 Mon Sep 17 00:00:00 2001
+From: Zang Ruochen <zangrc.fnst@cn.fujitsu.com>
+Date: Fri, 25 Dec 2020 11:41:43 +0900
+Subject: [PATCH] don't fail if GLOB_BRACE is not defined
+
+Upstream-Status: Pending
+
+Signed-off-by: Zang Ruochen <zangrc.fnst@cn.fujitsu.com>
+---
+ src/util.c | 6 ++++++
+ 1 file changed, 6 insertions(+)
+
+diff --git a/src/util.c b/src/util.c
+index 36eb896a..ee13ec44 100644
+--- a/src/util.c
++++ b/src/util.c
+@@ -35,6 +35,12 @@
+ #include "names.h"
+ #include "yaml-helpers.h"
+
++/* Don't fail if the standard library
++ * doesn't provide brace expansion */
++#ifndef GLOB_BRACE
++#define GLOB_BRACE 0
++#endif
++
+ GHashTable*
+ wifi_frequency_24;
+
+--
+2.25.1
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-meson.build-drop-unnecessary-build-dependencies.patch b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-meson.build-drop-unnecessary-build-dependencies.patch
new file mode 100644
index 0000000000..4f385e917a
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-meson.build-drop-unnecessary-build-dependencies.patch
@@ -0,0 +1,58 @@
+From d3aa30f5cd7ba375e006a755752acbcfcd619452 Mon Sep 17 00:00:00 2001
+From: Yi Zhao <yi.zhao@windriver.com>
+Date: Wed, 6 Mar 2024 19:27:15 +0800
+Subject: [PATCH] meson.build: drop unnecessary build dependencies
+
+The pytest and pycoverage are required by meson test but not for
+building. Mark them as 'required: false' to get rid of unnecessary
+build dependencies.
+
+Upstream-Status: Inappropriate [oe specific]
+
+Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
+---
+ meson.build | 8 ++++++--
+ 1 file changed, 6 insertions(+), 2 deletions(-)
+
+diff --git a/meson.build b/meson.build
+index 9556836a..30f33fe2 100644
+--- a/meson.build
++++ b/meson.build
+@@ -25,8 +25,8 @@ bash_completions_dir = completions.get_variable(pkgconfig: 'completionsdir', def
+ # Order: Fedora/Mageia/openSUSE || Debian/Ubuntu
+ pyflakes = find_program('pyflakes-3', 'pyflakes3', required: false)
+ pycodestyle = find_program('pycodestyle-3', 'pycodestyle', 'pep8', required: false)
+-pytest = find_program('pytest-3', 'pytest3') # also requires the pytest-cov plugin
+-pycoverage = find_program('coverage-3', 'python3-coverage')
++pytest = find_program('pytest-3', 'pytest3', required: false) # also requires the pytest-cov plugin
++pycoverage = find_program('coverage-3', 'python3-coverage', required: false)
+ pandoc = find_program('pandoc', required: false)
+ find = find_program('find')
+
+@@ -75,6 +75,7 @@ if get_option('unit_testing')
+ endif
+
+ #FIXME: exclude doc/env/
++if pyflakes.found() and pycodestyle.found()
+ test('linting',
+ pyflakes,
+ timeout: 100,
+@@ -91,7 +92,9 @@ test('legacy-tests',
+ find_program('tests/cli_legacy.py'),
+ timeout: 600,
+ env: test_env)
++endif
+ #TODO: split out dbus tests into own test() instance, to run in parallel
++if pycoverage.found()
+ test('unit-tests',
+ pycoverage,
+ args: ['run', '-a', '-m', 'pytest', '-s', '-v', '--cov-append', meson.current_source_dir()],
+@@ -143,4 +146,5 @@ if get_option('b_coverage')
+ priority: -99, # run last
+ is_parallel: false)
+ endif
++endif
+
+--
+2.25.1
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-networkd.c-define-scope-specific-to-case-statement.patch b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-networkd.c-define-scope-specific-to-case-statement.patch
new file mode 100644
index 0000000000..9f01108a20
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0001-networkd.c-define-scope-specific-to-case-statement.patch
@@ -0,0 +1,47 @@
+From 6e3dd61bf90a7ca8c36c5b95943cbff7c1ad3c2d Mon Sep 17 00:00:00 2001
+From: Yi Zhao <yi.zhao@windriver.com>
+Date: Wed, 6 Mar 2024 16:12:31 +0800
+Subject: [PATCH] networkd.c: define scope specific to case statement
+
+Per [1], define a scope specific to case statement to fix build with
+clang.
+
+Fixes:
+../git/src/networkd.c:544:13: error: expected expression
+ 544 | gchar* first = g_strcmp0(def->id, def->veth_peer_link->id) < 0 ? def->id : def->veth_peer_link->id;
+ | ^
+../git/src/networkd.c:545:17: error: use of undeclared identifier 'first'
+ 545 | if (first != def->id) {
+ | ^
+
+[1] https://stackoverflow.com/questions/92396/why-cant-variables-be-declared-in-a-switch-statement
+
+Upstream-Status: Pending
+
+Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
+---
+ src/networkd.c | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/src/networkd.c b/src/networkd.c
+index 25121c48..5eb9c0fe 100644
+--- a/src/networkd.c
++++ b/src/networkd.c
+@@ -541,12 +541,14 @@ write_netdev_file(const NetplanNetDefinition* def, const char* rootdir, const ch
+ * and, if the selected name is the name of the netdef being written, we generate
+ * the .netdev file. Otherwise we skip the netdef.
+ */
++ {
+ gchar* first = g_strcmp0(def->id, def->veth_peer_link->id) < 0 ? def->id : def->veth_peer_link->id;
+ if (first != def->id) {
+ g_string_free(s, TRUE);
+ return;
+ }
+ g_string_append_printf(s, "Kind=veth\n\n[Peer]\nName=%s\n", def->veth_peer_link->id);
++ }
+ break;
+
+ case NETPLAN_DEF_TYPE_TUNNEL:
+--
+2.25.1
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0002-meson.build-do-not-use-Werror.patch b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0002-meson.build-do-not-use-Werror.patch
new file mode 100644
index 0000000000..663a80ecde
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan/0002-meson.build-do-not-use-Werror.patch
@@ -0,0 +1,29 @@
+From 668ee79f39614ad758edd44c42b8b0eff57877cf Mon Sep 17 00:00:00 2001
+From: Alexander Kanavin <alex@linutronix.de>
+Date: Sun, 3 Oct 2021 21:52:16 +0200
+Subject: [PATCH] meson.build: do not use -Werror
+
+Upstream-Status: Inappropriate [oe specific]
+
+Signed-off-by: Alexander Kanavin <alex@linutronix.de>
+Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
+---
+ meson.build | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/meson.build b/meson.build
+index 30f33fe2..0b214795 100644
+--- a/meson.build
++++ b/meson.build
+@@ -4,7 +4,7 @@ project('netplan', 'c',
+ default_options: [
+ 'c_std=c99',
+ 'warning_level=2',
+- 'werror=true',
++ 'werror=false',
+ ],
+ meson_version: '>= 0.61.0',
+ )
+--
+2.25.1
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan_1.0.bb b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan_1.0.bb
new file mode 100644
index 0000000000..229414718c
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/netplan/netplan_1.0.bb
@@ -0,0 +1,52 @@
+SUMMARY = "The network configuration abstraction renderer"
+DESCRIPTION = "Netplan is a utility for easily configuring networking on a \
+linux system. You simply create a YAML description of the required network \
+interfaces and what each should be configured to do. From this description \
+Netplan will generate all the necessary configuration for your chosen renderer \
+tool."
+HOMEPAGE = "https://netplan.io"
+SECTION = "net/misc"
+
+LICENSE = "GPL-3.0-only"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+inherit meson pkgconfig systemd python3targetconfig features_check
+
+REQUIRED_DISTRO_FEATURES = "systemd"
+
+SRC_URI = "git://github.com/CanonicalLtd/netplan.git;branch=main;protocol=https \
+ file://0001-meson.build-drop-unnecessary-build-dependencies.patch \
+ file://0002-meson.build-do-not-use-Werror.patch \
+ "
+
+SRC_URI:append:libc-musl = " file://0001-don-t-fail-if-GLOB_BRACE-is-not-defined.patch"
+SRC_URI:append:toolchain-clang = " file://0001-networkd.c-define-scope-specific-to-case-statement.patch"
+
+SRCREV = "45f7cd1569896d9e316c130bf5c60b7ccfc8211d"
+
+S = "${WORKDIR}/git"
+
+DEPENDS = "glib-2.0 libyaml util-linux-libuuid \
+ systemd python3-cffi-native \
+ "
+
+EXTRA_OEMESON = "-Dunit_testing=false"
+
+RDEPENDS:${PN} = "python3-core python3-netifaces python3-pyyaml \
+ python3-dbus python3-rich python3-cffi \
+ util-linux-libuuid libnetplan \
+ "
+
+do_install:append() {
+ install -d -m 755 ${D}${sysconfdir}/netplan
+}
+
+PACKAGES += "${PN}-dbus libnetplan"
+
+FILES:libnetplan = "${libdir}/libnetplan.so.*"
+FILES:${PN} = "${sbindir} ${libexecdir}/netplan/generate \
+ ${datadir}/netplan ${datadir}/bash-completion \
+ ${systemd_unitdir} ${PYTHON_SITEPACKAGES_DIR} \
+ ${sysconfdir}/netplan \
+ "
+FILES:${PN}-dbus = "${libexecdir}/netplan/netplan-dbus ${datadir}/dbus-1"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/thingsboard-gateway/thingsboard-gateway_2.5.2.bb b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/thingsboard-gateway/thingsboard-gateway_3.4.6.bb
index 2f0ef16c80..0d0f6fea48 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-connectivity/thingsboard-gateway/thingsboard-gateway_2.5.2.bb
+++ b/meta-oe/dynamic-layers/meta-python/recipes-connectivity/thingsboard-gateway/thingsboard-gateway_3.4.6.bb
@@ -7,14 +7,13 @@ HOMEPAGE = "https://thingsboard.io/"
LICENSE = "Apache-2.0"
LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/Apache-2.0;md5=89aea4e17d99a7cacdbeed46a0096b10"
-SRC_URI[md5sum] = "469c8b5cd1c16c20ef40f0a97a3a0fda"
-SRC_URI[sha256sum] = "b328f4e315c3541ac80a4931974a34a81afe4d1f382f48e8604669a55816c0d7"
+SRC_URI[sha256sum] = "fc24bb674308f05d963a1dbed8d0b38ead77424ad7cf032a2652732af48f1336"
inherit pypi setuptools3
PYPI_PACKAGE = "thingsboard-gateway"
-RDEPENDS_${PN} += " python3-jsonpath-rw \
+RDEPENDS:${PN} += " python3-jsonpath-rw \
python3-regex \
python3-paho-mqtt \
python3-pyyaml \
@@ -45,14 +44,14 @@ SRC_URI += "file://bacnet.json \
inherit systemd
SYSTEMD_PACKAGES = "${PN}"
-SYSTEMD_SERVICE_${PN} = "thingsboard-gateway.service"
+SYSTEMD_SERVICE:${PN} = "thingsboard-gateway.service"
-FILES_${PN} += "/etc \
+FILES:${PN} += "/etc \
/lib \
/usr \
"
-do_install_append(){
+do_install:append(){
install -d ${D}${sysconfdir}/thingsboard-gateway/config
@@ -65,4 +64,4 @@ do_install_append(){
install -d ${D}${systemd_unitdir}/system/
install -m 0644 ${WORKDIR}/thingsboard-gateway.service ${D}${systemd_system_unitdir}/thingsboard-gateway.service
-} \ No newline at end of file
+}
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-core/packagegroups/packagegroup-meta-oe.bbappend b/meta-oe/dynamic-layers/meta-python/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
index 50da5e5d0d..7fff849ecd 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
+++ b/meta-oe/dynamic-layers/meta-python/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
@@ -1,20 +1,18 @@
-RDEPENDS_packagegroup-meta-oe-devtools += "\
+RDEPENDS:packagegroup-meta-oe-devtools += "\
python3-distutils-extra \
rwmem \
- speedtest-cli \
- mongodb \
"
-RDEPENDS_packagegroup-meta-oe-connectivity += "\
+RDEPENDS:packagegroup-meta-oe-connectivity += "\
lirc \
"
-RDEPENDS_packagegroup-meta-oe-extended += "\
+RDEPENDS:packagegroup-meta-oe-extended += "\
lcdproc \
- mozjs \
"
-RDEPENDS_packagegroup-meta-oe-support += "\
+RDEPENDS:packagegroup-meta-oe-support += "\
+ nvmetcli \
smem \
"
-RDEPENDS_packagegroup-meta-oe-extended_remove_libc-musl = "lcdproc"
+RDEPENDS:packagegroup-meta-oe-extended:remove:libc-musl = "lcdproc"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Do-not-use-MINSIGSTKSZ.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Do-not-use-MINSIGSTKSZ.patch
deleted file mode 100644
index 0ee64e9c6c..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Do-not-use-MINSIGSTKSZ.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-From 027044a692b588ef586d495f65eb58b07cc711a3 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 11 May 2021 10:15:51 -0700
-Subject: [PATCH] Do not use MINSIGSTKSZ
-
-Since glibc 2.34+ MINSIGSTKSZ is no more a constant. So,
-let's hardwire this for now until better fix is found.
-64Kb should be good anyway
-
-Upstream-Status: Pending
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/mongo/stdx/thread.h | 5 +----
- 1 file changed, 1 insertion(+), 4 deletions(-)
-
-diff --git a/src/mongo/stdx/thread.h b/src/mongo/stdx/thread.h
-index f8058279e3..f31f309959 100644
---- a/src/mongo/stdx/thread.h
-+++ b/src/mongo/stdx/thread.h
-@@ -104,10 +104,7 @@ private:
- // . N Y : 4,344 | 13,048 | 7,352
- // . Y Y : 4,424 | 13,672 | 8,392
- // ( https://jira.mongodb.org/secure/attachment/233569/233569_stacktrace-writeup.txt )
-- static constexpr std::size_t kMongoMinSignalStackSize = std::size_t{64} << 10;
--
-- static constexpr std::size_t kStackSize =
-- std::max(kMongoMinSignalStackSize, std::size_t{MINSIGSTKSZ});
-+ static constexpr std::size_t kStackSize = std::size_t{64} << 10;
- std::unique_ptr<std::byte[]> _stackStorage = std::make_unique<std::byte[]>(kStackSize);
-
- #else // !MONGO_HAS_SIGALTSTACK
---
-2.31.1
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Fix-build-on-32bit.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Fix-build-on-32bit.patch
new file mode 100644
index 0000000000..4abc044151
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Fix-build-on-32bit.patch
@@ -0,0 +1,38 @@
+From b172ebe7e709b10338c1b260310dacc15c557cff Mon Sep 17 00:00:00 2001
+From: Martin Jansa <martin.jansa@gmail.com>
+Date: Fri, 22 Sep 2023 15:37:29 +0200
+Subject: [PATCH] Fix build on 32bit
+
+* fixes:
+ src/mongo/util/net/http_client_curl.cpp: In function 'size_t mongo::{anonymous}::ReadMemoryCallback(char*, size_t, size_t, void*)':
+ src/mongo/util/net/http_client_curl.cpp:172:21: error: no matching function for call to 'min(size_t, long unsigned int)'
+ 172 | std::min(size * nitems, static_cast<unsigned long>(bufReader->remaining()));
+ | ~~~~~~~~^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+* reported in:
+ https://jira.mongodb.org/browse/SERVER-73007
+ but will probably get closed like:
+ mongodb/0001-Fix-type-mismatch-on-32bit-arches.patch
+ submitted in:
+ https://jira.mongodb.org/browse/SERVER-74633
+ as they don't support 32bit builds
+
+Signed-off-by: Martin Jansa <martin.jansa@gmail.com>
+Upstream-Status: Pending
+---
+ src/mongo/util/net/http_client_curl.cpp | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/mongo/util/net/http_client_curl.cpp b/src/mongo/util/net/http_client_curl.cpp
+index 57290d0f8ed..f251fe2a550 100644
+--- a/src/mongo/util/net/http_client_curl.cpp
++++ b/src/mongo/util/net/http_client_curl.cpp
+@@ -169,7 +169,7 @@ size_t ReadMemoryCallback(char* buffer, size_t size, size_t nitems, void* instre
+
+ if (bufReader->remaining() > 0) {
+ size_t readSize =
+- std::min(size * nitems, static_cast<unsigned long>(bufReader->remaining()));
++ std::min(size * nitems, static_cast<size_t>(bufReader->remaining()));
+ auto buf = bufReader->readBytes(readSize);
+ memcpy(buffer, buf.rawData(), readSize);
+ ret = readSize;
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Fix-type-mismatch-on-32bit-arches.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Fix-type-mismatch-on-32bit-arches.patch
new file mode 100644
index 0000000000..def17995dc
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Fix-type-mismatch-on-32bit-arches.patch
@@ -0,0 +1,33 @@
+From 81eabea4e4da55cddfe8bcfcbc3759fa90948254 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Fri, 3 Mar 2023 14:13:29 -0800
+Subject: [PATCH] Fix type mismatch on 32bit arches
+
+std::set::size returns an unsigned integral type.
+The std::max call therefore gets (unsigned int, unsigned long) here.
+The types of the two arguments are not the same, so the call is
+ambiguous and there is no matching std::max overload for mismatched
+arguments: std::max expects both input arguments to be of the
+same type, e.g. max(int, int).
+
+Fixes
+src/mongo/util/processinfo_linux.cpp:424:16: error: no matching function for call to 'max'
+ return std::max(socketIds.size(), 1ul);
+
+Upstream-Status: Submitted [https://jira.mongodb.org/browse/SERVER-74633]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/mongo/util/processinfo_linux.cpp | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+--- a/src/mongo/util/processinfo_linux.cpp
++++ b/src/mongo/util/processinfo_linux.cpp
+@@ -421,7 +421,7 @@ public:
+
+ // On ARM64, the "physical id" field is unpopulated, causing there to be 0 sockets found. In
+ // this case, we default to 1.
+- return std::max(socketIds.size(), 1ul);
++ return std::max(static_cast<unsigned long>(socketIds.size()), 1ul);
+ }
+
+ /**
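
The same class of failure can be reproduced in isolation (illustrative code, not the actual processinfo_linux.cpp): std::set<T>::size() returns size_t, which is unsigned int on 32-bit, while the literal 1ul is unsigned long, so std::max cannot deduce its template argument until one side is cast.

    // illustrative only; fails to build on 32-bit without the cast
    #include <algorithm>
    #include <set>

    unsigned long socket_count(const std::set<unsigned>& socketIds) {
        // return std::max(socketIds.size(), 1ul);  // max(unsigned int, unsigned long)
        return std::max(static_cast<unsigned long>(socketIds.size()), 1ul);
    }
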
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-IntelRDFPMathLib20U1-Check-for-__DEFINED_wchar_t.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-IntelRDFPMathLib20U1-Check-for-__DEFINED_wchar_t.patch
index e636adc556..500e76bc2f 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-IntelRDFPMathLib20U1-Check-for-__DEFINED_wchar_t.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-IntelRDFPMathLib20U1-Check-for-__DEFINED_wchar_t.patch
@@ -13,6 +13,8 @@ typedef int wchar_t;
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Vincent Prince <vincent.prince.fr@gmail.com>
+
+Upstream-Status: Pending
---
src/third_party/IntelRDFPMathLib20U1/LIBRARY/src/bid_functions.h | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Tell-scons-to-use-build-settings-from-environment-va.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Tell-scons-to-use-build-settings-from-environment-va.patch
index b8a325295d..4d84d3d15b 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Tell-scons-to-use-build-settings-from-environment-va.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Tell-scons-to-use-build-settings-from-environment-va.patch
@@ -6,6 +6,8 @@ Subject: [PATCH 01/10] Tell scons to use build settings from environment
Signed-off-by: Sven Ebenfeld <sven.ebenfeld@gmail.com>
Signed-off-by: Vincent Prince <vincent.prince.fr@gmail.com>
+
+Upstream-Status: Pending
---
SConstruct | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-The-std-lib-unary-binary_function-base-classes-are-d.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-The-std-lib-unary-binary_function-base-classes-are-d.patch
new file mode 100644
index 0000000000..4594bec81a
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-The-std-lib-unary-binary_function-base-classes-are-d.patch
@@ -0,0 +1,40 @@
+From f9b55f5a1fab85bf73c95e6372779d6f50f75e84 Mon Sep 17 00:00:00 2001
+From: jzmaddock <john@johnmaddock.co.uk>
+Date: Mon, 11 Jul 2022 18:26:07 +0100
+Subject: [PATCH] The std lib unary/binary_function base classes are
+ deprecated/removed from libcpp15. Fixes
+ https://github.com/boostorg/container_hash/issues/24.
+
+Upstream-Status: Backport [https://github.com/boostorg/config/pull/440/commits/f0af4a9184457939b89110795ae2d293582c5f66]
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/third_party/boost-1.70.0/boost/config/stdlib/libcpp.hpp | 9 +++++++++
+ 1 file changed, 9 insertions(+)
+
+--- a/src/third_party/boost-1.70.0/boost/config/stdlib/libcpp.hpp
++++ b/src/third_party/boost-1.70.0/boost/config/stdlib/libcpp.hpp
+@@ -140,4 +140,13 @@
+ # define BOOST_NO_CXX14_HDR_SHARED_MUTEX
+ #endif
+
++#if _LIBCPP_VERSION >= 15000
++//
++// Unary function is now deprecated in C++11 and later:
++//
++#if __cplusplus >= 201103L
++#define BOOST_NO_CXX98_FUNCTION_BASE
++#endif
++#endif
++
+ // --- end ---
+--- a/src/third_party/boost-1.70.0/boost/container_hash/hash.hpp
++++ b/src/third_party/boost-1.70.0/boost/container_hash/hash.hpp
+@@ -118,7 +118,7 @@ namespace boost
+ {
+ namespace hash_detail
+ {
+-#if defined(_HAS_AUTO_PTR_ETC) && !_HAS_AUTO_PTR_ETC
++#if defined(BOOST_NO_CXX98_FUNCTION_BASE)
+ template <typename T>
+ struct hash_base
+ {
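
A simplified sketch of what the guarded macro changes (not the real Boost sources): libc++ 15 stops providing std::unary_function in newer language modes, so a hash_base that derives from it no longer compiles; with BOOST_NO_CXX98_FUNCTION_BASE defined, the typedefs are provided directly instead of being inherited.

    // illustrative only
    #include <cstddef>
    #include <functional>

    #if defined(BOOST_NO_CXX98_FUNCTION_BASE)
    template <typename T>
    struct hash_base {
        typedef T argument_type;           // what unary_function used to supply
        typedef std::size_t result_type;
    };
    #else
    template <typename T>
    struct hash_base : std::unary_function<T, std::size_t> {};
    #endif
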
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-__GLIBC__-to-control-use-of-gnu_get_libc_version.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-__GLIBC__-to-control-use-of-gnu_get_libc_version.patch
index 8d82be1b57..cfbcbd9aa2 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-__GLIBC__-to-control-use-of-gnu_get_libc_version.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-__GLIBC__-to-control-use-of-gnu_get_libc_version.patch
@@ -5,6 +5,8 @@ Subject: [PATCH 03/10] Use __GLIBC__ to control use of gnu_get_libc_version
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Vincent Prince <vincent.prince.fr@gmail.com>
+
+Upstream-Status: Pending
---
src/mongo/util/processinfo_linux.cpp | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-explicit-typecast-to-size_t.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-explicit-typecast-to-size_t.patch
deleted file mode 100644
index 5b724ff8aa..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-explicit-typecast-to-size_t.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From 6fdb2d304e05a17e57b2efd7f8252794a8722dbe Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 1 Jun 2021 08:25:36 -0700
-Subject: [PATCH] Use explicit typecast to size_t
-
-maxMemoryUsageBytes is size_t type which may not match long long value
-internalDocumentSourceGroupMaxMemoryBytes.load() returns, so typecast it
-to avoid narrowing warning from clang
-
-document_source_group.cpp:378:22: error: non-constant-expression cannot be narrowed from type 'long long' to 'size_t' (aka 'unsigned int') in initializer list [-Wc++11-narrowing]
- maxMemoryUsageBytes ? *maxMemoryUsageBytes
- ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Upstream-Status: Submitted [https://github.com/mongodb/mongo/pull/1405]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/mongo/db/pipeline/document_source_group.cpp | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/src/mongo/db/pipeline/document_source_group.cpp b/src/mongo/db/pipeline/document_source_group.cpp
-index c7bf44e72e..a52906a940 100644
---- a/src/mongo/db/pipeline/document_source_group.cpp
-+++ b/src/mongo/db/pipeline/document_source_group.cpp
-@@ -376,7 +376,7 @@ DocumentSourceGroup::DocumentSourceGroup(const intrusive_ptr<ExpressionContext>&
- _doingMerge(false),
- _memoryTracker{pExpCtx->allowDiskUse && !pExpCtx->inMongos,
- maxMemoryUsageBytes ? *maxMemoryUsageBytes
-- : internalDocumentSourceGroupMaxMemoryBytes.load()},
-+ : (size_t)internalDocumentSourceGroupMaxMemoryBytes.load()},
- _initialized(false),
- _groups(pExpCtx->getValueComparator().makeUnorderedValueMap<Accumulators>()),
- _spilled(false) {
---
-2.31.1
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-long-long-instead-of-int64_t.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-long-long-instead-of-int64_t.patch
index 958e09c3dc..310301d57b 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-long-long-instead-of-int64_t.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-Use-long-long-instead-of-int64_t.patch
@@ -9,6 +9,8 @@ since this function expects long long as parameter and not int64_t
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Vincent Prince <vincent.prince.fr@gmail.com>
+
+Upstream-Status: Pending
---
src/mongo/util/procparser.cpp | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-add-explict-static_cast-size_t-to-maxMemoryUsageByte.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-add-explict-static_cast-size_t-to-maxMemoryUsageByte.patch
new file mode 100644
index 0000000000..de05624429
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-add-explict-static_cast-size_t-to-maxMemoryUsageByte.patch
@@ -0,0 +1,38 @@
+From ad37ee80b32a1f740a3197105174d74dff11e4e8 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Wed, 13 Apr 2022 13:56:32 -0700
+Subject: [PATCH] add explict static_cast<size_t> to maxMemoryUsageBytes
+
+Fixes
+src/mongo/db/pipeline/document_source_group.cpp:377:22: error: non-constant-expression cannot be narrowed from type 'long long' to 'size_t' (aka 'unsigned int') in initializer list [-Wc++11-narrowing]
+ maxMemoryUsageBytes ? *maxMemoryUsageBytes
+ ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+src/mongo/db/pipeline/document_source_group.cpp:377:22: note: insert an explicit cast to silence this issue
+ maxMemoryUsageBytes ? *maxMemoryUsageBytes
+ ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Upstream-Status: Pending
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/mongo/db/pipeline/document_source_group.cpp | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/src/mongo/db/pipeline/document_source_group.cpp b/src/mongo/db/pipeline/document_source_group.cpp
+index 4a7b48d6cd2..9a6076c6041 100644
+--- a/src/mongo/db/pipeline/document_source_group.cpp
++++ b/src/mongo/db/pipeline/document_source_group.cpp
+@@ -374,8 +374,8 @@ DocumentSourceGroup::DocumentSourceGroup(const intrusive_ptr<ExpressionContext>&
+ _usedDisk(false),
+ _doingMerge(false),
+ _memoryTracker{pExpCtx->allowDiskUse && !pExpCtx->inMongos,
+- maxMemoryUsageBytes ? *maxMemoryUsageBytes
+- : internalDocumentSourceGroupMaxMemoryBytes.load()},
++ static_cast<size_t>(maxMemoryUsageBytes ? *maxMemoryUsageBytes
++ : internalDocumentSourceGroupMaxMemoryBytes.load())},
+ // We spill to disk in debug mode, regardless of allowDiskUse, to stress the system.
+ _file(!pExpCtx->inMongos && (pExpCtx->allowDiskUse || kDebugBuild)
+ ? std::make_shared<Sorter<Value, Value>::File>(pExpCtx->tempDir + "/" +
+--
+2.35.2
+
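
A minimal sketch of the narrowing rule the cast works around (illustrative names, not the real DocumentSourceGroup code): inside a braced initializer list an implicit conversion from long long to a 32-bit size_t is a narrowing conversion and therefore an error, while the same conversion through an explicit static_cast is accepted.

    // illustrative only; 'configuredMax' stands in for the atomic load
    #include <cstddef>

    struct MemoryTracker {
        bool allowDiskUse;
        std::size_t maxBytes;
    };

    MemoryTracker make_tracker(bool allowDiskUse, long long configuredMax) {
        // return {allowDiskUse, configuredMax};  // -Wc++11-narrowing on 32-bit
        return {allowDiskUse, static_cast<std::size_t>(configuredMax)};
    }
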
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-apply-msvc-workaround-for-clang-16.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-apply-msvc-workaround-for-clang-16.patch
new file mode 100644
index 0000000000..096269308d
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-apply-msvc-workaround-for-clang-16.patch
@@ -0,0 +1,32 @@
+From 03047c81b2601362bcf79cae67e06d1fba0a6101 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 2 Mar 2023 20:17:57 -0800
+Subject: [PATCH] apply msvc workaround for clang >= 16
+
+This avoids a new Werror found with clang16
+
+boost-1.70.0/boost/mpl/aux_/integral_wrapper.hpp:73:31: error: integer value -1 is outside the valid range of values [0, 3] for this enumeration type [-Wenum-constexpr-conversion]
+ typedef AUX_WRAPPER_INST( BOOST_MPL_AUX_STATIC_CAST(AUX_WRAPPER_VALUE_TYPE, (value - 1)) ) prior;
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ .../boost-1.70.0/boost/mpl/aux_/integral_wrapper.hpp | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/third_party/boost-1.70.0/boost/mpl/aux_/integral_wrapper.hpp b/src/third_party/boost-1.70.0/boost/mpl/aux_/integral_wrapper.hpp
+index 6bc05f7e96e..6bb8d24c9ce 100644
+--- a/src/third_party/boost-1.70.0/boost/mpl/aux_/integral_wrapper.hpp
++++ b/src/third_party/boost-1.70.0/boost/mpl/aux_/integral_wrapper.hpp
+@@ -56,7 +56,7 @@ struct AUX_WRAPPER_NAME
+ // have to #ifdef here: some compilers don't like the 'N + 1' form (MSVC),
+ // while some other don't like 'value + 1' (Borland), and some don't like
+ // either
+-#if BOOST_WORKAROUND(__EDG_VERSION__, <= 243)
++#if BOOST_WORKAROUND(__EDG_VERSION__, <= 243) || __clang_major__ > 15
+ private:
+ BOOST_STATIC_CONSTANT(AUX_WRAPPER_VALUE_TYPE, next_value = BOOST_MPL_AUX_STATIC_CAST(AUX_WRAPPER_VALUE_TYPE, (N + 1)));
+ BOOST_STATIC_CONSTANT(AUX_WRAPPER_VALUE_TYPE, prior_value = BOOST_MPL_AUX_STATIC_CAST(AUX_WRAPPER_VALUE_TYPE, (N - 1)));
+--
+2.39.2
+
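
The diagnostic being avoided can be shown with a few lines of standalone code (illustrative, not Boost sources): clang 16 makes -Wenum-constexpr-conversion an error, so producing a value outside an enumeration's valid range in a constant expression is rejected, which is what integral_wrapper's 'value - 1' does for the first enumerator.

    // illustrative only
    enum colour { red, green, blue };   // valid value range is [0, 3]

    // rejected by clang >= 16: integer value -1 is outside the valid range
    // constexpr colour prior = static_cast<colour>(red - 1);

    // keeping the arithmetic in the underlying integer type is accepted
    constexpr int prior_value = static_cast<int>(red) - 1;
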
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-asio-Dont-use-experimental-with-clang.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-asio-Dont-use-experimental-with-clang.patch
index e726933f56..ad944e4666 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-asio-Dont-use-experimental-with-clang.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-asio-Dont-use-experimental-with-clang.patch
@@ -5,6 +5,8 @@ Subject: [PATCH 10/10] asio: Dont use experimental with clang
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Vincent Prince <vincent.prince.fr@gmail.com>
+
+Upstream-Status: Pending
---
src/third_party/asio-master/asio/include/asio/detail/string_view.hpp | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-free_mon-Include-missing-cstdint.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-free_mon-Include-missing-cstdint.patch
new file mode 100644
index 0000000000..8cee14889f
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-free_mon-Include-missing-cstdint.patch
@@ -0,0 +1,28 @@
+From 5d8218b8a1b5bc71e2a0cf543a000e194daba599 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 29 Jan 2023 17:15:30 -0800
+Subject: [PATCH] free_mon: Include missing <cstdint>
+
+gcc 13 moved some includes around and as a result <cstdint> is no
+longer transitively included [1]. Explicitly include it
+for uintXX_t.
+
+[1] https://gcc.gnu.org/gcc-13/porting_to.html#header-dep-changes
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+Upstream-Status: Pending
+---
+ src/mongo/db/free_mon/free_mon_options.h | 1 +
+ 1 file changed, 1 insertion(+)
+
+--- a/src/mongo/db/free_mon/free_mon_options.h
++++ b/src/mongo/db/free_mon/free_mon_options.h
+@@ -29,6 +29,7 @@
+
+ #pragma once
+
++#include <cstdint>
+ #include <string>
+ #include <vector>
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-server-Adjust-the-cache-alignment-assumptions.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-server-Adjust-the-cache-alignment-assumptions.patch
new file mode 100644
index 0000000000..52c9df9b1b
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-server-Adjust-the-cache-alignment-assumptions.patch
@@ -0,0 +1,37 @@
+From 5c9e0d0fc9188bab0ae09c9c33df01938b0c1b6c Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 14 Apr 2022 09:25:33 -0700
+Subject: [PATCH] server: Adjust the cache alignment assumptions
+
+aarch64 has 256 for hardware_destructive_interference_size and gcc 12
+has added a warning to complain about mismatches which results in
+static_assert failures
+
+In file included from src/mongo/s/commands/cluster_find_cmd.cpp:39:
+src/mongo/db/stats/counters.h:185:47: error: static assertion failed: cache line spill
+ 185 | static_assert(sizeof(decltype(_together)) <= stdx::hardware_constructive_interference_size,
+ | ~~~~~~~~~~~~~~~~~~~~~~~~~~~^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The structure needs to ensure true sharing for both elements,
+so align it to hardware_constructive_interference_size instead.
+
+Upstream-Status: Inappropriate [https://jira.mongodb.org/browse/SERVER-65664]
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/mongo/db/stats/counters.h | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+--- a/src/mongo/db/stats/counters.h
++++ b/src/mongo/db/stats/counters.h
+@@ -182,8 +182,8 @@ private:
+ AtomicWord<long long> requests{0};
+ };
+ CacheAligned<Together> _together{};
+- static_assert(sizeof(decltype(_together)) <= stdx::hardware_constructive_interference_size,
+- "cache line spill");
++ static_assert(sizeof(Together) <= stdx::hardware_constructive_interference_size,
++ "cache line spill");
+
+ CacheAligned<AtomicWord<long long>> _logicalBytesOut{0};
+
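
A hedged sketch of why the original assertion cannot hold on aarch64 (simplified; CacheAligned and the counters are stand-ins for the MongoDB types): the wrapper is padded up to the destructive interference size (256 on aarch64 with gcc 12), so its sizeof can never fit under the constructive interference size (typically 64); asserting on the unpadded payload checks the property that actually matters.

    // illustrative only; needs C++17 and gcc >= 12 for the interference sizes
    #include <cstdint>
    #include <new>

    struct Together {
        std::int64_t gets{0};
        std::int64_t requests{0};
    };

    template <typename T>
    struct alignas(std::hardware_destructive_interference_size) CacheAligned {
        T value;
    };

    inline CacheAligned<Together> together{};

    // original assert: the wrapper is padded to 256 bytes on aarch64, so it
    // can never be <= hardware_constructive_interference_size (64)
    // static_assert(sizeof(decltype(together)) <=
    //               std::hardware_constructive_interference_size, "cache line spill");

    // patched assert: only the payload has to fit in one cache line
    static_assert(sizeof(Together) <= std::hardware_constructive_interference_size,
                  "cache line spill");
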
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-wiredtiger-Avoid-using-off64_t.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-wiredtiger-Avoid-using-off64_t.patch
new file mode 100644
index 0000000000..abe6898554
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0001-wiredtiger-Avoid-using-off64_t.patch
@@ -0,0 +1,30 @@
+From 0508c1518c2e7c586a231d344e9f93b08507885b Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sat, 31 Dec 2022 14:23:40 -0800
+Subject: [PATCH] wiredtiger: Avoid using off64_t
+
+off64_t is not available on musl since off_t is already 64bit by
+default. Therefore replace off64_t with off_t.
+
+Upstream-Status: Pending
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+---
+ src/third_party/wiredtiger/src/os_posix/os_fs.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/third_party/wiredtiger/src/os_posix/os_fs.c b/src/third_party/wiredtiger/src/os_posix/os_fs.c
+index 3898eb74343..9ce2d5edb38 100644
+--- a/src/third_party/wiredtiger/src/os_posix/os_fs.c
++++ b/src/third_party/wiredtiger/src/os_posix/os_fs.c
+@@ -533,7 +533,7 @@ __posix_file_sync_nowait(WT_FILE_HANDLE *file_handle, WT_SESSION *wt_session)
+ pfh = (WT_FILE_HANDLE_POSIX *)file_handle;
+
+ /* See comment in __posix_sync(): sync cannot be retried or fail. */
+- WT_SYSCALL(sync_file_range(pfh->fd, (off64_t)0, (off64_t)0, SYNC_FILE_RANGE_WRITE), ret);
++ WT_SYSCALL(sync_file_range(pfh->fd, 0, 0, SYNC_FILE_RANGE_WRITE), ret);
+ if (ret == 0)
+ return (0);
+
+--
+2.39.0
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0002-Add-a-definition-for-the-macro-__ELF_NATIVE_CLASS.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0002-Add-a-definition-for-the-macro-__ELF_NATIVE_CLASS.patch
index 57f4168f5a..d1e662f291 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0002-Add-a-definition-for-the-macro-__ELF_NATIVE_CLASS.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0002-Add-a-definition-for-the-macro-__ELF_NATIVE_CLASS.patch
@@ -7,6 +7,8 @@ It depends on the native arch's word size.
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Vincent Prince <vincent.prince.fr@gmail.com>
+
+Upstream-Status: Pending
---
src/mongo/util/stacktrace_posix.cpp | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0003-Fix-unknown-prefix-env.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0003-Fix-unknown-prefix-env.patch
index 910ef0b5f4..d2ba6eb805 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0003-Fix-unknown-prefix-env.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0003-Fix-unknown-prefix-env.patch
@@ -1,3 +1,5 @@
+Upstream-Status: Pending
+
Index: git/SConstruct
===================================================================
--- git.orig/SConstruct
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0004-wiredtiger-Disable-strtouq-on-musl.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0004-wiredtiger-Disable-strtouq-on-musl.patch
index 2cea9bc31f..45051e103d 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0004-wiredtiger-Disable-strtouq-on-musl.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/0004-wiredtiger-Disable-strtouq-on-musl.patch
@@ -4,6 +4,8 @@ Date: Sat, 2 Sep 2017 13:13:15 -0700
Subject: [PATCH 09/10] wiredtiger: Disable strtouq on musl
Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+Upstream-Status: Pending
---
src/third_party/wiredtiger/build_linux/wiredtiger_config.h | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/1296.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/1296.patch
index e4ae30776b..9259f96be8 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/1296.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/1296.patch
@@ -1,4 +1,3 @@
-Upstream-Status: submitted https://github.com/mongodb/mongo/pull/1296
From 362be06fc16a5ad0f9e9aa90cc763c5242e8e35c Mon Sep 17 00:00:00 2001
From: Fabrice Fontaine <fontaine.fabrice@gmail.com>
Date: Sat, 9 Feb 2019 12:41:45 +0100
@@ -14,6 +13,8 @@ src/mongo/util/net/ssl_manager.cpp: In static member function 'static mongo::Sta
src/mongo/util/net/ssl_manager.cpp:575:79: error: invalid conversion from 'size_t* {aka unsigned int*}' to 'long unsigned int*' [-fpermissive]
if (mongoUnsignedAddOverflow64(tagAndLengthByteCount, derLength, outLength) ||
+Upstream-Status: Submitted [https://github.com/mongodb/mongo/pull/1296]
+
Signed-off-by: Fabrice Fontaine <fontaine.fabrice@gmail.com>
Signed-off-by: Vincent Prince <vincent.prince.fr@gmail.com>
---
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/arm64-support.patch b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/arm64-support.patch
index 1a7bf0fc52..24b0ad58b6 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/arm64-support.patch
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb/arm64-support.patch
@@ -5,6 +5,8 @@ Subject: [PATCH 05/10] Add aliases for arm64 which is the same as aarch64
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Signed-off-by: Vincent Prince <vincent.prince.fr@gmail.com>
+
+Upstream-Status: Pending
---
SConstruct | 1 +
src/third_party/IntelRDFPMathLib20U1/SConscript | 2 +-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb_git.bb b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb_git.bb
index 9c1e485b31..ee5c77a85d 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb_git.bb
+++ b/meta-oe/dynamic-layers/meta-python/recipes-dbs/mongodb/mongodb_git.bb
@@ -11,10 +11,10 @@ DEPENDS = "openssl libpcap zlib boost curl python3 \
inherit scons dos2unix siteinfo python3native systemd useradd
-PV = "4.4.6"
-#v4.4.6
-SRCREV = "72e66213c2c3eab37d9358d5e78ad7f5c1d0d0d7"
-SRC_URI = "git://github.com/mongodb/mongo.git;branch=v4.4 \
+PV = "4.4.24"
+#v4.4.24
+SRCREV = "0b86b9b7b42ad9970c5f818c527dd86c0634243a"
+SRC_URI = "git://github.com/mongodb/mongo.git;branch=v4.4;protocol=https \
file://0001-Tell-scons-to-use-build-settings-from-environment-va.patch \
file://0001-Use-long-long-instead-of-int64_t.patch \
file://0001-Use-__GLIBC__-to-control-use-of-gnu_get_libc_version.patch \
@@ -29,44 +29,57 @@ SRC_URI = "git://github.com/mongodb/mongo.git;branch=v4.4 \
file://0001-include-needed-c-header.patch \
file://disable_runtime_check.patch \
file://ppc64_ARCH_BITS.patch \
- file://0001-Do-not-use-MINSIGSTKSZ.patch \
- file://0001-Use-explicit-typecast-to-size_t.patch \
+ file://0001-add-explict-static_cast-size_t-to-maxMemoryUsageByte.patch \
+ file://0001-server-Adjust-the-cache-alignment-assumptions.patch \
+ file://0001-The-std-lib-unary-binary_function-base-classes-are-d.patch \
+ file://0001-free_mon-Include-missing-cstdint.patch \
+ file://0001-apply-msvc-workaround-for-clang-16.patch \
+ file://0001-Fix-type-mismatch-on-32bit-arches.patch \
+ file://0001-Fix-build-on-32bit.patch \
"
-SRC_URI_append_libc-musl ="\
+SRC_URI:append:libc-musl ="\
file://0001-Mark-one-of-strerror_r-implementation-glibc-specific.patch \
file://0002-Fix-default-stack-size-to-256K.patch \
file://0004-wiredtiger-Disable-strtouq-on-musl.patch \
+ file://0001-wiredtiger-Avoid-using-off64_t.patch \
"
-SRC_URI_append_toolchain-clang = "\
+SRC_URI:append:toolchain-clang = "\
file://0001-asio-Dont-use-experimental-with-clang.patch \
"
-
S = "${WORKDIR}/git"
+CVE_STATUS[CVE-2014-8180] = "not-applicable-config: Not affecting our configuration so it can be safely ignored."
+CVE_STATUS[CVE-2017-2665] = "not-applicable-config: Not affecting our configuration so it can be safely ignored."
+
COMPATIBLE_HOST ?= '(x86_64|i.86|powerpc64|arm|aarch64).*-linux'
PACKAGECONFIG ??= "tcmalloc system-pcre"
# gperftools compilation fails for arm below v7 because of missing support of
# dmb operation. So we use system-allocator instead of tcmalloc
-PACKAGECONFIG_remove_armv6 = "tcmalloc"
-PACKAGECONFIG_remove_libc-musl = "tcmalloc"
-PACKAGECONFIG_remove_riscv64 = "tcmalloc"
-PACKAGECONFIG_remove_riscv32 = "tcmalloc"
+PACKAGECONFIG:remove:armv6 = "tcmalloc"
+PACKAGECONFIG:remove:libc-musl = "tcmalloc"
+PACKAGECONFIG:remove:riscv64 = "tcmalloc"
+PACKAGECONFIG:remove:riscv32 = "tcmalloc"
PACKAGECONFIG[tcmalloc] = "--use-system-tcmalloc,--allocator=system,gperftools,"
PACKAGECONFIG[shell] = ",--js-engine=none,,"
PACKAGECONFIG[system-pcre] = "--use-system-pcre,,libpcre,"
MONGO_ARCH ?= "${HOST_ARCH}"
-MONGO_ARCH_powerpc64le = "ppc64le"
+MONGO_ARCH:powerpc64le = "ppc64le"
WIREDTIGER ?= "off"
-WIREDTIGER_x86-64 = "on"
-WIREDTIGER_aarch64 = "on"
+WIREDTIGER:x86-64 = "on"
+WIREDTIGER:aarch64 = "on"
+
+# ld.gold: fatal error: build/59f4f0dd/mongo/mongod: Structure needs cleaning
+LDFLAGS:append:x86:libc-musl = " -fuse-ld=bfd"
+LDFLAGS:remove:toolchain-clang = "-fuse-ld=bfd"
EXTRA_OESCONS = "PREFIX=${prefix} \
DESTDIR=${D} \
+ MAXLINELENGTH='2097152' \
LIBPATH=${STAGING_LIBDIR} \
LINKFLAGS='${LDFLAGS}' \
CXXFLAGS='${CXXFLAGS}' \
@@ -78,50 +91,59 @@ EXTRA_OESCONS = "PREFIX=${prefix} \
--use-system-zlib \
--nostrip \
--endian=${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'le', 'little', 'big', d)} \
+ --use-hardware-crc32=${@bb.utils.contains('TUNE_FEATURES', 'crc', 'on', 'off', d)} \
--wiredtiger='${WIREDTIGER}' \
--separate-debug \
${PACKAGECONFIG_CONFARGS}"
-
USERADD_PACKAGES = "${PN}"
-USERADD_PARAM_${PN} = "--system --no-create-home --home-dir /var/run/${BPN} --shell /bin/false --user-group ${BPN}"
-
+USERADD_PARAM:${PN} = "--system --no-create-home --home-dir /var/run/${BPN} --shell /bin/false --user-group ${BPN}"
scons_do_compile() {
- ${STAGING_BINDIR_NATIVE}/scons ${PARALLEL_MAKE} ${EXTRA_OESCONS} install-core || \
+ ${STAGING_BINDIR_NATIVE}/scons ${PARALLEL_MAKE} ${EXTRA_OESCONS} install-core ||
die "scons build execution failed."
}
scons_do_install() {
- # install binaries
- install -d ${D}${bindir}
- for i in mongod mongos mongo
- do
- if [ -f ${B}/build/opt/mongo/${i} ]
- then
- install -m 0755 ${B}/build/opt/mongo/${i} ${D}${bindir}/${i}
- else
- bbnote "${i} does not exist"
- fi
- done
-
- # install config
- install -d ${D}${sysconfdir}
- install -m 0644 ${S}/debian/mongod.conf ${D}${sysconfdir}/
-
- # install systemd service
- install -d ${D}${systemd_system_unitdir}
- install -m 0644 ${S}/debian/mongod.service ${D}${systemd_system_unitdir}
-
- # install mongo data folder
- install -m 755 -d ${D}${localstatedir}/lib/${BPN}
- chown ${PN}:${PN} ${D}${localstatedir}/lib/${BPN}
-
- # Log files
- install -m 755 -d ${D}${localstatedir}/log/${BPN}
- chown ${PN}:${PN} ${D}${localstatedir}/log/${BPN}
+ # install binaries
+ install -d ${D}${bindir}
+ for i in mongod mongos mongo; do
+ if [ -f ${B}/build/*/mongo/$i ]; then
+ install -m 0755 ${B}/build/*/mongo/$i ${D}${bindir}
+ else
+ bbnote "$i does not exist"
+ fi
+ done
+
+ # install config
+ install -d ${D}${sysconfdir}
+ install -m 0644 ${S}/debian/mongod.conf ${D}${sysconfdir}
+
+ # install systemd service
+ install -d ${D}${systemd_system_unitdir}
+ install -m 0644 ${S}/debian/mongod.service ${D}${systemd_system_unitdir}
+
+ # install mongo data folder
+ install -m 755 -d ${D}${localstatedir}/lib/${BPN}
+ chown ${BPN}:${BPN} ${D}${localstatedir}/lib/${BPN}
+
+ # Create /var/log/mongodb in runtime.
+ if [ "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}" ]; then
+ install -d ${D}${nonarch_libdir}/tmpfiles.d
+ echo "d ${localstatedir}/log/${BPN} 0755 ${BPN} ${BPN} -" > ${D}${nonarch_libdir}/tmpfiles.d/${BPN}.conf
+ fi
+ if [ "${@bb.utils.filter('DISTRO_FEATURES', 'sysvinit', d)}" ]; then
+ install -d ${D}${sysconfdir}/default/volatiles
+ echo "d ${BPN} ${BPN} 0755 ${localstatedir}/log/${BPN} none" > ${D}${sysconfdir}/default/volatiles/99_${BPN}
+ fi
}
-CONFFILES_${PN} = "${sysconfdir}/mongod.conf"
+CONFFILES:${PN} = "${sysconfdir}/mongod.conf"
+
+SYSTEMD_SERVICE:${PN} = "mongod.service"
+
+FILES:${PN} += "${nonarch_libdir}/tmpfiles.d"
+
+RDEPENDS:${PN} += "tzdata-core"
-SYSTEMD_SERVICE_${PN} = "mongod.service"
+SKIP_RECIPE[mongodb] ?= "Needs porting to python 3.12"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb-generator_0.4.8.bb b/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb-generator_0.4.8.bb
new file mode 100644
index 0000000000..c86750f8b6
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb-generator_0.4.8.bb
@@ -0,0 +1,14 @@
+require nanopb.inc
+
+inherit python3-dir
+
+DEPENDS = "protobuf-native"
+RDEPENDS:${PN} += "python3-protobuf"
+
+EXTRA_OECMAKE += " \
+ -Dnanopb_PYTHON_INSTDIR_OVERRIDE=${PYTHON_SITEPACKAGES_DIR} \
+ -Dnanopb_BUILD_RUNTIME=OFF \
+ -Dnanopb_BUILD_GENERATOR=ON \
+ "
+
+FILES:${PN} += "${PYTHON_SITEPACKAGES_DIR}"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb-runtime_0.4.8.bb b/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb-runtime_0.4.8.bb
new file mode 100644
index 0000000000..e43931a4ec
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb-runtime_0.4.8.bb
@@ -0,0 +1,17 @@
+require nanopb.inc
+
+EXTRA_OECMAKE += " \
+ -Dnanopb_PROTOC_PATH=/bin/false \
+ -DBUILD_SHARED_LIBS=ON \
+ -Dnanopb_BUILD_RUNTIME=ON \
+ -Dnanopb_BUILD_GENERATOR=OFF \
+ "
+
+# Maintain compatibility with old header locations for packages
+# which haven't yet migrated to `nanopb/pb*.h`
+do_install:append() {
+ for hdr in ${D}${includedir}/nanopb/*; do
+ ln -sv nanopb/$(basename "$hdr") ${D}${includedir}/
+ done
+}
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb.bb b/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb.bb
new file mode 100644
index 0000000000..d4ab31a9b1
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb.bb
@@ -0,0 +1,16 @@
+SUMMARY = "Combined nanopb package"
+PV = "1.0"
+
+PACKAGE_ARCH = "${MACHINE_ARCH}"
+
+inherit packagegroup
+
+DEPENDS = " \
+ nanopb-generator \
+ nanopb-runtime \
+"
+
+RDEPENDS:${PN} = " \
+ nanopb-generator \
+ nanopb-runtime \
+"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb.inc b/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb.inc
new file mode 100644
index 0000000000..87dbc73e63
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb.inc
@@ -0,0 +1,13 @@
+DESCRIPTION="Protocol Buffers with small code size"
+LICENSE="Zlib"
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=9db4b73a55a3994384112efcdb37c01f"
+
+SRC_URI = "git://github.com/nanopb/nanopb.git;branch=master;protocol=https"
+SRCREV = "6cfe48d6f1593f8fa5c0f90437f5e6522587745e"
+
+S = "${WORKDIR}/git"
+
+inherit cmake
+
+BBCLASSEXTEND = "native nativesdk"
+
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb_0.4.5.bb b/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb_0.4.5.bb
deleted file mode 100644
index b2f8db9c0c..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-devtools/nanopb/nanopb_0.4.5.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-DESCRIPTION="Protocol Buffers with small code size"
-LICENSE="Zlib"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=9db4b73a55a3994384112efcdb37c01f"
-
-DEPENDS = "protobuf-native"
-
-SRC_URI = "git://github.com/nanopb/nanopb.git"
-SRCREV = "c9124132a604047d0ef97a09c0e99cd9bed2c818"
-
-S = "${WORKDIR}/git"
-
-inherit cmake python3native
-
-do_install_append() {
- install -Dm 0755 ${S}/generator/nanopb_generator.py ${D}${bindir}/nanopb_generator.py
- install -Dm 0755 ${S}/generator/protoc-gen-nanopb ${D}${bindir}/protoc-gen-nanopb
- install -Dm 0755 ${S}/generator/proto/__init__.py ${D}${PYTHON_SITEPACKAGES_DIR}/proto/__init__.py
-}
-
-FILES_${PN} += "${PYTHON_SITEPACKAGES_DIR}"
-FILES_${PN}-dev += "${libdir}/cmake/${BPN}"
-
-RDEPENDS_${PN} += "\
- ${PYTHON_PN}-protobuf \
- protobuf-compiler \
-"
-
-BBCLASSEXTEND = "native nativesdk"
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0001-Fix-compilation-with-GCC-10.x.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0001-Fix-compilation-with-GCC-10.x.patch
deleted file mode 100644
index 35634dd9ba..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0001-Fix-compilation-with-GCC-10.x.patch
+++ /dev/null
@@ -1,46 +0,0 @@
-From 9b07f6a472c24f5e1b65746756764391be0d55e4 Mon Sep 17 00:00:00 2001
-From: Harald Geyer <harald@ccbib.org>
-Date: Mon, 10 Feb 2020 13:15:10 +0100
-Subject: [PATCH] Fix compilation with GCC >= 10.x
-
-Starting with GCC >= 10.x, -fno-common is used as default
-instead of -fcommon. This patch fixes the compilation.
-
-Closes: #148
-
-Upstream-Status: Backport
-Suggested-by: Conrad Kostecki <conrad@kostecki.com>
-Signed-off-by: Harald Geyer <harald@ccbib.org>
----
- clients/lcdproc/iface.c | 1 +
- clients/lcdproc/iface.h | 2 +-
- 2 files changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/clients/lcdproc/iface.c b/clients/lcdproc/iface.c
-index 40e50cb7..1ac355bd 100644
---- a/clients/lcdproc/iface.c
-+++ b/clients/lcdproc/iface.c
-@@ -32,6 +32,7 @@
- #define UNSET_INT -1
- #define UNSET_STR "\01"
-
-+IfaceInfo iface[MAX_INTERFACES];
-
- static int iface_count = 0; /* number of interfaces */
- static char unit_label[10] = "B"; /* default unit label is Bytes */
-diff --git a/clients/lcdproc/iface.h b/clients/lcdproc/iface.h
-index cc6dbaaf..c1bd6b5b 100644
---- a/clients/lcdproc/iface.h
-+++ b/clients/lcdproc/iface.h
-@@ -18,7 +18,7 @@
- /** max number of interfaces in multi-interface mode */
- #define MAX_INTERFACES 3
-
--IfaceInfo iface[MAX_INTERFACES]; /* interface info */
-+extern IfaceInfo iface[MAX_INTERFACES]; /* interface info */
-
- /** Update screen content */
- int iface_screen(int rep, int display, int *flags_ptr);
---
-2.28.0
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0001-Fix-parallel-build-fix-port-internal-make-dependenci.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0001-Fix-parallel-build-fix-port-internal-make-dependenci.patch
deleted file mode 100644
index f6a7956db2..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0001-Fix-parallel-build-fix-port-internal-make-dependenci.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From a20feee4963bc38975fbaf44bbe85a31825f59db Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 6 Apr 2019 17:28:28 -0700
-Subject: [PATCH 1/3] Fix parallel build (fix port-internal make dependencies)
- on many cores
-
-Upstream-Status: Submitted [https://github.com/lcdproc/lcdproc/pull/142]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- server/drivers/Makefile.am | 4 ++--
- 1 file changed, 2 insertions(+), 2 deletions(-)
-
-diff --git a/server/drivers/Makefile.am b/server/drivers/Makefile.am
-index e08f2b2d..4fd2e3f1 100644
---- a/server/drivers/Makefile.am
-+++ b/server/drivers/Makefile.am
-@@ -47,11 +47,11 @@ CwLnx_LDADD = libLCD.a libbignum.a
- futaba_LDADD = @LIBUSB_LIBS@ @LIBUSB_1_0_LIBS@ libLCD.a
- g15_LDADD = @LIBG15@
- glcd_LDADD = libLCD.a @GLCD_DRIVERS@ @FT2_LIBS@ @LIBPNG_LIBS@ @LIBSERDISP@ @LIBUSB_LIBS@ @LIBX11_LIBS@
--glcd_DEPENDENCIES = @GLCD_DRIVERS@ glcd-glcd-render.o
-+glcd_DEPENDENCIES = @GLCD_DRIVERS@ glcd-glcd-render.o libLCD.a
- glcdlib_LDADD = @LIBGLCD@
- glk_LDADD = libbignum.a
- hd44780_LDADD = libLCD.a @HD44780_DRIVERS@ @HD44780_I2C@ @LIBUSB_LIBS@ @LIBFTDI_LIBS@ @LIBUGPIO@ libbignum.a
--hd44780_DEPENDENCIES = @HD44780_DRIVERS@ @HD44780_I2C@
-+hd44780_DEPENDENCIES = @HD44780_DRIVERS@ @HD44780_I2C@ libLCD.a libbignum.a
- i2500vfd_LDADD = @LIBFTDI_LIBS@
- imon_LDADD = libLCD.a libbignum.a
- imonlcd_LDADD = libLCD.a
---
-2.24.1
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0002-Include-limits.h-for-PATH_MAX-definition.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0002-Include-limits.h-for-PATH_MAX-definition.patch
deleted file mode 100644
index eb866bf10a..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0002-Include-limits.h-for-PATH_MAX-definition.patch
+++ /dev/null
@@ -1,41 +0,0 @@
-From d447a05ee560ba5894d2ed4cd93d0475c2f3c08e Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 17 Dec 2019 17:39:32 -0800
-Subject: [PATCH 2/3] Include <limits.h> for PATH_MAX definition
-
-musl libc exposes the missing include
-
-Upstream-Status: Submitted [https://github.com/lcdproc/lcdproc/pull/142]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- server/drivers/hidraw_lib.c | 1 +
- server/drivers/linux_input.c | 1 +
- 2 files changed, 2 insertions(+)
-
-diff --git a/server/drivers/hidraw_lib.c b/server/drivers/hidraw_lib.c
-index 49b03f20..3b51f279 100644
---- a/server/drivers/hidraw_lib.c
-+++ b/server/drivers/hidraw_lib.c
-@@ -8,6 +8,7 @@
- #include <dirent.h>
- #include <errno.h>
- #include <fcntl.h>
-+#include <limits.h>
- #include <stdlib.h>
- #include <string.h>
- #include <sys/ioctl.h>
-diff --git a/server/drivers/linux_input.c b/server/drivers/linux_input.c
-index 5b914d4c..6fcfc591 100644
---- a/server/drivers/linux_input.c
-+++ b/server/drivers/linux_input.c
-@@ -5,6 +5,7 @@
-
- #include <dirent.h>
- #include <errno.h>
-+#include <limits.h>
- #include <stdint.h>
- #include <stdio.h>
- #include <stdlib.h>
---
-2.24.1
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0003-Fix-non-x86-platforms-on-musl.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0003-Fix-non-x86-platforms-on-musl.patch
deleted file mode 100644
index e39e9bda14..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc/0003-Fix-non-x86-platforms-on-musl.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From 7fd144f101fa5c9316d3468ed26f55629afe1305 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Tue, 17 Dec 2019 17:55:54 -0800
-Subject: [PATCH 3/3] Fix non x86 platforms on musl
-
-Musl only specifies in/outb for x86/x86. Use the fallback path in case
-musl is used.
-
-This should fail compilation during the linking stage but for some reason
-does not. Will do if -Werror=implicit-function-declaration is specified.
-
-Original here: https://github.com/openwrt/packages/blob/master/utils/lcdproc/patches/110-in-outb.patch
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- server/drivers/port.h | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/server/drivers/port.h b/server/drivers/port.h
-index c584cd4e..bde235b3 100644
---- a/server/drivers/port.h
-+++ b/server/drivers/port.h
-@@ -94,7 +94,7 @@ static inline int port_deny_multiple(unsigned short port, unsigned short count);
- /* ---------------------------- Linux ------------------------------------ */
- /* Use ioperm, inb and outb in <sys/io.h> (Linux) */
- /* And iopl for higher addresses of PCI LPT cards */
--#if defined HAVE_IOPERM
-+#if defined(__GLIBC__) || (defined(__x86__) || defined(__x86_64__))
-
- /* Glibc2 and Glibc1 */
- # ifdef HAVE_SYS_IO_H
---
-2.24.1
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc_git.bb b/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc_git.bb
index 35585d0690..c29c51a2ae 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc_git.bb
+++ b/meta-oe/dynamic-layers/meta-python/recipes-extended/lcdproc/lcdproc_git.bb
@@ -3,20 +3,15 @@ shipped with this package can be used to acquire various kinds of system stats."
SUMMARY = "Drivers for character-based LCD displays"
HOMEPAGE = "http://lcdproc.org"
SECTION = "utils"
-LICENSE = "GPLv2+"
+LICENSE = "GPL-2.0-or-later"
DEPENDS = "ncurses lirc"
LIC_FILES_CHKSUM = "file://COPYING;md5=18810669f13b87348459e611d31ab760 \
file://README.md;beginline=107;md5=5db392f043253a2d64b1737068ce6b58"
-PV = "0.5.9+git${SRCPV}"
-SRCREV = "3a3d622d9bb74c44fa67bc20573751a207514134"
-SRC_URI = "git://github.com/lcdproc/lcdproc \
- file://0001-Fix-parallel-build-fix-port-internal-make-dependenci.patch \
- file://0002-Include-limits.h-for-PATH_MAX-definition.patch \
- file://0003-Fix-non-x86-platforms-on-musl.patch \
- file://0001-Fix-compilation-with-GCC-10.x.patch \
- "
+PV = "0.5.9+git"
+SRCREV = "0e2ce9b9c46c47363436f9ee730f7c71bf455f0f"
+SRC_URI = "git://github.com/lcdproc/lcdproc;branch=master;protocol=https"
S = "${WORKDIR}/git"
@@ -24,9 +19,9 @@ inherit autotools pkgconfig update-rc.d
LCD_DRIVERS ?= "all,!irman,!svga${SERIALVFD}"
SERIALVFD ?= ""
-SERIALVFD_libc-musl = ",!serialVFD"
-SERIALVFD_libc-musl_x86 = ""
-SERIALVFD_libc-musl_x86-64 = ""
+SERIALVFD:libc-musl = ",!serialVFD"
+SERIALVFD:libc-musl:x86 = ""
+SERIALVFD:libc-musl:x86-64 = ""
LCD_DEFAULT_DRIVER ?= "curses"
@@ -37,7 +32,7 @@ PACKAGECONFIG[g15] = ",,libg15 g15daemon libg15render,"
PACKAGECONFIG[hid] = "--enable-libhid,--disable-libhid,libhid"
PACKAGECONFIG[png] = "--enable-libpng,--disable-libpng,libpng"
-LCD_DRIVERS_append = "${@bb.utils.contains('PACKAGECONFIG', 'g15', '', ',!g15', d)}"
+LCD_DRIVERS:append = "${@bb.utils.contains('PACKAGECONFIG', 'g15', '', ',!g15', d)}"
EXTRA_OECONF = "--enable-drivers='${LCD_DRIVERS}'"
@@ -74,33 +69,33 @@ do_install () {
PACKAGES =+ "lcdd lcdvc"
-RRECOMMENDS_${PN} = "lcdd"
+RRECOMMENDS:${PN} = "lcdd"
-FILES_lcdd = "${sysconfdir}/LCDd.conf \
+FILES:lcdd = "${sysconfdir}/LCDd.conf \
${sbindir}/LCDd \
${sysconfdir}/init.d/lcdd"
-CONFFILES_lcdd = "${sysconfdir}/LCDd.conf"
-CONFFILES_${PN} = "${sysconfdir}/lcdproc.conf"
-CONFFILES_lcdvc = "${sysconfdir}/lcdvc.conf"
-FILES_lcdvc = "${sysconfdir}/lcdvc.conf ${sbindir}/lcdvc"
+CONFFILES:lcdd = "${sysconfdir}/LCDd.conf"
+CONFFILES:${PN} = "${sysconfdir}/lcdproc.conf"
+CONFFILES:lcdvc = "${sysconfdir}/lcdvc.conf"
+FILES:lcdvc = "${sysconfdir}/lcdvc.conf ${sbindir}/lcdvc"
# Driver packages
# USB / no USB trickery
-RCONFLICTS_lcdd-driver-hd47780nousb = "lcdd-driver-hd44780"
-RCONFLICTS_lcdd-driver-hd47780 = "lcdd-driver-hd44780nousb"
+RCONFLICTS:lcdd-driver-hd47780nousb = "lcdd-driver-hd44780"
+RCONFLICTS:lcdd-driver-hd47780 = "lcdd-driver-hd44780nousb"
INITSCRIPT_PACKAGES = "lcdd lcdproc"
-INITSCRIPT_NAME_lcdd = "lcdd"
-INITSCRIPT_NAME_lcdproc = "lcdproc"
-INITSCRIPT_PARAMS_lcdd = "defaults 70 21"
-INITSCRIPT_PARAMS_lcdproc = "defaults 71 20"
+INITSCRIPT_NAME:lcdd = "lcdd"
+INITSCRIPT_NAME:lcdproc = "lcdproc"
+INITSCRIPT_PARAMS:lcdd = "defaults 70 21"
+INITSCRIPT_PARAMS:lcdproc = "defaults 71 20"
-python populate_packages_prepend() {
+python populate_packages:prepend() {
plugindir = d.expand('${libdir}/lcdproc')
- do_split_packages(d, plugindir, '(.*)\.so$', 'lcdd-driver-%s', 'LCDd driver for %s', prepend=True)
+ do_split_packages(d, plugindir, r'(.*)\.so$', 'lcdd-driver-%s', 'LCDd driver for %s', prepend=True)
}
PACKAGES_DYNAMIC += "^lcdd-driver-.*"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0001-Port-build-to-python3.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0001-Port-build-to-python3.patch
deleted file mode 100644
index 5cdad93e95..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0001-Port-build-to-python3.patch
+++ /dev/null
@@ -1,6897 +0,0 @@
-From 33a373ba41d978af60c2f0230bcba6ad27357ec8 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Andreas=20M=C3=BCller?= <schnitzeltony@gmail.com>
-Date: Wed, 29 Jan 2020 16:25:11 +0100
-Subject: [PATCH] Port build to python3
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-* first tool of choice was 2to3
-* some parts were taken from [1] but during work it was found that this patch
- introduces interesting effects - see hash functions. Working more on this
- makes me guess that one has never worked...
-* Few parts were taken from upstream mirror [2]. Since they use six for porting
- to python3 it adds us a new dependency.
-* To get a better overview what is going on or failing some additional messages
- were added. The most verbose one is left disabled - see
- python/mozbuild/mozbuild/configure/__init__.py / Line 310 onwards
-* major changes upstream on build are not to expect so upgrading should cause
- little trouble and changes can be tracked by [3]
-* some solutions are workarounds/hacks so this patch will not be accepted
- upstream. This should not be a problem for us: once mozjs >= 68 will arrive
- we have to go to rust/cargo based build anyway.
-
-[1] https://code.foxkit.us/adelie/packages/blob/f2b5773da19ab397fbe64fd32dacc383cfe4cd77/user/mozjs/python3.patch
-[2] https://github.com/mozilla/gecko-dev
-[3] https://github.com/mozilla/gecko-dev/tree/esr60
-
-Upstream-Status: Inappropriate [Some Hacks]
-
-Signed-off-by: Andreas Müller <schnitzeltony@gmail.com>
----
- build/autoconf/config.status.m4 | 2 +-
- build/moz.configure/android-ndk.configure | 4 +-
- build/moz.configure/checks.configure | 4 +-
- build/moz.configure/init.configure | 31 +-
- build/moz.configure/keyfiles.configure | 4 +-
- build/moz.configure/old.configure | 32 +-
- build/moz.configure/toolchain.configure | 18 +-
- build/moz.configure/util.configure | 9 +-
- build/moz.configure/windows.configure | 10 +-
- build/templates.mozbuild | 2 +-
- config/MozZipFile.py | 12 +-
- config/expandlibs.py | 6 +-
- config/expandlibs_exec.py | 14 +-
- config/expandlibs_gen.py | 4 +-
- configure.py | 42 +-
- js/src/build/moz.build | 8 +-
- js/src/builtin/embedjs.py | 10 +-
- js/src/configure | 2 +-
- js/src/frontend/GenerateReservedWords.py | 6 +-
- js/src/gc/GenerateStatsPhases.py | 4 +-
- js/src/old-configure.in | 2 +
- memory/build/moz.build | 8 +-
- mozglue/build/moz.build | 22 +-
- .../mozbuild/mozbuild/action/check_binary.py | 2 +
- .../mozbuild/action/process_define_files.py | 4 +-
- python/mozbuild/mozbuild/backend/base.py | 8 +-
- python/mozbuild/mozbuild/backend/common.py | 8 +-
- .../mozbuild/backend/configenvironment.py | 14 +-
- .../mozbuild/mozbuild/backend/fastermake.py | 10 +-
- .../mozbuild/backend/recursivemake.py | 181 +++----
- python/mozbuild/mozbuild/config_status.py | 7 +-
- .../mozbuild/mozbuild/configure/__init__.py | 83 +++-
- .../mozbuild/configure/check_debug_ranges.py | 6 +-
- python/mozbuild/mozbuild/configure/options.py | 24 +-
- python/mozbuild/mozbuild/configure/util.py | 12 +-
- .../mozbuild/mozbuild/controller/building.py | 16 +-
- python/mozbuild/mozbuild/frontend/context.py | 89 ++--
- python/mozbuild/mozbuild/frontend/data.py | 8 +-
- python/mozbuild/mozbuild/frontend/emitter.py | 50 +-
- python/mozbuild/mozbuild/frontend/reader.py | 49 +-
- python/mozbuild/mozbuild/frontend/sandbox.py | 3 +-
- python/mozbuild/mozbuild/jar.py | 12 +-
- python/mozbuild/mozbuild/makeutil.py | 24 +-
- python/mozbuild/mozbuild/mozinfo.py | 8 +-
- python/mozbuild/mozbuild/preprocessor.py | 27 +-
- python/mozbuild/mozbuild/shellutil.py | 6 +-
- .../test/backend/test_recursivemake.py | 18 +-
- .../mozbuild/test/configure/common.py | 8 +-
- .../mozbuild/mozbuild/test/configure/lint.py | 8 +-
- .../test/configure/test_checks_configure.py | 8 +-
- .../test/configure/test_compile_checks.py | 4 +-
- .../mozbuild/test/configure/test_configure.py | 244 +++++-----
- .../mozbuild/test/configure/test_lint.py | 24 +-
- .../test/configure/test_moz_configure.py | 32 +-
- .../mozbuild/test/configure/test_options.py | 450 +++++++++---------
- .../configure/test_toolchain_configure.py | 22 +-
- .../test/configure/test_toolchain_helpers.py | 62 +--
- .../configure/test_toolkit_moz_configure.py | 2 +-
- .../mozbuild/test/configure/test_util.py | 8 +-
- python/mozbuild/mozbuild/testing.py | 10 +-
- python/mozbuild/mozbuild/util.py | 79 ++-
- python/mozbuild/mozbuild/virtualenv.py | 6 +-
- python/mozbuild/mozpack/chrome/manifest.py | 6 +-
- python/mozbuild/mozpack/copier.py | 12 +-
- python/mozbuild/mozpack/files.py | 22 +-
- python/mozbuild/mozpack/manifests.py | 16 +-
- python/mozbuild/mozpack/mozjar.py | 37 +-
- .../manifestparser/manifestparser/ini.py | 13 +-
- .../manifestparser/manifestparser.py | 24 +-
- testing/mozbase/mozinfo/mozinfo/mozinfo.py | 26 +-
- .../mozprocess/mozprocess/processhandler.py | 10 +-
- third_party/python/which/which.py | 18 +-
- 72 files changed, 1081 insertions(+), 993 deletions(-)
-
-diff --git a/build/autoconf/config.status.m4 b/build/autoconf/config.status.m4
-index c75575386..543c2d682 100644
---- a/build/autoconf/config.status.m4
-+++ b/build/autoconf/config.status.m4
-@@ -122,7 +122,7 @@ trap 'rm -f $CONFIG_STATUS conftest*; exit 1' 1 2 15
-
- dnl We're going to need [ ] for python syntax.
- changequote(<<<, >>>)dnl
--echo creating $CONFIG_STATUS
-+echo creating $CONFIG_STATUS in `pwd`
-
- cat > $CONFIG_STATUS <<EOF
-
-diff --git a/build/moz.configure/android-ndk.configure b/build/moz.configure/android-ndk.configure
-index 1067b8619..3b592a237 100644
---- a/build/moz.configure/android-ndk.configure
-+++ b/build/moz.configure/android-ndk.configure
-@@ -30,7 +30,7 @@ js_option('--with-android-version',
-
-
- @depends('--with-android-version', min_android_version, '--help')
--@imports(_from='__builtin__', _import='ValueError')
-+@imports(_from='builtins', _import='ValueError')
- def android_version(value, min_version, _):
- if not value:
- # Someone has passed --without-android-version.
-@@ -68,7 +68,7 @@ add_old_configure_assignment('android_ndk', ndk)
-
- @depends(ndk)
- @checking('for android ndk version')
--@imports(_from='__builtin__', _import='open')
-+@imports(_from='builtins', _import='open')
- def ndk_version(ndk):
- if not ndk:
- # Building 'js/src' for non-Android.
-diff --git a/build/moz.configure/checks.configure b/build/moz.configure/checks.configure
-index 516652da9..11e1091c6 100644
---- a/build/moz.configure/checks.configure
-+++ b/build/moz.configure/checks.configure
-@@ -14,7 +14,7 @@
-
-
- @template
--@imports(_from='__builtin__', _import='Exception')
-+@imports(_from='builtins', _import='Exception')
- def _declare_exceptions():
- class FatalCheckError(Exception):
- '''An exception to throw from a function decorated with @checking.
-@@ -57,7 +57,7 @@ def checking(what, callback=None):
- try:
- ret = func(*args, **kwargs)
- except FatalCheckError as e:
-- error = e.message
-+ error = str(e)
- display_ret = callback(ret) if callback else ret
- if display_ret is True:
- log.info('yes')
-diff --git a/build/moz.configure/init.configure b/build/moz.configure/init.configure
-index 648ac2ded..4d74547d8 100644
---- a/build/moz.configure/init.configure
-+++ b/build/moz.configure/init.configure
-@@ -14,7 +14,7 @@ option(env='DIST', nargs=1, help='DIST directory')
-
-
- @depends('--help', 'DIST')
--@imports(_from='__builtin__', _import='open')
-+@imports(_from='builtins', _import='open')
- @imports(_from='os.path', _import='exists')
- def check_build_environment(help, dist):
- topobjdir = os.path.realpath(os.path.abspath('.'))
-@@ -65,7 +65,7 @@ def check_build_environment(help, dist):
- # Check for CRLF line endings.
- with open(os.path.join(topsrcdir, 'configure.py'), 'rb') as fh:
- data = fh.read()
-- if '\r' in data:
-+ if b'\r' in data:
- die('\n ***\n'
- ' * The source tree appears to have Windows-style line endings.\n'
- ' *\n'
-@@ -269,7 +269,7 @@ def early_options():
- def early_options():
- return set(
- option.env
-- for option in __sandbox__._options.itervalues()
-+ for option in __sandbox__._options.values()
- if option.env
- )
- return early_options
-@@ -307,15 +307,15 @@ def mozconfig_options(mozconfig, automation, help):
- log.info(' %s' % arg)
- helper.add(arg, origin='mozconfig', args=helper._args)
-
-- for key, value in mozconfig['env']['added'].iteritems():
-+ for key, value in mozconfig['env']['added'].items():
- add(key, value)
- os.environ[key] = value
-- for key, (_, value) in mozconfig['env']['modified'].iteritems():
-+ for key, (_, value) in mozconfig['env']['modified'].items():
- add(key, value)
- os.environ[key] = value
-- for key, value in mozconfig['vars']['added'].iteritems():
-+ for key, value in mozconfig['vars']['added'].items():
- add(key, value)
-- for key, (_, value) in mozconfig['vars']['modified'].iteritems():
-+ for key, (_, value) in mozconfig['vars']['modified'].items():
- add(key, value)
-
-
-@@ -353,7 +353,7 @@ option(env='PYTHON3', nargs=1, help='Python 3 interpreter (3.5 or later)')
- @depends('PYTHON3')
- @checking('for Python 3',
- callback=lambda x: '%s (%s)' % (x.path, x.str_version) if x else 'no')
--@imports(_from='__builtin__', _import='Exception')
-+@imports(_from='builtins', _import='Exception')
- @imports(_from='mozbuild.pythonutil', _import='find_python3_executable')
- @imports(_from='mozbuild.pythonutil', _import='python_executable_version')
- def python3(env_python):
-@@ -377,9 +377,6 @@ def python3(env_python):
- if not python:
- return None
-
-- # The API returns a bytes whereas everything in configure is unicode.
-- python = python.decode('utf-8')
--
- return namespace(
- path=python,
- version=version,
-@@ -481,6 +478,8 @@ def hg_config(build_env, hg, version):
- @imports('re')
- def git_version(git):
- out = check_cmd_output(git, '--version').rstrip()
-+ if isinstance(out, bytes):
-+ out = out.decode('utf-8')
-
- match = re.search('git version (.*)$', out)
-
-@@ -551,8 +550,8 @@ option('--target', nargs=1,
- @imports(_from='mozbuild.configure.constants', _import='Endianness')
- @imports(_from='mozbuild.configure.constants', _import='Kernel')
- @imports(_from='mozbuild.configure.constants', _import='OS')
--@imports(_from='__builtin__', _import='KeyError')
--@imports(_from='__builtin__', _import='ValueError')
-+@imports(_from='builtins', _import='KeyError')
-+@imports(_from='builtins', _import='ValueError')
- def split_triplet(triplet, allow_unknown=False):
- # The standard triplet is defined as
- # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
-@@ -562,6 +561,8 @@ def split_triplet(triplet, allow_unknown=False):
- # Additionally, some may omit "unknown" when the manufacturer
- # is not specified and emit
- # CPU_TYPE-OPERATING_SYSTEM
-+ if isinstance(triplet, bytes):
-+ triplet = triplet.decode('utf-8')
- parts = triplet.split('-', 2)
- if len(parts) == 3:
- cpu, _, os = parts
-@@ -987,7 +988,7 @@ add_old_configure_assignment('MOZ_BUILD_APP', build_project)
- # - otherwise, if we have "a" in GRE_MILESTONE, we're building Nightly or Aurora
- # - otherwise, we're building Release/Beta (define RELEASE_OR_BETA)
- @depends(check_build_environment, '--help')
--@imports(_from='__builtin__', _import='open')
-+@imports(_from='builtins', _import='open')
- @imports('re')
- def milestone(build_env, _):
- milestone_path = os.path.join(build_env.topsrcdir,
-@@ -1105,7 +1106,7 @@ def enabled_in_nightly(milestone, _):
- def all_configure_options():
- result = []
- previous = None
-- for option in __sandbox__._options.itervalues():
-+ for option in __sandbox__._options.values():
- # __sandbox__._options contains items for both option.name and
- # option.env. But it's also an OrderedDict, meaning both are
- # consecutive.
-diff --git a/build/moz.configure/keyfiles.configure b/build/moz.configure/keyfiles.configure
-index 5d51cccea..14a35a3c6 100644
---- a/build/moz.configure/keyfiles.configure
-+++ b/build/moz.configure/keyfiles.configure
-@@ -16,8 +16,8 @@ def keyfile(desc, default=None, help=None, callback=lambda x: x):
-
- @depends('--with-%s-keyfile' % name)
- @checking('for the %s key' % desc, lambda x: x and x is not no_key)
-- @imports(_from='__builtin__', _import='open')
-- @imports(_from='__builtin__', _import='IOError')
-+ @imports(_from='builtins', _import='open')
-+ @imports(_from='builtins', _import='IOError')
- def keyfile(value):
- if value:
- try:
-diff --git a/build/moz.configure/old.configure b/build/moz.configure/old.configure
-index 81c10b91f..29b496bae 100644
---- a/build/moz.configure/old.configure
-+++ b/build/moz.configure/old.configure
-@@ -64,9 +64,9 @@ set_config('AUTOCONF', autoconf)
-
- @depends('OLD_CONFIGURE', mozconfig, autoconf, check_build_environment, shell,
- old_configure_assignments, build_project)
--@imports(_from='__builtin__', _import='open')
--@imports(_from='__builtin__', _import='print')
--@imports(_from='__builtin__', _import='sorted')
-+@imports(_from='builtins', _import='open')
-+@imports(_from='builtins', _import='print')
-+@imports(_from='builtins', _import='sorted')
- @imports('glob')
- @imports('itertools')
- @imports('subprocess')
-@@ -113,7 +113,7 @@ def prepare_configure(old_configure, mozconfig, autoconf, build_env, shell,
-
- # Make old-configure append to config.log, where we put our own log.
- # This could be done with a m4 macro, but it's way easier this way
-- script = script.replace('>./config.log', '>>./config.log')
-+ script = script.replace(b'>./config.log', b'>>./config.log')
-
- with open(old_configure, 'wb') as fh:
- fh.write(script)
-@@ -282,8 +282,8 @@ def old_configure_options(*options):
- '--x-includes',
- '--x-libraries',
- )
--@imports(_from='__builtin__', _import='compile')
--@imports(_from='__builtin__', _import='open')
-+@imports(_from='builtins', _import='compile')
-+@imports(_from='builtins', _import='open')
- @imports('logging')
- @imports('os')
- @imports('subprocess')
-@@ -326,7 +326,7 @@ def old_configure(prepare_configure, extra_old_configure_args, all_options,
- log.debug('Running %s', quote(*cmd))
- if extra_env:
- log.debug('with extra environment: %s',
-- ' '.join('%s=%s' % pair for pair in extra_env.iteritems()))
-+ ' '.join('%s=%s' % pair for pair in extra_env.items()))
-
- # Our logging goes to config.log, the same file old.configure uses.
- # We can't share the handle on the file, so close it. We assume nothing
-@@ -359,7 +359,7 @@ def old_configure(prepare_configure, extra_old_configure_args, all_options,
- # Every variation of the exec() function I tried led to:
- # SyntaxError: unqualified exec is not allowed in function 'main' it
- # contains a nested function with free variables
-- exec code in raw_config # noqa
-+ exec(code, raw_config) # noqa
-
- # Ensure all the flags known to old-configure appear in the
- # @old_configure_options above.
-@@ -393,16 +393,24 @@ def set_old_configure_define(name, value):
- @depends(old_configure)
- @imports('types')
- def post_old_configure(raw_config):
-+ log.info('post_old_configure started')
-+
- for k, v in raw_config['substs']:
- set_old_configure_config(
-- k[1:-1], v[1:-1] if isinstance(v, types.StringTypes) else v)
-+ k[1:-1], v[1:-1] if isinstance(v, str) else v)
-+
-+ log.info('post_old_configure 1 finished')
-
-- for k, v in dict(raw_config['defines']).iteritems():
-+ for k, v in dict(raw_config['defines']).items():
- set_old_configure_define(k[1:-1], v[1:-1])
-
-+ log.info('post_old_configure 2 finished')
-+
- set_old_configure_config('non_global_defines',
- raw_config['non_global_defines'])
-
-+ log.info('post_old_configure 3 finished')
-+
-
- # Assuming no other option is declared after this function, handle the
- # env options that were injected by mozconfig_options by creating dummy
-@@ -414,6 +422,7 @@ def post_old_configure(raw_config):
- @imports('__sandbox__')
- @imports(_from='mozbuild.configure.options', _import='Option')
- def remaining_mozconfig_options(_):
-+ log.info('remaining_mozconfig_options started')
- helper = __sandbox__._helper
- for arg in helper:
- if helper._origins[arg] != 'mozconfig':
-@@ -422,5 +431,6 @@ def remaining_mozconfig_options(_):
- if name.isupper() and name not in __sandbox__._options:
- option = Option(env=name, nargs='*', help=name)
- helper.handle(option)
-+ log.info('remaining_mozconfig_options finished')
-
- # Please do not add anything after remaining_mozconfig_options()
-diff --git a/build/moz.configure/toolchain.configure b/build/moz.configure/toolchain.configure
-index fc640c75e..c5508dfb7 100755
---- a/build/moz.configure/toolchain.configure
-+++ b/build/moz.configure/toolchain.configure
-@@ -391,7 +391,7 @@ def get_compiler_info(compiler, language):
- ('CPU', CPU_preprocessor_checks),
- ('KERNEL', kernel_preprocessor_checks),
- ):
-- for n, (value, condition) in enumerate(preprocessor_checks.iteritems()):
-+ for n, (value, condition) in enumerate(preprocessor_checks.items()):
- check += dedent('''\
- #%(if)s %(condition)s
- %%%(name)s "%(value)s"
-@@ -425,9 +425,9 @@ def get_compiler_info(compiler, language):
- data = {}
- for line in result.splitlines():
- if line.startswith(b'%'):
-- k, _, v = line.partition(' ')
-- k = k.lstrip('%')
-- data[k] = v.replace(' ', '').lstrip('"').rstrip('"')
-+ k, _, v = line.partition(b' ')
-+ k = k.lstrip(b'%').decode('utf-8')
-+ data[k] = v.replace(b' ', b'').lstrip(b'"').rstrip(b'"').decode('utf-8')
- log.debug('%s = %s', k, data[k])
-
- try:
-@@ -551,7 +551,7 @@ def check_compiler(compiler, language, target):
- )
-
-
--@imports(_from='__builtin__', _import='open')
-+@imports(_from='builtins', _import='open')
- @imports('json')
- @imports('subprocess')
- @imports('sys')
-@@ -606,7 +606,7 @@ def vs_major_version(value):
-
-
- @depends(host, target, vs_major_version, check_build_environment, '--with-visual-studio-version')
--@imports(_from='__builtin__', _import='sorted')
-+@imports(_from='builtins', _import='sorted')
- @imports(_from='operator', _import='itemgetter')
- @imports('platform')
- def vc_compiler_path(host, target, vs_major_version, env, vs_release_name):
-@@ -807,7 +807,7 @@ def compiler(language, host_or_target, c_compiler=None, other_compiler=None,
- target.os != 'Android':
- return namespace(**{
- k: [] if k == 'flags' else v
-- for k, v in other_compiler.__dict__.iteritems()
-+ for k, v in other_compiler.__dict__.items()
- })
-
- # Normally, we'd use `var` instead of `_var`, but the interaction with
-@@ -1238,7 +1238,7 @@ set_config('VISIBILITY_FLAGS', visibility_flags)
-
- @depends(c_compiler)
- @imports('multiprocessing')
--@imports(_from='__builtin__', _import='min')
-+@imports(_from='builtins', _import='min')
- def pgo_flags(compiler):
- if compiler.type in ('gcc', 'clang'):
- return namespace(
-@@ -1517,6 +1517,8 @@ def enable_gnu_linker(enable_gold_option, c_compiler, developer_options, build_e
- gold = check_cmd_output(*detection_cmd).strip()
- if not gold:
- return
-+ if isinstance(gold, bytes):
-+ gold = gold.decode('utf-8')
-
- goldFullPath = find_program(gold)
- if goldFullPath is None:
-diff --git a/build/moz.configure/util.configure b/build/moz.configure/util.configure
-index 3284fd8b5..218813e2d 100644
---- a/build/moz.configure/util.configure
-+++ b/build/moz.configure/util.configure
-@@ -25,7 +25,6 @@ def configure_error(message):
- # does not.
-
-
--@imports(_from='__builtin__', _import='unicode')
- @imports('subprocess')
- @imports('sys')
- @imports(_from='mozbuild.configure.util', _import='LineIO')
-@@ -39,10 +38,10 @@ def check_cmd_output(*args, **kwargs):
- if 'env' in kwargs:
- normalized_env = {}
- for k, v in kwargs['env'].items():
-- if isinstance(k, unicode):
-+ if isinstance(k, str):
- k = k.encode('utf-8', 'strict')
-
-- if isinstance(v, unicode):
-+ if isinstance(v, str):
- v = v.encode('utf-8', 'strict')
-
- normalized_env[k] = v
-@@ -285,7 +284,7 @@ def unique_list(l):
- # ('19.0', 'x64', r'C:\...\amd64\cl.exe')
- # ('19.0', 'x86', r'C:\...\amd64_x86\cl.exe')
- @imports(_import='_winreg', _as='winreg')
--@imports(_from='__builtin__', _import='WindowsError')
-+@imports(_from='builtins', _import='WindowsError')
- @imports(_from='fnmatch', _import='fnmatch')
- def get_registry_values(pattern, get_32_and_64_bit=False):
- def enum_helper(func, key):
-@@ -360,6 +359,8 @@ def get_registry_values(pattern, get_32_and_64_bit=False):
- @imports(_from='mozbuild.configure.util', _import='Version', _as='_Version')
- def Version(v):
- 'A version number that can be compared usefully.'
-+ if isinstance(v, bytes):
-+ v = v.decode('utf-8')
- return _Version(v)
-
- # Denotes a deprecated option. Combines option() and @depends:
-diff --git a/build/moz.configure/windows.configure b/build/moz.configure/windows.configure
-index a5b790e3b..2b88fc447 100644
---- a/build/moz.configure/windows.configure
-+++ b/build/moz.configure/windows.configure
-@@ -10,7 +10,7 @@ option('--with-windows-version', nargs=1, default='603',
-
-
- @depends('--with-windows-version')
--@imports(_from='__builtin__', _import='ValueError')
-+@imports(_from='builtins', _import='ValueError')
- def valid_windows_version(value):
- if not value:
- die('Cannot build with --without-windows-version')
-@@ -50,8 +50,8 @@ def windows_sdk_dir(value, host):
-
- @imports('os')
- @imports('re')
--@imports(_from='__builtin__', _import='sorted')
--@imports(_from='__builtin__', _import='WindowsError')
-+@imports(_from='builtins', _import='sorted')
-+@imports(_from='builtins', _import='WindowsError')
- def get_sdk_dirs(sdk, subdir):
- def get_dirs_containing(sdk, stem, subdir):
- base = os.path.join(sdk, stem)
-@@ -96,7 +96,7 @@ def valid_windows_sdk_dir_result(value):
-
- @depends(c_compiler, windows_sdk_dir, valid_windows_version, 'WINDOWSSDKDIR')
- @checking('for Windows SDK', valid_windows_sdk_dir_result)
--@imports(_from='__builtin__', _import='sorted')
-+@imports(_from='builtins', _import='sorted')
- @imports(_from='textwrap', _import='dedent')
- def valid_windows_sdk_dir(compiler, windows_sdk_dir, target_version,
- windows_sdk_dir_env):
-@@ -174,7 +174,7 @@ def valid_ucrt_sdk_dir_result(value):
- @depends(windows_sdk_dir, 'WINDOWSSDKDIR', c_compiler)
- @checking('for Universal CRT SDK', valid_ucrt_sdk_dir_result)
- @imports('os')
--@imports(_from='__builtin__', _import='sorted')
-+@imports(_from='builtins', _import='sorted')
- @imports(_import='mozpack.path', _as='mozpath')
- def valid_ucrt_sdk_dir(windows_sdk_dir, windows_sdk_dir_env, c_compiler):
- if windows_sdk_dir_env:
-diff --git a/build/templates.mozbuild b/build/templates.mozbuild
-index 3da850ce5..ae5e410fe 100644
---- a/build/templates.mozbuild
-+++ b/build/templates.mozbuild
-@@ -10,7 +10,7 @@ def Binary():
- templates.'''
-
- # Add -llog by default, since we use it all over the place.
-- if CONFIG['OS_TARGET'] == 'Android':
-+ if str(CONFIG['OS_TARGET']) == 'Android':
- OS_LIBS += ['log']
-
-
-diff --git a/config/MozZipFile.py b/config/MozZipFile.py
-index 337fe0521..dc7add4c3 100644
---- a/config/MozZipFile.py
-+++ b/config/MozZipFile.py
-@@ -18,7 +18,7 @@ class ZipFile(zipfile.ZipFile):
- def __init__(self, file, mode="r", compression=zipfile.ZIP_STORED,
- lock = False):
- if lock:
-- assert isinstance(file, basestring)
-+ assert isinstance(file, str)
- self.lockfile = lock_file(file + '.lck')
- else:
- self.lockfile = None
-@@ -46,7 +46,7 @@ class ZipFile(zipfile.ZipFile):
- date_time=time.localtime(time.time()))
- zinfo.compress_type = self.compression
- # Add some standard UNIX file access permissions (-rw-r--r--).
-- zinfo.external_attr = (0x81a4 & 0xFFFF) << 16L
-+ zinfo.external_attr = (0x81a4 & 0xFFFF) << 16
- else:
- zinfo = zinfo_or_arcname
-
-@@ -58,7 +58,7 @@ class ZipFile(zipfile.ZipFile):
- # as the old, reuse the existing entry.
-
- doSeek = False # store if we need to seek to the eof after overwriting
-- if self.NameToInfo.has_key(zinfo.filename):
-+ if zinfo.filename in self.NameToInfo:
- # Find the last ZipInfo with our name.
- # Last, because that's catching multiple overwrites
- i = len(self.filelist)
-@@ -109,14 +109,14 @@ class ZipFile(zipfile.ZipFile):
- # adjust file mode if we originally just wrote, now we rewrite
- self.fp.close()
- self.fp = open(self.filename, 'r+b')
-- all = map(lambda zi: (zi, True), self.filelist) + \
-- map(lambda zi: (zi, False), self._remove)
-+ all = [(zi, True) for zi in self.filelist] + \
-+ [(zi, False) for zi in self._remove]
- all.sort(lambda l, r: cmp(l[0].header_offset, r[0].header_offset))
- # empty _remove for multiple closes
- self._remove = []
-
- lengths = [all[i+1][0].header_offset - all[i][0].header_offset
-- for i in xrange(len(all)-1)]
-+ for i in range(len(all)-1)]
- lengths.append(self.end - all[-1][0].header_offset)
- to_pos = 0
- for (zi, keep), length in zip(all, lengths):
-diff --git a/config/expandlibs.py b/config/expandlibs.py
-index ac06c432f..df1fed15d 100644
---- a/config/expandlibs.py
-+++ b/config/expandlibs.py
-@@ -26,7 +26,7 @@ ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} following these rules:
- descriptor contains. And for each of these LIBS, also apply the same
- rules.
- '''
--from __future__ import with_statement
-+
- import sys, os, errno
- import expandlibs_config as conf
-
-@@ -36,7 +36,7 @@ def ensureParentDir(file):
- if dir and not os.path.exists(dir):
- try:
- os.makedirs(dir)
-- except OSError, error:
-+ except OSError as error:
- if error.errno != errno.EEXIST:
- raise
-
-@@ -140,4 +140,4 @@ class ExpandArgs(list):
- return [relativize(arg)]
-
- if __name__ == '__main__':
-- print " ".join(ExpandArgs(sys.argv[1:]))
-+ print(" ".join(ExpandArgs(sys.argv[1:])))
-diff --git a/config/expandlibs_exec.py b/config/expandlibs_exec.py
-index df656016c..fb786a6a8 100644
---- a/config/expandlibs_exec.py
-+++ b/config/expandlibs_exec.py
-@@ -20,7 +20,7 @@ With the --symbol-order argument, followed by a file name, it will add the
- relevant linker options to change the order in which the linker puts the
- symbols appear in the resulting binary. Only works for ELF targets.
- '''
--from __future__ import with_statement
-+
- import sys
- import os
- from expandlibs import (
-@@ -304,11 +304,11 @@ class SectionFinder(object):
- return syms
-
- def print_command(out, args):
-- print >>out, "Executing: " + " ".join(args)
-+ print("Executing: " + " ".join(args), file=out)
- for tmp in [f for f in args.tmp if os.path.isfile(f)]:
-- print >>out, tmp + ":"
-+ print(tmp + ":", file=out)
- with open(tmp) as file:
-- print >>out, "".join([" " + l for l in file.readlines()])
-+ print("".join([" " + l for l in file.readlines()]), file=out)
- out.flush()
-
- def main(args, proc_callback=None):
-@@ -338,13 +338,13 @@ def main(args, proc_callback=None):
- proc = subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
- if proc_callback:
- proc_callback(proc)
-- except Exception, e:
-- print >>sys.stderr, 'error: Launching', args, ':', e
-+ except Exception as e:
-+ print('error: Launching', args, ':', e, file=sys.stderr)
- raise e
- (stdout, stderr) = proc.communicate()
- if proc.returncode and not options.verbose:
- print_command(sys.stderr, args)
-- sys.stderr.write(stdout)
-+ sys.stderr.write(stdout.decode("utf-8"))
- sys.stderr.flush()
- if proc.returncode:
- return proc.returncode
-diff --git a/config/expandlibs_gen.py b/config/expandlibs_gen.py
-index b1de63cd0..dc62bd184 100644
---- a/config/expandlibs_gen.py
-+++ b/config/expandlibs_gen.py
-@@ -5,7 +5,7 @@
- '''Given a list of object files and library names, prints a library
- descriptor to standard output'''
-
--from __future__ import with_statement
-+
- import sys
- import os
- import expandlibs_config as conf
-@@ -38,4 +38,4 @@ if __name__ == '__main__':
-
- ensureParentDir(options.output)
- with open(options.output, 'w') as outfile:
-- print >>outfile, generate(args)
-+ print(generate(args), file=outfile)
-diff --git a/configure.py b/configure.py
-index 771e34e38..bee329d7c 100644
---- a/configure.py
-+++ b/configure.py
-@@ -2,10 +2,11 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import print_function, unicode_literals
-+
-
- import codecs
- import itertools
-+import logging
- import os
- import sys
- import textwrap
-@@ -34,7 +35,9 @@ from mozbuild.util import (
- def main(argv):
- config = {}
- sandbox = ConfigureSandbox(config, os.environ, argv)
-+ print('sandbox.run started')
- sandbox.run(os.path.join(os.path.dirname(__file__), 'moz.configure'))
-+ print('sandbox.run finished')
-
- if sandbox._help:
- return 0
-@@ -56,12 +59,21 @@ def config_status(config):
-
- sanitized_config = {}
- sanitized_config['substs'] = {
-- k: sanitized_bools(v) for k, v in config.iteritems()
-+ k: sanitized_bools(v) for k, v in config.items()
- if k not in ('DEFINES', 'non_global_defines', 'TOPSRCDIR', 'TOPOBJDIR',
- 'ALL_CONFIGURE_PATHS')
- }
-+
-+ # Hack around OptionValue entries unknown during compile
-+ for opt in ('BUILD_BACKENDS', 'MOZ_UI_LOCALE', 'RUSTFLAGS' ):
-+ old = sanitized_config['substs'][opt]
-+ new = []
-+ for setting in old:
-+ new.append(setting)
-+ sanitized_config['substs'][opt] = new
-+
- sanitized_config['defines'] = {
-- k: sanitized_bools(v) for k, v in config['DEFINES'].iteritems()
-+ k: sanitized_bools(v) for k, v in config['DEFINES'].items()
- }
- sanitized_config['non_global_defines'] = config['non_global_defines']
- sanitized_config['topsrcdir'] = config['TOPSRCDIR']
-@@ -71,20 +83,17 @@ def config_status(config):
- # Create config.status. Eventually, we'll want to just do the work it does
- # here, when we're able to skip configure tests/use cached results/not rely
- # on autoconf.
-- print("Creating config.status", file=sys.stderr)
-- encoding = 'mbcs' if sys.platform == 'win32' else 'utf-8'
-- with codecs.open('config.status', 'w', encoding) as fh:
-+ logging.getLogger('moz.configure').info('Creating config.status')
-+ with codecs.open('config.status', 'w', 'utf-8') as fh:
- fh.write(textwrap.dedent('''\
- #!%(python)s
-- # coding=%(encoding)s
-- from __future__ import unicode_literals
-- from mozbuild.util import encode
-- encoding = '%(encoding)s'
-- ''') % {'python': config['PYTHON'], 'encoding': encoding})
-+ # coding=utf-8
-+ print("config.status started")
-+ ''') % {'python': config['PYTHON']})
- # A lot of the build backend code is currently expecting byte
- # strings and breaks in subtle ways with unicode strings. (bug 1296508)
-- for k, v in sanitized_config.iteritems():
-- fh.write('%s = encode(%s, encoding)\n' % (k, indented_repr(v)))
-+ for k, v in sanitized_config.items():
-+ fh.write('%s = %s\n' % (k, indented_repr(v)))
- fh.write("__all__ = ['topobjdir', 'topsrcdir', 'defines', "
- "'non_global_defines', 'substs', 'mozconfig']")
-
-@@ -97,6 +106,9 @@ def config_status(config):
- args = dict([(name, globals()[name]) for name in __all__])
- config_status(**args)
- '''))
-+ fh.write(textwrap.dedent('''
-+ print("config.status finished")
-+ '''))
-
- partial_config = PartialConfigEnvironment(config['TOPOBJDIR'])
- partial_config.write_vars(sanitized_config)
-@@ -116,7 +128,7 @@ def config_status(config):
- # executable permissions.
- os.chmod('config.status', 0o755)
- if config.get('MOZ_BUILD_APP') != 'js' or config.get('JS_STANDALONE'):
-- os.environ[b'WRITE_MOZINFO'] = b'1'
-+ os.environ['WRITE_MOZINFO'] = '1'
- from mozbuild.config_status import config_status
-
- # Some values in sanitized_config also have more complex types, such as
-@@ -127,7 +139,7 @@ def config_status(config):
-
- # A lot of the build backend code is currently expecting byte strings
- # and breaks in subtle ways with unicode strings.
-- return config_status(args=[], **encode(sanitized_config, encoding))
-+ return config_status(args=[], **sanitized_config)
- return 0
-
-
-diff --git a/js/src/build/moz.build b/js/src/build/moz.build
-index a7f5fa4ce..856cae32d 100644
---- a/js/src/build/moz.build
-+++ b/js/src/build/moz.build
-@@ -47,22 +47,22 @@ USE_LIBS += [
- 'zlib',
- ]
-
--if CONFIG['OS_ARCH'] not in ('WINNT', 'HP-UX'):
-+if str(CONFIG['OS_ARCH']) not in ('WINNT', 'HP-UX'):
- OS_LIBS += [
- 'm',
- ]
-
--if CONFIG['OS_ARCH'] == 'FreeBSD':
-+if str(CONFIG['OS_ARCH']) == 'FreeBSD':
- OS_LIBS += [
- '-pthread',
- ]
-
--if CONFIG['OS_ARCH'] == 'Linux':
-+if str(CONFIG['OS_ARCH']) == 'Linux':
- OS_LIBS += [
- 'dl',
- ]
-
--if CONFIG['OS_ARCH'] == 'SunOS':
-+if str(CONFIG['OS_ARCH']) == 'SunOS':
- OS_LIBS += [
- 'posix4',
- 'dl',
-diff --git a/js/src/builtin/embedjs.py b/js/src/builtin/embedjs.py
-index ba25e71c1..d4f2de122 100644
---- a/js/src/builtin/embedjs.py
-+++ b/js/src/builtin/embedjs.py
-@@ -36,7 +36,7 @@
- #
- # It uses the C preprocessor to process its inputs.
-
--from __future__ import with_statement
-+
- import re, sys, os, subprocess
- import shlex
- import which
-@@ -52,8 +52,8 @@ def ToCAsciiArray(lines):
-
- def ToCArray(lines):
- result = []
-- for chr in lines:
-- result.append(str(ord(chr)))
-+ for char in lines:
-+ result.append("0x%0.2X" % char)
- return ", ".join(result)
-
- HEADER_TEMPLATE = """\
-@@ -87,7 +87,7 @@ def embed(cxx, preprocessorOption, cppflags, msgs, sources, c_out, js_out, names
-
- js_out.write(processed)
- import zlib
-- compressed = zlib.compress(processed)
-+ compressed = zlib.compress(processed.encode('utf-8'))
- data = ToCArray(compressed)
- c_out.write(HEADER_TEMPLATE % {
- 'sources_type': 'unsigned char',
-@@ -107,7 +107,7 @@ def preprocess(cxx, preprocessorOption, source, args = []):
- tmpOut = 'self-hosting-preprocessed.pp';
- outputArg = shlex.split(preprocessorOption + tmpOut)
-
-- with open(tmpIn, 'wb') as input:
-+ with open(tmpIn, 'w') as input:
- input.write(source)
- print(' '.join(cxx + outputArg + args + [tmpIn]))
- result = subprocess.Popen(cxx + outputArg + args + [tmpIn]).wait()
-diff --git a/js/src/configure b/js/src/configure
-index 3b3a39af3..8f5ea41d0 100755
---- a/js/src/configure
-+++ b/js/src/configure
-@@ -24,4 +24,4 @@ export OLD_CONFIGURE="$SRCDIR"/old-configure
-
- set -- "$@" --enable-project=js
-
--which python2.7 > /dev/null && exec python2.7 "$TOPSRCDIR/configure.py" "$@" || exec python "$TOPSRCDIR/configure.py" "$@"
-+which python3 > /dev/null && exec python3 "$TOPSRCDIR/configure.py" "$@" || exec python "$TOPSRCDIR/configure.py" "$@"
-diff --git a/js/src/frontend/GenerateReservedWords.py b/js/src/frontend/GenerateReservedWords.py
-index 3aa2307b9..381c8e2b4 100644
---- a/js/src/frontend/GenerateReservedWords.py
-+++ b/js/src/frontend/GenerateReservedWords.py
-@@ -80,14 +80,14 @@ def split_list_per_column(reserved_word_list, column):
- per_column = column_dict.setdefault(word[column], [])
- per_column.append(item)
-
-- return sorted(column_dict.items(), key=lambda (char, word): ord(char))
-+ return sorted(list(column_dict.items()), key=lambda char_word: ord(char_word[0]))
-
- def generate_letter_switch(opt, unprocessed_columns, reserved_word_list,
- columns=None):
- assert(len(reserved_word_list) != 0);
-
- if not columns:
-- columns = range(0, unprocessed_columns)
-+ columns = list(range(0, unprocessed_columns))
-
- if len(reserved_word_list) == 1:
- index, word = reserved_word_list[0]
-@@ -161,7 +161,7 @@ def split_list_per_length(reserved_word_list):
- per_length = length_dict.setdefault(len(word), [])
- per_length.append(item)
-
-- return sorted(length_dict.items(), key=lambda (length, word): length)
-+ return sorted(list(length_dict.items()), key=lambda length_word: length_word[0])
-
- def generate_switch(opt, reserved_word_list):
- assert(len(reserved_word_list) != 0);
-diff --git a/js/src/gc/GenerateStatsPhases.py b/js/src/gc/GenerateStatsPhases.py
-index 2daf83555..e39a26a4b 100644
---- a/js/src/gc/GenerateStatsPhases.py
-+++ b/js/src/gc/GenerateStatsPhases.py
-@@ -267,7 +267,7 @@ def generateHeader(out):
- #
- # Generate PhaseKind enum.
- #
-- phaseKindNames = map(lambda phaseKind: phaseKind.name, AllPhaseKinds)
-+ phaseKindNames = [phaseKind.name for phaseKind in AllPhaseKinds]
- extraPhaseKinds = [
- "NONE = LIMIT",
- "EXPLICIT_SUSPENSION = LIMIT",
-@@ -279,7 +279,7 @@ def generateHeader(out):
- #
- # Generate Phase enum.
- #
-- phaseNames = map(lambda phase: phase.name, AllPhases)
-+ phaseNames = [phase.name for phase in AllPhases]
- extraPhases = [
- "NONE = LIMIT",
- "EXPLICIT_SUSPENSION = LIMIT",
-diff --git a/js/src/old-configure.in b/js/src/old-configure.in
-index 11c3d5a2e..389265404 100644
---- a/js/src/old-configure.in
-+++ b/js/src/old-configure.in
-@@ -1884,3 +1884,5 @@ if test "$JS_STANDALONE"; then
- fi
-
- rm -fr confdefs* $ac_clean_files
-+echo confdefs* $ac_clean_files removed
-+echo "old-configure done"
-diff --git a/memory/build/moz.build b/memory/build/moz.build
-index e2c715271..f09ce7935 100644
---- a/memory/build/moz.build
-+++ b/memory/build/moz.build
-@@ -30,7 +30,7 @@ else:
- 'fallback.cpp',
- ]
-
--if CONFIG['OS_TARGET'] == 'Darwin' and (CONFIG['MOZ_REPLACE_MALLOC'] or
-+if str(CONFIG['OS_TARGET']) == 'Darwin' and (CONFIG['MOZ_REPLACE_MALLOC'] or
- CONFIG['MOZ_MEMORY']):
- SOURCES += [
- 'zone.c',
-@@ -38,15 +38,15 @@ if CONFIG['OS_TARGET'] == 'Darwin' and (CONFIG['MOZ_REPLACE_MALLOC'] or
-
- Library('memory')
-
--if CONFIG['OS_TARGET'] == 'Android' and CONFIG['CC_TYPE'] == 'clang':
-+if str(CONFIG['OS_TARGET']) == 'Android' and str(CONFIG['CC_TYPE']) == 'clang':
- CXXFLAGS += [
- '-Wno-tautological-pointer-compare',
- ]
-
--if CONFIG['MOZ_BUILD_APP'] != 'memory':
-+if str(CONFIG['MOZ_BUILD_APP']) != 'memory':
- FINAL_LIBRARY = 'mozglue'
-
--if CONFIG['CC_TYPE'] in ('msvc', 'clang-cl'):
-+if str(CONFIG['CC_TYPE']) in ('msvc', 'clang-cl'):
- CXXFLAGS += ['-wd4273'] # inconsistent dll linkage (bug 558163)
-
- if CONFIG['MOZ_REPLACE_MALLOC_STATIC']:
-diff --git a/mozglue/build/moz.build b/mozglue/build/moz.build
-index 53758485a..5e9308802 100644
---- a/mozglue/build/moz.build
-+++ b/mozglue/build/moz.build
-@@ -9,12 +9,12 @@
- # If this is ever changed, update MOZ_SHARED_MOZGLUE in browser/installer/Makefile.in
- if CONFIG['JS_STANDALONE'] and not CONFIG['MOZ_MEMORY']:
- Library('mozglue')
--elif CONFIG['OS_TARGET'] in ('WINNT', 'Darwin', 'Android'):
-+elif str(CONFIG['OS_TARGET']) in ('WINNT', 'Darwin', 'Android'):
- SharedLibrary('mozglue')
- else:
- Library('mozglue')
-
--if CONFIG['OS_TARGET'] == 'Android':
-+if str(CONFIG['OS_TARGET']) == 'Android':
- SOURCES += [
- 'BionicGlue.cpp',
- ]
-@@ -24,14 +24,14 @@ if CONFIG['MOZ_ASAN']:
- 'AsanOptions.cpp',
- ]
-
--if CONFIG['OS_TARGET'] == 'WINNT':
-+if str(CONFIG['OS_TARGET']) == 'WINNT':
- DEFFILE = 'mozglue.def'
- # We'll break the DLL blocklist if we immediately load user32.dll
- DELAYLOAD_DLLS += [
- 'user32.dll',
- ]
-
-- if CONFIG['CC_TYPE'] == "msvc":
-+ if str(CONFIG['CC_TYPE']) == "msvc":
- CFLAGS += ['-guard:cf']
- CXXFLAGS += ['-guard:cf']
- LDFLAGS += ['-guard:cf']
-@@ -48,12 +48,12 @@ if CONFIG['MOZ_WIDGET_TOOLKIT']:
- 'dummy.cpp',
- ]
-
-- if CONFIG['OS_TARGET'] == 'WINNT':
-+ if str(CONFIG['OS_TARGET']) == 'WINNT':
- LOCAL_INCLUDES += [
- '/memory/build',
- ]
-
-- if CONFIG['CC_TYPE'] == "msvc":
-+ if str(CONFIG['CC_TYPE']) == "msvc":
- SOURCES += ['WindowsCFGStatus.cpp']
- SOURCES += [
- 'Authenticode.cpp',
-@@ -85,17 +85,17 @@ if CONFIG['MOZ_WIDGET_TOOLKIT']:
- 'WindowsDllBlocklist.h',
- ]
-
-- if CONFIG['CPU_ARCH'].startswith('x86'):
-+ if str(CONFIG['CPU_ARCH']).startswith('x86'):
- SOURCES += [
- 'SSE.cpp',
- ]
-
-- if CONFIG['CPU_ARCH'] == 'arm':
-+ if str(CONFIG['CPU_ARCH']) == 'arm':
- SOURCES += [
- 'arm.cpp',
- ]
-
-- if CONFIG['CPU_ARCH'].startswith('mips'):
-+ if str(CONFIG['CPU_ARCH']).startswith('mips'):
- SOURCES += [
- 'mips.cpp',
- ]
-@@ -114,7 +114,7 @@ LIBRARY_DEFINES['MOZ_HAS_MOZGLUE'] = True
-
- LDFLAGS += CONFIG['MOZ_GLUE_WRAP_LDFLAGS']
-
--if CONFIG['OS_TARGET'] == 'Darwin':
-+if str(CONFIG['OS_TARGET']) == 'Darwin':
- # On OSX 10.10.3, a dead lock happens in some cases involving dynamic
- # symbol resolution for symbols that jemalloc itself uses. While it
- # might be possible to find a way to avoid all such symbol resolutions,
-@@ -124,7 +124,7 @@ if CONFIG['OS_TARGET'] == 'Darwin':
- # for TLS.
- LDFLAGS += ['-Wl,-bind_at_load']
-
--if CONFIG['MOZ_LINKER'] and CONFIG['TARGET_CPU'] == 'arm':
-+if CONFIG['MOZ_LINKER'] and str(CONFIG['TARGET_CPU']) == 'arm':
- LDFLAGS += ['-Wl,-version-script,%s/arm-eabi-filter' % SRCDIR]
-
- DIST_INSTALL = True
-diff --git a/python/mozbuild/mozbuild/action/check_binary.py b/python/mozbuild/mozbuild/action/check_binary.py
-index 5665ef053..b696f73d6 100644
---- a/python/mozbuild/mozbuild/action/check_binary.py
-+++ b/python/mozbuild/mozbuild/action/check_binary.py
-@@ -104,6 +104,8 @@ def iter_readelf_symbols(target, binary):
-
- def iter_readelf_dynamic(target, binary):
- for line in get_output(target['readelf'], '-d', binary):
-+ if isinstance(line, bytes):
-+ line=line.decode('utf-8')
- data = line.split(None, 2)
- if data and len(data) == 3 and data[0].startswith('0x'):
- yield data[1].rstrip(')').lstrip('('), data[2]
-diff --git a/python/mozbuild/mozbuild/action/process_define_files.py b/python/mozbuild/mozbuild/action/process_define_files.py
-index 563fbb8fa..c3df2869b 100644
---- a/python/mozbuild/mozbuild/action/process_define_files.py
-+++ b/python/mozbuild/mozbuild/action/process_define_files.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import argparse
- import os
-@@ -53,7 +53,7 @@ def process_define_file(output, input):
- 'CONFIGURE_DEFINE_FILE')
- defines = '\n'.join(sorted(
- '#define %s %s' % (name, val)
-- for name, val in config.defines['ALLDEFINES'].iteritems()))
-+ for name, val in config.defines['ALLDEFINES'].items()))
- l = l[:m.start('cmd') - 1] \
- + defines + l[m.end('name'):]
- elif cmd == 'define':
-diff --git a/python/mozbuild/mozbuild/backend/base.py b/python/mozbuild/mozbuild/backend/base.py
-index a8d5c94e0..7cda63475 100644
---- a/python/mozbuild/mozbuild/backend/base.py
-+++ b/python/mozbuild/mozbuild/backend/base.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, unicode_literals
-+
-
- from abc import (
- ABCMeta,
-@@ -31,7 +31,7 @@ from .configenvironment import ConfigEnvironment
- from mozbuild.base import ExecutionSummary
-
-
--class BuildBackend(LoggingMixin):
-+class BuildBackend(LoggingMixin, metaclass=ABCMeta):
- """Abstract base class for build backends.
-
- A build backend is merely a consumer of the build configuration (the output
-@@ -39,8 +39,6 @@ class BuildBackend(LoggingMixin):
- is the discretion of the specific implementation.
- """
-
-- __metaclass__ = ABCMeta
--
- def __init__(self, environment):
- assert isinstance(environment, (ConfigEnvironment, EmptyConfig))
- self.populate_logger()
-@@ -311,7 +309,7 @@ class BuildBackend(LoggingMixin):
- srcdir = mozpath.dirname(obj.input_path)
- pp.context.update({
- k: ' '.join(v) if isinstance(v, list) else v
-- for k, v in obj.config.substs.iteritems()
-+ for k, v in obj.config.substs.items()
- })
- pp.context.update(
- top_srcdir=obj.topsrcdir,
-diff --git a/python/mozbuild/mozbuild/backend/common.py b/python/mozbuild/mozbuild/backend/common.py
-index d00cbbcaf..f747df446 100644
---- a/python/mozbuild/mozbuild/backend/common.py
-+++ b/python/mozbuild/mozbuild/backend/common.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, unicode_literals
-+
-
- import json
- import os
-@@ -174,7 +174,7 @@ class CommonBackend(BuildBackend):
- if len(self._idl_manager.idls):
- self._write_rust_xpidl_summary(self._idl_manager)
- self._handle_idl_manager(self._idl_manager)
-- self._handle_generated_sources(mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % idl['root']) for idl in self._idl_manager.idls.values())
-+ self._handle_generated_sources(mozpath.join(self.environment.topobjdir, 'dist/include/%s.h' % idl['root']) for idl in list(self._idl_manager.idls.values()))
-
-
- for config in self._configs:
-@@ -372,14 +372,14 @@ class CommonBackend(BuildBackend):
-
- with self._write_file(mozpath.join(topobjdir, 'dist', 'xpcrs', 'rt', 'all.rs')) as fh:
- fh.write("// THIS FILE IS GENERATED - DO NOT EDIT\n\n")
-- for idl in manager.idls.values():
-+ for idl in list(manager.idls.values()):
- fh.write(include_tmpl % ("rt", idl['root']))
- fh.write(";\n")
-
- with self._write_file(mozpath.join(topobjdir, 'dist', 'xpcrs', 'bt', 'all.rs')) as fh:
- fh.write("// THIS FILE IS GENERATED - DO NOT EDIT\n\n")
- fh.write("&[\n")
-- for idl in manager.idls.values():
-+ for idl in list(manager.idls.values()):
- fh.write(include_tmpl % ("bt", idl['root']))
- fh.write(",\n")
- fh.write("]\n")
-diff --git a/python/mozbuild/mozbuild/backend/configenvironment.py b/python/mozbuild/mozbuild/backend/configenvironment.py
-index 3676a7d18..f0896cea4 100644
---- a/python/mozbuild/mozbuild/backend/configenvironment.py
-+++ b/python/mozbuild/mozbuild/backend/configenvironment.py
-@@ -2,14 +2,14 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import
-+
-
- import os
- import sys
- import json
-
- from collections import Iterable, OrderedDict
--from types import StringTypes, ModuleType
-+from types import ModuleType
-
- import mozpack.path as mozpath
-
-@@ -22,7 +22,7 @@ from mozbuild.shellutil import quote as shell_quote
-
-
- if sys.version_info.major == 2:
-- text_type = unicode
-+ text_type = str
- else:
- text_type = str
-
-@@ -151,7 +151,7 @@ class ConfigEnvironment(object):
- shell_quote(self.defines[name]).replace('$', '$$'))
- for name in sorted(global_defines)])
- def serialize(name, obj):
-- if isinstance(obj, StringTypes):
-+ if isinstance(obj, str):
- return obj
- if isinstance(obj, Iterable):
- return ' '.join(obj)
-@@ -185,8 +185,8 @@ class ConfigEnvironment(object):
- except UnicodeDecodeError:
- return v.decode('utf-8', 'replace')
-
-- for k, v in self.substs.items():
-- if not isinstance(v, StringTypes):
-+ for k, v in list(self.substs.items()):
-+ if not isinstance(v, str):
- if isinstance(v, Iterable):
- type(v)(decode(i) for i in v)
- elif not isinstance(v, text_type):
-@@ -255,7 +255,7 @@ class PartialConfigDict(object):
- existing_files = self._load_config_track()
-
- new_files = set()
-- for k, v in values.iteritems():
-+ for k, v in values.items():
- new_files.add(self._write_file(k, v))
-
- for filename in existing_files - new_files:
-diff --git a/python/mozbuild/mozbuild/backend/fastermake.py b/python/mozbuild/mozbuild/backend/fastermake.py
-index b029aa10f..b66ade64f 100644
---- a/python/mozbuild/mozbuild/backend/fastermake.py
-+++ b/python/mozbuild/mozbuild/backend/fastermake.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, unicode_literals, print_function
-+
-
- from mozbuild.backend.base import PartialBackend
- from mozbuild.backend.common import CommonBackend
-@@ -140,7 +140,7 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
- # Add information for chrome manifest generation
- manifest_targets = []
-
-- for target, entries in self._manifest_entries.iteritems():
-+ for target, entries in self._manifest_entries.items():
- manifest_targets.append(target)
- install_target = mozpath.basedir(target, install_manifests_bases)
- self._install_manifests[install_target].add_content(
-@@ -152,13 +152,13 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
- % ' '.join(self._install_manifests.keys()))
-
- # Add dependencies we infered:
-- for target, deps in self._dependencies.iteritems():
-+ for target, deps in self._dependencies.items():
- mk.create_rule([target]).add_dependencies(
- '$(TOPOBJDIR)/%s' % d for d in deps)
-
- mk.add_statement('include $(TOPSRCDIR)/config/faster/rules.mk')
-
-- for base, install_manifest in self._install_manifests.iteritems():
-+ for base, install_manifest in self._install_manifests.items():
- with self._write_file(
- mozpath.join(self.environment.topobjdir, 'faster',
- 'install_%s' % base.replace('/', '_'))) as fh:
-@@ -167,7 +167,7 @@ class FasterMakeBackend(CommonBackend, PartialBackend):
- # For artifact builds only, write a single unified manifest for consumption by |mach watch|.
- if self.environment.is_artifact_build:
- unified_manifest = InstallManifest()
-- for base, install_manifest in self._install_manifests.iteritems():
-+ for base, install_manifest in self._install_manifests.items():
- # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash.
- assert base.startswith('dist/bin')
- base = base[len('dist/bin'):]
-diff --git a/python/mozbuild/mozbuild/backend/recursivemake.py b/python/mozbuild/mozbuild/backend/recursivemake.py
-index dd9020d62..aa89cc297 100644
---- a/python/mozbuild/mozbuild/backend/recursivemake.py
-+++ b/python/mozbuild/mozbuild/backend/recursivemake.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, unicode_literals
-+
-
- import logging
- import os
-@@ -12,7 +12,7 @@ from collections import (
- defaultdict,
- namedtuple,
- )
--from StringIO import StringIO
-+from io import StringIO
- from itertools import chain
-
- from mozpack.manifests import (
-@@ -80,75 +80,76 @@ from ..util import (
- )
- from ..makeutil import Makefile
- from mozbuild.shellutil import quote as shell_quote
-+from functools import reduce
-
- MOZBUILD_VARIABLES = [
-- b'ASFLAGS',
-- b'CMSRCS',
-- b'CMMSRCS',
-- b'CPP_UNIT_TESTS',
-- b'DIRS',
-- b'DIST_INSTALL',
-- b'EXTRA_DSO_LDOPTS',
-- b'EXTRA_JS_MODULES',
-- b'EXTRA_PP_COMPONENTS',
-- b'EXTRA_PP_JS_MODULES',
-- b'FORCE_SHARED_LIB',
-- b'FORCE_STATIC_LIB',
-- b'FINAL_LIBRARY',
-- b'HOST_CFLAGS',
-- b'HOST_CSRCS',
-- b'HOST_CMMSRCS',
-- b'HOST_CXXFLAGS',
-- b'HOST_EXTRA_LIBS',
-- b'HOST_LIBRARY_NAME',
-- b'HOST_PROGRAM',
-- b'HOST_SIMPLE_PROGRAMS',
-- b'JAR_MANIFEST',
-- b'JAVA_JAR_TARGETS',
-- b'LIBRARY_NAME',
-- b'LIBS',
-- b'MAKE_FRAMEWORK',
-- b'MODULE',
-- b'NO_DIST_INSTALL',
-- b'NO_EXPAND_LIBS',
-- b'NO_INTERFACES_MANIFEST',
-- b'NO_JS_MANIFEST',
-- b'OS_LIBS',
-- b'PARALLEL_DIRS',
-- b'PREF_JS_EXPORTS',
-- b'PROGRAM',
-- b'RESOURCE_FILES',
-- b'SHARED_LIBRARY_LIBS',
-- b'SHARED_LIBRARY_NAME',
-- b'SIMPLE_PROGRAMS',
-- b'SONAME',
-- b'STATIC_LIBRARY_NAME',
-- b'TEST_DIRS',
-- b'TOOL_DIRS',
-+ 'ASFLAGS',
-+ 'CMSRCS',
-+ 'CMMSRCS',
-+ 'CPP_UNIT_TESTS',
-+ 'DIRS',
-+ 'DIST_INSTALL',
-+ 'EXTRA_DSO_LDOPTS',
-+ 'EXTRA_JS_MODULES',
-+ 'EXTRA_PP_COMPONENTS',
-+ 'EXTRA_PP_JS_MODULES',
-+ 'FORCE_SHARED_LIB',
-+ 'FORCE_STATIC_LIB',
-+ 'FINAL_LIBRARY',
-+ 'HOST_CFLAGS',
-+ 'HOST_CSRCS',
-+ 'HOST_CMMSRCS',
-+ 'HOST_CXXFLAGS',
-+ 'HOST_EXTRA_LIBS',
-+ 'HOST_LIBRARY_NAME',
-+ 'HOST_PROGRAM',
-+ 'HOST_SIMPLE_PROGRAMS',
-+ 'JAR_MANIFEST',
-+ 'JAVA_JAR_TARGETS',
-+ 'LIBRARY_NAME',
-+ 'LIBS',
-+ 'MAKE_FRAMEWORK',
-+ 'MODULE',
-+ 'NO_DIST_INSTALL',
-+ 'NO_EXPAND_LIBS',
-+ 'NO_INTERFACES_MANIFEST',
-+ 'NO_JS_MANIFEST',
-+ 'OS_LIBS',
-+ 'PARALLEL_DIRS',
-+ 'PREF_JS_EXPORTS',
-+ 'PROGRAM',
-+ 'RESOURCE_FILES',
-+ 'SHARED_LIBRARY_LIBS',
-+ 'SHARED_LIBRARY_NAME',
-+ 'SIMPLE_PROGRAMS',
-+ 'SONAME',
-+ 'STATIC_LIBRARY_NAME',
-+ 'TEST_DIRS',
-+ 'TOOL_DIRS',
- # XXX config/Makefile.in specifies this in a make invocation
- #'USE_EXTENSION_MANIFEST',
-- b'XPCSHELL_TESTS',
-- b'XPIDL_MODULE',
-+ 'XPCSHELL_TESTS',
-+ 'XPIDL_MODULE',
- ]
-
- DEPRECATED_VARIABLES = [
-- b'EXPORT_LIBRARY',
-- b'EXTRA_LIBS',
-- b'HOST_LIBS',
-- b'LIBXUL_LIBRARY',
-- b'MOCHITEST_A11Y_FILES',
-- b'MOCHITEST_BROWSER_FILES',
-- b'MOCHITEST_BROWSER_FILES_PARTS',
-- b'MOCHITEST_CHROME_FILES',
-- b'MOCHITEST_FILES',
-- b'MOCHITEST_FILES_PARTS',
-- b'MOCHITEST_METRO_FILES',
-- b'MOCHITEST_ROBOCOP_FILES',
-- b'MODULE_OPTIMIZE_FLAGS',
-- b'MOZ_CHROME_FILE_FORMAT',
-- b'SHORT_LIBNAME',
-- b'TESTING_JS_MODULES',
-- b'TESTING_JS_MODULE_DIR',
-+ 'EXPORT_LIBRARY',
-+ 'EXTRA_LIBS',
-+ 'HOST_LIBS',
-+ 'LIBXUL_LIBRARY',
-+ 'MOCHITEST_A11Y_FILES',
-+ 'MOCHITEST_BROWSER_FILES',
-+ 'MOCHITEST_BROWSER_FILES_PARTS',
-+ 'MOCHITEST_CHROME_FILES',
-+ 'MOCHITEST_FILES',
-+ 'MOCHITEST_FILES_PARTS',
-+ 'MOCHITEST_METRO_FILES',
-+ 'MOCHITEST_ROBOCOP_FILES',
-+ 'MODULE_OPTIMIZE_FLAGS',
-+ 'MOZ_CHROME_FILE_FORMAT',
-+ 'SHORT_LIBNAME',
-+ 'TESTING_JS_MODULES',
-+ 'TESTING_JS_MODULE_DIR',
- ]
-
- MOZBUILD_VARIABLES_MESSAGE = 'It should only be defined in moz.build files.'
-@@ -207,7 +208,7 @@ class BackendMakeFile(object):
- self.fh.write(buf)
-
- def write_once(self, buf):
-- if isinstance(buf, unicode):
-+ if isinstance(buf, str):
- buf = buf.encode('utf-8')
- if b'\n' + buf not in self.fh.getvalue():
- self.write(buf)
-@@ -280,7 +281,7 @@ class RecursiveMakeTraversal(object):
- Helper function to call a filter from compute_dependencies and
- traverse.
- """
-- return filter(current, self.get_subdirs(current))
-+ return list(filter(current, self.get_subdirs(current)))
-
- def compute_dependencies(self, filter=None):
- """
-@@ -710,7 +711,7 @@ class RecursiveMakeBackend(CommonBackend):
- convenience variables, and the other dependency definitions for a
- hopefully proper directory traversal.
- """
-- for tier, no_skip in self._no_skip.items():
-+ for tier, no_skip in list(self._no_skip.items()):
- self.log(logging.DEBUG, 'fill_root_mk', {
- 'number': len(no_skip), 'tier': tier
- }, 'Using {number} directories during {tier}')
-@@ -757,7 +758,7 @@ class RecursiveMakeBackend(CommonBackend):
- for tier, filter in filters:
- main, all_deps = \
- self._traversal.compute_dependencies(filter)
-- for dir, deps in all_deps.items():
-+ for dir, deps in list(all_deps.items()):
- if deps is not None or (dir in self._idl_dirs \
- and tier == 'export'):
- rule = root_deps_mk.create_rule(['%s/%s' % (dir, tier)])
-@@ -770,7 +771,7 @@ class RecursiveMakeBackend(CommonBackend):
- rule.add_dependencies('%s/%s' % (d, tier) for d in main)
-
- all_compile_deps = reduce(lambda x,y: x|y,
-- self._compile_graph.values()) if self._compile_graph else set()
-+ list(self._compile_graph.values())) if self._compile_graph else set()
- # Include the following as dependencies of the top recursion target for
- # compilation:
- # - nodes that are not dependended upon by anything. Typically, this
-@@ -783,7 +784,7 @@ class RecursiveMakeBackend(CommonBackend):
- # as direct dependencies of the top recursion target, to somehow
- # prioritize them.
- # 1. See bug 1262241 comment 5.
-- compile_roots = [t for t, deps in self._compile_graph.iteritems()
-+ compile_roots = [t for t, deps in list(self._compile_graph.items())
- if not deps or t not in all_compile_deps]
-
- rule = root_deps_mk.create_rule(['recurse_compile'])
-@@ -845,14 +846,14 @@ class RecursiveMakeBackend(CommonBackend):
- rule.add_dependencies(['$(CURDIR)/%: %'])
-
- def _check_blacklisted_variables(self, makefile_in, makefile_content):
-- if b'EXTERNALLY_MANAGED_MAKE_FILE' in makefile_content:
-+ if 'EXTERNALLY_MANAGED_MAKE_FILE' in makefile_content:
- # Bypass the variable restrictions for externally managed makefiles.
- return
-
- for l in makefile_content.splitlines():
- l = l.strip()
- # Don't check comments
-- if l.startswith(b'#'):
-+ if l.startswith('#'):
- continue
- for x in chain(MOZBUILD_VARIABLES, DEPRECATED_VARIABLES):
- if x not in l:
-@@ -909,11 +910,11 @@ class RecursiveMakeBackend(CommonBackend):
- # Directories with a Makefile containing a tools target, or
- # XPI_PKGNAME or INSTALL_EXTENSION_ID can't be skipped and
- # must run during the 'tools' tier.
-- for t in (b'XPI_PKGNAME', b'INSTALL_EXTENSION_ID',
-- b'tools'):
-+ for t in ('XPI_PKGNAME', 'INSTALL_EXTENSION_ID',
-+ 'tools'):
- if t not in content:
- continue
-- if t == b'tools' and not re.search('(?:^|\s)tools.*::', content, re.M):
-+ if t == 'tools' and not re.search('(?:^|\s)tools.*::', content, re.M):
- continue
- if objdir == self.environment.topobjdir:
- continue
-@@ -933,7 +934,7 @@ class RecursiveMakeBackend(CommonBackend):
- self._fill_root_mk()
-
- # Make the master test manifest files.
-- for flavor, t in self._test_manifests.items():
-+ for flavor, t in list(self._test_manifests.items()):
- install_prefix, manifests = t
- manifest_stem = mozpath.join(install_prefix, '%s.ini' % flavor)
- self._write_master_test_manifest(mozpath.join(
-@@ -1039,7 +1040,7 @@ class RecursiveMakeBackend(CommonBackend):
- for p in ('Makefile', 'backend.mk', '.deps/.mkdir.done'):
- build_files.add_optional_exists(p)
-
-- for idl in manager.idls.values():
-+ for idl in list(manager.idls.values()):
- self._install_manifests['dist_idl'].add_link(idl['source'],
- idl['basename'])
- self._install_manifests['dist_include'].add_optional_exists('%s.h'
-@@ -1086,7 +1087,7 @@ class RecursiveMakeBackend(CommonBackend):
-
- interfaces_manifests = []
- dist_dir = mozpath.join(self.environment.topobjdir, 'dist')
-- for manifest, entries in manager.interface_manifests.items():
-+ for manifest, entries in list(manager.interface_manifests.items()):
- interfaces_manifests.append(mozpath.join('$(DEPTH)', manifest))
- for xpt in sorted(entries):
- registered_xpt_files.add(mozpath.join(
-@@ -1194,7 +1195,7 @@ class RecursiveMakeBackend(CommonBackend):
- # Don't allow files to be defined multiple times unless it is allowed.
- # We currently allow duplicates for non-test files or test files if
- # the manifest is listed as a duplicate.
-- for source, (dest, is_test) in obj.installs.items():
-+ for source, (dest, is_test) in list(obj.installs.items()):
- try:
- self._install_manifests['_test_files'].add_link(source, dest)
- except ValueError:
-@@ -1558,7 +1559,7 @@ class RecursiveMakeBackend(CommonBackend):
- man_dir = mozpath.join(self.environment.topobjdir, '_build_manifests',
- dest)
-
-- for k, manifest in manifests.items():
-+ for k, manifest in list(manifests.items()):
- with self._write_file(mozpath.join(man_dir, k)) as fh:
- manifest.write(fileobj=fh)
-
-@@ -1593,20 +1594,20 @@ class RecursiveMakeBackend(CommonBackend):
- pp.context.update(extra)
- if not pp.context.get('autoconfmk', ''):
- pp.context['autoconfmk'] = 'autoconf.mk'
-- pp.handleLine(b'# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n');
-- pp.handleLine(b'DEPTH := @DEPTH@\n')
-- pp.handleLine(b'topobjdir := @topobjdir@\n')
-- pp.handleLine(b'topsrcdir := @top_srcdir@\n')
-- pp.handleLine(b'srcdir := @srcdir@\n')
-- pp.handleLine(b'VPATH := @srcdir@\n')
-- pp.handleLine(b'relativesrcdir := @relativesrcdir@\n')
-- pp.handleLine(b'include $(DEPTH)/config/@autoconfmk@\n')
-+ pp.handleLine('# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n');
-+ pp.handleLine('DEPTH := @DEPTH@\n')
-+ pp.handleLine('topobjdir := @topobjdir@\n')
-+ pp.handleLine('topsrcdir := @top_srcdir@\n')
-+ pp.handleLine('srcdir := @srcdir@\n')
-+ pp.handleLine('VPATH := @srcdir@\n')
-+ pp.handleLine('relativesrcdir := @relativesrcdir@\n')
-+ pp.handleLine('include $(DEPTH)/config/@autoconfmk@\n')
- if not stub:
- pp.do_include(obj.input_path)
- # Empty line to avoid failures when last line in Makefile.in ends
- # with a backslash.
-- pp.handleLine(b'\n')
-- pp.handleLine(b'include $(topsrcdir)/config/recurse.mk\n')
-+ pp.handleLine('\n')
-+ pp.handleLine('include $(topsrcdir)/config/recurse.mk\n')
- if not stub:
- # Adding the Makefile.in here has the desired side-effect
- # that if the Makefile.in disappears, this will force
-diff --git a/python/mozbuild/mozbuild/config_status.py b/python/mozbuild/mozbuild/config_status.py
-index d46f1332d..a9a27a699 100644
---- a/python/mozbuild/mozbuild/config_status.py
-+++ b/python/mozbuild/mozbuild/config_status.py
-@@ -77,6 +77,7 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,
- See build/autoconf/config.status.m4.
- '''
-
-+ print("config_status started")
- if 'CONFIG_FILES' in os.environ:
- raise Exception('Using the CONFIG_FILES environment variable is not '
- 'supported.')
-@@ -119,7 +120,7 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,
- if 'WRITE_MOZINFO' in os.environ:
- write_mozinfo(os.path.join(topobjdir, 'mozinfo.json'), env, os.environ)
-
-- cpu_start = time.clock()
-+ cpu_start = time.perf_counter()
- time_start = time.time()
-
- # Make appropriate backend instances, defaulting to RecursiveMakeBackend,
-@@ -155,7 +156,7 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,
- summary = obj.gyp_summary()
- print(summary, file=sys.stderr)
-
-- cpu_time = time.clock() - cpu_start
-+ cpu_time = time.perf_counter() - cpu_start
- wall_time = time.time() - time_start
- efficiency = cpu_time / wall_time if wall_time else 100
- untracked = wall_time - execution_time
-@@ -179,3 +180,5 @@ def config_status(topobjdir='.', topsrcdir='.', defines=None,
- # Advertise Android Studio if it is appropriate.
- if MachCommandConditions.is_android(env):
- print(ANDROID_IDE_ADVERTISEMENT)
-+
-+ print("config_status finished")
-diff --git a/python/mozbuild/mozbuild/configure/__init__.py b/python/mozbuild/mozbuild/configure/__init__.py
-index d03615707..13d623d4f 100644
---- a/python/mozbuild/mozbuild/configure/__init__.py
-+++ b/python/mozbuild/mozbuild/configure/__init__.py
-@@ -2,9 +2,9 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-
--import __builtin__
-+
-+import builtins
- import inspect
- import logging
- import os
-@@ -38,6 +38,8 @@ from mozbuild.util import (
-
- import mozpack.path as mozpath
-
-+import traceback
-+
-
- class ConfigureError(Exception):
- pass
-@@ -69,7 +71,7 @@ class SandboxDependsFunction(object):
- def __getattr__(self, key):
- return self._getattr(key).sandboxed
-
-- def __nonzero__(self):
-+ def __bool__(self):
- raise ConfigureError(
- 'Cannot do boolean operations on @depends functions.')
-
-@@ -96,6 +98,7 @@ class DependsFunction(object):
- sandbox._value_for(self)
- elif not sandbox._help:
- sandbox._execution_queue.append((sandbox._value_for, (self,)))
-+ sandbox.tasks_debug_out("DependsFunction.__init %s" % func.__name__)
-
- @property
- def name(self):
-@@ -206,6 +209,15 @@ class CombinedDependsFunction(DependsFunction):
- def __ne__(self, other):
- return not self == other
-
-+ def __hash__(self):
-+ # This one was taken from [1] initially. Should not have done that:
-+ # it causes explosion of ConfigureSandbox._execution_queue with 100%
-+ # CPU load and eating all available memory...
-+ #
-+ # [1] https://code.foxkit.us/adelie/packages/blob/f2b5773da19ab397fbe64fd32dacc383cfe4cd77/user/mozjs/python3.patch#L8068
-+ return hash((self._name, tuple(self.dependencies)))
-+
-+
- class SandboxedGlobal(dict):
- '''Identifiable dict type for use as function global'''
-
-@@ -253,11 +265,12 @@ class ConfigureSandbox(dict):
- # The default set of builtins. We expose unicode as str to make sandboxed
- # files more python3-ready.
- BUILTINS = ReadOnlyDict({
-- b: getattr(__builtin__, b)
-+ b: getattr(builtins, b)
- for b in ('None', 'False', 'True', 'int', 'bool', 'any', 'all', 'len',
- 'list', 'tuple', 'set', 'dict', 'isinstance', 'getattr',
-- 'hasattr', 'enumerate', 'range', 'zip')
-- }, __import__=forbidden_import, str=unicode)
-+ 'hasattr', 'enumerate', 'range', 'zip', '__build_class__',
-+ 'bytes', 'exec')
-+ }, __import__=forbidden_import, str=str)
-
- # Expose a limited set of functions from os.path
- OS = ReadOnlyNamespace(path=ReadOnlyNamespace(**{
-@@ -294,6 +307,11 @@ class ConfigureSandbox(dict):
- # Queue of functions to execute, with their arguments
- self._execution_queue = []
-
-+ # For debugging: Show number of tasks started in run() / added elsewhere
-+ # and some additional info
-+ self.task_debug = False # set True to enable
-+ self.tasks_started = 0
-+
- # Store the `when`s associated to some options.
- self._conditions = {}
-
-@@ -331,7 +349,7 @@ class ConfigureSandbox(dict):
- return method
- def wrapped(*args, **kwargs):
- out_args = [
-- arg.decode(encoding) if isinstance(arg, str) else arg
-+ arg.decode(encoding) if isinstance(arg, bytes) else arg
- for arg in args
- ]
- return method(*out_args, **kwargs)
-@@ -360,6 +378,14 @@ class ConfigureSandbox(dict):
- handler.setFormatter(formatter)
- logger.addHandler(handler)
-
-+ def tasks_debug_out(self, text):
-+ if self.task_debug:
-+ print("%s / queued %i / done %i" %(text, len(self._execution_queue), self.tasks_started))
-+ #if len(self._execution_queue) > 5000:
-+ # traceback.print_stack(file=sys.stdout)
-+ #if len(self._execution_queue) > 5010:
-+ # raise Exception("Too many tasks")
-+
- def include_file(self, path):
- '''Include one file in the sandbox. Users of this class probably want
- to use `run` instead.
-@@ -380,6 +406,9 @@ class ConfigureSandbox(dict):
- if path in self._all_paths:
- raise ConfigureError(
- 'Cannot include `%s` because it was included already.' % path)
-+
-+ if self.task_debug:
-+ print("include_file", path)
- self._paths.append(path)
- self._all_paths.add(path)
-
-@@ -398,7 +427,7 @@ class ConfigureSandbox(dict):
- if path:
- self.include_file(path)
-
-- for option in self._options.itervalues():
-+ for option in self._options.values():
- # All options must be referenced by some @depends function
- if option not in self._seen:
- raise ConfigureError(
-@@ -425,6 +454,8 @@ class ConfigureSandbox(dict):
-
- # Run the execution queue
- for func, args in self._execution_queue:
-+ self.tasks_started += 1
-+ self.tasks_debug_out("ConfigureSandbox.run(%s)" % func.__name__)
- func(*args)
-
- if self._help:
-@@ -504,7 +535,7 @@ class ConfigureSandbox(dict):
- value = PositiveOptionValue()
- elif value is False or value == ():
- value = NegativeOptionValue()
-- elif isinstance(value, types.StringTypes):
-+ elif isinstance(value, (str,)):
- value = PositiveOptionValue((value,))
- elif isinstance(value, tuple):
- value = PositiveOptionValue(value)
-@@ -544,7 +575,7 @@ class ConfigureSandbox(dict):
- return value
-
- def _dependency(self, arg, callee_name, arg_name=None):
-- if isinstance(arg, types.StringTypes):
-+ if isinstance(arg, (str,)):
- prefix, name, values = Option.split_option(arg)
- if values != ():
- raise ConfigureError("Option must not contain an '='")
-@@ -608,7 +639,7 @@ class ConfigureSandbox(dict):
- '''
- when = self._normalize_when(kwargs.get('when'), 'option')
- args = [self._resolve(arg) for arg in args]
-- kwargs = {k: self._resolve(v) for k, v in kwargs.iteritems()
-+ kwargs = {k: self._resolve(v) for k, v in kwargs.items()
- if k != 'when'}
- option = Option(*args, **kwargs)
- if when:
-@@ -689,7 +720,7 @@ class ConfigureSandbox(dict):
- with self.only_when_impl(when):
- what = self._resolve(what)
- if what:
-- if not isinstance(what, types.StringTypes):
-+ if not isinstance(what, (str,)):
- raise TypeError("Unexpected type: '%s'" % type(what).__name__)
- self.include_file(what)
-
-@@ -707,7 +738,7 @@ class ConfigureSandbox(dict):
- (k[:-len('_impl')], getattr(self, k))
- for k in dir(self) if k.endswith('_impl') and k != 'template_impl'
- )
-- glob.update((k, v) for k, v in self.iteritems() if k not in glob)
-+ glob.update((k, v) for k, v in self.items() if k not in glob)
-
- # Any function argument to the template must be prepared to be sandboxed.
- # If the template itself returns a function (in which case, it's very
-@@ -731,7 +762,7 @@ class ConfigureSandbox(dict):
- def wrapper(*args, **kwargs):
- args = [maybe_prepare_function(arg) for arg in args]
- kwargs = {k: maybe_prepare_function(v)
-- for k, v in kwargs.iteritems()}
-+ for k, v in kwargs.items()}
- ret = template(*args, **kwargs)
- if isfunction(ret):
- # We can't expect the sandboxed code to think about all the
-@@ -766,7 +797,7 @@ class ConfigureSandbox(dict):
- for value, required in (
- (_import, True), (_from, False), (_as, False)):
-
-- if not isinstance(value, types.StringTypes) and (
-+ if not isinstance(value, (str,)) and (
- required or value is not None):
- raise TypeError("Unexpected type: '%s'" % type(value).__name__)
- if value is not None and not self.RE_MODULE.match(value):
-@@ -807,7 +838,7 @@ class ConfigureSandbox(dict):
- # Special case for the open() builtin, because otherwise, using it
- # fails with "IOError: file() constructor not accessible in
- # restricted mode"
-- if what == '__builtin__.open':
-+ if what == 'builtins.open':
- return lambda *args, **kwargs: open(*args, **kwargs)
- # Until this proves to be a performance problem, just construct an
- # import statement and execute it.
-@@ -829,7 +860,7 @@ class ConfigureSandbox(dict):
- name = self._resolve(name, need_help_dependency=False)
- if name is None:
- return
-- if not isinstance(name, types.StringTypes):
-+ if not isinstance(name, (str,)):
- raise TypeError("Unexpected type: '%s'" % type(name).__name__)
- if name in data:
- raise ConfigureError(
-@@ -850,6 +881,7 @@ class ConfigureSandbox(dict):
-
- self._execution_queue.append((
- self._resolve_and_set, (self._config, name, value, when)))
-+ self.tasks_debug_out("ConfigureSandbox.set_config_impl / %s(%s)" % (name, value))
-
- def set_define_impl(self, name, value, when=None):
- '''Implementation of set_define().
-@@ -864,6 +896,7 @@ class ConfigureSandbox(dict):
- defines = self._config.setdefault('DEFINES', {})
- self._execution_queue.append((
- self._resolve_and_set, (defines, name, value, when)))
-+ self.tasks_debug_out("ConfigureSandbox.set_define_impl / %s(%s)" % (name, value))
-
- def imply_option_impl(self, option, value, reason=None, when=None):
- '''Implementation of imply_option().
-@@ -922,7 +955,7 @@ class ConfigureSandbox(dict):
- if isinstance(possible_reasons[0], Option):
- reason = possible_reasons[0]
- if not reason and (isinstance(value, (bool, tuple)) or
-- isinstance(value, types.StringTypes)):
-+ isinstance(value, (str,))):
- # A reason can be provided automatically when imply_option
- # is called with an immediate value.
- _, filename, line, _, _, _ = inspect.stack()[1]
-@@ -955,10 +988,10 @@ class ConfigureSandbox(dict):
- if not inspect.isfunction(func):
- raise TypeError("Unexpected type: '%s'" % type(func).__name__)
- if func in self._prepared_functions:
-- return func, func.func_globals
-+ return func, func.__globals__
-
- glob = SandboxedGlobal(
-- (k, v) for k, v in func.func_globals.iteritems()
-+ (k, v) for k, v in func.__globals__.items()
- if (inspect.isfunction(v) and v not in self._templates) or (
- inspect.isclass(v) and issubclass(v, Exception))
- )
-@@ -979,20 +1012,20 @@ class ConfigureSandbox(dict):
- # Note this is not entirely bullet proof (if the value is e.g. a list,
- # the list contents could have changed), but covers the bases.
- closure = None
-- if func.func_closure:
-+ if func.__closure__:
- def makecell(content):
- def f():
- content
-- return f.func_closure[0]
-+ return f.__closure__[0]
-
- closure = tuple(makecell(cell.cell_contents)
-- for cell in func.func_closure)
-+ for cell in func.__closure__)
-
- new_func = self.wraps(func)(types.FunctionType(
-- func.func_code,
-+ func.__code__,
- glob,
- func.__name__,
-- func.func_defaults,
-+ func.__defaults__,
- closure
- ))
- @self.wraps(new_func)
-diff --git a/python/mozbuild/mozbuild/configure/check_debug_ranges.py b/python/mozbuild/mozbuild/configure/check_debug_ranges.py
-index c0caa9cc5..a3e1f37e1 100644
---- a/python/mozbuild/mozbuild/configure/check_debug_ranges.py
-+++ b/python/mozbuild/mozbuild/configure/check_debug_ranges.py
-@@ -6,7 +6,7 @@
- # to a given compilation unit. This is used as a helper to find a bug in some
- # versions of GNU ld.
-
--from __future__ import absolute_import
-+
-
- import subprocess
- import sys
-@@ -45,6 +45,8 @@ def get_range_length(range, debug_ranges):
- def main(bin, compilation_unit):
- p = subprocess.Popen(['objdump', '-W', bin], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
- (out, err) = p.communicate()
-+ if isinstance(out, bytes):
-+ out = out.decode('utf-8')
- sections = re.split('\n(Contents of the|The section) ', out)
- debug_info = [s for s in sections if s.startswith('.debug_info')]
- debug_ranges = [s for s in sections if s.startswith('.debug_ranges')]
-@@ -59,4 +61,4 @@ def main(bin, compilation_unit):
-
-
- if __name__ == '__main__':
-- print(main(*sys.argv[1:]))
-+ print((main(*sys.argv[1:])))
-diff --git a/python/mozbuild/mozbuild/configure/options.py b/python/mozbuild/mozbuild/configure/options.py
-index 53ae2ae6d..4d80cad86 100644
---- a/python/mozbuild/mozbuild/configure/options.py
-+++ b/python/mozbuild/mozbuild/configure/options.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import os
- import sys
-@@ -12,7 +12,7 @@ from collections import OrderedDict
-
- def istupleofstrings(obj):
- return isinstance(obj, tuple) and len(obj) and all(
-- isinstance(o, types.StringTypes) for o in obj)
-+ isinstance(o, (str,)) for o in obj)
-
-
- class OptionValue(tuple):
-@@ -88,7 +88,7 @@ class PositiveOptionValue(OptionValue):
- in the form of a tuple for when values are given to the option (in the form
- --option=value[,value2...].
- '''
-- def __nonzero__(self):
-+ def __bool__(self):
- return True
-
-
-@@ -113,7 +113,7 @@ class ConflictingOptionError(InvalidOptionError):
- if format_data:
- message = message.format(**format_data)
- super(ConflictingOptionError, self).__init__(message)
-- for k, v in format_data.iteritems():
-+ for k, v in format_data.items():
- setattr(self, k, v)
-
-
-@@ -149,7 +149,7 @@ class Option(object):
- 'At least an option name or an environment variable name must '
- 'be given')
- if name:
-- if not isinstance(name, types.StringTypes):
-+ if not isinstance(name, (str,)):
- raise InvalidOptionError('Option must be a string')
- if not name.startswith('--'):
- raise InvalidOptionError('Option must start with `--`')
-@@ -158,7 +158,7 @@ class Option(object):
- if not name.islower():
- raise InvalidOptionError('Option must be all lowercase')
- if env:
-- if not isinstance(env, types.StringTypes):
-+ if not isinstance(env, (str,)):
- raise InvalidOptionError(
- 'Environment variable name must be a string')
- if not env.isupper():
-@@ -168,8 +168,8 @@ class Option(object):
- isinstance(nargs, int) and nargs >= 0):
- raise InvalidOptionError(
- "nargs must be a positive integer, '?', '*' or '+'")
-- if (not isinstance(default, types.StringTypes) and
-- not isinstance(default, (bool, types.NoneType)) and
-+ if (not isinstance(default, (str,)) and
-+ not isinstance(default, (bool, type(None))) and
- not istupleofstrings(default)):
- raise InvalidOptionError(
- 'default must be a bool, a string or a tuple of strings')
-@@ -241,7 +241,7 @@ class Option(object):
- ', '.join("'%s'" % c for c in choices))
- elif has_choices:
- maxargs = self.maxargs
-- if len(choices) < maxargs and maxargs != sys.maxint:
-+ if len(choices) < maxargs and maxargs != sys.maxsize:
- raise InvalidOptionError('Not enough `choices` for `nargs`')
- self.choices = choices
- self.help = help
-@@ -255,7 +255,7 @@ class Option(object):
- where prefix is one of 'with', 'without', 'enable' or 'disable'.
- The '=values' part is optional. Values are separated with commas.
- '''
-- if not isinstance(option, types.StringTypes):
-+ if not isinstance(option, (str,)):
- raise InvalidOptionError('Option must be a string')
-
- elements = option.split('=', 1)
-@@ -308,7 +308,7 @@ class Option(object):
- def maxargs(self):
- if isinstance(self.nargs, int):
- return self.nargs
-- return 1 if self.nargs == '?' else sys.maxint
-+ return 1 if self.nargs == '?' else sys.maxsize
-
- def _validate_nargs(self, num):
- minargs, maxargs = self.minargs, self.maxargs
-@@ -499,5 +499,5 @@ class CommandLineHelper(object):
-
- def __iter__(self):
- for d in (self._args, self._extra_args):
-- for arg, pos in d.itervalues():
-+ for arg, pos in d.values():
- yield arg
-diff --git a/python/mozbuild/mozbuild/configure/util.py b/python/mozbuild/mozbuild/configure/util.py
-index 9d8b2eb0e..a12986e48 100644
---- a/python/mozbuild/mozbuild/configure/util.py
-+++ b/python/mozbuild/mozbuild/configure/util.py
-@@ -77,15 +77,7 @@ class ConfigureOutputHandler(logging.Handler):
- # Python has this feature where it sets the encoding of pipes to
- # ascii, which blatantly fails when trying to print out non-ascii.
- def fix_encoding(fh):
-- try:
-- isatty = fh.isatty()
-- except AttributeError:
-- isatty = True
--
-- if not isatty:
-- encoding = getpreferredencoding()
-- if encoding:
-- return codecs.getwriter(encoding)(fh)
-+ # no magic on oe / python3
- return fh
-
- self._stdout = fix_encoding(stdout)
-@@ -200,7 +192,7 @@ class LineIO(object):
- self._errors = errors
-
- def write(self, buf):
-- if self._encoding and isinstance(buf, str):
-+ if self._encoding and isinstance(buf, bytes):
- buf = buf.decode(self._encoding, self._errors)
- lines = buf.splitlines()
- if not lines:
-diff --git a/python/mozbuild/mozbuild/controller/building.py b/python/mozbuild/mozbuild/controller/building.py
-index d5af532f7..e9810fe58 100644
---- a/python/mozbuild/mozbuild/controller/building.py
-+++ b/python/mozbuild/mozbuild/controller/building.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, unicode_literals
-+
-
- import errno
- import getpass
-@@ -146,7 +146,7 @@ class TierStatus(object):
- """
- o = []
-
-- for tier, state in self.tiers.items():
-+ for tier, state in list(self.tiers.items()):
- t_entry = dict(
- name=tier,
- start=state['begin_time'],
-@@ -574,7 +574,7 @@ class BuildProgressFooter(Footer):
-
- def __init__(self, terminal, monitor):
- Footer.__init__(self, terminal)
-- self.tiers = monitor.tiers.tier_status.viewitems()
-+ self.tiers = monitor.tiers.tier_status.items()
-
- def draw(self):
- """Draws this footer in the terminal."""
-@@ -911,8 +911,8 @@ class CCacheStats(object):
-
- return '\n'.join(lines)
-
-- def __nonzero__(self):
-- relative_values = [v for k, v in self._values.items()
-+ def __bool__(self):
-+ relative_values = [v for k, v in list(self._values.items())
- if k not in self.ABSOLUTE_KEYS]
- return (all(v >= 0 for v in relative_values) and
- any(v > 0 for v in relative_values))
-@@ -1156,7 +1156,7 @@ class BuildDriver(MozbuildObject):
-
- high_finder, finder_percent = monitor.have_high_finder_usage()
- if high_finder:
-- print(FINDER_SLOW_MESSAGE % finder_percent)
-+ print((FINDER_SLOW_MESSAGE % finder_percent))
-
- ccache_end = monitor.ccache_stats()
-
-@@ -1276,8 +1276,8 @@ class BuildDriver(MozbuildObject):
- """Install test files."""
-
- if self.is_clobber_needed():
-- print(INSTALL_TESTS_CLOBBER.format(
-- clobber_file=os.path.join(self.topobjdir, 'CLOBBER')))
-+ print((INSTALL_TESTS_CLOBBER.format(
-+ clobber_file=os.path.join(self.topobjdir, 'CLOBBER'))))
- sys.exit(1)
-
- if not test_objs:
-diff --git a/python/mozbuild/mozbuild/frontend/context.py b/python/mozbuild/mozbuild/frontend/context.py
-index fbdbefc1d..1aef6a65a 100644
---- a/python/mozbuild/mozbuild/frontend/context.py
-+++ b/python/mozbuild/mozbuild/frontend/context.py
-@@ -14,7 +14,7 @@ If you are looking for the absolute authority on what moz.build files can
- contain, you've come to the right place.
- """
-
--from __future__ import absolute_import, unicode_literals
-+
-
- import os
-
-@@ -237,15 +237,15 @@ class Context(KeyedDefaultDict):
- This function is transactional: if setitem fails for one of the values,
- the context is not updated at all."""
- if isinstance(iterable, dict):
-- iterable = iterable.items()
-+ iterable = list(iterable.items())
-
- update = {}
-- for key, value in itertools.chain(iterable, kwargs.items()):
-+ for key, value in itertools.chain(iterable, list(kwargs.items())):
- stored_type = self._validate(key, value)
- # Don't create an instance of stored_type if coercion is needed,
- # until all values are validated.
- update[key] = (value, stored_type)
-- for key, (value, stored_type) in update.items():
-+ for key, (value, stored_type) in list(update.items()):
- if not isinstance(value, stored_type):
- update[key] = stored_type(value)
- else:
-@@ -311,7 +311,7 @@ class BaseCompileFlags(ContextDerivedValue, dict):
- # a template were set and which were provided as defaults.
- template_name = getattr(context, 'template', None)
- if template_name in (None, 'Gyp'):
-- dict.__init__(self, ((k, v if v is None else TypedList(unicode)(v))
-+ dict.__init__(self, ((k, v if v is None else TypedList(str)(v))
- for k, v, _ in self.flag_variables))
- else:
- dict.__init__(self)
-@@ -520,13 +520,13 @@ class CompileFlags(BaseCompileFlags):
- if key in self and self[key] is None:
- raise ValueError('`%s` may not be set in COMPILE_FLAGS from moz.build, this '
- 'value is resolved from the emitter.' % key)
-- if not (isinstance(value, list) and all(isinstance(v, basestring) for v in value)):
-+ if not (isinstance(value, list) and all(isinstance(v, str) for v in value)):
- raise ValueError('A list of strings must be provided as a value for a '
- 'compile flags category.')
- dict.__setitem__(self, key, value)
-
-
--class FinalTargetValue(ContextDerivedValue, unicode):
-+class FinalTargetValue(ContextDerivedValue, str):
- def __new__(cls, context, value=""):
- if not value:
- value = 'dist/'
-@@ -536,7 +536,7 @@ class FinalTargetValue(ContextDerivedValue, unicode):
- value += 'bin'
- if context['DIST_SUBDIR']:
- value += '/' + context['DIST_SUBDIR']
-- return unicode.__new__(cls, value)
-+ return str.__new__(cls, value)
-
-
- def Enum(*values):
-@@ -584,7 +584,7 @@ class PathMeta(type):
- cls = SourcePath
- return super(PathMeta, cls).__call__(context, value)
-
--class Path(ContextDerivedValue, unicode):
-+class Path(ContextDerivedValue, str, metaclass=PathMeta):
- """Stores and resolves a source path relative to a given context
-
- This class is used as a backing type for some of the sandbox variables.
-@@ -595,7 +595,6 @@ class Path(ContextDerivedValue, unicode):
- - '!objdir/relative/paths'
- - '%/filesystem/absolute/paths'
- """
-- __metaclass__ = PathMeta
-
- def __new__(cls, context, value=None):
- return super(Path, cls).__new__(cls, value)
-@@ -612,10 +611,14 @@ class Path(ContextDerivedValue, unicode):
- """
- return Path(self.context, mozpath.join(self, *p))
-
-+ @staticmethod
-+ def cmp(a, b):
-+ return (a > b) - (a < b)
-+
- def __cmp__(self, other):
- if isinstance(other, Path) and self.srcdir != other.srcdir:
-- return cmp(self.full_path, other.full_path)
-- return cmp(unicode(self), other)
-+ return self.cmp(self.full_path, other.full_path)
-+ return self.cmp(str(self), other)
-
- # __cmp__ is not enough because unicode has __eq__, __ne__, etc. defined
- # and __cmp__ is only used for those when they don't exist.
-@@ -773,7 +776,7 @@ def ContextDerivedTypedRecord(*fields):
- __slots__ = tuple([name for name, _ in fields])
-
- def __init__(self, context):
-- for fname, ftype in self._fields.items():
-+ for fname, ftype in list(self._fields.items()):
- if issubclass(ftype, ContextDerivedValue):
- setattr(self, fname, self._fields[fname](context))
- else:
-@@ -909,8 +912,8 @@ def TypedListWithAction(typ, action):
- return _TypedListWithAction
-
- WebPlatformTestManifest = TypedNamedTuple("WebPlatformTestManifest",
-- [("manifest_path", unicode),
-- ("test_root", unicode)])
-+ [("manifest_path", str),
-+ ("test_root", str)])
- ManifestparserManifestList = OrderedPathListWithAction(read_manifestparser_manifest)
- ReftestManifestList = OrderedPathListWithAction(read_reftest_manifest)
- WptManifestList = TypedListWithAction(WebPlatformTestManifest, read_wpt_manifest)
-@@ -918,18 +921,18 @@ WptManifestList = TypedListWithAction(WebPlatformTestManifest, read_wpt_manifest
- OrderedSourceList = ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList)
- OrderedTestFlavorList = TypedList(Enum(*all_test_flavors()),
- StrictOrderingOnAppendList)
--OrderedStringList = TypedList(unicode, StrictOrderingOnAppendList)
-+OrderedStringList = TypedList(str, StrictOrderingOnAppendList)
- DependentTestsEntry = ContextDerivedTypedRecord(('files', OrderedSourceList),
- ('tags', OrderedStringList),
- ('flavors', OrderedTestFlavorList))
- BugzillaComponent = TypedNamedTuple('BugzillaComponent',
-- [('product', unicode), ('component', unicode)])
-+ [('product', str), ('component', str)])
- SchedulingComponents = ContextDerivedTypedRecord(
-- ('inclusive', TypedList(unicode, StrictOrderingOnAppendList)),
-- ('exclusive', TypedList(unicode, StrictOrderingOnAppendList)))
-+ ('inclusive', TypedList(str, StrictOrderingOnAppendList)),
-+ ('exclusive', TypedList(str, StrictOrderingOnAppendList)))
-
- GeneratedFilesList = StrictOrderingOnAppendListWithFlagsFactory({
-- 'script': unicode,
-+ 'script': str,
- 'inputs': list,
- 'flags': list, })
-
-@@ -1096,7 +1099,7 @@ class Files(SubContext):
- self.test_tags |= other.test_tags
- self.test_flavors |= other.test_flavors
-
-- for k, v in other.items():
-+ for k, v in list(other.items()):
- if k == 'IMPACTED_TESTS':
- self.test_files |= set(mozpath.relpath(e.full_path, e.context.config.topsrcdir)
- for e in v.files)
-@@ -1154,7 +1157,7 @@ class Files(SubContext):
-
- bug_components = Counter()
-
-- for f in files.values():
-+ for f in list(files.values()):
- bug_component = f.get('BUG_COMPONENT')
- if bug_component:
- bug_components[bug_component] += 1
-@@ -1232,7 +1235,7 @@ VARIABLES = {
- RustLibrary template instead.
- """),
-
-- 'RUST_LIBRARY_TARGET_DIR': (unicode, unicode,
-+ 'RUST_LIBRARY_TARGET_DIR': (str, str,
- """Where CARGO_TARGET_DIR should point when compiling this library. If
- not set, it defaults to the current objdir. It should be a relative path
- to the current objdir; absolute paths should not be used.
-@@ -1248,7 +1251,7 @@ VARIABLES = {
- HostRustLibrary template instead.
- """),
-
-- 'RUST_TEST': (unicode, unicode,
-+ 'RUST_TEST': (str, str,
- """Name of a Rust test to build and run via `cargo test`.
-
- This variable should not be used directly; you should be using the
-@@ -1487,7 +1490,7 @@ VARIABLES = {
- """Like ``OBJDIR_FILES``, with preprocessing. Use sparingly.
- """),
-
-- 'FINAL_LIBRARY': (unicode, unicode,
-+ 'FINAL_LIBRARY': (str, str,
- """Library in which the objects of the current directory will be linked.
-
- This variable contains the name of a library, defined elsewhere with
-@@ -1528,7 +1531,7 @@ VARIABLES = {
- with the host compiler.
- """),
-
-- 'HOST_LIBRARY_NAME': (unicode, unicode,
-+ 'HOST_LIBRARY_NAME': (str, str,
- """Name of target library generated when cross compiling.
- """),
-
-@@ -1546,7 +1549,7 @@ VARIABLES = {
- libraries that link into this library via FINAL_LIBRARY.
- """),
-
-- 'LIBRARY_NAME': (unicode, unicode,
-+ 'LIBRARY_NAME': (str, str,
- """The code name of the library generated for a directory.
-
- By default STATIC_LIBRARY_NAME and SHARED_LIBRARY_NAME take this name.
-@@ -1558,7 +1561,7 @@ VARIABLES = {
- ``example/components/xpcomsample.lib`` on Windows.
- """),
-
-- 'SHARED_LIBRARY_NAME': (unicode, unicode,
-+ 'SHARED_LIBRARY_NAME': (str, str,
- """The name of the static library generated for a directory, if it needs to
- differ from the library code name.
-
-@@ -1572,7 +1575,7 @@ VARIABLES = {
- Implies FORCE_SHARED_LIB.
- """),
-
-- 'STATIC_LIBRARY_NAME': (unicode, unicode,
-+ 'STATIC_LIBRARY_NAME': (str, str,
- """The name of the static library generated for a directory, if it needs to
- differ from the library code name.
-
-@@ -1604,31 +1607,31 @@ VARIABLES = {
-
- This variable contains a list of system libaries to link against.
- """),
-- 'RCFILE': (unicode, unicode,
-+ 'RCFILE': (str, str,
- """The program .rc file.
-
- This variable can only be used on Windows.
- """),
-
-- 'RESFILE': (unicode, unicode,
-+ 'RESFILE': (str, str,
- """The program .res file.
-
- This variable can only be used on Windows.
- """),
-
-- 'RCINCLUDE': (unicode, unicode,
-+ 'RCINCLUDE': (str, str,
- """The resource script file to be included in the default .res file.
-
- This variable can only be used on Windows.
- """),
-
-- 'DEFFILE': (unicode, unicode,
-+ 'DEFFILE': (str, str,
- """The program .def (module definition) file.
-
- This variable can only be used on Windows.
- """),
-
-- 'SYMBOLS_FILE': (Path, unicode,
-+ 'SYMBOLS_FILE': (Path, str,
- """A file containing a list of symbols to export from a shared library.
-
- The given file contains a list of symbols to be exported, and is
-@@ -1649,7 +1652,7 @@ VARIABLES = {
- ``BIN_SUFFIX``, the name will remain unchanged.
- """),
-
-- 'SONAME': (unicode, unicode,
-+ 'SONAME': (str, str,
- """The soname of the shared object currently being linked
-
- soname is the "logical name" of a shared object, often used to provide
-@@ -1719,7 +1722,7 @@ VARIABLES = {
- ``GENERATED_FILES``.
- """),
-
-- 'PROGRAM' : (unicode, unicode,
-+ 'PROGRAM' : (str, str,
- """Compiled executable name.
-
- If the configuration token ``BIN_SUFFIX`` is set, its value will be
-@@ -1727,7 +1730,7 @@ VARIABLES = {
- ``BIN_SUFFIX``, ``PROGRAM`` will remain unchanged.
- """),
-
-- 'HOST_PROGRAM' : (unicode, unicode,
-+ 'HOST_PROGRAM' : (str, str,
- """Compiled host executable name.
-
- If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will be
-@@ -1765,7 +1768,7 @@ VARIABLES = {
- files.
- """),
-
-- 'XPIDL_MODULE': (unicode, unicode,
-+ 'XPIDL_MODULE': (str, str,
- """XPCOM Interface Definition Module Name.
-
- This is the name of the ``.xpt`` file that is created by linking
-@@ -1924,14 +1927,14 @@ VARIABLES = {
-
-
- # The following variables are used to control the target of installed files.
-- 'XPI_NAME': (unicode, unicode,
-+ 'XPI_NAME': (str, str,
- """The name of an extension XPI to generate.
-
- When this variable is present, the results of this directory will end up
- being packaged into an extension instead of the main dist/bin results.
- """),
-
-- 'DIST_SUBDIR': (unicode, unicode,
-+ 'DIST_SUBDIR': (str, str,
- """The name of an alternate directory to install files to.
-
- When this variable is present, the results of this directory will end up
-@@ -1939,7 +1942,7 @@ VARIABLES = {
- otherwise be placed.
- """),
-
-- 'FINAL_TARGET': (FinalTargetValue, unicode,
-+ 'FINAL_TARGET': (FinalTargetValue, str,
- """The name of the directory to install targets to.
-
- The directory is relative to the top of the object directory. The
-@@ -1970,7 +1973,7 @@ VARIABLES = {
-
- 'GYP_DIRS': (StrictOrderingOnAppendListWithFlagsFactory({
- 'variables': dict,
-- 'input': unicode,
-+ 'input': str,
- 'sandbox_vars': dict,
- 'no_chromium': bool,
- 'no_unified': bool,
-@@ -2194,7 +2197,7 @@ VARIABLES = {
- }
-
- # Sanity check: we don't want any variable above to have a list as storage type.
--for name, (storage_type, input_types, docs) in VARIABLES.items():
-+for name, (storage_type, input_types, docs) in list(VARIABLES.items()):
- if storage_type == list:
- raise RuntimeError('%s has a "list" storage type. Use "List" instead.'
- % name)
-diff --git a/python/mozbuild/mozbuild/frontend/data.py b/python/mozbuild/mozbuild/frontend/data.py
-index 442fc9e0a..837453a9f 100644
---- a/python/mozbuild/mozbuild/frontend/data.py
-+++ b/python/mozbuild/mozbuild/frontend/data.py
-@@ -15,7 +15,7 @@ contains the code for converting executed mozbuild files into these data
- structures.
- """
-
--from __future__ import absolute_import, unicode_literals
-+
-
- from mozbuild.util import StrictOrderingOnAppendList
- from mozpack.chrome.manifest import ManifestEntry
-@@ -182,7 +182,7 @@ class ComputedFlags(ContextDerived):
- if value:
- for dest_var in dest_vars:
- flags[dest_var].extend(value)
-- return flags.items()
-+ return list(flags.items())
-
- class XPIDLFile(ContextDerived):
- """Describes an XPIDL file to be compiled."""
-@@ -213,7 +213,7 @@ class BaseDefines(ContextDerived):
- self.defines = defines
-
- def get_defines(self):
-- for define, value in self.defines.iteritems():
-+ for define, value in self.defines.items():
- if value is True:
- yield('-D%s' % define)
- elif value is False:
-@@ -494,7 +494,7 @@ class SimpleProgram(BaseProgram):
- KIND = 'target'
-
- def source_files(self):
-- for srcs in self.sources.values():
-+ for srcs in list(self.sources.values()):
- for f in srcs:
- if mozpath.basename(mozpath.splitext(f)[0]) == mozpath.splitext(self.program)[0]:
- return [f]
-diff --git a/python/mozbuild/mozbuild/frontend/emitter.py b/python/mozbuild/mozbuild/frontend/emitter.py
-index 642b381c0..c28344a75 100644
---- a/python/mozbuild/mozbuild/frontend/emitter.py
-+++ b/python/mozbuild/mozbuild/frontend/emitter.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, unicode_literals
-+
-
- import itertools
- import logging
-@@ -116,8 +116,8 @@ class TreeMetadataEmitter(LoggingMixin):
- # arguments. This gross hack works around the problem until we
- # rid ourselves of 2.6.
- self.info = {}
-- for k, v in mozinfo.info.items():
-- if isinstance(k, unicode):
-+ for k, v in list(mozinfo.info.items()):
-+ if isinstance(k, str):
- k = k.encode('ascii')
- self.info[k] = v
-
-@@ -234,7 +234,7 @@ class TreeMetadataEmitter(LoggingMixin):
-
-
- # Next do FINAL_LIBRARY linkage.
-- for lib in (l for libs in self._libs.values() for l in libs):
-+ for lib in (l for libs in list(self._libs.values()) for l in libs):
- if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:
- continue
- if lib.link_into not in self._libs:
-@@ -294,13 +294,13 @@ class TreeMetadataEmitter(LoggingMixin):
- lib.link_into == outerlib.basename):
- propagate_defines(lib, defines)
-
-- for lib in (l for libs in self._libs.values() for l in libs):
-+ for lib in (l for libs in list(self._libs.values()) for l in libs):
- if isinstance(lib, Library):
- propagate_defines(lib, lib.lib_defines)
- yield lib
-
-
-- for lib in (l for libs in self._libs.values() for l in libs):
-+ for lib in (l for libs in list(self._libs.values()) for l in libs):
- lib_defines = list(lib.lib_defines.get_defines())
- if lib_defines:
- objdir_flags = self._compile_flags[lib.objdir]
-@@ -310,13 +310,13 @@ class TreeMetadataEmitter(LoggingMixin):
- if objdir_flags:
- objdir_flags.resolve_flags('LIBRARY_DEFINES', lib_defines)
-
-- for flags_obj in self._compile_flags.values():
-+ for flags_obj in list(self._compile_flags.values()):
- yield flags_obj
-
-- for flags_obj in self._compile_as_flags.values():
-+ for flags_obj in list(self._compile_as_flags.values()):
- yield flags_obj
-
-- for obj in self._binaries.values():
-+ for obj in list(self._binaries.values()):
- yield obj
-
-
-@@ -409,7 +409,7 @@ class TreeMetadataEmitter(LoggingMixin):
- libs[key] = l
- if key not in libs:
- libs[key] = l
-- candidates = libs.values()
-+ candidates = list(libs.values())
- if force_static and not candidates:
- if dir:
- raise SandboxValidationError(
-@@ -473,9 +473,9 @@ class TreeMetadataEmitter(LoggingMixin):
-
- def _verify_deps(self, context, crate_dir, crate_name, dependencies, description='Dependency'):
- """Verify that a crate's dependencies all specify local paths."""
-- for dep_crate_name, values in dependencies.iteritems():
-+ for dep_crate_name, values in dependencies.items():
- # A simple version number.
-- if isinstance(values, (str, unicode)):
-+ if isinstance(values, str):
- raise SandboxValidationError(
- '%s %s of crate %s does not list a path' % (description, dep_crate_name, crate_name),
- context)
-@@ -529,7 +529,7 @@ class TreeMetadataEmitter(LoggingMixin):
-
- cargo_target_dir = context.get('RUST_LIBRARY_TARGET_DIR', '.')
-
-- dependencies = set(config.get('dependencies', {}).iterkeys())
-+ dependencies = set(config.get('dependencies', {}).keys())
-
- features = context.get(cls.FEATURES_VAR, [])
- unique_features = set(features)
-@@ -863,7 +863,7 @@ class TreeMetadataEmitter(LoggingMixin):
- assert not gen_sources['UNIFIED_SOURCES']
-
- no_pgo = context.get('NO_PGO')
-- no_pgo_sources = [f for f, flags in all_flags.iteritems()
-+ no_pgo_sources = [f for f, flags in all_flags.items()
- if flags.no_pgo]
- if no_pgo:
- if no_pgo_sources:
-@@ -890,7 +890,7 @@ class TreeMetadataEmitter(LoggingMixin):
-
- # The inverse of the above, mapping suffixes to their canonical suffix.
- canonicalized_suffix_map = {}
-- for suffix, alternatives in suffix_map.iteritems():
-+ for suffix, alternatives in suffix_map.items():
- alternatives.add(suffix)
- for a in alternatives:
- canonicalized_suffix_map[a] = suffix
-@@ -914,7 +914,7 @@ class TreeMetadataEmitter(LoggingMixin):
- # Source files to track for linkables associated with this context.
- ctxt_sources = defaultdict(lambda: defaultdict(list))
-
-- for variable, (klass, gen_klass, suffixes) in varmap.items():
-+ for variable, (klass, gen_klass, suffixes) in list(varmap.items()):
- allowed_suffixes = set().union(*[suffix_map[s] for s in suffixes])
-
- # First ensure that we haven't been given filetypes that we don't
-@@ -941,20 +941,20 @@ class TreeMetadataEmitter(LoggingMixin):
- obj = cls(*arglist)
- srcs = obj.files
- if isinstance(obj, UnifiedSources) and obj.have_unified_mapping:
-- srcs = dict(obj.unified_source_mapping).keys()
-+ srcs = list(dict(obj.unified_source_mapping).keys())
- ctxt_sources[variable][canonical_suffix] += sorted(srcs)
- yield obj
-
- if ctxt_sources:
- for linkable in linkables:
- for target_var in ('SOURCES', 'UNIFIED_SOURCES'):
-- for suffix, srcs in ctxt_sources[target_var].items():
-+ for suffix, srcs in list(ctxt_sources[target_var].items()):
- linkable.sources[suffix] += srcs
- for host_linkable in host_linkables:
-- for suffix, srcs in ctxt_sources['HOST_SOURCES'].items():
-+ for suffix, srcs in list(ctxt_sources['HOST_SOURCES'].items()):
- host_linkable.sources[suffix] += srcs
-
-- for f, flags in all_flags.iteritems():
-+ for f, flags in all_flags.items():
- if flags.flags:
- ext = mozpath.splitext(f)[1]
- yield PerSourceFlag(context, f, flags.flags)
-@@ -1143,7 +1143,7 @@ class TreeMetadataEmitter(LoggingMixin):
- for obj in self._handle_linkables(context, passthru, generated_files):
- yield obj
-
-- generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', '')) for k in self._binaries.keys()])
-+ generated_files.update(['%s%s' % (k, self.config.substs.get('BIN_SUFFIX', '')) for k in list(self._binaries.keys())])
-
- components = []
- for var, cls in (
-@@ -1277,7 +1277,7 @@ class TreeMetadataEmitter(LoggingMixin):
- for obj in self._process_jar_manifests(context):
- yield obj
-
-- for name, jar in context.get('JAVA_JAR_TARGETS', {}).items():
-+ for name, jar in list(context.get('JAVA_JAR_TARGETS', {}).items()):
- yield ContextWrapped(context, jar)
-
- computed_as_flags.resolve_flags('MOZBUILD',
-@@ -1346,7 +1346,7 @@ class TreeMetadataEmitter(LoggingMixin):
- script = mozpath.join(mozpath.dirname(mozpath.dirname(__file__)),
- 'action', 'process_define_files.py')
- yield GeneratedFile(context, script, 'process_define_file',
-- unicode(path),
-+ str(path),
- [Path(context, path + '.in')])
-
- generated_files = context.get('GENERATED_FILES') or []
-@@ -1393,7 +1393,7 @@ class TreeMetadataEmitter(LoggingMixin):
- flags.flags, localized=localized)
-
- def _process_test_manifests(self, context):
-- for prefix, info in TEST_MANIFESTS.items():
-+ for prefix, info in list(TEST_MANIFESTS.items()):
- for path, manifest in context.get('%s_MANIFESTS' % prefix, []):
- for obj in self._process_test_manifest(context, info, path, manifest):
- yield obj
-@@ -1479,7 +1479,7 @@ class TreeMetadataEmitter(LoggingMixin):
-
- process_support_files(test)
-
-- for path, m_defaults in mpmanifest.manifest_defaults.items():
-+ for path, m_defaults in list(mpmanifest.manifest_defaults.items()):
- process_support_files(m_defaults)
-
- # We also copy manifests into the output directory,
-diff --git a/python/mozbuild/mozbuild/frontend/reader.py b/python/mozbuild/mozbuild/frontend/reader.py
-index c1efc1c3d..0cdf8b8db 100644
---- a/python/mozbuild/mozbuild/frontend/reader.py
-+++ b/python/mozbuild/mozbuild/frontend/reader.py
-@@ -16,7 +16,7 @@ The BuildReader contains basic logic for traversing a tree of mozbuild files.
- It does this by examining specific variables populated during execution.
- """
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import ast
- import inspect
-@@ -81,12 +81,13 @@ from .context import (
-
- from mozbuild.base import ExecutionSummary
- from concurrent.futures.process import ProcessPoolExecutor
-+from functools import reduce
-
-
-
- if sys.version_info.major == 2:
-- text_type = unicode
-- type_type = types.TypeType
-+ text_type = str
-+ type_type = type
- else:
- text_type = str
- type_type = type
-@@ -127,7 +128,7 @@ class EmptyConfig(object):
-
- self.substs = self.PopulateOnGetDict(EmptyValue, substs or self.default_substs)
- udict = {}
-- for k, v in self.substs.items():
-+ for k, v in list(self.substs.items()):
- if isinstance(v, str):
- udict[k.decode('utf-8')] = v.decode('utf-8')
- else:
-@@ -311,7 +312,7 @@ class MozbuildSandbox(Sandbox):
- raise Exception('`template` is a function decorator. You must '
- 'use it as `@template` preceding a function declaration.')
-
-- name = func.func_name
-+ name = func.__name__
-
- if name in self.templates:
- raise KeyError(
-@@ -390,7 +391,7 @@ class MozbuildSandbox(Sandbox):
- klass = self._context.__class__
- self._context.__class__ = TemplateContext
- # The sandbox will do all the necessary checks for these merges.
-- for key, value in context.items():
-+ for key, value in list(context.items()):
- if isinstance(value, dict):
- self[key].update(value)
- elif isinstance(value, (list, HierarchicalStringList)):
-@@ -407,12 +408,14 @@ class MozbuildSandbox(Sandbox):
-
- class TemplateFunction(object):
- def __init__(self, func, sandbox):
-- self.path = func.func_code.co_filename
-- self.name = func.func_name
-+ self.path = func.__code__.co_filename
-+ self.name = func.__name__
-
-- code = func.func_code
-+ code = func.__code__
- firstlineno = code.co_firstlineno
- lines = sandbox._current_source.splitlines(True)
-+ if len(lines) and isinstance(lines[0], bytes):
-+ lines = [l.decode('utf-8') for l in lines]
- lines = inspect.getblock(lines[firstlineno - 1:])
-
- # The code lines we get out of inspect.getsourcelines look like
-@@ -430,7 +433,7 @@ class TemplateFunction(object):
- # actually never calls __getitem__ and __setitem__, so we need to
- # modify the AST so that accesses to globals are properly directed
- # to a dict.
-- self._global_name = b'_data' # AST wants str for this, not unicode
-+ self._global_name = '_data'
- # In case '_data' is a name used for a variable in the function code,
- # prepend more underscores until we find an unused name.
- while (self._global_name in code.co_names or
-@@ -449,8 +452,8 @@ class TemplateFunction(object):
- compile(func_ast, self.path, 'exec'),
- glob,
- self.name,
-- func.func_defaults,
-- func.func_closure,
-+ func.__defaults__,
-+ func.__closure__,
- )
- func()
-
-@@ -464,11 +467,11 @@ class TemplateFunction(object):
- '__builtins__': sandbox._builtins
- }
- func = types.FunctionType(
-- self._func.func_code,
-+ self._func.__code__,
- glob,
- self.name,
-- self._func.func_defaults,
-- self._func.func_closure
-+ self._func.__defaults__,
-+ self._func.__closure__
- )
- sandbox.exec_function(func, args, kwargs, self.path,
- becomes_current_path=False)
-@@ -484,7 +487,7 @@ class TemplateFunction(object):
- def visit_Str(self, node):
- # String nodes we got from the AST parser are str, but we want
- # unicode literals everywhere, so transform them.
-- node.s = unicode(node.s)
-+ node.s = str(node.s)
- return node
-
- def visit_Name(self, node):
-@@ -617,7 +620,7 @@ class BuildReaderError(Exception):
-
- for l in traceback.format_exception(type(self.other), self.other,
- self.trace):
-- s.write(unicode(l))
-+ s.write(str(l))
-
- return s.getvalue()
-
-@@ -767,7 +770,7 @@ class BuildReaderError(Exception):
- s.write(' %s\n' % inner.args[2])
- s.write('\n')
- close_matches = difflib.get_close_matches(inner.args[2],
-- VARIABLES.keys(), 2)
-+ list(VARIABLES.keys()), 2)
- if close_matches:
- s.write('Maybe you meant %s?\n' % ' or '.join(close_matches))
- s.write('\n')
-@@ -1152,7 +1155,7 @@ class BuildReader(object):
- context)
- non_unified_sources.add(source)
- action_overrides = {}
-- for action, script in gyp_dir.action_overrides.iteritems():
-+ for action, script in gyp_dir.action_overrides.items():
- action_overrides[action] = SourcePath(context, script)
-
- gyp_processor = GypProcessor(context.config,
-@@ -1188,7 +1191,7 @@ class BuildReader(object):
-
- recurse_info[d][key] = dict(sandbox.metadata[key])
-
-- for path, child_metadata in recurse_info.items():
-+ for path, child_metadata in list(recurse_info.items()):
- child_path = path.join('moz.build').full_path
-
- # Ensure we don't break out of the topsrcdir. We don't do realpath
-@@ -1279,7 +1282,7 @@ class BuildReader(object):
- # There is room to improve this code (and the code in
- # _find_relevant_mozbuilds) to better handle multiple files in the same
- # directory. Bug 1136966 tracks.
-- for path, mbpaths in relevants.items():
-+ for path, mbpaths in list(relevants.items()):
- path_mozbuilds[path] = [mozpath.join(topsrcdir, p) for p in mbpaths]
-
- for i, mbpath in enumerate(mbpaths[0:-1]):
-@@ -1316,7 +1319,7 @@ class BuildReader(object):
- all_contexts.append(context)
-
- result = {}
-- for path, paths in path_mozbuilds.items():
-+ for path, paths in list(path_mozbuilds.items()):
- result[path] = reduce(lambda x, y: x + y, (contexts[p] for p in paths), [])
-
- return result, all_contexts
-@@ -1356,7 +1359,7 @@ class BuildReader(object):
-
- r = {}
-
-- for path, ctxs in paths.items():
-+ for path, ctxs in list(paths.items()):
- # Should be normalized by read_relevant_mozbuilds.
- assert '\\' not in path
-
-diff --git a/python/mozbuild/mozbuild/frontend/sandbox.py b/python/mozbuild/mozbuild/frontend/sandbox.py
-index b2090802e..6d94291ea 100644
---- a/python/mozbuild/mozbuild/frontend/sandbox.py
-+++ b/python/mozbuild/mozbuild/frontend/sandbox.py
-@@ -17,7 +17,7 @@ KeyError are machine parseable. This machine-friendly data is used to present
- user-friendly error messages in the case of errors.
- """
-
--from __future__ import absolute_import, unicode_literals
-+
-
- import os
- import sys
-@@ -112,6 +112,7 @@ class Sandbox(dict):
- 'int': int,
- 'set': set,
- 'tuple': tuple,
-+ 'str': str,
- })
-
- def __init__(self, context, finder=default_finder):
-diff --git a/python/mozbuild/mozbuild/jar.py b/python/mozbuild/mozbuild/jar.py
-index 47a2eff63..96aea63ce 100644
---- a/python/mozbuild/mozbuild/jar.py
-+++ b/python/mozbuild/mozbuild/jar.py
-@@ -8,7 +8,7 @@ processing jar.mn files.
- See the documentation for jar.mn on MDC for further details on the format.
- '''
-
--from __future__ import absolute_import
-+
-
- import sys
- import os
-@@ -17,7 +17,7 @@ import re
- import logging
- from time import localtime
- from MozZipFile import ZipFile
--from cStringIO import StringIO
-+from io import StringIO
- from collections import defaultdict
-
- from mozbuild.preprocessor import Preprocessor
-@@ -302,9 +302,9 @@ class JarMaker(object):
- '''updateManifest replaces the % in the chrome registration entries
- with the given chrome base path, and updates the given manifest file.
- '''
-- myregister = dict.fromkeys(map(lambda s: s.replace('%',
-- chromebasepath), register))
-- addEntriesToListFile(manifestPath, myregister.iterkeys())
-+ myregister = dict.fromkeys([s.replace('%',
-+ chromebasepath) for s in register])
-+ addEntriesToListFile(manifestPath, iter(myregister.keys()))
-
- def makeJar(self, infile, jardir):
- '''makeJar is the main entry point to JarMaker.
-@@ -322,7 +322,7 @@ class JarMaker(object):
- elif self.relativesrcdir:
- self.localedirs = \
- self.generateLocaleDirs(self.relativesrcdir)
-- if isinstance(infile, basestring):
-+ if isinstance(infile, str):
- logging.info('processing ' + infile)
- self.sourcedirs.append(_normpath(os.path.dirname(infile)))
- pp = self.pp.clone()
-diff --git a/python/mozbuild/mozbuild/makeutil.py b/python/mozbuild/mozbuild/makeutil.py
-index fcd45bed2..f77c5d2c3 100644
---- a/python/mozbuild/mozbuild/makeutil.py
-+++ b/python/mozbuild/mozbuild/makeutil.py
-@@ -2,11 +2,10 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import
-+
-
- import os
- import re
--from types import StringTypes
- from collections import Iterable
-
-
-@@ -66,7 +65,7 @@ class _SimpleOrderedSet(object):
- self._list = []
- self._set = set()
-
-- def __nonzero__(self):
-+ def __bool__(self):
- return bool(self._set)
-
- def __iter__(self):
-@@ -103,19 +102,19 @@ class Rule(object):
-
- def add_targets(self, targets):
- '''Add additional targets to the rule.'''
-- assert isinstance(targets, Iterable) and not isinstance(targets, StringTypes)
-+ assert isinstance(targets, Iterable) and not isinstance(targets, str)
- self._targets.update(targets)
- return self
-
- def add_dependencies(self, deps):
- '''Add dependencies to the rule.'''
-- assert isinstance(deps, Iterable) and not isinstance(deps, StringTypes)
-+ assert isinstance(deps, Iterable) and not isinstance(deps, str)
- self._dependencies.update(deps)
- return self
-
- def add_commands(self, commands):
- '''Add commands to the rule.'''
-- assert isinstance(commands, Iterable) and not isinstance(commands, StringTypes)
-+ assert isinstance(commands, Iterable) and not isinstance(commands, str)
- self._commands.extend(commands)
- return self
-
-@@ -139,13 +138,16 @@ class Rule(object):
- '''
- if not self._targets:
- return
-- fh.write('%s:' % ' '.join(self._targets))
-+ wstring = '%s:' % ' '.join(self._targets)
- if self._dependencies:
-- fh.write(' %s' % ' '.join(self.dependencies()))
-- fh.write('\n')
-+ wstring += ' %s' % ' '.join(self.dependencies())
-+ wstring += '\n'
- for cmd in self._commands:
-- fh.write('\t%s\n' % cmd)
--
-+ wstring += '\t%s\n' % cmd
-+ try:
-+ fh.write(wstring.encode('utf-8'))
-+ except TypeError:
-+ fh.write(wstring)
-
- # colon followed by anything except a slash (Windows path detection)
- _depfilesplitter = re.compile(r':(?![\\/])')
-diff --git a/python/mozbuild/mozbuild/mozinfo.py b/python/mozbuild/mozbuild/mozinfo.py
-index 7e7ad1b2a..f03f20ba3 100755
---- a/python/mozbuild/mozbuild/mozinfo.py
-+++ b/python/mozbuild/mozbuild/mozinfo.py
-@@ -5,7 +5,7 @@
- # This module produces a JSON file that provides basic build info and
- # configuration metadata.
-
--from __future__ import absolute_import
-+
-
- import os
- import re
-@@ -33,7 +33,7 @@ def build_dict(config, env=os.environ):
- d['mozconfig'] = config.mozconfig
-
- # os
-- o = substs["OS_TARGET"]
-+ o = str(substs["OS_TARGET"])
- known_os = {"Linux": "linux",
- "WINNT": "win",
- "Darwin": "mac",
-@@ -148,7 +148,7 @@ def write_mozinfo(file, config, env=os.environ):
- and what keys are produced.
- """
- build_conf = build_dict(config, env)
-- if isinstance(file, basestring):
-- file = open(file, 'wb')
-+ if isinstance(file, str):
-+ file = open(file, 'w')
-
- json.dump(build_conf, file, sort_keys=True, indent=4)
-diff --git a/python/mozbuild/mozbuild/preprocessor.py b/python/mozbuild/mozbuild/preprocessor.py
-index 6780b8b72..19e59884e 100644
---- a/python/mozbuild/mozbuild/preprocessor.py
-+++ b/python/mozbuild/mozbuild/preprocessor.py
-@@ -27,7 +27,8 @@ import os
- import re
- from optparse import OptionParser
- import errno
--from makeutil import Makefile
-+from .makeutil import Makefile
-+from functools import reduce
-
- # hack around win32 mangling our line endings
- # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65443
-@@ -230,7 +231,7 @@ class Expression:
- def __repr__(self):
- return self.value.__repr__()
-
-- class ParseError(StandardError):
-+ class ParseError(Exception):
- """
- Error raised when parsing fails.
- It has two members, offset and content, which give the offset of the
-@@ -278,7 +279,7 @@ class Preprocessor:
- self.context = Context()
- for k,v in {'FILE': '',
- 'LINE': 0,
-- 'DIRECTORY': os.path.abspath('.')}.iteritems():
-+ 'DIRECTORY': os.path.abspath('.')}.items():
- self.context[k] = v
- self.actionLevel = 0
- self.disableLevel = 0
-@@ -292,21 +293,21 @@ class Preprocessor:
- self.cmds = {}
- for cmd, level in {'define': 0,
- 'undef': 0,
-- 'if': sys.maxint,
-- 'ifdef': sys.maxint,
-- 'ifndef': sys.maxint,
-+ 'if': sys.maxsize,
-+ 'ifdef': sys.maxsize,
-+ 'ifndef': sys.maxsize,
- 'else': 1,
- 'elif': 1,
- 'elifdef': 1,
- 'elifndef': 1,
-- 'endif': sys.maxint,
-+ 'endif': sys.maxsize,
- 'expand': 0,
- 'literal': 0,
- 'filter': 0,
- 'unfilter': 0,
- 'include': 0,
- 'includesubst': 0,
-- 'error': 0}.iteritems():
-+ 'error': 0}.items():
- self.cmds[cmd] = (level, getattr(self, 'do_' + cmd))
- self.out = sys.stdout
- self.setMarker(marker)
-@@ -434,7 +435,7 @@ class Preprocessor:
- filteredLine = self.applyFilters(aLine)
- if filteredLine != aLine:
- self.actionLevel = 2
-- self.out.write(filteredLine)
-+ self.out.write(filteredLine.encode('utf-8'))
-
- def handleCommandLine(self, args, defaultToStdin = False):
- """
-@@ -468,7 +469,7 @@ class Preprocessor:
- raise Preprocessor.Error(self, "--depend doesn't work with stdout",
- None)
- try:
-- from makeutil import Makefile
-+ from .makeutil import Makefile
- except:
- raise Preprocessor.Error(self, "--depend requires the "
- "mozbuild.makeutil module", None)
-@@ -683,7 +684,7 @@ class Preprocessor:
- current = dict(self.filters)
- for f in filters:
- current[f] = getattr(self, 'filter_' + f)
-- filterNames = current.keys()
-+ filterNames = list(current.keys())
- filterNames.sort()
- self.filters = [(fn, current[fn]) for fn in filterNames]
- return
-@@ -693,7 +694,7 @@ class Preprocessor:
- for f in filters:
- if f in current:
- del current[f]
-- filterNames = current.keys()
-+ filterNames = list(current.keys())
- filterNames.sort()
- self.filters = [(fn, current[fn]) for fn in filterNames]
- return
-@@ -738,7 +739,7 @@ class Preprocessor:
- args can either be a file name, or a file-like object.
- Files should be opened, and will be closed after processing.
- """
-- isName = type(args) == str or type(args) == unicode
-+        isName = type(args) == str
- oldCheckLineNumbers = self.checkLineNumbers
- self.checkLineNumbers = False
- if isName:
-diff --git a/python/mozbuild/mozbuild/shellutil.py b/python/mozbuild/mozbuild/shellutil.py
-index 185a970ee..c0c15f8f2 100644
---- a/python/mozbuild/mozbuild/shellutil.py
-+++ b/python/mozbuild/mozbuild/shellutil.py
-@@ -15,7 +15,7 @@ def _tokens2re(**tokens):
- # which matches the pattern and captures it in a named match group.
- # The group names and patterns are given as arguments.
- all_tokens = '|'.join('(?P<%s>%s)' % (name, value)
-- for name, value in tokens.iteritems())
-+ for name, value in tokens.items())
- nonescaped = r'(?<!\\)(?:%s)' % all_tokens
-
- # The final pattern matches either the above pattern, or an escaped
-@@ -96,7 +96,7 @@ class _ClineSplitter(object):
- self.cline = self.cline[m.end():]
-
- match = {name: value
-- for name, value in m.groupdict().items() if value}
-+ for name, value in list(m.groupdict().items()) if value}
- if 'quote' in match:
- # " or ' start a quoted string
- if match['quote'] == '"':
-@@ -144,7 +144,7 @@ class _ClineSplitter(object):
- self._push(self.cline[:m.start()])
- self.cline = self.cline[m.end():]
- match = {name: value
-- for name, value in m.groupdict().items() if value}
-+ for name, value in list(m.groupdict().items()) if value}
- if 'quote' in match:
- # a double quote ends the quoted string, so go back to
- # unquoted parsing
-diff --git a/python/mozbuild/mozbuild/test/backend/test_recursivemake.py b/python/mozbuild/mozbuild/test/backend/test_recursivemake.py
-index a05059f8a..355a96165 100644
---- a/python/mozbuild/mozbuild/test/backend/test_recursivemake.py
-+++ b/python/mozbuild/mozbuild/test/backend/test_recursivemake.py
-@@ -2,9 +2,9 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import unicode_literals
-
--import cPickle as pickle
-+
-+import pickle
- import json
- import os
- import unittest
-@@ -333,7 +333,7 @@ class TestRecursiveMakeBackend(BackendTester):
- ],
- }
-
-- for var, val in expected.items():
-+ for var, val in list(expected.items()):
- # print("test_variable_passthru[%s]" % (var))
- found = [str for str in lines if str.startswith(var)]
- self.assertEqual(found, val)
-@@ -372,7 +372,7 @@ class TestRecursiveMakeBackend(BackendTester):
- ],
- }
-
-- for var, val in expected.items():
-+ for var, val in list(expected.items()):
- found = [str for str in lines if str.startswith(var)]
- self.assertEqual(found, val)
-
-@@ -563,7 +563,7 @@ class TestRecursiveMakeBackend(BackendTester):
-
- # This is not the most robust test in the world, but it gets the job
- # done.
-- entries = [e for e in m._dests.keys() if '**' in e]
-+ entries = [e for e in list(m._dests.keys()) if '**' in e]
- self.assertEqual(len(entries), 1)
- self.assertIn('support/**', entries[0])
-
-@@ -590,11 +590,11 @@ class TestRecursiveMakeBackend(BackendTester):
- set(['child/test_sub.js',
- 'child/data/**',
- 'child/another-file.sjs']))
-- for key in test_installs.keys():
-+ for key in list(test_installs.keys()):
- self.assertIn(key, test_installs)
-
- synthesized_manifest = InstallManifest()
-- for item, installs in test_installs.items():
-+ for item, installs in list(test_installs.items()):
- for install_info in installs:
- if len(install_info) == 3:
- synthesized_manifest.add_pattern_link(*install_info)
-@@ -602,7 +602,7 @@ class TestRecursiveMakeBackend(BackendTester):
- synthesized_manifest.add_link(*install_info)
-
- self.assertEqual(len(synthesized_manifest), 3)
-- for item, info in synthesized_manifest._dests.items():
-+ for item, info in list(synthesized_manifest._dests.items()):
- self.assertIn(item, m)
- self.assertEqual(info, m._dests[item])
-
-@@ -864,7 +864,7 @@ class TestRecursiveMakeBackend(BackendTester):
- expected[mozpath.join(env.topobjdir, 'final-target')] = [
- 'FINAL_TARGET = $(DEPTH)/random-final-target'
- ]
-- for key, expected_rules in expected.iteritems():
-+ for key, expected_rules in expected.items():
- backend_path = mozpath.join(key, 'backend.mk')
- lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
- found = [str for str in lines if
-diff --git a/python/mozbuild/mozbuild/test/configure/common.py b/python/mozbuild/mozbuild/test/configure/common.py
-index 150c6e393..5e2c2afaa 100644
---- a/python/mozbuild/mozbuild/test/configure/common.py
-+++ b/python/mozbuild/mozbuild/test/configure/common.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import copy
- import errno
-@@ -16,7 +16,7 @@ from mozbuild.configure import ConfigureSandbox
- from mozbuild.util import ReadOnlyNamespace
- from mozpack import path as mozpath
-
--from StringIO import StringIO
-+from io import StringIO
- from which import WhichError
-
- from buildconfig import (
-@@ -77,10 +77,10 @@ class ConfigureTestSandbox(ConfigureSandbox):
- self._search_path = environ.get('PATH', '').split(os.pathsep)
-
- self._subprocess_paths = {
-- mozpath.abspath(k): v for k, v in paths.iteritems() if v
-+ mozpath.abspath(k): v for k, v in paths.items() if v
- }
-
-- paths = paths.keys()
-+ paths = list(paths.keys())
-
- environ = dict(environ)
- if 'CONFIG_SHELL' not in environ:
-diff --git a/python/mozbuild/mozbuild/test/configure/lint.py b/python/mozbuild/mozbuild/test/configure/lint.py
-index 98f5f4fe7..a549b4bcf 100644
---- a/python/mozbuild/mozbuild/test/configure/lint.py
-+++ b/python/mozbuild/mozbuild/test/configure/lint.py
-@@ -2,11 +2,11 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import os
- import unittest
--from StringIO import StringIO
-+from io import StringIO
- from mozunit import main
- from buildconfig import (
- topobjdir,
-@@ -40,9 +40,7 @@ class LintMeta(type):
- return type.__new__(mcs, name, bases, attrs)
-
-
--class Lint(unittest.TestCase):
-- __metaclass__ = LintMeta
--
-+class Lint(unittest.TestCase, metaclass=LintMeta):
- def setUp(self):
- self._curdir = os.getcwd()
- os.chdir(topobjdir)
-diff --git a/python/mozbuild/mozbuild/test/configure/test_checks_configure.py b/python/mozbuild/mozbuild/test/configure/test_checks_configure.py
-index c60000bb2..07091c077 100644
---- a/python/mozbuild/mozbuild/test/configure/test_checks_configure.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_checks_configure.py
-@@ -2,9 +2,9 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-
--from StringIO import StringIO
-+
-+from io import StringIO
- import os
- import sys
- import textwrap
-@@ -447,7 +447,7 @@ class TestChecksConfigure(unittest.TestCase):
- checking for a... %s
- ''' % self.OTHER_A))
-
-- dirs = map(mozpath.dirname, (self.OTHER_A, self.KNOWN_A))
-+ dirs = list(map(mozpath.dirname, (self.OTHER_A, self.KNOWN_A)))
- config, out, status = self.get_result(textwrap.dedent('''\
- check_prog("A", ("known-a",), paths=["%s"])
- ''' % os.pathsep.join(dirs)))
-@@ -457,7 +457,7 @@ class TestChecksConfigure(unittest.TestCase):
- checking for a... %s
- ''' % self.OTHER_A))
-
-- dirs = map(mozpath.dirname, (self.KNOWN_A, self.KNOWN_B))
-+ dirs = list(map(mozpath.dirname, (self.KNOWN_A, self.KNOWN_B)))
- config, out, status = self.get_result(textwrap.dedent('''\
- check_prog("A", ("known-a",), paths=["%s", "%s"])
- ''' % (os.pathsep.join(dirs), self.OTHER_A)))
-diff --git a/python/mozbuild/mozbuild/test/configure/test_compile_checks.py b/python/mozbuild/mozbuild/test/configure/test_compile_checks.py
-index b5be3bf2e..282000bb2 100644
---- a/python/mozbuild/mozbuild/test/configure/test_compile_checks.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_compile_checks.py
-@@ -2,14 +2,14 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import os
- import textwrap
- import unittest
- import mozpack.path as mozpath
-
--from StringIO import StringIO
-+from io import StringIO
-
- from buildconfig import topsrcdir
- from common import ConfigureTestSandbox
-diff --git a/python/mozbuild/mozbuild/test/configure/test_configure.py b/python/mozbuild/mozbuild/test/configure/test_configure.py
-index f37e2701f..eb0ca84fb 100644
---- a/python/mozbuild/mozbuild/test/configure/test_configure.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_configure.py
-@@ -2,9 +2,9 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-
--from StringIO import StringIO
-+
-+from io import StringIO
- import os
- import sys
- import textwrap
-@@ -43,7 +43,7 @@ class TestConfigure(unittest.TestCase):
-
- if '--help' in options:
- return out.getvalue(), config
-- self.assertEquals('', out.getvalue())
-+ self.assertEqual('', out.getvalue())
- return config
-
- def moz_configure(self, source):
-@@ -55,7 +55,7 @@ class TestConfigure(unittest.TestCase):
- def test_defaults(self):
- config = self.get_config()
- self.maxDiff = None
-- self.assertEquals({
-+ self.assertEqual({
- 'CHOICES': NegativeOptionValue(),
- 'DEFAULTED': PositiveOptionValue(('not-simple',)),
- 'IS_GCC': NegativeOptionValue(),
-@@ -71,9 +71,9 @@ class TestConfigure(unittest.TestCase):
- def test_help(self):
- help, config = self.get_config(['--help'], prog='configure')
-
-- self.assertEquals({}, config)
-+ self.assertEqual({}, config)
- self.maxDiff = None
-- self.assertEquals(
-+ self.assertEqual(
- 'Usage: configure [options]\n'
- '\n'
- 'Options: [defaults in brackets after descriptions]\n'
-@@ -109,7 +109,7 @@ class TestConfigure(unittest.TestCase):
- ):
- self.assertNotIn('ENABLED_SIMPLE', config)
- self.assertIn('SIMPLE', config)
-- self.assertEquals(NegativeOptionValue(), config['SIMPLE'])
-+ self.assertEqual(NegativeOptionValue(), config['SIMPLE'])
-
- for config in (
- self.get_config(['--enable-simple']),
-@@ -117,7 +117,7 @@ class TestConfigure(unittest.TestCase):
- ):
- self.assertIn('ENABLED_SIMPLE', config)
- self.assertIn('SIMPLE', config)
-- self.assertEquals(PositiveOptionValue(), config['SIMPLE'])
-+ self.assertEqual(PositiveOptionValue(), config['SIMPLE'])
- self.assertIs(config['SIMPLE'], config['ENABLED_SIMPLE'])
-
- # --enable-simple doesn't take values.
-@@ -135,7 +135,7 @@ class TestConfigure(unittest.TestCase):
- env={'MOZ_WITH_ENV': '1'}),
- ):
- self.assertIn('WITH_ENV', config)
-- self.assertEquals(NegativeOptionValue(), config['WITH_ENV'])
-+ self.assertEqual(NegativeOptionValue(), config['WITH_ENV'])
-
- for config in (
- self.get_config(['--enable-with-env']),
-@@ -145,7 +145,7 @@ class TestConfigure(unittest.TestCase):
- env={'MOZ_WITH_ENV': ''}),
- ):
- self.assertIn('WITH_ENV', config)
-- self.assertEquals(PositiveOptionValue(), config['WITH_ENV'])
-+ self.assertEqual(PositiveOptionValue(), config['WITH_ENV'])
-
- with self.assertRaises(InvalidOptionError):
- self.get_config(['--enable-with-env=value'])
-@@ -160,23 +160,23 @@ class TestConfigure(unittest.TestCase):
- self.get_config(['--enable-values', '--disable-values']),
- ):
- self.assertIn(name, config)
-- self.assertEquals(NegativeOptionValue(), config[name])
-+ self.assertEqual(NegativeOptionValue(), config[name])
-
- for config in (
- self.get_config(['--enable-values']),
- self.get_config(['--disable-values', '--enable-values']),
- ):
- self.assertIn(name, config)
-- self.assertEquals(PositiveOptionValue(), config[name])
-+ self.assertEqual(PositiveOptionValue(), config[name])
-
- config = self.get_config(['--enable-values=foo'])
- self.assertIn(name, config)
-- self.assertEquals(PositiveOptionValue(('foo',)), config[name])
-+ self.assertEqual(PositiveOptionValue(('foo',)), config[name])
-
- config = self.get_config(['--enable-values=foo,bar'])
- self.assertIn(name, config)
- self.assertTrue(config[name])
-- self.assertEquals(PositiveOptionValue(('foo', 'bar')), config[name])
-+ self.assertEqual(PositiveOptionValue(('foo', 'bar')), config[name])
-
- def test_values2(self):
- self.test_values('VALUES2')
-@@ -187,12 +187,12 @@ class TestConfigure(unittest.TestCase):
- def test_returned_default(self):
- config = self.get_config(['--enable-simple'])
- self.assertIn('DEFAULTED', config)
-- self.assertEquals(
-+ self.assertEqual(
- PositiveOptionValue(('simple',)), config['DEFAULTED'])
-
- config = self.get_config(['--disable-simple'])
- self.assertIn('DEFAULTED', config)
-- self.assertEquals(
-+ self.assertEqual(
- PositiveOptionValue(('not-simple',)), config['DEFAULTED'])
-
- def test_returned_choices(self):
-@@ -200,13 +200,13 @@ class TestConfigure(unittest.TestCase):
- config = self.get_config(
- ['--enable-values=alpha', '--returned-choices=%s' % val])
- self.assertIn('CHOICES', config)
-- self.assertEquals(PositiveOptionValue((val,)), config['CHOICES'])
-+ self.assertEqual(PositiveOptionValue((val,)), config['CHOICES'])
-
- for val in ('0', '1', '2'):
- config = self.get_config(
- ['--enable-values=numeric', '--returned-choices=%s' % val])
- self.assertIn('CHOICES', config)
-- self.assertEquals(PositiveOptionValue((val,)), config['CHOICES'])
-+ self.assertEqual(PositiveOptionValue((val,)), config['CHOICES'])
-
- with self.assertRaises(InvalidOptionError):
- self.get_config(['--enable-values=numeric',
-@@ -218,12 +218,12 @@ class TestConfigure(unittest.TestCase):
- def test_included(self):
- config = self.get_config(env={'CC': 'gcc'})
- self.assertIn('IS_GCC', config)
-- self.assertEquals(config['IS_GCC'], True)
-+ self.assertEqual(config['IS_GCC'], True)
-
- config = self.get_config(
- ['--enable-include=extra.configure', '--extra'])
- self.assertIn('EXTRA', config)
-- self.assertEquals(PositiveOptionValue(), config['EXTRA'])
-+ self.assertEqual(PositiveOptionValue(), config['EXTRA'])
-
- with self.assertRaises(InvalidOptionError):
- self.get_config(['--extra'])
-@@ -231,7 +231,7 @@ class TestConfigure(unittest.TestCase):
- def test_template(self):
- config = self.get_config(env={'CC': 'gcc'})
- self.assertIn('CFLAGS', config)
-- self.assertEquals(config['CFLAGS'], ['-Werror=foobar'])
-+ self.assertEqual(config['CFLAGS'], ['-Werror=foobar'])
-
- config = self.get_config(env={'CC': 'clang'})
- self.assertNotIn('CFLAGS', config)
-@@ -288,7 +288,7 @@ class TestConfigure(unittest.TestCase):
- sandbox
- )
-
-- import __builtin__
-- self.assertIs(sandbox['foo'](), __builtin__)
-+ import builtins
-+ self.assertIs(sandbox['foo'](), builtins)
-
- exec_(textwrap.dedent('''
-@@ -300,7 +300,7 @@ class TestConfigure(unittest.TestCase):
- )
-
- f = sandbox['foo']()
-- self.assertEquals(f.name, os.devnull)
-+ self.assertEqual(f.name, os.devnull)
- f.close()
-
- # This unlocks the sandbox
-@@ -336,8 +336,8 @@ class TestConfigure(unittest.TestCase):
- self.assertIs(sandbox['foo'](), sandbox)
-
- # Nothing leaked from the function being executed
-- self.assertEquals(sandbox.keys(), ['__builtins__', 'foo'])
-- self.assertEquals(sandbox['__builtins__'], ConfigureSandbox.BUILTINS)
-+ self.assertEqual(list(sandbox.keys()), ['__builtins__', 'foo'])
-+ self.assertEqual(sandbox['__builtins__'], ConfigureSandbox.BUILTINS)
-
- exec_(textwrap.dedent('''
- @template
-@@ -354,7 +354,7 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(NameError) as e:
- sandbox._depends[sandbox['bar']].result()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "global name 'sys' is not defined")
-
- def test_apply_imports(self):
-@@ -380,28 +380,28 @@ class TestConfigure(unittest.TestCase):
- sandbox
- )
-
-- self.assertEquals(len(imports), 1)
-+ self.assertEqual(len(imports), 1)
-
- def test_os_path(self):
- config = self.get_config(['--with-imports=%s' % __file__])
- self.assertIn('HAS_ABSPATH', config)
-- self.assertEquals(config['HAS_ABSPATH'], True)
-+ self.assertEqual(config['HAS_ABSPATH'], True)
- self.assertIn('HAS_GETATIME', config)
-- self.assertEquals(config['HAS_GETATIME'], True)
-+ self.assertEqual(config['HAS_GETATIME'], True)
- self.assertIn('HAS_GETATIME2', config)
-- self.assertEquals(config['HAS_GETATIME2'], False)
-+ self.assertEqual(config['HAS_GETATIME2'], False)
-
- def test_template_call(self):
- config = self.get_config(env={'CC': 'gcc'})
- self.assertIn('TEMPLATE_VALUE', config)
-- self.assertEquals(config['TEMPLATE_VALUE'], 42)
-+ self.assertEqual(config['TEMPLATE_VALUE'], 42)
- self.assertIn('TEMPLATE_VALUE_2', config)
-- self.assertEquals(config['TEMPLATE_VALUE_2'], 21)
-+ self.assertEqual(config['TEMPLATE_VALUE_2'], 21)
-
- def test_template_imports(self):
- config = self.get_config(['--enable-imports-in-template'])
- self.assertIn('PLATFORM', config)
-- self.assertEquals(config['PLATFORM'], sys.platform)
-+ self.assertEqual(config['PLATFORM'], sys.platform)
-
- def test_decorators(self):
- config = {}
-@@ -419,27 +419,27 @@ class TestConfigure(unittest.TestCase):
- return self.get_config(*args, configure='set_config.configure')
-
- help, config = get_config(['--help'])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = get_config(['--set-foo'])
- self.assertIn('FOO', config)
-- self.assertEquals(config['FOO'], True)
-+ self.assertEqual(config['FOO'], True)
-
- config = get_config(['--set-bar'])
- self.assertNotIn('FOO', config)
- self.assertIn('BAR', config)
-- self.assertEquals(config['BAR'], True)
-+ self.assertEqual(config['BAR'], True)
-
- config = get_config(['--set-value=qux'])
- self.assertIn('VALUE', config)
-- self.assertEquals(config['VALUE'], 'qux')
-+ self.assertEqual(config['VALUE'], 'qux')
-
- config = get_config(['--set-name=hoge'])
- self.assertIn('hoge', config)
-- self.assertEquals(config['hoge'], True)
-+ self.assertEqual(config['hoge'], True)
-
- config = get_config([])
-- self.assertEquals(config, {'BAR': False})
-+ self.assertEqual(config, {'BAR': False})
-
- with self.assertRaises(ConfigureError):
- # Both --set-foo and --set-name=FOO are going to try to
-@@ -454,11 +454,11 @@ class TestConfigure(unittest.TestCase):
- set_config('QUX', 'qux', when='--with-qux')
- '''):
- config = self.get_config()
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'FOO': 'foo',
- })
- config = self.get_config(['--with-qux'])
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'FOO': 'foo',
- 'QUX': 'qux',
- })
-@@ -468,27 +468,27 @@ class TestConfigure(unittest.TestCase):
- return self.get_config(*args, configure='set_define.configure')
-
- help, config = get_config(['--help'])
-- self.assertEquals(config, {'DEFINES': {}})
-+ self.assertEqual(config, {'DEFINES': {}})
-
- config = get_config(['--set-foo'])
- self.assertIn('FOO', config['DEFINES'])
-- self.assertEquals(config['DEFINES']['FOO'], True)
-+ self.assertEqual(config['DEFINES']['FOO'], True)
-
- config = get_config(['--set-bar'])
- self.assertNotIn('FOO', config['DEFINES'])
- self.assertIn('BAR', config['DEFINES'])
-- self.assertEquals(config['DEFINES']['BAR'], True)
-+ self.assertEqual(config['DEFINES']['BAR'], True)
-
- config = get_config(['--set-value=qux'])
- self.assertIn('VALUE', config['DEFINES'])
-- self.assertEquals(config['DEFINES']['VALUE'], 'qux')
-+ self.assertEqual(config['DEFINES']['VALUE'], 'qux')
-
- config = get_config(['--set-name=hoge'])
- self.assertIn('hoge', config['DEFINES'])
-- self.assertEquals(config['DEFINES']['hoge'], True)
-+ self.assertEqual(config['DEFINES']['hoge'], True)
-
- config = get_config([])
-- self.assertEquals(config['DEFINES'], {'BAR': False})
-+ self.assertEqual(config['DEFINES'], {'BAR': False})
-
- with self.assertRaises(ConfigureError):
- # Both --set-foo and --set-name=FOO are going to try to
-@@ -503,11 +503,11 @@ class TestConfigure(unittest.TestCase):
- set_define('QUX', 'qux', when='--with-qux')
- '''):
- config = self.get_config()
-- self.assertEquals(config['DEFINES'], {
-+ self.assertEqual(config['DEFINES'], {
- 'FOO': 'foo',
- })
- config = self.get_config(['--with-qux'])
-- self.assertEquals(config['DEFINES'], {
-+ self.assertEqual(config['DEFINES'], {
- 'FOO': 'foo',
- 'QUX': 'qux',
- })
-@@ -518,19 +518,19 @@ class TestConfigure(unittest.TestCase):
- *args, configure='imply_option/simple.configure')
-
- help, config = get_config(['--help'])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = get_config([])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = get_config(['--enable-foo'])
- self.assertIn('BAR', config)
-- self.assertEquals(config['BAR'], PositiveOptionValue())
-+ self.assertEqual(config['BAR'], PositiveOptionValue())
-
- with self.assertRaises(InvalidOptionError) as e:
- get_config(['--enable-foo', '--disable-bar'])
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- "'--enable-bar' implied by '--enable-foo' conflicts with "
- "'--disable-bar' from the command-line")
-@@ -541,31 +541,31 @@ class TestConfigure(unittest.TestCase):
- *args, configure='imply_option/negative.configure')
-
- help, config = get_config(['--help'])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = get_config([])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = get_config(['--enable-foo'])
- self.assertIn('BAR', config)
-- self.assertEquals(config['BAR'], NegativeOptionValue())
-+ self.assertEqual(config['BAR'], NegativeOptionValue())
-
- with self.assertRaises(InvalidOptionError) as e:
- get_config(['--enable-foo', '--enable-bar'])
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- "'--disable-bar' implied by '--enable-foo' conflicts with "
- "'--enable-bar' from the command-line")
-
- config = get_config(['--disable-hoge'])
- self.assertIn('BAR', config)
-- self.assertEquals(config['BAR'], NegativeOptionValue())
-+ self.assertEqual(config['BAR'], NegativeOptionValue())
-
- with self.assertRaises(InvalidOptionError) as e:
- get_config(['--disable-hoge', '--enable-bar'])
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- "'--disable-bar' implied by '--disable-hoge' conflicts with "
- "'--enable-bar' from the command-line")
-@@ -576,23 +576,23 @@ class TestConfigure(unittest.TestCase):
- *args, configure='imply_option/values.configure')
-
- help, config = get_config(['--help'])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = get_config([])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = get_config(['--enable-foo=a'])
- self.assertIn('BAR', config)
-- self.assertEquals(config['BAR'], PositiveOptionValue(('a',)))
-+ self.assertEqual(config['BAR'], PositiveOptionValue(('a',)))
-
- config = get_config(['--enable-foo=a,b'])
- self.assertIn('BAR', config)
-- self.assertEquals(config['BAR'], PositiveOptionValue(('a','b')))
-+ self.assertEqual(config['BAR'], PositiveOptionValue(('a','b')))
-
- with self.assertRaises(InvalidOptionError) as e:
- get_config(['--enable-foo=a,b', '--disable-bar'])
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- "'--enable-bar=a,b' implied by '--enable-foo' conflicts with "
- "'--disable-bar' from the command-line")
-@@ -603,15 +603,15 @@ class TestConfigure(unittest.TestCase):
- *args, configure='imply_option/infer.configure')
-
- help, config = get_config(['--help'])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = get_config([])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- with self.assertRaises(InvalidOptionError) as e:
- get_config(['--enable-foo', '--disable-bar'])
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- "'--enable-bar' implied by '--enable-foo' conflicts with "
- "'--disable-bar' from the command-line")
-@@ -619,7 +619,7 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(ConfigureError) as e:
- self.get_config([], configure='imply_option/infer_ko.configure')
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- "Cannot infer what implies '--enable-bar'. Please add a `reason` "
- "to the `imply_option` call.")
-@@ -630,25 +630,25 @@ class TestConfigure(unittest.TestCase):
- *args, configure='imply_option/imm.configure')
-
- help, config = get_config(['--help'])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = get_config([])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config_path = mozpath.abspath(
- mozpath.join(test_data_path, 'imply_option', 'imm.configure'))
-
-- with self.assertRaisesRegexp(InvalidOptionError,
-+ with self.assertRaisesRegex(InvalidOptionError,
- "--enable-foo' implied by 'imply_option at %s:7' conflicts with "
- "'--disable-foo' from the command-line" % config_path):
- get_config(['--disable-foo'])
-
-- with self.assertRaisesRegexp(InvalidOptionError,
-+ with self.assertRaisesRegex(InvalidOptionError,
- "--enable-bar=foo,bar' implied by 'imply_option at %s:16' conflicts"
- " with '--enable-bar=a,b,c' from the command-line" % config_path):
- get_config(['--enable-bar=a,b,c'])
-
-- with self.assertRaisesRegexp(InvalidOptionError,
-+ with self.assertRaisesRegex(InvalidOptionError,
- "--enable-baz=BAZ' implied by 'imply_option at %s:25' conflicts"
- " with '--enable-baz=QUUX' from the command-line" % config_path):
- get_config(['--enable-baz=QUUX'])
-@@ -660,7 +660,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "`--with-foo`, emitted from `%s` line 2, is unknown."
- % mozpath.join(test_data_path, 'moz.configure'))
-
-@@ -675,7 +675,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Unexpected type: 'int'")
-
- def test_imply_option_when(self):
-@@ -686,12 +686,12 @@ class TestConfigure(unittest.TestCase):
- set_config('QUX', depends('--with-qux')(lambda x: x))
- '''):
- config = self.get_config()
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'QUX': NegativeOptionValue(),
- })
-
- config = self.get_config(['--with-foo'])
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'QUX': PositiveOptionValue(),
- })
-
-@@ -700,7 +700,7 @@ class TestConfigure(unittest.TestCase):
- with self.moz_configure('option("--with-foo", help="foo")'):
- self.get_config()
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'Option `--with-foo` is not handled ; reference it with a @depends'
- )
-@@ -712,7 +712,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'Option `--with-foo` already defined'
- )
-@@ -724,7 +724,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'Option `MOZ_FOO` already defined'
- )
-@@ -736,7 +736,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'Option `MOZ_FOO` already defined'
- )
-@@ -748,7 +748,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'Option `MOZ_FOO` already defined'
- )
-@@ -760,7 +760,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'Option `--with-foo` already defined'
- )
-@@ -776,18 +776,18 @@ class TestConfigure(unittest.TestCase):
- set_config('QUX', depends('--with-qux', when='--with-foo')(lambda x: x))
- '''):
- config = self.get_config()
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'FOO': NegativeOptionValue(),
- })
-
- config = self.get_config(['--with-foo'])
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'FOO': PositiveOptionValue(),
- 'QUX': NegativeOptionValue(),
- })
-
- config = self.get_config(['--with-foo', '--with-qux'])
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'FOO': PositiveOptionValue(),
- 'QUX': PositiveOptionValue(),
- })
-@@ -795,7 +795,7 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(InvalidOptionError) as e:
- self.get_config(['--with-bar'])
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- '--with-bar is not available in this configuration'
- )
-@@ -803,7 +803,7 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(InvalidOptionError) as e:
- self.get_config(['--with-qux'])
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- '--with-qux is not available in this configuration'
- )
-@@ -811,18 +811,18 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(InvalidOptionError) as e:
- self.get_config(['QUX=1'])
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'QUX is not available in this configuration'
- )
-
- config = self.get_config(env={'QUX': '1'})
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'FOO': NegativeOptionValue(),
- })
-
- help, config = self.get_config(['--help'])
-- self.assertEquals(help, textwrap.dedent('''\
-+ self.assertEqual(help, textwrap.dedent('''\
- Usage: configure [options]
-
- Options: [defaults in brackets after descriptions]
-@@ -833,7 +833,7 @@ class TestConfigure(unittest.TestCase):
- '''))
-
- help, config = self.get_config(['--help', '--with-foo'])
-- self.assertEquals(help, textwrap.dedent('''\
-+ self.assertEqual(help, textwrap.dedent('''\
- Usage: configure [options]
-
- Options: [defaults in brackets after descriptions]
-@@ -851,7 +851,7 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(ConfigureError) as e:
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '@depends function needs the same `when` as '
- 'options it depends on')
-
-@@ -868,7 +868,7 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(ConfigureError) as e:
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '@depends function needs the same `when` as '
- 'options it depends on')
-
-@@ -896,7 +896,7 @@ class TestConfigure(unittest.TestCase):
- with self.moz_configure('include("../foo.configure")'):
- self.get_config()
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'Cannot include `%s` because it is not in a subdirectory of `%s`'
- % (mozpath.normpath(mozpath.join(test_data_path, '..',
-@@ -911,7 +911,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'Cannot include `%s` because it was included already.'
- % mozpath.normpath(mozpath.join(test_data_path,
-@@ -924,7 +924,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message, "Unexpected type: 'int'")
-+ self.assertEqual(e.exception.message, "Unexpected type: 'int'")
-
- def test_include_when(self):
- with MockedOpen({
-@@ -967,26 +967,26 @@ class TestConfigure(unittest.TestCase):
- '''),
- }):
- config = self.get_config()
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = self.get_config(['--with-foo'])
-- self.assertEquals(config, {})
-+ self.assertEqual(config, {})
-
- config = self.get_config(['--with-bar'])
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'BAR': 'bar',
- })
-
- with self.assertRaises(InvalidOptionError) as e:
- self.get_config(['--with-qux'])
-
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- '--with-qux is not available in this configuration'
- )
-
- config = self.get_config(['--with-foo', '--with-foo-really'])
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'FOO': 'foo',
- 'FOO2': True,
- })
-@@ -998,7 +998,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message, 'Cannot reassign builtins')
-+ self.assertEqual(e.exception.message, 'Cannot reassign builtins')
-
- with self.assertRaises(KeyError) as e:
- with self.moz_configure('''
-@@ -1006,7 +1006,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'Cannot assign `foo` because it is neither a '
- '@depends nor a @template')
-
-@@ -1019,7 +1019,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "@depends needs at least one argument")
-
- with self.assertRaises(ConfigureError) as e:
-@@ -1030,7 +1030,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "'--with-foo' is not a known option. Maybe it's "
- "declared too late?")
-
-@@ -1042,7 +1042,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Option must not contain an '='")
-
- with self.assertRaises(TypeError) as e:
-@@ -1053,7 +1053,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Cannot use object of type 'int' as argument "
- "to @depends")
-
-@@ -1065,7 +1065,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Cannot decorate generator functions with @depends")
-
- with self.assertRaises(TypeError) as e:
-@@ -1074,7 +1074,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Unexpected type: 'int'")
-
- with self.assertRaises(ConfigureError) as e:
-@@ -1088,7 +1088,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The `foo` function may not be called")
-
- with self.assertRaises(TypeError) as e:
-@@ -1099,7 +1099,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "depends_impl() got an unexpected keyword argument 'foo'")
-
- def test_depends_when(self):
-@@ -1124,12 +1124,12 @@ class TestConfigure(unittest.TestCase):
- set_config('QUX', qux)
- '''):
- config = self.get_config()
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'FOO': 'foo',
- })
-
- config = self.get_config(['--with-qux'])
-- self.assertEquals(config, {
-+ self.assertEqual(config, {
- 'FOO': 'foo',
- 'QUX': 'qux',
- })
-@@ -1144,7 +1144,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '@imports must appear after @template')
-
- with self.assertRaises(ConfigureError) as e:
-@@ -1157,7 +1157,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '@imports must appear after @depends')
-
- for import_ in (
-@@ -1174,7 +1174,7 @@ class TestConfigure(unittest.TestCase):
- ''' % import_):
- self.get_config()
-
-- self.assertEquals(e.exception.message, "Unexpected type: 'int'")
-+ self.assertEqual(e.exception.message, "Unexpected type: 'int'")
-
- with self.assertRaises(TypeError) as e:
- with self.moz_configure('''
-@@ -1185,7 +1185,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message, "Unexpected type: 'int'")
-+ self.assertEqual(e.exception.message, "Unexpected type: 'int'")
-
- with self.assertRaises(ValueError) as e:
- with self.moz_configure('''
-@@ -1195,7 +1195,7 @@ class TestConfigure(unittest.TestCase):
- '''):
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Invalid argument to @imports: 'os*'")
-
- def test_only_when(self):
-@@ -1250,7 +1250,7 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(InvalidOptionError) as e:
- self.get_config(['--foo'])
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--foo is not available in this configuration')
-
- # Cannot depend on an option defined in a only_when block, because we
-@@ -1261,7 +1261,7 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(ConfigureError) as e:
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '@depends function needs the same `when` as '
- 'options it depends on')
-
-@@ -1278,7 +1278,7 @@ class TestConfigure(unittest.TestCase):
- with self.assertRaises(InvalidOptionError) as e:
- self.get_config()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--foo is not available in this configuration')
-
- # And similarly doesn't fail when the condition is true.
-diff --git a/python/mozbuild/mozbuild/test/configure/test_lint.py b/python/mozbuild/mozbuild/test/configure/test_lint.py
-index 6d8d4c49f..6ad897839 100644
---- a/python/mozbuild/mozbuild/test/configure/test_lint.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_lint.py
-@@ -2,9 +2,9 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-
--from StringIO import StringIO
-+
-+from io import StringIO
- import os
- import textwrap
- import unittest
-@@ -62,7 +62,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "`bar` depends on '--help' and `foo`. "
- "`foo` must depend on '--help'")
-
-@@ -85,7 +85,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "`bar` depends on '--help' and `foo`. "
- "`foo` must depend on '--help'")
-
-@@ -111,7 +111,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Missing @depends for `foo`: '--help'")
-
- with self.assertRaises(ConfigureError) as e:
-@@ -130,7 +130,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Missing @depends for `foo`: '--help'")
-
- with self.assertRaises(ConfigureError) as e:
-@@ -145,7 +145,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Missing @depends for `foo`: '--help'")
-
- # This would have failed with "Missing @depends for `foo`: '--help'"
-@@ -178,7 +178,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "Missing @depends for `foo`: '--help'")
-
- # There is a default restricted `os` module when there is no explicit
-@@ -205,7 +205,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "%s:3: The dependency on `--foo` is unused."
- % mozpath.join(test_data_path, 'moz.configure'))
-
-@@ -222,7 +222,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "%s:5: The dependency on `bar` is unused."
- % mozpath.join(test_data_path, 'moz.configure'))
-
-@@ -236,7 +236,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "%s:2: The dependency on `<lambda>` is unused."
- % mozpath.join(test_data_path, 'moz.configure'))
-
-@@ -257,7 +257,7 @@ class TestLint(unittest.TestCase):
- '''):
- self.lint_test()
-
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "%s:9: The dependency on `qux` is unused."
- % mozpath.join(test_data_path, 'moz.configure'))
-
-diff --git a/python/mozbuild/mozbuild/test/configure/test_moz_configure.py b/python/mozbuild/mozbuild/test/configure/test_moz_configure.py
-index b3342e268..7a26e9e09 100644
---- a/python/mozbuild/mozbuild/test/configure/test_moz_configure.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_moz_configure.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- from mozunit import main
- from mozpack import path as mozpath
-@@ -27,10 +27,10 @@ class TestMozConfigure(BaseConfigureTest):
- shell = mozpath.abspath('/bin/sh')
- return result.replace('CONFIG_SHELL=%s ' % shell, '')
-
-- self.assertEquals('--enable-application=browser',
-+ self.assertEqual('--enable-application=browser',
- get_value_for(['--enable-application=browser']))
-
-- self.assertEquals('--enable-application=browser '
-+ self.assertEqual('--enable-application=browser '
- 'MOZ_VTUNE=1',
- get_value_for(['--enable-application=browser',
- 'MOZ_VTUNE=1']))
-@@ -39,25 +39,25 @@ class TestMozConfigure(BaseConfigureTest):
- environ={'MOZ_VTUNE': '1'},
- mozconfig='ac_add_options --enable-project=js')
-
-- self.assertEquals('--enable-project=js MOZ_VTUNE=1',
-+ self.assertEqual('--enable-project=js MOZ_VTUNE=1',
- value)
-
- # --disable-js-shell is the default, so it's filtered out.
-- self.assertEquals('--enable-application=browser',
-+ self.assertEqual('--enable-application=browser',
- get_value_for(['--enable-application=browser',
- '--disable-js-shell']))
-
- # Normally, --without-foo would be filtered out because that's the
- # default, but since it is a (fake) old-configure option, it always
- # appears.
-- self.assertEquals('--enable-application=browser --without-foo',
-+ self.assertEqual('--enable-application=browser --without-foo',
- get_value_for(['--enable-application=browser',
- '--without-foo']))
-- self.assertEquals('--enable-application=browser --with-foo',
-+ self.assertEqual('--enable-application=browser --with-foo',
- get_value_for(['--enable-application=browser',
- '--with-foo']))
-
-- self.assertEquals("--enable-application=browser '--with-foo=foo bar'",
-+ self.assertEqual("--enable-application=browser '--with-foo=foo bar'",
- get_value_for(['--enable-application=browser',
- '--with-foo=foo bar']))
-
-@@ -69,7 +69,7 @@ class TestMozConfigure(BaseConfigureTest):
- self.version = version
-
- def __call__(self, stdin, args):
-- this.assertEquals(args, ('-version',))
-+ this.assertEqual(args, ('-version',))
- return 0, self.version, ''
-
- def check_nsis_version(version):
-@@ -84,13 +84,13 @@ class TestMozConfigure(BaseConfigureTest):
- with self.assertRaises(SystemExit) as e:
- check_nsis_version('v3.0a2')
-
-- self.assertEquals(check_nsis_version('v3.0b1'), '3.0b1')
-- self.assertEquals(check_nsis_version('v3.0b2'), '3.0b2')
-- self.assertEquals(check_nsis_version('v3.0rc1'), '3.0rc1')
-- self.assertEquals(check_nsis_version('v3.0'), '3.0')
-- self.assertEquals(check_nsis_version('v3.0-2'), '3.0')
-- self.assertEquals(check_nsis_version('v3.0.1'), '3.0')
-- self.assertEquals(check_nsis_version('v3.1'), '3.1')
-+ self.assertEqual(check_nsis_version('v3.0b1'), '3.0b1')
-+ self.assertEqual(check_nsis_version('v3.0b2'), '3.0b2')
-+ self.assertEqual(check_nsis_version('v3.0rc1'), '3.0rc1')
-+ self.assertEqual(check_nsis_version('v3.0'), '3.0')
-+ self.assertEqual(check_nsis_version('v3.0-2'), '3.0')
-+ self.assertEqual(check_nsis_version('v3.0.1'), '3.0')
-+ self.assertEqual(check_nsis_version('v3.1'), '3.1')
-
-
- if __name__ == '__main__':
-diff --git a/python/mozbuild/mozbuild/test/configure/test_options.py b/python/mozbuild/mozbuild/test/configure/test_options.py
-index 9defccb2c..330ce3b1f 100644
---- a/python/mozbuild/mozbuild/test/configure/test_options.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_options.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import unittest
-
-@@ -28,139 +28,139 @@ class Option(Option):
- class TestOption(unittest.TestCase):
- def test_option(self):
- option = Option('--option')
-- self.assertEquals(option.prefix, '')
-- self.assertEquals(option.name, 'option')
-- self.assertEquals(option.env, None)
-+ self.assertEqual(option.prefix, '')
-+ self.assertEqual(option.name, 'option')
-+ self.assertEqual(option.env, None)
- self.assertFalse(option.default)
-
- option = Option('--enable-option')
-- self.assertEquals(option.prefix, 'enable')
-- self.assertEquals(option.name, 'option')
-- self.assertEquals(option.env, None)
-+ self.assertEqual(option.prefix, 'enable')
-+ self.assertEqual(option.name, 'option')
-+ self.assertEqual(option.env, None)
- self.assertFalse(option.default)
-
- option = Option('--disable-option')
-- self.assertEquals(option.prefix, 'disable')
-- self.assertEquals(option.name, 'option')
-- self.assertEquals(option.env, None)
-+ self.assertEqual(option.prefix, 'disable')
-+ self.assertEqual(option.name, 'option')
-+ self.assertEqual(option.env, None)
- self.assertTrue(option.default)
-
- option = Option('--with-option')
-- self.assertEquals(option.prefix, 'with')
-- self.assertEquals(option.name, 'option')
-- self.assertEquals(option.env, None)
-+ self.assertEqual(option.prefix, 'with')
-+ self.assertEqual(option.name, 'option')
-+ self.assertEqual(option.env, None)
- self.assertFalse(option.default)
-
- option = Option('--without-option')
-- self.assertEquals(option.prefix, 'without')
-- self.assertEquals(option.name, 'option')
-- self.assertEquals(option.env, None)
-+ self.assertEqual(option.prefix, 'without')
-+ self.assertEqual(option.name, 'option')
-+ self.assertEqual(option.env, None)
- self.assertTrue(option.default)
-
- option = Option('--without-option-foo', env='MOZ_OPTION')
-- self.assertEquals(option.env, 'MOZ_OPTION')
-+ self.assertEqual(option.env, 'MOZ_OPTION')
-
- option = Option(env='MOZ_OPTION')
-- self.assertEquals(option.prefix, '')
-- self.assertEquals(option.name, None)
-- self.assertEquals(option.env, 'MOZ_OPTION')
-+ self.assertEqual(option.prefix, '')
-+ self.assertEqual(option.name, None)
-+ self.assertEqual(option.env, 'MOZ_OPTION')
- self.assertFalse(option.default)
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs=0, default=('a',))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs=1, default=())
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'default must be a bool, a string or a tuple of strings')
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs=1, default=True)
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs=1, default=('a', 'b'))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs=2, default=())
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'default must be a bool, a string or a tuple of strings')
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs=2, default=True)
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs=2, default=('a',))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs='?', default=('a', 'b'))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs='+', default=())
-- self.assertEquals(
-+ self.assertEqual(
- e.exception.message,
- 'default must be a bool, a string or a tuple of strings')
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs='+', default=True)
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- # --disable options with a nargs value that requires at least one
- # argument need to be given a default.
- with self.assertRaises(InvalidOptionError) as e:
- Option('--disable-option', nargs=1)
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--disable-option', nargs='+')
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- # Test nargs inference from default value
- option = Option('--with-foo', default=True)
-- self.assertEquals(option.nargs, 0)
-+ self.assertEqual(option.nargs, 0)
-
- option = Option('--with-foo', default=False)
-- self.assertEquals(option.nargs, 0)
-+ self.assertEqual(option.nargs, 0)
-
- option = Option('--with-foo', default='a')
-- self.assertEquals(option.nargs, '?')
-+ self.assertEqual(option.nargs, '?')
-
- option = Option('--with-foo', default=('a',))
-- self.assertEquals(option.nargs, '?')
-+ self.assertEqual(option.nargs, '?')
-
- option = Option('--with-foo', default=('a', 'b'))
-- self.assertEquals(option.nargs, '*')
-+ self.assertEqual(option.nargs, '*')
-
- option = Option(env='FOO', default=True)
-- self.assertEquals(option.nargs, 0)
-+ self.assertEqual(option.nargs, 0)
-
- option = Option(env='FOO', default=False)
-- self.assertEquals(option.nargs, 0)
-+ self.assertEqual(option.nargs, 0)
-
- option = Option(env='FOO', default='a')
-- self.assertEquals(option.nargs, '?')
-+ self.assertEqual(option.nargs, '?')
-
- option = Option(env='FOO', default=('a',))
-- self.assertEquals(option.nargs, '?')
-+ self.assertEqual(option.nargs, '?')
-
- option = Option(env='FOO', default=('a', 'b'))
-- self.assertEquals(option.nargs, '*')
-+ self.assertEqual(option.nargs, '*')
-
- def test_option_option(self):
- for option in (
-@@ -170,70 +170,70 @@ class TestOption(unittest.TestCase):
- '--with-option',
- '--without-option',
- ):
-- self.assertEquals(Option(option).option, option)
-- self.assertEquals(Option(option, env='FOO').option, option)
-+ self.assertEqual(Option(option).option, option)
-+ self.assertEqual(Option(option, env='FOO').option, option)
-
- opt = Option(option, default=False)
-- self.assertEquals(opt.option,
-+ self.assertEqual(opt.option,
- option.replace('-disable-', '-enable-')
- .replace('-without-', '-with-'))
-
- opt = Option(option, default=True)
-- self.assertEquals(opt.option,
-+ self.assertEqual(opt.option,
- option.replace('-enable-', '-disable-')
- .replace('-with-', '-without-'))
-
-- self.assertEquals(Option(env='FOO').option, 'FOO')
-+ self.assertEqual(Option(env='FOO').option, 'FOO')
-
- def test_option_choices(self):
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs=3, choices=('a', 'b'))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'Not enough `choices` for `nargs`')
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--without-option', nargs=1, choices=('a', 'b'))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'A `default` must be given along with `choices`')
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--without-option', nargs='+', choices=('a', 'b'))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'A `default` must be given along with `choices`')
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--without-option', default='c', choices=('a', 'b'))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The `default` value must be one of 'a', 'b'")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--without-option', default=('a', 'c',), choices=('a', 'b'))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The `default` value must be one of 'a', 'b'")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--without-option', default=('c',), choices=('a', 'b'))
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The `default` value must be one of 'a', 'b'")
-
- option = Option('--with-option', nargs='+', choices=('a', 'b'))
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--with-option=c')
-- self.assertEquals(e.exception.message, "'c' is not one of 'a', 'b'")
-+ self.assertEqual(e.exception.message, "'c' is not one of 'a', 'b'")
-
- value = option.get_value('--with-option=b,a')
- self.assertTrue(value)
-- self.assertEquals(PositiveOptionValue(('b', 'a')), value)
-+ self.assertEqual(PositiveOptionValue(('b', 'a')), value)
-
- option = Option('--without-option', nargs='*', default='a',
- choices=('a', 'b'))
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--with-option=c')
-- self.assertEquals(e.exception.message, "'c' is not one of 'a', 'b'")
-+ self.assertEqual(e.exception.message, "'c' is not one of 'a', 'b'")
-
- value = option.get_value('--with-option=b,a')
- self.assertTrue(value)
-- self.assertEquals(PositiveOptionValue(('b', 'a')), value)
-+ self.assertEqual(PositiveOptionValue(('b', 'a')), value)
-
- # Test nargs inference from choices
- option = Option('--with-option', choices=('a', 'b'))
-@@ -244,37 +244,37 @@ class TestOption(unittest.TestCase):
- choices=('a', 'b', 'c', 'd'))
-
- value = option.get_value('--with-option=+d')
-- self.assertEquals(PositiveOptionValue(('b', 'c', 'd')), value)
-+ self.assertEqual(PositiveOptionValue(('b', 'c', 'd')), value)
-
- value = option.get_value('--with-option=-b')
-- self.assertEquals(PositiveOptionValue(('c',)), value)
-+ self.assertEqual(PositiveOptionValue(('c',)), value)
-
- value = option.get_value('--with-option=-b,+d')
-- self.assertEquals(PositiveOptionValue(('c','d')), value)
-+ self.assertEqual(PositiveOptionValue(('c','d')), value)
-
- # Adding something that is in the default is fine
- value = option.get_value('--with-option=+b')
-- self.assertEquals(PositiveOptionValue(('b', 'c')), value)
-+ self.assertEqual(PositiveOptionValue(('b', 'c')), value)
-
- # Removing something that is not in the default is fine, as long as it
- # is one of the choices
- value = option.get_value('--with-option=-a')
-- self.assertEquals(PositiveOptionValue(('b', 'c')), value)
-+ self.assertEqual(PositiveOptionValue(('b', 'c')), value)
-
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--with-option=-e')
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "'e' is not one of 'a', 'b', 'c', 'd'")
-
- # Other "not a choice" errors.
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--with-option=+e')
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "'e' is not one of 'a', 'b', 'c', 'd'")
-
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--with-option=e')
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "'e' is not one of 'a', 'b', 'c', 'd'")
-
- def test_option_value_compare(self):
-@@ -294,7 +294,7 @@ class TestOption(unittest.TestCase):
-
- # For usability reasons, we raise TypeError when attempting to compare
- # against a non-tuple.
-- with self.assertRaisesRegexp(TypeError, 'cannot compare a'):
-+ with self.assertRaisesRegex(TypeError, 'cannot compare a'):
- val == 'foo'
-
- # But we allow empty option values to compare otherwise we can't
-@@ -313,36 +313,36 @@ class TestOption(unittest.TestCase):
-
- def test_option_value_format(self):
- val = PositiveOptionValue()
-- self.assertEquals('--with-value', val.format('--with-value'))
-- self.assertEquals('--with-value', val.format('--without-value'))
-- self.assertEquals('--enable-value', val.format('--enable-value'))
-- self.assertEquals('--enable-value', val.format('--disable-value'))
-- self.assertEquals('--value', val.format('--value'))
-- self.assertEquals('VALUE=1', val.format('VALUE'))
-+ self.assertEqual('--with-value', val.format('--with-value'))
-+ self.assertEqual('--with-value', val.format('--without-value'))
-+ self.assertEqual('--enable-value', val.format('--enable-value'))
-+ self.assertEqual('--enable-value', val.format('--disable-value'))
-+ self.assertEqual('--value', val.format('--value'))
-+ self.assertEqual('VALUE=1', val.format('VALUE'))
-
- val = PositiveOptionValue(('a',))
-- self.assertEquals('--with-value=a', val.format('--with-value'))
-- self.assertEquals('--with-value=a', val.format('--without-value'))
-- self.assertEquals('--enable-value=a', val.format('--enable-value'))
-- self.assertEquals('--enable-value=a', val.format('--disable-value'))
-- self.assertEquals('--value=a', val.format('--value'))
-- self.assertEquals('VALUE=a', val.format('VALUE'))
-+ self.assertEqual('--with-value=a', val.format('--with-value'))
-+ self.assertEqual('--with-value=a', val.format('--without-value'))
-+ self.assertEqual('--enable-value=a', val.format('--enable-value'))
-+ self.assertEqual('--enable-value=a', val.format('--disable-value'))
-+ self.assertEqual('--value=a', val.format('--value'))
-+ self.assertEqual('VALUE=a', val.format('VALUE'))
-
- val = PositiveOptionValue(('a', 'b'))
-- self.assertEquals('--with-value=a,b', val.format('--with-value'))
-- self.assertEquals('--with-value=a,b', val.format('--without-value'))
-- self.assertEquals('--enable-value=a,b', val.format('--enable-value'))
-- self.assertEquals('--enable-value=a,b', val.format('--disable-value'))
-- self.assertEquals('--value=a,b', val.format('--value'))
-- self.assertEquals('VALUE=a,b', val.format('VALUE'))
-+ self.assertEqual('--with-value=a,b', val.format('--with-value'))
-+ self.assertEqual('--with-value=a,b', val.format('--without-value'))
-+ self.assertEqual('--enable-value=a,b', val.format('--enable-value'))
-+ self.assertEqual('--enable-value=a,b', val.format('--disable-value'))
-+ self.assertEqual('--value=a,b', val.format('--value'))
-+ self.assertEqual('VALUE=a,b', val.format('VALUE'))
-
- val = NegativeOptionValue()
-- self.assertEquals('--without-value', val.format('--with-value'))
-- self.assertEquals('--without-value', val.format('--without-value'))
-- self.assertEquals('--disable-value', val.format('--enable-value'))
-- self.assertEquals('--disable-value', val.format('--disable-value'))
-- self.assertEquals('', val.format('--value'))
-- self.assertEquals('VALUE=', val.format('VALUE'))
-+ self.assertEqual('--without-value', val.format('--with-value'))
-+ self.assertEqual('--without-value', val.format('--without-value'))
-+ self.assertEqual('--disable-value', val.format('--enable-value'))
-+ self.assertEqual('--disable-value', val.format('--disable-value'))
-+ self.assertEqual('', val.format('--value'))
-+ self.assertEqual('VALUE=', val.format('VALUE'))
-
- def test_option_value(self, name='option', nargs=0, default=None):
- disabled = name.startswith(('disable-', 'without-'))
-@@ -359,28 +359,28 @@ class TestOption(unittest.TestCase):
-
- if nargs in (0, '?', '*') or disabled:
- value = option.get_value('--%s' % name, 'option')
-- self.assertEquals(value, posOptionValue())
-- self.assertEquals(value.origin, 'option')
-+ self.assertEqual(value, posOptionValue())
-+ self.assertEqual(value.origin, 'option')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--%s' % name)
- if nargs == 1:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes 1 value' % name)
- elif nargs == '+':
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes 1 or more values' % name)
- else:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes 2 values' % name)
-
- value = option.get_value('')
-- self.assertEquals(value, defaultValue)
-- self.assertEquals(value.origin, 'default')
-+ self.assertEqual(value, defaultValue)
-+ self.assertEqual(value.origin, 'default')
-
- value = option.get_value(None)
-- self.assertEquals(value, defaultValue)
-- self.assertEquals(value.origin, 'default')
-+ self.assertEqual(value, defaultValue)
-+ self.assertEqual(value.origin, 'default')
-
- with self.assertRaises(AssertionError):
- value = option.get_value('MOZ_OPTION=', 'environment')
-@@ -393,47 +393,47 @@ class TestOption(unittest.TestCase):
-
- if nargs in (1, '?', '*', '+') and not disabled:
- value = option.get_value('--%s=' % name, 'option')
-- self.assertEquals(value, PositiveOptionValue(('',)))
-- self.assertEquals(value.origin, 'option')
-+ self.assertEqual(value, PositiveOptionValue(('',)))
-+ self.assertEqual(value.origin, 'option')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--%s=' % name)
- if disabled:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'Cannot pass a value to --%s' % name)
- else:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes %d values' % (name, nargs))
-
- if nargs in (1, '?', '*', '+') and not disabled:
- value = option.get_value('--%s=foo' % name, 'option')
-- self.assertEquals(value, PositiveOptionValue(('foo',)))
-- self.assertEquals(value.origin, 'option')
-+ self.assertEqual(value, PositiveOptionValue(('foo',)))
-+ self.assertEqual(value.origin, 'option')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--%s=foo' % name)
- if disabled:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'Cannot pass a value to --%s' % name)
- else:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes %d values' % (name, nargs))
-
- if nargs in (2, '*', '+') and not disabled:
- value = option.get_value('--%s=foo,bar' % name, 'option')
-- self.assertEquals(value, PositiveOptionValue(('foo', 'bar')))
-- self.assertEquals(value.origin, 'option')
-+ self.assertEqual(value, PositiveOptionValue(('foo', 'bar')))
-+ self.assertEqual(value.origin, 'option')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--%s=foo,bar' % name, 'option')
- if disabled:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'Cannot pass a value to --%s' % name)
- elif nargs == '?':
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes 0 or 1 values' % name)
- else:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes %d value%s'
- % (name, nargs, 's' if nargs != 1 else ''))
-
-@@ -441,59 +441,59 @@ class TestOption(unittest.TestCase):
- default=default)
- if nargs in (0, '?', '*') or disabled:
- value = option.get_value('--%s' % name, 'option')
-- self.assertEquals(value, posOptionValue())
-- self.assertEquals(value.origin, 'option')
-+ self.assertEqual(value, posOptionValue())
-+ self.assertEqual(value.origin, 'option')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--%s' % name)
- if disabled:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'Cannot pass a value to --%s' % name)
- elif nargs == '+':
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes 1 or more values' % name)
- else:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes %d value%s'
- % (name, nargs, 's' if nargs != 1 else ''))
-
- value = option.get_value('')
-- self.assertEquals(value, defaultValue)
-- self.assertEquals(value.origin, 'default')
-+ self.assertEqual(value, defaultValue)
-+ self.assertEqual(value.origin, 'default')
-
- value = option.get_value(None)
-- self.assertEquals(value, defaultValue)
-- self.assertEquals(value.origin, 'default')
-+ self.assertEqual(value, defaultValue)
-+ self.assertEqual(value.origin, 'default')
-
- value = option.get_value('MOZ_OPTION=', 'environment')
-- self.assertEquals(value, NegativeOptionValue())
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value, NegativeOptionValue())
-+ self.assertEqual(value.origin, 'environment')
-
- if nargs in (0, '?', '*'):
- value = option.get_value('MOZ_OPTION=1', 'environment')
-- self.assertEquals(value, PositiveOptionValue())
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value, PositiveOptionValue())
-+ self.assertEqual(value.origin, 'environment')
- elif nargs in (1, '+'):
- value = option.get_value('MOZ_OPTION=1', 'environment')
-- self.assertEquals(value, PositiveOptionValue(('1',)))
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value, PositiveOptionValue(('1',)))
-+ self.assertEqual(value.origin, 'environment')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('MOZ_OPTION=1', 'environment')
-- self.assertEquals(e.exception.message, 'MOZ_OPTION takes 2 values')
-+ self.assertEqual(e.exception.message, 'MOZ_OPTION takes 2 values')
-
- if nargs in (1, '?', '*', '+') and not disabled:
- value = option.get_value('--%s=' % name, 'option')
-- self.assertEquals(value, PositiveOptionValue(('',)))
-- self.assertEquals(value.origin, 'option')
-+ self.assertEqual(value, PositiveOptionValue(('',)))
-+ self.assertEqual(value.origin, 'option')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--%s=' % name, 'option')
- if disabled:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'Cannot pass a value to --%s' % name)
- else:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s takes %d values' % (name, nargs))
-
- with self.assertRaises(AssertionError):
-@@ -501,26 +501,26 @@ class TestOption(unittest.TestCase):
-
- if nargs in (1, '?', '*', '+'):
- value = option.get_value('MOZ_OPTION=foo', 'environment')
-- self.assertEquals(value, PositiveOptionValue(('foo',)))
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value, PositiveOptionValue(('foo',)))
-+ self.assertEqual(value.origin, 'environment')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('MOZ_OPTION=foo', 'environment')
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'MOZ_OPTION takes %d values' % nargs)
-
- if nargs in (2, '*', '+'):
- value = option.get_value('MOZ_OPTION=foo,bar', 'environment')
-- self.assertEquals(value, PositiveOptionValue(('foo', 'bar')))
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value, PositiveOptionValue(('foo', 'bar')))
-+ self.assertEqual(value.origin, 'environment')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('MOZ_OPTION=foo,bar', 'environment')
- if nargs == '?':
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'MOZ_OPTION takes 0 or 1 values')
- else:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'MOZ_OPTION takes %d value%s'
- % (nargs, 's' if nargs != 1 else ''))
-
-@@ -532,26 +532,26 @@ class TestOption(unittest.TestCase):
- env_option.get_value('--%s' % name)
-
- value = env_option.get_value('')
-- self.assertEquals(value, defaultValue)
-- self.assertEquals(value.origin, 'default')
-+ self.assertEqual(value, defaultValue)
-+ self.assertEqual(value.origin, 'default')
-
- value = env_option.get_value('MOZ_OPTION=', 'environment')
-- self.assertEquals(value, negOptionValue())
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value, negOptionValue())
-+ self.assertEqual(value.origin, 'environment')
-
- if nargs in (0, '?', '*'):
- value = env_option.get_value('MOZ_OPTION=1', 'environment')
-- self.assertEquals(value, posOptionValue())
-+ self.assertEqual(value, posOptionValue())
- self.assertTrue(value)
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value.origin, 'environment')
- elif nargs in (1, '+'):
- value = env_option.get_value('MOZ_OPTION=1', 'environment')
-- self.assertEquals(value, PositiveOptionValue(('1',)))
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value, PositiveOptionValue(('1',)))
-+ self.assertEqual(value.origin, 'environment')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- env_option.get_value('MOZ_OPTION=1', 'environment')
-- self.assertEquals(e.exception.message, 'MOZ_OPTION takes 2 values')
-+ self.assertEqual(e.exception.message, 'MOZ_OPTION takes 2 values')
-
- with self.assertRaises(AssertionError) as e:
- env_option.get_value('--%s' % name)
-@@ -561,26 +561,26 @@ class TestOption(unittest.TestCase):
-
- if nargs in (1, '?', '*', '+'):
- value = env_option.get_value('MOZ_OPTION=foo', 'environment')
-- self.assertEquals(value, PositiveOptionValue(('foo',)))
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value, PositiveOptionValue(('foo',)))
-+ self.assertEqual(value.origin, 'environment')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- env_option.get_value('MOZ_OPTION=foo', 'environment')
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'MOZ_OPTION takes %d values' % nargs)
-
- if nargs in (2, '*', '+'):
- value = env_option.get_value('MOZ_OPTION=foo,bar', 'environment')
-- self.assertEquals(value, PositiveOptionValue(('foo', 'bar')))
-- self.assertEquals(value.origin, 'environment')
-+ self.assertEqual(value, PositiveOptionValue(('foo', 'bar')))
-+ self.assertEqual(value.origin, 'environment')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- env_option.get_value('MOZ_OPTION=foo,bar', 'environment')
- if nargs == '?':
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'MOZ_OPTION takes 0 or 1 values')
- else:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- 'MOZ_OPTION takes %d value%s'
- % (nargs, 's' if nargs != 1 else ''))
-
-@@ -592,28 +592,28 @@ class TestOption(unittest.TestCase):
- default=default)
-
- value = option.get_value('--%s-option' % disable, 'option')
-- self.assertEquals(value, NegativeOptionValue())
-- self.assertEquals(value.origin, 'option')
-+ self.assertEqual(value, NegativeOptionValue())
-+ self.assertEqual(value.origin, 'option')
-
- option = self.test_option_value('%s-option' % disable, nargs=nargs,
- default=default)
-
- if nargs in (0, '?', '*'):
- value = option.get_value('--%s-option' % enable, 'option')
-- self.assertEquals(value, PositiveOptionValue())
-- self.assertEquals(value.origin, 'option')
-+ self.assertEqual(value, PositiveOptionValue())
-+ self.assertEqual(value.origin, 'option')
- else:
- with self.assertRaises(InvalidOptionError) as e:
- option.get_value('--%s-option' % enable, 'option')
- if nargs == 1:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s-option takes 1 value' % enable)
- elif nargs == '+':
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s-option takes 1 or more values'
- % enable)
- else:
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- '--%s-option takes 2 values' % enable)
-
- def test_option_value_with(self):
-@@ -622,12 +622,12 @@ class TestOption(unittest.TestCase):
- def test_option_value_invalid_nargs(self):
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs='foo')
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "nargs must be a positive integer, '?', '*' or '+'")
-
- with self.assertRaises(InvalidOptionError) as e:
- Option('--option', nargs=-2)
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "nargs must be a positive integer, '?', '*' or '+'")
-
- def test_option_value_nargs_1(self):
-@@ -638,7 +638,7 @@ class TestOption(unittest.TestCase):
- # A default is required
- with self.assertRaises(InvalidOptionError) as e:
- Option('--disable-option', nargs=1)
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- def test_option_value_nargs_2(self):
-@@ -649,7 +649,7 @@ class TestOption(unittest.TestCase):
- # A default is required
- with self.assertRaises(InvalidOptionError) as e:
- Option('--disable-option', nargs=2)
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
- def test_option_value_nargs_0_or_1(self):
-@@ -676,7 +676,7 @@ class TestOption(unittest.TestCase):
- # A default is required
- with self.assertRaises(InvalidOptionError) as e:
- Option('--disable-option', nargs='+')
-- self.assertEquals(e.exception.message,
-+ self.assertEqual(e.exception.message,
- "The given `default` doesn't satisfy `nargs`")
-
-
-@@ -684,21 +684,21 @@ class TestCommandLineHelper(unittest.TestCase):
- def test_basic(self):
- helper = CommandLineHelper({}, ['cmd', '--foo', '--bar'])
-
-- self.assertEquals(['--foo', '--bar'], list(helper))
-+ self.assertEqual(['--foo', '--bar'], list(helper))
-
- helper.add('--enable-qux')
-
-- self.assertEquals(['--foo', '--bar', '--enable-qux'], list(helper))
-+ self.assertEqual(['--foo', '--bar', '--enable-qux'], list(helper))
-
- value, option = helper.handle(Option('--bar'))
-- self.assertEquals(['--foo', '--enable-qux'], list(helper))
-- self.assertEquals(PositiveOptionValue(), value)
-- self.assertEquals('--bar', option)
-+ self.assertEqual(['--foo', '--enable-qux'], list(helper))
-+ self.assertEqual(PositiveOptionValue(), value)
-+ self.assertEqual('--bar', option)
-
- value, option = helper.handle(Option('--baz'))
-- self.assertEquals(['--foo', '--enable-qux'], list(helper))
-- self.assertEquals(NegativeOptionValue(), value)
-- self.assertEquals(None, option)
-+ self.assertEqual(['--foo', '--enable-qux'], list(helper))
-+ self.assertEqual(NegativeOptionValue(), value)
-+ self.assertEqual(None, option)
-
- with self.assertRaises(AssertionError):
- CommandLineHelper({}, ['--foo', '--bar'])
-@@ -707,89 +707,89 @@ class TestCommandLineHelper(unittest.TestCase):
- foo = Option('--with-foo', nargs='*')
- helper = CommandLineHelper({}, ['cmd', '--with-foo=a,b'])
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b')), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('--with-foo=a,b', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b')), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('--with-foo=a,b', option)
-
- helper = CommandLineHelper({}, ['cmd', '--with-foo=a,b',
- '--without-foo'])
- value, option = helper.handle(foo)
-- self.assertEquals(NegativeOptionValue(), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('--without-foo', option)
-+ self.assertEqual(NegativeOptionValue(), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('--without-foo', option)
-
- helper = CommandLineHelper({}, ['cmd', '--without-foo',
- '--with-foo=a,b'])
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b')), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('--with-foo=a,b', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b')), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('--with-foo=a,b', option)
-
- foo = Option('--with-foo', env='FOO', nargs='*')
- helper = CommandLineHelper({'FOO': ''}, ['cmd', '--with-foo=a,b'])
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b')), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('--with-foo=a,b', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b')), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('--with-foo=a,b', option)
-
- helper = CommandLineHelper({'FOO': 'a,b'}, ['cmd', '--without-foo'])
- value, option = helper.handle(foo)
-- self.assertEquals(NegativeOptionValue(), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('--without-foo', option)
-+ self.assertEqual(NegativeOptionValue(), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('--without-foo', option)
-
- helper = CommandLineHelper({'FOO': ''}, ['cmd', '--with-bar=a,b'])
- value, option = helper.handle(foo)
-- self.assertEquals(NegativeOptionValue(), value)
-- self.assertEquals('environment', value.origin)
-- self.assertEquals('FOO=', option)
-+ self.assertEqual(NegativeOptionValue(), value)
-+ self.assertEqual('environment', value.origin)
-+ self.assertEqual('FOO=', option)
-
- helper = CommandLineHelper({'FOO': 'a,b'}, ['cmd', '--without-bar'])
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b')), value)
-- self.assertEquals('environment', value.origin)
-- self.assertEquals('FOO=a,b', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b')), value)
-+ self.assertEqual('environment', value.origin)
-+ self.assertEqual('FOO=a,b', option)
-
- helper = CommandLineHelper({}, ['cmd', '--with-foo=a,b', 'FOO='])
- value, option = helper.handle(foo)
-- self.assertEquals(NegativeOptionValue(), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('FOO=', option)
-+ self.assertEqual(NegativeOptionValue(), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('FOO=', option)
-
- helper = CommandLineHelper({}, ['cmd', '--without-foo', 'FOO=a,b'])
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b')), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('FOO=a,b', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b')), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('FOO=a,b', option)
-
- helper = CommandLineHelper({}, ['cmd', 'FOO=', '--with-foo=a,b'])
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b')), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('--with-foo=a,b', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b')), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('--with-foo=a,b', option)
-
- helper = CommandLineHelper({}, ['cmd', 'FOO=a,b', '--without-foo'])
- value, option = helper.handle(foo)
-- self.assertEquals(NegativeOptionValue(), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('--without-foo', option)
-+ self.assertEqual(NegativeOptionValue(), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('--without-foo', option)
-
- def test_extra_args(self):
- foo = Option('--with-foo', env='FOO', nargs='*')
- helper = CommandLineHelper({}, ['cmd'])
- helper.add('FOO=a,b,c', 'other-origin')
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b', 'c')), value)
-- self.assertEquals('other-origin', value.origin)
-- self.assertEquals('FOO=a,b,c', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b', 'c')), value)
-+ self.assertEqual('other-origin', value.origin)
-+ self.assertEqual('FOO=a,b,c', option)
-
- helper = CommandLineHelper({}, ['cmd'])
- helper.add('FOO=a,b,c', 'other-origin')
- helper.add('--with-foo=a,b,c', 'other-origin')
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b', 'c')), value)
-- self.assertEquals('other-origin', value.origin)
-- self.assertEquals('--with-foo=a,b,c', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b', 'c')), value)
-+ self.assertEqual('other-origin', value.origin)
-+ self.assertEqual('--with-foo=a,b,c', option)
-
- # Adding conflicting options is not allowed.
- helper = CommandLineHelper({}, ['cmd'])
-@@ -809,9 +809,9 @@ class TestCommandLineHelper(unittest.TestCase):
- # But adding the same is allowed.
- helper.add('FOO=a,b,c', 'other-origin')
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b', 'c')), value)
-- self.assertEquals('other-origin', value.origin)
-- self.assertEquals('FOO=a,b,c', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b', 'c')), value)
-+ self.assertEqual('other-origin', value.origin)
-+ self.assertEqual('FOO=a,b,c', option)
-
- # The same rule as above applies when using the option form vs. the
- # variable form. But we can't detect it when .add is called.
-@@ -837,9 +837,9 @@ class TestCommandLineHelper(unittest.TestCase):
- helper.add('FOO=a,b,c', 'other-origin')
- helper.add('--with-foo=a,b,c', 'other-origin')
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(('a', 'b', 'c')), value)
-- self.assertEquals('other-origin', value.origin)
-- self.assertEquals('--with-foo=a,b,c', option)
-+ self.assertEqual(PositiveOptionValue(('a', 'b', 'c')), value)
-+ self.assertEqual('other-origin', value.origin)
-+ self.assertEqual('--with-foo=a,b,c', option)
-
- # Conflicts are also not allowed against what is in the
- # environment/on the command line.
-@@ -869,19 +869,19 @@ class TestCommandLineHelper(unittest.TestCase):
- foo = Option('--foo',
- possible_origins=('command-line',))
- value, option = helper.handle(foo)
-- self.assertEquals(PositiveOptionValue(), value)
-- self.assertEquals('command-line', value.origin)
-- self.assertEquals('--foo', option)
-+ self.assertEqual(PositiveOptionValue(), value)
-+ self.assertEqual('command-line', value.origin)
-+ self.assertEqual('--foo', option)
-
- bar = Option('--bar',
- possible_origins=('mozconfig',))
-- with self.assertRaisesRegexp(InvalidOptionError,
-+ with self.assertRaisesRegex(InvalidOptionError,
- "--bar can not be set by command-line. Values are accepted from: mozconfig"):
- helper.handle(bar)
-
- baz = Option(env='BAZ',
- possible_origins=('implied',))
-- with self.assertRaisesRegexp(InvalidOptionError,
-+ with self.assertRaisesRegex(InvalidOptionError,
- "BAZ=1 can not be set by environment. Values are accepted from: implied"):
- helper.handle(baz)
-
-diff --git a/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py b/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py
-index cb7ff709e..c339a32bf 100755
---- a/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py
-@@ -2,12 +2,12 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import logging
- import os
-
--from StringIO import StringIO
-+from io import StringIO
-
- from mozunit import main
-
-@@ -366,9 +366,9 @@ class BaseToolchainTest(BaseConfigureTest):
- compiler = sandbox._value_for(sandbox[var])
- # Add var on both ends to make it clear which of the
- # variables is failing the test when that happens.
-- self.assertEquals((var, compiler), (var, result))
-+ self.assertEqual((var, compiler), (var, result))
- except SystemExit:
-- self.assertEquals((var, result),
-+ self.assertEqual((var, result),
- (var, self.out.getvalue().strip()))
- return
-
-@@ -407,7 +407,7 @@ class BaseToolchainTest(BaseConfigureTest):
- 'RUST_LIB_SUFFIX',
- 'OBJ_SUFFIX',
- ):
-- self.assertEquals('%s=%s' % (k, sandbox.get_config(k)),
-+ self.assertEqual('%s=%s' % (k, sandbox.get_config(k)),
- '%s=%s' % (k, library_name_info[k]))
-
-
-@@ -584,7 +584,7 @@ class LinuxToolchainTest(BaseToolchainTest):
- # We'll try gcc and clang, but since there is no gcc (gcc-x.y doesn't
- # count), find clang.
- paths = {
-- k: v for k, v in self.PATHS.iteritems()
-+ k: v for k, v in self.PATHS.items()
- if os.path.basename(k) not in ('gcc', 'g++')
- }
- self.do_toolchain_test(paths, {
-@@ -619,7 +619,7 @@ class LinuxToolchainTest(BaseToolchainTest):
- # Even if there are gcc-x.y or clang-x.y compilers available, we
- # don't try them. This could be considered something to improve.
- paths = {
-- k: v for k, v in self.PATHS.iteritems()
-+ k: v for k, v in self.PATHS.items()
- if os.path.basename(k) not in ('gcc', 'g++', 'clang', 'clang++')
- }
- self.do_toolchain_test(paths, {
-@@ -800,7 +800,7 @@ class OSXToolchainTest(BaseToolchainTest):
- def test_not_gcc(self):
- # We won't pick GCC if it's the only thing available.
- paths = {
-- k: v for k, v in self.PATHS.iteritems()
-+ k: v for k, v in self.PATHS.items()
- if os.path.basename(k) not in ('clang', 'clang++')
- }
- self.do_toolchain_test(paths, {
-@@ -976,7 +976,7 @@ class WindowsToolchainTest(BaseToolchainTest):
- def test_clang_cl(self):
- # We'll pick clang-cl if msvc can't be found.
- paths = {
-- k: v for k, v in self.PATHS.iteritems()
-+ k: v for k, v in self.PATHS.items()
- if os.path.basename(k) != 'cl'
- }
- self.do_toolchain_test(paths, {
-@@ -987,7 +987,7 @@ class WindowsToolchainTest(BaseToolchainTest):
- def test_gcc(self):
- # We'll pick GCC if msvc and clang-cl can't be found.
- paths = {
-- k: v for k, v in self.PATHS.iteritems()
-+ k: v for k, v in self.PATHS.items()
- if os.path.basename(k) not in ('cl', 'clang-cl')
- }
- self.do_toolchain_test(paths, {
-@@ -1006,7 +1006,7 @@ class WindowsToolchainTest(BaseToolchainTest):
- def test_clang(self):
- # We'll pick clang if nothing else is found.
- paths = {
-- k: v for k, v in self.PATHS.iteritems()
-+ k: v for k, v in self.PATHS.items()
- if os.path.basename(k) not in ('cl', 'clang-cl', 'gcc')
- }
- self.do_toolchain_test(paths, {
-diff --git a/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py b/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py
-index 8ec33a8b7..ba046ed12 100644
---- a/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import copy
- import re
-@@ -10,7 +10,7 @@ import types
- import unittest
-
- from fnmatch import fnmatch
--from StringIO import StringIO
-+from io import StringIO
- from textwrap import dedent
-
- from mozunit import (
-@@ -43,7 +43,7 @@ class CompilerPreprocessor(Preprocessor):
- # Hack around it enough that the configure tests work properly.
- context = self.context
- def normalize_numbers(value):
-- if isinstance(value, types.StringTypes):
-+ if isinstance(value, (str,)):
- if value[-1:] == 'L' and value[:-1].isdigit():
- value = int(value[:-1])
- return value
-@@ -53,7 +53,7 @@ class CompilerPreprocessor(Preprocessor):
- return self.HAS_FEATURE.sub(r'\1\2', expr)
- self.context = self.Context(
- (normalize_has_feature(k), normalize_numbers(v))
-- for k, v in context.iteritems()
-+ for k, v in context.items()
- )
- try:
- return Preprocessor.do_if(self, normalize_has_feature(expression),
-@@ -95,7 +95,7 @@ class TestCompilerPreprocessor(unittest.TestCase):
- input.name = 'foo'
- pp.do_include(input)
-
-- self.assertEquals(pp.out.getvalue(), '1 . 2 . c "D"')
-+ self.assertEqual(pp.out.getvalue(), '1 . 2 . c "D"')
-
- def test_condition(self):
- pp = CompilerPreprocessor({
-@@ -125,7 +125,7 @@ class TestCompilerPreprocessor(unittest.TestCase):
- input.name = 'foo'
- pp.do_include(input)
-
-- self.assertEquals('IFDEF_A\nIF_A\nIF_B\nIF_NOT_C\n', pp.out.getvalue())
-+ self.assertEqual('IFDEF_A\nIF_A\nIF_B\nIF_NOT_C\n', pp.out.getvalue())
-
-
- class FakeCompiler(dict):
-@@ -164,9 +164,9 @@ class FakeCompiler(dict):
- '''
- def __init__(self, *definitions):
- for definition in definitions:
-- if all(not isinstance(d, dict) for d in definition.itervalues()):
-+ if all(not isinstance(d, dict) for d in definition.values()):
- definition = {None: definition}
-- for key, value in definition.iteritems():
-+ for key, value in definition.items():
- self.setdefault(key, {}).update(value)
-
- def __call__(self, stdin, args):
-@@ -178,14 +178,14 @@ class FakeCompiler(dict):
- pp = CompilerPreprocessor(self[None])
-
- def apply_defn(defn):
-- for k, v in defn.iteritems():
-+ for k, v in defn.items():
- if v is False:
- if k in pp.context:
- del pp.context[k]
- else:
- pp.context[k] = v
-
-- for glob, defn in self.iteritems():
-+ for glob, defn in self.items():
- if glob and not glob.startswith('-') and fnmatch(file, glob):
- apply_defn(defn)
-
-@@ -216,7 +216,7 @@ class TestFakeCompiler(unittest.TestCase):
- 'A': '1',
- 'B': '2',
- })
-- self.assertEquals(compiler(None, ['-E', 'file']),
-+ self.assertEqual(compiler(None, ['-E', 'file']),
- (0, '1 2 C', ''))
-
- compiler = FakeCompiler({
-@@ -238,25 +238,25 @@ class TestFakeCompiler(unittest.TestCase):
- 'B': '42',
- },
- })
-- self.assertEquals(compiler(None, ['-E', 'file']),
-+ self.assertEqual(compiler(None, ['-E', 'file']),
- (0, '1 2 C', ''))
-- self.assertEquals(compiler(None, ['-E', '-foo', 'file']),
-+ self.assertEqual(compiler(None, ['-E', '-foo', 'file']),
- (0, '1 2 foo', ''))
-- self.assertEquals(compiler(None, ['-E', '-bar', 'file']),
-+ self.assertEqual(compiler(None, ['-E', '-bar', 'file']),
- (0, '1 bar bar', ''))
-- self.assertEquals(compiler(None, ['-E', '-qux', 'file']),
-+ self.assertEqual(compiler(None, ['-E', '-qux', 'file']),
- (0, '1 B C', ''))
-- self.assertEquals(compiler(None, ['-E', '-foo', '-bar', 'file']),
-+ self.assertEqual(compiler(None, ['-E', '-foo', '-bar', 'file']),
- (0, '1 bar bar', ''))
-- self.assertEquals(compiler(None, ['-E', '-bar', '-foo', 'file']),
-+ self.assertEqual(compiler(None, ['-E', '-bar', '-foo', 'file']),
- (0, '1 bar foo', ''))
-- self.assertEquals(compiler(None, ['-E', '-bar', '-qux', 'file']),
-+ self.assertEqual(compiler(None, ['-E', '-bar', '-qux', 'file']),
- (0, '1 B bar', ''))
-- self.assertEquals(compiler(None, ['-E', '-qux', '-bar', 'file']),
-+ self.assertEqual(compiler(None, ['-E', '-qux', '-bar', 'file']),
- (0, '1 bar bar', ''))
-- self.assertEquals(compiler(None, ['-E', 'file.c']),
-+ self.assertEqual(compiler(None, ['-E', 'file.c']),
- (0, '1 42 C', ''))
-- self.assertEquals(compiler(None, ['-E', '-bar', 'file.c']),
-+ self.assertEqual(compiler(None, ['-E', '-bar', 'file.c']),
- (0, '1 bar bar', ''))
-
- def test_multiple_definitions(self):
-@@ -267,7 +267,7 @@ class TestFakeCompiler(unittest.TestCase):
- 'C': 3,
- })
-
-- self.assertEquals(compiler, {
-+ self.assertEqual(compiler, {
- None: {
- 'A': 1,
- 'B': 2,
-@@ -282,7 +282,7 @@ class TestFakeCompiler(unittest.TestCase):
- 'C': 3,
- })
-
-- self.assertEquals(compiler, {
-+ self.assertEqual(compiler, {
- None: {
- 'A': 1,
- 'B': 4,
-@@ -302,7 +302,7 @@ class TestFakeCompiler(unittest.TestCase):
- },
- })
-
-- self.assertEquals(compiler, {
-+ self.assertEqual(compiler, {
- None: {
- 'A': 1,
- 'B': 4,
-@@ -330,7 +330,7 @@ class TestFakeCompiler(unittest.TestCase):
- },
- })
-
-- self.assertEquals(compiler, {
-+ self.assertEqual(compiler, {
- None: {
- 'A': 1,
- 'B': 2,
-@@ -370,7 +370,7 @@ class CompilerResult(ReadOnlyNamespace):
- def __add__(self, other):
- assert isinstance(other, dict)
- result = copy.deepcopy(self.__dict__)
-- for k, v in other.iteritems():
-+ for k, v in other.items():
- if k == 'flags':
- result.setdefault(k, []).extend(v)
- else:
-@@ -381,7 +381,7 @@ class CompilerResult(ReadOnlyNamespace):
- class TestCompilerResult(unittest.TestCase):
- def test_compiler_result(self):
- result = CompilerResult()
-- self.assertEquals(result.__dict__, {
-+ self.assertEqual(result.__dict__, {
- 'wrapper': [],
- 'compiler': mozpath.abspath(''),
- 'version': '',
-@@ -397,7 +397,7 @@ class TestCompilerResult(unittest.TestCase):
- language='C',
- flags=['-std=gnu99'],
- )
-- self.assertEquals(result.__dict__, {
-+ self.assertEqual(result.__dict__, {
- 'wrapper': [],
- 'compiler': mozpath.abspath('/usr/bin/gcc'),
- 'version': '4.2.1',
-@@ -407,7 +407,7 @@ class TestCompilerResult(unittest.TestCase):
- })
-
- result2 = result + {'flags': ['-m32']}
-- self.assertEquals(result2.__dict__, {
-+ self.assertEqual(result2.__dict__, {
- 'wrapper': [],
- 'compiler': mozpath.abspath('/usr/bin/gcc'),
- 'version': '4.2.1',
-@@ -416,14 +416,14 @@ class TestCompilerResult(unittest.TestCase):
- 'flags': ['-std=gnu99', '-m32'],
- })
- # Original flags are untouched.
-- self.assertEquals(result.flags, ['-std=gnu99'])
-+ self.assertEqual(result.flags, ['-std=gnu99'])
-
- result3 = result + {
- 'compiler': '/usr/bin/gcc-4.7',
- 'version': '4.7.3',
- 'flags': ['-m32'],
- }
-- self.assertEquals(result3.__dict__, {
-+ self.assertEqual(result3.__dict__, {
- 'wrapper': [],
- 'compiler': mozpath.abspath('/usr/bin/gcc-4.7'),
- 'version': '4.7.3',
-diff --git a/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py b/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py
-index ac35d745f..cdd8ece1b 100644
---- a/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import os
-
-diff --git a/python/mozbuild/mozbuild/test/configure/test_util.py b/python/mozbuild/mozbuild/test/configure/test_util.py
-index 9f9575fd0..c2db1a628 100644
---- a/python/mozbuild/mozbuild/test/configure/test_util.py
-+++ b/python/mozbuild/mozbuild/test/configure/test_util.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function, unicode_literals
-+
-
- import logging
- import os
-@@ -11,7 +11,7 @@ import textwrap
- import unittest
- import sys
-
--from StringIO import StringIO
-+from io import StringIO
-
- from mozunit import main
- from mozpack import path as mozpath
-@@ -434,11 +434,11 @@ class TestLogSubprocessOutput(unittest.TestCase):
- except SystemExit as e:
- status = e.code
-
-- self.assertEquals(status, 0)
-+ self.assertEqual(status, 0)
- quote_char = "'"
- if getpreferredencoding().lower() == 'utf-8':
- quote_char = '\u00B4'.encode('utf-8')
-- self.assertEquals(out.getvalue().strip(), quote_char)
-+ self.assertEqual(out.getvalue().strip(), quote_char)
-
-
- class TestVersion(unittest.TestCase):
-diff --git a/python/mozbuild/mozbuild/testing.py b/python/mozbuild/mozbuild/testing.py
-index 3229c3f77..82d250fab 100644
---- a/python/mozbuild/mozbuild/testing.py
-+++ b/python/mozbuild/mozbuild/testing.py
-@@ -2,9 +2,9 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, unicode_literals
-
--import cPickle as pickle
-+
-+import pickle as pickle
- import os
- import sys
-
-@@ -70,7 +70,7 @@ REFTEST_FLAVORS = ('crashtest', 'reftest')
- WEB_PLATFORM_TESTS_FLAVORS = ('web-platform-tests',)
-
- def all_test_flavors():
-- return ([v[0] for v in TEST_MANIFESTS.values()] +
-+ return ([v[0] for v in list(TEST_MANIFESTS.values())] +
- list(REFTEST_FLAVORS) +
- list(WEB_PLATFORM_TESTS_FLAVORS))
-
-@@ -210,7 +210,7 @@ def install_test_files(topsrcdir, topobjdir, tests_root, test_objs):
- only a few tests need to be run.
- """
- flavor_info = {flavor: (root, prefix, install)
-- for (flavor, root, prefix, install) in TEST_MANIFESTS.values()}
-+ for (flavor, root, prefix, install) in list(TEST_MANIFESTS.values())}
- objdir_dest = mozpath.join(topobjdir, tests_root)
-
- converter = SupportFilesConverter()
-@@ -292,7 +292,7 @@ def read_wpt_manifest(context, paths):
- paths_file = os.path.join(context.config.topsrcdir, "testing",
- "web-platform", "tests", "tools", "localpaths.py")
- _globals = {"__file__": paths_file}
-- execfile(paths_file, _globals)
-+ exec(compile(open(paths_file, "rb").read(), paths_file, 'exec'), _globals)
- import manifest as wptmanifest
- finally:
- sys.path = old_path
-diff --git a/python/mozbuild/mozbuild/util.py b/python/mozbuild/mozbuild/util.py
-index 4ea227dc0..79cb15713 100644
---- a/python/mozbuild/mozbuild/util.py
-+++ b/python/mozbuild/mozbuild/util.py
-@@ -5,7 +5,7 @@
- # This file contains miscellaneous utility functions that don't belong anywhere
- # in particular.
-
--from __future__ import absolute_import, unicode_literals, print_function
-+
-
- import argparse
- import collections
-@@ -21,6 +21,9 @@ import stat
- import sys
- import time
- import types
-+import pprint
-+import six
-+import subprocess
-
- from collections import (
- defaultdict,
-@@ -36,7 +39,7 @@ from io import (
- if sys.version_info[0] == 3:
- str_type = str
- else:
-- str_type = basestring
-+ str_type = str
-
- if sys.platform == 'win32':
- _kernel32 = ctypes.windll.kernel32
-@@ -78,7 +81,7 @@ def hash_file(path, hasher=None):
- return h.hexdigest()
-
-
--class EmptyValue(unicode):
-+class EmptyValue(str):
- """A dummy type that behaves like an empty string and sequence.
-
- This type exists in order to support
-@@ -92,7 +95,7 @@ class EmptyValue(unicode):
- class ReadOnlyNamespace(object):
- """A class for objects with immutable attributes set at initialization."""
- def __init__(self, **kwargs):
-- for k, v in kwargs.iteritems():
-+ for k, v in kwargs.items():
- super(ReadOnlyNamespace, self).__setattr__(k, v)
-
- def __delattr__(self, key):
-@@ -224,7 +227,7 @@ class FileAvoidWrite(BytesIO):
- self.mode = mode
-
- def write(self, buf):
-- if isinstance(buf, unicode):
-+ if isinstance(buf, str):
- buf = buf.encode('utf-8')
- BytesIO.write(self, buf)
-
-@@ -267,6 +270,10 @@ class FileAvoidWrite(BytesIO):
- if 'b' in self.mode:
- writemode += 'b'
- with open(self.name, writemode) as file:
-+ if 'b' in self.mode and isinstance(buf, str):
-+ buf = buf.encode('utf-8')
-+ elif 'b' not in self.mode and isinstance(buf, bytes):
-+ buf = buf.decode('utf-8')
- file.write(buf)
-
- if self._capture_diff:
-@@ -381,7 +388,7 @@ class ListMixin(object):
- def __add__(self, other):
- # Allow None and EmptyValue is a special case because it makes undefined
- # variable references in moz.build behave better.
-- other = [] if isinstance(other, (types.NoneType, EmptyValue)) else other
-+ other = [] if isinstance(other, (type(None), EmptyValue)) else other
- if not isinstance(other, list):
- raise ValueError('Only lists can be appended to lists.')
-
-@@ -390,7 +397,7 @@ class ListMixin(object):
- return new_list
-
- def __iadd__(self, other):
-- other = [] if isinstance(other, (types.NoneType, EmptyValue)) else other
-+ other = [] if isinstance(other, (type(None), EmptyValue)) else other
- if not isinstance(other, list):
- raise ValueError('Only lists can be appended to lists.')
-
-@@ -561,14 +568,14 @@ def FlagsFactory(flags):
- functions below.
- """
- assert isinstance(flags, dict)
-- assert all(isinstance(v, type) for v in flags.values())
-+ assert all(isinstance(v, type) for v in list(flags.values()))
-
- class Flags(object):
-- __slots__ = flags.keys()
-+ __slots__ = list(flags.keys())
- _flags = flags
-
- def update(self, **kwargs):
-- for k, v in kwargs.iteritems():
-+ for k, v in kwargs.items():
- setattr(self, k, v)
-
- def __getattr__(self, name):
-@@ -1006,8 +1013,6 @@ def TypedNamedTuple(name, fields):
- 'got %s, expected %s' % (fname,
- type(value), ftype))
-
-- super(TypedTuple, self).__init__(*args, **kwargs)
--
- TypedTuple._fields = fields
-
- return TypedTuple
-@@ -1099,14 +1104,14 @@ def group_unified_files(files, unified_prefix, unified_suffix,
- # issue. So we do a little dance to filter it out ourselves.
- dummy_fill_value = ("dummy",)
- def filter_out_dummy(iterable):
-- return itertools.ifilter(lambda x: x != dummy_fill_value,
-+ return filter(lambda x: x != dummy_fill_value,
- iterable)
-
- # From the itertools documentation, slightly modified:
- def grouper(n, iterable):
- "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
- args = [iter(iterable)] * n
-- return itertools.izip_longest(fillvalue=dummy_fill_value, *args)
-+ return itertools.zip_longest(fillvalue=dummy_fill_value, *args)
-
- for i, unified_group in enumerate(grouper(files_per_unified_file,
- files)):
-@@ -1123,7 +1128,7 @@ def pair(iterable):
- [(1,2), (3,4), (5,6)]
- '''
- i = iter(iterable)
-- return itertools.izip_longest(i, i)
-+ return itertools.zip_longest(i, i)
-
-
- VARIABLES_RE = re.compile('\$\((\w+)\)')
-@@ -1141,7 +1146,7 @@ def expand_variables(s, variables):
- value = variables.get(name)
- if not value:
- continue
-- if not isinstance(value, types.StringTypes):
-+ if not isinstance(value, (str,)):
- value = ' '.join(value)
- result += value
- return result
-@@ -1168,7 +1173,7 @@ class EnumStringComparisonError(Exception):
- pass
-
-
--class EnumString(unicode):
-+class EnumString(str):
- '''A string type that only can have a limited set of values, similarly to
- an Enum, and can only be compared against that set of values.
-
-@@ -1185,8 +1190,8 @@ class EnumString(unicode):
- def __eq__(self, other):
- if other not in self.POSSIBLE_VALUES:
- raise EnumStringComparisonError(
-- 'Can only compare with %s'
-- % ', '.join("'%s'" % v for v in self.POSSIBLE_VALUES))
-+ '%s is not in %s'
-+ % (other, ', '.join("'%s'" % v for v in self.POSSIBLE_VALUES)))
- return super(EnumString, self).__eq__(other)
-
- def __ne__(self, other):
-@@ -1204,14 +1209,14 @@ def _escape_char(c):
- # quoting could be done with either ' or ".
- if c == "'":
- return "\\'"
-- return unicode(c.encode('unicode_escape'))
-+ return str(c.encode('unicode_escape'))
-
- # Mapping table between raw characters below \x80 and their escaped
- # counterpart, when they differ
- _INDENTED_REPR_TABLE = {
- c: e
- for c, e in map(lambda x: (x, _escape_char(x)),
-- map(unichr, range(128)))
-+ map(chr, range(128)))
- if c != e
- }
- # Regexp matching all characters to escape.
-@@ -1219,7 +1224,33 @@ _INDENTED_REPR_RE = re.compile(
- '([' + ''.join(_INDENTED_REPR_TABLE.values()) + ']+)')
-
-
-+# Stolen from gecko master [1]
-+# [1] https://github.com/mozilla/gecko-dev/blob/4165a2e843c494bfb3e35d8a1fbf9c61209e3675/python/mozbuild/mozbuild/util.py#L1286
-+
-+# The default PrettyPrinter has some issues with UTF-8, so we need to override
-+# some stuff here.
-+class _PrettyPrinter(pprint.PrettyPrinter):
-+ def format(self, object, context, maxlevels, level):
-+ if not (isinstance(object, six.text_type) or
-+ isinstance(object, six.binary_type)):
-+ return super(_PrettyPrinter, self).format(
-+ object, context, maxlevels, level)
-+ # This is super hacky and weird, but the output of 'repr' actually
-+ # varies based on the default I/O encoding of the process, which isn't
-+ # necessarily utf-8. Instead we open a new shell and ask what the repr
-+ # WOULD be assuming the default encoding is utf-8. If you can come up
-+ # with a better way of doing this without simply re-implementing the
-+ # logic of "repr", please replace this.
-+ env = dict(os.environ)
-+ env['PYTHONIOENCODING'] = 'utf-8'
-+ ret = six.ensure_text(subprocess.check_output(
-+ [sys.executable], input='print(repr(%s))' % repr(object),
-+ universal_newlines=True, env=env, encoding='utf-8')).strip()
-+ return (ret, True, False)
-+
- def indented_repr(o, indent=4):
-+ return _PrettyPrinter(indent=indent).pformat(o)
-+
- '''Similar to repr(), but returns an indented representation of the object
-
- One notable difference with repr is that the returned representation
-@@ -1242,7 +1273,7 @@ def indented_repr(o, indent=4):
- elif isinstance(o, bytes):
- yield 'b'
- yield repr(o)
-- elif isinstance(o, unicode):
-+ elif isinstance(o, str):
- yield "'"
- # We want a readable string (non escaped unicode), but some
- # special characters need escaping (e.g. \n, \t, etc.)
-@@ -1272,11 +1303,11 @@ def encode(obj, encoding='utf-8'):
- if isinstance(obj, dict):
- return {
- encode(k, encoding): encode(v, encoding)
-- for k, v in obj.iteritems()
-+ for k, v in obj.items()
- }
- if isinstance(obj, bytes):
- return obj
-- if isinstance(obj, unicode):
-+ if isinstance(obj, str):
- return obj.encode(encoding)
- if isinstance(obj, Iterable):
- return [encode(i, encoding) for i in obj]
-diff --git a/python/mozbuild/mozbuild/virtualenv.py b/python/mozbuild/mozbuild/virtualenv.py
-index 38d06d71d..c67c046f3 100644
---- a/python/mozbuild/mozbuild/virtualenv.py
-+++ b/python/mozbuild/mozbuild/virtualenv.py
-@@ -531,9 +531,9 @@ def verify_python_version(log_handle):
-
- our = LooseVersion('%d.%d.%d' % (major, minor, micro))
-
-- if major != MINIMUM_PYTHON_MAJOR or our < MINIMUM_PYTHON_VERSION:
-- log_handle.write('Python %s or greater (but not Python 3) is '
-- 'required to build. ' % MINIMUM_PYTHON_VERSION)
-+ if our < MINIMUM_PYTHON_VERSION:
-+ log_handle.write('Python %s or greater is required to build. '
-+ % MINIMUM_PYTHON_VERSION)
- log_handle.write('You are running Python %s.\n' % our)
-
- if os.name in ('nt', 'ce'):
-diff --git a/python/mozbuild/mozpack/chrome/manifest.py b/python/mozbuild/mozpack/chrome/manifest.py
-index c91b99cf1..02eac0dbb 100644
---- a/python/mozbuild/mozpack/chrome/manifest.py
-+++ b/python/mozbuild/mozpack/chrome/manifest.py
-@@ -2,11 +2,11 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import
-+
-
- import re
- import os
--from urlparse import urlparse
-+from urllib.parse import urlparse
- import mozpack.path as mozpath
- from mozpack.chrome.flags import Flags
- from mozpack.errors import errors
-@@ -316,7 +316,7 @@ class ManifestContract(ManifestEntry):
- return self.serialize(self.contractID, self.cid)
-
- # All manifest classes by their type name.
--MANIFESTS_TYPES = dict([(c.type, c) for c in globals().values()
-+MANIFESTS_TYPES = dict([(c.type, c) for c in list(globals().values())
- if type(c) == type and issubclass(c, ManifestEntry)
- and hasattr(c, 'type') and c.type])
-
-diff --git a/python/mozbuild/mozpack/copier.py b/python/mozbuild/mozpack/copier.py
-index 1e521e52b..43ed9be4a 100644
---- a/python/mozbuild/mozpack/copier.py
-+++ b/python/mozbuild/mozpack/copier.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import
-+
-
- import os
- import stat
-@@ -113,7 +113,7 @@ class FileRegistry(object):
- '''
- Return all paths stored in the container, in the order they were added.
- '''
-- return self._files.keys()
-+ return list(self._files.keys())
-
- def __len__(self):
- '''
-@@ -146,7 +146,7 @@ class FileRegistry(object):
- for path, file in registry:
- (...)
- '''
-- return self._files.iteritems()
-+ return iter(self._files.items())
-
- def required_directories(self):
- '''
-@@ -155,7 +155,7 @@ class FileRegistry(object):
- unspecified (virtual) root directory (and do not include said root
- directory).
- '''
-- return set(k for k, v in self._required_directories.items() if v > 0)
-+ return set(k for k, v in list(self._required_directories.items()) if v > 0)
-
- def output_to_inputs_tree(self):
- '''
-@@ -295,7 +295,7 @@ class FileCopier(FileRegistry):
-
- Returns a FileCopyResult that details what changed.
- '''
-- assert isinstance(destination, basestring)
-+ assert isinstance(destination, str)
- assert not os.path.exists(destination) or os.path.isdir(destination)
-
- result = FileCopyResult()
-@@ -563,7 +563,7 @@ class Jarrer(FileRegistry, BaseFile):
- def exists(self):
- return self.deflater is not None
-
-- if isinstance(dest, basestring):
-+ if isinstance(dest, str):
- dest = Dest(dest)
- assert isinstance(dest, Dest)
-
-diff --git a/python/mozbuild/mozpack/files.py b/python/mozbuild/mozpack/files.py
-index 8ce353375..bf35e39b8 100644
---- a/python/mozbuild/mozpack/files.py
-+++ b/python/mozbuild/mozpack/files.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import
-+
-
- import errno
- import os
-@@ -57,7 +57,7 @@ else:
-
- def _copyfile(src, dest):
- # False indicates `dest` should be overwritten if it exists already.
-- if isinstance(src, unicode) and isinstance(dest, unicode):
-+ if isinstance(src, str) and isinstance(dest, str):
- _CopyFileW(src, dest, False)
- elif isinstance(src, str) and isinstance(dest, str):
- _CopyFileA(src, dest, False)
-@@ -164,7 +164,7 @@ class BaseFile(object):
- disabled when skip_if_older is False.
- Returns whether a copy was actually performed (True) or not (False).
- '''
-- if isinstance(dest, basestring):
-+ if isinstance(dest, str):
- dest = Dest(dest)
- else:
- assert isinstance(dest, Dest)
-@@ -278,11 +278,11 @@ class ExecutableFile(File):
- '''
- def copy(self, dest, skip_if_older=True):
- real_dest = dest
-- if not isinstance(dest, basestring):
-+ if not isinstance(dest, str):
- fd, dest = mkstemp()
- os.close(fd)
- os.remove(dest)
-- assert isinstance(dest, basestring)
-+ assert isinstance(dest, str)
- # If File.copy didn't actually copy because dest is newer, check the
- # file sizes. If dest is smaller, it means it is already stripped and
- # elfhacked, so we can skip.
-@@ -319,7 +319,7 @@ class AbsoluteSymlinkFile(File):
- File.__init__(self, path)
-
- def copy(self, dest, skip_if_older=True):
-- assert isinstance(dest, basestring)
-+ assert isinstance(dest, str)
-
- # The logic in this function is complicated by the fact that symlinks
- # aren't universally supported. So, where symlinks aren't supported, we
-@@ -410,7 +410,7 @@ class HardlinkFile(File):
- '''
-
- def copy(self, dest, skip_if_older=True):
-- assert isinstance(dest, basestring)
-+ assert isinstance(dest, str)
-
- if not hasattr(os, 'link'):
- return super(HardlinkFile, self).copy(
-@@ -471,7 +471,7 @@ class ExistingFile(BaseFile):
- self.required = required
-
- def copy(self, dest, skip_if_older=True):
-- if isinstance(dest, basestring):
-+ if isinstance(dest, str):
- dest = Dest(dest)
- else:
- assert isinstance(dest, Dest)
-@@ -517,7 +517,7 @@ class PreprocessedFile(BaseFile):
- '''
- Invokes the preprocessor to create the destination file.
- '''
-- if isinstance(dest, basestring):
-+ if isinstance(dest, str):
- dest = Dest(dest)
- else:
- assert isinstance(dest, Dest)
-@@ -657,7 +657,7 @@ class XPTFile(GeneratedFile):
- the individual XPTs to link.
- skip_if_older is ignored.
- '''
-- if isinstance(dest, basestring):
-+ if isinstance(dest, str):
- dest = Dest(dest)
- assert isinstance(dest, Dest)
-
-@@ -1108,7 +1108,7 @@ class ComposedFinder(BaseFinder):
- from mozpack.copier import FileRegistry
- self.files = FileRegistry()
-
-- for base, finder in sorted(finders.iteritems()):
-+ for base, finder in sorted(finders.items()):
- if self.files.contains(base):
- self.files.remove(base)
- for p, f in finder.find(''):
-diff --git a/python/mozbuild/mozpack/manifests.py b/python/mozbuild/mozpack/manifests.py
-index 27c66634b..f79b40086 100644
---- a/python/mozbuild/mozpack/manifests.py
-+++ b/python/mozbuild/mozpack/manifests.py
-@@ -2,8 +2,6 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, unicode_literals
--
- from contextlib import contextmanager
- import json
-
-@@ -116,7 +114,7 @@ class InstallManifest(object):
- self._source_files = set()
-
- if path or fileobj:
-- with _auto_fileobj(path, fileobj, 'rb') as fh:
-+ with _auto_fileobj(path, fileobj, 'r') as fh:
- self._source_files.add(fh.name)
- self._load_from_fileobj(fh)
-
-@@ -175,7 +173,7 @@ class InstallManifest(object):
- dest, content = fields[1:]
-
- self.add_content(
-- self._decode_field_entry(content).encode('utf-8'), dest)
-+ self._decode_field_entry(content), dest)
- continue
-
- # Don't fail for non-actionable items, allowing
-@@ -228,7 +226,7 @@ class InstallManifest(object):
-
- It is an error if both are specified.
- """
-- with _auto_fileobj(path, fileobj, 'wb') as fh:
-+ with _auto_fileobj(path, fileobj, 'w') as fh:
- fh.write('%d\n' % self.CURRENT_VERSION)
-
- for dest in sorted(self._dests):
-@@ -242,13 +240,11 @@ class InstallManifest(object):
- for path in paths:
- source = mozpath.join(base, path)
- parts = ['%d' % type, mozpath.join(dest, path), source]
-- fh.write('%s\n' % self.FIELD_SEPARATOR.join(
-- p.encode('utf-8') for p in parts))
-+ fh.write('%s\n' % self.FIELD_SEPARATOR.join(parts))
- else:
- parts = ['%d' % entry[0], dest]
- parts.extend(entry[1:])
-- fh.write('%s\n' % self.FIELD_SEPARATOR.join(
-- p.encode('utf-8') for p in parts))
-+ fh.write('%s\n' % self.FIELD_SEPARATOR.join(parts))
-
- def add_link(self, source, dest):
- """Add a link to this manifest.
-@@ -439,7 +435,7 @@ class InstallManifest(object):
- if install_type == self.CONTENT:
- # GeneratedFile expect the buffer interface, which the unicode
- # type doesn't have, so encode to a str.
-- content = self._decode_field_entry(entry[1]).encode('utf-8')
-+ content = self._decode_field_entry(entry[1])
- registry.add(dest, GeneratedFile(content))
- continue
-
-diff --git a/python/mozbuild/mozpack/mozjar.py b/python/mozbuild/mozpack/mozjar.py
-index a723fd2c0..0b04d233c 100644
---- a/python/mozbuild/mozpack/mozjar.py
-+++ b/python/mozbuild/mozpack/mozjar.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this
- # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import
-+
-
- from io import BytesIO
- import struct
-@@ -14,9 +14,10 @@ from zipfile import (
- ZIP_DEFLATED,
- )
- from collections import OrderedDict
--from urlparse import urlparse, ParseResult
-+from urllib.parse import urlparse, ParseResult
- import mozpack.path as mozpath
- from mozbuild.util import memoize
-+from functools import reduce
-
-
- JAR_STORED = ZIP_STORED
-@@ -72,7 +73,7 @@ class JarStruct(object):
- an instance with empty fields.
- '''
- assert self.MAGIC and isinstance(self.STRUCT, OrderedDict)
-- self.size_fields = set(t for t in self.STRUCT.itervalues()
-+ self.size_fields = set(t for t in self.STRUCT.values()
- if not t in JarStruct.TYPE_MAPPING)
- self._values = {}
- if data:
-@@ -94,7 +95,7 @@ class JarStruct(object):
- # For all fields used as other fields sizes, keep track of their value
- # separately.
- sizes = dict((t, 0) for t in self.size_fields)
-- for name, t in self.STRUCT.iteritems():
-+ for name, t in self.STRUCT.items():
- if t in JarStruct.TYPE_MAPPING:
- value, size = JarStruct.get_data(t, data[offset:])
- else:
-@@ -113,7 +114,7 @@ class JarStruct(object):
- Initialize an instance with empty fields.
- '''
- self.signature = self.MAGIC
-- for name, t in self.STRUCT.iteritems():
-+ for name, t in self.STRUCT.items():
- if name in self.size_fields:
- continue
- self._values[name] = 0 if t in JarStruct.TYPE_MAPPING else ''
-@@ -138,9 +139,9 @@ class JarStruct(object):
- from self.STRUCT.
- '''
- serialized = struct.pack('<I', self.signature)
-- sizes = dict((t, name) for name, t in self.STRUCT.iteritems()
-+ sizes = dict((t, name) for name, t in self.STRUCT.items()
- if not t in JarStruct.TYPE_MAPPING)
-- for name, t in self.STRUCT.iteritems():
-+ for name, t in self.STRUCT.items():
- if t in JarStruct.TYPE_MAPPING:
- format, size = JarStruct.TYPE_MAPPING[t]
- if name in sizes:
-@@ -159,7 +160,7 @@ class JarStruct(object):
- variable length fields.
- '''
- size = JarStruct.TYPE_MAPPING['uint32'][1]
-- for name, type in self.STRUCT.iteritems():
-+ for name, type in self.STRUCT.items():
- if type in JarStruct.TYPE_MAPPING:
- size += JarStruct.TYPE_MAPPING[type][1]
- else:
-@@ -180,7 +181,7 @@ class JarStruct(object):
- return key in self._values
-
- def __iter__(self):
-- return self._values.iteritems()
-+ return iter(self._values.items())
-
- def __repr__(self):
- return "<%s %s>" % (self.__class__.__name__,
-@@ -374,7 +375,7 @@ class JarReader(object):
- entries = self.entries
- if not entries:
- return JAR_STORED
-- return max(f['compression'] for f in entries.itervalues())
-+ return max(f['compression'] for f in entries.values())
-
- @property
- def entries(self):
-@@ -390,7 +391,7 @@ class JarReader(object):
- preload = JarStruct.get_data('uint32', self._data)[0]
- entries = OrderedDict()
- offset = self._cdir_end['cdir_offset']
-- for e in xrange(self._cdir_end['cdir_entries']):
-+ for e in range(self._cdir_end['cdir_entries']):
- entry = JarCdirEntry(self._data[offset:])
- offset += entry.size
- # Creator host system. 0 is MSDOS, 3 is Unix
-@@ -452,7 +453,7 @@ class JarReader(object):
- for file in jarReader:
- ...
- '''
-- for entry in self.entries.itervalues():
-+ for entry in self.entries.values():
- yield self._getreader(entry)
-
- def __getitem__(self, name):
-@@ -547,7 +548,7 @@ class JarWriter(object):
- headers = {}
- preload_size = 0
- # Prepare central directory entries
-- for entry, content in self._contents.itervalues():
-+ for entry, content in self._contents.values():
- header = JarLocalFileHeader()
- for name in entry.STRUCT:
- if name in header:
-@@ -562,7 +563,7 @@ class JarWriter(object):
- end['disk_entries'] = len(self._contents)
- end['cdir_entries'] = end['disk_entries']
- end['cdir_size'] = reduce(lambda x, y: x + y[0].size,
-- self._contents.values(), 0)
-+ list(self._contents.values()), 0)
- # On optimized archives, store the preloaded size and the central
- # directory entries, followed by the first end of central directory.
- if self._optimize:
-@@ -571,18 +572,18 @@ class JarWriter(object):
- if preload_size:
- preload_size += offset
- self._data.write(struct.pack('<I', preload_size))
-- for entry, _ in self._contents.itervalues():
-+ for entry, _ in self._contents.values():
- entry['offset'] += offset
- self._data.write(entry.serialize())
- self._data.write(end.serialize())
- # Store local file entries followed by compressed data
-- for entry, content in self._contents.itervalues():
-+ for entry, content in self._contents.values():
- self._data.write(headers[entry].serialize())
- self._data.write(content)
- # On non optimized archives, store the central directory entries.
- if not self._optimize:
- end['cdir_offset'] = offset
-- for entry, _ in self._contents.itervalues():
-+ for entry, _ in self._contents.values():
- self._data.write(entry.serialize())
- # Store the end of central directory.
- self._data.write(end.serialize())
-@@ -622,7 +623,7 @@ class JarWriter(object):
- deflater = data
- else:
- deflater = Deflater(compress, compress_level=self._compress_level)
-- if isinstance(data, basestring):
-+ if isinstance(data, str):
- deflater.write(data)
- elif hasattr(data, 'read'):
- if hasattr(data, 'seek'):
-diff --git a/testing/mozbase/manifestparser/manifestparser/ini.py b/testing/mozbase/manifestparser/manifestparser/ini.py
-index e5ba249c1..c141a18b7 100644
---- a/testing/mozbase/manifestparser/manifestparser/ini.py
-+++ b/testing/mozbase/manifestparser/manifestparser/ini.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this file,
- # You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import
-+
-
- import os
- import sys
-@@ -12,7 +12,7 @@ __all__ = ['read_ini', 'combine_fields']
-
- class IniParseError(Exception):
- def __init__(self, fp, linenum, msg):
-- if isinstance(fp, basestring):
-+ if isinstance(fp, str):
- path = fp
- elif hasattr(fp, 'name'):
- path = fp.name
-@@ -43,12 +43,15 @@ def read_ini(fp, variables=None, default='DEFAULT', defaults_only=False,
- sections = []
- key = value = None
- section_names = set()
-- if isinstance(fp, basestring):
-+ if isinstance(fp, str):
- fp = file(fp)
-
- # read the lines
- for (linenum, line) in enumerate(fp.read().splitlines(), start=1):
-
-+ if isinstance(line, bytes):
-+ line = line.decode('utf-8')
-+
- stripped = line.strip()
-
- # ignore blank lines
-@@ -66,7 +69,7 @@ def read_ini(fp, variables=None, default='DEFAULT', defaults_only=False,
- inline_prefixes = {p: -1 for p in comments}
- while comment_start == sys.maxsize and inline_prefixes:
- next_prefixes = {}
-- for prefix, index in inline_prefixes.items():
-+ for prefix, index in list(inline_prefixes.items()):
- index = line.find(prefix, index+1)
- if index == -1:
- continue
-@@ -163,7 +166,7 @@ def combine_fields(global_vars, local_vars):
- 'support-files': '%s %s',
- }
- final_mapping = global_vars.copy()
-- for field_name, value in local_vars.items():
-+ for field_name, value in list(local_vars.items()):
- if field_name not in field_patterns or field_name not in global_vars:
- final_mapping[field_name] = value
- continue
-diff --git a/testing/mozbase/manifestparser/manifestparser/manifestparser.py b/testing/mozbase/manifestparser/manifestparser/manifestparser.py
-index 921369fd2..5b2f4c453 100755
---- a/testing/mozbase/manifestparser/manifestparser/manifestparser.py
-+++ b/testing/mozbase/manifestparser/manifestparser/manifestparser.py
-@@ -2,9 +2,9 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this file,
- # You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function
-
--from StringIO import StringIO
-+
-+from io import StringIO
- import json
- import fnmatch
- import os
-@@ -23,7 +23,7 @@ from .filters import (
- __all__ = ['ManifestParser', 'TestManifest', 'convert']
-
- relpath = os.path.relpath
--string = (basestring,)
-+string = (str,)
-
-
- # path normalization
-@@ -195,7 +195,7 @@ class ManifestParser(object):
-
- # otherwise an item
- # apply ancestor defaults, while maintaining current file priority
-- data = dict(self._ancestor_defaults.items() + data.items())
-+ data = dict(list(self._ancestor_defaults.items()) + list(data.items()))
-
- test = data
- test['name'] = section
-@@ -323,19 +323,19 @@ class ManifestParser(object):
- # make some check functions
- if inverse:
- def has_tags(test):
-- return not tags.intersection(test.keys())
-+ return not tags.intersection(list(test.keys()))
-
- def dict_query(test):
-- for key, value in kwargs.items():
-+ for key, value in list(kwargs.items()):
- if test.get(key) == value:
- return False
- return True
- else:
- def has_tags(test):
-- return tags.issubset(test.keys())
-+ return tags.issubset(list(test.keys()))
-
- def dict_query(test):
-- for key, value in kwargs.items():
-+ for key, value in list(kwargs.items()):
- if test.get(key) != value:
- return False
- return True
-@@ -359,7 +359,7 @@ class ManifestParser(object):
- if tests is None:
- manifests = []
- # Make sure to return all the manifests, even ones without tests.
-- for manifest in self.manifest_defaults.keys():
-+ for manifest in list(self.manifest_defaults.keys()):
- if isinstance(manifest, tuple):
- parentmanifest, manifest = manifest
- if manifest not in manifests:
-@@ -409,7 +409,7 @@ class ManifestParser(object):
- """
-
- files = set([])
-- if isinstance(directories, basestring):
-+ if isinstance(directories, str):
- directories = [directories]
-
- # get files in directories
-@@ -476,7 +476,7 @@ class ManifestParser(object):
- print('[DEFAULT]', file=fp)
- for tag in global_tags:
- print('%s =' % tag, file=fp)
-- for key, value in global_kwargs.items():
-+ for key, value in list(global_kwargs.items()):
- print('%s = %s' % (key, value), file=fp)
- print(file=fp)
-
-@@ -602,7 +602,7 @@ class ManifestParser(object):
- internal function to import directories
- """
-
-- if isinstance(pattern, basestring):
-+ if isinstance(pattern, str):
- patterns = [pattern]
- else:
- patterns = pattern
-diff --git a/testing/mozbase/mozinfo/mozinfo/mozinfo.py b/testing/mozbase/mozinfo/mozinfo/mozinfo.py
-index 90c187568..6c5a1f322 100755
---- a/testing/mozbase/mozinfo/mozinfo/mozinfo.py
-+++ b/testing/mozbase/mozinfo/mozinfo/mozinfo.py
-@@ -8,7 +8,7 @@
- # linux) to the information; I certainly wouldn't want anyone parsing this
- # information and having behaviour depend on it
-
--from __future__ import absolute_import, print_function
-+
-
- import os
- import platform
-@@ -24,7 +24,7 @@ _os = os
- class unknown(object):
- """marker class for unknown information"""
-
-- def __nonzero__(self):
-+ def __bool__(self):
- return False
-
- def __str__(self):
-@@ -96,20 +96,17 @@ elif system.startswith(('MINGW', 'MSYS_NT')):
- info['os'] = 'win'
- os_version = version = unknown
- elif system == "Linux":
-- if hasattr(platform, "linux_distribution"):
-- (distro, os_version, codename) = platform.linux_distribution()
-- else:
-- (distro, os_version, codename) = platform.dist()
- if not processor:
- processor = machine
-- version = "%s %s" % (distro, os_version)
-+
-+ distro = 'OE/Yocto'
-+ os_version = version = unknown
-
- # Bug in Python 2's `platform` library:
- # It will return a triple of empty strings if the distribution is not supported.
- # It works on Python 3. If we don't have an OS version,
- # the unit tests fail to run.
- if not distro and not os_version and not codename:
-- distro = 'lfs'
- version = release
- os_version = release
-
-@@ -131,7 +128,7 @@ else:
- os_version = version = unknown
-
- info['version'] = version
--info['os_version'] = StringVersion(os_version)
-+info['os_version'] = version
-
- # processor type and bits
- if processor in ["i386", "i686"]:
-@@ -188,12 +185,7 @@ def update(new_info):
- to a json file containing the new info.
- """
-
-- PY3 = sys.version_info[0] == 3
-- if PY3:
-- string_types = str,
-- else:
-- string_types = basestring,
-- if isinstance(new_info, string_types):
-+ if isinstance(new_info, str):
- # lazy import
- import mozfile
- import json
-@@ -295,7 +287,7 @@ def main(args=None):
-
- # print out choices if requested
- flag = False
-- for key, value in options.__dict__.items():
-+ for key, value in list(options.__dict__.items()):
- if value is True:
- print('%s choices: %s' % (key, ' '.join([str(choice)
- for choice in choices[key]])))
-@@ -304,7 +296,7 @@ def main(args=None):
- return
-
- # otherwise, print out all info
-- for key, value in info.items():
-+ for key, value in list(info.items()):
- print('%s: %s' % (key, value))
-
-
-diff --git a/testing/mozbase/mozprocess/mozprocess/processhandler.py b/testing/mozbase/mozprocess/mozprocess/processhandler.py
-index 3efb650b7..2575a4702 100644
---- a/testing/mozbase/mozprocess/mozprocess/processhandler.py
-+++ b/testing/mozbase/mozprocess/mozprocess/processhandler.py
-@@ -2,7 +2,7 @@
- # License, v. 2.0. If a copy of the MPL was not distributed with this file,
- # You can obtain one at http://mozilla.org/MPL/2.0/.
-
--from __future__ import absolute_import, print_function
-+
-
- import errno
- import os
-@@ -13,7 +13,7 @@ import threading
- import time
- import traceback
-
--from Queue import Queue, Empty
-+from queue import Queue, Empty
- from datetime import datetime
-
-
-@@ -124,14 +124,14 @@ class ProcessHandlerMixin(object):
- thread = threading.current_thread().name
- print("DBG::MOZPROC PID:{} ({}) | {}".format(self.pid, thread, msg))
-
-- def __del__(self, _maxint=sys.maxint):
-+ def __del__(self, _maxint=sys.maxsize):
- if isWin:
- handle = getattr(self, '_handle', None)
- if handle:
- if hasattr(self, '_internal_poll'):
- self._internal_poll(_deadstate=_maxint)
- else:
-- self.poll(_deadstate=sys.maxint)
-+ self.poll(_deadstate=sys.maxsize)
- if handle or self._job or self._io_port:
- self._cleanup()
- else:
-@@ -243,7 +243,7 @@ class ProcessHandlerMixin(object):
- p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite) = args_tuple
-- if not isinstance(args, basestring):
-+ if not isinstance(args, str):
- args = subprocess.list2cmdline(args)
-
- # Always or in the create new process group
-diff --git a/third_party/python/which/which.py b/third_party/python/which/which.py
-index 9c7d10835..f02b2616e 100644
---- a/third_party/python/which/which.py
-+++ b/third_party/python/which/which.py
-@@ -90,13 +90,13 @@ def _getRegisteredExecutable(exeName):
- if sys.platform.startswith('win'):
- if os.path.splitext(exeName)[1].lower() != '.exe':
- exeName += '.exe'
-- import _winreg
-+ import winreg
- try:
- key = "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\App Paths\\" +\
- exeName
-- value = _winreg.QueryValue(_winreg.HKEY_LOCAL_MACHINE, key)
-+ value = winreg.QueryValue(winreg.HKEY_LOCAL_MACHINE, key)
- registered = (value, "from HKLM\\"+key)
-- except _winreg.error:
-+ except winreg.error:
- pass
- if registered and not os.path.exists(registered[0]):
- registered = None
-@@ -244,7 +244,7 @@ def which(command, path=None, verbose=0, exts=None):
- If no match is found for the command, a WhichError is raised.
- """
- try:
-- match = whichgen(command, path, verbose, exts).next()
-+ match = next(whichgen(command, path, verbose, exts))
- except StopIteration:
- raise WhichError("Could not find '%s' on the path." % command)
- return match
-@@ -281,17 +281,17 @@ def main(argv):
- try:
- optlist, args = getopt.getopt(argv[1:], 'haVvqp:e:',
- ['help', 'all', 'version', 'verbose', 'quiet', 'path=', 'exts='])
-- except getopt.GetoptError, msg:
-+ except getopt.GetoptError as msg:
- sys.stderr.write("which: error: %s. Your invocation was: %s\n"\
- % (msg, argv))
- sys.stderr.write("Try 'which --help'.\n")
- return 1
- for opt, optarg in optlist:
- if opt in ('-h', '--help'):
-- print _cmdlnUsage
-+ print(_cmdlnUsage)
- return 0
- elif opt in ('-V', '--version'):
-- print "which %s" % __version__
-+ print("which %s" % __version__)
- return 0
- elif opt in ('-a', '--all'):
- all = 1
-@@ -319,9 +319,9 @@ def main(argv):
- nmatches = 0
- for match in whichgen(arg, path=altpath, verbose=verbose, exts=exts):
- if verbose:
-- print "%s (%s)" % match
-+ print("%s (%s)" % match)
- else:
-- print match
-+ print(match)
- nmatches += 1
- if not all:
- break
---
-2.21.0
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0002-js.pc.in-do-not-include-RequiredDefines.h-for-depend.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0002-js.pc.in-do-not-include-RequiredDefines.h-for-depend.patch
deleted file mode 100644
index e3f1883a99..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0002-js.pc.in-do-not-include-RequiredDefines.h-for-depend.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From 0a61b0b98c152f10404ccbdeeac583a486638a7a Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Andreas=20M=C3=BCller?= <schnitzeltony@googlemail.com>
-Date: Thu, 6 Jun 2013 18:36:01 +0200
-Subject: [PATCH] js.pc.in: do not include RequiredDefines.h for depending
- packages
-
-in our cross environment this would fail with:
-
-| cc1: fatal error: /usr/include/js-17.0/js/RequiredDefines.h: No such file or directory
-
-and currently it only defines __STDC_LIMIT_MACROS
-Upstream-Status: Inappropriate [embedded specific]
-Signed-off-by: Andreas Müller <schnitzeltony@googlemail.com>
-
-Rebase to 52.8.1
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- js/src/build/js.pc.in | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/js/src/build/js.pc.in b/js/src/build/js.pc.in
-index 2eae393..c2dea62 100644
---- a/js/src/build/js.pc.in
-+++ b/js/src/build/js.pc.in
-@@ -8,4 +8,4 @@ Description: The Mozilla library for JavaScript
- Version: @MOZILLA_VERSION@
- @PKGCONF_REQUIRES_PRIVATE@
- Libs: -L${libdir} -l@JS_LIBRARY_NAME@
--Cflags: -include ${includedir}/@JS_LIBRARY_NAME@/js/RequiredDefines.h -I${includedir}/@JS_LIBRARY_NAME@
-+Cflags: -I${includedir}/@JS_LIBRARY_NAME@
---
-2.7.4
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0003-fix-cross-compilation-on-i586-targets.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0003-fix-cross-compilation-on-i586-targets.patch
deleted file mode 100644
index e0929a8a8f..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0003-fix-cross-compilation-on-i586-targets.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From a452138a1dd274bfad381a701729783360dc86fb Mon Sep 17 00:00:00 2001
-From: Maciej Borzecki <maciej.borzecki@open-rnd.pl>
-Date: Tue, 5 Jan 2016 22:04:17 +0100
-Subject: [PATCH] fix cross compilation on i586 targets
-
-Remove offending -Wl,-rpath-link that may cause host libraries to be picked
-during linking. The patch applies a fix to configure.in. So as not to
-regenerate configure, a similar fix is applied there.
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Signed-off-by: Maciej Borzecki <maciej.borzecki@open-rnd.pl>
-
-Rebase to 52.8.1
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
-Rebase to 60.9.0 (firefox-esr sources)
-Signed-off-by: Andreas Müller <schnitzeltony@gmail.com>
----
- js/src/old-configure.in | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/js/src/old-configure.in b/js/src/old-configure.in
-index 3d53ee1..11c3d5a 100644
---- a/js/src/old-configure.in
-+++ b/js/src/old-configure.in
-@@ -405,7 +405,7 @@ AS='$(CC)'
- AS_DASH_C_FLAG='-c'
- MOZ_USER_DIR=".mozilla"
-
--MOZ_FIX_LINK_PATHS="-Wl,-rpath-link,${DIST}/bin -Wl,-rpath-link,${prefix}/lib"
-+MOZ_FIX_LINK_PATHS="-Wl,-rpath-link,${DIST}/bin"
-
- dnl Configure platform-specific CPU architecture compiler options.
- dnl ==============================================================
---
-2.21.0
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0004-do-not-create-python-environment.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0004-do-not-create-python-environment.patch
deleted file mode 100644
index 985fc36c18..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0004-do-not-create-python-environment.patch
+++ /dev/null
@@ -1,64 +0,0 @@
-From 5028d1cd669c179ed49061316d04c8e8862a5bd8 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Thu, 12 Jul 2018 15:04:47 +0800
-Subject: [PATCH 1/5] do not create python environment
-
-Use oe's python environment rather than creating one on the host
-
-Upstream-Status: Inappropriate [oe specific]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-
-Rebase to 60.9.0 (firefox-esr sources)
-Signed-off-by: Andreas Müller <schnitzeltony@gmail.com>
----
- build/moz.configure/init.configure | 18 ------------------
- configure.py | 10 +++++++++-
- 3 files changed, 11 insertions(+), 21 deletions(-)
-
---- a/build/moz.configure/init.configure
-+++ b/build/moz.configure/init.configure
-@@ -250,24 +250,6 @@ def virtualenv_python(env_python, build_
- else:
- python = sys.executable
-
-- if not manager.up_to_date(python):
-- log.info('Creating Python environment')
-- manager.build(python)
--
-- python = normsep(manager.python_path)
--
-- if python != normsep(sys.executable):
-- log.info('Reexecuting in the virtualenv')
-- if env_python:
-- del os.environ['PYTHON']
-- # One would prefer to use os.execl, but that's completely borked on
-- # Windows.
-- sys.exit(subprocess.call([python] + sys.argv))
--
-- # We are now in the virtualenv
-- if not distutils.sysconfig.get_python_lib():
-- die('Could not determine python site packages directory')
--
- return python
-
-
---- a/configure.py
-+++ b/configure.py
-@@ -12,7 +12,15 @@ import textwrap
-
-
- base_dir = os.path.abspath(os.path.dirname(__file__))
--sys.path.insert(0, os.path.join(base_dir, 'python', 'mozbuild'))
-+sys.path.insert(0, os.path.join(base_dir, 'config'))
-+def get_immediate_subdirectories(a_dir):
-+ return [name for name in os.listdir(a_dir)
-+ if os.path.isdir(os.path.join(a_dir, name))]
-+for s in ["python", "testing/mozbase"]:
-+ sub_dir = os.path.join(base_dir, s)
-+ for module_dir in get_immediate_subdirectories(sub_dir):
-+ sys.path.insert(0, os.path.join(sub_dir, module_dir))
-+
- from mozbuild.configure import ConfigureSandbox
- from mozbuild.makeutil import Makefile
- from mozbuild.pythonutil import iter_modules_in_path
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0005-fix-cannot-find-link.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0005-fix-cannot-find-link.patch
deleted file mode 100644
index 4f7ebc68d7..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0005-fix-cannot-find-link.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-From e6dcee5f8a0f80ce99946b81fa1233611a149fe6 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Thu, 12 Jul 2018 18:00:52 +0800
-Subject: [PATCH 2/5] fix cannot find link
-
-..
-|DEBUG: link: Trying 'mips64-wrs-linux-ld --sysroot=tmp-glibc/work/
-mips64-wrs-linux/mozjs/52.8.1-r0/recipe-sysroot '
-|ERROR: Cannot find link
-...
-
-Upstream-Status: Inappropriate [oe specific]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- build/moz.configure/checks.configure | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/build/moz.configure/checks.configure b/build/moz.configure/checks.configure
-index 8c2dbc0..83bffc3 100644
---- a/build/moz.configure/checks.configure
-+++ b/build/moz.configure/checks.configure
-@@ -128,7 +128,7 @@ def check_prog(var, progs, what=None, input=None, allow_missing=False,
-
- for prog in value or progs:
- log.debug('%s: Trying %s', var.lower(), quote(prog))
-- result = find_program(prog, paths)
-+ result = find_program(prog.split()[0], paths)
- if result:
- return result
-
---
-2.7.4
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0006-workaround-autoconf-2.13-detection-failed.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0006-workaround-autoconf-2.13-detection-failed.patch
deleted file mode 100644
index a754ff16cf..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0006-workaround-autoconf-2.13-detection-failed.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 646a78262b18e19721cd41ee515215221dd241b6 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Thu, 12 Jul 2018 18:12:42 +0800
-Subject: [PATCH 3/5] workaround autoconf 2.13 detection failed
-
-Upstream-Status: Inappropriate [oe specific]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- build/moz.configure/old.configure | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/build/moz.configure/old.configure b/build/moz.configure/old.configure
-index b32c3f7..ece47f4 100644
---- a/build/moz.configure/old.configure
-+++ b/build/moz.configure/old.configure
-@@ -31,7 +31,7 @@ def autoconf(mozconfig, autoconf):
- autoconf = autoconf[0] if autoconf else None
-
- for ac in (mozconfig_autoconf, autoconf, 'autoconf-2.13', 'autoconf2.13',
-- 'autoconf213'):
-+ 'autoconf213', 'autoconf'):
- if ac:
- autoconf = find_program(ac)
- if autoconf:
---
-2.7.4
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0007-fix-do_compile-failed-on-mips.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0007-fix-do_compile-failed-on-mips.patch
deleted file mode 100644
index d1da109720..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0007-fix-do_compile-failed-on-mips.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-From 55d833dc3c194f1eb7841f308ad3b9ec3800d3b3 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Fri, 13 Jul 2018 15:48:32 +0800
-Subject: [PATCH 5/5] fix do_compile failed on mips
-
-Link with var-OS_LDFLAGS to fix the issue.
-Such as on mips:
-...
-|mips-wrsmllib32-linux-g++ -meb -mabi=32 -mhard-float ... -o libmozjs-52.so
-|/usr/include/c++/8.1.0/bits/atomic_base.h:514: error: undefined
-reference to '__atomic_fetch_add_8'
-...
-
-In recipe, set OS_LDFLAGS="-Wl,-latomic" could fix the issue.
-
-Upstream-Status: Inappropriate [oe specific]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- config/config.mk | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
---- a/config/config.mk
-+++ b/config/config.mk
-@@ -423,7 +423,7 @@ EXPAND_MKSHLIB_ARGS = --uselist
- ifdef SYMBOL_ORDER
- EXPAND_MKSHLIB_ARGS += --symbol-order $(SYMBOL_ORDER)
- endif
--EXPAND_MKSHLIB = $(EXPAND_LIBS_EXEC) $(EXPAND_MKSHLIB_ARGS) -- $(MKSHLIB)
-+EXPAND_MKSHLIB = $(EXPAND_LIBS_EXEC) $(EXPAND_MKSHLIB_ARGS) -- $(MKSHLIB) $(OS_LDFLAGS)
-
- # autoconf.mk sets OBJ_SUFFIX to an error to avoid use before including
- # this file
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0008-add-riscv-support.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0008-add-riscv-support.patch
deleted file mode 100644
index 0a41485610..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0008-add-riscv-support.patch
+++ /dev/null
@@ -1,50 +0,0 @@
-Add RISC-V support
-
-Upstream-Status: Submitted [https://bugzilla.mozilla.org/show_bug.cgi?id=1318905]
-
-Signed-off-by: Ricardo Salveti <ricardo@foundries.io>
-
---- a/build/autoconf/config.guess
-+++ b/build/autoconf/config.guess
-@@ -1029,6 +1029,9 @@ EOF
- ppcle:Linux:*:*)
- echo powerpcle-unknown-linux-${LIBC}
- exit ;;
-+ riscv32:Linux:*:* | riscv64:Linux:*:*)
-+ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
-+ exit ;;
- s390:Linux:*:* | s390x:Linux:*:*)
- echo ${UNAME_MACHINE}-ibm-linux-${LIBC}
- exit ;;
---- a/build/moz.configure/init.configure
-+++ b/build/moz.configure/init.configure
-@@ -658,6 +658,9 @@ def split_triplet(triplet, allow_unknown
- elif cpu == 'sh4':
- canonical_cpu = 'sh4'
- endianness = 'little'
-+ elif cpu in ('riscv32', 'riscv64'):
-+ canonical_cpu = cpu
-+ endianness = 'little'
- elif allow_unknown:
- canonical_cpu = cpu
- endianness = 'unknown'
---- a/python/mozbuild/mozbuild/configure/constants.py
-+++ b/python/mozbuild/mozbuild/configure/constants.py
-@@ -50,6 +50,8 @@ CPU_bitness = {
- 'mips64': 64,
- 'ppc': 32,
- 'ppc64': 64,
-+ 'riscv32': 32,
-+ 'riscv64': 64,
- 's390': 32,
- 's390x': 64,
- 'sh4': 32,
-@@ -82,6 +84,8 @@ CPU_preprocessor_checks = OrderedDict((
- ('s390', '__s390__'),
- ('ppc64', '__powerpc64__'),
- ('ppc', '__powerpc__'),
-+ ('riscv32', '__riscv && __SIZEOF_POINTER__ == 4'),
-+ ('riscv64', '__riscv && __SIZEOF_POINTER__ == 8'),
- ('Alpha', '__alpha__'),
- ('hppa', '__hppa__'),
- ('sparc64', '__sparc__ && __arch64__'),
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0009-mozjs-fix-coredump-caused-by-getenv.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0009-mozjs-fix-coredump-caused-by-getenv.patch
deleted file mode 100644
index 477f73a2f7..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0009-mozjs-fix-coredump-caused-by-getenv.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 20b639b7364f9953fdacb058f9ba800bcbf029b4 Mon Sep 17 00:00:00 2001
-From: Changqing Li <changqing.li@windriver.com>
-Date: Thu, 2 Aug 2018 09:40:48 +0800
-Subject: [PATCH] mozjs: fix coredump caused by getenv
-
-Upstream-Status: Submitted [https://bugzilla.mozilla.org/show_bug.cgi?id=1480315]
-
-Signed-off-by: Changqing Li <changqing.li@windriver.com>
----
- mozglue/misc/TimeStamp.cpp | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/mozglue/misc/TimeStamp.cpp b/mozglue/misc/TimeStamp.cpp
-index 932b75c..7a4d71b 100644
---- a/mozglue/misc/TimeStamp.cpp
-+++ b/mozglue/misc/TimeStamp.cpp
-@@ -11,6 +11,7 @@
- #include "mozilla/TimeStamp.h"
- #include <stdio.h>
- #include <string.h>
-+#include <stdlib.h>
-
- namespace mozilla {
-
---
-2.7.4
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0010-format-overflow.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0010-format-overflow.patch
deleted file mode 100644
index e257fc6f62..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0010-format-overflow.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-Drop enabling format string warnings to help gcc9
-
-Fixes
-| /mnt/a/yoe/build/tmp/work/core2-64-yoe-linux-musl/mozjs/52.9.1-r0/mozjs-52.9.1/js/src/jit/x64/BaseAssembler-x64.h:596:13: error: '%s' directive argument is null [-Werror=format-overflow=]
-| 596 | spew("movq " MEM_obs ", %s", ADDR_obs(offset, base, index, scale), GPReg64Name(dst));
-| | ~~~~^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Upstream-Status: Inappropriate [Workaround for gcc9]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
---- a/js/src/moz.build
-+++ b/js/src/moz.build
-@@ -785,7 +785,7 @@ if CONFIG['JS_HAS_CTYPES']:
- if CONFIG['CC_TYPE'] in ('clang', 'gcc'):
- # Also disable strict-aliasing for GCC compiler, that is enabled by default
- # starting with version 7.1, see Bug 1363009
-- CXXFLAGS += ['-Wno-shadow', '-Werror=format', '-fno-strict-aliasing']
-+ CXXFLAGS += ['-Wno-shadow', '-fno-strict-aliasing']
-
- # Suppress warnings in third-party code.
- if CONFIG['CC_TYPE'] in ('clang', 'gcc'):
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0011-To-fix-build-error-on-arm32BE.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0011-To-fix-build-error-on-arm32BE.patch
deleted file mode 100644
index 056f74a529..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0011-To-fix-build-error-on-arm32BE.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-From 9afb0e4d3b9209ea198052cea0401bef7ee25ad8 Mon Sep 17 00:00:00 2001
-From: Lei Maohui <leimaohui@cn.fujitsu.com>
-Date: Thu, 9 May 2019 12:23:40 +0900
-Subject: [PATCH] To fix build error on arm32BE.
-
-error: #error Target architecture was not detected as supported by Double-Conversion.
-
-Signed-off-by: Lei Maohui <leimaohui@cn.fujitsu.com>
----
- mfbt/double-conversion/double-conversion/utils.h | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/mfbt/double-conversion/double-conversion/utils.h b/mfbt/double-conversion/double-conversion/utils.h
-index 4f37218..93575cb 100644
---- a/mfbt/double-conversion/double-conversion/utils.h
-+++ b/mfbt/double-conversion/double-conversion/utils.h
-@@ -53,7 +53,7 @@
- // disabled.)
- // On Linux,x86 89255e-22 != Div_double(89255.0/1e22)
- #if defined(_M_X64) || defined(__x86_64__) || \
-- defined(__ARMEL__) || defined(__avr32__) || \
-+ defined(__arm__) || defined(__avr32__) || \
- defined(__hppa__) || defined(__ia64__) || \
- defined(__mips__) || \
- defined(__powerpc__) || defined(__ppc__) || defined(__ppc64__) || \
---
-2.7.4
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0012-JS_PUBLIC_API.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0012-JS_PUBLIC_API.patch
deleted file mode 100644
index 56b18ba8c6..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0012-JS_PUBLIC_API.patch
+++ /dev/null
@@ -1,55 +0,0 @@
-patch from https://bugzilla.mozilla.org/show_bug.cgi?id=1426865
-
-Upstream-Status: Submitted [https://bugzilla.mozilla.org/show_bug.cgi?id=1426865]
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
---- a/js/public/TypeDecls.h
-+++ b/js/public/TypeDecls.h
-@@ -21,31 +21,32 @@
- #include <stdint.h>
-
- #include "js-config.h"
-+#include "jstypes.h"
-
- typedef uint8_t jsbytecode;
-
--class JSAtom;
--struct JSCompartment;
--struct JSContext;
--class JSFunction;
--class JSObject;
--struct JSRuntime;
--class JSScript;
--class JSString;
--class JSAddonId;
--struct JSFreeOp;
-+class JS_PUBLIC_API JSAtom;
-+struct JS_PUBLIC_API JSCompartment;
-+struct JS_PUBLIC_API JSContext;
-+class JS_PUBLIC_API JSFunction;
-+class JS_PUBLIC_API JSObject;
-+struct JS_PUBLIC_API JSRuntime;
-+class JS_PUBLIC_API JSScript;
-+class JS_PUBLIC_API JSString;
-+class JS_PUBLIC_API JSAddonId;
-+struct JS_PUBLIC_API JSFreeOp;
-
--struct jsid;
-+struct JS_PUBLIC_API jsid;
-
- namespace JS {
-
- typedef unsigned char Latin1Char;
-
--class Symbol;
--class Value;
--class Realm;
--struct Runtime;
--struct Zone;
-+class JS_PUBLIC_API Symbol;
-+class JS_PUBLIC_API Value;
-+class JS_PUBLIC_API Realm;
-+struct JS_PUBLIC_API Runtime;
-+struct JS_PUBLIC_API Zone;
-
- template <typename T>
- class Handle;
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0013-riscv-Disable-atomic-operations.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0013-riscv-Disable-atomic-operations.patch
deleted file mode 100644
index 2e810c87f9..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0013-riscv-Disable-atomic-operations.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-From 64ad80e6d95871f17be4cd01da15581f41ac0b2b Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Mon, 27 May 2019 21:10:34 -0700
-Subject: [PATCH] riscv: Disable atomic operations
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- js/src/jit/AtomicOperations.h | 2 ++
- 1 file changed, 2 insertions(+)
-
---- a/js/src/jit/AtomicOperations.h
-+++ b/js/src/jit/AtomicOperations.h
-@@ -393,6 +393,8 @@ inline bool AtomicOperations::isLockfree
- #include "jit/none/AtomicOperations-feeling-lucky.h"
- #elif defined(__s390__) || defined(__s390x__)
- #include "jit/none/AtomicOperations-feeling-lucky.h"
-+#elif defined(__riscv)
-+#include "jit/none/AtomicOperations-feeling-lucky.h"
- #else
- #error "No AtomicOperations support provided for this platform"
- #endif
---- a/js/src/jit/none/AtomicOperations-feeling-lucky.h
-+++ b/js/src/jit/none/AtomicOperations-feeling-lucky.h
-@@ -80,6 +80,14 @@
- #define GNUC_COMPATIBLE
- #endif
-
-+#ifdef __riscv
-+#define GNUC_COMPATIBLE
-+#ifdef __riscv_xlen == 64
-+#define HAS_64BIT_ATOMICS
-+#define HAS_64BIT_LOCKFREE
-+#endif
-+#endif
-+
- // The default implementation tactic for gcc/clang is to use the newer
- // __atomic intrinsics added for use in C++11 <atomic>. Where that
- // isn't available, we use GCC's older __sync functions instead.
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0014-remove-JS_VOLATIME_ARM.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0014-remove-JS_VOLATIME_ARM.patch
deleted file mode 100644
index a20873cb0b..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/0014-remove-JS_VOLATIME_ARM.patch
+++ /dev/null
@@ -1,111 +0,0 @@
-# HG changeset patch
-# User Lars T Hansen <lhansen@mozilla.com>
-# Date 1538489772 -7200
-# Node ID bb430eaf5521aa8ab233a45b585ff9e5dfecf4c9
-# Parent e87d7028568e721e8d297ce62f9622e74d29bb37
-Bug 1495731 - remove JS_VOLATILE_ARM, it is no longer relevant. r=waldo
-
-JS_VOLATILE_ARM was a workaround for a gcc 4.7 bug on B2G where it
-would generate unaligned word accesses that should have been
-individual byte accesses. We now require at least gcc 6.1 (and ARM
-systems support unaligned accesses).
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Backport [https://hg.mozilla.org/integration/mozilla-inbound/rev/bb430eaf5521]
-
---- a/js/src/vm/TypedArrayObject-inl.h
-+++ b/js/src/vm/TypedArrayObject-inl.h
-@@ -259,68 +259,61 @@ class ElementSpecific {
- return true;
- }
-
-- // Inhibit unaligned accesses on ARM (bug 1097253, a compiler bug).
--#if defined(__arm__) && defined(__GNUC__) && !defined(__clang__)
--#define JS_VOLATILE_ARM volatile
--#else
--#define JS_VOLATILE_ARM
--#endif
--
- SharedMem<void*> data = Ops::extract(source);
- switch (source->type()) {
- case Scalar::Int8: {
-- SharedMem<JS_VOLATILE_ARM int8_t*> src =
-- data.cast<JS_VOLATILE_ARM int8_t*>();
-+ SharedMem<int8_t*> src =
-+ data.cast<int8_t*>();
- for (uint32_t i = 0; i < count; ++i)
- Ops::store(dest++, ConvertNumber<T>(Ops::load(src++)));
- break;
- }
- case Scalar::Uint8:
- case Scalar::Uint8Clamped: {
-- SharedMem<JS_VOLATILE_ARM uint8_t*> src =
-- data.cast<JS_VOLATILE_ARM uint8_t*>();
-+ SharedMem<uint8_t*> src =
-+ data.cast<uint8_t*>();
- for (uint32_t i = 0; i < count; ++i)
- Ops::store(dest++, ConvertNumber<T>(Ops::load(src++)));
- break;
- }
- case Scalar::Int16: {
-- SharedMem<JS_VOLATILE_ARM int16_t*> src =
-- data.cast<JS_VOLATILE_ARM int16_t*>();
-+ SharedMem<int16_t*> src =
-+ data.cast<int16_t*>();
- for (uint32_t i = 0; i < count; ++i)
- Ops::store(dest++, ConvertNumber<T>(Ops::load(src++)));
- break;
- }
- case Scalar::Uint16: {
-- SharedMem<JS_VOLATILE_ARM uint16_t*> src =
-- data.cast<JS_VOLATILE_ARM uint16_t*>();
-+ SharedMem<uint16_t*> src =
-+ data.cast<uint16_t*>();
- for (uint32_t i = 0; i < count; ++i)
- Ops::store(dest++, ConvertNumber<T>(Ops::load(src++)));
- break;
- }
- case Scalar::Int32: {
-- SharedMem<JS_VOLATILE_ARM int32_t*> src =
-- data.cast<JS_VOLATILE_ARM int32_t*>();
-+ SharedMem<int32_t*> src =
-+ data.cast<int32_t*>();
- for (uint32_t i = 0; i < count; ++i)
- Ops::store(dest++, ConvertNumber<T>(Ops::load(src++)));
- break;
- }
- case Scalar::Uint32: {
-- SharedMem<JS_VOLATILE_ARM uint32_t*> src =
-- data.cast<JS_VOLATILE_ARM uint32_t*>();
-+ SharedMem<uint32_t*> src =
-+ data.cast<uint32_t*>();
- for (uint32_t i = 0; i < count; ++i)
- Ops::store(dest++, ConvertNumber<T>(Ops::load(src++)));
- break;
- }
- case Scalar::Float32: {
-- SharedMem<JS_VOLATILE_ARM float*> src =
-- data.cast<JS_VOLATILE_ARM float*>();
-+ SharedMem<float*> src =
-+ data.cast<float*>();
- for (uint32_t i = 0; i < count; ++i)
- Ops::store(dest++, ConvertNumber<T>(Ops::load(src++)));
- break;
- }
- case Scalar::Float64: {
-- SharedMem<JS_VOLATILE_ARM double*> src =
-- data.cast<JS_VOLATILE_ARM double*>();
-+ SharedMem<double*> src =
-+ data.cast<double*>();
- for (uint32_t i = 0; i < count; ++i)
- Ops::store(dest++, ConvertNumber<T>(Ops::load(src++)));
- break;
-@@ -329,8 +322,6 @@ class ElementSpecific {
- MOZ_CRASH("setFromTypedArray with a typed array with bogus type");
- }
-
--#undef JS_VOLATILE_ARM
--
- return true;
- }
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/mipsarchn32/0001-fix-compiling-failure-on-mips64-n32-bsp.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/mipsarchn32/0001-fix-compiling-failure-on-mips64-n32-bsp.patch
deleted file mode 100644
index b882d76ec2..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/mipsarchn32/0001-fix-compiling-failure-on-mips64-n32-bsp.patch
+++ /dev/null
@@ -1,80 +0,0 @@
-From f2f8be496c8e34b4d909b688a95c6f8565201081 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Wed, 19 Jun 2019 14:30:44 +0800
-Subject: [PATCH] fix compiling failure on mips64-n32 bsp
-
-- Tweak mips64-n32 with mips32
-
-- The toolchain of mips64-n32 supports both of the macros
- `__mips64' and `__mips__', but 32-bit is required here.
-
-- N32 uses 64-bit registers but restricts addresses to 32 bits.
- https://www.linux-mips.org/pub/linux/mips/doc/ABI/MIPS-N32-ABI-Handbook.pdf
- Table 2-1 specifies the use of registers in n32 and native 64-bit mode.
- From the table, N32 and N64 have the same registers
-
-Upstream-Status: Inappropriate [oe specific]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
-Signed-off-by: Mingli Yu <Mingli.Yu@windriver.com>
----
- build/moz.configure/init.configure | 5 ++++-
- js/src/jit/mips-shared/Architecture-mips-shared.h | 4 +++-
- python/mozbuild/mozbuild/configure/constants.py | 2 +-
- 3 files changed, 8 insertions(+), 3 deletions(-)
-
-diff --git a/build/moz.configure/init.configure b/build/moz.configure/init.configure
-index 648ac2d..d0bcaf8 100644
---- a/build/moz.configure/init.configure
-+++ b/build/moz.configure/init.configure
-@@ -650,7 +650,10 @@ def split_triplet(triplet, allow_unknown=False):
- canonical_cpu = 'mips32'
- endianness = 'little' if 'el' in cpu else 'big'
- elif cpu in ('mips64', 'mips64el'):
-- canonical_cpu = 'mips64'
-+ if 'n32' in triplet:
-+ canonical_cpu = 'mips32'
-+ else:
-+ canonical_cpu = 'mips64'
- endianness = 'little' if 'el' in cpu else 'big'
- elif cpu.startswith('aarch64'):
- canonical_cpu = 'aarch64'
-diff --git a/js/src/jit/mips-shared/Architecture-mips-shared.h b/js/src/jit/mips-shared/Architecture-mips-shared.h
-index e95ffd4..caf83f7 100644
---- a/js/src/jit/mips-shared/Architecture-mips-shared.h
-+++ b/js/src/jit/mips-shared/Architecture-mips-shared.h
-@@ -28,6 +28,8 @@
- #elif (defined(_MIPS_SIM) && (_MIPS_SIM == _ABI64)) || \
- defined(JS_SIMULATOR_MIPS64)
- #define USES_N64_ABI
-+#elif (defined(_MIPS_SIM) && (_MIPS_SIM == _ABIN32))
-+#define USES_N32_ABI
- #else
- #error "Unsupported ABI"
- #endif
-@@ -94,7 +96,7 @@ class Registers {
- ta1 = t5,
- ta2 = t6,
- ta3 = t7,
--#elif defined(USES_N64_ABI)
-+#elif defined(USES_N64_ABI) || defined(USES_N32_ABI)
- a4 = r8,
- a5 = r9,
- a6 = r10,
-diff --git a/python/mozbuild/mozbuild/configure/constants.py b/python/mozbuild/mozbuild/configure/constants.py
-index 1067b6a..e0f0405 100644
---- a/python/mozbuild/mozbuild/configure/constants.py
-+++ b/python/mozbuild/mozbuild/configure/constants.py
-@@ -90,8 +90,8 @@ CPU_preprocessor_checks = OrderedDict((
- ('hppa', '__hppa__'),
- ('sparc64', '__sparc__ && __arch64__'),
- ('sparc', '__sparc__'),
-- ('mips64', '__mips64'),
- ('mips32', '__mips__'),
-+ ('mips64', '__mips64'),
- ('sh4', '__sh__'),
- ))
-
---
-2.7.4
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/musl/0001-support-musl.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/musl/0001-support-musl.patch
deleted file mode 100644
index 770d5e0aaa..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/musl/0001-support-musl.patch
+++ /dev/null
@@ -1,98 +0,0 @@
-From 04e8a611e958f0da1ccac61acae3a6f1a5168b20 Mon Sep 17 00:00:00 2001
-From: Hongxu Jia <hongxu.jia@windriver.com>
-Date: Fri, 13 Jul 2018 18:08:14 +0800
-Subject: [PATCH] support musl
-
-Upstream-Status: Pending
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- mozglue/misc/TimeStamp_darwin.cpp | 1 -
- mozglue/misc/TimeStamp_posix.cpp | 1 -
- nsprpub/pr/src/misc/prsystem.c | 1 -
- python/psutil/psutil/_psutil_bsd.c | 1 -
- python/psutil/psutil/_psutil_osx.c | 1 -
- python/psutil/psutil/arch/bsd/process_info.c | 1 -
- python/psutil/psutil/arch/osx/process_info.c | 1 -
- 9 files changed, 3 insertions(+), 12 deletions(-)
-
---- a/mozglue/misc/TimeStamp_darwin.cpp
-+++ b/mozglue/misc/TimeStamp_darwin.cpp
-@@ -19,7 +19,6 @@
-
- #include <mach/mach_time.h>
- #include <sys/time.h>
--#include <sys/sysctl.h>
- #include <time.h>
- #include <unistd.h>
-
---- a/mozglue/misc/TimeStamp_posix.cpp
-+++ b/mozglue/misc/TimeStamp_posix.cpp
-@@ -21,7 +21,6 @@
- #if defined(__DragonFly__) || defined(__FreeBSD__) || defined(__NetBSD__) || \
- defined(__OpenBSD__)
- #include <sys/param.h>
--#include <sys/sysctl.h>
- #endif
-
- #if defined(__DragonFly__) || defined(__FreeBSD__)
---- a/nsprpub/pr/src/misc/prsystem.c
-+++ b/nsprpub/pr/src/misc/prsystem.c
-@@ -27,7 +27,6 @@
- || defined(OPENBSD) || defined(DRAGONFLY) || defined(DARWIN)
- #define _PR_HAVE_SYSCTL
- #include <sys/param.h>
--#include <sys/sysctl.h>
- #endif
-
- #if defined(DARWIN)
---- a/third_party/python/psutil/psutil/_psutil_bsd.c
-+++ b/third_party/python/psutil/psutil/_psutil_bsd.c
-@@ -29,7 +29,6 @@
- #include <paths.h>
- #include <sys/types.h>
- #include <sys/param.h>
--#include <sys/sysctl.h>
- #include <sys/user.h>
- #include <sys/proc.h>
- #include <sys/file.h>
---- a/third_party/python/psutil/psutil/_psutil_osx.c
-+++ b/third_party/python/psutil/psutil/_psutil_osx.c
-@@ -13,7 +13,6 @@
- #include <stdlib.h>
- #include <stdio.h>
- #include <utmpx.h>
--#include <sys/sysctl.h>
- #include <sys/vmmeter.h>
- #include <libproc.h>
- #include <sys/proc_info.h>
---- a/third_party/python/psutil/psutil/arch/osx/process_info.c
-+++ b/third_party/python/psutil/psutil/arch/osx/process_info.c
-@@ -16,7 +16,6 @@
- #include <stdlib.h>
- #include <stdio.h>
- #include <signal.h>
--#include <sys/sysctl.h>
- #include <libproc.h>
-
- #include "process_info.h"
---- a/memory/build/Mutex.h
-+++ b/memory/build/Mutex.h
-@@ -42,7 +42,7 @@ struct Mutex {
- if (pthread_mutexattr_init(&attr) != 0) {
- return false;
- }
-- pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_ADAPTIVE_NP);
-+ pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_STALLED);
- if (pthread_mutex_init(&mMutex, &attr) != 0) {
- pthread_mutexattr_destroy(&attr);
- return false;
-@@ -102,7 +102,7 @@ typedef Mutex StaticMutex;
-
- #if defined(XP_DARWIN)
- #define STATIC_MUTEX_INIT OS_SPINLOCK_INIT
--#elif defined(XP_LINUX) && !defined(ANDROID)
-+#elif defined(XP_LINUX) && !defined(ANDROID) && defined(__GLIBC__)
- #define STATIC_MUTEX_INIT PTHREAD_ADAPTIVE_MUTEX_INITIALIZER_NP
- #else
- #define STATIC_MUTEX_INIT PTHREAD_MUTEX_INITIALIZER
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/musl/0002-js-Fix-build-with-musl.patch b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/musl/0002-js-Fix-build-with-musl.patch
deleted file mode 100644
index f4c6e2768d..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs/musl/0002-js-Fix-build-with-musl.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-From 0c9e8f586ba52a9aef5ed298e8315b2598b8fb72 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 25 May 2019 16:54:45 -0700
-Subject: [PATCH] js: Fix build with musl
-
-The MIPS specific header <sgidefs.h> is not provided by musl
-linux kernel headers provide <asm/sgidefs.h> which has the same definitions
-
-Upstream-Status: Pending
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- js/src/jsmath.cpp | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/js/src/jsmath.cpp b/js/src/jsmath.cpp
-index a28968be..8facaa81 100644
---- a/js/src/jsmath.cpp
-+++ b/js/src/jsmath.cpp
-@@ -71,7 +71,7 @@
- #elif defined(__s390__)
- #define GETRANDOM_NR 349
- #elif defined(__mips__)
--#include <sgidefs.h>
-+#include <asm/sgidefs.h>
- #if _MIPS_SIM == _MIPS_SIM_ABI32
- #define GETRANDOM_NR 4353
- #elif _MIPS_SIM == _MIPS_SIM_ABI64
---
-2.21.0
-
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs_60.9.0.bb b/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs_60.9.0.bb
deleted file mode 100644
index 7367c30a5e..0000000000
--- a/meta-oe/dynamic-layers/meta-python/recipes-extended/mozjs/mozjs_60.9.0.bb
+++ /dev/null
@@ -1,147 +0,0 @@
-SUMMARY = "SpiderMonkey is Mozilla's JavaScript engine written in C/C++"
-HOMEPAGE = "https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey"
-LICENSE = "MPL-2.0"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=dc9b6ecd19a14a54a628edaaf23733bf"
-
-SRC_URI = " \
- https://archive.mozilla.org/pub/firefox/releases/${PV}esr/source/firefox-${PV}esr.source.tar.xz \
- file://0001-Port-build-to-python3.patch \
- file://0002-js.pc.in-do-not-include-RequiredDefines.h-for-depend.patch \
- file://0003-fix-cross-compilation-on-i586-targets.patch \
- file://0004-do-not-create-python-environment.patch \
- file://0005-fix-cannot-find-link.patch \
- file://0006-workaround-autoconf-2.13-detection-failed.patch \
- file://0007-fix-do_compile-failed-on-mips.patch \
- file://0008-add-riscv-support.patch \
- file://0009-mozjs-fix-coredump-caused-by-getenv.patch \
- file://0010-format-overflow.patch \
- file://0011-To-fix-build-error-on-arm32BE.patch \
- file://0012-JS_PUBLIC_API.patch \
- file://0013-riscv-Disable-atomic-operations.patch \
- file://0014-remove-JS_VOLATIME_ARM.patch \
-"
-SRC_URI_append_libc-musl = " \
- file://musl/0001-support-musl.patch \
- file://musl/0002-js-Fix-build-with-musl.patch \
-"
-SRC_URI_append_mipsarchn32 = " \
- file://mipsarchn32/0001-fix-compiling-failure-on-mips64-n32-bsp.patch \
-"
-SRC_URI[md5sum] = "69a0be9ce695e5dc4941ed0c78ef00c2"
-SRC_URI[sha256sum] = "9f453c8cc5669e46e38f977764d49a36295bf0d023619d9aac782e6bb3e8c53f"
-
-S = "${WORKDIR}/firefox-${@d.getVar("PV").replace("esr", "")}"
-
-inherit autotools pkgconfig perlnative python3native
-
-inherit features_check
-CONFLICT_DISTRO_FEATURES_mipsarchn32 = "ld-is-gold"
-
-DEPENDS += " \
- nspr zlib autoconf-2.13-native \
- python3-six-native python3-pytoml-native \
- python3-jsmin-native python3-six \
-"
-
-# Disable null pointer optimization in gcc >= 6
-# https://bugzilla.redhat.com/show_bug.cgi?id=1328045
-CFLAGS += "-fno-tree-vrp -fno-strict-aliasing -fno-delete-null-pointer-checks"
-CXXFLAGS += "-fno-tree-vrp -fno-strict-aliasing -fno-delete-null-pointer-checks"
-
-# nspr's package-config is ignored so set libs manually
-EXTRA_OECONF = " \
- --target=${TARGET_SYS} \
- --host=${BUILD_SYS} \
- --prefix=${prefix} \
- --libdir=${libdir} \
- --disable-tests --disable-strip --disable-optimize \
- --disable-jemalloc \
- --with-nspr-cflags='-I${STAGING_INCDIR}/nspr -I${STAGING_INCDIR}/nss3' \
- --with-nspr-libs='-lplds4 -lplc4 -lnspr4' \
- ${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', "--enable-gold", '--disable-gold', d)} \
-"
-
-# Without this, JS_Init() will fail for mips64.
-EXTRA_OECONF_append_mipsarch = " --with-intl-api=build"
-EXTRA_OECONF_append_powerpc = " --with-intl-api=build"
-
-EXTRA_OECONF_append_mipsarch = " --disable-ion"
-EXTRA_OECONF_append_riscv64 = " --disable-ion"
-EXTRA_OECONF_append_riscv32 = " --disable-ion"
-
-PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}"
-PACKAGECONFIG[x11] = "--x-includes=${STAGING_INCDIR} --x-libraries=${STAGING_LIBDIR},--x-includes=no --x-libraries=no,virtual/libx11"
-
-EXTRA_OEMAKE_task-compile += "BUILD_OPT=1 OS_LDFLAGS='-Wl,-latomic ${LDFLAGS}'"
-EXTRA_OEMAKE_task-install += "STATIC_LIBRARY_NAME=js_static"
-
-export HOST_CC = "${BUILD_CC}"
-export HOST_CXX = "${BUILD_CXX}"
-export HOST_CFLAGS = "${BUILD_CFLAGS}"
-export HOST_CPPFLAGS = "${BUILD_CPPFLAGS}"
-export HOST_CXXFLAGS = "${BUILD_CXXFLAGS}"
-
-do_configure() {
- export SHELL="/bin/sh"
- cd ${S}
- # Add mozjs python-modules necessary
- PYTHONPATH="${S}/third_party/python/which:${S}/config:${S}/build"
- for sub_dir in python testing/mozbase; do
- for module_dir in `ls $sub_dir -1`;do
- [ $module_dir = "virtualenv" ] && continue
- if [ -d "${S}/$sub_dir/$module_dir" ];then
- PYTHONPATH="$PYTHONPATH:${S}/$sub_dir/$module_dir"
- fi
- done
- done
- echo "$PYTHONPATH" > ${B}/PYTHONPATH
- export PYTHONPATH=`cat ${B}/PYTHONPATH`
-
- cd ${S}/js/src
- autoconf213 --macrodir=${STAGING_DATADIR_NATIVE}/autoconf213 old-configure.in > old-configure
-
- cd ${B}
- # use of /tmp can causes problems on heavily loaded hosts
- mkdir -p "${B}/lcl_tmp"
- TMPDIR="${B}/lcl_tmp" ${S}/js/src/configure ${EXTRA_OECONF}
-
- # Make standard Makefile checks pass
- touch ${S}/js/src/configure
- touch ${B}/config.status
-}
-
-do_compile_prepend() {
- export SHELL="/bin/sh"
- export PYTHONPATH=`cat ${B}/PYTHONPATH`
-}
-
-do_install_prepend() {
- export SHELL="/bin/sh"
- export PYTHONPATH=`cat ${B}/PYTHONPATH`
-}
-
-inherit multilib_script multilib_header
-
-MULTILIB_SCRIPTS += " ${PN}-dev:${bindir}/js60-config"
-
-do_install_append() {
- oe_multilib_header mozjs-60/js-config.h
- sed -e 's@${STAGING_DIR_HOST}@@g' \
- -i ${D}${bindir}/js60-config
-}
-
-PACKAGES =+ "lib${BPN}"
-FILES_lib${BPN} += "${libdir}/lib*.so"
-FILES_${PN}-dev += "${bindir}/js60-config"
-
-# Fails to build with thumb-1 (qemuarm)
-#| {standard input}: Assembler messages:
-#| {standard input}:2172: Error: shifts in CMP/MOV instructions are only supported in unified syntax -- `mov r2,r1,LSR#20'
-#| {standard input}:2173: Error: unshifted register required -- `bic r2,r2,#(1<<11)'
-#| {standard input}:2174: Error: unshifted register required -- `orr r1,r1,#(1<<20)'
-#| {standard input}:2176: Error: instruction not supported in Thumb16 mode -- `subs r2,r2,#0x300'
-#| {standard input}:2178: Error: instruction not supported in Thumb16 mode -- `subs r5,r2,#52'
-ARM_INSTRUCTION_SET_armv5 = "arm"
-ARM_INSTRUCTION_SET_armv4 = "arm"
-
-DISABLE_STATIC = ""
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-multimedia/kmsxx/kmsxx_git.bb b/meta-oe/dynamic-layers/meta-python/recipes-multimedia/kmsxx/kmsxx_git.bb
new file mode 100644
index 0000000000..cdba1a24d1
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-multimedia/kmsxx/kmsxx_git.bb
@@ -0,0 +1,35 @@
+# SPDX-License-Identifier: MIT
+#
+# Copyright Leica Geosystems AG
+#
+
+SUMMARY = "C++ library for kernel mode setting"
+HOMEPAGE = "https://github.com/tomba/kmsxx"
+LICENSE = "MPL-2.0"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=815ca599c9df247a0c7f619bab123dad"
+
+BRANCH = "master"
+SRC_URI = "git://github.com/tomba/kmsxx.git;protocol=https;branch=${BRANCH}"
+SRCREV = "412935a47b762c33e54a464243f2d789b065bbb6"
+PACKAGES =+ "${PN}-python"
+
+PACKAGECONFIG ?= "utils python "
+PACKAGECONFIG[omap] += "-Domap=enabled, -Domap=disabled"
+PACKAGECONFIG[python] += "-Dpykms=enabled, -Dpykms=disabled, python3 python3-pybind11"
+PACKAGECONFIG[utils] += "-Dutils=true, -Dutils=false"
+
+DEPENDS += "libdrm libevdev fmt"
+
+S = "${WORKDIR}/git"
+
+inherit meson pkgconfig
+
+do_install:append() {
+ if ${@bb.utils.contains('PACKAGECONFIG', 'utils', 'true', 'false', d)}; then
+ # kmstest already provided by libdrm-tests
+ mv ${D}${bindir}/kmstest ${D}${bindir}/kmsxxtest
+ fi
+}
+
+FILES:${PN} ="${bindir} ${libdir}"
+FILES:${PN}-python += "${PYTHON_SITEPACKAGES_DIR}/*"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-support/nvmetcli/nvmetcli_0.7.bb b/meta-oe/dynamic-layers/meta-python/recipes-support/nvmetcli/nvmetcli_0.7.bb
new file mode 100644
index 0000000000..ec483321b6
--- /dev/null
+++ b/meta-oe/dynamic-layers/meta-python/recipes-support/nvmetcli/nvmetcli_0.7.bb
@@ -0,0 +1,39 @@
+SUMMARY = "NVM-Express target user space configuration utility."
+DESCRIPTION = "This package contains the command line interface to the NVMe \
+over Fabrics nvmet in the Linux kernel. It allows configuring the nvmet \
+interactively as well as saving / restoring the configuration to / from a json \
+file."
+LICENSE = "Apache-2.0"
+LIC_FILES_CHKSUM = "file://COPYING;md5=1dece7821bf3fd70fe1309eaa37d52a2"
+
+inherit systemd setuptools3
+
+# nvmet service will start and stop the NVMe Target configuration on boot and
+# shutdown from a saved NVMe Target configuration in the /etc/nvmet/config.json
+# file. This file is not installed by default since the configuration will vary
+# on real systems. Example configuration files are provided by including the
+# nvmetcli-examples package.
+SYSTEMD_SERVICE:${PN} = "nvmet.service"
+
+SYSTEMD_AUTO_ENABLE ?= "disable"
+
+RDEPENDS:${PN} += "python3 python3-six python3-pyparsing python3-configshell-fb"
+
+SRCREV = "0a6b088db2dc2e5de11e6f23f1e890e4b54fee64"
+SRC_URI = "git://git.infradead.org/users/hch/nvmetcli.git;branch=master"
+
+S = "${WORKDIR}/git"
+
+do_install:append() {
+ # Install example configuration scripts.
+ install -d ${D}${datadir}/nvmet
+ cp -fr ${S}/examples ${D}${datadir}/nvmet/
+
+ # Install systemd service file.
+ install -d ${D}${systemd_unitdir}/system
+ cp -fr ${S}/nvmet.service ${D}${systemd_unitdir}/system
+}
+
+# Examples package contains example json files used to configure nvmet.
+PACKAGES += "${PN}-examples"
+FILES:${PN}-examples = "${datadir}/nvmet/examples/*"
diff --git a/meta-oe/dynamic-layers/meta-python/recipes-support/smem/smem_1.5.bb b/meta-oe/dynamic-layers/meta-python/recipes-support/smem/smem_1.5.bb
index 90db9c3f3e..9f85532064 100644
--- a/meta-oe/dynamic-layers/meta-python/recipes-support/smem/smem_1.5.bb
+++ b/meta-oe/dynamic-layers/meta-python/recipes-support/smem/smem_1.5.bb
@@ -6,7 +6,7 @@ libraries and applications in a virtual memory system."
HOMEPAGE = "http://www.selenic.com/smem/"
SECTION = "Applications/System"
-LICENSE = "GPLv2+"
+LICENSE = "GPL-2.0-or-later"
LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
HG_CHANGESET = "98273ce331bb"
@@ -33,11 +33,11 @@ do_install() {
install -m 0644 ${S}/smem.8 ${D}/${mandir}/man8/
}
-RDEPENDS_${PN} = "python3-core python3-compression"
-RRECOMMENDS_${PN} = "python3-matplotlib python3-numpy"
+RDEPENDS:${PN} = "python3-core python3-compression"
+RRECOMMENDS:${PN} = "python3-matplotlib python3-numpy"
PACKAGE_BEFORE_PN = "smemcap"
-FILES_smemcap = "${bindir}/smemcap"
+FILES:smemcap = "${bindir}/smemcap"
BBCLASSEXTEND = "native"
diff --git a/meta-oe/dynamic-layers/networking-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend b/meta-oe/dynamic-layers/networking-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
index 09c478eb10..b71b7e4f8a 100644
--- a/meta-oe/dynamic-layers/networking-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
+++ b/meta-oe/dynamic-layers/networking-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
@@ -1,6 +1,2 @@
-RDEPENDS_packagegroup-meta-oe-devtools += "\
- valijson \
-"
-
-RDEPENDS_packagegroup-meta-oe-support_append_x86 = "${@bb.utils.contains('BBFILE_COLLECTIONS', 'filesystems-layer', ' open-vm-tools', '', d)}"
-RDEPENDS_packagegroup-meta-oe-support_append_x86-64 = "${@bb.utils.contains('BBFILE_COLLECTIONS', 'filesystems-layer', ' open-vm-tools', '', d)}"
+RDEPENDS:packagegroup-meta-oe-support:append:x86 = "${@bb.utils.contains('BBFILE_COLLECTIONS', 'filesystems-layer', ' open-vm-tools', '', d)}"
+RDEPENDS:packagegroup-meta-oe-support:append:x86-64 = "${@bb.utils.contains('BBFILE_COLLECTIONS', 'filesystems-layer', ' open-vm-tools', '', d)}"
diff --git a/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.4.bb b/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.4.bb
deleted file mode 100644
index 63930b960c..0000000000
--- a/meta-oe/dynamic-layers/networking-layer/recipes-devtools/valijson/valijson_0.4.bb
+++ /dev/null
@@ -1,36 +0,0 @@
-SUMMARY = "Header-only C++ library for JSON Schema validation"
-HOMEPAGE = "https://github.com/tristanpenman/valijson"
-LICENSE = "BSD-2-Clause"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=015106c62262b2383f6c72063f0998f2"
-
-SRC_URI = "git://github.com/tristanpenman/valijson.git"
-SRCREV = "9183462118f58a3ca4be82b8f656f18707229737"
-
-S = "${WORKDIR}/git"
-
-inherit cmake
-
-EXTRA_OECMAKE = "-DINSTALL_HEADERS=1 -DBUILD_TESTS=0"
-
-DEPENDS = "curlpp"
-
-# valijson is a header only C++ library, so the main package will be empty.
-RDEPENDS_${PN}-dev = ""
-
-BBCLASSEXTEND = "native nativesdk"
-
-do_install() {
- install -d ${D}${includedir}/compat
- install -d ${D}${includedir}/valijson
- install -d ${D}${includedir}/valijson/adapters
- install -d ${D}${includedir}/valijson/constraints
- install -d ${D}${includedir}/valijson/internal
- install -d ${D}${includedir}/valijson/utils
-
- install -m 0644 ${S}/include/compat/* ${D}${includedir}/compat
- install -D -m 0644 ${S}/include/valijson/*.hpp -t ${D}${includedir}/valijson
- install -D -m 0644 ${S}/include/valijson/adapters/*.hpp -t ${D}${includedir}/valijson/adapters
- install -D -m 0644 ${S}/include/valijson/constraints/*.hpp -t ${D}${includedir}/valijson/constraints
- install -D -m 0644 ${S}/include/valijson/internal/*.hpp -t ${D}${includedir}/valijson/internal
- install -D -m 0644 ${S}/include/valijson/utils/*.hpp -t ${D}${includedir}/valijson/utils
-}
diff --git a/meta-oe/dynamic-layers/perl-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend b/meta-oe/dynamic-layers/perl-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
index 2aee5975d7..5c48fd5c03 100644
--- a/meta-oe/dynamic-layers/perl-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
+++ b/meta-oe/dynamic-layers/perl-layer/recipes-core/packagegroups/packagegroup-meta-oe.bbappend
@@ -1 +1 @@
-RDEPENDS_packagegroup-meta-oe-support += "debsums rasdaemon"
+RDEPENDS:packagegroup-meta-oe-support += "rasdaemon"
diff --git a/meta-oe/dynamic-layers/perl-layer/recipes-support/debsums/debsums_2.2.2.bb b/meta-oe/dynamic-layers/perl-layer/recipes-support/debsums/debsums_2.2.2.bb
deleted file mode 100644
index 07ab5e62b1..0000000000
--- a/meta-oe/dynamic-layers/perl-layer/recipes-support/debsums/debsums_2.2.2.bb
+++ /dev/null
@@ -1,56 +0,0 @@
-SUMMARY = "Miscellaneous utilities specific to Debian"
-SUMMARY_${PN}-cron = "Cron scripts to control automatic debsum checking"
-DESCRIPTION = "A tool for verification of installed package files against \
-MD5 checksums debsums can verify the integrity of installed package files \
-against MD5 checksums installed by the package, or generated from a .deb \
-archive."
-DESCRIPTION_${PN}-cron = "Cron scripts to control automatic system integrity \
-checking via debsums."
-SECTION = "base"
-LICENSE = "GPLv2"
-LIC_FILES_CHKSUM = "file://debian/copyright;md5=770d751553e6559e9eaefd2e11ccf7e9"
-
-SRC_URI = "http://snapshot.debian.org/archive/debian/20170530T212108Z/pool/main/d/debsums/debsums_2.2.2.tar.xz"
-SRC_URI[md5sum] = "82b0710855a7e5212d4358163a269e79"
-SRC_URI[sha256sum] = "aa61896f93a6bbfe0161c21dcd67529ae8e1ec8c3ccf244523c52c4ad8253d97"
-
-# the package is taken from snapshots.debian.org; that source is static and goes stale
-# so we check the latest upstream from a directory that does get updated
-UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/d/${BPN}/"
-
-inherit perlnative gettext
-
-DEPENDS += "po4a-native"
-
-do_install() {
- install -d ${D}/${sysconfdir}/cron.daily ${D}/${sysconfdir}/cron.weekly
- install -d ${D}/${sysconfdir}/cron.monthly ${D}${sbindir} ${D}${bindir}
- install -d ${D}${mandir}/man1 ${D}${mandir}/man8
- install -m 0755 debsums ${D}${bindir}/
- install -m 0755 rdebsums ${D}${bindir}/
- install -m 0755 debsums_init ${D}${sbindir}
- install -m 0644 man/debsums.1 ${D}${mandir}/man1/
- install -m 0644 man/rdebsums.1 ${D}${mandir}/man1/
- install -m 0644 man/debsums_init.8 ${D}${mandir}/man8/
- install -m 0644 debian/cron.daily \
- ${D}/${sysconfdir}/cron.daily/debsums
- install -m 0644 debian/cron.weekly \
- ${D}/${sysconfdir}/cron.weekly/debsums
- install -m 0644 debian/cron.monthly \
- ${D}/${sysconfdir}/cron.monthly/debsums
- # Must exist, defaults to empty.
- touch ${D}/${sysconfdir}/debsums-ignore
-}
-
-PACKAGES =+ "${PN}-cron"
-
-RDEPENDS_${PN} = "dpkg dpkg-perl libfile-fnmatch-perl perl \
- perl-module-constant perl-module-digest-md5 \
- perl-module-errno perl-module-fcntl \
- perl-module-file-basename perl-module-file-copy \
- perl-module-file-find perl-module-file-glob \
- perl-module-file-path perl-module-file-spec \
- perl-module-file-temp perl-module-getopt-long \
- perl-module-posix"
-
-FILES_${PN}-cron = "${sysconfdir}/cron.*"
diff --git a/meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/files/0001-Fix-system-header-includes.patch b/meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/files/0001-Fix-system-header-includes.patch
deleted file mode 100644
index 0164321312..0000000000
--- a/meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/files/0001-Fix-system-header-includes.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-From 18786db1ad03716267927d983c83275469a1478a Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Fri, 8 May 2020 12:27:19 -0700
-Subject: [PATCH] Fix system header includes
-
-Use poll.h instead of sys/poll.h
-Fixes
-warning: #warning redirecting incorrect #include <sys/poll.h> to <poll.h> [-Wcpp]
-
-Include limits.h for PATH_MAX
-
-Fixes
-ras-events.c:359:16: error: 'PATH_MAX' undeclared (first use in this function)
- 359 | char pipe_raw[PATH_MAX];
- | ^~~~~~~~
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- ras-events.c | 3 ++-
- 1 file changed, 2 insertions(+), 1 deletion(-)
-
-diff --git a/ras-events.c b/ras-events.c
-index 511c93d..400e740 100644
---- a/ras-events.c
-+++ b/ras-events.c
-@@ -18,13 +18,14 @@
- #include <dirent.h>
- #include <errno.h>
- #include <fcntl.h>
-+#include <limits.h>
-+#include <poll.h>
- #include <stdio.h>
- #include <stdlib.h>
- #include <string.h>
- #include <unistd.h>
- #include <sys/stat.h>
- #include <sys/types.h>
--#include <sys/poll.h>
- #include <signal.h>
- #include <sys/signalfd.h>
- #include "libtrace/kbuffer.h"
---
-2.26.2
-
diff --git a/meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/rasdaemon_0.6.7.bb b/meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/rasdaemon_0.8.0.bb
index 39ed3d1071..c083a3a37a 100644
--- a/meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/rasdaemon_0.6.7.bb
+++ b/meta-oe/dynamic-layers/perl-layer/recipes-support/rasdaemon/rasdaemon_0.8.0.bb
@@ -1,18 +1,18 @@
DESCRIPTION = "Tools to provide a way to get Platform Reliability, Availability and Serviceability (RAS) reports made via the Kernel tracing events"
HOMEPAGE = "http://git.infradead.org/users/mchehab/rasdaemon.git"
-LICENSE = "GPLv2"
+LICENSE = "GPL-2.0-only"
LIC_FILES_CHKSUM = "file://COPYING;md5=d3070efe0afa3dc41608bd82c00bb0dc"
-SRC_URI = "git://github.com/mchehab/rasdaemon.git;branch=master \
- file://0001-Fix-system-header-includes.patch \
+SRC_URI = "git://github.com/mchehab/rasdaemon.git;branch=master;protocol=https \
file://rasdaemon.service \
file://init"
-SRCREV = "aa96737648d867a3d73e4151d05b54bbab494605"
+SRCREV = "4e83b848e7961af25028f3a2cecf37a63279a2bf"
S = "${WORKDIR}/git"
-RDEPENDS_${BPN} = "perl perl-module-file-basename perl-module-file-find perl-module-file-spec perl-module-getopt-long \
+DEPENDS = "libtraceevent"
+RDEPENDS:${BPN} = "perl perl-module-file-basename perl-module-file-find perl-module-file-spec perl-module-getopt-long \
perl-module-posix perl-module-file-glob libdbi-perl libdbd-sqlite-perl"
inherit autotools pkgconfig update-rc.d systemd
@@ -29,27 +29,23 @@ PACKAGECONFIG[hisi-ns-decode] = "--enable-hisi-ns-decode,--disable-hisi-ns-decod
PACKAGECONFIG[non-standard] = "--enable-non-standard,--disable-non-standard"
PACKAGECONFIG[abrt-report] = "--enable-abrt-report,--disable-abrt-report"
-DEPENDS_append_libc-musl = " argp-standalone"
-LDFLAGS_append_libc-musl = " -largp"
+DEPENDS:append:libc-musl = " argp-standalone"
+LDFLAGS:append:libc-musl = " -largp"
-do_configure_prepend () {
- ( cd ${S}; autoreconf -vfi )
-}
-
-do_install_append() {
+do_install:append() {
install -d ${D}${sysconfdir}/init.d
install -m 755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/rasdaemon
install -d ${D}${systemd_unitdir}/system
install -m 0644 ${WORKDIR}/rasdaemon.service ${D}${systemd_unitdir}/system
}
-FILES_${PN} += "${sbindir}/rasdaemon \
+FILES:${PN} += "${sbindir}/rasdaemon \
${sysconfdir}/init.d \
${systemd_unitdir}/system/rasdaemon.service"
-SYSTEMD_SERVICE_${PN} = "rasdaemon.service"
+SYSTEMD_SERVICE:${PN} = "rasdaemon.service"
SYSTEMD_AUTO_ENABLE = "enable"
INITSCRIPT_PACKAGES = "${PN}"
-INITSCRIPT_NAME_${PN} = "rasdaemon"
-INITSCRIPT_PARAMS_${PN} = "defaults 89"
+INITSCRIPT_NAME:${PN} = "rasdaemon"
+INITSCRIPT_PARAMS:${PN} = "defaults 89"
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/10-adbd-configfs.conf b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/10-adbd-configfs.conf
new file mode 100644
index 0000000000..ddf155a907
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/10-adbd-configfs.conf
@@ -0,0 +1,4 @@
+[Service]
+ExecStartPre=/usr/bin/android-gadget-setup
+ExecStartPost=/usr/bin/android-gadget-start
+ExecStopPost=/usr/bin/android-gadget-cleanup
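This drop-in lands in the android-tools-adbd.service.d directory created by the recipe below, and systemd merges it with the base android-tools-adbd.service unit at load time, so adbd only starts once the configfs gadget is set up and the gadget is torn down again after the service stops. To inspect the merged result on a running target (assuming systemd and this package are installed), the usual command is:

    systemctl cat android-tools-adbd.service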
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-cleanup b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-cleanup
new file mode 100644
index 0000000000..517227d4a6
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-cleanup
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+[ -d /sys/kernel/config/usb_gadget ] || exit 0
+
+cd /sys/kernel/config/usb_gadget
+
+cd adb
+
+echo "" > UDC || true
+
+killall adbd || true
+
+umount /dev/usb-ffs/adb
+
+rm configs/c.1/ffs.usb0
+
+rmdir configs/c.1/strings/0x409
+rmdir configs/c.1
+
+rmdir functions/ffs.usb0
+rmdir strings/0x409
+
+cd ..
+rmdir adb
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-setup b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-setup
new file mode 100644
index 0000000000..e44d1bacbe
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-setup
@@ -0,0 +1,35 @@
+#!/bin/sh
+
+set -e
+
+manufacturer=RPB
+model="Android device"
+serial=0123456789ABCDEF
+
+if [ -r /etc/android-gadget-setup.machine ] ; then
+ . /etc/android-gadget-setup.machine
+fi
+
+[ -d /sys/kernel/config/usb_gadget ] || modprobe libcomposite
+
+cd /sys/kernel/config/usb_gadget
+
+[ -d adb ] && /usr/bin/android-gadget-cleanup || true
+
+mkdir adb
+cd adb
+
+mkdir configs/c.1
+mkdir functions/ffs.usb0
+mkdir strings/0x409
+mkdir configs/c.1/strings/0x409
+echo 0x18d1 > idVendor
+echo 0xd002 > idProduct
+echo "$serial" > strings/0x409/serialnumber
+echo "$manufacturer" > strings/0x409/manufacturer
+echo "$model" > strings/0x409/product
+echo "Conf 1" > configs/c.1/strings/0x409/configuration
+ln -s functions/ffs.usb0 configs/c.1
+
+mkdir -p /dev/usb-ffs/adb
+mount -t functionfs usb0 /dev/usb-ffs/adb
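The setup script sources /etc/android-gadget-setup.machine when it is readable, so a BSP can override the USB identity strings without patching the script itself. A minimal, purely illustrative override (the file name matches the path used above; the values and the device-tree path are assumptions, not part of this changeset) might look like:

    # /etc/android-gadget-setup.machine -- sourced by android-gadget-setup
    manufacturer="ExampleCorp"
    model="Example DevBoard"
    serial="$(cat /proc/device-tree/serial-number 2>/dev/null || echo 0123456789ABCDEF)"

The matching recipe further down installs such a file into ${sysconfdir} only when it is shipped alongside the other sources.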
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-start b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-start
new file mode 100644
index 0000000000..ca6c3df275
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs/android-gadget-start
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+set -e
+
+sleep 3
+
+ls /sys/class/udc/ > /sys/kernel/config/usb_gadget/adb/UDC
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs_1.0.bb b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs_1.0.bb
new file mode 100644
index 0000000000..1c26c7207b
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf-configfs_1.0.bb
@@ -0,0 +1,35 @@
+DESCRIPTION = "Various utilities from Android - corresponding configuration files for using ConfigFS"
+SECTION = "console/utils"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+SRC_URI = " \
+ file://android-gadget-setup \
+ file://android-gadget-start \
+ file://android-gadget-cleanup \
+ file://10-adbd-configfs.conf \
+"
+
+PACKAGE_ARCH = "${MACHINE_ARCH}"
+
+do_install() {
+ install -d ${D}${bindir}
+ install -m 0755 ${WORKDIR}/android-gadget-setup ${D}${bindir}
+ install -m 0755 ${WORKDIR}/android-gadget-start ${D}${bindir}
+ install -m 0755 ${WORKDIR}/android-gadget-cleanup ${D}${bindir}
+
+ if [ -r ${WORKDIR}/android-gadget-setup.machine ] ; then
+ install -d ${D}${sysconfdir}
+ install -m 0644 ${WORKDIR}/android-gadget-setup.machine ${D}${sysconfdir}
+ fi
+
+ install -d ${D}${systemd_unitdir}/system/android-tools-adbd.service.d
+ install -m 0644 ${WORKDIR}/10-adbd-configfs.conf ${D}${systemd_unitdir}/system/android-tools-adbd.service.d
+}
+
+FILES:${PN} += " \
+ ${systemd_unitdir}/system/ \
+"
+
+PROVIDES += "android-tools-conf"
+RPROVIDES:${PN} = "android-tools-conf"
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf/android-gadget-setup b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf/android-gadget-setup
new file mode 100644
index 0000000000..26cf30eddd
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf/android-gadget-setup
@@ -0,0 +1,37 @@
+#!/bin/sh
+
+[ ! -e /dev/pts ] && mkdir -p /dev/pts
+[ ! -e /dev/pts/0 ] && mount devpts /dev/pts -t devpts
+
+# TODO enable the lines below once we have support for getprop
+# retrieve the product info from Android
+# manufacturer=$(getprop ro.product.manufacturer Android)
+# model=$(getprop ro.product.model Android)
+# serial=$(getprop ro.serialno 0123456789ABCDEF)
+
+# The settings below are needed in order to use FunctionFS for ADB; tested to work with 3.4+ kernels
+if grep -q functionfs /proc/filesystems; then
+ mkdir -p /dev/usb-ffs/adb
+ mount -t functionfs adb /dev/usb-ffs/adb
+    # android-gadget-setup doesn't provide the two settings below, and without them ADB won't work, so we provide them here.
+ echo adb > /sys/class/android_usb/android0/f_ffs/aliases
+ echo ffs > /sys/class/android_usb/android0/functions
+fi
+
+manufacturer="$(cat /system/build.prop | grep -o 'ro.product.manufacturer=.*' | cut -d'=' -f 2)"
+model="$(cat /system/build.prop | grep -o 'ro.product.model=.*' | cut -d'=' -f 2)"
+# get the device serial number from /proc/cmdline directly (since we have no getprop on
+# GNU/Linux)
+serial="$(cat /proc/cmdline | sed 's/.*androidboot.serialno=//' | sed 's/ .*//')"
+
+echo $serial > /sys/class/android_usb/android0/iSerial
+echo $manufacturer > /sys/class/android_usb/android0/iManufacturer
+echo $model > /sys/class/android_usb/android0/iProduct
+
+echo "0" > /sys/class/android_usb/android0/enable
+echo "18d1" > /sys/class/android_usb/android0/idVendor
+echo "D002" > /sys/class/android_usb/android0/idProduct
+echo "adb" > /sys/class/android_usb/android0/functions
+echo "1" > /sys/class/android_usb/android0/enable
+
+sleep 4
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf_1.0.bb b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf_1.0.bb
new file mode 100644
index 0000000000..b63ccbb080
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools-conf_1.0.bb
@@ -0,0 +1,18 @@
+DESCRIPTION = "Different utilities from Android - corresponding configuration files"
+SECTION = "console/utils"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+SRC_URI = "file://android-gadget-setup"
+
+do_install() {
+ install -d ${D}${bindir}
+ install -m 0755 ${WORKDIR}/android-gadget-setup ${D}${bindir}
+}
+
+python () {
+ pn = d.getVar('PN')
+ profprov = d.getVar("PREFERRED_PROVIDER_" + pn)
+ if profprov and pn != profprov:
+ raise bb.parse.SkipRecipe("PREFERRED_PROVIDER_%s set to %s, not %s" % (pn, profprov, pn))
+}
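The anonymous python block makes the two conf packages mutually exclusive: this legacy android_usb variant skips itself whenever a different provider is preferred. As a sketch (standard Yocto configuration assumed, e.g. local.conf or a distro .conf), selecting the ConfigFS variant instead would come down to:

    PREFERRED_PROVIDER_android-tools-conf = "android-tools-conf-configfs"

which works because the ConfigFS recipe above declares PROVIDES and RPROVIDES for android-tools-conf.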
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0001-Fixes-for-yocto-build.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0001-Fixes-for-yocto-build.patch
new file mode 100644
index 0000000000..1d18d47ec3
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0001-Fixes-for-yocto-build.patch
@@ -0,0 +1,164 @@
+From 5de85f8273c7284aa93e35c40f025d4d43d42df9 Mon Sep 17 00:00:00 2001
+From: Etienne Cordonnier <ecordonnier@snap.com>
+Date: Thu, 16 Mar 2023 18:59:35 +0100
+Subject: [PATCH] Fixes for yocto build
+
+Signed-off-by: JJ Robertson <jrobertson@snapchat.com>
+[rebased on version 29]
+Signed-off-by: Etienne Cordonnier <ecordonnier@snap.com>
+
+---
+Upstream-Status: Pending
+
+ system/core/adb/daemon/file_sync_service.cpp | 4 ++--
+ system/core/adb/daemon/framebuffer_service.h | 2 --
+ system/core/adb/daemon/main.cpp | 5 +++--
+ system/core/adb/daemon/restart_service.h | 2 --
+ system/core/adb/daemon/services.cpp | 6 ++----
+ system/core/adb/daemon/shell_service.cpp | 5 ++++-
+ system/core/adb/types.h | 1 +
+ 7 files changed, 12 insertions(+), 13 deletions(-)
+
+diff --git a/system/core/adb/daemon/file_sync_service.cpp b/system/core/adb/daemon/file_sync_service.cpp
+index d6af7087..92e7087a 100644
+--- a/system/core/adb/daemon/file_sync_service.cpp
++++ b/system/core/adb/daemon/file_sync_service.cpp
+@@ -111,7 +111,7 @@ static bool secure_mkdirs(const std::string& path) {
+ partial_path += path_component;
+
+ if (should_use_fs_config(partial_path)) {
+- fs_config(partial_path.c_str(), 1, nullptr, &uid, &gid, &mode, &capabilities);
++ // fs_config(partial_path.c_str(), 1, nullptr, &uid, &gid, &mode, &capabilities);
+ }
+ if (adb_mkdir(partial_path.c_str(), mode) == -1) {
+ if (errno != EEXIST) {
+@@ -469,7 +469,7 @@ static bool do_send(int s, const std::string& spec, std::vector<char>& buffer) {
+ uint64_t capabilities = 0;
+ if (should_use_fs_config(path)) {
+ unsigned int broken_api_hack = mode;
+- fs_config(path.c_str(), 0, nullptr, &uid, &gid, &broken_api_hack, &capabilities);
++ // fs_config(path.c_str(), 0, nullptr, &uid, &gid, &broken_api_hack, &capabilities);
+ mode = broken_api_hack;
+ }
+
+diff --git a/system/core/adb/daemon/framebuffer_service.h b/system/core/adb/daemon/framebuffer_service.h
+index bab44be3..264da597 100644
+--- a/system/core/adb/daemon/framebuffer_service.h
++++ b/system/core/adb/daemon/framebuffer_service.h
+@@ -18,6 +18,4 @@
+
+ #include "adb_unique_fd.h"
+
+-#if defined(__ANDROID__)
+ void framebuffer_service(unique_fd fd);
+-#endif
+diff --git a/system/core/adb/daemon/main.cpp b/system/core/adb/daemon/main.cpp
+index 3322574c..e807d13d 100644
+--- a/system/core/adb/daemon/main.cpp
++++ b/system/core/adb/daemon/main.cpp
+@@ -208,6 +208,9 @@ int adbd_main(int server_port) {
+ umask(0);
+
+ signal(SIGPIPE, SIG_IGN);
++ signal(SIGINT, SIG_DFL);
++ signal(SIGQUIT, SIG_DFL);
++
+
+ #if defined(__BIONIC__)
+ auto fdsan_level = android_fdsan_get_error_level();
+@@ -254,13 +257,11 @@ int adbd_main(int server_port) {
+
+ bool is_usb = false;
+
+-#if defined(__ANDROID__)
+ if (access(USB_FFS_ADB_EP0, F_OK) == 0) {
+ // Listen on USB.
+ usb_init();
+ is_usb = true;
+ }
+-#endif
+
+ // If one of these properties is set, also listen on that port.
+ // If one of the properties isn't set and we couldn't listen on usb, listen
+diff --git a/system/core/adb/daemon/restart_service.h b/system/core/adb/daemon/restart_service.h
+index 19840bd5..7a97614b 100644
+--- a/system/core/adb/daemon/restart_service.h
++++ b/system/core/adb/daemon/restart_service.h
+@@ -18,9 +18,7 @@
+
+ #include "adb_unique_fd.h"
+
+-#if defined(__ANDROID__)
+ void restart_root_service(unique_fd fd);
+ void restart_unroot_service(unique_fd fd);
+ void restart_tcp_service(unique_fd fd, int port);
+ void restart_usb_service(unique_fd fd);
+-#endif
+diff --git a/system/core/adb/daemon/services.cpp b/system/core/adb/daemon/services.cpp
+index 4ec90d27..d8541c23 100644
+--- a/system/core/adb/daemon/services.cpp
++++ b/system/core/adb/daemon/services.cpp
+@@ -156,7 +156,7 @@ static void spin_service(unique_fd fd) {
+ }
+ #endif
+ // Fall through
+- std::string cmd = "/system/bin/reboot ";
++ std::string cmd = "/sbin/reboot ";
+ cmd += name;
+ return StartSubprocess(cmd, nullptr, SubprocessType::kRaw, SubprocessProtocol::kNone);
+ }
+@@ -265,11 +265,10 @@ unique_fd daemon_service_to_fd(std::string_view name, atransport* transport) {
+ }
+ #endif
+
+-#if defined(__ANDROID__)
+ if (name.starts_with("framebuffer:")) {
+ return create_service_thread("fb", framebuffer_service);
+ } else if (android::base::ConsumePrefix(&name, "remount:")) {
+- std::string cmd = "/system/bin/remount ";
++ std::string cmd = "/sbin/remount ";
+ cmd += name;
+ return StartSubprocess(cmd, nullptr, SubprocessType::kRaw, SubprocessProtocol::kNone);
+ } else if (android::base::ConsumePrefix(&name, "reboot:")) {
+@@ -303,7 +302,6 @@ unique_fd daemon_service_to_fd(std::string_view name, atransport* transport) {
+ } else if (name.starts_with("usb:")) {
+ return create_service_thread("usb", restart_usb_service);
+ }
+-#endif
+
+ if (android::base::ConsumePrefix(&name, "dev:")) {
+ return unique_fd{unix_open(name, O_RDWR | O_CLOEXEC)};
+diff --git a/system/core/adb/daemon/shell_service.cpp b/system/core/adb/daemon/shell_service.cpp
+index f62032d0..ebcfe18d 100644
+--- a/system/core/adb/daemon/shell_service.cpp
++++ b/system/core/adb/daemon/shell_service.cpp
+@@ -273,13 +273,16 @@ bool Subprocess::ForkAndExec(std::string* error) {
+ env["HOSTNAME"] = GetHostName();
+ env["LOGNAME"] = pw->pw_name;
+ env["SHELL"] = pw->pw_shell;
+- env["TMPDIR"] = "/data/local/tmp";
+ env["USER"] = pw->pw_name;
+ }
+
+ if (!terminal_type_.empty()) {
+ env["TERM"] = terminal_type_;
+ }
++ if (env.find("PS1") == env.end()) {
++ env["PS1"] = "\\h:\\w\\$ ";
++ }
++
+
+ std::vector<std::string> joined_env;
+ for (const auto& it : env) {
+diff --git a/system/core/adb/types.h b/system/core/adb/types.h
+index c619fffc..f8e0f521 100644
+--- a/system/core/adb/types.h
++++ b/system/core/adb/types.h
+@@ -22,6 +22,7 @@
+ #include <memory>
+ #include <utility>
+ #include <vector>
++#include <string.h>
+
+ #include <android-base/logging.h>
+
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0002-android-tools-modifications-to-make-it-build-in-yoct.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0002-android-tools-modifications-to-make-it-build-in-yoct.patch
new file mode 100644
index 0000000000..b719acec4c
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0002-android-tools-modifications-to-make-it-build-in-yoct.patch
@@ -0,0 +1,375 @@
+From fe211cbc305a9316c1d4da2f086d6a50f1c92188 Mon Sep 17 00:00:00 2001
+From: Etienne Cordonnier <ecordonnier@snap.com>
+Date: Fri, 17 Mar 2023 10:33:11 +0100
+Subject: [PATCH] android-tools: modifications to make it build in yocto
+
+- Fix relocation errors on aarch64, e.g.:
+"relocation R_AARCH64_ADR_PREL_PG_HI21 against symbol `_ZTV19SparseFileBufSource' which may bind externally can not be used when making a shared object; recompile with -fPIC"
+
+- use ln -f to avoid error "File exists" on incremental builds
+
+- fix missing headers
+
+Signed-off-by: Etienne Cordonnier <ecordonnier@snap.com>
+
+---
+Upstream-Status: Pending
+
+ debian/external/boringssl/libcrypto.mk | 1 +
+ debian/system/core/append2simg.mk | 2 +-
+ debian/system/core/fastboot.mk | 2 +-
+ debian/system/core/img2simg.mk | 2 +-
+ debian/system/core/libbacktrace.mk | 8 ++++----
+ debian/system/core/libbase.mk | 5 +++--
+ debian/system/core/libcutils.mk | 5 +++--
+ debian/system/core/liblog.mk | 3 ++-
+ debian/system/core/libsparse.mk | 5 +++--
+ debian/system/core/libutils.mk | 2 +-
+ debian/system/core/libziparchive.mk | 5 +++--
+ debian/system/core/simg2img.mk | 2 +-
+ debian/system/core/simg2simg.mk | 2 +-
+ frameworks/native/libs/adbd_auth/adbd_auth.cpp | 2 ++
+ system/core/adb/adb_listeners.cpp | 12 ++++++------
+ system/core/adb/transport_local.cpp | 2 +-
+ 16 files changed, 34 insertions(+), 26 deletions(-)
+
+diff --git a/debian/external/boringssl/libcrypto.mk b/debian/external/boringssl/libcrypto.mk
+index c0ea54f4..b3a77bfd 100644
+--- a/debian/external/boringssl/libcrypto.mk
++++ b/debian/external/boringssl/libcrypto.mk
+@@ -27,6 +27,7 @@ CPPFLAGS += \
+ -DBORINGSSL_IMPLEMENTATION \
+ -DBORINGSSL_SHARED_LIBRARY \
+ -DOPENSSL_SMALL \
++ -DOPENSSL_NO_ASM \
+ -Iexternal/boringssl/src/crypto \
+ -Iexternal/boringssl/src/include \
+
+diff --git a/debian/system/core/append2simg.mk b/debian/system/core/append2simg.mk
+index 1599bdb8..598c751d 100644
+--- a/debian/system/core/append2simg.mk
++++ b/debian/system/core/append2simg.mk
+@@ -11,7 +11,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN/../lib/android' \
+ -lbase \
+ -llog \
+ -lpthread \
+diff --git a/debian/system/core/fastboot.mk b/debian/system/core/fastboot.mk
+index d5c9a285..a59ba1a4 100644
+--- a/debian/system/core/fastboot.mk
++++ b/debian/system/core/fastboot.mk
+@@ -49,7 +49,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN/../lib/android' \
+ -fuse-ld=gold \
+ -lbase \
+ -lcutils \
+diff --git a/debian/system/core/img2simg.mk b/debian/system/core/img2simg.mk
+index 11adf014..8baf5ba5 100644
+--- a/debian/system/core/img2simg.mk
++++ b/debian/system/core/img2simg.mk
+@@ -11,7 +11,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN/../lib/android' \
+ -lbase \
+ -llog \
+ -lpthread \
+diff --git a/debian/system/core/libbacktrace.mk b/debian/system/core/libbacktrace.mk
+index e3411d76..86e69874 100644
+--- a/debian/system/core/libbacktrace.mk
++++ b/debian/system/core/libbacktrace.mk
+@@ -1,4 +1,4 @@
+-include /usr/share/dpkg/architecture.mk
++include rules_yocto.mk
+
+ NAME = libbacktrace
+
+@@ -83,10 +84,9 @@ CPPFLAGS += \
+ LDFLAGS += \
+ -L/usr/lib/p7zip \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/p7zip \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN' \
+ -Wl,-soname,$(NAME).so.0 \
+- -l:7z.so \
++ -l7z \
+ -lbase \
+ -llog \
+ -lpthread \
+@@ -101,7 +101,7 @@ endif
+ build: $(OBJECTS_CXX) $(OBJECTS_ASSEMBLY) debian/out/external/libunwind/libunwind.a
+ mkdir -p debian/out/system/core
+ $(CXX) $^ -o debian/out/system/core/$(NAME).so.0 $(LDFLAGS)
+- cd debian/out/system/core && ln -s $(NAME).so.0 $(NAME).so
++ cd debian/out/system/core && ln -sf $(NAME).so.0 $(NAME).so
+
+ $(OBJECTS_CXX): %.o: %.cpp
+ $(CXX) -c -o $@ $< $(CXXFLAGS) $(CPPFLAGS)
+diff --git a/debian/system/core/libbase.mk b/debian/system/core/libbase.mk
+index d2b074ba..8a90d6de 100644
+--- a/debian/system/core/libbase.mk
++++ b/debian/system/core/libbase.mk
+@@ -1,3 +1,4 @@
++include rules_yocto.mk
+ NAME = libbase
+
+ SOURCES = \
+@@ -30,7 +31,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN' \
+ -Wl,-soname,$(NAME).so.0 \
+ -llog \
+ -lpthread \
+@@ -44,7 +45,7 @@ endif
+
+ build: $(OBJECTS)
+ $(CXX) $^ -o debian/out/system/core/$(NAME).so.0 $(LDFLAGS)
+- cd debian/out/system/core && ln -s $(NAME).so.0 $(NAME).so
++ cd debian/out/system/core && ln -sf $(NAME).so.0 $(NAME).so
+
+ $(OBJECTS): %.o: %.cpp
+ $(CXX) -c -o $@ $< $(CXXFLAGS) $(CPPFLAGS)
+diff --git a/debian/system/core/libcutils.mk b/debian/system/core/libcutils.mk
+index 9d928b56..c22b0965 100644
+--- a/debian/system/core/libcutils.mk
++++ b/debian/system/core/libcutils.mk
+@@ -1,3 +1,4 @@
++include rules_yocto.mk
+ NAME = libcutils
+
+ libcutils_nonwindows_sources = \
+@@ -47,7 +48,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN' \
+ -Wl,-soname,$(NAME).so.0 \
+ -lbase \
+ -llog \
+@@ -56,7 +57,7 @@ LDFLAGS += \
+
+ build: $(OBJECTS_C) $(OBJECTS_CXX)
+ $(CXX) $^ -o debian/out/system/core/$(NAME).so.0 $(LDFLAGS)
+- cd debian/out/system/core && ln -s $(NAME).so.0 $(NAME).so
++ cd debian/out/system/core && ln -sf $(NAME).so.0 $(NAME).so
+
+ $(OBJECTS_C): %.o: %.c
+ $(CC) -c -o $@ $< $(CFLAGS) $(CPPFLAGS)
+diff --git a/debian/system/core/liblog.mk b/debian/system/core/liblog.mk
+index f8c3d7fe..34a07341 100644
+--- a/debian/system/core/liblog.mk
++++ b/debian/system/core/liblog.mk
+@@ -1,3 +1,4 @@
++include rules_yocto.mk
+ NAME = liblog
+
+ liblog_sources = \
+@@ -35,7 +36,7 @@ LDFLAGS += \
+ build: $(OBJECTS)
+ mkdir -p debian/out/system/core
+ $(CXX) $^ -o debian/out/system/core/$(NAME).so.0 $(LDFLAGS)
+- cd debian/out/system/core && ln -s $(NAME).so.0 $(NAME).so
++ cd debian/out/system/core && ln -sf $(NAME).so.0 $(NAME).so
+
+ $(OBJECTS): %.o: %.cpp
+ $(CXX) -c -o $@ $< $(CXXFLAGS) $(CPPFLAGS)
+diff --git a/debian/system/core/libsparse.mk b/debian/system/core/libsparse.mk
+index c2b2694c..2da12b8e 100644
+--- a/debian/system/core/libsparse.mk
++++ b/debian/system/core/libsparse.mk
+@@ -1,3 +1,4 @@
++include rules_yocto.mk
+ NAME = libsparse
+
+ SOURCES = \
+@@ -19,7 +20,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN' \
+ -Wl,-soname,$(NAME).so.0 \
+ -lbase \
+ -lz \
+@@ -27,7 +28,7 @@ LDFLAGS += \
+
+ build: $(OBJECTS)
+ $(CXX) $^ -o debian/out/system/core/$(NAME).so.0 $(LDFLAGS)
+- cd debian/out/system/core && ln -s $(NAME).so.0 $(NAME).so
++ cd debian/out/system/core && ln -sf $(NAME).so.0 $(NAME).so
+
+ $(OBJECTS): %.o: %.cpp
+ $(CXX) -c -o $@ $< $(CXXFLAGS) $(CPPFLAGS)
+diff --git a/debian/system/core/libutils.mk b/debian/system/core/libutils.mk
+index c37b1d2d..332492c1 100644
+--- a/debian/system/core/libutils.mk
++++ b/debian/system/core/libutils.mk
+@@ -1,4 +1,4 @@
+-include /usr/share/dpkg/architecture.mk
++include rules_yocto.mk
+
+ NAME = libutils
+
+@@ -41,7 +41,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN' \
+ -Wl,-soname,$(NAME).so.0 \
+ -lbacktrace \
+ -lcutils \
+diff --git a/debian/system/core/libziparchive.mk b/debian/system/core/libziparchive.mk
+index 1b286b4e..1b7499be 100644
+--- a/debian/system/core/libziparchive.mk
++++ b/debian/system/core/libziparchive.mk
+@@ -1,3 +1,4 @@
++include rules_yocto.mk
+ NAME = libziparchive
+
+ SOURCES = \
+@@ -19,7 +20,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN' \
+ -Wl,-soname,$(NAME).so.0 \
+ -lbase \
+ -llog \
+@@ -29,7 +30,7 @@ LDFLAGS += \
+
+ build: $(OBJECTS)
+ $(CXX) $^ -o debian/out/system/core/$(NAME).so.0 $(LDFLAGS)
+- cd debian/out/system/core && ln -s $(NAME).so.0 $(NAME).so
++ cd debian/out/system/core && ln -sf $(NAME).so.0 $(NAME).so
+
+ $(OBJECTS): %.o: %.cc
+ $(CXX) -c -o $@ $< $(CXXFLAGS) $(CPPFLAGS)
+diff --git a/debian/system/core/simg2img.mk b/debian/system/core/simg2img.mk
+index f6e3f59d..df4f44f3 100644
+--- a/debian/system/core/simg2img.mk
++++ b/debian/system/core/simg2img.mk
+@@ -13,7 +13,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN/../lib/android' \
+ -lbase \
+ -llog \
+ -lpthread \
+diff --git a/debian/system/core/simg2simg.mk b/debian/system/core/simg2simg.mk
+index ed53b04c..646ab030 100644
+--- a/debian/system/core/simg2simg.mk
++++ b/debian/system/core/simg2simg.mk
+@@ -13,7 +13,7 @@ CPPFLAGS += \
+
+ LDFLAGS += \
+ -Ldebian/out/system/core \
+- -Wl,-rpath=/usr/lib/$(DEB_HOST_MULTIARCH)/android \
++ -Wl,-rpath='$$ORIGIN/../lib/android' \
+ -lbase \
+ -llog \
+ -lpthread \
+diff --git a/frameworks/native/libs/adbd_auth/adbd_auth.cpp b/frameworks/native/libs/adbd_auth/adbd_auth.cpp
+index a9c23110..6ca334ba 100644
+--- a/frameworks/native/libs/adbd_auth/adbd_auth.cpp
++++ b/frameworks/native/libs/adbd_auth/adbd_auth.cpp
+@@ -23,8 +23,10 @@
+ #include <sys/eventfd.h>
+ #include <sys/uio.h>
+
++#include <atomic>
+ #include <chrono>
+ #include <deque>
++#include <optional>
+ #include <string>
+ #include <string_view>
+ #include <tuple>
+diff --git a/system/core/adb/adb_listeners.cpp b/system/core/adb/adb_listeners.cpp
+index 29909a55..440cdbd4 100644
+--- a/system/core/adb/adb_listeners.cpp
++++ b/system/core/adb/adb_listeners.cpp
+@@ -109,7 +109,7 @@ static void listener_event_func(int _fd, unsigned ev, void* _l)
+ }
+
+ // Called as a transport disconnect function. |arg| is the raw alistener*.
+-static void listener_disconnect(void* arg, atransport*) EXCLUDES(listener_list_mutex) {
++static void listener_disconnect(void* arg, atransport*) {
+ std::lock_guard<std::mutex> lock(listener_list_mutex);
+ for (auto iter = listener_list.begin(); iter != listener_list.end(); ++iter) {
+ if (iter->get() == arg) {
+@@ -121,7 +121,7 @@ static void listener_disconnect(void* arg, atransport*) EXCLUDES(listener_list_m
+ }
+
+ // Write the list of current listeners (network redirections) into a string.
+-std::string format_listeners() EXCLUDES(listener_list_mutex) {
++std::string format_listeners() {
+ std::lock_guard<std::mutex> lock(listener_list_mutex);
+ std::string result;
+ for (auto& l : listener_list) {
+@@ -140,7 +140,7 @@ std::string format_listeners() EXCLUDES(listener_list_mutex) {
+ }
+
+ InstallStatus remove_listener(const char* local_name, atransport* transport)
+- EXCLUDES(listener_list_mutex) {
++{
+ std::lock_guard<std::mutex> lock(listener_list_mutex);
+ for (auto iter = listener_list.begin(); iter != listener_list.end(); ++iter) {
+ if (local_name == (*iter)->local_name) {
+@@ -151,7 +151,7 @@ InstallStatus remove_listener(const char* local_name, atransport* transport)
+ return INSTALL_STATUS_LISTENER_NOT_FOUND;
+ }
+
+-void remove_all_listeners() EXCLUDES(listener_list_mutex) {
++void remove_all_listeners() {
+ std::lock_guard<std::mutex> lock(listener_list_mutex);
+ auto iter = listener_list.begin();
+ while (iter != listener_list.end()) {
+@@ -164,7 +164,7 @@ void remove_all_listeners() EXCLUDES(listener_list_mutex) {
+ }
+ }
+
+-void close_smartsockets() EXCLUDES(listener_list_mutex) {
++void close_smartsockets() {
+ std::lock_guard<std::mutex> lock(listener_list_mutex);
+ auto pred = [](const std::unique_ptr<alistener>& listener) {
+ return listener->local_name == "*smartsocket*";
+@@ -174,7 +174,7 @@ void close_smartsockets() EXCLUDES(listener_list_mutex) {
+
+ InstallStatus install_listener(const std::string& local_name, const char* connect_to,
+ atransport* transport, int no_rebind, int* resolved_tcp_port,
+- std::string* error) EXCLUDES(listener_list_mutex) {
++ std::string* error) {
+ std::lock_guard<std::mutex> lock(listener_list_mutex);
+ for (auto& l : listener_list) {
+ if (local_name == l->local_name) {
+diff --git a/system/core/adb/transport_local.cpp b/system/core/adb/transport_local.cpp
+index c7261860..5988ec4d 100644
+--- a/system/core/adb/transport_local.cpp
++++ b/system/core/adb/transport_local.cpp
+@@ -333,7 +333,7 @@ struct EmulatorConnection : public FdConnection {
+
+ /* Only call this function if you already hold local_transports_lock. */
+ static atransport* find_emulator_transport_by_adb_port_locked(int adb_port)
+- REQUIRES(local_transports_lock) {
++{
+ auto it = local_transports.find(adb_port);
+ if (it == local_transports.end()) {
+ return nullptr;
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0003-Update-usage-of-usbdevfs_urb-to-match-new-kernel-UAP.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0003-Update-usage-of-usbdevfs_urb-to-match-new-kernel-UAP.patch
new file mode 100644
index 0000000000..ea934f44d8
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0003-Update-usage-of-usbdevfs_urb-to-match-new-kernel-UAP.patch
@@ -0,0 +1,128 @@
+From 02b6b6977d80af4b9b806054fadb5a06cedf011d Mon Sep 17 00:00:00 2001
+From: Etienne Cordonnier <ecordonnier@snap.com>
+Date: Tue, 14 Mar 2023 11:33:50 +0100
+Subject: [PATCH] Update usage of usbdevfs_urb to match new kernel UAPI
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+Linux kernel API has been changed by commit 94dfc73e7cf4 ("treewide: uapi: Replace zero-length arrays with flexible-array members")
+where zero-length array iso_frame_desc in struct usbdevfs_urb was replaced with a proper flexible-array member.
+
+Current USB API usage causes a compilation error at Linux 6.0:
+
+In file included from /home/mae/.cache/kiss/proc/121205/build/android-tools/vendor/adb/client/usb_linux.cpp:28:
+/usr/include/linux/usbdevice_fs.h:134:41: error: flexible array member ‘usbdevfs_urb::iso_frame_desc’ not at end of ‘struct usb_handle’
+ 134 | struct usbdevfs_iso_packet_desc iso_frame_desc[];
+ | ^~~~~~~~~~~~~~
+/home/mae/.cache/kiss/proc/121205/build/android-tools/vendor/adb/client/usb_linux.cpp:76:18: note: next member ‘usbdevfs_urb usb_handle::urb_out’ declared here
+ 76 | usbdevfs_urb urb_out;
+ | ^~~~~~~
+/home/mae/.cache/kiss/proc/121205/build/android-tools/vendor/adb/client/usb_linux.cpp:61:8: note: in the definition of ‘struct usb_handle’
+ 61 | struct usb_handle {
+ | ^~~~~~~~~~
+
+Fix it by using pointers to a struct with flexible-array members.
+Current fix works both with the old and the new API.
+
+See https://github.com/nmeum/android-tools/issues/74 for more context.
+
+Tested: built on Linux against kernel 5.19 and 6.0; 'adb shell' over USB
+cable
+Acked-by: Gustavo A. R. Silva gustavoars@kernel.org
+Change-Id: I7f0f7b35d9a3ab980d3520b541b60c7857a6b101
+Signed-off-by: Anatol Pomozov <anatol.pomozov@gmail.com>
+
+[Backported on version 10]
+Signed-off-by: Etienne Cordonnier <ecordonnier@snap.com>
+
+---
+Upstream-Status: Pending
+
+ system/core/adb/client/usb_linux.cpp | 24 ++++++++++++++----------
+ 1 file changed, 14 insertions(+), 10 deletions(-)
+
+diff --git a/system/core/adb/client/usb_linux.cpp b/system/core/adb/client/usb_linux.cpp
+index 343e7b59..6a4479f3 100644
+--- a/system/core/adb/client/usb_linux.cpp
++++ b/system/core/adb/client/usb_linux.cpp
+@@ -71,8 +71,8 @@ struct usb_handle : public ::usb_handle {
+ unsigned zero_mask;
+ unsigned writeable = 1;
+
+- usbdevfs_urb urb_in;
+- usbdevfs_urb urb_out;
++ usbdevfs_urb *urb_in;
++ usbdevfs_urb *urb_out;
+
+ bool urb_in_busy = false;
+ bool urb_out_busy = false;
+@@ -305,7 +305,7 @@ static int usb_bulk_write(usb_handle* h, const void* data, int len) {
+ std::unique_lock<std::mutex> lock(h->mutex);
+ D("++ usb_bulk_write ++");
+
+- usbdevfs_urb* urb = &h->urb_out;
++ usbdevfs_urb* urb = h->urb_out;
+ memset(urb, 0, sizeof(*urb));
+ urb->type = USBDEVFS_URB_TYPE_BULK;
+ urb->endpoint = h->ep_out;
+@@ -344,7 +344,7 @@ static int usb_bulk_read(usb_handle* h, void* data, int len) {
+ std::unique_lock<std::mutex> lock(h->mutex);
+ D("++ usb_bulk_read ++");
+
+- usbdevfs_urb* urb = &h->urb_in;
++ usbdevfs_urb* urb = h->urb_in;
+ memset(urb, 0, sizeof(*urb));
+ urb->type = USBDEVFS_URB_TYPE_BULK;
+ urb->endpoint = h->ep_in;
+@@ -389,7 +389,7 @@ static int usb_bulk_read(usb_handle* h, void* data, int len) {
+ }
+ D("[ urb @%p status = %d, actual = %d ]", out, out->status, out->actual_length);
+
+- if (out == &h->urb_in) {
++ if (out == h->urb_in) {
+ D("[ reap urb - IN complete ]");
+ h->urb_in_busy = false;
+ if (urb->status != 0) {
+@@ -398,7 +398,7 @@ static int usb_bulk_read(usb_handle* h, void* data, int len) {
+ }
+ return urb->actual_length;
+ }
+- if (out == &h->urb_out) {
++ if (out == h->urb_out) {
+ D("[ reap urb - OUT compelete ]");
+ h->urb_out_busy = false;
+ h->cv.notify_all();
+@@ -502,10 +502,10 @@ void usb_kick(usb_handle* h) {
+ ** but this ensures that a reader blocked on REAPURB
+ ** will get unblocked
+ */
+- ioctl(h->fd, USBDEVFS_DISCARDURB, &h->urb_in);
+- ioctl(h->fd, USBDEVFS_DISCARDURB, &h->urb_out);
+- h->urb_in.status = -ENODEV;
+- h->urb_out.status = -ENODEV;
++ ioctl(h->fd, USBDEVFS_DISCARDURB, h->urb_in);
++ ioctl(h->fd, USBDEVFS_DISCARDURB, h->urb_out);
++ h->urb_in->status = -ENODEV;
++ h->urb_out->status = -ENODEV;
+ h->urb_in_busy = false;
+ h->urb_out_busy = false;
+ h->cv.notify_all();
+@@ -521,6 +521,8 @@ int usb_close(usb_handle* h) {
+
+ D("-- usb close %p (fd = %d) --", h, h->fd);
+
++ delete h->urb_in;
++ delete h->urb_out;
+ delete h;
+
+ return 0;
+@@ -556,6 +558,8 @@ static void register_device(const char* dev_name, const char* dev_path, unsigned
+ usb->ep_out = ep_out;
+ usb->zero_mask = zero_mask;
+ usb->max_packet_size = max_packet_size;
++ usb->urb_in = new usbdevfs_urb;
++ usb->urb_out = new usbdevfs_urb;
+
+ // Initialize mark so we don't get garbage collected after the device scan.
+ usb->mark = true;
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0004-adb-Fix-build-on-big-endian-systems.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0004-adb-Fix-build-on-big-endian-systems.patch
new file mode 100644
index 0000000000..c82423908f
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0004-adb-Fix-build-on-big-endian-systems.patch
@@ -0,0 +1,50 @@
+From e4a29888cb52c1eafd3ab57a6e220b38147ecfbe Mon Sep 17 00:00:00 2001
+From: Etienne Cordonnier <ecordonnier@snap.com>
+Date: Tue, 14 Mar 2023 13:39:23 +0100
+Subject: [PATCH] adb: Fix build on big endian systems
+
+The usb_linux_client.c file defines cpu_to_le16/32 by using the C
+library htole16/32 function calls. However, cpu_to_le16/32 are used
+when initializing structures, i.e in a context where a function call
+is not allowed.
+
+It works fine on little endian systems because htole16/32 are defined
+by the C library as no-ops. But on big-endian systems, they are
+actually doing something, which might involve calling a function,
+causing build failures.
+
+To solve this, we simply open-code cpu_to_le16/32 in a way that allows
+them to be used when initializing structures.
+
+Signed-off-by: Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
+[Forward-ported to version 29]
+Signed-off-by: Etienne Cordonnier <ecordonnier@snap.com>
+
+---
+Upstream-Status: Pending
+
+ system/core/adb/daemon/usb_ffs.cpp | 11 +++++++++--
+ 1 file changed, 9 insertions(+), 2 deletions(-)
+
+diff --git a/system/core/adb/daemon/usb_ffs.cpp b/system/core/adb/daemon/usb_ffs.cpp
+index b19fa5d5..ef2291ca 100644
+--- a/system/core/adb/daemon/usb_ffs.cpp
++++ b/system/core/adb/daemon/usb_ffs.cpp
+@@ -39,8 +39,15 @@
+
+ #define USB_EXT_PROP_UNICODE 1
+
+-#define cpu_to_le16(x) htole16(x)
+-#define cpu_to_le32(x) htole32(x)
++#if __BYTE_ORDER == __LITTLE_ENDIAN
++# define cpu_to_le16(x) (x)
++# define cpu_to_le32(x) (x)
++#else
++# define cpu_to_le16(x) ((((x) >> 8) & 0xffu) | (((x) & 0xffu) << 8))
++# define cpu_to_le32(x) \
++ ((((x) & 0xff000000u) >> 24) | (((x) & 0x00ff0000u) >> 8) | \
++ (((x) & 0x0000ff00u) << 8) | (((x) & 0x000000ffu) << 24))
++#endif
+
+ // clang-format off
+ struct func_desc {
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0005-adb-Allow-adbd-to-be-run-as-root.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0005-adb-Allow-adbd-to-be-run-as-root.patch
new file mode 100644
index 0000000000..dfbdc2af66
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/0005-adb-Allow-adbd-to-be-run-as-root.patch
@@ -0,0 +1,25 @@
+From 4ef35041ba5c02df48c31f2382e7c3c4316ad936 Mon Sep 17 00:00:00 2001
+From: Etienne Cordonnier <ecordonnier@snap.com>
+Date: Tue, 14 Mar 2023 13:53:51 +0100
+Subject: [PATCH] adb: Allow adbd to be run as root
+
+Signed-off-by: Etienne Cordonnier <ecordonnier@snap.com>
+
+---
+Upstream-Status: Pending
+
+ system/core/adb/daemon/main.cpp | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/system/core/adb/daemon/main.cpp b/system/core/adb/daemon/main.cpp
+index e807d13d..309663a2 100644
+--- a/system/core/adb/daemon/main.cpp
++++ b/system/core/adb/daemon/main.cpp
+@@ -75,6 +75,7 @@ static bool should_drop_capabilities_bounding_set() {
+ }
+
+ static bool should_drop_privileges() {
++ return true;
+ // "adb root" not allowed, always drop privileges.
+ if (!ALLOW_ADBD_ROOT && !is_device_unlocked()) return true;
+
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/adbd.mk b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/adbd.mk
new file mode 100644
index 0000000000..3282216b8e
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/adbd.mk
@@ -0,0 +1,87 @@
+include rules_yocto.mk
+NAME = adbd
+
+SOURCES = \
+ adb/adbconnection/adbconnection_server.cpp \
+ adb/daemon/auth.cpp \
+ adb/daemon/file_sync_service.cpp \
+ adb/daemon/file_sync_service.h \
+ adb/daemon/framebuffer_service.cpp \
+ adb/daemon/framebuffer_service.h \
+ adb/daemon/jdwp_service.cpp \
+ adb/daemon/main.cpp \
+ adb/daemon/restart_service.cpp \
+ adb/daemon/restart_service.h \
+ adb/daemon/services.cpp \
+ adb/daemon/shell_service.cpp \
+ adb/daemon/shell_service.h \
+ adb/daemon/usb_ffs.cpp \
+ adb/daemon/usb_legacy.cpp \
+ adb/daemon/usb.cpp \
+ adb/shell_service_protocol.cpp \
+ adb/adb.cpp \
+ adb/adb_io.cpp \
+ adb/adb_listeners.cpp \
+ adb/adb_trace.cpp \
+ adb/adb_unique_fd.cpp \
+ adb/adb_utils.cpp \
+ adb/fdevent/fdevent.cpp \
+ adb/fdevent/fdevent_epoll.cpp \
+ adb/services.cpp \
+ adb/sockets.cpp \
+ adb/socket_spec.cpp \
+ adb/sysdeps/errno.cpp \
+ adb/sysdeps/posix/network.cpp \
+ adb/sysdeps_unix.cpp \
+ adb/transport.cpp \
+ adb/transport_fd.cpp \
+ adb/transport_local.cpp \
+ adb/transport_usb.cpp \
+ adb/types.cpp \
+ diagnose_usb/diagnose_usb.cpp \
+ libasyncio/AsyncIO.cpp \
+
+SOURCES := $(foreach source, $(SOURCES), system/core/$(source))
+
+SOURCES += \
+ frameworks/native/libs/adbd_auth/adbd_auth.cpp
+
+CXXFLAGS += -std=gnu++20
+CPPFLAGS += -Isystem/coreinclude -Isystem/core/adb -Isystem/core/base/include -Idebian/out/system/core -Isystem/tools/mkbootimg/include/bootimg -Isystem/core/fs_mgr/include \
+ -Isystem/core/fs_mgr/include_fstab \
+ -DADB_VERSION='"$(DEB_VERSION)"' -D_GNU_SOURCE
+LDFLAGS += -Wl,-rpath='$$ORIGIN/../lib/android' -Wl,-rpath-link='$$ORIGIN/../lib/android' \
+ -lpthread -Ldebian/out/system/core -Ldebian/out/external/boringssl -lbase -lcrypto_utils -l:libcrypto.a -lcutils -llog -lresolv
+
+PAGE_SIZE ?= 4096
+
+CXXFLAGS += -UADB_HOST
+CXXFLAGS += -DADB_HOST=0
+CXXFLAGS += -DALLOW_ADBD_DISABLE_VERITY
+CXXFLAGS += -DALLOW_ADBD_NO_AUTH
+CXXFLAGS += -DPLATFORM_TOOLS_VERSION='"28.0.2"'
+CXXFLAGS += -Isystem/core/diagnose_usb/include
+CXXFLAGS += -Isystem/core/adb/daemon/include
+CXXFLAGS += -Isystem/core/adb/adbconnection/include
+CXXFLAGS += -Isystem/core/libasyncio/include
+CXXFLAGS += -Isystem/core/libcutils/include
+CXXFLAGS += -Isystem/core/libcrypto_utils/include
+CXXFLAGS += -Isystem/core/liblog/include/
+CXXFLAGS += -Isystem/core/libutils/include
+CXXFLAGS += -Iframeworks/native/libs/adbd_auth/include
+CXXFLAGS += -Wno-c++11-narrowing
+CXXFLAGS += -DPAGE_SIZE=$(PAGE_SIZE)
+
+
+# -latomic should be the last library specified
+# https://github.com/android/ndk/issues/589
+ifneq ($(filter armel mipsel,$(DEB_HOST_ARCH)),)
+ LDFLAGS += -latomic
+endif
+
+build: $(SOURCES)
+ mkdir --parents debian/out/system/core
+ $(CXX) $^ -o debian/out/system/core/adbd $(CXXFLAGS) $(CPPFLAGS) $(LDFLAGS)
+
+clean:
+ $(RM) debian/out/system/core/adbd
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/android-tools-adbd.service b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/android-tools-adbd.service
new file mode 100644
index 0000000000..ddf8d7f74e
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/android-tools-adbd.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Android Debug Bridge
+ConditionPathExists=/var/usb-debugging-enabled
+Before=android-system.service
+
+[Service]
+Type=simple
+Restart=on-failure
+ExecStartPre=-/usr/bin/android-gadget-setup adb
+ExecStart=/usr/bin/adbd
+
+[Install]
+WantedBy=basic.target
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Add-riscv64-support.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Add-riscv64-support.patch
new file mode 100644
index 0000000000..9fd0fa792c
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Add-riscv64-support.patch
@@ -0,0 +1,653 @@
+From: Guo Ren <guoren@linux.alibaba.com>
+Date: Wed, 29 Jun 2022 16:46:46 +0800
+Subject: Add riscv64 support
+
+This patch contains the dwarf unwind support for 64bit risc-v.
+
+ * DwarfCfa.cpp (cfa_def_cfa_register): setup register if CFA_REG is
+ not setup for riscv64
+ * Elf.cpp (GetRelPc): convert offset to virtual address for riscv64.
+ * ElfInterface.cpp (GetVirtAddrFromOffset): New for riscv64.
+ * RegsRiscv64.cpp (StepIfSignalHandler): Fix signal frame check.
+ libunwindstack/include/unwindstack/
+ * ElfInterface.h (GetVirtAddrFromOffset): New for riscv64.
+ libunwindstack/tests/
+ * DwarfCfaTest.cpp (cfa_def_cfa_register): ok for riscv64.
+ * RegsStepIfSignalHandlerTest.cpp (riscv64_step_if_signal_handler): Fix
+ testcase for riscv64
+
+Test: Builds.
+Test: All unit tests pass.
+
+Signed-off-by: Guo Ren <guoren@linux.alibaba.com>
+Signed-off-by: Lifang Xia <lifang_xia@linux.alibaba.com>
+Signed-off-by: Mao Han <han_mao@linux.alibaba.com>
+Change-Id: Ib21ddf23cc83f332af202df7bffcaceec16063e0
+---
+Upstream-Status: Pending
+
+ system/core/libunwindstack/Android.bp | 1 +
+ system/core/libunwindstack/Elf.cpp | 2 +
+ system/core/libunwindstack/Regs.cpp | 10 ++
+ system/core/libunwindstack/RegsRiscv64.cpp | 156 +++++++++++++++++++++
+ .../core/libunwindstack/include/unwindstack/Elf.h | 5 +
+ .../include/unwindstack/MachineRiscv64.h | 59 ++++++++
+ .../include/unwindstack/RegsGetLocal.h | 43 ++++++
+ .../include/unwindstack/RegsRiscv64.h | 59 ++++++++
+ .../include/unwindstack/UcontextRiscv64.h | 80 +++++++++++
+ .../include/unwindstack/UserRiscv64.h | 37 +++++
+ system/core/libunwindstack/tools/unwind.cpp | 3 +
+ .../core/libunwindstack/tools/unwind_symbols.cpp | 3 +
+ 12 files changed, 458 insertions(+)
+ create mode 100644 system/core/libunwindstack/RegsRiscv64.cpp
+ create mode 100644 system/core/libunwindstack/include/unwindstack/MachineRiscv64.h
+ create mode 100644 system/core/libunwindstack/include/unwindstack/RegsRiscv64.h
+ create mode 100644 system/core/libunwindstack/include/unwindstack/UcontextRiscv64.h
+ create mode 100644 system/core/libunwindstack/include/unwindstack/UserRiscv64.h
+
+diff --git a/system/core/libunwindstack/Android.bp b/system/core/libunwindstack/Android.bp
+index 3695f72..f1f9c68 100644
+--- a/system/core/libunwindstack/Android.bp
++++ b/system/core/libunwindstack/Android.bp
+@@ -70,6 +70,7 @@ cc_library {
+ "RegsArm64.cpp",
+ "RegsX86.cpp",
+ "RegsX86_64.cpp",
++ "RegsRiscv64.cpp",
+ "RegsMips.cpp",
+ "RegsMips64.cpp",
+ "Unwinder.cpp",
+diff --git a/system/core/libunwindstack/Elf.cpp b/system/core/libunwindstack/Elf.cpp
+index f01b092..3c2088b 100644
+--- a/system/core/libunwindstack/Elf.cpp
++++ b/system/core/libunwindstack/Elf.cpp
+@@ -290,6 +290,8 @@ ElfInterface* Elf::CreateInterfaceFromMemory(Memory* memory) {
+ arch_ = ARCH_X86_64;
+ } else if (e_machine == EM_MIPS) {
+ arch_ = ARCH_MIPS64;
++ } else if (e_machine == EM_RISCV) {
++ arch_ = ARCH_RISCV64;
+ } else {
+ // Unsupported.
+ ALOGI("64 bit elf that is neither aarch64 nor x86_64 nor mips64: e_machine = %d\n",
+diff --git a/system/core/libunwindstack/Regs.cpp b/system/core/libunwindstack/Regs.cpp
+index c7dec52..447a554 100644
+--- a/system/core/libunwindstack/Regs.cpp
++++ b/system/core/libunwindstack/Regs.cpp
+@@ -27,12 +27,14 @@
+ #include <unwindstack/RegsArm64.h>
+ #include <unwindstack/RegsMips.h>
+ #include <unwindstack/RegsMips64.h>
++#include <unwindstack/RegsRiscv64.h>
+ #include <unwindstack/RegsX86.h>
+ #include <unwindstack/RegsX86_64.h>
+ #include <unwindstack/UserArm.h>
+ #include <unwindstack/UserArm64.h>
+ #include <unwindstack/UserMips.h>
+ #include <unwindstack/UserMips64.h>
++#include <unwindstack/UserRiscv64.h>
+ #include <unwindstack/UserX86.h>
+ #include <unwindstack/UserX86_64.h>
+
+@@ -67,6 +69,8 @@ Regs* Regs::RemoteGet(pid_t pid) {
+ return RegsMips::Read(buffer.data());
+ case sizeof(mips64_user_regs):
+ return RegsMips64::Read(buffer.data());
++ case sizeof(riscv64_user_regs):
++ return RegsRiscv64::Read(buffer.data());
+ }
+ return nullptr;
+ }
+@@ -85,6 +89,8 @@ Regs* Regs::CreateFromUcontext(ArchEnum arch, void* ucontext) {
+ return RegsMips::CreateFromUcontext(ucontext);
+ case ARCH_MIPS64:
+ return RegsMips64::CreateFromUcontext(ucontext);
++ case ARCH_RISCV64:
++ return RegsRiscv64::CreateFromUcontext(ucontext);
+ case ARCH_UNKNOWN:
+ default:
+ return nullptr;
+@@ -104,6 +110,8 @@ ArchEnum Regs::CurrentArch() {
+ return ARCH_MIPS;
+ #elif defined(__mips__) && defined(__LP64__)
+ return ARCH_MIPS64;
++#elif defined(__riscv)
++ return ARCH_RISCV64;
+ #else
+ abort();
+ #endif
+@@ -123,6 +131,8 @@ Regs* Regs::CreateFromLocal() {
+ regs = new RegsMips();
+ #elif defined(__mips__) && defined(__LP64__)
+ regs = new RegsMips64();
++#elif defined(__riscv)
++ regs = new RegsRiscv64();
+ #else
+ abort();
+ #endif
+diff --git a/system/core/libunwindstack/RegsRiscv64.cpp b/system/core/libunwindstack/RegsRiscv64.cpp
+new file mode 100644
+index 0000000..887762a
+--- /dev/null
++++ b/system/core/libunwindstack/RegsRiscv64.cpp
+@@ -0,0 +1,156 @@
++/*
++ * Copyright (C) 2022 The Android Open Source Project
++ *
++ * Licensed under the Apache License, Version 2.0 (the "License");
++ * you may not use this file except in compliance with the License.
++ * You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++#include <stdint.h>
++#include <string.h>
++
++#include <functional>
++
++#include <unwindstack/Elf.h>
++#include <unwindstack/MachineRiscv64.h>
++#include <unwindstack/MapInfo.h>
++#include <unwindstack/Memory.h>
++#include <unwindstack/RegsRiscv64.h>
++#include <unwindstack/UcontextRiscv64.h>
++#include <unwindstack/UserRiscv64.h>
++
++namespace unwindstack {
++
++RegsRiscv64::RegsRiscv64()
++ : RegsImpl<uint64_t>(RISCV64_REG_MAX, Location(LOCATION_REGISTER, RISCV64_REG_RA)) {}
++
++ArchEnum RegsRiscv64::Arch() {
++ return ARCH_RISCV64;
++}
++
++uint64_t RegsRiscv64::pc() {
++ return regs_[RISCV64_REG_PC];
++}
++
++uint64_t RegsRiscv64::sp() {
++ return regs_[RISCV64_REG_SP];
++}
++
++void RegsRiscv64::set_pc(uint64_t pc) {
++ regs_[RISCV64_REG_PC] = pc;
++}
++
++void RegsRiscv64::set_sp(uint64_t sp) {
++ regs_[RISCV64_REG_SP] = sp;
++}
++
++uint64_t RegsRiscv64::GetPcAdjustment(uint64_t rel_pc, Elf*) {
++ if (rel_pc < 8) {
++ return 0;
++ }
++ // For now, just assume no compact branches
++ return 8;
++}
++
++bool RegsRiscv64::SetPcFromReturnAddress(Memory*) {
++ uint64_t ra = regs_[RISCV64_REG_RA];
++ if (regs_[RISCV64_REG_PC] == ra) {
++ return false;
++ }
++
++ regs_[RISCV64_REG_PC] = ra;
++ return true;
++}
++
++void RegsRiscv64::IterateRegisters(std::function<void(const char*, uint64_t)> fn) {
++ fn("pc", regs_[RISCV64_REG_PC]);
++ fn("ra", regs_[RISCV64_REG_RA]);
++ fn("sp", regs_[RISCV64_REG_SP]);
++ fn("gp", regs_[RISCV64_REG_GP]);
++ fn("tp", regs_[RISCV64_REG_TP]);
++ fn("t0", regs_[RISCV64_REG_T0]);
++ fn("t1", regs_[RISCV64_REG_T1]);
++ fn("t2", regs_[RISCV64_REG_T2]);
++ fn("t3", regs_[RISCV64_REG_T3]);
++ fn("t4", regs_[RISCV64_REG_T4]);
++ fn("t5", regs_[RISCV64_REG_T5]);
++ fn("t6", regs_[RISCV64_REG_T6]);
++ fn("s0", regs_[RISCV64_REG_S0]);
++ fn("s1", regs_[RISCV64_REG_S1]);
++ fn("s2", regs_[RISCV64_REG_S2]);
++ fn("s3", regs_[RISCV64_REG_S3]);
++ fn("s4", regs_[RISCV64_REG_S4]);
++ fn("s5", regs_[RISCV64_REG_S5]);
++ fn("s6", regs_[RISCV64_REG_S6]);
++ fn("s7", regs_[RISCV64_REG_S7]);
++ fn("s8", regs_[RISCV64_REG_S8]);
++ fn("s9", regs_[RISCV64_REG_S9]);
++ fn("s10", regs_[RISCV64_REG_S10]);
++ fn("s11", regs_[RISCV64_REG_S11]);
++ fn("a0", regs_[RISCV64_REG_A0]);
++ fn("a1", regs_[RISCV64_REG_A1]);
++ fn("a2", regs_[RISCV64_REG_A2]);
++ fn("a3", regs_[RISCV64_REG_A3]);
++ fn("a4", regs_[RISCV64_REG_A4]);
++ fn("a5", regs_[RISCV64_REG_A5]);
++ fn("a6", regs_[RISCV64_REG_A6]);
++ fn("a7", regs_[RISCV64_REG_A7]);
++}
++
++Regs* RegsRiscv64::Read(void* remote_data) {
++ riscv64_user_regs* user = reinterpret_cast<riscv64_user_regs*>(remote_data);
++
++ RegsRiscv64* regs = new RegsRiscv64();
++ memcpy(regs->RawData(), &user->regs[0], RISCV64_REG_MAX * sizeof(uint64_t));
++ // uint64_t* reg_data = reinterpret_cast<uint64_t*>(regs->RawData());
++ return regs;
++}
++
++Regs* RegsRiscv64::CreateFromUcontext(void* ucontext) {
++ riscv64_ucontext_t* riscv64_ucontext = reinterpret_cast<riscv64_ucontext_t*>(ucontext);
++
++ RegsRiscv64* regs = new RegsRiscv64();
++ memcpy(regs->RawData(), &riscv64_ucontext->uc_mcontext.__gregs[0],
++ RISCV64_REG_MAX * sizeof(uint64_t));
++ return regs;
++}
++
++bool RegsRiscv64::StepIfSignalHandler(uint64_t elf_offset, Elf* elf, Memory* process_memory) {
++ uint64_t data;
++ Memory* elf_memory = elf->memory();
++ // Read from elf memory since it is usually more expensive to read from
++ // process memory.
++ if (!elf_memory->ReadFully(elf_offset, &data, sizeof(data))) {
++ return false;
++ }
++ // Look for the kernel sigreturn function.
++ // __kernel_rt_sigreturn:
++ // li a7, __NR_rt_sigreturn
++ // scall
++
++ const uint8_t li_scall[] = {0x93, 0x08, 0xb0, 0x08, 0x73, 0x00, 0x00, 0x00};
++ if (memcmp(&data, &li_scall, 8) != 0) {
++ return false;
++ }
++
++ // SP + sizeof(siginfo_t) + uc_mcontext offset + PC offset.
++ if (!process_memory->ReadFully(regs_[RISCV64_REG_SP] + 0x80 + 0xb0 + 0x00, regs_.data(),
++ sizeof(uint64_t) * (RISCV64_REG_MAX))) {
++ return false;
++ }
++ return true;
++}
++
++Regs* RegsRiscv64::Clone() {
++ return new RegsRiscv64(*this);
++}
++
++} // namespace unwindstack
+diff --git a/system/core/libunwindstack/include/unwindstack/Elf.h b/system/core/libunwindstack/include/unwindstack/Elf.h
+index 472ed92..88fa0ff 100644
+--- a/system/core/libunwindstack/include/unwindstack/Elf.h
++++ b/system/core/libunwindstack/include/unwindstack/Elf.h
+@@ -32,6 +32,10 @@
+ #define EM_AARCH64 183
+ #endif
+
++#if !defined(EM_RISCV)
++#define EM_RISCV 243
++#endif
++
+ namespace unwindstack {
+
+ // Forward declaration.
+@@ -46,6 +50,7 @@ enum ArchEnum : uint8_t {
+ ARCH_X86_64,
+ ARCH_MIPS,
+ ARCH_MIPS64,
++ ARCH_RISCV64,
+ };
+
+ class Elf {
+diff --git a/system/core/libunwindstack/include/unwindstack/MachineRiscv64.h b/system/core/libunwindstack/include/unwindstack/MachineRiscv64.h
+new file mode 100644
+index 0000000..397e680
+--- /dev/null
++++ b/system/core/libunwindstack/include/unwindstack/MachineRiscv64.h
+@@ -0,0 +1,59 @@
++/*
++ * Copyright (C) 2022 The Android Open Source Project
++ *
++ * Licensed under the Apache License, Version 2.0 (the "License");
++ * you may not use this file except in compliance with the License.
++ * You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++#pragma once
++
++#include <stdint.h>
++
++namespace unwindstack {
++
++enum Riscv64Reg : uint16_t {
++ RISCV64_REG_PC,
++ RISCV64_REG_RA,
++ RISCV64_REG_SP,
++ RISCV64_REG_GP,
++ RISCV64_REG_TP,
++ RISCV64_REG_T0,
++ RISCV64_REG_T1,
++ RISCV64_REG_T2,
++ RISCV64_REG_S0,
++ RISCV64_REG_S1,
++ RISCV64_REG_A0,
++ RISCV64_REG_A1,
++ RISCV64_REG_A2,
++ RISCV64_REG_A3,
++ RISCV64_REG_A4,
++ RISCV64_REG_A5,
++ RISCV64_REG_A6,
++ RISCV64_REG_A7,
++ RISCV64_REG_S2,
++ RISCV64_REG_S3,
++ RISCV64_REG_S4,
++ RISCV64_REG_S5,
++ RISCV64_REG_S6,
++ RISCV64_REG_S7,
++ RISCV64_REG_S8,
++ RISCV64_REG_S9,
++ RISCV64_REG_S10,
++ RISCV64_REG_S11,
++ RISCV64_REG_T3,
++ RISCV64_REG_T4,
++ RISCV64_REG_T5,
++ RISCV64_REG_T6,
++ RISCV64_REG_MAX,
++};
++
++} // namespace unwindstack
+diff --git a/system/core/libunwindstack/include/unwindstack/RegsGetLocal.h b/system/core/libunwindstack/include/unwindstack/RegsGetLocal.h
+index f0b5e3a..698eba2 100644
+--- a/system/core/libunwindstack/include/unwindstack/RegsGetLocal.h
++++ b/system/core/libunwindstack/include/unwindstack/RegsGetLocal.h
+@@ -81,6 +81,49 @@ inline __attribute__((__always_inline__)) void AsmGetRegs(void* reg_data) {
+ : "x12", "x13", "memory");
+ }
+
++#elif defined(__riscv)
++
++inline __attribute__((__always_inline__)) void AsmGetRegs(void* reg_data) {
++ asm volatile(
++ "1:\n"
++ "sd ra, 8(%[base])\n"
++ "sd sp, 16(%[base])\n"
++ "sd gp, 24(%[base])\n"
++ "sd tp, 32(%[base])\n"
++ "sd t0, 40(%[base])\n"
++ "sd t1, 48(%[base])\n"
++ "sd t2, 56(%[base])\n"
++ "sd s0, 64(%[base])\n"
++ "sd s1, 72(%[base])\n"
++ "sd a0, 80(%[base])\n"
++ "sd a1, 88(%[base])\n"
++ "sd a2, 96(%[base])\n"
++ "sd a3, 104(%[base])\n"
++ "sd a4, 112(%[base])\n"
++ "sd a5, 120(%[base])\n"
++ "sd a6, 128(%[base])\n"
++ "sd a7, 136(%[base])\n"
++ "sd s2, 144(%[base])\n"
++ "sd s3, 152(%[base])\n"
++ "sd s4, 160(%[base])\n"
++ "sd s5, 168(%[base])\n"
++ "sd s6, 176(%[base])\n"
++ "sd s7, 184(%[base])\n"
++ "sd s8, 192(%[base])\n"
++ "sd s9, 200(%[base])\n"
++ "sd s10, 208(%[base])\n"
++ "sd s11, 216(%[base])\n"
++ "sd t3, 224(%[base])\n"
++ "sd t4, 232(%[base])\n"
++ "sd t5, 240(%[base])\n"
++ "sd t6, 248(%[base])\n"
++ "la t1, 1b\n"
++ "sd t1, 0(%[base])\n"
++ : [base] "+r"(reg_data)
++ :
++ : "t1", "memory");
++}
++
+ #elif defined(__i386__) || defined(__x86_64__) || defined(__mips__)
+
+ extern "C" void AsmGetRegs(void* regs);
+diff --git a/system/core/libunwindstack/include/unwindstack/RegsRiscv64.h b/system/core/libunwindstack/include/unwindstack/RegsRiscv64.h
+new file mode 100644
+index 0000000..eb09397
+--- /dev/null
++++ b/system/core/libunwindstack/include/unwindstack/RegsRiscv64.h
+@@ -0,0 +1,59 @@
++/*
++ * Copyright (C) 2022 The Android Open Source Project
++ *
++ * Licensed under the Apache License, Version 2.0 (the "License");
++ * you may not use this file except in compliance with the License.
++ * You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++#pragma once
++
++#include <stdint.h>
++
++#include <functional>
++
++#include <unwindstack/Elf.h>
++#include <unwindstack/Regs.h>
++
++namespace unwindstack {
++
++// Forward declarations.
++class Memory;
++
++class RegsRiscv64 : public RegsImpl<uint64_t> {
++ public:
++ RegsRiscv64();
++ virtual ~RegsRiscv64() = default;
++
++ ArchEnum Arch() override final;
++
++ uint64_t GetPcAdjustment(uint64_t rel_pc, Elf* elf) override;
++
++ bool SetPcFromReturnAddress(Memory* process_memory) override;
++
++ bool StepIfSignalHandler(uint64_t elf_offset, Elf* elf, Memory* process_memory) override;
++
++ void IterateRegisters(std::function<void(const char*, uint64_t)>) override final;
++
++ uint64_t pc() override;
++ uint64_t sp() override;
++
++ void set_pc(uint64_t pc) override;
++ void set_sp(uint64_t sp) override;
++
++ Regs* Clone() override final;
++
++ static Regs* Read(void* data);
++
++ static Regs* CreateFromUcontext(void* ucontext);
++};
++
++} // namespace unwindstack
+diff --git a/system/core/libunwindstack/include/unwindstack/UcontextRiscv64.h b/system/core/libunwindstack/include/unwindstack/UcontextRiscv64.h
+new file mode 100644
+index 0000000..c6c82b1
+--- /dev/null
++++ b/system/core/libunwindstack/include/unwindstack/UcontextRiscv64.h
+@@ -0,0 +1,80 @@
++/*
++ * Copyright (C) 2014 The Android Open Source Project
++ * All rights reserved.
++ *
++ * Redistribution and use in source and binary forms, with or without
++ * modification, are permitted provided that the following conditions
++ * are met:
++ * * Redistributions of source code must retain the above copyright
++ * notice, this list of conditions and the following disclaimer.
++ * * Redistributions in binary form must reproduce the above copyright
++ * notice, this list of conditions and the following disclaimer in
++ * the documentation and/or other materials provided with the
++ * distribution.
++ *
++ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
++ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
++ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
++ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
++ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
++ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
++ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
++ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
++ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
++ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
++ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
++ * SUCH DAMAGE.
++ */
++
++#pragma once
++
++#include <sys/cdefs.h>
++
++typedef uint64_t __riscv_mc_gp_state[32]; // unsigned long
++
++struct __riscv_mc_f_ext_state {
++ uint32_t __f[32];
++ uint32_t __fcsr;
++};
++
++struct __riscv_mc_d_ext_state {
++ uint64_t __f[32];
++ uint32_t __fcsr;
++};
++
++struct __riscv_mc_q_ext_state {
++ uint64_t __f[64] __attribute__((__aligned__(16)));
++ uint32_t __fcsr;
++ uint32_t __reserved[3];
++};
++
++union __riscv_mc_fp_state {
++ struct __riscv_mc_f_ext_state __f;
++ struct __riscv_mc_d_ext_state __d;
++ struct __riscv_mc_q_ext_state __q;
++};
++
++struct __riscv_stack_t {
++ uint64_t ss_sp;
++ int32_t ss_flags;
++ uint64_t ss_size;
++};
++
++struct riscv64_sigset_t {
++ uint64_t sig; // unsigned long
++};
++
++struct riscv64_mcontext_t {
++ __riscv_mc_gp_state __gregs;
++ union __riscv_mc_fp_state __fpregs;
++};
++
++struct riscv64_ucontext_t {
++ uint64_t uc_flags; // unsigned long
++ struct riscv64_ucontext_t* uc_link;
++ __riscv_stack_t uc_stack;
++ riscv64_sigset_t uc_sigmask;
++ /* The kernel adds extra padding here to allow sigset_t to grow. */
++ int8_t __padding[128 - sizeof(riscv64_sigset_t)]; // char
++ riscv64_mcontext_t uc_mcontext;
++};
+diff --git a/system/core/libunwindstack/include/unwindstack/UserRiscv64.h b/system/core/libunwindstack/include/unwindstack/UserRiscv64.h
+new file mode 100644
+index 0000000..1e91228
+--- /dev/null
++++ b/system/core/libunwindstack/include/unwindstack/UserRiscv64.h
+@@ -0,0 +1,37 @@
++/*
++ * Copyright (C) 2016 The Android Open Source Project
++ * All rights reserved.
++ *
++ * Redistribution and use in source and binary forms, with or without
++ * modification, are permitted provided that the following conditions
++ * are met:
++ * * Redistributions of source code must retain the above copyright
++ * notice, this list of conditions and the following disclaimer.
++ * * Redistributions in binary form must reproduce the above copyright
++ * notice, this list of conditions and the following disclaimer in
++ * the documentation and/or other materials provided with the
++ * distribution.
++ *
++ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
++ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
++ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
++ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
++ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
++ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
++ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
++ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
++ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
++ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
++ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
++ * SUCH DAMAGE.
++ */
++
++#pragma once
++
++namespace unwindstack {
++
++struct riscv64_user_regs {
++ uint64_t regs[32];
++};
++
++} // namespace unwindstack
+diff --git a/system/core/libunwindstack/tools/unwind.cpp b/system/core/libunwindstack/tools/unwind.cpp
+index 1812e50..ae20891 100644
+--- a/system/core/libunwindstack/tools/unwind.cpp
++++ b/system/core/libunwindstack/tools/unwind.cpp
+@@ -83,6 +83,9 @@ void DoUnwind(pid_t pid) {
+ case unwindstack::ARCH_MIPS64:
+ printf("mips64");
+ break;
++ case unwindstack::ARCH_RISCV64:
++ printf("riscv64");
++ break;
+ default:
+ printf("unknown\n");
+ return;
+diff --git a/system/core/libunwindstack/tools/unwind_symbols.cpp b/system/core/libunwindstack/tools/unwind_symbols.cpp
+index 8df2284..976db56 100644
+--- a/system/core/libunwindstack/tools/unwind_symbols.cpp
++++ b/system/core/libunwindstack/tools/unwind_symbols.cpp
+@@ -77,6 +77,9 @@ int main(int argc, char** argv) {
+ case EM_AARCH64:
+ printf("ABI: arm64\n");
+ break;
++ case EM_RISCV:
++ printf("ABI: riscv64\n");
++ break;
+ case EM_386:
+ printf("ABI: x86\n");
+ break;
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Added-missing-headers.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Added-missing-headers.patch
new file mode 100644
index 0000000000..d827d7d3c2
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Added-missing-headers.patch
@@ -0,0 +1,46 @@
+Upstream-Status: Pending
+
+Description: Added missing headers causing compile errors
+Author: Umang Parmar <umangjparmar@gmail.com>
+Forwarded: not-needed
+
+--- a/system/core/adb/sysdeps/posix/network.cpp
++++ b/system/core/adb/sysdeps/posix/network.cpp
+@@ -22,6 +22,7 @@
+ #include <sys/socket.h>
+
+ #include <string>
++#include <cstring>
+
+ #include <android-base/logging.h>
+ #include <android-base/stringprintf.h>
+--- a/system/core/base/file.cpp
++++ b/system/core/base/file.cpp
+@@ -26,6 +26,7 @@
+ #include <sys/stat.h>
+ #include <sys/types.h>
+ #include <unistd.h>
++#include <cstring>
+
+ #include <memory>
+ #include <mutex>
+--- a/system/core/libbacktrace/BacktraceMap.cpp
++++ b/system/core/libbacktrace/BacktraceMap.cpp
+@@ -21,6 +21,7 @@
+ #include <stdint.h>
+ #include <sys/types.h>
+ #include <unistd.h>
++#include <algorithm>
+
+ #include <log/log.h>
+
+--- a/system/core/libbacktrace/UnwindStackMap.cpp
++++ b/system/core/libbacktrace/UnwindStackMap.cpp
+@@ -20,6 +20,7 @@
+
+ #include <string>
+ #include <vector>
++#include <algorithm>
+
+ #include <backtrace/BacktraceMap.h>
+ #include <unwindstack/Elf.h>
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Nonnull.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Nonnull.patch
new file mode 100644
index 0000000000..54bd52c61a
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Nonnull.patch
@@ -0,0 +1,22 @@
+Upstream-Status: Pending
+
+Description: Bring Clang's _Nonnull keyword to GCC
+Author: Kai-Chung Yan
+Forwarded: not-needed
+--- a/system/core/adb/sysdeps.h
++++ b/system/core/adb/sysdeps.h
+@@ -40,11 +40,12 @@
+ #include "sysdeps/network.h"
+ #include "sysdeps/stat.h"
+
++#define _Nonnull
++#define _Nullable
++
+ #ifdef _WIN32
+
+ // Clang-only nullability specifiers
+-#define _Nonnull
+-#define _Nullable
+
+ #include <ctype.h>
+ #include <direct.h>
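For context (an illustration, not part of the patch above): with the two macros defined to nothing, GCC simply preprocesses the Clang-only nullability annotations away. A hypothetical example, not taken from the android-tools sources:

    #define _Nonnull
    #define _Nullable

    // Under GCC the annotation disappears during preprocessing, so this
    // declaration is seen as: void send_path(const char* path);
    void send_path(const char* _Nonnull path);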
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Vector-cast.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Vector-cast.patch
new file mode 100644
index 0000000000..b2881e0213
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/Vector-cast.patch
@@ -0,0 +1,16 @@
+Upstream-Status: Pending
+
+Description: Fix a weird GCC7 error where the call fails to match the correct parent method.
+Author: Kai-Chung Yan
+Forwarded: not-needed
+--- a/system/core/libutils/include/utils/Vector.h
++++ b/system/core/libutils/include/utils/Vector.h
+@@ -256,7 +256,7 @@
+
+ template<class TYPE> inline
+ const Vector<TYPE>& Vector<TYPE>::operator = (const Vector<TYPE>& rhs) const {
+- VectorImpl::operator = (static_cast<const VectorImpl&>(rhs));
++ VectorImpl::operator = (rhs);
+ return *this;
+ }
+
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/add-missing-headers.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/add-missing-headers.patch
new file mode 100644
index 0000000000..681d2c6553
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/add-missing-headers.patch
@@ -0,0 +1,43 @@
+Upstream-Status: Pending
+
+Forwarded: not-needed
+--- a/system/core/fs_mgr/liblp/reader.cpp
++++ b/system/core/fs_mgr/liblp/reader.cpp
+@@ -22,6 +22,7 @@
+ #include <unistd.h>
+
+ #include <functional>
++#include <cstring>
+
+ #include <android-base/file.h>
+ #include <android-base/unique_fd.h>
+--- a/system/core/fs_mgr/liblp/writer.cpp
++++ b/system/core/fs_mgr/liblp/writer.cpp
+@@ -21,6 +21,7 @@
+ #include <unistd.h>
+
+ #include <string>
++#include <cstring>
+
+ #include <android-base/file.h>
+ #include <android-base/unique_fd.h>
+--- a/system/core/liblog/logger_write.cpp
++++ b/system/core/liblog/logger_write.cpp
+@@ -27,6 +27,7 @@
+ #include <android/set_abort_message.h>
+ #endif
+
++#include <mutex>
+ #include <shared_mutex>
+
+ #include <android-base/errno_restorer.h>
+--- a/system/core/libziparchive/zip_archive_stream_entry.cc
++++ b/system/core/libziparchive/zip_archive_stream_entry.cc
+@@ -23,6 +23,7 @@
+ #include <sys/types.h>
+ #include <unistd.h>
+
++#include <limits>
+ #include <memory>
+ #include <vector>
+
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/20150704-CVE-2015-3239_dwarf_i.h.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/20150704-CVE-2015-3239_dwarf_i.h.patch
new file mode 100644
index 0000000000..e8e216161e
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/20150704-CVE-2015-3239_dwarf_i.h.patch
@@ -0,0 +1,15 @@
+Upstream-Status: Pending
+
+Description: Off-by-one error in the dwarf_to_unw_regnum function in include/dwarf_i.h in
+libunwind 1.1 allows local users to have unspecified impact via invalid dwarf opcodes.
+--- a/external/libunwind/include/dwarf_i.h
++++ b/external/libunwind/include/dwarf_i.h
+@@ -20,7 +20,7 @@
+ extern const uint8_t dwarf_to_unw_regnum_map[DWARF_REGNUM_MAP_LENGTH];
+ /* REG is evaluated multiple times; it better be side-effects free! */
+ # define dwarf_to_unw_regnum(reg) \
+- (((reg) <= DWARF_REGNUM_MAP_LENGTH) ? dwarf_to_unw_regnum_map[reg] : 0)
++ (((reg) < DWARF_REGNUM_MAP_LENGTH) ? dwarf_to_unw_regnum_map[reg] : 0)
+ #endif
+
+ #ifdef UNW_LOCAL_ONLY
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/legacy_built-in_sync_functions.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/legacy_built-in_sync_functions.patch
new file mode 100644
index 0000000000..a04a887c68
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/legacy_built-in_sync_functions.patch
@@ -0,0 +1,28 @@
+Upstream-Status: Pending
+
+Description: Replace the legacy __sync built-in functions with __atomic ones
+ libunwind uses the built-in __sync_* functions which are deprecated by GCC and
+ should be replaced by __atomic_* ones. See the official manuals [1].
+ .
+ The legacy __sync functions do not require to specify the memory order but
+ __atomic ones do, so we choose the strongest one: __ATOMIC_SEQ_CST.
+ .
+ We do this because __sync_fetch_and_add() is not supported on armel.
+ .
+ [1]: https://gcc.gnu.org/onlinedocs/gcc/_005f_005fsync-Builtins.html
+Author: Kai-Chung Yan ()
+Last-Update: 2016-10-04
+Forwarded: not-needed
+--- a/external/libunwind/include/libunwind_i.h
++++ b/external/libunwind/include/libunwind_i.h
+@@ -155,8 +155,8 @@ cmpxchg_ptr (void *addr, void *old, void
+ u.vp = addr;
+ return __sync_bool_compare_and_swap(u.vlp, (long) old, (long) new);
+ }
+-# define fetch_and_add1(_ptr) __sync_fetch_and_add(_ptr, 1)
+-# define fetch_and_add(_ptr, value) __sync_fetch_and_add(_ptr, value)
++# define fetch_and_add1(_ptr) __atomic_fetch_add(_ptr, 1, __ATOMIC_SEQ_CST)
++# define fetch_and_add(_ptr, value) __atomic_fetch_add(_ptr, value, __ATOMIC_SEQ_CST)
+ # define HAVE_CMPXCHG
+ # define HAVE_FETCH_AND_ADD
+ #endif
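As a side note on the substitution above (a minimal sketch, not part of the patch): once a memory order is given explicitly, the __atomic builtins are drop-in replacements for the deprecated __sync ones.

    // Illustrative counter, not taken from libunwind.
    static long counter = 0;

    long bump_legacy() {
      // Deprecated builtin: memory order is implicitly sequentially consistent.
      return __sync_fetch_and_add(&counter, 1);
    }

    long bump_replacement() {
      // Equivalent __atomic form with the order spelled out, as the patch does.
      return __atomic_fetch_add(&counter, 1, __ATOMIC_SEQ_CST);
    }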
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/user_pt_regs.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/user_pt_regs.patch
new file mode 100644
index 0000000000..35df944d1c
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/external/libunwind/user_pt_regs.patch
@@ -0,0 +1,26 @@
+Upstream-Status: Pending
+
+Author: Kai-Chung Yan <seamlikok@gmail.com>
+Last-Update: 2016-08-24
+Description: Manual definition of struct user_pt_regs
+ On ARM64, libunwind uses struct user_pt_regs which is not defined
+ anywhere, which causes FTBFS.
+Forwarded: not-needed
+--- a/external/libunwind/src/ptrace/_UPT_access_reg.c
++++ b/external/libunwind/src/ptrace/_UPT_access_reg.c
+@@ -26,6 +26,15 @@ WITH THE SOFTWARE OR THE USE OR OTHER DE
+
+ #include "_UPT_internal.h"
+
++#if defined(__aarch64__)
++ struct user_pt_regs {
++ __u64 regs[31];
++ __u64 sp;
++ __u64 pc;
++ __u64 pstate;
++ };
++#endif
++
+ #if UNW_TARGET_IA64
+ # include <elf.h>
+ # ifdef HAVE_ASM_PTRACE_OFFSETS_H
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-attribute-issue-with-gcc.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-attribute-issue-with-gcc.patch
new file mode 100644
index 0000000000..441031f536
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-attribute-issue-with-gcc.patch
@@ -0,0 +1,19 @@
+Upstream-Status: Pending
+
+Description: remove clang-ism
+Forwarded: not-needed
+
+--- a/system/core/base/include/android-base/logging.h
++++ b/system/core/base/include/android-base/logging.h
+@@ -451,10 +451,7 @@
+ // -Wno-user-defined-warnings to CPPFLAGS.
+ #pragma clang diagnostic push
+ #pragma clang diagnostic ignored "-Wgcc-compat"
+-#define OSTREAM_STRING_POINTER_USAGE_WARNING \
+- __attribute__((diagnose_if(true, "Unexpected logging of string pointer", "warning")))
+-inline OSTREAM_STRING_POINTER_USAGE_WARNING
+-std::ostream& operator<<(std::ostream& stream, const std::string* string_pointer) {
++inline std::ostream& operator<<(std::ostream& stream, const std::string* string_pointer) {
+ return stream << static_cast<const void*>(string_pointer);
+ }
+ #pragma clang diagnostic pop
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-build-on-non-x86.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-build-on-non-x86.patch
new file mode 100644
index 0000000000..b1caa60c4a
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-build-on-non-x86.patch
@@ -0,0 +1,26 @@
+Upstream-Status: Pending
+
+Description: non-x86 arches do not have PAGE_SIZE
+Forwarded: not-needed
+--- a/system/core/base/cmsg.cpp
++++ b/system/core/base/cmsg.cpp
+@@ -33,7 +33,8 @@
+ const std::vector<int>& fds) {
+ size_t cmsg_space = CMSG_SPACE(sizeof(int) * fds.size());
+ size_t cmsg_len = CMSG_LEN(sizeof(int) * fds.size());
+- if (cmsg_space >= PAGE_SIZE) {
++ size_t pagesize = static_cast<size_t>(sysconf(_SC_PAGE_SIZE));
++ if (cmsg_space >= pagesize) {
+ errno = ENOMEM;
+ return -1;
+ }
+@@ -75,7 +76,8 @@
+ fds->clear();
+
+ size_t cmsg_space = CMSG_SPACE(sizeof(int) * max_fds);
+- if (cmsg_space >= PAGE_SIZE) {
++ size_t pagesize = static_cast<size_t>(sysconf(_SC_PAGE_SIZE));
++ if (cmsg_space >= pagesize) {
+ errno = ENOMEM;
+ return -1;
+ }
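For readers unfamiliar with the substitution above (an illustrative sketch, not part of the patch): PAGE_SIZE is a compile-time constant that libc headers only expose on some architectures, while sysconf() reports the same value at run time on any POSIX system.

    #include <unistd.h>
    #include <cstddef>

    // Hypothetical helper showing the portable query used in the patch.
    static std::size_t page_size() {
      long ps = sysconf(_SC_PAGE_SIZE);
      // 4096 is merely a common default, used here only as a fallback.
      return ps > 0 ? static_cast<std::size_t>(ps) : 4096;
    }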
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-gettid-exception-declaration.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-gettid-exception-declaration.patch
new file mode 100644
index 0000000000..5f24d0b4a0
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-gettid-exception-declaration.patch
@@ -0,0 +1,26 @@
+Upstream-Status: Pending
+
+Description: get libcutils building
+Forwarded: not-needed
+--- a/system/core/libcutils/include/cutils/threads.h
++++ b/system/core/libcutils/include/cutils/threads.h
+@@ -33,7 +33,7 @@
+ // Deprecated: use android::base::GetThreadId instead, which doesn't truncate on Mac/Windows.
+ //
+
+-extern pid_t gettid();
++extern pid_t gettid(void) __THROW;
+
+ //
+ // Deprecated: use `_Thread_local` in C or `thread_local` in C++.
+--- a/system/core/libcutils/threads.cpp
++++ b/system/core/libcutils/threads.cpp
+@@ -33,7 +33,7 @@
+
+ // No definition needed for Android because we'll just pick up bionic's copy.
+ #ifndef __ANDROID__
+-pid_t gettid() {
++pid_t gettid(void) __THROW {
+ #if defined(__APPLE__)
+ uint64_t tid;
+ pthread_threadid_np(NULL, &tid);
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-standard-namespace-errors.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-standard-namespace-errors.patch
new file mode 100644
index 0000000000..4380308f13
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/fix-standard-namespace-errors.patch
@@ -0,0 +1,24 @@
+Upstream-Status: Pending
+
+Description: Add missing 'std::' scope identifiers.
+Forwarded: not-needed
+--- a/system/core/libunwindstack/include/unwindstack/DwarfMemory.h
++++ b/system/core/libunwindstack/include/unwindstack/DwarfMemory.h
+@@ -29,7 +29,7 @@
+ DwarfMemory(Memory* memory) : memory_(memory) {}
+ virtual ~DwarfMemory() = default;
+
+- bool ReadBytes(void* dst, size_t num_bytes);
++ bool ReadBytes(void* dst, std::size_t num_bytes);
+
+ template <typename SignedType>
+ bool ReadSigned(uint64_t* value);
+@@ -39,7 +39,7 @@
+ bool ReadSLEB128(int64_t* value);
+
+ template <typename AddressType>
+- size_t GetEncodedSize(uint8_t encoding);
++ std::size_t GetEncodedSize(uint8_t encoding);
+
+ bool AdjustEncodedValue(uint8_t encoding, uint64_t* value);
+
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/hard-code-build-number.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/hard-code-build-number.patch
new file mode 100644
index 0000000000..4d7323bd2f
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/hard-code-build-number.patch
@@ -0,0 +1,46 @@
+Upstream-Status: Pending
+
+Description: just hard code rather than deal with circular deps
+Forwarded: not-needed
+--- a/system/core/adb/adb.cpp
++++ b/system/core/adb/adb.cpp
+@@ -44,8 +44,6 @@
+ #include <android-base/parsenetaddress.h>
+ #include <android-base/stringprintf.h>
+ #include <android-base/strings.h>
+-#include <build/version.h>
+-#include <platform_tools_version.h>
+
+ #include "adb_auth.h"
+ #include "adb_io.h"
+@@ -69,7 +67,7 @@
+ "Version %s-%s\n"
+ "Installed as %s\n",
+ ADB_VERSION_MAJOR, ADB_VERSION_MINOR, ADB_SERVER_VERSION,
+- PLATFORM_TOOLS_VERSION, android::build::GetBuildNumber().c_str(),
++ PLATFORM_TOOLS_VERSION, "debian",
+ android::base::GetExecutablePath().c_str());
+ }
+
+--- a/system/core/fastboot/fastboot.cpp
++++ b/system/core/fastboot/fastboot.cpp
+@@ -59,10 +59,8 @@
+ #include <android-base/stringprintf.h>
+ #include <android-base/strings.h>
+ #include <android-base/unique_fd.h>
+-#include <build/version.h>
+ #include <libavb/libavb.h>
+ #include <liblp/liblp.h>
+-#include <platform_tools_version.h>
+ #include <sparse/sparse.h>
+ #include <ziparchive/zip_archive.h>
+
+@@ -1680,7 +1678,7 @@
+ setvbuf(stdout, nullptr, _IONBF, 0);
+ setvbuf(stderr, nullptr, _IONBF, 0);
+ } else if (name == "version") {
+- fprintf(stdout, "fastboot version %s-%s\n", PLATFORM_TOOLS_VERSION, android::build::GetBuildNumber().c_str());
++ fprintf(stdout, "fastboot version %s-%s\n", PLATFORM_TOOLS_VERSION, "debian");
+ fprintf(stdout, "Installed as %s\n", android::base::GetExecutablePath().c_str());
+ return 0;
+ #if !defined(_WIN32)
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/libusb-header-path.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/libusb-header-path.patch
new file mode 100644
index 0000000000..122bd70e7d
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/libusb-header-path.patch
@@ -0,0 +1,18 @@
+Upstream-Status: Pending
+
+Description: libusb.h comes from different location
+Author: Umang Parmar <umangjparmar@gmail.com>
+Forwarded: not-needed
+Last-Update: 2018-05-26
+
+--- a/system/core/adb/client/usb_libusb.cpp
++++ b/system/core/adb/client/usb_libusb.cpp
+@@ -30,7 +30,7 @@
+ #include <thread>
+ #include <unordered_map>
+
+-#include <libusb/libusb.h>
++#include <libusb-1.0/libusb.h>
+
+ #include <android-base/file.h>
+ #include <android-base/logging.h>
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/move-log-file-to-proper-dir.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/move-log-file-to-proper-dir.patch
new file mode 100644
index 0000000000..e8494ab433
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/move-log-file-to-proper-dir.patch
@@ -0,0 +1,20 @@
+Upstream-Status: Pending
+
+Description: Update log file directory.
+Author: Umang Parmar <umangjparmar@gmail.com>
+Last-Update: 2018-05-17
+
+--- a/system/core/adb/adb_utils.cpp
++++ b/system/core/adb/adb_utils.cpp
+@@ -339,6 +339,11 @@
+
+ return temp_path_utf8 + log_name;
+ #else
++ std::string log_dir = android::base::StringPrintf("/run/user/%u/adb.log", getuid());
++ struct stat st = {0};
++ if (stat(log_dir.c_str(), &st) == 0) {
++ return log_dir;
++ }
+ const char* tmp_dir = getenv("TMPDIR");
+ if (tmp_dir == nullptr) tmp_dir = "/tmp";
+ return android::base::StringPrintf("%s/adb.%u.log", tmp_dir, getuid());
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/simg_dump-python3.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/simg_dump-python3.patch
new file mode 100644
index 0000000000..6664dc2aa5
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/simg_dump-python3.patch
@@ -0,0 +1,64 @@
+Description: Port simg_dump to Python 3.
+Author: Antonio Russo <antonio.e.russo@gmail.com>
+Forwarded: not-needed
+Last-Update: 2019-01-05
+Origin: https://bugs.debian.org/945646
+
+---
+Upstream-Status: Pending
+
+Index: android-platform-tools/system/core/libsparse/simg_dump.py
+===================================================================
+--- android-platform-tools.orig/system/core/libsparse/simg_dump.py
++++ android-platform-tools/system/core/libsparse/simg_dump.py
+@@ -1,4 +1,4 @@
+-#! /usr/bin/env python
++#! /usr/bin/env python3
+
+ # Copyright (C) 2012 The Android Open Source Project
+ #
+@@ -14,7 +14,7 @@
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+-from __future__ import print_function
++
+ import csv
+ import getopt
+ import hashlib
+@@ -47,7 +47,7 @@ def main():
+ opts, args = getopt.getopt(sys.argv[1:],
+ "vsc:",
+ ["verbose", "showhash", "csvfile"])
+- except getopt.GetoptError, e:
++ except getopt.GetoptError as e:
+ print(e)
+ usage(me)
+ for o, a in opts:
+@@ -66,7 +66,7 @@ def main():
+ usage(me)
+
+ if csvfilename:
+- csvfile = open(csvfilename, "wb")
++ csvfile = open(csvfilename, "w", newline='')
+ csvwriter = csv.writer(csvfile)
+
+ output = verbose or csvfilename or showhash
+@@ -121,7 +121,7 @@ def main():
+ "output offset", "output blocks", "type", "hash"])
+
+ offset = 0
+- for i in xrange(1, total_chunks + 1):
++ for i in range(1, total_chunks + 1):
+ header_bin = FH.read(12)
+ header = struct.unpack("<2H2I", header_bin)
+ chunk_type = header[0]
+@@ -160,7 +160,7 @@ def main():
+ if showhash:
+ h = hashlib.sha1()
+ data = fill_bin * (blk_sz / 4);
+- for block in xrange(chunk_sz):
++ for block in range(chunk_sz):
+ h.update(data)
+ curhash = h.hexdigest()
+ elif chunk_type == 0xCAC3:
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/stdatomic.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/stdatomic.patch
new file mode 100644
index 0000000000..e11f3cc783
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/stdatomic.patch
@@ -0,0 +1,66 @@
+Upstream-Status: Pending
+
+Description: Fix incompatibility between <stdatomic.h> and <atomic>
+ These two headers combined will cause errors for both GCC and Clang. This patch
+ makes sure only one of them is present at any time.
+Bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=60932
+Bug: https://reviews.llvm.org/D45470
+--- a/system/core/libcutils/include/cutils/trace.h
++++ b/system/core/libcutils/include/cutils/trace.h
+@@ -18,7 +18,14 @@
+ #define _LIBS_CUTILS_TRACE_H
+
+ #include <inttypes.h>
++#ifdef __cplusplus
++#include <atomic>
++using std::atomic_bool;
++using std::atomic_load_explicit;
++using std::memory_order_acquire;
++#else
+ #include <stdatomic.h>
++#endif
+ #include <stdbool.h>
+ #include <stdint.h>
+ #include <stdio.h>
+--- a/system/core/libcutils/include/cutils/atomic.h
++++ b/system/core/libcutils/include/cutils/atomic.h
+@@ -19,7 +19,23 @@
+
+ #include <stdint.h>
+ #include <sys/types.h>
++#ifdef __cplusplus
++#include <atomic>
++using std::atomic_compare_exchange_strong_explicit;
++using std::atomic_fetch_add_explicit;
++using std::atomic_fetch_or_explicit;
++using std::atomic_fetch_sub_explicit;
++using std::atomic_int_least32_t;
++using std::atomic_load_explicit;
++using std::atomic_store_explicit;
++using std::atomic_thread_fence;
++using std::memory_order::memory_order_acquire;
++using std::memory_order::memory_order_relaxed;
++using std::memory_order::memory_order_release;
++using std::memory_order::memory_order_seq_cst;
++#else
+ #include <stdatomic.h>
++#endif
+
+ #ifndef ANDROID_ATOMIC_INLINE
+ #define ANDROID_ATOMIC_INLINE static inline
+--- a/system/core/liblog/logger.h
++++ b/system/core/liblog/logger.h
+@@ -16,7 +16,13 @@
+
+ #pragma once
+
++#ifdef __cplusplus
++#include <atomic>
++using std::atomic_int;
++using std::atomic_uintptr_t;
++#else
+ #include <stdatomic.h>
++#endif
+ #include <sys/cdefs.h>
+
+ #include <log/log.h>
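The three hunks above all apply the same pattern; condensed into a single sketch (not an additional file in the patch), a header shared between C and C++ picks exactly one atomics header per language:

    #ifdef __cplusplus
    #include <atomic>              // C++ must not mix <atomic> with <stdatomic.h>
    using std::atomic_int;         // re-export the C-style names the shared code expects
    using std::atomic_load_explicit;
    using std::memory_order_acquire;
    #else
    #include <stdatomic.h>         // C11 atomics
    #endif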
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/stub-out-fastdeploy.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/stub-out-fastdeploy.patch
new file mode 100644
index 0000000000..d86ef230f7
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/stub-out-fastdeploy.patch
@@ -0,0 +1,95 @@
+Upstream-Status: Pending
+
+Description: Defer packaging fastdeploy with adb for 29.x.x tags.
+Forwarded: not-needed
+--- a/system/core/adb/client/commandline.cpp
++++ b/system/core/adb/client/commandline.cpp
+@@ -59,7 +59,6 @@
+ #include "bugreport.h"
+ #include "client/file_sync_client.h"
+ #include "commandline.h"
+-#include "fastdeploy.h"
+ #include "services.h"
+ #include "shell_protocol.h"
+ #include "sysdeps/chrono.h"
+--- a/system/core/adb/client/adb_install.cpp
++++ b/system/core/adb/client/adb_install.cpp
+@@ -35,7 +35,6 @@
+ #include "adb_utils.h"
+ #include "client/file_sync_client.h"
+ #include "commandline.h"
+-#include "fastdeploy.h"
+
+ static constexpr int kFastDeployMinApi = 24;
+
+@@ -167,14 +166,6 @@
+ }
+
+ if (use_fastdeploy) {
+- auto metadata = extract_metadata(file);
+- if (metadata.has_value()) {
+- // pass all but 1st (command) and last (apk path) parameters through to pm for
+- // session creation
+- std::vector<const char*> pm_args{argv + 1, argv + argc - 1};
+- auto patchFd = install_patch(pm_args.size(), pm_args.data());
+- return stream_patch(file, std::move(metadata.value()), std::move(patchFd));
+- }
+ }
+
+ struct stat sb;
+@@ -267,16 +258,6 @@
+ argv[last_apk] = apk_dest.c_str(); /* destination name, not source location */
+
+ if (use_fastdeploy) {
+- auto metadata = extract_metadata(apk_file[0]);
+- if (metadata.has_value()) {
+- auto patchFd = apply_patch_on_device(apk_dest.c_str());
+- int status = stream_patch(apk_file[0], std::move(metadata.value()), std::move(patchFd));
+-
+- result = pm_command(argc, argv);
+- delete_device_file(apk_dest);
+-
+- return status;
+- }
+ }
+
+ if (do_sync_push(apk_file, apk_dest.c_str(), false)) {
+@@ -292,7 +273,6 @@
+ InstallMode installMode = INSTALL_DEFAULT;
+ bool use_fastdeploy = false;
+ bool is_reinstall = false;
+- FastDeploy_AgentUpdateStrategy agent_update_strategy = FastDeploy_AgentUpdateDifferentVersion;
+
+ for (int i = 1; i < argc; i++) {
+ if (!strcmp(argv[i], "--streaming")) {
+@@ -313,13 +293,10 @@
+ use_fastdeploy = false;
+ } else if (!strcmp(argv[i], "--force-agent")) {
+ processedArgIndicies.push_back(i);
+- agent_update_strategy = FastDeploy_AgentUpdateAlways;
+ } else if (!strcmp(argv[i], "--date-check-agent")) {
+ processedArgIndicies.push_back(i);
+- agent_update_strategy = FastDeploy_AgentUpdateNewerTimeStamp;
+ } else if (!strcmp(argv[i], "--version-check-agent")) {
+ processedArgIndicies.push_back(i);
+- agent_update_strategy = FastDeploy_AgentUpdateDifferentVersion;
+ }
+ }
+
+@@ -331,13 +308,11 @@
+ error_exit("Attempting to use streaming install on unsupported device");
+ }
+
+- if (use_fastdeploy && get_device_api_level() < kFastDeployMinApi) {
+- printf("Fast Deploy is only compatible with devices of API version %d or higher, "
+- "ignoring.\n",
+- kFastDeployMinApi);
++ if (use_fastdeploy) {
++ printf("Fast Deploy is unavailable in this build of adb, "
++ "ignoring.\n");
+ use_fastdeploy = false;
+ }
+- fastdeploy_set_agent_update_strategy(agent_update_strategy);
+
+ std::vector<const char*> passthrough_argv;
+ for (int i = 0; i < argc; i++) {
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Add-riscv64-support.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Add-riscv64-support.patch
new file mode 100644
index 0000000000..9fd0fa792c
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Add-riscv64-support.patch
@@ -0,0 +1,653 @@
+From: Guo Ren <guoren@linux.alibaba.com>
+Date: Wed, 29 Jun 2022 16:46:46 +0800
+Subject: Add riscv64 support
+
+This patch contains the dwarf unwind support for 64bit risc-v.
+
+ * DwarfCfa.cpp (cfa_def_cfa_register): setup register if CFA_REG is
+ not setup for riscv64
+ * Elf.cpp (GetRelPc): convert offset to virtual address for riscv64.
+ * ElfInterface.cpp (GetVirtAddrFromOffset): New for riscv64.
+ * RegsRiscv64.cpp (StepIfSignalHandler): Fix signal frame check.
+ libunwindstack/include/unwindstack/
+ * ElfInterface.h (GetVirtAddrFromOffset): New for riscv64.
+ libunwindstack/tests/
+ * DwarfCfaTest.cpp (cfa_def_cfa_register): ok for riscv64.
+ * RegsStepIfSignalHandlerTest.cpp (riscv64_step_if_signal_handler): Fix
+ testcase for riscv64
+
+Test: Builds.
+Test: All unit tests pass.
+
+Signed-off-by: Guo Ren <guoren@linux.alibaba.com>
+Signed-off-by: Lifang Xia <lifang_xia@linux.alibaba.com>
+Signed-off-by: Mao Han <han_mao@linux.alibaba.com>
+Change-Id: Ib21ddf23cc83f332af202df7bffcaceec16063e0
+---
+Upstream-Status: Pending
+
+ system/core/libunwindstack/Android.bp | 1 +
+ system/core/libunwindstack/Elf.cpp | 2 +
+ system/core/libunwindstack/Regs.cpp | 10 ++
+ system/core/libunwindstack/RegsRiscv64.cpp | 156 +++++++++++++++++++++
+ .../core/libunwindstack/include/unwindstack/Elf.h | 5 +
+ .../include/unwindstack/MachineRiscv64.h | 59 ++++++++
+ .../include/unwindstack/RegsGetLocal.h | 43 ++++++
+ .../include/unwindstack/RegsRiscv64.h | 59 ++++++++
+ .../include/unwindstack/UcontextRiscv64.h | 80 +++++++++++
+ .../include/unwindstack/UserRiscv64.h | 37 +++++
+ system/core/libunwindstack/tools/unwind.cpp | 3 +
+ .../core/libunwindstack/tools/unwind_symbols.cpp | 3 +
+ 12 files changed, 458 insertions(+)
+ create mode 100644 system/core/libunwindstack/RegsRiscv64.cpp
+ create mode 100644 system/core/libunwindstack/include/unwindstack/MachineRiscv64.h
+ create mode 100644 system/core/libunwindstack/include/unwindstack/RegsRiscv64.h
+ create mode 100644 system/core/libunwindstack/include/unwindstack/UcontextRiscv64.h
+ create mode 100644 system/core/libunwindstack/include/unwindstack/UserRiscv64.h
+
+diff --git a/system/core/libunwindstack/Android.bp b/system/core/libunwindstack/Android.bp
+index 3695f72..f1f9c68 100644
+--- a/system/core/libunwindstack/Android.bp
++++ b/system/core/libunwindstack/Android.bp
+@@ -70,6 +70,7 @@ cc_library {
+ "RegsArm64.cpp",
+ "RegsX86.cpp",
+ "RegsX86_64.cpp",
++ "RegsRiscv64.cpp",
+ "RegsMips.cpp",
+ "RegsMips64.cpp",
+ "Unwinder.cpp",
+diff --git a/system/core/libunwindstack/Elf.cpp b/system/core/libunwindstack/Elf.cpp
+index f01b092..3c2088b 100644
+--- a/system/core/libunwindstack/Elf.cpp
++++ b/system/core/libunwindstack/Elf.cpp
+@@ -290,6 +290,8 @@ ElfInterface* Elf::CreateInterfaceFromMemory(Memory* memory) {
+ arch_ = ARCH_X86_64;
+ } else if (e_machine == EM_MIPS) {
+ arch_ = ARCH_MIPS64;
++ } else if (e_machine == EM_RISCV) {
++ arch_ = ARCH_RISCV64;
+ } else {
+ // Unsupported.
+ ALOGI("64 bit elf that is neither aarch64 nor x86_64 nor mips64: e_machine = %d\n",
+diff --git a/system/core/libunwindstack/Regs.cpp b/system/core/libunwindstack/Regs.cpp
+index c7dec52..447a554 100644
+--- a/system/core/libunwindstack/Regs.cpp
++++ b/system/core/libunwindstack/Regs.cpp
+@@ -27,12 +27,14 @@
+ #include <unwindstack/RegsArm64.h>
+ #include <unwindstack/RegsMips.h>
+ #include <unwindstack/RegsMips64.h>
++#include <unwindstack/RegsRiscv64.h>
+ #include <unwindstack/RegsX86.h>
+ #include <unwindstack/RegsX86_64.h>
+ #include <unwindstack/UserArm.h>
+ #include <unwindstack/UserArm64.h>
+ #include <unwindstack/UserMips.h>
+ #include <unwindstack/UserMips64.h>
++#include <unwindstack/UserRiscv64.h>
+ #include <unwindstack/UserX86.h>
+ #include <unwindstack/UserX86_64.h>
+
+@@ -67,6 +69,8 @@ Regs* Regs::RemoteGet(pid_t pid) {
+ return RegsMips::Read(buffer.data());
+ case sizeof(mips64_user_regs):
+ return RegsMips64::Read(buffer.data());
++ case sizeof(riscv64_user_regs):
++ return RegsRiscv64::Read(buffer.data());
+ }
+ return nullptr;
+ }
+@@ -85,6 +89,8 @@ Regs* Regs::CreateFromUcontext(ArchEnum arch, void* ucontext) {
+ return RegsMips::CreateFromUcontext(ucontext);
+ case ARCH_MIPS64:
+ return RegsMips64::CreateFromUcontext(ucontext);
++ case ARCH_RISCV64:
++ return RegsRiscv64::CreateFromUcontext(ucontext);
+ case ARCH_UNKNOWN:
+ default:
+ return nullptr;
+@@ -104,6 +110,8 @@ ArchEnum Regs::CurrentArch() {
+ return ARCH_MIPS;
+ #elif defined(__mips__) && defined(__LP64__)
+ return ARCH_MIPS64;
++#elif defined(__riscv)
++ return ARCH_RISCV64;
+ #else
+ abort();
+ #endif
+@@ -123,6 +131,8 @@ Regs* Regs::CreateFromLocal() {
+ regs = new RegsMips();
+ #elif defined(__mips__) && defined(__LP64__)
+ regs = new RegsMips64();
++#elif defined(__riscv)
++ regs = new RegsRiscv64();
+ #else
+ abort();
+ #endif
+diff --git a/system/core/libunwindstack/RegsRiscv64.cpp b/system/core/libunwindstack/RegsRiscv64.cpp
+new file mode 100644
+index 0000000..887762a
+--- /dev/null
++++ b/system/core/libunwindstack/RegsRiscv64.cpp
+@@ -0,0 +1,156 @@
++/*
++ * Copyright (C) 2022 The Android Open Source Project
++ *
++ * Licensed under the Apache License, Version 2.0 (the "License");
++ * you may not use this file except in compliance with the License.
++ * You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++#include <stdint.h>
++#include <string.h>
++
++#include <functional>
++
++#include <unwindstack/Elf.h>
++#include <unwindstack/MachineRiscv64.h>
++#include <unwindstack/MapInfo.h>
++#include <unwindstack/Memory.h>
++#include <unwindstack/RegsRiscv64.h>
++#include <unwindstack/UcontextRiscv64.h>
++#include <unwindstack/UserRiscv64.h>
++
++namespace unwindstack {
++
++RegsRiscv64::RegsRiscv64()
++ : RegsImpl<uint64_t>(RISCV64_REG_MAX, Location(LOCATION_REGISTER, RISCV64_REG_RA)) {}
++
++ArchEnum RegsRiscv64::Arch() {
++ return ARCH_RISCV64;
++}
++
++uint64_t RegsRiscv64::pc() {
++ return regs_[RISCV64_REG_PC];
++}
++
++uint64_t RegsRiscv64::sp() {
++ return regs_[RISCV64_REG_SP];
++}
++
++void RegsRiscv64::set_pc(uint64_t pc) {
++ regs_[RISCV64_REG_PC] = pc;
++}
++
++void RegsRiscv64::set_sp(uint64_t sp) {
++ regs_[RISCV64_REG_SP] = sp;
++}
++
++uint64_t RegsRiscv64::GetPcAdjustment(uint64_t rel_pc, Elf*) {
++ if (rel_pc < 8) {
++ return 0;
++ }
++ // For now, just assume no compact branches
++ return 8;
++}
++
++bool RegsRiscv64::SetPcFromReturnAddress(Memory*) {
++ uint64_t ra = regs_[RISCV64_REG_RA];
++ if (regs_[RISCV64_REG_PC] == ra) {
++ return false;
++ }
++
++ regs_[RISCV64_REG_PC] = ra;
++ return true;
++}
++
++void RegsRiscv64::IterateRegisters(std::function<void(const char*, uint64_t)> fn) {
++ fn("pc", regs_[RISCV64_REG_PC]);
++ fn("ra", regs_[RISCV64_REG_RA]);
++ fn("sp", regs_[RISCV64_REG_SP]);
++ fn("gp", regs_[RISCV64_REG_GP]);
++ fn("tp", regs_[RISCV64_REG_TP]);
++ fn("t0", regs_[RISCV64_REG_T0]);
++ fn("t1", regs_[RISCV64_REG_T1]);
++ fn("t2", regs_[RISCV64_REG_T2]);
++ fn("t3", regs_[RISCV64_REG_T3]);
++ fn("t4", regs_[RISCV64_REG_T4]);
++ fn("t5", regs_[RISCV64_REG_T5]);
++ fn("t6", regs_[RISCV64_REG_T6]);
++ fn("s0", regs_[RISCV64_REG_S0]);
++ fn("s1", regs_[RISCV64_REG_S1]);
++ fn("s2", regs_[RISCV64_REG_S2]);
++ fn("s3", regs_[RISCV64_REG_S3]);
++ fn("s4", regs_[RISCV64_REG_S4]);
++ fn("s5", regs_[RISCV64_REG_S5]);
++ fn("s6", regs_[RISCV64_REG_S6]);
++ fn("s7", regs_[RISCV64_REG_S7]);
++ fn("s8", regs_[RISCV64_REG_S8]);
++ fn("s9", regs_[RISCV64_REG_S9]);
++ fn("s10", regs_[RISCV64_REG_S10]);
++ fn("s11", regs_[RISCV64_REG_S11]);
++ fn("a0", regs_[RISCV64_REG_A0]);
++ fn("a1", regs_[RISCV64_REG_A1]);
++ fn("a2", regs_[RISCV64_REG_A2]);
++ fn("a3", regs_[RISCV64_REG_A3]);
++ fn("a4", regs_[RISCV64_REG_A4]);
++ fn("a5", regs_[RISCV64_REG_A5]);
++ fn("a6", regs_[RISCV64_REG_A6]);
++ fn("a7", regs_[RISCV64_REG_A7]);
++}
++
++Regs* RegsRiscv64::Read(void* remote_data) {
++ riscv64_user_regs* user = reinterpret_cast<riscv64_user_regs*>(remote_data);
++
++ RegsRiscv64* regs = new RegsRiscv64();
++ memcpy(regs->RawData(), &user->regs[0], RISCV64_REG_MAX * sizeof(uint64_t));
++ // uint64_t* reg_data = reinterpret_cast<uint64_t*>(regs->RawData());
++ return regs;
++}
++
++Regs* RegsRiscv64::CreateFromUcontext(void* ucontext) {
++ riscv64_ucontext_t* riscv64_ucontext = reinterpret_cast<riscv64_ucontext_t*>(ucontext);
++
++ RegsRiscv64* regs = new RegsRiscv64();
++ memcpy(regs->RawData(), &riscv64_ucontext->uc_mcontext.__gregs[0],
++ RISCV64_REG_MAX * sizeof(uint64_t));
++ return regs;
++}
++
++bool RegsRiscv64::StepIfSignalHandler(uint64_t elf_offset, Elf* elf, Memory* process_memory) {
++ uint64_t data;
++ Memory* elf_memory = elf->memory();
++ // Read from elf memory since it is usually more expensive to read from
++ // process memory.
++ if (!elf_memory->ReadFully(elf_offset, &data, sizeof(data))) {
++ return false;
++ }
++ // Look for the kernel sigreturn function.
++ // __kernel_rt_sigreturn:
++ // li a7, __NR_rt_sigreturn
++ // scall
++
++ const uint8_t li_scall[] = {0x93, 0x08, 0xb0, 0x08, 0x73, 0x00, 0x00, 0x00};
++ if (memcmp(&data, &li_scall, 8) != 0) {
++ return false;
++ }
++
++ // SP + sizeof(siginfo_t) + uc_mcontext offset + PC offset.
++ if (!process_memory->ReadFully(regs_[RISCV64_REG_SP] + 0x80 + 0xb0 + 0x00, regs_.data(),
++ sizeof(uint64_t) * (RISCV64_REG_MAX))) {
++ return false;
++ }
++ return true;
++}
++
++Regs* RegsRiscv64::Clone() {
++ return new RegsRiscv64(*this);
++}
++
++} // namespace unwindstack
+diff --git a/system/core/libunwindstack/include/unwindstack/Elf.h b/system/core/libunwindstack/include/unwindstack/Elf.h
+index 472ed92..88fa0ff 100644
+--- a/system/core/libunwindstack/include/unwindstack/Elf.h
++++ b/system/core/libunwindstack/include/unwindstack/Elf.h
+@@ -32,6 +32,10 @@
+ #define EM_AARCH64 183
+ #endif
+
++#if !defined(EM_RISCV)
++#define EM_RISCV 243
++#endif
++
+ namespace unwindstack {
+
+ // Forward declaration.
+@@ -46,6 +50,7 @@ enum ArchEnum : uint8_t {
+ ARCH_X86_64,
+ ARCH_MIPS,
+ ARCH_MIPS64,
++ ARCH_RISCV64,
+ };
+
+ class Elf {
+diff --git a/system/core/libunwindstack/include/unwindstack/MachineRiscv64.h b/system/core/libunwindstack/include/unwindstack/MachineRiscv64.h
+new file mode 100644
+index 0000000..397e680
+--- /dev/null
++++ b/system/core/libunwindstack/include/unwindstack/MachineRiscv64.h
+@@ -0,0 +1,59 @@
++/*
++ * Copyright (C) 2022 The Android Open Source Project
++ *
++ * Licensed under the Apache License, Version 2.0 (the "License");
++ * you may not use this file except in compliance with the License.
++ * You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++#pragma once
++
++#include <stdint.h>
++
++namespace unwindstack {
++
++enum Riscv64Reg : uint16_t {
++ RISCV64_REG_PC,
++ RISCV64_REG_RA,
++ RISCV64_REG_SP,
++ RISCV64_REG_GP,
++ RISCV64_REG_TP,
++ RISCV64_REG_T0,
++ RISCV64_REG_T1,
++ RISCV64_REG_T2,
++ RISCV64_REG_S0,
++ RISCV64_REG_S1,
++ RISCV64_REG_A0,
++ RISCV64_REG_A1,
++ RISCV64_REG_A2,
++ RISCV64_REG_A3,
++ RISCV64_REG_A4,
++ RISCV64_REG_A5,
++ RISCV64_REG_A6,
++ RISCV64_REG_A7,
++ RISCV64_REG_S2,
++ RISCV64_REG_S3,
++ RISCV64_REG_S4,
++ RISCV64_REG_S5,
++ RISCV64_REG_S6,
++ RISCV64_REG_S7,
++ RISCV64_REG_S8,
++ RISCV64_REG_S9,
++ RISCV64_REG_S10,
++ RISCV64_REG_S11,
++ RISCV64_REG_T3,
++ RISCV64_REG_T4,
++ RISCV64_REG_T5,
++ RISCV64_REG_T6,
++ RISCV64_REG_MAX,
++};
++
++} // namespace unwindstack
+diff --git a/system/core/libunwindstack/include/unwindstack/RegsGetLocal.h b/system/core/libunwindstack/include/unwindstack/RegsGetLocal.h
+index f0b5e3a..698eba2 100644
+--- a/system/core/libunwindstack/include/unwindstack/RegsGetLocal.h
++++ b/system/core/libunwindstack/include/unwindstack/RegsGetLocal.h
+@@ -81,6 +81,49 @@ inline __attribute__((__always_inline__)) void AsmGetRegs(void* reg_data) {
+ : "x12", "x13", "memory");
+ }
+
++#elif defined(__riscv)
++
++inline __attribute__((__always_inline__)) void AsmGetRegs(void* reg_data) {
++ asm volatile(
++ "1:\n"
++ "sd ra, 8(%[base])\n"
++ "sd sp, 16(%[base])\n"
++ "sd gp, 24(%[base])\n"
++ "sd tp, 32(%[base])\n"
++ "sd t0, 40(%[base])\n"
++ "sd t1, 48(%[base])\n"
++ "sd t2, 56(%[base])\n"
++ "sd s0, 64(%[base])\n"
++ "sd s1, 72(%[base])\n"
++ "sd a0, 80(%[base])\n"
++ "sd a1, 88(%[base])\n"
++ "sd a2, 96(%[base])\n"
++ "sd a3, 104(%[base])\n"
++ "sd a4, 112(%[base])\n"
++ "sd a5, 120(%[base])\n"
++ "sd a6, 128(%[base])\n"
++ "sd a7, 136(%[base])\n"
++ "sd s2, 144(%[base])\n"
++ "sd s3, 152(%[base])\n"
++ "sd s4, 160(%[base])\n"
++ "sd s5, 168(%[base])\n"
++ "sd s6, 176(%[base])\n"
++ "sd s7, 184(%[base])\n"
++ "sd s8, 192(%[base])\n"
++ "sd s9, 200(%[base])\n"
++ "sd s10, 208(%[base])\n"
++ "sd s11, 216(%[base])\n"
++ "sd t3, 224(%[base])\n"
++ "sd t4, 232(%[base])\n"
++ "sd t5, 240(%[base])\n"
++ "sd t6, 248(%[base])\n"
++ "la t1, 1b\n"
++ "sd t1, 0(%[base])\n"
++ : [base] "+r"(reg_data)
++ :
++ : "t1", "memory");
++}
++
+ #elif defined(__i386__) || defined(__x86_64__) || defined(__mips__)
+
+ extern "C" void AsmGetRegs(void* regs);
+diff --git a/system/core/libunwindstack/include/unwindstack/RegsRiscv64.h b/system/core/libunwindstack/include/unwindstack/RegsRiscv64.h
+new file mode 100644
+index 0000000..eb09397
+--- /dev/null
++++ b/system/core/libunwindstack/include/unwindstack/RegsRiscv64.h
+@@ -0,0 +1,59 @@
++/*
++ * Copyright (C) 2022 The Android Open Source Project
++ *
++ * Licensed under the Apache License, Version 2.0 (the "License");
++ * you may not use this file except in compliance with the License.
++ * You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++#pragma once
++
++#include <stdint.h>
++
++#include <functional>
++
++#include <unwindstack/Elf.h>
++#include <unwindstack/Regs.h>
++
++namespace unwindstack {
++
++// Forward declarations.
++class Memory;
++
++class RegsRiscv64 : public RegsImpl<uint64_t> {
++ public:
++ RegsRiscv64();
++ virtual ~RegsRiscv64() = default;
++
++ ArchEnum Arch() override final;
++
++ uint64_t GetPcAdjustment(uint64_t rel_pc, Elf* elf) override;
++
++ bool SetPcFromReturnAddress(Memory* process_memory) override;
++
++ bool StepIfSignalHandler(uint64_t elf_offset, Elf* elf, Memory* process_memory) override;
++
++ void IterateRegisters(std::function<void(const char*, uint64_t)>) override final;
++
++ uint64_t pc() override;
++ uint64_t sp() override;
++
++ void set_pc(uint64_t pc) override;
++ void set_sp(uint64_t sp) override;
++
++ Regs* Clone() override final;
++
++ static Regs* Read(void* data);
++
++ static Regs* CreateFromUcontext(void* ucontext);
++};
++
++} // namespace unwindstack
+diff --git a/system/core/libunwindstack/include/unwindstack/UcontextRiscv64.h b/system/core/libunwindstack/include/unwindstack/UcontextRiscv64.h
+new file mode 100644
+index 0000000..c6c82b1
+--- /dev/null
++++ b/system/core/libunwindstack/include/unwindstack/UcontextRiscv64.h
+@@ -0,0 +1,80 @@
++/*
++ * Copyright (C) 2014 The Android Open Source Project
++ * All rights reserved.
++ *
++ * Redistribution and use in source and binary forms, with or without
++ * modification, are permitted provided that the following conditions
++ * are met:
++ * * Redistributions of source code must retain the above copyright
++ * notice, this list of conditions and the following disclaimer.
++ * * Redistributions in binary form must reproduce the above copyright
++ * notice, this list of conditions and the following disclaimer in
++ * the documentation and/or other materials provided with the
++ * distribution.
++ *
++ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
++ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
++ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
++ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
++ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
++ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
++ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
++ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
++ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
++ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
++ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
++ * SUCH DAMAGE.
++ */
++
++#pragma once
++
++#include <sys/cdefs.h>
++
++typedef uint64_t __riscv_mc_gp_state[32]; // unsigned long
++
++struct __riscv_mc_f_ext_state {
++ uint32_t __f[32];
++ uint32_t __fcsr;
++};
++
++struct __riscv_mc_d_ext_state {
++ uint64_t __f[32];
++ uint32_t __fcsr;
++};
++
++struct __riscv_mc_q_ext_state {
++ uint64_t __f[64] __attribute__((__aligned__(16)));
++ uint32_t __fcsr;
++ uint32_t __reserved[3];
++};
++
++union __riscv_mc_fp_state {
++ struct __riscv_mc_f_ext_state __f;
++ struct __riscv_mc_d_ext_state __d;
++ struct __riscv_mc_q_ext_state __q;
++};
++
++struct __riscv_stack_t {
++ uint64_t ss_sp;
++ int32_t ss_flags;
++ uint64_t ss_size;
++};
++
++struct riscv64_sigset_t {
++ uint64_t sig; // unsigned long
++};
++
++struct riscv64_mcontext_t {
++ __riscv_mc_gp_state __gregs;
++ union __riscv_mc_fp_state __fpregs;
++};
++
++struct riscv64_ucontext_t {
++ uint64_t uc_flags; // unsigned long
++ struct riscv64_ucontext_t* uc_link;
++ __riscv_stack_t uc_stack;
++ riscv64_sigset_t uc_sigmask;
++ /* The kernel adds extra padding here to allow sigset_t to grow. */
++ int8_t __padding[128 - sizeof(riscv64_sigset_t)]; // char
++ riscv64_mcontext_t uc_mcontext;
++};
+diff --git a/system/core/libunwindstack/include/unwindstack/UserRiscv64.h b/system/core/libunwindstack/include/unwindstack/UserRiscv64.h
+new file mode 100644
+index 0000000..1e91228
+--- /dev/null
++++ b/system/core/libunwindstack/include/unwindstack/UserRiscv64.h
+@@ -0,0 +1,37 @@
++/*
++ * Copyright (C) 2016 The Android Open Source Project
++ * All rights reserved.
++ *
++ * Redistribution and use in source and binary forms, with or without
++ * modification, are permitted provided that the following conditions
++ * are met:
++ * * Redistributions of source code must retain the above copyright
++ * notice, this list of conditions and the following disclaimer.
++ * * Redistributions in binary form must reproduce the above copyright
++ * notice, this list of conditions and the following disclaimer in
++ * the documentation and/or other materials provided with the
++ * distribution.
++ *
++ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
++ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
++ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
++ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
++ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
++ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
++ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
++ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
++ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
++ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
++ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
++ * SUCH DAMAGE.
++ */
++
++#pragma once
++
++namespace unwindstack {
++
++struct riscv64_user_regs {
++ uint64_t regs[32];
++};
++
++} // namespace unwindstack
+diff --git a/system/core/libunwindstack/tools/unwind.cpp b/system/core/libunwindstack/tools/unwind.cpp
+index 1812e50..ae20891 100644
+--- a/system/core/libunwindstack/tools/unwind.cpp
++++ b/system/core/libunwindstack/tools/unwind.cpp
+@@ -83,6 +83,9 @@ void DoUnwind(pid_t pid) {
+ case unwindstack::ARCH_MIPS64:
+ printf("mips64");
+ break;
++ case unwindstack::ARCH_RISCV64:
++ printf("riscv64");
++ break;
+ default:
+ printf("unknown\n");
+ return;
+diff --git a/system/core/libunwindstack/tools/unwind_symbols.cpp b/system/core/libunwindstack/tools/unwind_symbols.cpp
+index 8df2284..976db56 100644
+--- a/system/core/libunwindstack/tools/unwind_symbols.cpp
++++ b/system/core/libunwindstack/tools/unwind_symbols.cpp
+@@ -77,6 +77,9 @@ int main(int argc, char** argv) {
+ case EM_AARCH64:
+ printf("ABI: arm64\n");
+ break;
++ case EM_RISCV:
++ printf("ABI: riscv64\n");
++ break;
+ case EM_386:
+ printf("ABI: x86\n");
+ break;
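One assumption in the riscv64 patch worth spelling out: the byte offsets hard-coded in the AsmGetRegs assembly ("sd ra, 8(...)", "sd sp, 16(...)", "sd a0, 80(...)") must line up with the Riscv64Reg enumerators, since reg_data is treated as a uint64_t array indexed by that enum. A hypothetical compile-time check of this invariant (not part of the patch) could look like:

    #include <cstdint>
    #include <unwindstack/MachineRiscv64.h>

    // Each register is stored at enumerator * sizeof(uint64_t) in the buffer.
    static_assert(unwindstack::RISCV64_REG_RA * sizeof(uint64_t) == 8,  "ra offset");
    static_assert(unwindstack::RISCV64_REG_SP * sizeof(uint64_t) == 16, "sp offset");
    static_assert(unwindstack::RISCV64_REG_A0 * sizeof(uint64_t) == 80, "a0 offset");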
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Added-missing-headers.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Added-missing-headers.patch
new file mode 100644
index 0000000000..d827d7d3c2
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Added-missing-headers.patch
@@ -0,0 +1,46 @@
+Upstream-Status: Pending
+
+Description: Added missing headers causing compile errors
+Author: Umang Parmar <umangjparmar@gmail.com>
+Forwarded: not-needed
+
+--- a/system/core/adb/sysdeps/posix/network.cpp
++++ b/system/core/adb/sysdeps/posix/network.cpp
+@@ -22,6 +22,7 @@
+ #include <sys/socket.h>
+
+ #include <string>
++#include <cstring>
+
+ #include <android-base/logging.h>
+ #include <android-base/stringprintf.h>
+--- a/system/core/base/file.cpp
++++ b/system/core/base/file.cpp
+@@ -26,6 +26,7 @@
+ #include <sys/stat.h>
+ #include <sys/types.h>
+ #include <unistd.h>
++#include <cstring>
+
+ #include <memory>
+ #include <mutex>
+--- a/system/core/libbacktrace/BacktraceMap.cpp
++++ b/system/core/libbacktrace/BacktraceMap.cpp
+@@ -21,6 +21,7 @@
+ #include <stdint.h>
+ #include <sys/types.h>
+ #include <unistd.h>
++#include <algorithm>
+
+ #include <log/log.h>
+
+--- a/system/core/libbacktrace/UnwindStackMap.cpp
++++ b/system/core/libbacktrace/UnwindStackMap.cpp
+@@ -20,6 +20,7 @@
+
+ #include <string>
+ #include <vector>
++#include <algorithm>
+
+ #include <backtrace/BacktraceMap.h>
+ #include <unwindstack/Elf.h>
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Nonnull.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Nonnull.patch
new file mode 100644
index 0000000000..54bd52c61a
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Nonnull.patch
@@ -0,0 +1,22 @@
+Upstream-Status: Pending
+
+Description: Bring Clang's _Nonnull keyword to GCC
+Author: Kai-Chung Yan
+Forwarded: not-needed
+--- a/system/core/adb/sysdeps.h
++++ b/system/core/adb/sysdeps.h
+@@ -40,11 +40,12 @@
+ #include "sysdeps/network.h"
+ #include "sysdeps/stat.h"
+
++#define _Nonnull
++#define _Nullable
++
+ #ifdef _WIN32
+
+ // Clang-only nullability specifiers
+-#define _Nonnull
+-#define _Nullable
+
+ #include <ctype.h>
+ #include <direct.h>
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Vector-cast.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Vector-cast.patch
new file mode 100644
index 0000000000..b2881e0213
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/Vector-cast.patch
@@ -0,0 +1,16 @@
+Upstream-Status: Pending
+
+Description: Fix a GCC 7 error where the correct parent-class method is not matched.
+Author: Kai-Chung Yan
+Forwarded: not-needed
+--- a/system/core/libutils/include/utils/Vector.h
++++ b/system/core/libutils/include/utils/Vector.h
+@@ -256,7 +256,7 @@
+
+ template<class TYPE> inline
+ const Vector<TYPE>& Vector<TYPE>::operator = (const Vector<TYPE>& rhs) const {
+- VectorImpl::operator = (static_cast<const VectorImpl&>(rhs));
++ VectorImpl::operator = (rhs);
+ return *this;
+ }
+
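
The cast removed above is redundant: a derived-class reference converts to its base implicitly when the base operator is called by qualified name. A tiny self-contained example of that (types are illustrative only, not the real libutils classes):

    // base_assign_demo.cpp -- illustration only.
    #include <cstdio>

    struct VectorImplLike {
        VectorImplLike& operator=(const VectorImplLike&) {
            std::puts("base operator= called");
            return *this;
        }
    };

    template <class T>
    struct VectorLike : VectorImplLike {
        VectorLike& operator=(const VectorLike& rhs) {
            // No static_cast needed: rhs converts to const VectorImplLike& implicitly.
            VectorImplLike::operator=(rhs);
            return *this;
        }
    };

    int main() {
        VectorLike<int> a, b;
        a = b;  // prints "base operator= called"
        return 0;
    }
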
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/add-missing-headers.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/add-missing-headers.patch
new file mode 100644
index 0000000000..681d2c6553
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/add-missing-headers.patch
@@ -0,0 +1,43 @@
+Upstream-Status: Pending
+
+Forwarded: not-needed
+--- a/system/core/fs_mgr/liblp/reader.cpp
++++ b/system/core/fs_mgr/liblp/reader.cpp
+@@ -22,6 +22,7 @@
+ #include <unistd.h>
+
+ #include <functional>
++#include <cstring>
+
+ #include <android-base/file.h>
+ #include <android-base/unique_fd.h>
+--- a/system/core/fs_mgr/liblp/writer.cpp
++++ b/system/core/fs_mgr/liblp/writer.cpp
+@@ -21,6 +21,7 @@
+ #include <unistd.h>
+
+ #include <string>
++#include <cstring>
+
+ #include <android-base/file.h>
+ #include <android-base/unique_fd.h>
+--- a/system/core/liblog/logger_write.cpp
++++ b/system/core/liblog/logger_write.cpp
+@@ -27,6 +27,7 @@
+ #include <android/set_abort_message.h>
+ #endif
+
++#include <mutex>
+ #include <shared_mutex>
+
+ #include <android-base/errno_restorer.h>
+--- a/system/core/libziparchive/zip_archive_stream_entry.cc
++++ b/system/core/libziparchive/zip_archive_stream_entry.cc
+@@ -23,6 +23,7 @@
+ #include <sys/types.h>
+ #include <unistd.h>
+
++#include <limits>
+ #include <memory>
+ #include <vector>
+
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-attribute-issue-with-gcc.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-attribute-issue-with-gcc.patch
new file mode 100644
index 0000000000..441031f536
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-attribute-issue-with-gcc.patch
@@ -0,0 +1,19 @@
+Upstream-Status: Pending
+
+Description: Remove a Clang-only attribute (diagnose_if) that GCC cannot parse
+Forwarded: not-needed
+
+--- a/system/core/base/include/android-base/logging.h
++++ b/system/core/base/include/android-base/logging.h
+@@ -451,10 +451,7 @@
+ // -Wno-user-defined-warnings to CPPFLAGS.
+ #pragma clang diagnostic push
+ #pragma clang diagnostic ignored "-Wgcc-compat"
+-#define OSTREAM_STRING_POINTER_USAGE_WARNING \
+- __attribute__((diagnose_if(true, "Unexpected logging of string pointer", "warning")))
+-inline OSTREAM_STRING_POINTER_USAGE_WARNING
+-std::ostream& operator<<(std::ostream& stream, const std::string* string_pointer) {
++inline std::ostream& operator<<(std::ostream& stream, const std::string* string_pointer) {
+ return stream << static_cast<const void*>(string_pointer);
+ }
+ #pragma clang diagnostic pop
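
diagnose_if is a Clang-only attribute, so GCC builds either drop it (as the patch does) or hide it behind a compiler check. A small sketch of the guarded alternative (macro and operator are illustrative, not the patched android-base code):

    // diagnose_if_demo.cpp -- illustration only; the attribute exists in Clang, not GCC.
    #if defined(__clang__)
    #define STRING_POINTER_WARNING \
        __attribute__((diagnose_if(true, "unexpected logging of string pointer", "warning")))
    #else
    #define STRING_POINTER_WARNING
    #endif

    #include <ostream>
    #include <string>

    // Clang warns whenever this overload is called; GCC sees a plain inline function.
    inline STRING_POINTER_WARNING
    std::ostream& operator<<(std::ostream& stream, const std::string* string_pointer) {
        return stream << static_cast<const void*>(string_pointer);
    }

    int main() { return 0; }
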
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-build-on-non-x86.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-build-on-non-x86.patch
new file mode 100644
index 0000000000..b1caa60c4a
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-build-on-non-x86.patch
@@ -0,0 +1,26 @@
+Upstream-Status: Pending
+
+Description: non-x86 arches do not define PAGE_SIZE
+Forwarded: not-needed
+--- a/system/core/base/cmsg.cpp
++++ b/system/core/base/cmsg.cpp
+@@ -33,7 +33,8 @@
+ const std::vector<int>& fds) {
+ size_t cmsg_space = CMSG_SPACE(sizeof(int) * fds.size());
+ size_t cmsg_len = CMSG_LEN(sizeof(int) * fds.size());
+- if (cmsg_space >= PAGE_SIZE) {
++ size_t pagesize = static_cast<size_t>(sysconf(_SC_PAGE_SIZE));
++ if (cmsg_space >= pagesize) {
+ errno = ENOMEM;
+ return -1;
+ }
+@@ -75,7 +76,8 @@
+ fds->clear();
+
+ size_t cmsg_space = CMSG_SPACE(sizeof(int) * max_fds);
+- if (cmsg_space >= PAGE_SIZE) {
++ size_t pagesize = static_cast<size_t>(sysconf(_SC_PAGE_SIZE));
++ if (cmsg_space >= pagesize) {
+ errno = ENOMEM;
+ return -1;
+ }
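
The change above swaps the PAGE_SIZE macro, which glibc only provides on some architectures, for a run-time sysconf() query. A standalone sketch of that pattern:

    // pagesize_demo.cpp -- illustration only, not part of the patched sources.
    #include <unistd.h>   // sysconf, _SC_PAGE_SIZE
    #include <cstdio>

    int main() {
        // Query the page size at run time instead of relying on PAGE_SIZE,
        // which is not defined by the headers on every architecture.
        const long pagesize = sysconf(_SC_PAGE_SIZE);
        if (pagesize < 0) {
            std::perror("sysconf");
            return 1;
        }
        std::printf("page size: %ld bytes\n", pagesize);
        return 0;
    }
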
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-gettid-exception-declaration.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-gettid-exception-declaration.patch
new file mode 100644
index 0000000000..5f24d0b4a0
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-gettid-exception-declaration.patch
@@ -0,0 +1,26 @@
+Upstream-Status: Pending
+
+Description: get libcutils building
+Forwarded: not-needed
+--- a/system/core/libcutils/include/cutils/threads.h
++++ b/system/core/libcutils/include/cutils/threads.h
+@@ -33,7 +33,7 @@
+ // Deprecated: use android::base::GetThreadId instead, which doesn't truncate on Mac/Windows.
+ //
+
+-extern pid_t gettid();
++extern pid_t gettid(void) __THROW;
+
+ //
+ // Deprecated: use `_Thread_local` in C or `thread_local` in C++.
+--- a/system/core/libcutils/threads.cpp
++++ b/system/core/libcutils/threads.cpp
+@@ -33,7 +33,7 @@
+
+ // No definition needed for Android because we'll just pick up bionic's copy.
+ #ifndef __ANDROID__
+-pid_t gettid() {
++pid_t gettid(void) __THROW {
+ #if defined(__APPLE__)
+ uint64_t tid;
+ pthread_threadid_np(NULL, &tid);
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-standard-namespace-errors.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-standard-namespace-errors.patch
new file mode 100644
index 0000000000..4380308f13
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/fix-standard-namespace-errors.patch
@@ -0,0 +1,24 @@
+Upstream-Status: Pending
+
+Description: Add missing 'std::' scope qualifiers.
+Forwarded: not-needed
+--- a/system/core/libunwindstack/include/unwindstack/DwarfMemory.h
++++ b/system/core/libunwindstack/include/unwindstack/DwarfMemory.h
+@@ -29,7 +29,7 @@
+ DwarfMemory(Memory* memory) : memory_(memory) {}
+ virtual ~DwarfMemory() = default;
+
+- bool ReadBytes(void* dst, size_t num_bytes);
++ bool ReadBytes(void* dst, std::size_t num_bytes);
+
+ template <typename SignedType>
+ bool ReadSigned(uint64_t* value);
+@@ -39,7 +39,7 @@
+ bool ReadSLEB128(int64_t* value);
+
+ template <typename AddressType>
+- size_t GetEncodedSize(uint8_t encoding);
++ std::size_t GetEncodedSize(uint8_t encoding);
+
+ bool AdjustEncodedValue(uint8_t encoding, uint64_t* value);
+
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/hard-code-build-number.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/hard-code-build-number.patch
new file mode 100644
index 0000000000..4d7323bd2f
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/hard-code-build-number.patch
@@ -0,0 +1,46 @@
+Upstream-Status: Pending
+
+Description: Hard-code the build number rather than deal with circular deps
+Forwarded: not-needed
+--- a/system/core/adb/adb.cpp
++++ b/system/core/adb/adb.cpp
+@@ -44,8 +44,6 @@
+ #include <android-base/parsenetaddress.h>
+ #include <android-base/stringprintf.h>
+ #include <android-base/strings.h>
+-#include <build/version.h>
+-#include <platform_tools_version.h>
+
+ #include "adb_auth.h"
+ #include "adb_io.h"
+@@ -69,7 +67,7 @@
+ "Version %s-%s\n"
+ "Installed as %s\n",
+ ADB_VERSION_MAJOR, ADB_VERSION_MINOR, ADB_SERVER_VERSION,
+- PLATFORM_TOOLS_VERSION, android::build::GetBuildNumber().c_str(),
++ PLATFORM_TOOLS_VERSION, "debian",
+ android::base::GetExecutablePath().c_str());
+ }
+
+--- a/system/core/fastboot/fastboot.cpp
++++ b/system/core/fastboot/fastboot.cpp
+@@ -59,10 +59,8 @@
+ #include <android-base/stringprintf.h>
+ #include <android-base/strings.h>
+ #include <android-base/unique_fd.h>
+-#include <build/version.h>
+ #include <libavb/libavb.h>
+ #include <liblp/liblp.h>
+-#include <platform_tools_version.h>
+ #include <sparse/sparse.h>
+ #include <ziparchive/zip_archive.h>
+
+@@ -1680,7 +1678,7 @@
+ setvbuf(stdout, nullptr, _IONBF, 0);
+ setvbuf(stderr, nullptr, _IONBF, 0);
+ } else if (name == "version") {
+- fprintf(stdout, "fastboot version %s-%s\n", PLATFORM_TOOLS_VERSION, android::build::GetBuildNumber().c_str());
++ fprintf(stdout, "fastboot version %s-%s\n", PLATFORM_TOOLS_VERSION, "debian");
+ fprintf(stdout, "Installed as %s\n", android::base::GetExecutablePath().c_str());
+ return 0;
+ #if !defined(_WIN32)
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/libusb-header-path.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/libusb-header-path.patch
new file mode 100644
index 0000000000..122bd70e7d
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/libusb-header-path.patch
@@ -0,0 +1,18 @@
+Upstream-Status: Pending
+
+Description: libusb.h comes from a different location
+Author: Umang Parmar <umangjparmar@gmail.com>
+Forwarded: not-needed
+Last-Update: 2018-05-26
+
+--- a/system/core/adb/client/usb_libusb.cpp
++++ b/system/core/adb/client/usb_libusb.cpp
+@@ -30,7 +30,7 @@
+ #include <thread>
+ #include <unordered_map>
+
+-#include <libusb/libusb.h>
++#include <libusb-1.0/libusb.h>
+
+ #include <android-base/file.h>
+ #include <android-base/logging.h>
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/move-log-file-to-proper-dir.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/move-log-file-to-proper-dir.patch
new file mode 100644
index 0000000000..e8494ab433
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/move-log-file-to-proper-dir.patch
@@ -0,0 +1,20 @@
+Upstream-Status: Pending
+
+Description: Update log file directory.
+Author: Umang Parmar <umangjparmar@gmail.com>
+Last-Update: 2018-05-17
+
+--- a/system/core/adb/adb_utils.cpp
++++ b/system/core/adb/adb_utils.cpp
+@@ -339,6 +339,11 @@
+
+ return temp_path_utf8 + log_name;
+ #else
++ std::string log_dir = android::base::StringPrintf("/run/user/%u/adb.log", getuid());
++ struct stat st = {0};
++ if (stat(log_dir.c_str(), &st) == 0) {
++ return log_dir;
++ }
+ const char* tmp_dir = getenv("TMPDIR");
+ if (tmp_dir == nullptr) tmp_dir = "/tmp";
+ return android::base::StringPrintf("%s/adb.%u.log", tmp_dir, getuid());
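
The hunk above prefers a per-user log path under /run/user/<uid> when that file already exists and otherwise falls back to TMPDIR. A rough standalone sketch of that selection, assuming a hypothetical helper name rather than the real adb_utils function:

    // adb_log_path_demo.cpp -- illustration only.
    #include <sys/stat.h>
    #include <unistd.h>
    #include <cstdio>
    #include <cstdlib>
    #include <string>

    static std::string GetLogFilePath() {
        // Prefer the user's runtime directory when the log file already exists there.
        char runtime_path[128];
        std::snprintf(runtime_path, sizeof(runtime_path), "/run/user/%u/adb.log", getuid());
        struct stat st;
        if (stat(runtime_path, &st) == 0) return runtime_path;

        // Otherwise fall back to TMPDIR (or /tmp).
        const char* tmp_dir = std::getenv("TMPDIR");
        if (tmp_dir == nullptr) tmp_dir = "/tmp";
        char fallback[192];
        std::snprintf(fallback, sizeof(fallback), "%s/adb.%u.log", tmp_dir, getuid());
        return fallback;
    }

    int main() {
        std::printf("%s\n", GetLogFilePath().c_str());
        return 0;
    }
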
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/simg_dump-python3.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/simg_dump-python3.patch
new file mode 100644
index 0000000000..6664dc2aa5
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/simg_dump-python3.patch
@@ -0,0 +1,64 @@
+Description: Port simg_dump to Python 3.
+Author: Antonio Russo <antonio.e.russo@gmail.com>
+Forwarded: not-needed
+Last-Update: 2019-01-05
+Origin: https://bugs.debian.org/945646
+
+---
+Upstream-Status: Pending
+
+Index: android-platform-tools/system/core/libsparse/simg_dump.py
+===================================================================
+--- android-platform-tools.orig/system/core/libsparse/simg_dump.py
++++ android-platform-tools/system/core/libsparse/simg_dump.py
+@@ -1,4 +1,4 @@
+-#! /usr/bin/env python
++#! /usr/bin/env python3
+
+ # Copyright (C) 2012 The Android Open Source Project
+ #
+@@ -14,7 +14,7 @@
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+-from __future__ import print_function
++
+ import csv
+ import getopt
+ import hashlib
+@@ -47,7 +47,7 @@ def main():
+ opts, args = getopt.getopt(sys.argv[1:],
+ "vsc:",
+ ["verbose", "showhash", "csvfile"])
+- except getopt.GetoptError, e:
++ except getopt.GetoptError as e:
+ print(e)
+ usage(me)
+ for o, a in opts:
+@@ -66,7 +66,7 @@ def main():
+ usage(me)
+
+ if csvfilename:
+- csvfile = open(csvfilename, "wb")
++ csvfile = open(csvfilename, "w", newline='')
+ csvwriter = csv.writer(csvfile)
+
+ output = verbose or csvfilename or showhash
+@@ -121,7 +121,7 @@ def main():
+ "output offset", "output blocks", "type", "hash"])
+
+ offset = 0
+- for i in xrange(1, total_chunks + 1):
++ for i in range(1, total_chunks + 1):
+ header_bin = FH.read(12)
+ header = struct.unpack("<2H2I", header_bin)
+ chunk_type = header[0]
+@@ -160,7 +160,7 @@ def main():
+ if showhash:
+ h = hashlib.sha1()
+ data = fill_bin * (blk_sz / 4);
+- for block in xrange(chunk_sz):
++ for block in range(chunk_sz):
+ h.update(data)
+ curhash = h.hexdigest()
+ elif chunk_type == 0xCAC3:
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/stdatomic.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/stdatomic.patch
new file mode 100644
index 0000000000..e11f3cc783
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/stdatomic.patch
@@ -0,0 +1,66 @@
+Upstream-Status: Pending
+
+Description: Fix incompatibility between <stdatomic.h> and <atomic>
+ These two headers combined cause errors for both GCC and Clang. This patch
+ makes sure only one of them is present at any time.
+Bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=60932
+Bug: https://reviews.llvm.org/D45470
+--- a/system/core/libcutils/include/cutils/trace.h
++++ b/system/core/libcutils/include/cutils/trace.h
+@@ -18,7 +18,14 @@
+ #define _LIBS_CUTILS_TRACE_H
+
+ #include <inttypes.h>
++#ifdef __cplusplus
++#include <atomic>
++using std::atomic_bool;
++using std::atomic_load_explicit;
++using std::memory_order_acquire;
++#else
+ #include <stdatomic.h>
++#endif
+ #include <stdbool.h>
+ #include <stdint.h>
+ #include <stdio.h>
+--- a/system/core/libcutils/include/cutils/atomic.h
++++ b/system/core/libcutils/include/cutils/atomic.h
+@@ -19,7 +19,23 @@
+
+ #include <stdint.h>
+ #include <sys/types.h>
++#ifdef __cplusplus
++#include <atomic>
++using std::atomic_compare_exchange_strong_explicit;
++using std::atomic_fetch_add_explicit;
++using std::atomic_fetch_or_explicit;
++using std::atomic_fetch_sub_explicit;
++using std::atomic_int_least32_t;
++using std::atomic_load_explicit;
++using std::atomic_store_explicit;
++using std::atomic_thread_fence;
++using std::memory_order::memory_order_acquire;
++using std::memory_order::memory_order_relaxed;
++using std::memory_order::memory_order_release;
++using std::memory_order::memory_order_seq_cst;
++#else
+ #include <stdatomic.h>
++#endif
+
+ #ifndef ANDROID_ATOMIC_INLINE
+ #define ANDROID_ATOMIC_INLINE static inline
+--- a/system/core/liblog/logger.h
++++ b/system/core/liblog/logger.h
+@@ -16,7 +16,13 @@
+
+ #pragma once
+
++#ifdef __cplusplus
++#include <atomic>
++using std::atomic_int;
++using std::atomic_uintptr_t;
++#else
+ #include <stdatomic.h>
++#endif
+ #include <sys/cdefs.h>
+
+ #include <log/log.h>
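
The #ifdef __cplusplus guard used above keeps <stdatomic.h> out of C++ translation units and pulls the equivalent names from <atomic> instead. A stripped-down, self-contained variant of that header pattern (names are illustrative, not taken from libcutils):

    // atomic_compat_demo.c / .cpp -- compiles as C11 or C++11; illustration only.
    #ifdef __cplusplus
    #include <atomic>
    using std::atomic_bool;
    using std::atomic_load_explicit;
    using std::atomic_store_explicit;
    using std::memory_order_acquire;
    using std::memory_order_release;
    #else
    #include <stdatomic.h>
    #include <stdbool.h>
    #endif
    #include <stdio.h>

    static atomic_bool g_tracing_enabled;

    int main(void) {
        // The same spellings work in both languages thanks to the aliases above.
        atomic_store_explicit(&g_tracing_enabled, true, memory_order_release);
        bool on = atomic_load_explicit(&g_tracing_enabled, memory_order_acquire);
        printf("tracing: %d\n", (int)on);
        return 0;
    }
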
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/stub-out-fastdeploy.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/stub-out-fastdeploy.patch
new file mode 100644
index 0000000000..d86ef230f7
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/stub-out-fastdeploy.patch
@@ -0,0 +1,95 @@
+Upstream-Status: Pending
+
+Description: Defer packaging fastdeploy with adb for 29.x.x tags.
+Forwarded: not-needed
+--- a/system/core/adb/client/commandline.cpp
++++ b/system/core/adb/client/commandline.cpp
+@@ -59,7 +59,6 @@
+ #include "bugreport.h"
+ #include "client/file_sync_client.h"
+ #include "commandline.h"
+-#include "fastdeploy.h"
+ #include "services.h"
+ #include "shell_protocol.h"
+ #include "sysdeps/chrono.h"
+--- a/system/core/adb/client/adb_install.cpp
++++ b/system/core/adb/client/adb_install.cpp
+@@ -35,7 +35,6 @@
+ #include "adb_utils.h"
+ #include "client/file_sync_client.h"
+ #include "commandline.h"
+-#include "fastdeploy.h"
+
+ static constexpr int kFastDeployMinApi = 24;
+
+@@ -167,14 +166,6 @@
+ }
+
+ if (use_fastdeploy) {
+- auto metadata = extract_metadata(file);
+- if (metadata.has_value()) {
+- // pass all but 1st (command) and last (apk path) parameters through to pm for
+- // session creation
+- std::vector<const char*> pm_args{argv + 1, argv + argc - 1};
+- auto patchFd = install_patch(pm_args.size(), pm_args.data());
+- return stream_patch(file, std::move(metadata.value()), std::move(patchFd));
+- }
+ }
+
+ struct stat sb;
+@@ -267,16 +258,6 @@
+ argv[last_apk] = apk_dest.c_str(); /* destination name, not source location */
+
+ if (use_fastdeploy) {
+- auto metadata = extract_metadata(apk_file[0]);
+- if (metadata.has_value()) {
+- auto patchFd = apply_patch_on_device(apk_dest.c_str());
+- int status = stream_patch(apk_file[0], std::move(metadata.value()), std::move(patchFd));
+-
+- result = pm_command(argc, argv);
+- delete_device_file(apk_dest);
+-
+- return status;
+- }
+ }
+
+ if (do_sync_push(apk_file, apk_dest.c_str(), false)) {
+@@ -292,7 +273,6 @@
+ InstallMode installMode = INSTALL_DEFAULT;
+ bool use_fastdeploy = false;
+ bool is_reinstall = false;
+- FastDeploy_AgentUpdateStrategy agent_update_strategy = FastDeploy_AgentUpdateDifferentVersion;
+
+ for (int i = 1; i < argc; i++) {
+ if (!strcmp(argv[i], "--streaming")) {
+@@ -313,13 +293,10 @@
+ use_fastdeploy = false;
+ } else if (!strcmp(argv[i], "--force-agent")) {
+ processedArgIndicies.push_back(i);
+- agent_update_strategy = FastDeploy_AgentUpdateAlways;
+ } else if (!strcmp(argv[i], "--date-check-agent")) {
+ processedArgIndicies.push_back(i);
+- agent_update_strategy = FastDeploy_AgentUpdateNewerTimeStamp;
+ } else if (!strcmp(argv[i], "--version-check-agent")) {
+ processedArgIndicies.push_back(i);
+- agent_update_strategy = FastDeploy_AgentUpdateDifferentVersion;
+ }
+ }
+
+@@ -331,13 +308,11 @@
+ error_exit("Attempting to use streaming install on unsupported device");
+ }
+
+- if (use_fastdeploy && get_device_api_level() < kFastDeployMinApi) {
+- printf("Fast Deploy is only compatible with devices of API version %d or higher, "
+- "ignoring.\n",
+- kFastDeployMinApi);
++ if (use_fastdeploy) {
++ printf("Fast Deploy is unavailable in this build of adb, "
++ "ignoring.\n");
+ use_fastdeploy = false;
+ }
+- fastdeploy_set_agent_update_strategy(agent_update_strategy);
+
+ std::vector<const char*> passthrough_argv;
+ for (int i = 0; i < argc; i++) {
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/throw-exception-on-unknown-os.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/throw-exception-on-unknown-os.patch
new file mode 100644
index 0000000000..70e732131f
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/system/core/throw-exception-on-unknown-os.patch
@@ -0,0 +1,18 @@
+Upstream-Status: Pending
+
+Description: Turn #error into exceptions
+ So the library can be built on non-Linux platforms too, although its
+ functionality cannot be guaranteed for that piece of code.
+Forwarded: not-needed
+--- a/system/core/base/file.cpp
++++ b/system/core/base/file.cpp
+@@ -422,7 +422,8 @@
+ path[PATH_MAX - 1] = 0;
+ return path;
+ #else
+-#error unknown OS
++#include <stdexcept>
++ throw std::runtime_error(std::string("Unknown OS!"));
+ #endif
+ }
+
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/throw-exception-on-unknown-os.patch b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/throw-exception-on-unknown-os.patch
new file mode 100644
index 0000000000..70e732131f
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/debian/throw-exception-on-unknown-os.patch
@@ -0,0 +1,18 @@
+Upstream-Status: Pending
+
+Description: Turn #error into exceptions
+ So the library can be built on non-Linux platforms too, although its
+ functionality cannot be guaranteed for that piece of code.
+Forwarded: not-needed
+--- a/system/core/base/file.cpp
++++ b/system/core/base/file.cpp
+@@ -422,7 +422,8 @@
+ path[PATH_MAX - 1] = 0;
+ return path;
+ #else
+-#error unknown OS
++#include <stdexcept>
++ throw std::runtime_error(std::string("Unknown OS!"));
+ #endif
+ }
+
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/remount b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/remount
new file mode 100644
index 0000000000..751c3501ef
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/remount
@@ -0,0 +1,2 @@
+#!/bin/sh
+mount -o remount,rw /
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/rules_yocto.mk b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/rules_yocto.mk
new file mode 100644
index 0000000000..2c808d3c1e
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools/rules_yocto.mk
@@ -0,0 +1 @@
+CPPFLAGS += -fPIC
diff --git a/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools_29.0.6.r14.bb b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools_29.0.6.r14.bb
new file mode 100644
index 0000000000..fbad5e1368
--- /dev/null
+++ b/meta-oe/dynamic-layers/selinux/recipes-devtool/android-tools/android-tools_29.0.6.r14.bb
@@ -0,0 +1,194 @@
+DESCRIPTION = "Various utilities from Android"
+SECTION = "console/utils"
+LICENSE = "Apache-2.0 & GPL-2.0-only & BSD-2-Clause & BSD-3-Clause"
+LIC_FILES_CHKSUM = " \
+ file://${COMMON_LICENSE_DIR}/Apache-2.0;md5=89aea4e17d99a7cacdbeed46a0096b10 \
+ file://${COMMON_LICENSE_DIR}/GPL-2.0-only;md5=801f80980d171dd6425610833a22dbe6 \
+ file://${COMMON_LICENSE_DIR}/BSD-2-Clause;md5=cb641bc04cda31daea161b1bc15da69f \
+ file://${COMMON_LICENSE_DIR}/BSD-3-Clause;md5=550794465ba0ec5312d6919e203a55f9 \
+"
+DEPENDS = "libbsd libpcre zlib libcap libusb squashfs-tools p7zip libselinux googletest"
+
+SRCREV_core = "abfd66fafcbb691d7860df059f1df1c9b1ef29da"
+
+SRC_URI = " \
+ git://salsa.debian.org/android-tools-team/android-platform-tools;name=core;protocol=https;branch=master \
+"
+
+# Patches copied from android-platform-tools/debian/patches
+# and applied in the order defined by the file debian/patches/series
+SRC_URI += " \
+ file://debian/external/libunwind/user_pt_regs.patch \
+ file://debian/external/libunwind/legacy_built-in_sync_functions.patch \
+ file://debian/external/libunwind/20150704-CVE-2015-3239_dwarf_i.h.patch \
+ \
+ file://debian/system/core/move-log-file-to-proper-dir.patch \
+ file://debian/system/core/Added-missing-headers.patch \
+ file://debian/system/core/libusb-header-path.patch \
+ file://debian/system/core/stdatomic.patch \
+ file://debian/system/core/Nonnull.patch \
+ file://debian/system/core/Vector-cast.patch \
+ file://debian/system/core/throw-exception-on-unknown-os.patch \
+ file://debian/system/core/simg_dump-python3.patch \
+ file://debian/system/core/fix-attribute-issue-with-gcc.patch \
+ file://debian/system/core/fix-gettid-exception-declaration.patch \
+ file://debian/system/core/fix-build-on-non-x86.patch \
+ file://debian/system/core/add-missing-headers.patch \
+ file://debian/system/core/hard-code-build-number.patch \
+ file://debian/system/core/stub-out-fastdeploy.patch \
+ file://debian/system/core/fix-standard-namespace-errors.patch \
+ file://debian/system/core/Add-riscv64-support.patch \
+ \
+"
+
+# patches which don't come from debian
+SRC_URI += " \
+ file://rules_yocto.mk;subdir=git \
+ file://android-tools-adbd.service \
+ file://adbd.mk;subdir=git/debian/system/core \
+ file://remount \
+ file://0001-Fixes-for-yocto-build.patch \
+ file://0002-android-tools-modifications-to-make-it-build-in-yoct.patch \
+ file://0003-Update-usage-of-usbdevfs_urb-to-match-new-kernel-UAP.patch \
+ file://0004-adb-Fix-build-on-big-endian-systems.patch \
+ file://0005-adb-Allow-adbd-to-be-run-as-root.patch \
+"
+
+S = "${WORKDIR}/git"
+B = "${WORKDIR}/${BPN}"
+
+# http://errors.yoctoproject.org/Errors/Details/1debian881/
+ARM_INSTRUCTION_SET:armv4 = "arm"
+ARM_INSTRUCTION_SET:armv5 = "arm"
+
+COMPATIBLE_HOST:powerpc = "(null)"
+COMPATIBLE_HOST:powerpc64 = "(null)"
+COMPATIBLE_HOST:powerpc64le = "(null)"
+
+inherit systemd
+
+SYSTEMD_SERVICE:${PN}-adbd = "android-tools-adbd.service"
+
+# Find libbsd headers during native builds
+CC:append:class-native = " -I${STAGING_INCDIR}"
+CC:append:class-nativesdk = " -I${STAGING_INCDIR}"
+
+PREREQUISITE_core = "liblog libbase libsparse liblog libcutils"
+TOOLS_TO_BUILD = "libcrypto_utils libadb libziparchive fastboot adb img2simg simg2img libbacktrace"
+TOOLS_TO_BUILD:append:class-target = " adbd"
+
+do_compile() {
+
+ case "${HOST_ARCH}" in
+ arm)
+ export android_arch=linux-arm
+ cpu=arm
+ deb_host_arch=arm
+ ;;
+ aarch64)
+ export android_arch=linux-arm64
+ cpu=arm64
+ deb_host_arch=arm64
+ ;;
+ riscv64)
+ export android_arch=linux-riscv64
+ ;;
+ mips|mipsel)
+ export android_arch=linux-mips
+ cpu=mips
+ deb_host_arch=mips
+ ;;
+ mips64|mips64el)
+ export android_arch=linux-mips64
+ cpu=mips64
+ deb_host_arch=mips64
+ ;;
+ powerpc|powerpc64)
+ export android_arch=linux-ppc
+ ;;
+ i586|i686|x86_64)
+ export android_arch=linux-x86
+ cpu=x86_64
+ deb_host_arch=amd64
+ ;;
+ esac
+
+ export SRCDIR=${S}
+
+ oe_runmake -f ${S}/debian/external/boringssl/libcrypto.mk -C ${S}
+ oe_runmake -f ${S}/debian/external/libunwind/libunwind.mk -C ${S} CPU=${cpu}
+
+ for tool in ${PREREQUISITE_core}; do
+ oe_runmake -f ${S}/debian/system/core/${tool}.mk -C ${S}
+ done
+
+ for i in `find ${S}/debian/system/extras/ -name "*.mk"`; do
+ oe_runmake -f $i -C ${S}
+ done
+
+ for tool in ${TOOLS_TO_BUILD}; do
+ if [ "$tool" = "libbacktrace" ]; then
+ oe_runmake -f ${S}/debian/system/core/${tool}.mk -C ${S} DEB_HOST_ARCH=${deb_host_arch}
+ else
+ oe_runmake -f ${S}/debian/system/core/${tool}.mk -C ${S}
+ fi
+ done
+
+}
+
+do_install() {
+ install -d ${D}${base_sbindir}
+ install -m 0755 ${S}/../remount -D ${D}${base_sbindir}/remount
+
+ for tool in img2simg simg2img fastboot adbd; do
+ if echo ${TOOLS_TO_BUILD} | grep -q "$tool" ; then
+ install -D -p -m0755 ${S}/debian/out/system/core/$tool ${D}${bindir}/$tool
+ fi
+ done
+
+ # grep adb also matches adbd, so handle adb separately from other tools
+ if echo ${TOOLS_TO_BUILD} | grep -q "adb " ; then
+ install -d ${D}${bindir}
+ install -m0755 ${S}/debian/out/system/core/adb ${D}${bindir}
+ fi
+
+ # Outside the if statement to avoid errors during do_package
+ install -D -p -m0644 ${WORKDIR}/android-tools-adbd.service \
+ ${D}${systemd_unitdir}/system/android-tools-adbd.service
+
+ install -d ${D}${libdir}/android/
+ install -m0755 ${S}/debian/out/system/core/*.so.* ${D}${libdir}/android/
+ if echo ${TOOLS_TO_BUILD} | grep -q "mkbootimg" ; then
+ install -d ${D}${bindir}
+ install -m0755 ${B}/mkbootimg/mkbootimg ${D}${bindir}
+ fi
+}
+
+PACKAGES =+ "${PN}-fstools ${PN}-adbd"
+
+RDEPENDS:${BPN} = "${BPN}-conf p7zip"
+
+FILES:${PN}-adbd = "\
+ ${bindir}/adbd \
+ ${systemd_unitdir}/system/android-tools-adbd.service \
+"
+
+FILES:${PN}-fstools = "\
+ ${bindir}/ext2simg \
+ ${bindir}/ext4fixup \
+ ${bindir}/img2simg \
+ ${bindir}/make_ext4fs \
+ ${bindir}/simg2img \
+ ${bindir}/simg2simg \
+ ${bindir}/simg_dump \
+ ${bindir}/mkuserimg \
+"
+FILES:${PN} += "${libdir}/android ${libdir}/android/*"
+
+BBCLASSEXTEND = "native"
+
+android_tools_enable_devmode() {
+ touch ${IMAGE_ROOTFS}/var/usb-debugging-enabled
+}
+
+ROOTFS_POSTPROCESS_COMMAND_${PN}-adbd += "${@bb.utils.contains("USB_DEBUGGING_ENABLED", "1", "android_tools_enable_devmode;", "", d)}"