Diffstat (limited to 'scripts/contrib')
-rwxr-xr-x  scripts/contrib/build-perf-test-wrapper.sh    |  15
-rwxr-xr-x  scripts/contrib/convert-overrides.py          | 144
-rwxr-xr-x  scripts/contrib/convert-spdx-licenses.py      | 145
-rwxr-xr-x  scripts/contrib/convert-srcuri.py             |  77
-rwxr-xr-x  scripts/contrib/convert-variable-renames.py   | 116
-rwxr-xr-x  scripts/contrib/documentation-audit.sh        |   4
-rwxr-xr-x  scripts/contrib/oe-build-perf-report-email.py | 154
-rwxr-xr-x  scripts/contrib/test_build_time.sh            |   2
8 files changed, 493 insertions(+), 164 deletions(-)
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh
index fa71d4a2e9..0a85e6e708 100755
--- a/scripts/contrib/build-perf-test-wrapper.sh
+++ b/scripts/contrib/build-perf-test-wrapper.sh
@@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then
exit 1
fi
-if [ -n "$email_to" ]; then
- if ! [ -x "$(command -v phantomjs)" ]; then
- echo "ERROR: Sending email needs phantomjs."
- exit 1
- fi
- if ! [ -x "$(command -v optipng)" ]; then
- echo "ERROR: Sending email needs optipng."
- exit 1
- fi
-fi
-
# Open a file descriptor for flock and acquire lock
LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
if ! exec 3> "$LOCK_FILE"; then
- echo "ERROR: Unable to open lock file"
+ echo "ERROR: Unable to open loemack file"
exit 1
fi
if ! flock -n 3; then
@@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then
if [ -n "$email_to" ]; then
echo "Emailing test report"
os_name=`get_os_release_var PRETTY_NAME`
- "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
+ "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
fi
# Upload report files, unless we're on detached head
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py
new file mode 100755
index 0000000000..4d41a4c475
--- /dev/null
+++ b/scripts/contrib/convert-overrides.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python3
+#
+# Conversion script to add new override syntax to existing bitbake metadata
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# To use this script on a new layer you need to list the overrides the
+# layer is known to use in the list below.
+#
+# Known constraint: Matching is 'loose' and in particular will find variable
+# and function names with "_append" and "_remove" in them. Those need to be
+# filtered out manually or in the skip list below.
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+ print("Please specify a directory to run the conversion script against.")
+ sys.exit(1)
+
+# List of strings to treat as overrides
+vars = ["append", "prepend", "remove"]
+vars = vars + ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
+vars = vars + ["genericx86", "edgerouter", "beaglebone-yocto"]
+vars = vars + ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
+vars = vars + ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
+vars = vars + ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
+vars = vars + ["tune-", "pn-", "forcevariable"]
+vars = vars + ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
+vars = vars + ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
+vars = vars + ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
+vars = vars + ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
+vars = vars + ["linux-gnueabi", "eabi"]
+vars = vars + ["virtclass-multilib", "virtclass-mcextend"]
+
+# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
+# Handles issues with arc matching arch.
+shortvars = ["arc", "mips", "mipsel", "sh4"]
+
+# Variables which take packagenames as an override
+packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
+ "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
+ "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
+ "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
+ "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"]
+
+# Expressions to skip if encountered, these are not overrides
+skips = ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
+skips = skips + ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
+skips = skips + ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
+skips = skips + ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
+skips = skips + ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
+skips = skips + ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
+skips = skips + ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
+skips = skips + ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
+skips = skips + ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
+
+imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"]
+packagevars = packagevars + imagevars
+
+vars_re = {}
+for exp in vars:
+ vars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
+
+shortvars_re = {}
+for exp in shortvars:
+ shortvars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + '([\(\'"\s:])'), r"\1:" + exp + r"\3")
+
+package_re = {}
+for exp in packagevars:
+ package_re[exp] = (re.compile('(^|[#\'"\s\-\+]+)' + exp + '_' + '([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
+
+# Other substitutions to make
+subs = {
+ 'r = re.compile("([^:]+):\s*(.*)")' : 'r = re.compile("(^.+?):\s+(.*)")',
+ "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
+ "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
+ "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
+ 'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)',
+}
+
+def processfile(fn):
+ print("processing file '%s'" % fn)
+ try:
+ fh, abs_path = tempfile.mkstemp()
+ with os.fdopen(fh, 'w') as new_file:
+ with open(fn, "r") as old_file:
+ for line in old_file:
+ skip = False
+ for s in skips:
+ if s in line:
+ skip = True
+ if "ptest_append" in line or "ptest_remove" in line or "ptest_prepend" in line:
+ skip = False
+ for sub in subs:
+ if sub in line:
+ line = line.replace(sub, subs[sub])
+ skip = True
+ if not skip:
+ for pvar in packagevars:
+ line = package_re[pvar][0].sub(package_re[pvar][1], line)
+ for var in vars:
+ line = vars_re[var][0].sub(vars_re[var][1], line)
+ for shortvar in shortvars:
+ line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line)
+ if "pkg_postinst:ontarget" in line:
+ line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget")
+ new_file.write(line)
+ shutil.copymode(fn, abs_path)
+ os.remove(fn)
+ shutil.move(abs_path, fn)
+ except UnicodeDecodeError:
+ pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.9.3"
+
+if os.path.isfile(sys.argv[1]):
+ processfile(sys.argv[1])
+ sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+ print("processing directory '%s'" % targetdir)
+ for root, dirs, files in os.walk(targetdir):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
+ continue
+ processfile(fn)
+
+print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py
new file mode 100755
index 0000000000..4e194dee3f
--- /dev/null
+++ b/scripts/contrib/convert-spdx-licenses.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python3
+#
+# Conversion script to change LICENSE entries to SPDX identifiers
+#
+# Copyright (C) 2021-2022 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+ print("Please specify a directory to run the conversion script against.")
+ sys.exit(1)
+
+license_map = {
+"AGPL-3" : "AGPL-3.0-only",
+"AGPL-3+" : "AGPL-3.0-or-later",
+"AGPLv3" : "AGPL-3.0-only",
+"AGPLv3+" : "AGPL-3.0-or-later",
+"AGPLv3.0" : "AGPL-3.0-only",
+"AGPLv3.0+" : "AGPL-3.0-or-later",
+"AGPL-3.0" : "AGPL-3.0-only",
+"AGPL-3.0+" : "AGPL-3.0-or-later",
+"BSD-0-Clause" : "0BSD",
+"GPL-1" : "GPL-1.0-only",
+"GPL-1+" : "GPL-1.0-or-later",
+"GPLv1" : "GPL-1.0-only",
+"GPLv1+" : "GPL-1.0-or-later",
+"GPLv1.0" : "GPL-1.0-only",
+"GPLv1.0+" : "GPL-1.0-or-later",
+"GPL-1.0" : "GPL-1.0-only",
+"GPL-1.0+" : "GPL-1.0-or-later",
+"GPL-2" : "GPL-2.0-only",
+"GPL-2+" : "GPL-2.0-or-later",
+"GPLv2" : "GPL-2.0-only",
+"GPLv2+" : "GPL-2.0-or-later",
+"GPLv2.0" : "GPL-2.0-only",
+"GPLv2.0+" : "GPL-2.0-or-later",
+"GPL-2.0" : "GPL-2.0-only",
+"GPL-2.0+" : "GPL-2.0-or-later",
+"GPL-3" : "GPL-3.0-only",
+"GPL-3+" : "GPL-3.0-or-later",
+"GPLv3" : "GPL-3.0-only",
+"GPLv3+" : "GPL-3.0-or-later",
+"GPLv3.0" : "GPL-3.0-only",
+"GPLv3.0+" : "GPL-3.0-or-later",
+"GPL-3.0" : "GPL-3.0-only",
+"GPL-3.0+" : "GPL-3.0-or-later",
+"LGPLv2" : "LGPL-2.0-only",
+"LGPLv2+" : "LGPL-2.0-or-later",
+"LGPLv2.0" : "LGPL-2.0-only",
+"LGPLv2.0+" : "LGPL-2.0-or-later",
+"LGPL-2.0" : "LGPL-2.0-only",
+"LGPL-2.0+" : "LGPL-2.0-or-later",
+"LGPL2.1" : "LGPL-2.1-only",
+"LGPL2.1+" : "LGPL-2.1-or-later",
+"LGPLv2.1" : "LGPL-2.1-only",
+"LGPLv2.1+" : "LGPL-2.1-or-later",
+"LGPL-2.1" : "LGPL-2.1-only",
+"LGPL-2.1+" : "LGPL-2.1-or-later",
+"LGPLv3" : "LGPL-3.0-only",
+"LGPLv3+" : "LGPL-3.0-or-later",
+"LGPL-3.0" : "LGPL-3.0-only",
+"LGPL-3.0+" : "LGPL-3.0-or-later",
+"MPL-1" : "MPL-1.0",
+"MPLv1" : "MPL-1.0",
+"MPLv1.1" : "MPL-1.1",
+"MPLv2" : "MPL-2.0",
+"MIT-X" : "MIT",
+"MIT-style" : "MIT",
+"openssl" : "OpenSSL",
+"PSF" : "PSF-2.0",
+"PSFv2" : "PSF-2.0",
+"Python-2" : "Python-2.0",
+"Apachev2" : "Apache-2.0",
+"Apache-2" : "Apache-2.0",
+"Artisticv1" : "Artistic-1.0",
+"Artistic-1" : "Artistic-1.0",
+"AFL-2" : "AFL-2.0",
+"AFL-1" : "AFL-1.2",
+"AFLv2" : "AFL-2.0",
+"AFLv1" : "AFL-1.2",
+"CDDLv1" : "CDDL-1.0",
+"CDDL-1" : "CDDL-1.0",
+"EPLv1.0" : "EPL-1.0",
+"FreeType" : "FTL",
+"Nauman" : "Naumen",
+"tcl" : "TCL",
+"vim" : "Vim",
+"SGIv1" : "SGI-1",
+}
+
+def processfile(fn):
+ print("processing file '%s'" % fn)
+ try:
+ fh, abs_path = tempfile.mkstemp()
+ modified = False
+ with os.fdopen(fh, 'w') as new_file:
+ with open(fn, "r") as old_file:
+ for line in old_file:
+ if not line.startswith("LICENSE"):
+ new_file.write(line)
+ continue
+ orig = line
+ for license in sorted(license_map, key=len, reverse=True):
+ for ending in ['"', "'", " ", ")"]:
+ line = line.replace(license + ending, license_map[license] + ending)
+ if orig != line:
+ modified = True
+ new_file.write(line)
+ new_file.close()
+ if modified:
+ shutil.copymode(fn, abs_path)
+ os.remove(fn)
+ shutil.move(abs_path, fn)
+ except UnicodeDecodeError:
+ pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.01"
+
+if os.path.isfile(sys.argv[1]):
+ processfile(sys.argv[1])
+ sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+ print("processing directory '%s'" % targetdir)
+ for root, dirs, files in os.walk(targetdir):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
+ continue
+ processfile(fn)
+
+print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-srcuri.py b/scripts/contrib/convert-srcuri.py
new file mode 100755
index 0000000000..587392334f
--- /dev/null
+++ b/scripts/contrib/convert-srcuri.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+#
+# Conversion script to update SRC_URI to add branch to git urls
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+ print("Please specify a directory to run the conversion script against.")
+ sys.exit(1)
+
+def processfile(fn):
+ def matchline(line):
+ if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line:
+ return False
+ return True
+ print("processing file '%s'" % fn)
+ try:
+ if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn:
+ return
+ fh, abs_path = tempfile.mkstemp()
+ modified = False
+ with os.fdopen(fh, 'w') as new_file:
+ with open(fn, "r") as old_file:
+ for line in old_file:
+ if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line):
+ if line.endswith('"\n'):
+ line = line.replace('"\n', ';branch=master"\n')
+ elif re.search('\s*\\\\$', line):
+ line = re.sub('\s*\\\\$', ';branch=master \\\\', line)
+ modified = True
+ if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line):
+ if "protocol=git" in line:
+ line = line.replace('protocol=git', 'protocol=https')
+ elif line.endswith('"\n'):
+ line = line.replace('"\n', ';protocol=https"\n')
+ elif re.search('\s*\\\\$', line):
+ line = re.sub('\s*\\\\$', ';protocol=https \\\\', line)
+ modified = True
+ new_file.write(line)
+ if modified:
+ shutil.copymode(fn, abs_path)
+ os.remove(fn)
+ shutil.move(abs_path, fn)
+ except UnicodeDecodeError:
+ pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.1"
+
+if os.path.isfile(sys.argv[1]):
+ processfile(sys.argv[1])
+ sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+ print("processing directory '%s'" % targetdir)
+ for root, dirs, files in os.walk(targetdir):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
+ continue
+ processfile(fn)
+
+print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py
new file mode 100755
index 0000000000..856c001e11
--- /dev/null
+++ b/scripts/contrib/convert-variable-renames.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+#
+# Conversion script to rename variables to versions with improved terminology.
+# Also highlights potentially problematic language and removed variables.
+#
+# Copyright (C) 2021 Richard Purdie
+# Copyright (C) 2022 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+ print("Please specify a directory to run the conversion script against.")
+ sys.exit(1)
+
+renames = {
+"BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH",
+"BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS",
+"BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS",
+"BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
+"BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS",
+"BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS",
+"CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE",
+"CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE",
+"MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED",
+"PNBLACKLIST" : "SKIP_RECIPE",
+"SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE",
+"SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW",
+"SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE",
+"SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES",
+"SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE",
+"UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE",
+"ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE",
+"ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE",
+"ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE",
+"ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
+"ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
+"LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED",
+}
+
+removed_list = [
+"BB_STAMP_WHITELIST",
+"BB_STAMP_POLICY",
+"INHERIT_BLACKLIST",
+"TUNEABI_WHITELIST",
+]
+
+context_check_list = [
+"blacklist",
+"whitelist",
+"abort",
+]
+
+def processfile(fn):
+
+ print("processing file '%s'" % fn)
+ try:
+ fh, abs_path = tempfile.mkstemp()
+ modified = False
+ with os.fdopen(fh, 'w') as new_file:
+ with open(fn, "r") as old_file:
+ lineno = 0
+ for line in old_file:
+ lineno += 1
+ if not line or "BB_RENAMED_VARIABLE" in line:
+ continue
+ # Do the renames
+ for old_name, new_name in renames.items():
+ if old_name in line:
+ line = line.replace(old_name, new_name)
+ modified = True
+ # Find removed names
+ for removed_name in removed_list:
+ if removed_name in line:
+ print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
+ for check_word in context_check_list:
+ if re.search(check_word, line, re.IGNORECASE):
+ print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
+ new_file.write(line)
+ new_file.close()
+ if modified:
+ print("*** Modified file '%s'" % (fn))
+ shutil.copymode(fn, abs_path)
+ os.remove(fn)
+ shutil.move(abs_path, fn)
+ except UnicodeDecodeError:
+ pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.1"
+
+if os.path.isfile(sys.argv[1]):
+ processfile(sys.argv[1])
+ sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+ print("processing directory '%s'" % targetdir)
+ for root, dirs, files in os.walk(targetdir):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
+ continue
+ processfile(fn)
+
+print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh
index f436f9bae0..36f7f3287c 100755
--- a/scripts/contrib/documentation-audit.sh
+++ b/scripts/contrib/documentation-audit.sh
@@ -26,8 +26,8 @@ if [ -z "$BITBAKE" ]; then
fi
echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
-echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or "
-echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"commercial\""
+echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
+echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""
for pkg in `bitbake -s | awk '{ print \$1 }'`; do
if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py
index de3862c897..c900720f6e 100755
--- a/scripts/contrib/oe-build-perf-report-email.py
+++ b/scripts/contrib/oe-build-perf-report-email.py
@@ -19,8 +19,6 @@ import socket
import subprocess
import sys
import tempfile
-from email.mime.image import MIMEImage
-from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
@@ -29,19 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
log = logging.getLogger('oe-build-perf-report')
-# Find js scraper script
-SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf',
- 'scrape-html-report.js')
-if not os.path.isfile(SCRAPE_JS):
- log.error("Unable to find oe-build-perf-report-scrape.js")
- sys.exit(1)
-
-
-class ReportError(Exception):
- """Local errors"""
- pass
-
-
def check_utils():
"""Check that all needed utils are installed in the system"""
missing = []
@@ -77,137 +62,19 @@ def parse_args(argv):
"the email parts")
parser.add_argument('--text',
help="Plain text message")
- parser.add_argument('--html',
- help="HTML peport generated by oe-build-perf-report")
- parser.add_argument('--phantomjs-args', action='append',
- help="Extra command line arguments passed to PhantomJS")
args = parser.parse_args(argv)
- if not args.html and not args.text:
- parser.error("Please specify --html and/or --text")
+ if not args.text:
+ parser.error("Please specify --text")
return args
-def decode_png(infile, outfile):
- """Parse/decode/optimize png data from a html element"""
- with open(infile) as f:
- raw_data = f.read()
-
- # Grab raw base64 data
- b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1)
- b64_data = re.sub('">.+$', '', b64_data, 1)
-
- # Replace file with proper decoded png
- with open(outfile, 'wb') as f:
- f.write(base64.b64decode(b64_data))
-
- subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT)
-
-
-def mangle_html_report(infile, outfile, pngs):
- """Mangle html file into a email compatible format"""
- paste = True
- png_dir = os.path.dirname(outfile)
- with open(infile) as f_in:
- with open(outfile, 'w') as f_out:
- for line in f_in.readlines():
- stripped = line.strip()
- # Strip out scripts
- if stripped == '<!--START-OF-SCRIPTS-->':
- paste = False
- elif stripped == '<!--END-OF-SCRIPTS-->':
- paste = True
- elif paste:
- if re.match('^.+href="data:image/png;base64', stripped):
- # Strip out encoded pngs (as they're huge in size)
- continue
- elif 'www.gstatic.com' in stripped:
- # HACK: drop references to external static pages
- continue
-
- # Replace charts with <img> elements
- match = re.match('<div id="(?P<id>\w+)"', stripped)
- if match and match.group('id') in pngs:
- f_out.write('<img src="cid:{}"\n'.format(match.group('id')))
- else:
- f_out.write(line)
-
-
-def scrape_html_report(report, outdir, phantomjs_extra_args=None):
- """Scrape html report into a format sendable by email"""
- tmpdir = tempfile.mkdtemp(dir='.')
- log.debug("Using tmpdir %s for phantomjs output", tmpdir)
-
- if not os.path.isdir(outdir):
- os.mkdir(outdir)
- if os.path.splitext(report)[1] not in ('.html', '.htm'):
- raise ReportError("Invalid file extension for report, needs to be "
- "'.html' or '.htm'")
-
- try:
- log.info("Scraping HTML report with PhangomJS")
- extra_args = phantomjs_extra_args if phantomjs_extra_args else []
- subprocess.check_output(['phantomjs', '--debug=true'] + extra_args +
- [SCRAPE_JS, report, tmpdir],
- stderr=subprocess.STDOUT)
-
- pngs = []
- images = []
- for fname in os.listdir(tmpdir):
- base, ext = os.path.splitext(fname)
- if ext == '.png':
- log.debug("Decoding %s", fname)
- decode_png(os.path.join(tmpdir, fname),
- os.path.join(outdir, fname))
- pngs.append(base)
- images.append(fname)
- elif ext in ('.html', '.htm'):
- report_file = fname
- else:
- log.warning("Unknown file extension: '%s'", ext)
- #shutil.move(os.path.join(tmpdir, fname), outdir)
-
- log.debug("Mangling html report file %s", report_file)
- mangle_html_report(os.path.join(tmpdir, report_file),
- os.path.join(outdir, report_file), pngs)
- return (os.path.join(outdir, report_file),
- [os.path.join(outdir, i) for i in images])
- finally:
- shutil.rmtree(tmpdir)
-
-def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[],
- blind_copy=[]):
- """Send email"""
+def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]):
# Generate email message
- text_msg = html_msg = None
- if text_fn:
- with open(text_fn) as f:
- text_msg = MIMEText("Yocto build performance test report.\n" +
- f.read(), 'plain')
- if html_fn:
- html_msg = msg = MIMEMultipart('related')
- with open(html_fn) as f:
- html_msg.attach(MIMEText(f.read(), 'html'))
- for img_fn in image_fns:
- # Expect that content id is same as the filename
- cid = os.path.splitext(os.path.basename(img_fn))[0]
- with open(img_fn, 'rb') as f:
- image_msg = MIMEImage(f.read())
- image_msg['Content-ID'] = '<{}>'.format(cid)
- html_msg.attach(image_msg)
-
- if text_msg and html_msg:
- msg = MIMEMultipart('alternative')
- msg.attach(text_msg)
- msg.attach(html_msg)
- elif text_msg:
- msg = text_msg
- elif html_msg:
- msg = html_msg
- else:
- raise ReportError("Neither plain text nor html body specified")
+ with open(text_fn) as f:
+ msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')
pw_data = pwd.getpwuid(os.getuid())
full_name = pw_data.pw_gecos.split(',')[0]
@@ -245,25 +112,16 @@ def main(argv=None):
try:
log.debug("Storing email parts in %s", outdir)
- html_report = images = None
- if args.html:
- html_report, images = scrape_html_report(args.html, outdir,
- args.phantomjs_args)
-
if args.to:
log.info("Sending email to %s", ', '.join(args.to))
if args.cc:
log.info("Copying to %s", ', '.join(args.cc))
if args.bcc:
log.info("Blind copying to %s", ', '.join(args.bcc))
- send_email(args.text, html_report, images, args.subject,
- args.to, args.cc, args.bcc)
+ send_email(args.text, args.subject, args.to, args.cc, args.bcc)
except subprocess.CalledProcessError as err:
log.error("%s, with output:\n%s", str(err), err.output.decode())
return 1
- except ReportError as err:
- log.error(err)
- return 1
finally:
if not args.outdir:
log.debug("Wiping %s", outdir)
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh
index 23f238adf6..4012ac7ba7 100755
--- a/scripts/contrib/test_build_time.sh
+++ b/scripts/contrib/test_build_time.sh
@@ -97,7 +97,7 @@ if [ $? != 0 ] ; then
exit 251
fi
-if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then
+if [ "BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
fi