Diffstat (limited to 'scripts/contrib')
-rwxr-xr-x  scripts/contrib/bb-perf/bb-matrix-plot.sh      |   4
-rwxr-xr-x  scripts/contrib/bb-perf/buildstats-plot.sh     |  78
-rwxr-xr-x  scripts/contrib/bb-perf/buildstats.sh          | 139
-rwxr-xr-x  scripts/contrib/bbvars.py                      |   6
-rwxr-xr-x  scripts/contrib/build-perf-test-wrapper.sh     |  15
-rwxr-xr-x  scripts/contrib/convert-overrides.py           | 155
-rwxr-xr-x  scripts/contrib/convert-spdx-licenses.py       | 145
-rwxr-xr-x  scripts/contrib/convert-srcuri.py              |  77
-rwxr-xr-x  scripts/contrib/convert-variable-renames.py    | 116
-rwxr-xr-x  scripts/contrib/ddimage                        |   2
-rwxr-xr-x  scripts/contrib/dialog-power-control           |   2
-rwxr-xr-x  scripts/contrib/documentation-audit.sh         |   6
-rwxr-xr-x  scripts/contrib/graph-tool                     | 100
-rwxr-xr-x  scripts/contrib/image-manifest                 | 523
-rwxr-xr-x  scripts/contrib/list-packageconfig-flags.py    |   2
-rwxr-xr-x  scripts/contrib/oe-build-perf-report-email.py  | 167
-rwxr-xr-x  scripts/contrib/patchreview.py                 |  71
-rwxr-xr-x  scripts/contrib/test_build_time.sh             |   2
-rwxr-xr-x  scripts/contrib/test_build_time_worker.sh      |   2
-rwxr-xr-x  scripts/contrib/verify-homepage.py             |   2
20 files changed, 1296 insertions, 318 deletions
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh index e7bd129e9e..6672189c95 100755 --- a/scripts/contrib/bb-perf/bb-matrix-plot.sh +++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh @@ -16,8 +16,8 @@ # Setup the defaults DATFILE="bb-matrix.dat" -XLABEL="BB_NUMBER_THREADS" -YLABEL="PARALLEL_MAKE" +XLABEL="BB\\\\_NUMBER\\\\_THREADS" +YLABEL="PARALLEL\\\\_MAKE" FIELD=3 DEF_TITLE="Elapsed Time (seconds)" PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100" diff --git a/scripts/contrib/bb-perf/buildstats-plot.sh b/scripts/contrib/bb-perf/buildstats-plot.sh index 898834e5ac..45c27d0b97 100755 --- a/scripts/contrib/bb-perf/buildstats-plot.sh +++ b/scripts/contrib/bb-perf/buildstats-plot.sh @@ -39,7 +39,10 @@ set -o errexit BS_DIR="tmp/buildstats" N=10 +RECIPE="" +TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack" STATS="utime" +ACCUMULATE="" SUM="" OUTDATA_FILE="$PWD/buildstats-plot.out" @@ -51,11 +54,15 @@ Usage: $CMD [-b buildstats_dir] [-t do_task] (default: "$BS_DIR") -n N Top N recipes to display. Ignored if -S is present (default: "$N") + -r recipe The recipe mask to be searched + -t tasks The tasks to be computed + (default: "$TASKS") -s stats The stats to be matched. If more that one stat, units should be the same because data is plot as histogram. (see buildstats.sh -h for all options) or any other defined (build)stat separated by colons, i.e. stime:utime (default: "$STATS") + -a Accumulate all stats values for found recipes -S Sum values for a particular stat for found recipes -o Output data file. (default: "$OUTDATA_FILE") @@ -64,32 +71,41 @@ EOM } # Parse and validate arguments -while getopts "b:n:s:o:Sh" OPT; do - case $OPT in - b) - BS_DIR="$OPTARG" - ;; - n) - N="$OPTARG" - ;; - s) - STATS="$OPTARG" - ;; - S) - SUM="y" - ;; - o) - OUTDATA_FILE="$OPTARG" - ;; - h) - usage - exit 0 - ;; - *) - usage - exit 1 - ;; - esac +while getopts "b:n:r:t:s:o:aSh" OPT; do + case $OPT in + b) + BS_DIR="$OPTARG" + ;; + n) + N="$OPTARG" + ;; + r) + RECIPE="-r $OPTARG" + ;; + t) + TASKS="$OPTARG" + ;; + s) + STATS="$OPTARG" + ;; + a) + ACCUMULATE="-a" + ;; + S) + SUM="y" + ;; + o) + OUTDATA_FILE="$OPTARG" + ;; + h) + usage + exit 0 + ;; + *) + usage + exit 1 + ;; + esac done # Get number of stats @@ -101,10 +117,10 @@ CD=$(dirname $0) # Parse buildstats recipes to produce a single table OUTBUILDSTATS="$PWD/buildstats.log" -$CD/buildstats.sh -H -s "$STATS" -H > $OUTBUILDSTATS +$CD/buildstats.sh -b "$BS_DIR" -s "$STATS" -t "$TASKS" $RECIPE $ACCUMULATE -H > $OUTBUILDSTATS # Get headers -HEADERS=$(cat $OUTBUILDSTATS | sed -n -e '1s/ /-/g' -e '1s/:/ /gp') +HEADERS=$(cat $OUTBUILDSTATS | sed -n -e 's/\(.*\)/"\1"/' -e '1s/ /\\\\\\\\ /g' -e 's/_/\\\\\\\\_/g' -e '1s/:/" "/gp') echo -e "set boxwidth 0.9 relative" echo -e "set style data histograms" @@ -113,7 +129,7 @@ echo -e "set xtics rotate by 45 right" # Get output data if [ -z "$SUM" ]; then - cat $OUTBUILDSTATS | sed -e '1d' | sort -k3 -n -r | head -$N > $OUTDATA_FILE + cat $OUTBUILDSTATS | sed -e '1d' -e 's/_/\\\\_/g' | sort -k3 -n -r | head -$N > $OUTDATA_FILE # include task at recipe column sed -i -e "1i\ ${HEADERS}" $OUTDATA_FILE @@ -125,8 +141,8 @@ else declare -a sumargs j=0 for i in `seq $nstats`; do - sumargs[j]=sum; j=$(( $j + 1 )) - sumargs[j]=`expr 3 + $i - 1`; j=$(( $j + 1 )) + sumargs[j]=sum; j=$(( $j + 1 )) + sumargs[j]=`expr 3 + $i - 1`; j=$(( $j + 1 )) done # Do the processing with datamash diff --git a/scripts/contrib/bb-perf/buildstats.sh 
b/scripts/contrib/bb-perf/buildstats.sh index e9ec2d476a..e45cfc146d 100755 --- a/scripts/contrib/bb-perf/buildstats.sh +++ b/scripts/contrib/bb-perf/buildstats.sh @@ -36,8 +36,10 @@ Child rusage ru_majflt:Child rusage ru_inblock:Child rusage ru_oublock:Child rus Child rusage ru_nivcsw" BS_DIR="tmp/buildstats" +RECIPE="" TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack" STATS="$TIME" +ACCUMULATE="" HEADER="" # No header by default function usage { @@ -46,6 +48,7 @@ cat <<EOM Usage: $CMD [-b buildstats_dir] [-t do_task] -b buildstats The path where the folder resides (default: "$BS_DIR") + -r recipe The recipe to be computed -t tasks The tasks to be computed (default: "$TASKS") -s stats The stats to be matched. Options: TIME, IO, RUSAGE, CHILD_RUSAGE @@ -56,87 +59,109 @@ Usage: $CMD [-b buildstats_dir] [-t do_task] IO=$IO RUSAGE=$RUSAGE CHILD_RUSAGE=$CHILD_RUSAGE + -a Accumulate all stats values for found recipes -h Display this help message EOM } # Parse and validate arguments -while getopts "b:t:s:Hh" OPT; do - case $OPT in - b) - BS_DIR="$OPTARG" - ;; - t) - TASKS="$OPTARG" - ;; - s) - STATS="$OPTARG" - ;; - H) - HEADER="y" - ;; - h) - usage - exit 0 - ;; - *) - usage - exit 1 - ;; - esac +while getopts "b:r:t:s:aHh" OPT; do + case $OPT in + b) + BS_DIR="$OPTARG" + ;; + r) + RECIPE="$OPTARG" + ;; + t) + TASKS="$OPTARG" + ;; + s) + STATS="$OPTARG" + ;; + a) + ACCUMULATE="y" + ;; + H) + HEADER="y" + ;; + h) + usage + exit 0 + ;; + *) + usage + exit 1 + ;; + esac done # Ensure the buildstats folder exists if [ ! -d "$BS_DIR" ]; then - echo "ERROR: $BS_DIR does not exist" - usage - exit 1 + echo "ERROR: $BS_DIR does not exist" + usage + exit 1 fi stats="" IFS=":" for stat in ${STATS}; do - case $stat in - TIME) - stats="${stats}:${TIME}" - ;; - IO) - stats="${stats}:${IO}" - ;; - RUSAGE) - stats="${stats}:${RUSAGE}" - ;; - CHILD_RUSAGE) - stats="${stats}:${CHILD_RUSAGE}" - ;; - *) - stats="${STATS}" - esac + case $stat in + TIME) + stats="${stats}:${TIME}" + ;; + IO) + stats="${stats}:${IO}" + ;; + RUSAGE) + stats="${stats}:${RUSAGE}" + ;; + CHILD_RUSAGE) + stats="${stats}:${CHILD_RUSAGE}" + ;; + *) + stats="${STATS}" + ;; + esac done # remove possible colon at the beginning stats="$(echo "$stats" | sed -e 's/^://1')" # Provide a header if required by the user -[ -n "$HEADER" ] && { echo "task:recipe:$stats"; } +if [ -n "$HEADER" ] ; then + if [ -n "$ACCUMULATE" ]; then + echo "task:recipe:accumulated(${stats//:/;})" + else + echo "task:recipe:$stats" + fi +fi for task in ${TASKS}; do task="do_${task}" - for file in $(find ${BS_DIR} -type f -name ${task} | awk 'BEGIN{ ORS=""; OFS=":" } { print $0,"" }'); do + for file in $(find ${BS_DIR} -type f -path *${RECIPE}*/${task} | awk 'BEGIN{ ORS=""; OFS=":" } { print $0,"" }'); do recipe="$(basename $(dirname $file))" - times="" - for stat in ${stats}; do - [ -z "$stat" ] && { echo "empty stats"; } - time=$(sed -n -e "s/^\($stat\): \\(.*\\)/\\2/p" $file) - # in case the stat is not present, set the value as NA - [ -z "$time" ] && { time="NA"; } - # Append it to times - if [ -z "$times" ]; then - times="${time}" - else - times="${times} ${time}" - fi - done + times="" + for stat in ${stats}; do + [ -z "$stat" ] && { echo "empty stats"; } + time=$(sed -n -e "s/^\($stat\): \\(.*\\)/\\2/p" $file) + # in case the stat is not present, set the value as NA + [ -z "$time" ] && { time="NA"; } + # Append it to times + if [ -z "$times" ]; then + times="${time}" + else + times="${times} ${time}" + fi + done + if [ -n 
"$ACCUMULATE" ]; then + IFS=' '; valuesarray=(${times}); IFS=':' + times=0 + for value in "${valuesarray[@]}"; do + [ "$value" == "NA" ] && { echo "ERROR: stat is not present."; usage; exit 1; } + times=$(( $times + $value )) + done + fi echo "${task} ${recipe} ${times}" done done diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py index 090133600b..a9cdf082ab 100755 --- a/scripts/contrib/bbvars.py +++ b/scripts/contrib/bbvars.py @@ -36,8 +36,8 @@ def bbvar_is_documented(var, documented_vars): def collect_documented_vars(docfiles): ''' Walk the docfiles and collect the documented variables ''' documented_vars = [] - prog = re.compile(".*($|[^A-Z_])<glossentry id=\'var-") - var_prog = re.compile('<glossentry id=\'var-(.*)\'>') + prog = re.compile(r".*($|[^A-Z_])<glossentry id=\'var-") + var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>') for d in docfiles: with open(d) as f: documented_vars += var_prog.findall(f.read()) @@ -45,7 +45,7 @@ def collect_documented_vars(docfiles): return documented_vars def bbvar_doctag(var, docconf): - prog = re.compile('^%s\[doc\] *= *"(.*)"' % (var)) + prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var)) if docconf == "": return "?" diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh index fa71d4a2e9..0a85e6e708 100755 --- a/scripts/contrib/build-perf-test-wrapper.sh +++ b/scripts/contrib/build-perf-test-wrapper.sh @@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then exit 1 fi -if [ -n "$email_to" ]; then - if ! [ -x "$(command -v phantomjs)" ]; then - echo "ERROR: Sending email needs phantomjs." - exit 1 - fi - if ! [ -x "$(command -v optipng)" ]; then - echo "ERROR: Sending email needs optipng." - exit 1 - fi -fi - # Open a file descriptor for flock and acquire lock LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock" if ! exec 3> "$LOCK_FILE"; then - echo "ERROR: Unable to open lock file" + echo "ERROR: Unable to open loemack file" exit 1 fi if ! flock -n 3; then @@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then if [ -n "$email_to" ]; then echo "Emailing test report" os_name=`get_os_release_var PRETTY_NAME` - "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" + "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}" fi # Upload report files, unless we're on detached head diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py new file mode 100755 index 0000000000..c69acb4095 --- /dev/null +++ b/scripts/contrib/convert-overrides.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 +# +# Conversion script to add new override syntax to existing bitbake metadata +# +# Copyright (C) 2021 Richard Purdie +# +# SPDX-License-Identifier: GPL-2.0-only +# + +# +# To use this script on a new layer you need to list the overrides the +# layer is known to use in the list below. +# +# Known constraint: Matching is 'loose' and in particular will find variable +# and function names with "_append" and "_remove" in them. Those need to be +# filtered out manually or in the skip list below. 
+# + +import re +import os +import sys +import tempfile +import shutil +import mimetypes +import argparse + +parser = argparse.ArgumentParser(description="Convert override syntax") +parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros") +parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override") +parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')") +parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables") +parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables") +parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides") +parser.add_argument("path", nargs="+", help="Paths to convert") + +args = parser.parse_args() + +# List of strings to treat as overrides +vars = args.override +vars += ["append", "prepend", "remove"] +vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"] +vars += ["genericx86", "edgerouter", "beaglebone-yocto"] +vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"] +vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"] +vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"] +vars += ["tune-", "pn-", "forcevariable"] +vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"] +vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"] +vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"] +vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"] +vars += ["linux-gnueabi", "eabi"] +vars += ["virtclass-multilib", "virtclass-mcextend"] + +# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching). +# Handles issues with arc matching arch. 
+shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override + +# Variables which take packagenames as an override +packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY", + "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE", + "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst", + "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA", + "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars + +# Expressions to skip if encountered, these are not overrides +skips = args.skip +skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"] +skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"] +skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"] +skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"] +skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"] +skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"] +skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"] +skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"] +skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"] + +imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars +packagevars += imagevars + +skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext + +vars_re = {} +for exp in vars: + vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp) + +shortvars_re = {} +for exp in shortvars: + shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3") + +package_re = {} +for exp in packagevars: + package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2") + +# Other substitutions to make +subs = { + 'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")', + "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))", + "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))", + "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))", + 'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)', +} + +def processfile(fn): + print("processing file '%s'" % fn) + try: + fh, abs_path = tempfile.mkstemp() + with os.fdopen(fh, 'w') as new_file: + with open(fn, "r") as old_file: + for line in old_file: + skip = False + for s in skips: + if s in line: + skip = True + if "ptest_append" in line or 
"ptest_remove" in line or "ptest_prepend" in line: + skip = False + for sub in subs: + if sub in line: + line = line.replace(sub, subs[sub]) + skip = True + if not skip: + for pvar in packagevars: + line = package_re[pvar][0].sub(package_re[pvar][1], line) + for var in vars: + line = vars_re[var][0].sub(vars_re[var][1], line) + for shortvar in shortvars: + line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line) + if "pkg_postinst:ontarget" in line: + line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget") + new_file.write(line) + shutil.copymode(fn, abs_path) + os.remove(fn) + shutil.move(abs_path, fn) + except UnicodeDecodeError: + pass + +ourname = os.path.basename(sys.argv[0]) +ourversion = "0.9.3" + +for p in args.path: + if os.path.isfile(p): + processfile(p) + else: + print("processing directory '%s'" % p) + for root, dirs, files in os.walk(p): + for name in files: + if name == ourname: + continue + fn = os.path.join(root, name) + if os.path.islink(fn): + continue + if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext): + continue + processfile(fn) + +print("All files processed with version %s" % ourversion) diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py new file mode 100755 index 0000000000..4e194dee3f --- /dev/null +++ b/scripts/contrib/convert-spdx-licenses.py @@ -0,0 +1,145 @@ +#!/usr/bin/env python3 +# +# Conversion script to change LICENSE entries to SPDX identifiers +# +# Copyright (C) 2021-2022 Richard Purdie +# +# SPDX-License-Identifier: GPL-2.0-only +# + +import re +import os +import sys +import tempfile +import shutil +import mimetypes + +if len(sys.argv) < 2: + print("Please specify a directory to run the conversion script against.") + sys.exit(1) + +license_map = { +"AGPL-3" : "AGPL-3.0-only", +"AGPL-3+" : "AGPL-3.0-or-later", +"AGPLv3" : "AGPL-3.0-only", +"AGPLv3+" : "AGPL-3.0-or-later", +"AGPLv3.0" : "AGPL-3.0-only", +"AGPLv3.0+" : "AGPL-3.0-or-later", +"AGPL-3.0" : "AGPL-3.0-only", +"AGPL-3.0+" : "AGPL-3.0-or-later", +"BSD-0-Clause" : "0BSD", +"GPL-1" : "GPL-1.0-only", +"GPL-1+" : "GPL-1.0-or-later", +"GPLv1" : "GPL-1.0-only", +"GPLv1+" : "GPL-1.0-or-later", +"GPLv1.0" : "GPL-1.0-only", +"GPLv1.0+" : "GPL-1.0-or-later", +"GPL-1.0" : "GPL-1.0-only", +"GPL-1.0+" : "GPL-1.0-or-later", +"GPL-2" : "GPL-2.0-only", +"GPL-2+" : "GPL-2.0-or-later", +"GPLv2" : "GPL-2.0-only", +"GPLv2+" : "GPL-2.0-or-later", +"GPLv2.0" : "GPL-2.0-only", +"GPLv2.0+" : "GPL-2.0-or-later", +"GPL-2.0" : "GPL-2.0-only", +"GPL-2.0+" : "GPL-2.0-or-later", +"GPL-3" : "GPL-3.0-only", +"GPL-3+" : "GPL-3.0-or-later", +"GPLv3" : "GPL-3.0-only", +"GPLv3+" : "GPL-3.0-or-later", +"GPLv3.0" : "GPL-3.0-only", +"GPLv3.0+" : "GPL-3.0-or-later", +"GPL-3.0" : "GPL-3.0-only", +"GPL-3.0+" : "GPL-3.0-or-later", +"LGPLv2" : "LGPL-2.0-only", +"LGPLv2+" : "LGPL-2.0-or-later", +"LGPLv2.0" : "LGPL-2.0-only", +"LGPLv2.0+" : "LGPL-2.0-or-later", +"LGPL-2.0" : "LGPL-2.0-only", +"LGPL-2.0+" : "LGPL-2.0-or-later", +"LGPL2.1" : "LGPL-2.1-only", +"LGPL2.1+" : "LGPL-2.1-or-later", +"LGPLv2.1" : "LGPL-2.1-only", +"LGPLv2.1+" : "LGPL-2.1-or-later", +"LGPL-2.1" : "LGPL-2.1-only", +"LGPL-2.1+" : "LGPL-2.1-or-later", +"LGPLv3" : "LGPL-3.0-only", +"LGPLv3+" : "LGPL-3.0-or-later", +"LGPL-3.0" : "LGPL-3.0-only", +"LGPL-3.0+" : "LGPL-3.0-or-later", +"MPL-1" : "MPL-1.0", +"MPLv1" : "MPL-1.0", +"MPLv1.1" : "MPL-1.1", +"MPLv2" : "MPL-2.0", +"MIT-X" : "MIT", +"MIT-style" : "MIT", +"openssl" : "OpenSSL", +"PSF" : "PSF-2.0", +"PSFv2" : "PSF-2.0", +"Python-2" : 
"Python-2.0", +"Apachev2" : "Apache-2.0", +"Apache-2" : "Apache-2.0", +"Artisticv1" : "Artistic-1.0", +"Artistic-1" : "Artistic-1.0", +"AFL-2" : "AFL-2.0", +"AFL-1" : "AFL-1.2", +"AFLv2" : "AFL-2.0", +"AFLv1" : "AFL-1.2", +"CDDLv1" : "CDDL-1.0", +"CDDL-1" : "CDDL-1.0", +"EPLv1.0" : "EPL-1.0", +"FreeType" : "FTL", +"Nauman" : "Naumen", +"tcl" : "TCL", +"vim" : "Vim", +"SGIv1" : "SGI-1", +} + +def processfile(fn): + print("processing file '%s'" % fn) + try: + fh, abs_path = tempfile.mkstemp() + modified = False + with os.fdopen(fh, 'w') as new_file: + with open(fn, "r") as old_file: + for line in old_file: + if not line.startswith("LICENSE"): + new_file.write(line) + continue + orig = line + for license in sorted(license_map, key=len, reverse=True): + for ending in ['"', "'", " ", ")"]: + line = line.replace(license + ending, license_map[license] + ending) + if orig != line: + modified = True + new_file.write(line) + new_file.close() + if modified: + shutil.copymode(fn, abs_path) + os.remove(fn) + shutil.move(abs_path, fn) + except UnicodeDecodeError: + pass + +ourname = os.path.basename(sys.argv[0]) +ourversion = "0.01" + +if os.path.isfile(sys.argv[1]): + processfile(sys.argv[1]) + sys.exit(0) + +for targetdir in sys.argv[1:]: + print("processing directory '%s'" % targetdir) + for root, dirs, files in os.walk(targetdir): + for name in files: + if name == ourname: + continue + fn = os.path.join(root, name) + if os.path.islink(fn): + continue + if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"): + continue + processfile(fn) + +print("All files processed with version %s" % ourversion) diff --git a/scripts/contrib/convert-srcuri.py b/scripts/contrib/convert-srcuri.py new file mode 100755 index 0000000000..587392334f --- /dev/null +++ b/scripts/contrib/convert-srcuri.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 +# +# Conversion script to update SRC_URI to add branch to git urls +# +# Copyright (C) 2021 Richard Purdie +# +# SPDX-License-Identifier: GPL-2.0-only +# + +import re +import os +import sys +import tempfile +import shutil +import mimetypes + +if len(sys.argv) < 2: + print("Please specify a directory to run the conversion script against.") + sys.exit(1) + +def processfile(fn): + def matchline(line): + if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line: + return False + return True + print("processing file '%s'" % fn) + try: + if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn: + return + fh, abs_path = tempfile.mkstemp() + modified = False + with os.fdopen(fh, 'w') as new_file: + with open(fn, "r") as old_file: + for line in old_file: + if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line): + if line.endswith('"\n'): + line = line.replace('"\n', ';branch=master"\n') + elif re.search('\s*\\\\$', line): + line = re.sub('\s*\\\\$', ';branch=master \\\\', line) + modified = True + if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line): + if "protocol=git" in line: + line = line.replace('protocol=git', 'protocol=https') + elif line.endswith('"\n'): + line = line.replace('"\n', ';protocol=https"\n') + elif re.search('\s*\\\\$', line): + line = re.sub('\s*\\\\$', ';protocol=https \\\\', line) + modified = True + new_file.write(line) + if modified: + shutil.copymode(fn, abs_path) + os.remove(fn) + shutil.move(abs_path, fn) + except UnicodeDecodeError: + pass + +ourname = 
os.path.basename(sys.argv[0]) +ourversion = "0.1" + +if os.path.isfile(sys.argv[1]): + processfile(sys.argv[1]) + sys.exit(0) + +for targetdir in sys.argv[1:]: + print("processing directory '%s'" % targetdir) + for root, dirs, files in os.walk(targetdir): + for name in files: + if name == ourname: + continue + fn = os.path.join(root, name) + if os.path.islink(fn): + continue + if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"): + continue + processfile(fn) + +print("All files processed with version %s" % ourversion) diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py new file mode 100755 index 0000000000..eded90ca61 --- /dev/null +++ b/scripts/contrib/convert-variable-renames.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 +# +# Conversion script to rename variables to versions with improved terminology. +# Also highlights potentially problematic language and removed variables. +# +# Copyright (C) 2021 Richard Purdie +# Copyright (C) 2022 Wind River Systems, Inc. +# +# SPDX-License-Identifier: GPL-2.0-only +# + +import re +import os +import sys +import tempfile +import shutil +import mimetypes + +if len(sys.argv) < 2: + print("Please specify a directory to run the conversion script against.") + sys.exit(1) + +renames = { +"BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH", +"BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS", +"BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS", +"BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS", +"BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS", +"BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS", +"CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE", +"CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE", +"MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED", +"PNBLACKLIST" : "SKIP_RECIPE", +"SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE", +"SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW", +"SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE", +"SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES", +"SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE", +"UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE", +"ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE", +"ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE", +"ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE", +"ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE", +"ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE", +"LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED", +} + +removed_list = [ +"BB_STAMP_WHITELIST", +"BB_STAMP_POLICY", +"INHERIT_BLACKLIST", +"TUNEABI_WHITELIST", +] + +context_check_list = [ +"blacklist", +"whitelist", +"abort", +] + +def processfile(fn): + + print("processing file '%s'" % fn) + try: + fh, abs_path = tempfile.mkstemp() + modified = False + with os.fdopen(fh, 'w') as new_file: + with open(fn, "r") as old_file: + lineno = 0 + for line in old_file: + lineno += 1 + if not line or "BB_RENAMED_VARIABLE" in line: + continue + # Do the renames + for old_name, new_name in renames.items(): + if old_name in line: + line = line.replace(old_name, new_name) + modified = True + # Find removed names + for removed_name in removed_list: + if removed_name in line: + print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name)) + for check_word in context_check_list: + if re.search(check_word, line, re.IGNORECASE): + print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word)) + new_file.write(line) + 
new_file.close() + if modified: + print("*** Modified file '%s'" % (fn)) + shutil.copymode(fn, abs_path) + os.remove(fn) + shutil.move(abs_path, fn) + except UnicodeDecodeError: + pass + +ourname = os.path.basename(sys.argv[0]) +ourversion = "0.1" + +if os.path.isfile(sys.argv[1]): + processfile(sys.argv[1]) + sys.exit(0) + +for targetdir in sys.argv[1:]: + print("processing directory '%s'" % targetdir) + for root, dirs, files in os.walk(targetdir): + for name in files: + if name == ourname: + continue + fn = os.path.join(root, name) + if os.path.islink(fn): + continue + if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"): + continue + processfile(fn) + +print("All files processed with version %s" % ourversion) diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage index 7f2ad112a6..70eee8ebea 100755 --- a/scripts/contrib/ddimage +++ b/scripts/contrib/ddimage @@ -1,5 +1,7 @@ #!/bin/sh # +# Copyright OpenEmbedded Contributors +# # SPDX-License-Identifier: GPL-2.0-only # diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control index ad6070c369..82c84baa1d 100755 --- a/scripts/contrib/dialog-power-control +++ b/scripts/contrib/dialog-power-control @@ -1,5 +1,7 @@ #!/bin/sh # +# Copyright OpenEmbedded Contributors +# # SPDX-License-Identifier: GPL-2.0-only # # Simple script to show a manual power prompt for when you want to use diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh index 1191f57a8e..7197a2fcea 100755 --- a/scripts/contrib/documentation-audit.sh +++ b/scripts/contrib/documentation-audit.sh @@ -1,5 +1,7 @@ #!/bin/bash # +# Copyright OpenEmbedded Contributors +# # SPDX-License-Identifier: GPL-2.0-only # # Perform an audit of which packages provide documentation and which @@ -26,8 +28,8 @@ if [ -z "$BITBAKE" ]; then fi echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results" -echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or " -echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"Commercial\"" +echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or " +echo " you'll get false positives. 
For example, LICENSE_FLAGS_ACCEPTED = \"commercial\"" for pkg in `bitbake -s | awk '{ print \$1 }'`; do if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" || diff --git a/scripts/contrib/graph-tool b/scripts/contrib/graph-tool index 6d2e68b82e..26488930e0 100755 --- a/scripts/contrib/graph-tool +++ b/scripts/contrib/graph-tool @@ -11,6 +11,13 @@ # import sys +import os +import argparse + +scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib')) +sys.path.insert(0, scripts_lib_path) +import argparse_oe + def get_path_networkx(dotfile, fromnode, tonode): try: @@ -34,47 +41,78 @@ def get_path_networkx(dotfile, fromnode, tonode): return networkx.all_simple_paths(graph, source=fromnode, target=tonode) -def find_paths(args, usage): - if len(args) < 3: - usage() - sys.exit(1) - - fromnode = args[1] - tonode = args[2] - +def find_paths(args): path = None - for path in get_path_networkx(args[0], fromnode, tonode): + for path in get_path_networkx(args.dotfile, args.fromnode, args.tonode): print(" -> ".join(map(str, path))) if not path: - print("ERROR: no path from %s to %s in graph" % (fromnode, tonode)) - sys.exit(1) + print("ERROR: no path from %s to %s in graph" % (args.fromnode, args.tonode)) + return 1 + + +def filter_graph(args): + import fnmatch + + exclude_tasks = [] + if args.exclude_tasks: + for task in args.exclude_tasks.split(','): + if not task.startswith('do_'): + task = 'do_%s' % task + exclude_tasks.append(task) + + def checkref(strval): + strval = strval.strip().strip('"') + target, taskname = strval.rsplit('.', 1) + if exclude_tasks: + for extask in exclude_tasks: + if fnmatch.fnmatch(taskname, extask): + return False + if strval in args.ref or target in args.ref: + return True + return False + + with open(args.infile, 'r') as f: + for line in f: + line = line.rstrip() + if line.startswith(('digraph', '}')): + print(line) + elif '->' in line: + linesplit = line.split('->') + if checkref(linesplit[0]) and checkref(linesplit[1]): + print(line) + elif (not args.no_nodes) and checkref(line.split()[0]): + print(line) + def main(): - import optparse - parser = optparse.OptionParser( - usage = '''%prog [options] <command> <arguments> + parser = argparse_oe.ArgumentParser(description='Small utility for working with .dot graph files') -Available commands: - find-paths <dotfile> <from> <to> - Find all of the paths between two nodes in a dot graph''') + subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>') + subparsers.required = True - #parser.add_option("-d", "--debug", - # help = "Report all SRCREV values, not just ones where AUTOREV has been used", - # action="store_true", dest="debug", default=False) + parser_find_paths = subparsers.add_parser('find-paths', + help='Find all of the paths between two nodes in a dot graph', + description='Finds all of the paths between two nodes in a dot graph') + parser_find_paths.add_argument('dotfile', help='.dot graph to search in') + parser_find_paths.add_argument('fromnode', help='starting node name') + parser_find_paths.add_argument('tonode', help='ending node name') + parser_find_paths.set_defaults(func=find_paths) - options, args = parser.parse_args(sys.argv) - args = args[1:] + parser_filter = subparsers.add_parser('filter', + help='Pare down a task graph to contain only the specified references', + description='Pares down a task-depends.dot graph produced by bitbake -g to contain only the specified references') + parser_filter.add_argument('infile', help='Input file') + 
parser_filter.add_argument('ref', nargs='+', help='Reference to include (either recipe/target name or full target.taskname specification)') + parser_filter.add_argument('-n', '--no-nodes', action='store_true', help='Skip node formatting lines') + parser_filter.add_argument('-x', '--exclude-tasks', help='Comma-separated list of tasks to exclude (do_ prefix optional, wildcards allowed)') + parser_filter.set_defaults(func=filter_graph) - if len(args) < 1: - parser.print_help() - sys.exit(1) + args = parser.parse_args() - if args[0] == "find-paths": - find_paths(args[1:], parser.print_help) - else: - parser.print_help() - sys.exit(1) + ret = args.func(args) + return ret if __name__ == "__main__": - main() + ret = main() + sys.exit(ret) diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest new file mode 100755 index 0000000000..4d65a99258 --- /dev/null +++ b/scripts/contrib/image-manifest @@ -0,0 +1,523 @@ +#!/usr/bin/env python3 + +# Script to extract information from image manifests +# +# Copyright (C) 2018 Intel Corporation +# Copyright (C) 2021 Wind River Systems, Inc. +# +# SPDX-License-Identifier: GPL-2.0-only +# + +import sys +import os +import argparse +import logging +import json +import shutil +import tempfile +import tarfile +from collections import OrderedDict + +scripts_path = os.path.dirname(__file__) +lib_path = scripts_path + '/../lib' +sys.path = sys.path + [lib_path] + +import scriptutils +logger = scriptutils.logger_create(os.path.basename(__file__)) + +import argparse_oe +import scriptpath +bitbakepath = scriptpath.add_bitbake_lib_path() +if not bitbakepath: + logger.error("Unable to find bitbake by searching parent directory of this script or PATH") + sys.exit(1) +logger.debug('Using standard bitbake path %s' % bitbakepath) +scriptpath.add_oe_lib_path() + +import bb.tinfoil +import bb.utils +import oe.utils +import oe.recipeutils + +def get_pkg_list(manifest): + pkglist = [] + with open(manifest, 'r') as f: + for line in f: + linesplit = line.split() + if len(linesplit) == 3: + # manifest file + pkglist.append(linesplit[0]) + elif len(linesplit) == 1: + # build dependency file + pkglist.append(linesplit[0]) + return sorted(pkglist) + +def list_packages(args): + pkglist = get_pkg_list(args.manifest) + for pkg in pkglist: + print('%s' % pkg) + +def pkg2recipe(tinfoil, pkg): + if "-native" in pkg: + logger.info('skipping %s' % pkg) + return None + + pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR') + pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg) + logger.debug('pkgdatafile %s' % pkgdatafile) + try: + f = open(pkgdatafile, 'r') + for line in f: + if line.startswith('PN:'): + recipe = line.split(':', 1)[1].strip() + return recipe + except Exception: + logger.warning('%s is missing' % pkgdatafile) + return None + +def get_recipe_list(manifest, tinfoil): + pkglist = get_pkg_list(manifest) + recipelist = [] + for pkg in pkglist: + recipe = pkg2recipe(tinfoil,pkg) + if recipe: + if not recipe in recipelist: + recipelist.append(recipe) + + return sorted(recipelist) + +def list_recipes(args): + import bb.tinfoil + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.logger.setLevel(logger.getEffectiveLevel()) + tinfoil.prepare(config_only=True) + recipelist = get_recipe_list(args.manifest, tinfoil) + for recipe in sorted(recipelist): + print('%s' % recipe) + +def list_layers(args): + + def find_git_repo(pth): + checkpth = pth + while checkpth != os.sep: + if os.path.exists(os.path.join(checkpth, '.git')): + return checkpth + checkpth = 
os.path.dirname(checkpth) + return None + + def get_git_remote_branch(repodir): + try: + stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir) + except bb.process.ExecutionError as e: + stdout = None + if stdout: + return stdout.strip() + else: + return None + + def get_git_head_commit(repodir): + try: + stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir) + except bb.process.ExecutionError as e: + stdout = None + if stdout: + return stdout.strip() + else: + return None + + def get_git_repo_url(repodir, remote='origin'): + import bb.process + # Try to get upstream repo location from origin remote + try: + stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir) + except bb.process.ExecutionError as e: + stdout = None + if stdout: + for line in stdout.splitlines(): + splitline = line.split() + if len(splitline) > 1: + if splitline[0] == remote and scriptutils.is_src_url(splitline[1]): + return splitline[1] + return None + + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.logger.setLevel(logger.getEffectiveLevel()) + tinfoil.prepare(config_only=False) + layers = OrderedDict() + for layerdir in tinfoil.config_data.getVar('BBLAYERS').split(): + layerdata = OrderedDict() + layername = os.path.basename(layerdir) + logger.debug('layername %s, layerdir %s' % (layername, layerdir)) + if layername in layers: + logger.warning('layername %s is not unique in configuration' % layername) + layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir) + logger.debug('trying layername %s' % layername) + if layername in layers: + logger.error('Layer name %s is not unique in configuration' % layername) + sys.exit(2) + repodir = find_git_repo(layerdir) + if repodir: + remotebranch = get_git_remote_branch(repodir) + remote = 'origin' + if remotebranch and '/' in remotebranch: + rbsplit = remotebranch.split('/', 1) + layerdata['actual_branch'] = rbsplit[1] + remote = rbsplit[0] + layerdata['vcs_url'] = get_git_repo_url(repodir, remote) + if os.path.abspath(repodir) != os.path.abspath(layerdir): + layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir) + commit = get_git_head_commit(repodir) + if commit: + layerdata['vcs_commit'] = commit + layers[layername] = layerdata + + json.dump(layers, args.output, indent=2) + +def get_recipe(args): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.logger.setLevel(logger.getEffectiveLevel()) + tinfoil.prepare(config_only=True) + + recipe = pkg2recipe(tinfoil, args.package) + print(' %s package provided by %s' % (args.package, recipe)) + +def pkg_dependencies(args): + def get_recipe_info(tinfoil, recipe): + try: + info = tinfoil.get_recipe_info(recipe) + except Exception: + logger.error('Failed to get recipe info for: %s' % recipe) + sys.exit(1) + if not info: + logger.warning('No recipe info found for: %s' % recipe) + sys.exit(1) + append_files = tinfoil.get_file_appends(info.fn) + appends = True + data = tinfoil.parse_recipe_file(info.fn, appends, append_files) + data.pn = info.pn + data.pv = info.pv + return data + + def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order): + spaces = ' ' * order + data = recipe_info[rn] + if args.native: + logger.debug('%s- %s' % (spaces, data.pn)) + elif "-native" not in data.pn: + if "cross" not in data.pn: + logger.debug('%s- %s' % (spaces, data.pn)) + + depends = [] + for dep in data.depends: + if dep not in assume_provided: + depends.append(dep) + + # First find all dependencies not in package 
list. + for dep in depends: + if dep not in packages: + packages.append(dep) + dep_data = get_recipe_info(tinfoil, dep) + # Do this once now to reduce the number of bitbake calls. + dep_data.depends = dep_data.getVar('DEPENDS').split() + recipe_info[dep] = dep_data + + # Then recursively analyze all of the dependencies for the current recipe. + for dep in depends: + find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1) + + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.logger.setLevel(logger.getEffectiveLevel()) + tinfoil.prepare() + + assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split() + logger.debug('assumed provided:') + for ap in sorted(assume_provided): + logger.debug(' - %s' % ap) + + recipe = pkg2recipe(tinfoil, args.package) + data = get_recipe_info(tinfoil, recipe) + data.depends = [] + depends = data.getVar('DEPENDS').split() + for dep in depends: + if dep not in assume_provided: + data.depends.append(dep) + + recipe_info = dict([(recipe, data)]) + packages = [] + find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1) + + print('\nThe following packages are required to build %s' % recipe) + for p in sorted(packages): + data = recipe_info[p] + if "-native" not in data.pn: + if "cross" not in data.pn: + print(" %s (%s)" % (data.pn,p)) + + if args.native: + print('\nThe following native packages are required to build %s' % recipe) + for p in sorted(packages): + data = recipe_info[p] + if "-native" in data.pn: + print(" %s(%s)" % (data.pn,p)) + if "cross" in data.pn: + print(" %s(%s)" % (data.pn,p)) + +def default_config(): + vlist = OrderedDict() + vlist['PV'] = 'yes' + vlist['SUMMARY'] = 'no' + vlist['DESCRIPTION'] = 'no' + vlist['SECTION'] = 'no' + vlist['LICENSE'] = 'yes' + vlist['HOMEPAGE'] = 'no' + vlist['BUGTRACKER'] = 'no' + vlist['PROVIDES'] = 'no' + vlist['BBCLASSEXTEND'] = 'no' + vlist['DEPENDS'] = 'no' + vlist['PACKAGECONFIG'] = 'no' + vlist['SRC_URI'] = 'yes' + vlist['SRCREV'] = 'yes' + vlist['EXTRA_OECONF'] = 'no' + vlist['EXTRA_OESCONS'] = 'no' + vlist['EXTRA_OECMAKE'] = 'no' + vlist['EXTRA_OEMESON'] = 'no' + + clist = OrderedDict() + clist['variables'] = vlist + clist['filepath'] = 'no' + clist['sha256sum'] = 'no' + clist['layerdir'] = 'no' + clist['layer'] = 'no' + clist['inherits'] = 'no' + clist['source_urls'] = 'no' + clist['packageconfig_opts'] = 'no' + clist['patches'] = 'no' + clist['packagedir'] = 'no' + return clist + +def dump_config(args): + config = default_config() + f = open('default_config.json', 'w') + json.dump(config, f, indent=2) + logger.info('Default config list dumped to default_config.json') + +def export_manifest_info(args): + + def handle_value(value): + if value: + return oe.utils.squashspaces(value) + else: + return value + + if args.config: + logger.debug('config: %s' % args.config) + f = open(args.config, 'r') + config = json.load(f, object_pairs_hook=OrderedDict) + else: + config = default_config() + if logger.isEnabledFor(logging.DEBUG): + print('Configuration:') + json.dump(config, sys.stdout, indent=2) + print('') + + tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-') + logger.debug('tmp dir: %s' % tmpoutdir) + + # export manifest + shutil.copy2(args.manifest,os.path.join(tmpoutdir, "manifest")) + + with bb.tinfoil.Tinfoil(tracking=True) as tinfoil: + tinfoil.logger.setLevel(logger.getEffectiveLevel()) + tinfoil.prepare(config_only=False) + + pkglist = get_pkg_list(args.manifest) + # export pkg list + f = open(os.path.join(tmpoutdir, 
"pkgs"), 'w') + for pkg in pkglist: + f.write('%s\n' % pkg) + f.close() + + recipelist = [] + for pkg in pkglist: + recipe = pkg2recipe(tinfoil,pkg) + if recipe: + if not recipe in recipelist: + recipelist.append(recipe) + recipelist.sort() + # export recipe list + f = open(os.path.join(tmpoutdir, "recipes"), 'w') + for recipe in recipelist: + f.write('%s\n' % recipe) + f.close() + + try: + rvalues = OrderedDict() + for pn in sorted(recipelist): + logger.debug('Package: %s' % pn) + rd = tinfoil.parse_recipe(pn) + + rvalues[pn] = OrderedDict() + + for varname in config['variables']: + if config['variables'][varname] == 'yes': + rvalues[pn][varname] = handle_value(rd.getVar(varname)) + + fpth = rd.getVar('FILE') + layerdir = oe.recipeutils.find_layerdir(fpth) + if config['filepath'] == 'yes': + rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir) + if config['sha256sum'] == 'yes': + rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth) + + if config['layerdir'] == 'yes': + rvalues[pn]['layerdir'] = layerdir + + if config['layer'] == 'yes': + rvalues[pn]['layer'] = os.path.basename(layerdir) + + if config['inherits'] == 'yes': + gr = set(tinfoil.config_data.getVar("__inherit_cache") or []) + lr = set(rd.getVar("__inherit_cache") or []) + rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr}) + + if config['source_urls'] == 'yes': + rvalues[pn]['source_urls'] = [] + for url in (rd.getVar('SRC_URI') or '').split(): + if not url.startswith('file://'): + url = url.split(';')[0] + rvalues[pn]['source_urls'].append(url) + + if config['packageconfig_opts'] == 'yes': + rvalues[pn]['packageconfig_opts'] = OrderedDict() + for key in rd.getVarFlags('PACKAGECONFIG').keys(): + if key == 'doc': + continue + rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key) + + if config['patches'] == 'yes': + patches = oe.recipeutils.get_recipe_patches(rd) + rvalues[pn]['patches'] = [] + if patches: + recipeoutdir = os.path.join(tmpoutdir, pn, 'patches') + bb.utils.mkdirhier(recipeoutdir) + for patch in patches: + # Patches may be in other layers too + patchlayerdir = oe.recipeutils.find_layerdir(patch) + # patchlayerdir will be None for remote patches, which we ignore + # (since currently they are considered as part of sources) + if patchlayerdir: + rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir))) + shutil.copy(patch, recipeoutdir) + + if config['packagedir'] == 'yes': + pn_dir = os.path.join(tmpoutdir, pn) + bb.utils.mkdirhier(pn_dir) + f = open(os.path.join(pn_dir, 'recipe.json'), 'w') + json.dump(rvalues[pn], f, indent=2) + f.close() + + with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f: + json.dump(rvalues, f, indent=2) + + if args.output: + outname = os.path.basename(args.output) + else: + outname = os.path.splitext(os.path.basename(args.manifest))[0] + if outname.endswith('.tar.gz'): + outname = outname[:-7] + elif outname.endswith('.tgz'): + outname = outname[:-4] + + tarfn = outname + if tarfn.endswith(os.sep): + tarfn = tarfn[:-1] + if not tarfn.endswith(('.tar.gz', '.tgz')): + tarfn += '.tar.gz' + with open(tarfn, 'wb') as f: + with tarfile.open(None, "w:gz", f) as tar: + tar.add(tmpoutdir, outname) + finally: + shutil.rmtree(tmpoutdir) + + +def main(): + parser = argparse_oe.ArgumentParser(description="Image manifest utility", + epilog="Use %(prog)s <subcommand> --help to get help on a specific command") + parser.add_argument('-d', '--debug', help='Enable debug 
output', action='store_true') + parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true') + subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>') + subparsers.required = True + + # get recipe info + parser_get_recipes = subparsers.add_parser('recipe-info', + help='Get recipe info', + description='Get recipe information for a package') + parser_get_recipes.add_argument('package', help='Package name') + parser_get_recipes.set_defaults(func=get_recipe) + + # list runtime dependencies + parser_pkg_dep = subparsers.add_parser('list-depends', + help='List dependencies', + description='List dependencies required to build the package') + parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true') + parser_pkg_dep.add_argument('package', help='Package name') + parser_pkg_dep.set_defaults(func=pkg_dependencies) + + # list recipes + parser_recipes = subparsers.add_parser('list-recipes', + help='List recipes producing packages within an image', + description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata') + parser_recipes.add_argument('manifest', help='Manifest file') + parser_recipes.set_defaults(func=list_recipes) + + # list packages + parser_packages = subparsers.add_parser('list-packages', + help='List packages within an image', + description='Lists packages that went into an image, using the manifest') + parser_packages.add_argument('manifest', help='Manifest file') + parser_packages.set_defaults(func=list_packages) + + # list layers + parser_layers = subparsers.add_parser('list-layers', + help='List included layers', + description='Lists included layers') + parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified', + default=sys.stdout, type=argparse.FileType('w')) + parser_layers.set_defaults(func=list_layers) + + # dump default configuration file + parser_dconfig = subparsers.add_parser('dump-config', + help='Dump default config', + description='Dump default config to default_config.json') + parser_dconfig.set_defaults(func=dump_config) + + # export recipe info for packages in manifest + parser_export = subparsers.add_parser('manifest-info', + help='Export recipe info for a manifest', + description='Export recipe information using the manifest') + parser_export.add_argument('-c', '--config', help='load config from json file') + parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified') + parser_export.add_argument('manifest', help='Manifest file') + parser_export.set_defaults(func=export_manifest_info) + + args = parser.parse_args() + + if args.debug: + logger.setLevel(logging.DEBUG) + logger.debug("Debug Enabled") + elif args.quiet: + logger.setLevel(logging.ERROR) + + ret = args.func(args) + + return ret + + +if __name__ == "__main__": + try: + ret = main() + except Exception: + ret = 1 + import traceback + traceback.print_exc() + sys.exit(ret) diff --git a/scripts/contrib/list-packageconfig-flags.py b/scripts/contrib/list-packageconfig-flags.py index d6de4dc84d..bb288e9099 100755 --- a/scripts/contrib/list-packageconfig-flags.py +++ b/scripts/contrib/list-packageconfig-flags.py @@ -33,7 +33,7 @@ import bb.tinfoil def get_fnlist(bbhandler, pkg_pn, preferred): ''' Get all recipe file names ''' if preferred: - (latest_versions, preferred_versions) = bb.providers.findProviders(bbhandler.config_data, 
bbhandler.cooker.recipecaches[''], pkg_pn) + (latest_versions, preferred_versions, required_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn) fn_list = [] for pn in sorted(pkg_pn): diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py index de3862c897..7192113c28 100755 --- a/scripts/contrib/oe-build-perf-report-email.py +++ b/scripts/contrib/oe-build-perf-report-email.py @@ -19,8 +19,6 @@ import socket import subprocess import sys import tempfile -from email.mime.image import MIMEImage -from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText @@ -29,30 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") log = logging.getLogger('oe-build-perf-report') -# Find js scaper script -SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf', - 'scrape-html-report.js') -if not os.path.isfile(SCRAPE_JS): - log.error("Unableto find oe-build-perf-report-scrape.js") - sys.exit(1) - - -class ReportError(Exception): - """Local errors""" - pass - - -def check_utils(): - """Check that all needed utils are installed in the system""" - missing = [] - for cmd in ('phantomjs', 'optipng'): - if not shutil.which(cmd): - missing.append(cmd) - if missing: - log.error("The following tools are missing: %s", ' '.join(missing)) - sys.exit(1) - - def parse_args(argv): """Parse command line arguments""" description = """Email build perf test report""" @@ -77,137 +51,19 @@ def parse_args(argv): "the email parts") parser.add_argument('--text', help="Plain text message") - parser.add_argument('--html', - help="HTML peport generated by oe-build-perf-report") - parser.add_argument('--phantomjs-args', action='append', - help="Extra command line arguments passed to PhantomJS") args = parser.parse_args(argv) - if not args.html and not args.text: - parser.error("Please specify --html and/or --text") + if not args.text: + parser.error("Please specify --text") return args -def decode_png(infile, outfile): - """Parse/decode/optimize png data from a html element""" - with open(infile) as f: - raw_data = f.read() - - # Grab raw base64 data - b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1) - b64_data = re.sub('">.+$', '', b64_data, 1) - - # Replace file with proper decoded png - with open(outfile, 'wb') as f: - f.write(base64.b64decode(b64_data)) - - subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT) - - -def mangle_html_report(infile, outfile, pngs): - """Mangle html file into a email compatible format""" - paste = True - png_dir = os.path.dirname(outfile) - with open(infile) as f_in: - with open(outfile, 'w') as f_out: - for line in f_in.readlines(): - stripped = line.strip() - # Strip out scripts - if stripped == '<!--START-OF-SCRIPTS-->': - paste = False - elif stripped == '<!--END-OF-SCRIPTS-->': - paste = True - elif paste: - if re.match('^.+href="data:image/png;base64', stripped): - # Strip out encoded pngs (as they're huge in size) - continue - elif 'www.gstatic.com' in stripped: - # HACK: drop references to external static pages - continue - - # Replace charts with <img> elements - match = re.match('<div id="(?P<id>\w+)"', stripped) - if match and match.group('id') in pngs: - f_out.write('<img src="cid:{}"\n'.format(match.group('id'))) - else: - f_out.write(line) - - -def scrape_html_report(report, outdir, phantomjs_extra_args=None): - """Scrape html report into a format sendable by email""" - 
tmpdir = tempfile.mkdtemp(dir='.') - log.debug("Using tmpdir %s for phantomjs output", tmpdir) - - if not os.path.isdir(outdir): - os.mkdir(outdir) - if os.path.splitext(report)[1] not in ('.html', '.htm'): - raise ReportError("Invalid file extension for report, needs to be " - "'.html' or '.htm'") - - try: - log.info("Scraping HTML report with PhangomJS") - extra_args = phantomjs_extra_args if phantomjs_extra_args else [] - subprocess.check_output(['phantomjs', '--debug=true'] + extra_args + - [SCRAPE_JS, report, tmpdir], - stderr=subprocess.STDOUT) - - pngs = [] - images = [] - for fname in os.listdir(tmpdir): - base, ext = os.path.splitext(fname) - if ext == '.png': - log.debug("Decoding %s", fname) - decode_png(os.path.join(tmpdir, fname), - os.path.join(outdir, fname)) - pngs.append(base) - images.append(fname) - elif ext in ('.html', '.htm'): - report_file = fname - else: - log.warning("Unknown file extension: '%s'", ext) - #shutil.move(os.path.join(tmpdir, fname), outdir) - - log.debug("Mangling html report file %s", report_file) - mangle_html_report(os.path.join(tmpdir, report_file), - os.path.join(outdir, report_file), pngs) - return (os.path.join(outdir, report_file), - [os.path.join(outdir, i) for i in images]) - finally: - shutil.rmtree(tmpdir) - -def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[], - blind_copy=[]): - """Send email""" +def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]): # Generate email message - text_msg = html_msg = None - if text_fn: - with open(text_fn) as f: - text_msg = MIMEText("Yocto build performance test report.\n" + - f.read(), 'plain') - if html_fn: - html_msg = msg = MIMEMultipart('related') - with open(html_fn) as f: - html_msg.attach(MIMEText(f.read(), 'html')) - for img_fn in image_fns: - # Expect that content id is same as the filename - cid = os.path.splitext(os.path.basename(img_fn))[0] - with open(img_fn, 'rb') as f: - image_msg = MIMEImage(f.read()) - image_msg['Content-ID'] = '<{}>'.format(cid) - html_msg.attach(image_msg) - - if text_msg and html_msg: - msg = MIMEMultipart('alternative') - msg.attach(text_msg) - msg.attach(html_msg) - elif text_msg: - msg = text_msg - elif html_msg: - msg = html_msg - else: - raise ReportError("Neither plain text nor html body specified") + with open(text_fn) as f: + msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain') pw_data = pwd.getpwuid(os.getuid()) full_name = pw_data.pw_gecos.split(',')[0] @@ -234,8 +90,6 @@ def main(argv=None): if args.debug: log.setLevel(logging.DEBUG) - check_utils() - if args.outdir: outdir = args.outdir if not os.path.exists(outdir): @@ -245,25 +99,16 @@ def main(argv=None): try: log.debug("Storing email parts in %s", outdir) - html_report = images = None - if args.html: - html_report, images = scrape_html_report(args.html, outdir, - args.phantomjs_args) - if args.to: log.info("Sending email to %s", ', '.join(args.to)) if args.cc: log.info("Copying to %s", ', '.join(args.cc)) if args.bcc: log.info("Blind copying to %s", ', '.join(args.bcc)) - send_email(args.text, html_report, images, args.subject, - args.to, args.cc, args.bcc) + send_email(args.text, args.subject, args.to, args.cc, args.bcc) except subprocess.CalledProcessError as err: log.error("%s, with output:\n%s", str(err), err.output.decode()) return 1 - except ReportError as err: - log.error(err) - return 1 finally: if not args.outdir: log.debug("Wiping %s", outdir) diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py index 
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
index 62c509f51c..bceae06561 100755
--- a/scripts/contrib/patchreview.py
+++ b/scripts/contrib/patchreview.py
@@ -1,14 +1,25 @@
 #! /usr/bin/env python3
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
+import argparse
+import collections
+import json
+import os
+import os.path
+import pathlib
+import re
+import subprocess
+
 # TODO
 # - option to just list all broken files
 # - test suite
 # - validate signed-off-by
 
-status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied")
+status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream")
 
 class Result:
     # Whether the patch has an Upstream-Status or not
@@ -33,20 +44,18 @@ def blame_patch(patch):
     From a patch filename, return a list of "commit summary (author name <author
     email>)" strings representing the history.
     """
-    import subprocess
     return subprocess.check_output(("git", "log",
                                     "--follow", "--find-renames", "--diff-filter=A",
                                     "--format=%s (%aN <%aE>)",
                                     "--", patch)).decode("utf-8").splitlines()
 
-def patchreview(path, patches):
-    import re, os.path
+def patchreview(patches):
 
     # General pattern: start of line, optional whitespace, tag with optional
     # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
     # insensitive.
     sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
-    status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE)
+    status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)
     cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
     cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)
 
@@ -54,11 +63,10 @@ def patchreview(path, patches):
 
     for patch in patches:
 
-        fullpath = os.path.join(path, patch)
         result = Result()
-        results[fullpath] = result
+        results[patch] = result
 
-        content = open(fullpath, encoding='ascii', errors='ignore').read()
+        content = open(patch, encoding='ascii', errors='ignore').read()
 
         # Find the Signed-off-by tag
         match = sob_re.search(content)
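Aside: the widened character class in status_re is what makes the new "inactive-upstream" status value matchable at all; the old (\w*) group stopped at the first hyphen. A quick illustrative check (not part of the patch):

    import re

    status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)",
                           re.IGNORECASE | re.MULTILINE)
    match = status_re.search("Upstream-Status: Inactive-Upstream\n")
    print(match.group(2))   # "Inactive-Upstream"; the old (\w*) would have captured only "Inactive"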
@@ -191,29 +199,56 @@ Patches in Pending state: %s""" % (total_patches,
 def histogram(results):
     from toolz import recipes, dicttoolz
     import math
+
     counts = recipes.countby(lambda r: r.upstream_status, results.values())
     bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
     for k in bars:
         print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
 
+def find_layers(candidate):
+    # candidate can either be the path to a layer directly (eg meta-intel), or a
+    # repository that contains other layers (meta-arm). We can determine what by
+    # looking for a conf/layer.conf file. If that file exists then it's a layer,
+    # otherwise its a repository of layers and we can assume they're called
+    # meta-*.
+
+    if (candidate / "conf" / "layer.conf").exists():
+        return [candidate.absolute()]
+    else:
+        return [d.absolute() for d in candidate.iterdir() if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))]
+
+# TODO these don't actually handle dynamic-layers/
+def gather_patches(layers):
+    patches = []
+    for directory in layers:
+        filenames = subprocess.check_output(("git", "-C", directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"), universal_newlines=True).split()
+        patches += [os.path.join(directory, f) for f in filenames]
+    return patches
+
+def count_recipes(layers):
+    count = 0
+    for directory in layers:
+        output = subprocess.check_output(["git", "-C", directory, "ls-files", "recipes-*/**/*.bb"], universal_newlines=True)
+        count += len(output.splitlines())
+    return count
 
 if __name__ == "__main__":
-    import argparse, subprocess, os
-
     args = argparse.ArgumentParser(description="Patch Review Tool")
     args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
     args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
     args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
     args.add_argument("-j", "--json", help="update JSON")
-    args.add_argument("directory", help="directory to scan")
+    args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)")
     args = args.parse_args()
 
-    patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split()
-    results = patchreview(args.directory, patches)
+    layers = find_layers(args.directory)
+    print(f"Found layers {' '.join((d.name for d in layers))}")
+    patches = gather_patches(layers)
+    results = patchreview(patches)
     analyse(results, want_blame=args.blame, verbose=args.verbose)
 
     if args.json:
-        import json, os.path, collections
         if os.path.isfile(args.json):
             data = json.load(open(args.json))
         else:
@@ -221,7 +256,11 @@ if __name__ == "__main__":
 
         row = collections.Counter()
         row["total"] = len(results)
-        row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip()
+        row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip()
+        row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip()
+        row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip()
+        row['recipe_count'] = count_recipes(layers)
+
         for r in results.values():
             if r.upstream_status in status_values:
                 row[r.upstream_status] += 1
@@ -231,7 +270,7 @@ if __name__ == "__main__":
             row['malformed-sob'] += 1
 
         data.append(row)
-        json.dump(data, open(args.json, "w"))
+        json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t")
 
     if args.histogram:
         print()
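For orientation, this is roughly how the new helpers chain together when the tool is pointed at a repository that holds several layers. Purely illustrative: it assumes find_layers, gather_patches and patchreview are in scope (i.e. running inside patchreview.py), and the "meta-openembedded" path is an example, not something the patch references.

    import pathlib

    repo = pathlib.Path("meta-openembedded")   # a repository containing meta-* layers
    layers = find_layers(repo)                 # each subdirectory named meta or meta-*
    patches = gather_patches(layers)           # git-tracked recipes-*/**/*.patch and *.diff
    results = patchreview(patches)             # parse Upstream-Status / Signed-off-by / CVE tags
    print(f"{len(patches)} patches across {len(layers)} layers")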
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh
index 23f238adf6..4012ac7ba7 100755
--- a/scripts/contrib/test_build_time.sh
+++ b/scripts/contrib/test_build_time.sh
@@ -97,7 +97,7 @@ if [ $? != 0 ] ; then
     exit 251
 fi
 
-if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then
+if [ "$BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
     echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
 fi
 
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh
index 478e8b0d03..a2879d2336 100755
--- a/scripts/contrib/test_build_time_worker.sh
+++ b/scripts/contrib/test_build_time_worker.sh
@@ -1,5 +1,7 @@
 #!/bin/bash
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # This is an example script to be used in conjunction with test_build_time.sh
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py
index 7bffa78e23..a90b5010bc 100755
--- a/scripts/contrib/verify-homepage.py
+++ b/scripts/contrib/verify-homepage.py
@@ -1,5 +1,7 @@
 #!/usr/bin/env python3
 #
+# Copyright OpenEmbedded Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 # This script can be used to verify HOMEPAGE values for all recipes in